Diffstat (limited to 'src')
-rw-r--r--src/actors-migration/scala/actors/MigrationSystem.scala36
-rw-r--r--src/actors-migration/scala/actors/Pattern.scala25
-rw-r--r--src/actors-migration/scala/actors/Props.scala13
-rw-r--r--src/actors-migration/scala/actors/StashingActor.scala255
-rw-r--r--src/actors-migration/scala/actors/Timeout.scala39
-rw-r--r--src/actors/scala/actors/ActorRef.scala119
-rw-r--r--src/actors/scala/actors/ActorTask.scala1
-rw-r--r--src/actors/scala/actors/InternalActor.scala53
-rw-r--r--src/actors/scala/actors/MQueue.scala14
-rw-r--r--src/actors/scala/actors/Reactor.scala10
-rw-r--r--src/actors/scala/actors/ReplyReactor.scala7
-rw-r--r--src/actors/scala/actors/package.scala2
-rw-r--r--src/asm/scala/tools/asm/AnnotationVisitor.java157
-rw-r--r--src/asm/scala/tools/asm/AnnotationWriter.java322
-rw-r--r--src/asm/scala/tools/asm/Attribute.java254
-rw-r--r--src/asm/scala/tools/asm/ByteVector.java293
-rw-r--r--src/asm/scala/tools/asm/ClassReader.java2216
-rw-r--r--src/asm/scala/tools/asm/ClassVisitor.java277
-rw-r--r--src/asm/scala/tools/asm/ClassWriter.java1672
-rw-r--r--src/asm/scala/tools/asm/CustomAttr.java20
-rw-r--r--src/asm/scala/tools/asm/Edge.java75
-rw-r--r--src/asm/scala/tools/asm/FieldVisitor.java115
-rw-r--r--src/asm/scala/tools/asm/FieldWriter.java271
-rw-r--r--src/asm/scala/tools/asm/Frame.java1435
-rw-r--r--src/asm/scala/tools/asm/Handle.java159
-rw-r--r--src/asm/scala/tools/asm/Handler.java118
-rw-r--r--src/asm/scala/tools/asm/Item.java297
-rw-r--r--src/asm/scala/tools/asm/Label.java555
-rw-r--r--src/asm/scala/tools/asm/MethodVisitor.java588
-rw-r--r--src/asm/scala/tools/asm/MethodWriter.java2666
-rw-r--r--src/asm/scala/tools/asm/Opcodes.java358
-rw-r--r--src/asm/scala/tools/asm/Type.java865
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureReader.java229
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureVisitor.java228
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureWriter.java227
-rw-r--r--src/asm/scala/tools/asm/tree/AbstractInsnNode.java238
-rw-r--r--src/asm/scala/tools/asm/tree/AnnotationNode.java224
-rw-r--r--src/asm/scala/tools/asm/tree/ClassNode.java371
-rw-r--r--src/asm/scala/tools/asm/tree/FieldInsnNode.java106
-rw-r--r--src/asm/scala/tools/asm/tree/FieldNode.java243
-rw-r--r--src/asm/scala/tools/asm/tree/FrameNode.java211
-rw-r--r--src/asm/scala/tools/asm/tree/IincInsnNode.java80
-rw-r--r--src/asm/scala/tools/asm/tree/InnerClassNode.java101
-rw-r--r--src/asm/scala/tools/asm/tree/InsnList.java578
-rw-r--r--src/asm/scala/tools/asm/tree/InsnNode.java84
-rw-r--r--src/asm/scala/tools/asm/tree/IntInsnNode.java84
-rw-r--r--src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java100
-rw-r--r--src/asm/scala/tools/asm/tree/JumpInsnNode.java92
-rw-r--r--src/asm/scala/tools/asm/tree/LabelNode.java78
-rw-r--r--src/asm/scala/tools/asm/tree/LdcInsnNode.java77
-rw-r--r--src/asm/scala/tools/asm/tree/LineNumberNode.java82
-rw-r--r--src/asm/scala/tools/asm/tree/LocalVariableNode.java115
-rw-r--r--src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java116
-rw-r--r--src/asm/scala/tools/asm/tree/MethodInsnNode.java107
-rw-r--r--src/asm/scala/tools/asm/tree/MethodNode.java645
-rw-r--r--src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java81
-rw-r--r--src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java115
-rw-r--r--src/asm/scala/tools/asm/tree/TryCatchBlockNode.java94
-rw-r--r--src/asm/scala/tools/asm/tree/TypeInsnNode.java87
-rw-r--r--src/asm/scala/tools/asm/tree/VarInsnNode.java90
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Analyzer.java549
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java64
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java365
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicValue.java108
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java459
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Frame.java709
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Interpreter.java204
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java329
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SmallSet.java134
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java206
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SourceValue.java97
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Subroutine.java93
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Value.java45
-rw-r--r--src/asm/scala/tools/asm/util/ASMifiable.java53
-rw-r--r--src/asm/scala/tools/asm/util/ASMifier.java1238
-rw-r--r--src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java142
-rw-r--r--src/asm/scala/tools/asm/util/CheckClassAdapter.java603
-rw-r--r--src/asm/scala/tools/asm/util/CheckFieldAdapter.java97
-rw-r--r--src/asm/scala/tools/asm/util/CheckMethodAdapter.java1668
-rw-r--r--src/asm/scala/tools/asm/util/CheckSignatureAdapter.java329
-rw-r--r--src/asm/scala/tools/asm/util/Printer.java558
-rw-r--r--src/asm/scala/tools/asm/util/SignatureChecker.java47
-rw-r--r--src/asm/scala/tools/asm/util/Textifiable.java54
-rw-r--r--src/asm/scala/tools/asm/util/Textifier.java1286
-rw-r--r--src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java96
-rw-r--r--src/asm/scala/tools/asm/util/TraceClassVisitor.java232
-rw-r--r--src/asm/scala/tools/asm/util/TraceFieldVisitor.java78
-rw-r--r--src/asm/scala/tools/asm/util/TraceMethodVisitor.java264
-rw-r--r--src/asm/scala/tools/asm/util/TraceSignatureVisitor.java318
-rw-r--r--src/attic/README2
-rw-r--r--src/attic/scala/tools/nsc/models/SemanticTokens.scala701
-rw-r--r--src/attic/scala/tools/nsc/models/Signatures.scala84
-rw-r--r--src/attic/scala/tools/nsc/symtab/SymbolWalker.scala252
-rw-r--r--src/build/genprod.scala249
-rw-r--r--src/build/maven/maven-deploy.xml6
-rw-r--r--src/build/maven/scala-actors-migration-pom.xml66
-rw-r--r--src/build/maven/scala-dbc-pom.xml61
-rw-r--r--src/build/maven/scala-library-pom.xml6
-rw-r--r--src/build/pack.xml11
-rw-r--r--src/compiler/scala/reflect/internal/AnnotationInfos.scala36
-rw-r--r--src/compiler/scala/reflect/internal/ClassfileConstants.scala2
-rw-r--r--src/compiler/scala/reflect/internal/Constants.scala1
-rw-r--r--src/compiler/scala/reflect/internal/Definitions.scala558
-rw-r--r--src/compiler/scala/reflect/internal/Flags.scala116
-rw-r--r--src/compiler/scala/reflect/internal/FrontEnds.scala75
-rw-r--r--src/compiler/scala/reflect/internal/HasFlags.scala95
-rw-r--r--src/compiler/scala/reflect/internal/Importers.scala14
-rw-r--r--src/compiler/scala/reflect/internal/InfoTransformers.scala4
-rw-r--r--src/compiler/scala/reflect/internal/NameManglers.scala216
-rw-r--r--src/compiler/scala/reflect/internal/Names.scala26
-rw-r--r--src/compiler/scala/reflect/internal/Phase.scala10
-rw-r--r--src/compiler/scala/reflect/internal/Reporters.scala74
-rw-r--r--src/compiler/scala/reflect/internal/Required.scala5
-rw-r--r--src/compiler/scala/reflect/internal/Scopes.scala2
-rw-r--r--src/compiler/scala/reflect/internal/StdAttachments.scala10
-rw-r--r--src/compiler/scala/reflect/internal/StdNames.scala551
-rw-r--r--src/compiler/scala/reflect/internal/SymbolCreations.scala113
-rw-r--r--src/compiler/scala/reflect/internal/SymbolFlags.scala176
-rw-r--r--src/compiler/scala/reflect/internal/SymbolTable.scala63
-rw-r--r--src/compiler/scala/reflect/internal/Symbols.scala309
-rw-r--r--src/compiler/scala/reflect/internal/TreeBuildUtil.scala8
-rw-r--r--src/compiler/scala/reflect/internal/TreeGen.scala18
-rw-r--r--src/compiler/scala/reflect/internal/TreeInfo.scala101
-rw-r--r--src/compiler/scala/reflect/internal/TreePrinters.scala17
-rw-r--r--src/compiler/scala/reflect/internal/Trees.scala29
-rw-r--r--src/compiler/scala/reflect/internal/TypeDebugging.scala27
-rw-r--r--src/compiler/scala/reflect/internal/Types.scala403
-rw-r--r--src/compiler/scala/reflect/internal/pickling/UnPickler.scala18
-rw-r--r--src/compiler/scala/reflect/internal/settings/MutableSettings.scala3
-rw-r--r--src/compiler/scala/reflect/internal/transform/Erasure.scala9
-rw-r--r--src/compiler/scala/reflect/internal/util/Collections.scala46
-rw-r--r--src/compiler/scala/reflect/internal/util/Origins.scala86
-rw-r--r--src/compiler/scala/reflect/internal/util/TraceSymbolActivity.scala2
-rw-r--r--src/compiler/scala/reflect/makro/runtime/AbortMacroException.scala (renamed from src/compiler/scala/reflect/makro/runtime/Errors.scala)0
-rw-r--r--src/compiler/scala/reflect/makro/runtime/Context.scala5
-rw-r--r--src/compiler/scala/reflect/makro/runtime/Enclosures.scala39
-rw-r--r--src/compiler/scala/reflect/makro/runtime/FrontEnds.scala (renamed from src/compiler/scala/reflect/makro/runtime/Reporters.scala)12
-rw-r--r--src/compiler/scala/reflect/makro/runtime/Reifiers.scala63
-rw-r--r--src/compiler/scala/reflect/makro/runtime/Symbols.scala2
-rw-r--r--src/compiler/scala/reflect/makro/runtime/Traces.scala8
-rw-r--r--src/compiler/scala/reflect/makro/runtime/Typers.scala35
-rw-r--r--src/compiler/scala/reflect/makro/util/Traces.scala18
-rw-r--r--src/compiler/scala/reflect/reify/Errors.scala11
-rw-r--r--src/compiler/scala/reflect/reify/Reifier.scala (renamed from src/compiler/scala/reflect/reify/Reifiers.scala)34
-rw-r--r--src/compiler/scala/reflect/reify/codegen/AnnotationInfos.scala56
-rw-r--r--src/compiler/scala/reflect/reify/codegen/Symbols.scala125
-rw-r--r--src/compiler/scala/reflect/reify/codegen/Trees.scala49
-rw-r--r--src/compiler/scala/reflect/reify/codegen/Types.scala166
-rw-r--r--src/compiler/scala/reflect/reify/package.scala43
-rw-r--r--src/compiler/scala/reflect/reify/phases/Calculate.scala9
-rw-r--r--src/compiler/scala/reflect/reify/phases/Metalevels.scala6
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reify.scala27
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala6
-rw-r--r--src/compiler/scala/reflect/runtime/AbstractFile.scala11
-rw-r--r--src/compiler/scala/reflect/runtime/JavaToScala.scala25
-rw-r--r--src/compiler/scala/reflect/runtime/Mirror.scala2
-rw-r--r--src/compiler/scala/reflect/runtime/Settings.scala3
-rw-r--r--src/compiler/scala/reflect/runtime/SynchronizedOps.scala2
-rw-r--r--src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala14
-rw-r--r--src/compiler/scala/reflect/runtime/ToolBoxes.scala50
-rw-r--r--src/compiler/scala/tools/ant/Scaladoc.scala70
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-unix.tmpl12
-rw-r--r--src/compiler/scala/tools/cmd/FromString.scala6
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala81
-rw-r--r--src/compiler/scala/tools/cmd/program/Scmp.scala60
-rw-r--r--src/compiler/scala/tools/cmd/program/Simple.scala81
-rw-r--r--src/compiler/scala/tools/cmd/program/Tokens.scala106
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala22
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala4
-rw-r--r--src/compiler/scala/tools/nsc/CompilerCommand.scala2
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala560
-rw-r--r--src/compiler/scala/tools/nsc/InterpreterCommand.scala8
-rw-r--r--src/compiler/scala/tools/nsc/MainInterpreter.scala13
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala1
-rw-r--r--src/compiler/scala/tools/nsc/Phases.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ToolBoxes.scala15
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala37
-rw-r--r--src/compiler/scala/tools/nsc/ast/FreeVars.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/NodePrinters.scala83
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala1
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala7
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala99
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala74
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala340
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala54
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala14
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Tokens.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala22
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala22
-rw-r--r--src/compiler/scala/tools/nsc/backend/MSILPlatform.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/Platform.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala7
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala7
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala24
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala69
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Primitives.scala19
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala40
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala3291
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala70
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala47
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocFactory.scala6
-rw-r--r--src/compiler/scala/tools/nsc/doc/Settings.scala131
-rw-r--r--src/compiler/scala/tools/nsc/doc/Uncompilable.scala6
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala3
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala12
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala8
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Index.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Template.scala168
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gifbin0 -> 167 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css298
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js91
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js158
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js5442
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.pngbin0 -> 1150 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.pngbin0 -> 646 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css101
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js155
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/Entity.scala107
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala3
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala123
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala520
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/model/TreeFactory.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala1
-rw-r--r--src/compiler/scala/tools/nsc/interactive/BuildManager.scala1
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala8
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RangePositions.scala3
-rw-r--r--src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala25
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/FindOccurrences.scala28
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala22
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala27
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala21
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Completion.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala59
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Dossiers.scala54
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Eval.scala33
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala125
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala56
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoop.scala287
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala38
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/IMain.scala205
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala1
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala35
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineReader.scala3
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/KeyBinding.scala39
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Line.scala107
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala1
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/NamedParam.scala15
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Phased.scala1
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Power.scala54
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala44
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala57
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplProps.scala7
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala9
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplTokens.scala285
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplVals.scala34
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/RichClass.scala13
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Runner.scala11
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala1
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala156
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala44
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/package.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala10
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/session/package.scala1
-rw-r--r--src/compiler/scala/tools/nsc/io/AbstractFile.scala3
-rw-r--r--src/compiler/scala/tools/nsc/io/ClassAndJarInfo.scala44
-rw-r--r--src/compiler/scala/tools/nsc/io/File.scala1
-rw-r--r--src/compiler/scala/tools/nsc/io/Jar.scala1
-rw-r--r--src/compiler/scala/tools/nsc/io/NullPrintStream.scala37
-rw-r--r--src/compiler/scala/tools/nsc/io/Path.scala1
-rw-r--r--src/compiler/scala/tools/nsc/io/Pickler.scala8
-rw-r--r--src/compiler/scala/tools/nsc/io/PlainFile.scala1
-rw-r--r--src/compiler/scala/tools/nsc/io/Sources.scala86
-rw-r--r--src/compiler/scala/tools/nsc/io/package.scala8
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala19
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala1
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatchSupport.scala6
-rw-r--r--src/compiler/scala/tools/nsc/matching/Matrix.scala3
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala1
-rw-r--r--src/compiler/scala/tools/nsc/matching/PatternBindings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/package.scala5
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala2
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugins.scala4
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala18
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/AestheticSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala11
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala6
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala28
-rw-r--r--src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala84
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTable.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala265
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala166
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala15
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala1
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala76
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala92
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala8
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala17
-rw-r--r--src/compiler/scala/tools/nsc/transform/InfoTransform.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala53
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala95
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala279
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala129
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala7
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala331
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala1
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala138
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala74
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala206
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala30
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala16
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala295
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala170
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala919
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala61
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Modes.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala111
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala98
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala1739
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala3169
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala123
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala10
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala48
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Taggings.scala71
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala41
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala766
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala24
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala92
-rw-r--r--src/compiler/scala/tools/nsc/util/CommandLineParser.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/Exceptional.scala132
-rw-r--r--src/compiler/scala/tools/nsc/util/FlagsUtil.scala233
-rw-r--r--src/compiler/scala/tools/nsc/util/Indenter.scala85
-rw-r--r--src/compiler/scala/tools/nsc/util/JavaStackFrame.scala71
-rw-r--r--src/compiler/scala/tools/nsc/util/Position.scala10
-rw-r--r--src/compiler/scala/tools/nsc/util/ProxyReport.scala146
-rw-r--r--src/compiler/scala/tools/nsc/util/RegexCache.scala40
-rw-r--r--src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala35
-rw-r--r--src/compiler/scala/tools/nsc/util/ScalaPrefs.scala25
-rw-r--r--src/compiler/scala/tools/nsc/util/ShowPickled.scala34
-rw-r--r--src/compiler/scala/tools/nsc/util/Statistics.scala84
-rw-r--r--src/compiler/scala/tools/nsc/util/Tracer.scala73
-rw-r--r--src/compiler/scala/tools/nsc/util/WeakHashSet.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/WorkScheduler.scala4
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala2
-rw-r--r--src/compiler/scala/tools/reflect/Invoked.scala52
-rw-r--r--src/compiler/scala/tools/reflect/Mock.scala60
-rw-r--r--src/compiler/scala/tools/reflect/Shield.scala44
-rw-r--r--src/compiler/scala/tools/reflect/SigParser.scala42
-rw-r--r--src/compiler/scala/tools/reflect/UniversalFn.scala59
-rw-r--r--src/compiler/scala/tools/reflect/package.scala43
-rw-r--r--src/compiler/scala/tools/util/AbstractTimer.scala53
-rw-r--r--src/compiler/scala/tools/util/ClassPathSettings.scala32
-rw-r--r--src/compiler/scala/tools/util/EditDistance.scala70
-rw-r--r--src/compiler/scala/tools/util/Javap.scala2
-rw-r--r--src/compiler/scala/tools/util/Profiling.scala52
-rw-r--r--src/compiler/scala/tools/util/SignalManager.scala275
-rw-r--r--src/compiler/scala/tools/util/Signallable.scala65
-rw-r--r--src/compiler/scala/tools/util/SocketConnection.scala52
-rw-r--r--src/compiler/scala/tools/util/StringOps.scala10
-rw-r--r--src/compiler/scala/tools/util/Which.scala38
-rw-r--r--src/compiler/scala/tools/util/color/Ansi.scala58
-rw-r--r--src/compiler/scala/tools/util/color/AnsiAtom.scala51
-rw-r--r--src/compiler/scala/tools/util/color/CString.scala37
-rw-r--r--src/compiler/scala/tools/util/color/ColorNames.scala391
-rw-r--r--src/compiler/scala/tools/util/color/package.scala22
-rw-r--r--src/continuations/library/scala/util/continuations/ControlContext.scala2
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala88
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala190
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala48
-rw-r--r--src/dbc/scala/dbc/DataType.scala69
-rw-r--r--src/dbc/scala/dbc/Database.scala187
-rw-r--r--src/dbc/scala/dbc/Syntax.scala47
-rw-r--r--src/dbc/scala/dbc/Utilities.scala28
-rw-r--r--src/dbc/scala/dbc/Value.scala27
-rw-r--r--src/dbc/scala/dbc/Vendor.scala41
-rw-r--r--src/dbc/scala/dbc/datatype/ApproximateNumeric.scala57
-rw-r--r--src/dbc/scala/dbc/datatype/Boolean.scala31
-rw-r--r--src/dbc/scala/dbc/datatype/Character.scala40
-rw-r--r--src/dbc/scala/dbc/datatype/CharacterLargeObject.scala31
-rw-r--r--src/dbc/scala/dbc/datatype/CharacterString.scala24
-rw-r--r--src/dbc/scala/dbc/datatype/CharacterVarying.scala41
-rw-r--r--src/dbc/scala/dbc/datatype/ExactNumeric.scala65
-rw-r--r--src/dbc/scala/dbc/datatype/Factory.scala250
-rw-r--r--src/dbc/scala/dbc/datatype/Numeric.scala32
-rw-r--r--src/dbc/scala/dbc/datatype/String.scala24
-rw-r--r--src/dbc/scala/dbc/datatype/Unknown.scala34
-rw-r--r--src/dbc/scala/dbc/exception/IncompatibleSchema.scala19
-rw-r--r--src/dbc/scala/dbc/package.scala6
-rw-r--r--src/dbc/scala/dbc/result/Field.scala63
-rw-r--r--src/dbc/scala/dbc/result/FieldMetadata.scala40
-rw-r--r--src/dbc/scala/dbc/result/Relation.scala84
-rw-r--r--src/dbc/scala/dbc/result/Status.scala28
-rw-r--r--src/dbc/scala/dbc/result/Tuple.scala42
-rw-r--r--src/dbc/scala/dbc/statement/DerivedColumn.scala38
-rw-r--r--src/dbc/scala/dbc/statement/Expression.scala28
-rw-r--r--src/dbc/scala/dbc/statement/Insert.scala31
-rw-r--r--src/dbc/scala/dbc/statement/InsertionData.scala40
-rw-r--r--src/dbc/scala/dbc/statement/IsolationLevel.scala32
-rw-r--r--src/dbc/scala/dbc/statement/JoinType.scala56
-rw-r--r--src/dbc/scala/dbc/statement/Jointure.scala45
-rw-r--r--src/dbc/scala/dbc/statement/Relation.scala55
-rw-r--r--src/dbc/scala/dbc/statement/Select.scala99
-rw-r--r--src/dbc/scala/dbc/statement/SetQuantifier.scala38
-rw-r--r--src/dbc/scala/dbc/statement/Status.scala32
-rw-r--r--src/dbc/scala/dbc/statement/Table.scala38
-rw-r--r--src/dbc/scala/dbc/statement/Transaction.scala55
-rw-r--r--src/dbc/scala/dbc/statement/Update.scala47
-rw-r--r--src/dbc/scala/dbc/statement/expression/Aggregate.scala35
-rw-r--r--src/dbc/scala/dbc/statement/expression/BinaryOperator.scala33
-rw-r--r--src/dbc/scala/dbc/statement/expression/Constant.scala23
-rw-r--r--src/dbc/scala/dbc/statement/expression/Default.scala22
-rw-r--r--src/dbc/scala/dbc/statement/expression/Field.scala40
-rw-r--r--src/dbc/scala/dbc/statement/expression/FunctionCall.scala33
-rw-r--r--src/dbc/scala/dbc/statement/expression/Select.scala28
-rw-r--r--src/dbc/scala/dbc/statement/expression/SetFunction.scala40
-rw-r--r--src/dbc/scala/dbc/statement/expression/TypeCast.scala32
-rw-r--r--src/dbc/scala/dbc/statement/expression/UnaryOperator.scala33
-rw-r--r--src/dbc/scala/dbc/syntax/DataTypeUtil.scala98
-rw-r--r--src/dbc/scala/dbc/syntax/Database.scala33
-rw-r--r--src/dbc/scala/dbc/syntax/Statement.scala274
-rw-r--r--src/dbc/scala/dbc/syntax/StatementExpression.scala221
-rw-r--r--src/dbc/scala/dbc/value/ApproximateNumeric.scala28
-rw-r--r--src/dbc/scala/dbc/value/Boolean.scala27
-rw-r--r--src/dbc/scala/dbc/value/Character.scala35
-rw-r--r--src/dbc/scala/dbc/value/CharacterLargeObject.scala35
-rw-r--r--src/dbc/scala/dbc/value/CharacterVarying.scala35
-rw-r--r--src/dbc/scala/dbc/value/Conversion.scala156
-rw-r--r--src/dbc/scala/dbc/value/ExactNumeric.scala35
-rw-r--r--src/dbc/scala/dbc/value/Factory.scala95
-rw-r--r--src/dbc/scala/dbc/value/Unknown.scala27
-rw-r--r--src/dbc/scala/dbc/vendor/PostgreSQL.scala29
-rw-r--r--src/detach/library/scala/remoting/Channel.scala32
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java414
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java195
-rw-r--r--src/intellij/dbc.iml.SAMPLE23
-rw-r--r--src/intellij/scala-lang.ipr.SAMPLE2
-rw-r--r--src/intellij/test.iml.SAMPLE16
-rw-r--r--src/library/rootdoc.txt4
-rw-r--r--src/library/scala/Array.scala80
-rw-r--r--src/library/scala/Boolean.scala4
-rw-r--r--src/library/scala/Byte.scala211
-rw-r--r--src/library/scala/Char.scala210
-rw-r--r--src/library/scala/Console.scala1
-rw-r--r--src/library/scala/DelayedInit.scala32
-rw-r--r--src/library/scala/Double.scala162
-rw-r--r--src/library/scala/Dynamic.scala23
-rw-r--r--src/library/scala/Either.scala27
-rw-r--r--src/library/scala/Enumeration.scala11
-rw-r--r--src/library/scala/Float.scala163
-rw-r--r--src/library/scala/Function.scala6
-rw-r--r--src/library/scala/Function0.scala6
-rw-r--r--src/library/scala/Function1.scala8
-rw-r--r--src/library/scala/Function10.scala7
-rw-r--r--src/library/scala/Function11.scala7
-rw-r--r--src/library/scala/Function12.scala7
-rw-r--r--src/library/scala/Function13.scala7
-rw-r--r--src/library/scala/Function14.scala7
-rw-r--r--src/library/scala/Function15.scala7
-rw-r--r--src/library/scala/Function16.scala7
-rw-r--r--src/library/scala/Function17.scala7
-rw-r--r--src/library/scala/Function18.scala7
-rw-r--r--src/library/scala/Function19.scala7
-rw-r--r--src/library/scala/Function2.scala11
-rw-r--r--src/library/scala/Function20.scala7
-rw-r--r--src/library/scala/Function21.scala7
-rw-r--r--src/library/scala/Function22.scala7
-rw-r--r--src/library/scala/Function3.scala7
-rw-r--r--src/library/scala/Function4.scala7
-rw-r--r--src/library/scala/Function5.scala7
-rw-r--r--src/library/scala/Function6.scala7
-rw-r--r--src/library/scala/Function7.scala7
-rw-r--r--src/library/scala/Function8.scala7
-rw-r--r--src/library/scala/Function9.scala7
-rw-r--r--src/library/scala/Int.scala209
-rw-r--r--src/library/scala/Long.scala208
-rw-r--r--src/library/scala/LowPriorityImplicits.scala1
-rw-r--r--src/library/scala/Option.scala16
-rw-r--r--src/library/scala/Predef.scala189
-rw-r--r--src/library/scala/Product.scala2
-rw-r--r--src/library/scala/Product1.scala2
-rw-r--r--src/library/scala/Product10.scala2
-rw-r--r--src/library/scala/Product11.scala2
-rw-r--r--src/library/scala/Product12.scala2
-rw-r--r--src/library/scala/Product13.scala2
-rw-r--r--src/library/scala/Product14.scala2
-rw-r--r--src/library/scala/Product15.scala2
-rw-r--r--src/library/scala/Product16.scala2
-rw-r--r--src/library/scala/Product17.scala2
-rw-r--r--src/library/scala/Product18.scala2
-rw-r--r--src/library/scala/Product19.scala2
-rw-r--r--src/library/scala/Product2.scala2
-rw-r--r--src/library/scala/Product20.scala2
-rw-r--r--src/library/scala/Product21.scala2
-rw-r--r--src/library/scala/Product22.scala2
-rw-r--r--src/library/scala/Product3.scala2
-rw-r--r--src/library/scala/Product4.scala2
-rw-r--r--src/library/scala/Product5.scala2
-rw-r--r--src/library/scala/Product6.scala2
-rw-r--r--src/library/scala/Product7.scala2
-rw-r--r--src/library/scala/Product8.scala2
-rw-r--r--src/library/scala/Product9.scala2
-rw-r--r--src/library/scala/Responder.scala6
-rw-r--r--src/library/scala/ScalaObject.scala (renamed from src/dbc/scala/dbc/statement/Statement.scala)14
-rw-r--r--src/library/scala/Short.scala210
-rw-r--r--src/library/scala/StringContext.scala2
-rw-r--r--src/library/scala/Tuple2.scala104
-rw-r--r--src/library/scala/Tuple3.scala120
-rw-r--r--src/library/scala/Unit.scala4
-rw-r--r--src/library/scala/annotation/bridge.scala1
-rw-r--r--src/library/scala/annotation/elidable.scala8
-rw-r--r--src/library/scala/annotation/meta/languageFeature.scala (renamed from src/dbc/scala/dbc/exception/UnsupportedFeature.scala)17
-rw-r--r--src/library/scala/annotation/unspecialized.scala (renamed from src/dbc/scala/dbc/statement/SetClause.scala)22
-rw-r--r--src/library/scala/collection/BitSet.scala2
-rw-r--r--src/library/scala/collection/BitSetLike.scala10
-rw-r--r--src/library/scala/collection/DefaultMap.scala2
-rw-r--r--src/library/scala/collection/GenIterableLike.scala21
-rw-r--r--src/library/scala/collection/GenMapLike.scala3
-rw-r--r--src/library/scala/collection/GenSeqLike.scala18
-rw-r--r--src/library/scala/collection/GenSetLike.scala16
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala84
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala30
-rw-r--r--src/library/scala/collection/IndexedSeq.scala2
-rw-r--r--src/library/scala/collection/IndexedSeqLike.scala2
-rw-r--r--src/library/scala/collection/Iterable.scala2
-rw-r--r--src/library/scala/collection/IterableLike.scala30
-rw-r--r--src/library/scala/collection/IterableViewLike.scala1
-rw-r--r--src/library/scala/collection/Iterator.scala32
-rw-r--r--src/library/scala/collection/LinearSeq.scala2
-rw-r--r--src/library/scala/collection/Map.scala4
-rw-r--r--src/library/scala/collection/MapLike.scala11
-rw-r--r--src/library/scala/collection/Seq.scala2
-rw-r--r--src/library/scala/collection/SeqExtractors.scala6
-rw-r--r--src/library/scala/collection/SeqLike.scala65
-rw-r--r--src/library/scala/collection/SeqProxyLike.scala2
-rw-r--r--src/library/scala/collection/SeqViewLike.scala5
-rw-r--r--src/library/scala/collection/Set.scala2
-rw-r--r--src/library/scala/collection/SetLike.scala9
-rw-r--r--src/library/scala/collection/Traversable.scala9
-rw-r--r--src/library/scala/collection/TraversableLike.scala10
-rw-r--r--src/library/scala/collection/TraversableOnce.scala54
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala2
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala23
-rw-r--r--src/library/scala/collection/concurrent/Map.scala2
-rw-r--r--src/library/scala/collection/concurrent/TrieMap.scala76
-rw-r--r--src/library/scala/collection/convert/DecorateAsJava.scala2
-rw-r--r--src/library/scala/collection/convert/DecorateAsScala.scala13
-rw-r--r--src/library/scala/collection/convert/WrapAsJava.scala21
-rw-r--r--src/library/scala/collection/convert/WrapAsScala.scala13
-rw-r--r--src/library/scala/collection/generic/ArrayTagTraversableFactory.scala (renamed from src/library/scala/collection/generic/ClassManifestTraversableFactory.scala)14
-rw-r--r--src/library/scala/collection/generic/BitSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/CanBuildFrom.scala2
-rw-r--r--src/library/scala/collection/generic/Clearable.scala (renamed from src/dbc/scala/dbc/statement/AccessMode.scala)32
-rw-r--r--src/library/scala/collection/generic/FromRepr.scala56
-rw-r--r--src/library/scala/collection/generic/GenMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/GenSeqFactory.scala9
-rw-r--r--src/library/scala/collection/generic/GenSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/GenTraversableFactory.scala10
-rw-r--r--src/library/scala/collection/generic/GenericArrayTagCompanion.scala (renamed from src/library/scala/collection/generic/GenericClassManifestCompanion.scala)11
-rw-r--r--src/library/scala/collection/generic/GenericArrayTagTraversableTemplate.scala30
-rw-r--r--src/library/scala/collection/generic/GenericClassManifestTraversableTemplate.scala25
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala5
-rw-r--r--src/library/scala/collection/generic/GenericOrderedCompanion.scala1
-rw-r--r--src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala3
-rw-r--r--src/library/scala/collection/generic/GenericParCompanion.scala3
-rw-r--r--src/library/scala/collection/generic/GenericParTemplate.scala1
-rw-r--r--src/library/scala/collection/generic/GenericSeqCompanion.scala12
-rw-r--r--src/library/scala/collection/generic/GenericSetTemplate.scala2
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala16
-rw-r--r--src/library/scala/collection/generic/Growable.scala8
-rw-r--r--src/library/scala/collection/generic/ImmutableMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ImmutableSetFactory.scala1
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedMapFactory.scala4
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedSetFactory.scala8
-rw-r--r--src/library/scala/collection/generic/MapFactory.scala4
-rw-r--r--src/library/scala/collection/generic/MutableMapFactory.scala1
-rw-r--r--src/library/scala/collection/generic/MutableSetFactory.scala1
-rw-r--r--src/library/scala/collection/generic/MutableSortedSetFactory.scala5
-rw-r--r--src/library/scala/collection/generic/OrderedTraversableFactory.scala4
-rw-r--r--src/library/scala/collection/generic/ParFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ParMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ParSetFactory.scala1
-rw-r--r--src/library/scala/collection/generic/SeqFactory.scala1
-rw-r--r--src/library/scala/collection/generic/SetFactory.scala11
-rw-r--r--src/library/scala/collection/generic/Shrinkable.scala4
-rw-r--r--src/library/scala/collection/generic/Sorted.scala2
-rw-r--r--src/library/scala/collection/generic/SortedMapFactory.scala1
-rw-r--r--src/library/scala/collection/generic/SortedSetFactory.scala1
-rw-r--r--src/library/scala/collection/generic/Subtractable.scala6
-rw-r--r--src/library/scala/collection/generic/TraversableFactory.scala47
-rw-r--r--src/library/scala/collection/generic/TraversableForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/package.scala15
-rw-r--r--src/library/scala/collection/immutable/BitSet.scala4
-rw-r--r--src/library/scala/collection/immutable/GenSeq.scala.disabled2
-rw-r--r--src/library/scala/collection/immutable/GenSet.scala.disabled2
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala4
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala6
-rw-r--r--src/library/scala/collection/immutable/IndexedSeq.scala2
-rw-r--r--src/library/scala/collection/immutable/IntMap.scala4
-rw-r--r--src/library/scala/collection/immutable/Iterable.scala4
-rw-r--r--src/library/scala/collection/immutable/LinearSeq.scala2
-rw-r--r--src/library/scala/collection/immutable/List.scala10
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala13
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala8
-rw-r--r--src/library/scala/collection/immutable/LongMap.scala4
-rw-r--r--src/library/scala/collection/immutable/Map.scala2
-rw-r--r--src/library/scala/collection/immutable/MapLike.scala9
-rw-r--r--src/library/scala/collection/immutable/NumericRange.scala2
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala12
-rw-r--r--src/library/scala/collection/immutable/Queue.scala4
-rw-r--r--src/library/scala/collection/immutable/Range.scala13
-rw-r--r--src/library/scala/collection/immutable/Seq.scala4
-rw-r--r--src/library/scala/collection/immutable/Set.scala4
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala3
-rw-r--r--src/library/scala/collection/immutable/SortedSet.scala4
-rw-r--r--src/library/scala/collection/immutable/Stack.scala6
-rw-r--r--src/library/scala/collection/immutable/Stream.scala15
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala8
-rw-r--r--src/library/scala/collection/immutable/StringOps.scala2
-rw-r--r--src/library/scala/collection/immutable/Traversable.scala2
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala3
-rw-r--r--src/library/scala/collection/immutable/TreeSet.scala4
-rw-r--r--src/library/scala/collection/immutable/TrieIterator.scala2
-rw-r--r--src/library/scala/collection/immutable/Vector.scala2
-rw-r--r--src/library/scala/collection/immutable/WrappedString.scala2
-rw-r--r--src/library/scala/collection/interfaces/IterableMethods.scala39
-rw-r--r--src/library/scala/collection/interfaces/MapMethods.scala45
-rw-r--r--src/library/scala/collection/interfaces/SeqMethods.scala71
-rw-r--r--src/library/scala/collection/interfaces/SetMethods.scala52
-rw-r--r--src/library/scala/collection/interfaces/TraversableMethods.scala63
-rw-r--r--src/library/scala/collection/interfaces/TraversableOnceMethods.scala77
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala4
-rw-r--r--src/library/scala/collection/mutable/ArrayBuilder.scala17
-rw-r--r--src/library/scala/collection/mutable/ArrayLike.scala2
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala33
-rw-r--r--src/library/scala/collection/mutable/ArraySeq.scala4
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala8
-rw-r--r--src/library/scala/collection/mutable/BitSet.scala4
-rw-r--r--src/library/scala/collection/mutable/Buffer.scala4
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala5
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala4
-rw-r--r--src/library/scala/collection/mutable/Builder.scala2
-rw-r--r--src/library/scala/collection/mutable/ConcurrentMap.scala2
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala4
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedListLike.scala2
-rw-r--r--src/library/scala/collection/mutable/FlatArray.scala157
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala8
-rw-r--r--src/library/scala/collection/mutable/GenSeq.scala.disabled2
-rw-r--r--src/library/scala/collection/mutable/GenSet.scala.disabled2
-rw-r--r--src/library/scala/collection/mutable/GrowingBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/HashMap.scala4
-rw-r--r--src/library/scala/collection/mutable/HashSet.scala4
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala22
-rw-r--r--src/library/scala/collection/mutable/IndexedSeq.scala2
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqLike.scala4
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala1
-rw-r--r--src/library/scala/collection/mutable/Iterable.scala2
-rw-r--r--src/library/scala/collection/mutable/LinearSeq.scala4
-rw-r--r--src/library/scala/collection/mutable/LinkedHashMap.scala12
-rw-r--r--src/library/scala/collection/mutable/LinkedHashSet.scala6
-rw-r--r--src/library/scala/collection/mutable/LinkedList.scala4
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala2
-rw-r--r--src/library/scala/collection/mutable/ListBuffer.scala10
-rw-r--r--src/library/scala/collection/mutable/ListMap.scala4
-rw-r--r--src/library/scala/collection/mutable/Map.scala2
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala26
-rw-r--r--src/library/scala/collection/mutable/MultiMap.scala2
-rw-r--r--src/library/scala/collection/mutable/ObservableBuffer.scala14
-rw-r--r--src/library/scala/collection/mutable/OpenHashMap.scala4
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala30
-rw-r--r--src/library/scala/collection/mutable/Queue.scala2
-rw-r--r--src/library/scala/collection/mutable/QueueProxy.scala2
-rw-r--r--src/library/scala/collection/mutable/Seq.scala4
-rw-r--r--src/library/scala/collection/mutable/SeqLike.scala4
-rw-r--r--src/library/scala/collection/mutable/Set.scala4
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala4
-rw-r--r--src/library/scala/collection/mutable/SortedSet.scala4
-rw-r--r--src/library/scala/collection/mutable/Stack.scala4
-rw-r--r--src/library/scala/collection/mutable/StackProxy.scala5
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedBuffer.scala6
-rw-r--r--src/library/scala/collection/mutable/SynchronizedMap.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedQueue.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedSet.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedStack.scala2
-rw-r--r--src/library/scala/collection/mutable/Traversable.scala2
-rw-r--r--src/library/scala/collection/mutable/TreeSet.scala2
-rw-r--r--src/library/scala/collection/mutable/UnrolledBuffer.scala16
-rw-r--r--src/library/scala/collection/mutable/WeakHashMap.scala4
-rw-r--r--src/library/scala/collection/mutable/WrappedArray.scala45
-rw-r--r--src/library/scala/collection/mutable/WrappedArrayBuilder.scala16
-rw-r--r--src/library/scala/collection/package.scala8
-rw-r--r--src/library/scala/collection/parallel/ParIterable.scala2
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala57
-rw-r--r--src/library/scala/collection/parallel/ParIterableViewLike.scala1
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala6
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala7
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSeq.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSet.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParVector.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala8
-rw-r--r--src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala7
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSeq.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSet.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala2
-rw-r--r--src/library/scala/collection/parallel/package.scala1
-rw-r--r--src/library/scala/concurrent/Channel.scala1
-rw-r--r--src/library/scala/concurrent/ConcurrentPackageObject.scala66
-rw-r--r--src/library/scala/concurrent/ExecutionContext.scala33
-rw-r--r--src/library/scala/concurrent/Future.scala328
-rw-r--r--src/library/scala/concurrent/FutureTaskRunner.scala2
-rw-r--r--src/library/scala/concurrent/JavaConversions.scala1
-rw-r--r--src/library/scala/concurrent/ManagedBlocker.scala1
-rw-r--r--src/library/scala/concurrent/Promise.scala60
-rw-r--r--src/library/scala/concurrent/Scheduler.scala2
-rw-r--r--src/library/scala/concurrent/SyncVar.scala50
-rw-r--r--src/library/scala/concurrent/TaskRunner.scala2
-rw-r--r--src/library/scala/concurrent/ThreadPoolRunner.scala1
-rw-r--r--src/library/scala/concurrent/ThreadRunner.scala1
-rw-r--r--src/library/scala/concurrent/impl/AbstractPromise.java25
-rw-r--r--src/library/scala/concurrent/impl/ExecutionContextImpl.scala108
-rw-r--r--src/library/scala/concurrent/impl/Future.scala67
-rw-r--r--src/library/scala/concurrent/impl/NonFatal.scala37
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala227
-rw-r--r--src/library/scala/concurrent/package.scala10
-rw-r--r--src/library/scala/concurrent/util/Duration.scala227
-rw-r--r--src/library/scala/concurrent/util/Unsafe.java35
-rw-r--r--src/library/scala/concurrent/util/duration/Classifier.scala9
-rw-r--r--src/library/scala/concurrent/util/duration/IntMult.scala18
-rw-r--r--src/library/scala/concurrent/util/duration/package.scala31
-rw-r--r--src/library/scala/io/BytePickle.scala2
-rw-r--r--src/library/scala/io/Codec.scala1
-rw-r--r--src/library/scala/io/Source.scala4
-rw-r--r--src/library/scala/language.scala124
-rw-r--r--src/library/scala/languageFeature.scala30
-rw-r--r--src/library/scala/math/BigDecimal.scala3
-rw-r--r--src/library/scala/math/BigInt.scala11
-rw-r--r--src/library/scala/math/Equiv.scala2
-rw-r--r--src/library/scala/math/Fractional.scala2
-rw-r--r--src/library/scala/math/Integral.scala2
-rw-r--r--src/library/scala/math/Numeric.scala6
-rw-r--r--src/library/scala/math/Ordered.scala2
-rw-r--r--src/library/scala/math/Ordering.scala1
-rw-r--r--src/library/scala/math/package.scala17
-rw-r--r--src/library/scala/package.scala19
-rw-r--r--src/library/scala/parallel/Future.scala3
-rw-r--r--src/library/scala/reflect/ArrayTag.scala (renamed from src/library/scala/reflect/ArrayTags.scala)8
-rw-r--r--src/library/scala/reflect/ClassManifest.scala242
-rw-r--r--src/library/scala/reflect/ClassTag.scala96
-rw-r--r--src/library/scala/reflect/ClassTags.scala167
-rw-r--r--src/library/scala/reflect/DummyMirror.scala783
-rw-r--r--src/library/scala/reflect/DynamicProxy.scala74
-rw-r--r--src/library/scala/reflect/ErasureTag.scala23
-rw-r--r--src/library/scala/reflect/Manifest.scala259
-rw-r--r--src/library/scala/reflect/NoManifest.scala16
-rw-r--r--src/library/scala/reflect/OptManifest.scala18
-rw-r--r--src/library/scala/reflect/ReflectionUtils.scala23
-rw-r--r--src/library/scala/reflect/TagInterop.scala34
-rw-r--r--src/library/scala/reflect/TagMaterialization.scala154
-rwxr-xr-xsrc/library/scala/reflect/api/AnnotationInfos.scala2
-rw-r--r--src/library/scala/reflect/api/Attachment.scala29
-rw-r--r--src/library/scala/reflect/api/Attachments.scala16
-rw-r--r--src/library/scala/reflect/api/Exprs.scala33
-rw-r--r--src/library/scala/reflect/api/FrontEnds.scala (renamed from src/library/scala/reflect/api/Reporters.scala)13
-rwxr-xr-xsrc/library/scala/reflect/api/Names.scala8
-rw-r--r--src/library/scala/reflect/api/RequiredFile.scala7
-rwxr-xr-xsrc/library/scala/reflect/api/StandardDefinitions.scala164
-rw-r--r--src/library/scala/reflect/api/StandardNames.scala134
-rwxr-xr-xsrc/library/scala/reflect/api/Symbols.scala168
-rw-r--r--src/library/scala/reflect/api/ToolBoxes.scala10
-rw-r--r--src/library/scala/reflect/api/TreeBuildUtil.scala38
-rw-r--r--src/library/scala/reflect/api/Trees.scala139
-rw-r--r--src/library/scala/reflect/api/TypeTags.scala202
-rwxr-xr-xsrc/library/scala/reflect/api/Types.scala37
-rwxr-xr-xsrc/library/scala/reflect/api/Universe.scala35
-rw-r--r--src/library/scala/reflect/makro/Context.scala27
-rw-r--r--src/library/scala/reflect/makro/FrontEnds.scala (renamed from src/library/scala/reflect/makro/Reporters.scala)8
-rw-r--r--src/library/scala/reflect/makro/Reifiers.scala27
-rw-r--r--src/library/scala/reflect/makro/Symbols.scala7
-rw-r--r--src/library/scala/reflect/makro/Typers.scala6
-rw-r--r--src/library/scala/reflect/makro/internal/Utils.scala146
-rw-r--r--src/library/scala/reflect/makro/internal/macroImpl.scala2
-rw-r--r--src/library/scala/reflect/makro/internal/typeTagImpl.scala133
-rw-r--r--src/library/scala/reflect/package.scala49
-rw-r--r--src/library/scala/runtime/AbstractPartialFunction.scala8
-rw-r--r--src/library/scala/runtime/BoxesRunTime.java5
-rw-r--r--src/library/scala/runtime/RichDouble.scala2
-rw-r--r--src/library/scala/runtime/RichFloat.scala2
-rw-r--r--src/library/scala/runtime/RichInt.scala7
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala61
-rw-r--r--src/library/scala/runtime/SeqCharSequence.scala44
-rw-r--r--src/library/scala/runtime/Statics.java89
-rw-r--r--src/library/scala/runtime/Tuple2Zipped.scala130
-rw-r--r--src/library/scala/runtime/Tuple3Zipped.scala141
-rw-r--r--src/library/scala/runtime/package.scala12
-rw-r--r--src/library/scala/sys/BooleanProp.scala2
-rw-r--r--src/library/scala/sys/Prop.scala2
-rw-r--r--src/library/scala/sys/SystemProperties.scala3
-rw-r--r--src/library/scala/sys/package.scala6
-rw-r--r--src/library/scala/sys/process/Process.scala1
-rw-r--r--src/library/scala/testing/Benchmark.scala3
-rw-r--r--src/library/scala/testing/Show.scala8
-rw-r--r--src/library/scala/text/Document.scala5
-rw-r--r--src/library/scala/util/Marshal.scala35
-rw-r--r--src/library/scala/util/MurmurHash3.scala10
-rw-r--r--src/library/scala/util/Random.scala6
-rw-r--r--src/library/scala/util/Sorting.scala16
-rw-r--r--src/library/scala/util/Try.scala4
-rw-r--r--src/library/scala/util/automata/BaseBerrySethi.scala12
-rw-r--r--src/library/scala/util/automata/DetWordAutom.scala1
-rw-r--r--src/library/scala/util/automata/Inclusion.scala4
-rw-r--r--src/library/scala/util/automata/NondetWordAutom.scala5
-rw-r--r--src/library/scala/util/automata/SubsetConstruction.scala3
-rw-r--r--src/library/scala/util/automata/WordBerrySethi.scala8
-rw-r--r--src/library/scala/util/control/Exception.scala8
-rw-r--r--src/library/scala/util/grammar/HedgeRHS.scala4
-rw-r--r--src/library/scala/util/grammar/TreeRHS.scala3
-rw-r--r--src/library/scala/util/hashing/Hashing.scala42
-rw-r--r--src/library/scala/util/matching/Regex.scala41
-rw-r--r--src/library/scala/util/parsing/ast/AbstractSyntax.scala1
-rw-r--r--src/library/scala/util/parsing/ast/Binders.scala8
-rw-r--r--src/library/scala/util/parsing/combinator/ImplicitConversions.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/JavaTokenParsers.scala9
-rw-r--r--src/library/scala/util/parsing/combinator/PackratParsers.scala1
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala26
-rw-r--r--src/library/scala/util/parsing/combinator/RegexParsers.scala1
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Lexical.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/StdLexical.scala4
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala1
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala1
-rw-r--r--src/library/scala/util/parsing/combinator/testing/RegexTest.scala5
-rw-r--r--src/library/scala/util/parsing/combinator/testing/Tester.scala1
-rw-r--r--src/library/scala/util/parsing/input/CharArrayReader.scala4
-rw-r--r--src/library/scala/util/parsing/input/OffsetPosition.scala7
-rw-r--r--src/library/scala/util/parsing/input/PagedSeqReader.scala2
-rw-r--r--src/library/scala/util/parsing/input/Position.scala5
-rw-r--r--src/library/scala/util/parsing/input/StreamReader.scala8
-rw-r--r--src/library/scala/util/regexp/Base.scala5
-rw-r--r--src/library/scala/util/regexp/PointedHedgeExp.scala1
-rw-r--r--src/library/scala/util/regexp/SyntaxError.scala1
-rw-r--r--src/library/scala/util/regexp/WordExp.scala1
-rw-r--r--src/library/scala/xml/Atom.scala3
-rw-r--r--src/library/scala/xml/Attribute.scala4
-rw-r--r--src/library/scala/xml/Comment.scala2
-rwxr-xr-xsrc/library/scala/xml/Elem.scala2
-rw-r--r--src/library/scala/xml/EntityRef.scala2
-rw-r--r--src/library/scala/xml/MetaData.scala28
-rwxr-xr-xsrc/library/scala/xml/Node.scala8
-rw-r--r--src/library/scala/xml/NodeSeq.scala9
-rw-r--r--src/library/scala/xml/PrefixedAttribute.scala6
-rwxr-xr-xsrc/library/scala/xml/PrettyPrinter.scala20
-rw-r--r--src/library/scala/xml/ProcInstr.scala4
-rw-r--r--src/library/scala/xml/TextBuffer.scala3
-rwxr-xr-xsrc/library/scala/xml/Utility.scala55
-rw-r--r--src/library/scala/xml/dtd/ContentModel.scala4
-rw-r--r--src/library/scala/xml/dtd/DocType.scala2
-rw-r--r--src/library/scala/xml/dtd/ExternalID.scala6
-rw-r--r--src/library/scala/xml/factory/LoggedNodeFactory.scala2
-rw-r--r--src/library/scala/xml/include/sax/XIncludeFilter.scala2
-rw-r--r--src/library/scala/xml/parsing/ConstructingParser.scala4
-rw-r--r--src/library/scala/xml/parsing/ExternalSources.scala5
-rw-r--r--src/library/scala/xml/parsing/FactoryAdapter.scala2
-rwxr-xr-xsrc/library/scala/xml/parsing/MarkupHandler.scala2
-rw-r--r--src/library/scala/xml/parsing/MarkupParserCommon.scala4
-rw-r--r--src/library/scala/xml/pull/XMLEvent.scala2
-rw-r--r--src/library/scala/xml/transform/BasicTransformer.scala5
-rw-r--r--src/partest/scala/tools/partest/CompilerTest.scala11
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala11
-rw-r--r--src/partest/scala/tools/partest/ScaladocModelTest.scala78
-rw-r--r--src/partest/scala/tools/partest/SigTest.scala24
-rw-r--r--src/partest/scala/tools/partest/nest/AntRunner.scala1
-rw-r--r--src/partest/scala/tools/partest/nest/CompileManager.scala98
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleFileManager.scala9
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleRunner.scala7
-rw-r--r--src/partest/scala/tools/partest/nest/DirectRunner.scala4
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala1
-rw-r--r--src/partest/scala/tools/partest/nest/ReflectiveRunner.scala4
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala20
-rw-r--r--src/partest/scala/tools/partest/nest/Worker.scala74
-rw-r--r--src/scalacheck/org/scalacheck/Arbitrary.scala2
-rw-r--r--src/scalacheck/org/scalacheck/util/Buildable.scala2
-rw-r--r--src/scalap/scala/tools/scalap/JavaWriter.scala33
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala2
-rw-r--r--src/swing/scala/swing/Font.scala.disabled32
-rw-r--r--src/yourkit/scala/tools/util/YourkitProfiling.scala63
911 files changed, 55643 insertions, 21014 deletions
diff --git a/src/actors-migration/scala/actors/MigrationSystem.scala b/src/actors-migration/scala/actors/MigrationSystem.scala
new file mode 100644
index 0000000000..ffc93d9c6f
--- /dev/null
+++ b/src/actors-migration/scala/actors/MigrationSystem.scala
@@ -0,0 +1,36 @@
+package scala.actors
+
+import scala.collection._
+
+object MigrationSystem {
+
+ private[actors] val contextStack = new ThreadLocal[immutable.Stack[Boolean]] {
+ override def initialValue() = immutable.Stack[Boolean]()
+ }
+
+ private[this] def withCleanContext(block: => ActorRef): ActorRef = {
+ // push clean marker
+ val old = contextStack.get
+ contextStack.set(old.push(true))
+ try {
+ val instance = block
+
+ if (instance eq null)
+ throw new Exception("Actor instance passed to actorOf can't be 'null'")
+
+ instance
+ } finally {
+ val stackAfter = contextStack.get
+ if (stackAfter.nonEmpty)
+ contextStack.set(if (!stackAfter.head) stackAfter.pop.pop else stackAfter.pop)
+ }
+ }
+
+ def actorOf(props: Props): ActorRef = withCleanContext {
+ val creator = props.creator()
+ val r = new InternalActorRef(creator)
+ creator.start()
+ r
+ }
+
+} \ No newline at end of file
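A minimal usage sketch of the factory above, assuming a hypothetical MyServer class that extends StashingActor and a placeholder dispatcher name (neither is part of this commit):

    // MyServer and "default-dispatcher" are hypothetical placeholders
    val server: ActorRef = MigrationSystem.actorOf(Props(() => new MyServer, "default-dispatcher"))
    server ! "start"   // fire-and-forget send through the returned ActorRef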
diff --git a/src/actors-migration/scala/actors/Pattern.scala b/src/actors-migration/scala/actors/Pattern.scala
new file mode 100644
index 0000000000..97dbd2cccd
--- /dev/null
+++ b/src/actors-migration/scala/actors/Pattern.scala
@@ -0,0 +1,25 @@
+package scala.actors
+
+import scala.concurrent.util.Duration
+
+object pattern {
+
+ implicit def askSupport(ar: ActorRef): AskableActorRef =
+ new AskableActorRef(ar)
+}
+
+/**
+ * ActorRef with support for ask(?) operation.
+ */
+class AskableActorRef(val ar: ActorRef) extends ActorRef {
+
+ def !(message: Any)(implicit sender: ActorRef = null): Unit = ar.!(message)(sender)
+
+ def ?(message: Any)(timeout: Timeout): Future[Any] = ar.?(message, timeout.duration)
+
+ private[actors] def ?(message: Any, timeout: Duration): Future[Any] = ar.?(message, timeout)
+
+ def forward(message: Any) = ar.forward(message)
+
+ private[actors] def localActor: AbstractActor = ar.localActor
+} \ No newline at end of file
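A hedged sketch of the ask pattern this file enables; `ref` is assumed to come from MigrationSystem.actorOf, and the message and timeout values are arbitrary:

    import scala.actors._
    import scala.actors.pattern._
    import scala.concurrent.util.Duration
    import java.util.concurrent.TimeUnit

    // the askSupport implicit wraps the plain ActorRef in an AskableActorRef
    val askable: AskableActorRef = ref
    val reply: Future[Any] = askable.?("ping")(Timeout(Duration(5, TimeUnit.SECONDS)))
    println(reply())   // scala.actors futures block when applied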
diff --git a/src/actors-migration/scala/actors/Props.scala b/src/actors-migration/scala/actors/Props.scala
new file mode 100644
index 0000000000..891e23213a
--- /dev/null
+++ b/src/actors-migration/scala/actors/Props.scala
@@ -0,0 +1,13 @@
+package scala.actors
+
+/**
+ * ActorRef configuration object. It represents a minimal subset of the Akka Props class.
+ */
+case class Props(creator: () ⇒ InternalActor, dispatcher: String) {
+
+ /**
+ * Returns a new Props with the specified creator set
+ */
+ def withCreator(c: ⇒ InternalActor) = copy(creator = () ⇒ c)
+
+}
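A small sketch of the by-name withCreator helper; MyServer and MyOtherServer stand in for user-defined InternalActor subclasses:

    val base  = Props(() => new MyServer, "default-dispatcher")
    val other = base.withCreator(new MyOtherServer)   // only the creator changes, the dispatcher is kept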
diff --git a/src/actors-migration/scala/actors/StashingActor.scala b/src/actors-migration/scala/actors/StashingActor.scala
new file mode 100644
index 0000000000..37300f9d63
--- /dev/null
+++ b/src/actors-migration/scala/actors/StashingActor.scala
@@ -0,0 +1,255 @@
+package scala.actors
+
+import scala.collection._
+import scala.concurrent.util.Duration
+import java.util.concurrent.TimeUnit
+
+object StashingActor extends Combinators {
+ implicit def mkBody[A](body: => A) = new InternalActor.Body[A] {
+ def andThen[B](other: => B): Unit = Actor.rawSelf.seq(body, other)
+ }
+}
+
+@deprecated("Scala Actors are being removed from the standard library. Please refer to the migration guide.", "2.10")
+trait StashingActor extends InternalActor {
+ type Receive = PartialFunction[Any, Unit]
+
+ // checks if StashingActor is created within the actorOf block
+ creationCheck;
+
+ private[actors] val ref = new InternalActorRef(this)
+
+ val self: ActorRef = ref
+
+ protected[this] val context: ActorContext = new ActorContext(this)
+
+ @volatile
+ private var myTimeout: Option[Long] = None
+
+ private val stash = new MQueue[Any]("Stash")
+
+ /**
+ * Migration notes:
+ * this method replaces receiveWithin, receive and react methods from Scala Actors.
+ */
+ def receive: Receive
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called when an Actor is started by invoking 'actorOf'.
+ */
+ def preStart() {}
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called when 'actor.stop()' is invoked.
+ */
+ def postStop() {}
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called on a crashed Actor right BEFORE it is restarted to allow clean
+ * up of resources before Actor is terminated.
+ * By default it calls postStop()
+ */
+ def preRestart(reason: Throwable, message: Option[Any]) { postStop() }
+
+ /**
+ * Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler.
+ * Puts the behavior on top of the hotswap stack.
+ * If "discardOld" is true, an unbecome will be issued prior to pushing the new behavior to the stack
+ */
+ private def become(behavior: Receive, discardOld: Boolean = true) {
+ if (discardOld) unbecome()
+ behaviorStack = behaviorStack.push(wrapWithSystemMessageHandling(behavior))
+ }
+
+ /**
+ * Reverts the Actor behavior to the previous one in the hotswap stack.
+ */
+ private def unbecome() {
+ // never unbecome the initial behavior
+ if (behaviorStack.size > 1)
+ behaviorStack = behaviorStack.pop
+ }
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called when a message isn't handled by the current behavior of the actor.
+ * By default it throws a DeathPactException for Terminated messages and prints
+ * a warning to System.err for everything else.
+ */
+ def unhandled(message: Any) {
+ message match {
+ case Terminated(dead) ⇒ throw new DeathPactException(dead)
+ case _ ⇒ System.err.println("Unhandled message " + message)
+ }
+ }
+
+ protected def sender: ActorRef = new OutputChannelRef(internalSender)
+
+ override def act(): Unit = internalAct()
+
+ override def start(): StashingActor = {
+ super.start()
+ this
+ }
+
+ override def receive[R](f: PartialFunction[Any, R]): R
+
+ /*
+ * Internal implementation.
+ */
+
+ private[actors] var behaviorStack = immutable.Stack[PartialFunction[Any, Unit]]()
+
+ /*
+ * Checks that StashingActor can be created only by MigrationSystem.actorOf method.
+ */
+ private[this] def creationCheck = {
+
+ // creation check (see ActorRef)
+ val context = MigrationSystem.contextStack.get
+ if (context.isEmpty)
+ throw new RuntimeException("In order to create StashingActor one must use actorOf.")
+ else {
+ if (!context.head)
+ throw new RuntimeException("Only one actor can be created per actorOf call.")
+ else
+ MigrationSystem.contextStack.set(context.push(false))
+ }
+
+ }
+
+ private[actors] override def preAct() {
+ preStart()
+ }
+
+ /**
+ * Adds a message to the stash, to be processed later. Stashed messages can be fed back into the actor's
+ * mailbox using <code>unstashAll()</code>.
+ *
+ * Temporarily stashing away messages that the actor does not (yet) handle simplifies implementing
+ * certain messaging protocols.
+ */
+ final def stash(msg: Any): Unit = {
+ stash.append(msg, null)
+ }
+
+ final def unstashAll(): Unit = {
+ mailbox.prepend(stash)
+ stash.clear()
+ }
+
+ /**
+ * Wraps any partial function with Exit message handling.
+ */
+ private[actors] def wrapWithSystemMessageHandling(pf: PartialFunction[Any, Unit]): PartialFunction[Any, Unit] = {
+
+ def swapExitHandler(pf: PartialFunction[Any, Unit]) = new PartialFunction[Any, Unit] {
+ def swapExit(v: Any) = v match {
+ case Exit(from, reason) =>
+ Terminated(new InternalActorRef(from.asInstanceOf[InternalActor]))
+ case v => v
+ }
+
+ def isDefinedAt(v: Any) = pf.isDefinedAt(swapExit(v))
+ def apply(v: Any) = pf(swapExit(v))
+ }
+
+ swapExitHandler(pf orElse {
+ case m => unhandled(m)
+ })
+ }
+
+ /**
+ * Method that models the behavior of Akka actors.
+ */
+ private[actors] def internalAct() {
+ trapExit = true
+ behaviorStack = behaviorStack.push(wrapWithSystemMessageHandling(receive))
+ loop {
+ if (myTimeout.isDefined)
+ reactWithin(myTimeout.get)(behaviorStack.top)
+ else
+ react(behaviorStack.top)
+ }
+ }
+
+ private[actors] override def internalPostStop() = postStop()
+
+ // Used for pattern matching statement similar to Akka
+ lazy val ReceiveTimeout = TIMEOUT
+
+ /**
+ * Used to simulate Akka context behavior. Should be used only for migration purposes.
+ */
+ protected[actors] class ActorContext(val actr: StashingActor) {
+
+ /**
+ * Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler.
+ * Puts the behavior on top of the hotswap stack.
+ * If "discardOld" is true, an unbecome will be issued prior to pushing the new behavior to the stack
+ */
+ def become(behavior: Receive, discardOld: Boolean = true) = actr.become(behavior, discardOld)
+
+ /**
+ * Reverts the Actor behavior to the previous one in the hotswap stack.
+ */
+ def unbecome() = actr.unbecome()
+
+ /**
+ * Shuts down the actor, its dispatcher and message queue.
+ */
+ def stop(subject: ActorRef): Nothing = if (subject != ref)
+ throw new RuntimeException("Only stoping of self is allowed during migration.")
+ else
+ actr.exit()
+
+ /**
+ * Registers this actor as a Monitor for the provided ActorRef.
+ * @return the provided ActorRef
+ */
+ def watch(subject: ActorRef): ActorRef = {
+ actr.watch(subject)
+ subject
+ }
+
+ /**
+ * Unregisters this actor as Monitor for the provided ActorRef.
+ * @return the provided ActorRef
+ */
+ def unwatch(subject: ActorRef): ActorRef = {
+ actr unwatch subject
+ subject
+ }
+
+ /**
+ * Defines the receiver timeout value.
+ */
+ final def setReceiveTimeout(timeout: Duration): Unit =
+ actr.myTimeout = Some(timeout.toMillis)
+
+ /**
+ * Gets the current receiveTimeout
+ */
+ final def receiveTimeout: Option[Duration] =
+ actr.myTimeout.map(Duration(_, TimeUnit.MILLISECONDS))
+
+ }
+}
+
+/**
+ * This exception is thrown by default when an Actor does not handle termination.
+ */
+class DeathPactException(ref: ActorRef = null) extends Exception {
+ override def fillInStackTrace() = this //Don't waste cycles generating stack trace
+}
+
+/**
+ * Message that is sent to a watching actor when the watched actor terminates.
+ */
+case class Terminated(actor: ActorRef)
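A hedged migration sketch that ties the pieces above together: a hypothetical StashingActor that stashes every message until an assumed "open" message arrives, then replays the stash and swaps behavior via context.become:

    class Gate extends StashingActor {
      def receive: Receive = {
        case "open" =>
          unstashAll()               // feed the stashed messages back into the mailbox
          context.become(opened)     // switch to the new behavior
        case other => stash(other)   // defer everything else until the gate is open
      }
      def opened: Receive = {
        case msg => println("processing " + msg)
      }
    }

    val gate = MigrationSystem.actorOf(Props(() => new Gate, "default-dispatcher"))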
diff --git a/src/actors-migration/scala/actors/Timeout.scala b/src/actors-migration/scala/actors/Timeout.scala
new file mode 100644
index 0000000000..0d9532a14b
--- /dev/null
+++ b/src/actors-migration/scala/actors/Timeout.scala
@@ -0,0 +1,39 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.actors
+
+import scala.concurrent.util.Duration
+import java.util.concurrent.TimeUnit
+
+case class Timeout(duration: Duration) {
+ def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS))
+ def this(length: Long, unit: TimeUnit) = this(Duration(length, unit))
+}
+
+object Timeout {
+
+ /**
+ * A timeout with zero duration will cause most requests to always time out.
+ */
+ val zero = new Timeout(Duration.Zero)
+
+ /**
+ * A Timeout with infinite duration. Will never timeout. Use extreme caution with this
+ * as it may cause memory leaks, blocked threads, or may not even be supported by
+ * the receiver, which would result in an exception.
+ */
+ val never = new Timeout(Duration.Inf)
+
+ def apply(timeout: Long) = new Timeout(timeout)
+ def apply(length: Long, unit: TimeUnit) = new Timeout(length, unit)
+
+ implicit def durationToTimeout(duration: Duration) = new Timeout(duration)
+ implicit def intToTimeout(timeout: Int) = new Timeout(timeout)
+ implicit def longToTimeout(timeout: Long) = new Timeout(timeout)
+}
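A sketch of the conversions this object provides; the durations are arbitrary:

    import scala.concurrent.util.Duration
    import java.util.concurrent.TimeUnit

    val explicit = Timeout(Duration(1, TimeUnit.SECONDS))
    val fromLong: Timeout = 500L           // longToTimeout, interpreted as milliseconds
    val unbounded: Timeout = Duration.Inf  // durationToTimeout, same duration as Timeout.never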
diff --git a/src/actors/scala/actors/ActorRef.scala b/src/actors/scala/actors/ActorRef.scala
new file mode 100644
index 0000000000..8f70b13e01
--- /dev/null
+++ b/src/actors/scala/actors/ActorRef.scala
@@ -0,0 +1,119 @@
+package scala.actors
+
+import java.util.concurrent.TimeoutException
+import scala.concurrent.util.Duration
+
+/**
+ * Trait used for migration of Scala actors to Akka.
+ */
+@deprecated("ActorRef ought to be used only with the Actor Migration Kit.")
+trait ActorRef {
+
+ /**
+ * Sends a one-way asynchronous message, i.e. fire-and-forget semantics.
+ * <p/>
+ *
+ * If invoked from within an actor then the actor reference is implicitly passed on as the implicit 'sender' argument.
+ * <p/>
+ *
+ * This actor 'sender' reference is then available in the receiving actor in the 'sender' member variable,
+ * if invoked from within an Actor. If not then no sender is available.
+ * <pre>
+ * actor ! message
+ * </pre>
+ * <p/>
+ */
+ def !(message: Any)(implicit sender: ActorRef = null): Unit
+
+ /**
+ * Sends a message asynchronously, returning a future which may eventually hold the reply.
+ */
+ private[actors] def ?(message: Any, timeout: Duration): Future[Any]
+
+ /**
+ * Forwards the message and passes the original sender actor as the sender.
+ * <p/>
+ * Works with '!' and '?'.
+ */
+ def forward(message: Any)
+
+ private[actors] def localActor: AbstractActor
+
+}
+
+private[actors] class OutputChannelRef(val actor: OutputChannel[Any]) extends ActorRef {
+
+ override private[actors] def ?(message: Any, timeout: Duration): Future[Any] =
+ throw new UnsupportedOperationException("Output channel does not support ?")
+
+ /**
+ * Sends a one-way asynchronous message, i.e. fire-and-forget semantics.
+ * <p/>
+ *
+ * <p/>
+ * <pre>
+ * actor ! message
+ * </pre>
+ * <p/>
+ */
+ def !(message: Any)(implicit sender: ActorRef = null): Unit =
+ if (sender != null)
+ actor.send(message, sender.localActor)
+ else
+ actor ! message
+
+ override def equals(that: Any) =
+ that.isInstanceOf[OutputChannelRef] && that.asInstanceOf[OutputChannelRef].actor == this.actor
+
+ private[actors] override def localActor: AbstractActor =
+ throw new UnsupportedOperationException("Output channel does not have an instance of the actor")
+
+ def forward(message: Any): Unit = throw new UnsupportedOperationException("OutputChannel does not support forward.")
+
+}
+
+private[actors] class ReactorRef(override val actor: Reactor[Any]) extends OutputChannelRef(actor) {
+
+ /**
+ * Forwards the message and passes the original sender actor as the sender.
+ * <p/>
+ * Works with '!' and '?'.
+ */
+ override def forward(message: Any) = actor.forward(message)
+
+}
+
+private[actors] final class InternalActorRef(override val actor: InternalActor) extends ReactorRef(actor) {
+
+ /**
+ * Sends a message asynchronously, returning a future which may eventually hold the reply.
+ */
+ override private[actors] def ?(message: Any, timeout: Duration): Future[Any] =
+ Futures.future {
+ val dur = if (timeout.isFinite()) timeout.toMillis else (java.lang.Long.MAX_VALUE >> 2)
+ actor !? (dur, message) match {
+ case Some(x) => x
+ case None => new AskTimeoutException("? operation timed out.")
+ }
+ }
+
+ override def !(message: Any)(implicit sender: ActorRef = null): Unit =
+ if (message == PoisonPill)
+ actor.stop('normal)
+ else if (sender != null)
+ actor.send(message, sender.localActor)
+ else
+ actor ! message
+
+ private[actors] override def localActor: InternalActor = this.actor
+}
+
+/**
+ * This is what is used to complete a Future that is returned from an ask/? call,
+ * when it times out.
+ */
+class AskTimeoutException(message: String, cause: Throwable) extends TimeoutException {
+ def this(message: String) = this(message, null: Throwable)
+}
+
+object PoisonPill
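A short sketch of the send paths defined above, assuming `ref` is an InternalActorRef obtained from MigrationSystem.actorOf:

    ref ! "work"       // fire-and-forget; the implicit sender defaults to null
    ref ! PoisonPill   // intercepted by InternalActorRef, which stops the actor with 'normal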
diff --git a/src/actors/scala/actors/ActorTask.scala b/src/actors/scala/actors/ActorTask.scala
index bb04302238..045b00f5f2 100644
--- a/src/actors/scala/actors/ActorTask.scala
+++ b/src/actors/scala/actors/ActorTask.scala
@@ -51,7 +51,6 @@ private[actors] class ActorTask(actor: InternalActor,
super.terminateExecution(e)
() => {}
}
- actor.internalPostStop
res
}
diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala
index c94da5b9fd..cb66021d1c 100644
--- a/src/actors/scala/actors/InternalActor.scala
+++ b/src/actors/scala/actors/InternalActor.scala
@@ -153,7 +153,7 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
val matches = f.isDefinedAt(m)
senders = senders.tail
matches
- })
+ })
if (null eq qel) {
val todo = synchronized {
// in mean time new stuff might have arrived
@@ -317,6 +317,35 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
}
/**
+ * Links <code>self</code> to actor <code>to</code>.
+ *
+ * @param to the actor to link to
+ * @return the parameter actor
+ */
+ def link(to: ActorRef): ActorRef = {
+ this.link(to.localActor)
+ to
+ }
+
+ /**
+ * Unidirectional linking. For migration purposes only
+ */
+ private[actors] def watch(subject: ActorRef): ActorRef = {
+ assert(Actor.self(scheduler) == this, "watch called on actor different from self")
+ subject.localActor linkTo this
+ subject
+ }
+
+ /**
+ * Unidirectional linking. For migration purposes only
+ */
+ private[actors] def unwatch(subject: ActorRef): ActorRef = {
+ assert(Actor.self(scheduler) == this, "unwatch called on actor different from self")
+ subject.localActor unlinkFrom this
+ subject
+ }
+
+ /**
* Links <code>self</code> to the actor defined by <code>body</code>.
*
* @param body the body of the actor to link to
@@ -346,17 +375,24 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
from unlinkFrom this
}
+ /**
+ * Unlinks <code>self</code> from actor <code>from</code>.
+ */
+ def unlink(from: ActorRef) {
+ unlink(from.localActor)
+ }
+
private[actors] def unlinkFrom(from: AbstractActor) = synchronized {
links = links.filterNot(from.==)
}
- @volatile
+ @volatile
private[actors] var _trapExit = false
-
+
def trapExit = _trapExit
-
+
def trapExit_=(value: Boolean) = _trapExit = value
-
+
// guarded by this
private var exitReason: AnyRef = 'normal
// guarded by this
@@ -445,12 +481,11 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
scheduler.onTerminate(this) { f }
}
- private[actors] def internalPostStop() = {}
- private[actors] def stop(reason: AnyRef): Unit = {
+ private[actors] def stop(reason: AnyRef): Unit = {
synchronized {
shouldExit = true
- exitReason = reason
+ exitReason = reason
// resume this Actor in a way that
// causes it to exit
// (because shouldExit == true)
@@ -464,7 +499,7 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
/* Here we should not throw a SuspendActorControl,
since the current method is called from an actor that
is in the process of exiting.
-
+
Therefore, the contract for scheduleActor is that
it never throws a SuspendActorControl.
*/
diff --git a/src/actors/scala/actors/MQueue.scala b/src/actors/scala/actors/MQueue.scala
index 65427d68c5..4a148d2cb3 100644
--- a/src/actors/scala/actors/MQueue.scala
+++ b/src/actors/scala/actors/MQueue.scala
@@ -25,6 +25,20 @@ private[actors] class MQueue[Msg >: Null](protected val label: String) {
_size += diff
}
+ def prepend(other: MQueue[Msg]) {
+ if (!other.isEmpty) {
+ other.last.next = first
+ first = other.first
+ }
+ }
+
+ def clear() {
+ first = null
+ last = null
+ _size = 0
+ }
+
+
def append(msg: Msg, session: OutputChannel[Any]) {
changeSize(1) // size always increases by 1
val el = new MQueueElement(msg, session)
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index 206a97d97c..7a8d738758 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -214,11 +214,16 @@ trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
scheduler executeFromActor makeReaction(null, handler, msg)
}
+ private[actors] def preAct() = {}
+
// guarded by this
private[actors] def dostart() {
_state = Actor.State.Runnable
scheduler newActor this
- scheduler execute makeReaction(() => act(), null, null)
+ scheduler execute makeReaction(() => {
+ preAct()
+ act()
+ }, null, null)
}
/**
@@ -285,12 +290,15 @@ trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
throw Actor.suspendException
}
+ private[actors] def internalPostStop() = {}
+
private[actors] def terminated() {
synchronized {
_state = Actor.State.Terminated
// reset waitingFor, otherwise getState returns Suspended
waitingFor = Reactor.waitingForNone
}
+ internalPostStop()
scheduler.terminated(this)
}
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index 0ffbbd3cce..83d7ba0f7f 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -7,10 +7,7 @@
\* */
package scala.actors
-@deprecated("Scala Actors are beeing removed from the standard library. Please refer to the migration guide.", "2.10")
+@deprecated("Scala Actors are being removed from the standard library. Please refer to the migration guide.", "2.10")
trait ReplyReactor extends InternalReplyReactor {
-
- protected[actors] def sender: OutputChannel[Any] = super.internalSender
-
+ protected[actors] def sender: OutputChannel[Any] = super.internalSender
}
-
diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala
index d809816ff6..d176487e03 100644
--- a/src/actors/scala/actors/package.scala
+++ b/src/actors/scala/actors/package.scala
@@ -7,7 +7,7 @@ package scala
* == Guide ==
*
* A detailed guide for the actors library is available
- * [[http://www.scala-lang.org/docu/files/actors-api/actors_api_guide.html#]].
+ * [[http://docs.scala-lang.org/overviews/core/actors.html]].
*
* == Getting Started ==
*
diff --git a/src/asm/scala/tools/asm/AnnotationVisitor.java b/src/asm/scala/tools/asm/AnnotationVisitor.java
new file mode 100644
index 0000000000..b96e730a73
--- /dev/null
+++ b/src/asm/scala/tools/asm/AnnotationVisitor.java
@@ -0,0 +1,157 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A visitor to visit a Java annotation. The methods of this class must be
+ * called in the following order: ( <tt>visit</tt> | <tt>visitEnum</tt> |
+ * <tt>visitAnnotation</tt> | <tt>visitArray</tt> )* <tt>visitEnd</tt>.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public abstract class AnnotationVisitor {
+
+ /**
+ * The ASM API version implemented by this visitor. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * The annotation visitor to which this visitor must delegate method calls.
+ * May be null.
+ */
+ protected AnnotationVisitor av;
+
+ /**
+ * Constructs a new {@link AnnotationVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public AnnotationVisitor(final int api) {
+ this(api, null);
+ }
+
+ /**
+ * Constructs a new {@link AnnotationVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param av the annotation visitor to which this visitor must delegate
+ * method calls. May be null.
+ */
+ public AnnotationVisitor(final int api, final AnnotationVisitor av) {
+ /*if (api != Opcodes.ASM4) {
+ throw new IllegalArgumentException();
+ }*/
+ this.api = api;
+ this.av = av;
+ }
+
+ /**
+ * Visits a primitive value of the annotation.
+ *
+ * @param name the value name.
+ * @param value the actual value, whose type must be {@link Byte},
+ * {@link Boolean}, {@link Character}, {@link Short}, {@link Integer}
+ * , {@link Long}, {@link Float}, {@link Double}, {@link String} or
+ * {@link Type} or OBJECT or ARRAY sort. This value can also be an
+ * array of byte, boolean, short, char, int, long, float or double
+ * values (this is equivalent to using {@link #visitArray visitArray}
+ * and visiting each array element in turn, but is more convenient).
+ */
+ public void visit(String name, Object value) {
+ if (av != null) {
+ av.visit(name, value);
+ }
+ }
+
+ /**
+ * Visits an enumeration value of the annotation.
+ *
+ * @param name the value name.
+ * @param desc the class descriptor of the enumeration class.
+ * @param value the actual enumeration value.
+ */
+ public void visitEnum(String name, String desc, String value) {
+ if (av != null) {
+ av.visitEnum(name, desc, value);
+ }
+ }
+
+ /**
+ * Visits a nested annotation value of the annotation.
+ *
+ * @param name the value name.
+ * @param desc the class descriptor of the nested annotation class.
+ * @return a visitor to visit the actual nested annotation value, or
+ * <tt>null</tt> if this visitor is not interested in visiting
+ * this nested annotation. <i>The nested annotation value must be
+ * fully visited before calling other methods on this annotation
+ * visitor</i>.
+ */
+ public AnnotationVisitor visitAnnotation(String name, String desc) {
+ if (av != null) {
+ return av.visitAnnotation(name, desc);
+ }
+ return null;
+ }
+
+ /**
+ * Visits an array value of the annotation. Note that arrays of primitive
+ * types (such as byte, boolean, short, char, int, long, float or double)
+ * can be passed as value to {@link #visit visit}. This is what
+ * {@link ClassReader} does.
+ *
+ * @param name the value name.
+ * @return a visitor to visit the actual array value elements, or
+ * <tt>null</tt> if this visitor is not interested in visiting
+ * these values. The 'name' parameters passed to the methods of this
+ * visitor are ignored. <i>All the array values must be visited
+ * before calling other methods on this annotation visitor</i>.
+ */
+ public AnnotationVisitor visitArray(String name) {
+ if (av != null) {
+ return av.visitArray(name);
+ }
+ return null;
+ }
+
+ /**
+ * Visits the end of the annotation.
+ */
+ public void visitEnd() {
+ if (av != null) {
+ av.visitEnd();
+ }
+ }
+}
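A hedged sketch of chaining visitors, written in Scala against this Java API; LoggingAnnotationVisitor is a made-up name:

    import scala.tools.asm.{AnnotationVisitor, Opcodes}

    // logs enum values and delegates every call to the wrapped visitor
    class LoggingAnnotationVisitor(next: AnnotationVisitor)
        extends AnnotationVisitor(Opcodes.ASM4, next) {
      override def visitEnum(name: String, desc: String, value: String) {
        println("enum " + name + " = " + desc + "." + value)
        super.visitEnum(name, desc, value)   // forwards to `next` through the av field
      }
    }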
diff --git a/src/asm/scala/tools/asm/AnnotationWriter.java b/src/asm/scala/tools/asm/AnnotationWriter.java
new file mode 100644
index 0000000000..e530780249
--- /dev/null
+++ b/src/asm/scala/tools/asm/AnnotationWriter.java
@@ -0,0 +1,322 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * An {@link AnnotationVisitor} that generates annotations in bytecode form.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+final class AnnotationWriter extends AnnotationVisitor {
+
+ /**
+ * The class writer to which this annotation must be added.
+ */
+ private final ClassWriter cw;
+
+ /**
+ * The number of values in this annotation.
+ */
+ private int size;
+
+ /**
+ * <tt>true<tt> if values are named, <tt>false</tt> otherwise. Annotation
+ * writers used for annotation default and annotation arrays use unnamed
+ * values.
+ */
+ private final boolean named;
+
+ /**
+ * The annotation values in bytecode form. This byte vector only contains
+ * the values themselves, i.e. the number of values must be stored as a
+ * unsigned short just before these bytes.
+ */
+ private final ByteVector bv;
+
+ /**
+ * The byte vector to be used to store the number of values of this
+ * annotation. See {@link #bv}.
+ */
+ private final ByteVector parent;
+
+ /**
+ * Where the number of values of this annotation must be stored in
+ * {@link #parent}.
+ */
+ private final int offset;
+
+ /**
+ * Next annotation writer. This field is used to store annotation lists.
+ */
+ AnnotationWriter next;
+
+ /**
+ * Previous annotation writer. This field is used to store annotation lists.
+ */
+ AnnotationWriter prev;
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new {@link AnnotationWriter}.
+ *
+ * @param cw the class writer to which this annotation must be added.
+ * @param named <tt>true<tt> if values are named, <tt>false</tt> otherwise.
+ * @param bv where the annotation values must be stored.
+ * @param parent where the number of annotation values must be stored.
+ * @param offset where in <tt>parent</tt> the number of annotation values must
+ * be stored.
+ */
+ AnnotationWriter(
+ final ClassWriter cw,
+ final boolean named,
+ final ByteVector bv,
+ final ByteVector parent,
+ final int offset)
+ {
+ super(Opcodes.ASM4);
+ this.cw = cw;
+ this.named = named;
+ this.bv = bv;
+ this.parent = parent;
+ this.offset = offset;
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the AnnotationVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(final String name, final Object value) {
+ ++size;
+ if (named) {
+ bv.putShort(cw.newUTF8(name));
+ }
+ if (value instanceof String) {
+ bv.put12('s', cw.newUTF8((String) value));
+ } else if (value instanceof Byte) {
+ bv.put12('B', cw.newInteger(((Byte) value).byteValue()).index);
+ } else if (value instanceof Boolean) {
+ int v = ((Boolean) value).booleanValue() ? 1 : 0;
+ bv.put12('Z', cw.newInteger(v).index);
+ } else if (value instanceof Character) {
+ bv.put12('C', cw.newInteger(((Character) value).charValue()).index);
+ } else if (value instanceof Short) {
+ bv.put12('S', cw.newInteger(((Short) value).shortValue()).index);
+ } else if (value instanceof Type) {
+ bv.put12('c', cw.newUTF8(((Type) value).getDescriptor()));
+ } else if (value instanceof byte[]) {
+ byte[] v = (byte[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('B', cw.newInteger(v[i]).index);
+ }
+ } else if (value instanceof boolean[]) {
+ boolean[] v = (boolean[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('Z', cw.newInteger(v[i] ? 1 : 0).index);
+ }
+ } else if (value instanceof short[]) {
+ short[] v = (short[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('S', cw.newInteger(v[i]).index);
+ }
+ } else if (value instanceof char[]) {
+ char[] v = (char[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('C', cw.newInteger(v[i]).index);
+ }
+ } else if (value instanceof int[]) {
+ int[] v = (int[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('I', cw.newInteger(v[i]).index);
+ }
+ } else if (value instanceof long[]) {
+ long[] v = (long[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('J', cw.newLong(v[i]).index);
+ }
+ } else if (value instanceof float[]) {
+ float[] v = (float[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('F', cw.newFloat(v[i]).index);
+ }
+ } else if (value instanceof double[]) {
+ double[] v = (double[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('D', cw.newDouble(v[i]).index);
+ }
+ } else {
+ Item i = cw.newConstItem(value);
+ bv.put12(".s.IFJDCS".charAt(i.type), i.index);
+ }
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ ++size;
+ if (named) {
+ bv.putShort(cw.newUTF8(name));
+ }
+ bv.put12('e', cw.newUTF8(desc)).putShort(cw.newUTF8(value));
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ ++size;
+ if (named) {
+ bv.putShort(cw.newUTF8(name));
+ }
+ // write tag and type, and reserve space for values count
+ bv.put12('@', cw.newUTF8(desc)).putShort(0);
+ return new AnnotationWriter(cw, true, bv, bv, bv.length - 2);
+ }
+
+ @Override
+ public AnnotationVisitor visitArray(final String name) {
+ ++size;
+ if (named) {
+ bv.putShort(cw.newUTF8(name));
+ }
+ // write tag, and reserve space for array size
+ bv.put12('[', 0);
+ return new AnnotationWriter(cw, false, bv, bv, bv.length - 2);
+ }
+
+ @Override
+ public void visitEnd() {
+ if (parent != null) {
+ byte[] data = parent.data;
+ data[offset] = (byte) (size >>> 8);
+ data[offset + 1] = (byte) size;
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the size of this annotation writer list.
+ *
+ * @return the size of this annotation writer list.
+ */
+ int getSize() {
+ int size = 0;
+ AnnotationWriter aw = this;
+ while (aw != null) {
+ size += aw.bv.length;
+ aw = aw.next;
+ }
+ return size;
+ }
+
+ /**
+ * Puts the annotations of this annotation writer list into the given byte
+ * vector.
+ *
+ * @param out where the annotations must be put.
+ */
+ void put(final ByteVector out) {
+ int n = 0;
+ int size = 2;
+ AnnotationWriter aw = this;
+ AnnotationWriter last = null;
+ while (aw != null) {
+ ++n;
+ size += aw.bv.length;
+ aw.visitEnd(); // in case user forgot to call visitEnd
+ aw.prev = last;
+ last = aw;
+ aw = aw.next;
+ }
+ out.putInt(size);
+ out.putShort(n);
+ aw = last;
+ while (aw != null) {
+ out.putByteArray(aw.bv.data, 0, aw.bv.length);
+ aw = aw.prev;
+ }
+ }
+
+ /**
+ * Puts the given annotation lists into the given byte vector.
+ *
+ * @param panns an array of annotation writer lists.
+ * @param off index of the first annotation to be written.
+ * @param out where the annotations must be put.
+ */
+ static void put(
+ final AnnotationWriter[] panns,
+ final int off,
+ final ByteVector out)
+ {
+ int size = 1 + 2 * (panns.length - off);
+ for (int i = off; i < panns.length; ++i) {
+ size += panns[i] == null ? 0 : panns[i].getSize();
+ }
+ out.putInt(size).putByte(panns.length - off);
+ for (int i = off; i < panns.length; ++i) {
+ AnnotationWriter aw = panns[i];
+ AnnotationWriter last = null;
+ int n = 0;
+ while (aw != null) {
+ ++n;
+ aw.visitEnd(); // in case user forgot to call visitEnd
+ aw.prev = last;
+ last = aw;
+ aw = aw.next;
+ }
+ out.putShort(n);
+ aw = last;
+ while (aw != null) {
+ out.putByteArray(aw.bv.data, 0, aw.bv.length);
+ aw = aw.prev;
+ }
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/Attribute.java b/src/asm/scala/tools/asm/Attribute.java
new file mode 100644
index 0000000000..408f21ce1e
--- /dev/null
+++ b/src/asm/scala/tools/asm/Attribute.java
@@ -0,0 +1,254 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A non-standard class, field, method or code attribute.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public class Attribute {
+
+ /**
+ * The type of this attribute.
+ */
+ public final String type;
+
+ /**
+ * The raw value of this attribute, used only for unknown attributes.
+ */
+ byte[] value;
+
+ /**
+ * The next attribute in this attribute list. May be <tt>null</tt>.
+ */
+ Attribute next;
+
+ /**
+ * Constructs a new empty attribute.
+ *
+ * @param type the type of the attribute.
+ */
+ protected Attribute(final String type) {
+ this.type = type;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this type of attribute is unknown. The default
+ * implementation of this method always returns <tt>true</tt>.
+ *
+ * @return <tt>true</tt> if this type of attribute is unknown.
+ */
+ public boolean isUnknown() {
+ return true;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this type of attribute is a code attribute.
+ *
+ * @return <tt>true</tt> if this type of attribute is a code attribute.
+ */
+ public boolean isCodeAttribute() {
+ return false;
+ }
+
+ /**
+ * Returns the labels corresponding to this attribute.
+ *
+ * @return the labels corresponding to this attribute, or <tt>null</tt> if
+ * this attribute is not a code attribute that contains labels.
+ */
+ protected Label[] getLabels() {
+ return null;
+ }
+
+ /**
+ * Reads a {@link #type type} attribute. This method must return a <i>new</i>
+ * {@link Attribute} object, of type {@link #type type}, corresponding to
+ * the <tt>len</tt> bytes starting at the given offset, in the given class
+ * reader.
+ *
+ * @param cr the class that contains the attribute to be read.
+ * @param off index of the first byte of the attribute's content in {@link
+ * ClassReader#b cr.b}. The 6 attribute header bytes, containing the
+ * type and the length of the attribute, are not taken into account
+ * here.
+ * @param len the length of the attribute's content.
+ * @param buf buffer to be used to call
+ * {@link ClassReader#readUTF8 readUTF8},
+ * {@link ClassReader#readClass(int,char[]) readClass} or
+ * {@link ClassReader#readConst readConst}.
+ * @param codeOff index of the first byte of code's attribute content in
+ * {@link ClassReader#b cr.b}, or -1 if the attribute to be read is
+ * not a code attribute. The 6 attribute header bytes, containing the
+ * type and the length of the attribute, are not taken into account
+ * here.
+ * @param labels the labels of the method's code, or <tt>null</tt> if the
+ * attribute to be read is not a code attribute.
+ * @return a <i>new</i> {@link Attribute} object corresponding to the given
+ * bytes.
+ */
+ protected Attribute read(
+ final ClassReader cr,
+ final int off,
+ final int len,
+ final char[] buf,
+ final int codeOff,
+ final Label[] labels)
+ {
+ Attribute attr = new Attribute(type);
+ attr.value = new byte[len];
+ System.arraycopy(cr.b, off, attr.value, 0, len);
+ return attr;
+ }
+
+ /**
+ * Returns the byte array form of this attribute.
+ *
+ * @param cw the class to which this attribute must be added. This parameter
+ * can be used to add to the constant pool of this class the items
+ * that correspond to this attribute.
+ * @param code the bytecode of the method corresponding to this code
+ * attribute, or <tt>null</tt> if this attribute is not a code
+ * attribute.
+ * @param len the length of the bytecode of the method corresponding to this
+ * code attribute, or <tt>null</tt> if this attribute is not a code
+ * attribute.
+ * @param maxStack the maximum stack size of the method corresponding to
+ * this code attribute, or -1 if this attribute is not a code
+ * attribute.
+ * @param maxLocals the maximum number of local variables of the method
+ * corresponding to this code attribute, or -1 if this attribute is
+ * not a code attribute.
+ * @return the byte array form of this attribute.
+ */
+ protected ByteVector write(
+ final ClassWriter cw,
+ final byte[] code,
+ final int len,
+ final int maxStack,
+ final int maxLocals)
+ {
+ ByteVector v = new ByteVector();
+ v.data = value;
+ v.length = value.length;
+ return v;
+ }
+
+ /**
+ * Returns the length of the attribute list that begins with this attribute.
+ *
+ * @return the length of the attribute list that begins with this attribute.
+ */
+ final int getCount() {
+ int count = 0;
+ Attribute attr = this;
+ while (attr != null) {
+ count += 1;
+ attr = attr.next;
+ }
+ return count;
+ }
+
+ /**
+ * Returns the size of all the attributes in this attribute list.
+ *
+ * @param cw the class writer to be used to convert the attributes into byte
+ * arrays, with the {@link #write write} method.
+ * @param code the bytecode of the method corresponding to these code
+ * attributes, or <tt>null</tt> if these attributes are not code
+ * attributes.
+ * @param len the length of the bytecode of the method corresponding to
+ * these code attributes, or <tt>null</tt> if these attributes are
+ * not code attributes.
+ * @param maxStack the maximum stack size of the method corresponding to
+ * these code attributes, or -1 if these attributes are not code
+ * attributes.
+ * @param maxLocals the maximum number of local variables of the method
+ * corresponding to these code attributes, or -1 if these attributes
+ * are not code attributes.
+ * @return the size of all the attributes in this attribute list. This size
+ * includes the size of the attribute headers.
+ */
+ final int getSize(
+ final ClassWriter cw,
+ final byte[] code,
+ final int len,
+ final int maxStack,
+ final int maxLocals)
+ {
+ Attribute attr = this;
+ int size = 0;
+ while (attr != null) {
+ cw.newUTF8(attr.type);
+ size += attr.write(cw, code, len, maxStack, maxLocals).length + 6;
+ attr = attr.next;
+ }
+ return size;
+ }
+
+ /**
+ * Writes all the attributes of this attribute list in the given byte
+ * vector.
+ *
+ * @param cw the class writer to be used to convert the attributes into byte
+ * arrays, with the {@link #write write} method.
+ * @param code the bytecode of the method corresponding to these code
+ * attributes, or <tt>null</tt> if these attributes are not code
+ * attributes.
+ * @param len the length of the bytecode of the method corresponding to
+ * these code attributes, or <tt>null</tt> if these attributes are
+ * not code attributes.
+ * @param maxStack the maximum stack size of the method corresponding to
+ * these code attributes, or -1 if these attributes are not code
+ * attributes.
+ * @param maxLocals the maximum number of local variables of the method
+ * corresponding to these code attributes, or -1 if these attributes
+ * are not code attributes.
+ * @param out where the attributes must be written.
+ */
+ final void put(
+ final ClassWriter cw,
+ final byte[] code,
+ final int len,
+ final int maxStack,
+ final int maxLocals,
+ final ByteVector out)
+ {
+ Attribute attr = this;
+ while (attr != null) {
+ ByteVector b = attr.write(cw, code, len, maxStack, maxLocals);
+ out.putShort(cw.newUTF8(attr.type)).putInt(b.length);
+ out.putByteArray(b.data, 0, b.length);
+ attr = attr.next;
+ }
+ }
+}
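A hedged sketch of a custom attribute in Scala; the attribute name "ScalaHint" is made up. The inherited read/write keep the raw bytes, so a minimal subclass can simply declare itself known:

    import scala.tools.asm.Attribute

    class ScalaHintAttribute extends Attribute("ScalaHint") {
      override def isUnknown(): Boolean = false   // mark this attribute type as recognised
    }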
diff --git a/src/asm/scala/tools/asm/ByteVector.java b/src/asm/scala/tools/asm/ByteVector.java
new file mode 100644
index 0000000000..5081f0184b
--- /dev/null
+++ b/src/asm/scala/tools/asm/ByteVector.java
@@ -0,0 +1,293 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A dynamically extensible vector of bytes. This class is roughly equivalent to
+ * a DataOutputStream on top of a ByteArrayOutputStream, but is more efficient.
+ *
+ * @author Eric Bruneton
+ */
+public class ByteVector {
+
+ /**
+ * The content of this vector.
+ */
+ byte[] data;
+
+ /**
+ * Actual number of bytes in this vector.
+ */
+ int length;
+
+ /**
+ * Constructs a new {@link ByteVector ByteVector} with a default initial
+ * size.
+ */
+ public ByteVector() {
+ data = new byte[64];
+ }
+
+ /**
+ * Constructs a new {@link ByteVector ByteVector} with the given initial
+ * size.
+ *
+ * @param initialSize the initial size of the byte vector to be constructed.
+ */
+ public ByteVector(final int initialSize) {
+ data = new byte[initialSize];
+ }
+
+ /**
+ * Puts a byte into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param b a byte.
+ * @return this byte vector.
+ */
+ public ByteVector putByte(final int b) {
+ int length = this.length;
+ if (length + 1 > data.length) {
+ enlarge(1);
+ }
+ data[length++] = (byte) b;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts two bytes into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param b1 a byte.
+ * @param b2 another byte.
+ * @return this byte vector.
+ */
+ ByteVector put11(final int b1, final int b2) {
+ int length = this.length;
+ if (length + 2 > data.length) {
+ enlarge(2);
+ }
+ byte[] data = this.data;
+ data[length++] = (byte) b1;
+ data[length++] = (byte) b2;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts a short into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param s a short.
+ * @return this byte vector.
+ */
+ public ByteVector putShort(final int s) {
+ int length = this.length;
+ if (length + 2 > data.length) {
+ enlarge(2);
+ }
+ byte[] data = this.data;
+ data[length++] = (byte) (s >>> 8);
+ data[length++] = (byte) s;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts a byte and a short into this byte vector. The byte vector is
+ * automatically enlarged if necessary.
+ *
+ * @param b a byte.
+ * @param s a short.
+ * @return this byte vector.
+ */
+ ByteVector put12(final int b, final int s) {
+ int length = this.length;
+ if (length + 3 > data.length) {
+ enlarge(3);
+ }
+ byte[] data = this.data;
+ data[length++] = (byte) b;
+ data[length++] = (byte) (s >>> 8);
+ data[length++] = (byte) s;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts an int into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param i an int.
+ * @return this byte vector.
+ */
+ public ByteVector putInt(final int i) {
+ int length = this.length;
+ if (length + 4 > data.length) {
+ enlarge(4);
+ }
+ byte[] data = this.data;
+ data[length++] = (byte) (i >>> 24);
+ data[length++] = (byte) (i >>> 16);
+ data[length++] = (byte) (i >>> 8);
+ data[length++] = (byte) i;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts a long into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param l a long.
+ * @return this byte vector.
+ */
+ public ByteVector putLong(final long l) {
+ int length = this.length;
+ if (length + 8 > data.length) {
+ enlarge(8);
+ }
+ byte[] data = this.data;
+ int i = (int) (l >>> 32);
+ data[length++] = (byte) (i >>> 24);
+ data[length++] = (byte) (i >>> 16);
+ data[length++] = (byte) (i >>> 8);
+ data[length++] = (byte) i;
+ i = (int) l;
+ data[length++] = (byte) (i >>> 24);
+ data[length++] = (byte) (i >>> 16);
+ data[length++] = (byte) (i >>> 8);
+ data[length++] = (byte) i;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts an UTF8 string into this byte vector. The byte vector is
+ * automatically enlarged if necessary.
+ *
+ * @param s a String.
+ * @return this byte vector.
+ */
+ public ByteVector putUTF8(final String s) {
+ int charLength = s.length();
+ int len = length;
+ if (len + 2 + charLength > data.length) {
+ enlarge(2 + charLength);
+ }
+ byte[] data = this.data;
+ // optimistic algorithm: instead of computing the byte length and then
+ // serializing the string (which requires two loops), we assume the byte
+ // length is equal to char length (which is the most frequent case), and
+ // we start serializing the string right away. During the serialization,
+ // if we find that this assumption is wrong, we continue with the
+ // general method.
+ data[len++] = (byte) (charLength >>> 8);
+ data[len++] = (byte) charLength;
+ for (int i = 0; i < charLength; ++i) {
+ char c = s.charAt(i);
+ if (c >= '\001' && c <= '\177') {
+ data[len++] = (byte) c;
+ } else {
+ int byteLength = i;
+ for (int j = i; j < charLength; ++j) {
+ c = s.charAt(j);
+ if (c >= '\001' && c <= '\177') {
+ byteLength++;
+ } else if (c > '\u07FF') {
+ byteLength += 3;
+ } else {
+ byteLength += 2;
+ }
+ }
+ data[length] = (byte) (byteLength >>> 8);
+ data[length + 1] = (byte) byteLength;
+ if (length + 2 + byteLength > data.length) {
+ length = len;
+ enlarge(2 + byteLength);
+ data = this.data;
+ }
+ for (int j = i; j < charLength; ++j) {
+ c = s.charAt(j);
+ if (c >= '\001' && c <= '\177') {
+ data[len++] = (byte) c;
+ } else if (c > '\u07FF') {
+ data[len++] = (byte) (0xE0 | c >> 12 & 0xF);
+ data[len++] = (byte) (0x80 | c >> 6 & 0x3F);
+ data[len++] = (byte) (0x80 | c & 0x3F);
+ } else {
+ data[len++] = (byte) (0xC0 | c >> 6 & 0x1F);
+ data[len++] = (byte) (0x80 | c & 0x3F);
+ }
+ }
+ break;
+ }
+ }
+ length = len;
+ return this;
+ }
+
+ /**
+ * Puts an array of bytes into this byte vector. The byte vector is
+ * automatically enlarged if necessary.
+ *
+ * @param b an array of bytes. May be <tt>null</tt> to put <tt>len</tt>
+ * null bytes into this byte vector.
+ * @param off index of the first byte of b that must be copied.
+ * @param len number of bytes of b that must be copied.
+ * @return this byte vector.
+ */
+ public ByteVector putByteArray(final byte[] b, final int off, final int len)
+ {
+ if (length + len > data.length) {
+ enlarge(len);
+ }
+ if (b != null) {
+ System.arraycopy(b, off, data, length, len);
+ }
+ length += len;
+ return this;
+ }
+
+ /**
+ * Enlarge this byte vector so that it can receive n more bytes.
+ *
+ * @param size number of additional bytes that this byte vector should be
+ * able to receive.
+ */
+ private void enlarge(final int size) {
+ int length1 = 2 * data.length;
+ int length2 = length + size;
+ byte[] newData = new byte[length1 > length2 ? length1 : length2];
+ System.arraycopy(data, 0, newData, 0, length);
+ data = newData;
+ }
+}
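A small sketch of the fluent put* API used from Scala; the values are arbitrary:

    import scala.tools.asm.ByteVector

    val bv = new ByteVector(64)
    bv.putByte(0xCA).putShort(0xFE).putInt(2012)   // calls chain because each put returns this
    bv.putUTF8("scala/Predef$")                    // length-prefixed modified UTF-8
    bv.putByteArray(Array[Byte](1, 2, 3), 0, 3)    // copy three raw bytes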
diff --git a/src/asm/scala/tools/asm/ClassReader.java b/src/asm/scala/tools/asm/ClassReader.java
new file mode 100644
index 0000000000..f3287d41ae
--- /dev/null
+++ b/src/asm/scala/tools/asm/ClassReader.java
@@ -0,0 +1,2216 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * A Java class parser to make a {@link ClassVisitor} visit an existing class.
+ * This class parses a byte array conforming to the Java class file format and
+ * calls the appropriate visit methods of a given class visitor for each field,
+ * method and bytecode instruction encountered.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public class ClassReader {
+
+ /**
+ * True to enable signatures support.
+ */
+ static final boolean SIGNATURES = true;
+
+ /**
+ * True to enable annotations support.
+ */
+ static final boolean ANNOTATIONS = true;
+
+ /**
+ * True to enable stack map frames support.
+ */
+ static final boolean FRAMES = true;
+
+ /**
+ * True to enable bytecode writing support.
+ */
+ static final boolean WRITER = true;
+
+ /**
+ * True to enable JSR_W and GOTO_W support.
+ */
+ static final boolean RESIZE = true;
+
+ /**
+ * Flag to skip method code. If this flag is set, the <code>Code</code>
+ * attribute won't be visited. This can be used, for example, to retrieve
+ * annotations for methods and method parameters.
+ */
+ public static final int SKIP_CODE = 1;
+
+ /**
+ * Flag to skip the debug information in the class. If this flag is set the
+ * debug information of the class is not visited, i.e. the
+ * {@link MethodVisitor#visitLocalVariable visitLocalVariable} and
+ * {@link MethodVisitor#visitLineNumber visitLineNumber} methods will not be
+ * called.
+ */
+ public static final int SKIP_DEBUG = 2;
+
+ /**
+ * Flag to skip the stack map frames in the class. If this flag is set the
+ * stack map frames of the class are not visited, i.e. the
+ * {@link MethodVisitor#visitFrame visitFrame} method will not be called.
+ * This flag is useful when the {@link ClassWriter#COMPUTE_FRAMES} option is
+ * used: it avoids visiting frames that will be ignored and recomputed from
+ * scratch in the class writer.
+ */
+ public static final int SKIP_FRAMES = 4;
+
+ /**
+ * Flag to expand the stack map frames. By default stack map frames are
+ * visited in their original format (i.e. "expanded" for classes whose
+ * version is less than V1_6, and "compressed" for the other classes). If
+ * this flag is set, stack map frames are always visited in expanded format
+ * (this option adds a decompression/recompression step in ClassReader and
+ * ClassWriter, which degrades performance quite a lot).
+ */
+ public static final int EXPAND_FRAMES = 8;
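+
+ // The flags above are bit masks; an illustrative sketch of how they are
+ // typically combined (reader and classVisitor are hypothetical variables):
+ //
+ //   int flags = ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES;
+ //   reader.accept(classVisitor, flags);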
+
+ /**
+ * The class to be parsed. <i>The content of this array must not be
+ * modified. This field is intended for {@link Attribute} sub classes, and
+ * is normally not needed by class generators or adapters.</i>
+ */
+ public final byte[] b;
+
+ /**
+ * The start index of each constant pool item in {@link #b b}, plus one.
+ * The one byte offset skips the constant pool item tag that indicates its
+ * type.
+ */
+ private final int[] items;
+
+ /**
+ * The String objects corresponding to the CONSTANT_Utf8 items. This cache
+ * avoids multiple parsing of a given CONSTANT_Utf8 constant pool item,
+ * which greatly improves performance (by a factor of 2 to 3). This caching
+ * strategy could be extended to all constant pool items, but its benefit
+ * would not be so great for these items (because they are much less
+ * expensive to parse than CONSTANT_Utf8 items).
+ */
+ private final String[] strings;
+
+ /**
+ * Maximum length of the strings contained in the constant pool of the
+ * class.
+ */
+ private final int maxStringLength;
+
+ /**
+ * Start index of the class header information (access, name...) in
+ * {@link #b b}.
+ */
+ public final int header;
+
+ // ------------------------------------------------------------------------
+ // Constructors
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new {@link ClassReader} object.
+ *
+ * @param b the bytecode of the class to be read.
+ */
+ public ClassReader(final byte[] b) {
+ this(b, 0, b.length);
+ }
+
+ /**
+ * Constructs a new {@link ClassReader} object.
+ *
+ * @param b the bytecode of the class to be read.
+ * @param off the start offset of the class data.
+ * @param len the length of the class data.
+ */
+ public ClassReader(final byte[] b, final int off, final int len) {
+ this.b = b;
+ // checks the class version
+ if (readShort(6) > Opcodes.V1_7) {
+ throw new IllegalArgumentException();
+ }
+ // parses the constant pool
+ items = new int[readUnsignedShort(off + 8)];
+ int n = items.length;
+ strings = new String[n];
+ int max = 0;
+ int index = off + 10;
+ for (int i = 1; i < n; ++i) {
+ items[i] = index + 1;
+ int size;
+ switch (b[index]) {
+ case ClassWriter.FIELD:
+ case ClassWriter.METH:
+ case ClassWriter.IMETH:
+ case ClassWriter.INT:
+ case ClassWriter.FLOAT:
+ case ClassWriter.NAME_TYPE:
+ case ClassWriter.INDY:
+ size = 5;
+ break;
+ case ClassWriter.LONG:
+ case ClassWriter.DOUBLE:
+ size = 9;
+ ++i;
+ break;
+ case ClassWriter.UTF8:
+ size = 3 + readUnsignedShort(index + 1);
+ if (size > max) {
+ max = size;
+ }
+ break;
+ case ClassWriter.HANDLE:
+ size = 4;
+ break;
+ // case ClassWriter.CLASS:
+ // case ClassWriter.STR:
+ // case ClassWriter.MTYPE
+ default:
+ size = 3;
+ break;
+ }
+ index += size;
+ }
+ maxStringLength = max;
+ // the class header information starts just after the constant pool
+ header = index;
+ }
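+
+ // Note on the sizes used above: each constant pool entry is 1 tag byte plus
+ // a fixed payload (4 bytes for int/float, field/method refs, name-and-type
+ // and invokedynamic; 8 for long/double, which also consume two pool slots;
+ // 3 for method handles; 2 for class/string/method type), except
+ // CONSTANT_Utf8, whose payload is a 2-byte length followed by that many
+ // bytes.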
+
+ /**
+ * Returns the class's access flags (see {@link Opcodes}). This value may
+ * not reflect the Deprecated and Synthetic flags when the bytecode version
+ * is below 1.5, where those flags are represented by attributes.
+ *
+ * @return the class access flags
+ *
+ * @see ClassVisitor#visit(int, int, String, String, String, String[])
+ */
+ public int getAccess() {
+ return readUnsignedShort(header);
+ }
+
+ /**
+ * Returns the internal name of the class (see
+ * {@link Type#getInternalName() getInternalName}).
+ *
+ * @return the internal class name
+ *
+ * @see ClassVisitor#visit(int, int, String, String, String, String[])
+ */
+ public String getClassName() {
+ return readClass(header + 2, new char[maxStringLength]);
+ }
+
+ /**
+ * Returns the internal name of the super class (see
+ * {@link Type#getInternalName() getInternalName}). For interfaces, the
+ * super class is {@link Object}.
+ *
+ * @return the internal name of the super class, or <tt>null</tt> for the
+ * {@link Object} class.
+ *
+ * @see ClassVisitor#visit(int, int, String, String, String, String[])
+ */
+ public String getSuperName() {
+ int n = items[readUnsignedShort(header + 4)];
+ return n == 0 ? null : readUTF8(n, new char[maxStringLength]);
+ }
+
+ /**
+ * Returns the internal names of the class's interfaces (see
+ * {@link Type#getInternalName() getInternalName}).
+ *
+ * @return the array of internal names for all implemented interfaces or
+ * <tt>null</tt>.
+ *
+ * @see ClassVisitor#visit(int, int, String, String, String, String[])
+ */
+ public String[] getInterfaces() {
+ int index = header + 6;
+ int n = readUnsignedShort(index);
+ String[] interfaces = new String[n];
+ if (n > 0) {
+ char[] buf = new char[maxStringLength];
+ for (int i = 0; i < n; ++i) {
+ index += 2;
+ interfaces[i] = readClass(index, buf);
+ }
+ }
+ return interfaces;
+ }
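+
+ // Illustrative sketch: the accessors above allow a quick header inspection
+ // without a full visit (bytes is a hypothetical byte[] holding the class
+ // file contents):
+ //
+ //   ClassReader cr = new ClassReader(bytes);
+ //   String name = cr.getClassName();   // e.g. "java/lang/String"
+ //   String sup = cr.getSuperName();    // null only for java/lang/Object
+ //   String[] itfs = cr.getInterfaces();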
+
+ /**
+ * Copies the constant pool data into the given {@link ClassWriter}. Should
+ * be called before the {@link #accept(ClassVisitor,int)} method.
+ *
+ * @param classWriter the {@link ClassWriter} to copy constant pool into.
+ */
+ void copyPool(final ClassWriter classWriter) {
+ char[] buf = new char[maxStringLength];
+ int ll = items.length;
+ Item[] items2 = new Item[ll];
+ for (int i = 1; i < ll; i++) {
+ int index = items[i];
+ int tag = b[index - 1];
+ Item item = new Item(i);
+ int nameType;
+ switch (tag) {
+ case ClassWriter.FIELD:
+ case ClassWriter.METH:
+ case ClassWriter.IMETH:
+ nameType = items[readUnsignedShort(index + 2)];
+ item.set(tag,
+ readClass(index, buf),
+ readUTF8(nameType, buf),
+ readUTF8(nameType + 2, buf));
+ break;
+
+ case ClassWriter.INT:
+ item.set(readInt(index));
+ break;
+
+ case ClassWriter.FLOAT:
+ item.set(Float.intBitsToFloat(readInt(index)));
+ break;
+
+ case ClassWriter.NAME_TYPE:
+ item.set(tag,
+ readUTF8(index, buf),
+ readUTF8(index + 2, buf),
+ null);
+ break;
+
+ case ClassWriter.LONG:
+ item.set(readLong(index));
+ ++i;
+ break;
+
+ case ClassWriter.DOUBLE:
+ item.set(Double.longBitsToDouble(readLong(index)));
+ ++i;
+ break;
+
+ case ClassWriter.UTF8: {
+ String s = strings[i];
+ if (s == null) {
+ index = items[i];
+ s = strings[i] = readUTF(index + 2,
+ readUnsignedShort(index),
+ buf);
+ }
+ item.set(tag, s, null, null);
+ }
+ break;
+
+ case ClassWriter.HANDLE: {
+ int fieldOrMethodRef = items[readUnsignedShort(index + 1)];
+ nameType = items[readUnsignedShort(fieldOrMethodRef + 2)];
+ item.set(ClassWriter.HANDLE_BASE + readByte(index),
+ readClass(fieldOrMethodRef, buf),
+ readUTF8(nameType, buf),
+ readUTF8(nameType + 2, buf));
+
+ }
+ break;
+
+ case ClassWriter.INDY:
+ if (classWriter.bootstrapMethods == null) {
+ copyBootstrapMethods(classWriter, items2, buf);
+ }
+ nameType = items[readUnsignedShort(index + 2)];
+ item.set(readUTF8(nameType, buf),
+ readUTF8(nameType + 2, buf),
+ readUnsignedShort(index));
+ break;
+
+ // case ClassWriter.STR:
+ // case ClassWriter.CLASS:
+ // case ClassWriter.MTYPE
+ default:
+ item.set(tag, readUTF8(index, buf), null, null);
+ break;
+ }
+
+ int index2 = item.hashCode % items2.length;
+ item.next = items2[index2];
+ items2[index2] = item;
+ }
+
+ int off = items[1] - 1;
+ classWriter.pool.putByteArray(b, off, header - off);
+ classWriter.items = items2;
+ classWriter.threshold = (int) (0.75d * ll);
+ classWriter.index = ll;
+ }
+
+ private void copyBootstrapMethods(ClassWriter classWriter, Item[] items2, char[] buf) {
+ int i, j, k, u, v;
+
+ // skip class header
+ v = header;
+ v += 8 + (readUnsignedShort(v + 6) << 1);
+
+ // skips fields and methods
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ j = readUnsignedShort(v + 6);
+ v += 8;
+ for (; j > 0; --j) {
+ v += 6 + readInt(v + 2);
+ }
+ }
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ j = readUnsignedShort(v + 6);
+ v += 8;
+ for (; j > 0; --j) {
+ v += 6 + readInt(v + 2);
+ }
+ }
+
+ // read class attributes
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ String attrName = readUTF8(v, buf);
+ int size = readInt(v + 2);
+ if ("BootstrapMethods".equals(attrName)) {
+ int boostrapMethodCount = readUnsignedShort(v + 6);
+ int x = v + 8;
+ for (j = 0; j < boostrapMethodCount; j++) {
+ int hashCode = readConst(readUnsignedShort(x), buf).hashCode();
+ k = readUnsignedShort(x + 2);
+ u = x + 4;
+ for(; k > 0; --k) {
+ hashCode ^= readConst(readUnsignedShort(u), buf).hashCode();
+ u += 2;
+ }
+ Item item = new Item(j);
+ item.set(x - v - 8, hashCode & 0x7FFFFFFF);
+
+ int index2 = item.hashCode % items2.length;
+ item.next = items2[index2];
+ items2[index2] = item;
+
+ x = u;
+ }
+
+ classWriter.bootstrapMethodsCount = boostrapMethodCount;
+ ByteVector bootstrapMethods = new ByteVector(size + 62);
+ bootstrapMethods.putByteArray(b, v + 8, size - 2);
+ classWriter.bootstrapMethods = bootstrapMethods;
+ return;
+ }
+ v += 6 + size;
+ }
+
+ // should not happen: an INDY constant pool item requires a BootstrapMethods attribute
+ }
+
+ /**
+ * Constructs a new {@link ClassReader} object.
+ *
+ * @param is an input stream from which to read the class.
+ * @throws IOException if a problem occurs during reading.
+ */
+ public ClassReader(final InputStream is) throws IOException {
+ this(readClass(is, false));
+ }
+
+ /**
+ * Constructs a new {@link ClassReader} object.
+ *
+ * @param name the binary qualified name of the class to be read.
+ * @throws IOException if an exception occurs during reading.
+ */
+ public ClassReader(final String name) throws IOException {
+ this(readClass(ClassLoader.getSystemResourceAsStream(name.replace('.', '/')
+ + ".class"), true));
+ }
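+
+ // Illustrative sketch of the two convenience constructors above; the binary
+ // name form loads the ".class" resource through the system class loader
+ // ("com.example.Foo" and "Foo.class" are hypothetical):
+ //
+ //   ClassReader fromName = new ClassReader("com.example.Foo");
+ //   ClassReader fromStream =
+ //       new ClassReader(new java.io.FileInputStream("Foo.class"));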
+
+ /**
+ * Reads the bytecode of a class.
+ *
+ * @param is an input stream from which to read the class.
+ * @param close true to close the input stream after reading.
+ * @return the bytecode read from the given input stream.
+ * @throws IOException if a problem occurs during reading.
+ */
+ private static byte[] readClass(final InputStream is, boolean close)
+ throws IOException
+ {
+ if (is == null) {
+ throw new IOException("Class not found");
+ }
+ try {
+ byte[] b = new byte[is.available()];
+ int len = 0;
+ while (true) {
+ int n = is.read(b, len, b.length - len);
+ if (n == -1) {
+ if (len < b.length) {
+ byte[] c = new byte[len];
+ System.arraycopy(b, 0, c, 0, len);
+ b = c;
+ }
+ return b;
+ }
+ len += n;
+ if (len == b.length) {
+ int last = is.read();
+ if (last < 0) {
+ return b;
+ }
+ byte[] c = new byte[b.length + 1000];
+ System.arraycopy(b, 0, c, 0, len);
+ c[len++] = (byte) last;
+ b = c;
+ }
+ }
+ } finally {
+ if (close) {
+ is.close();
+ }
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Public methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Makes the given visitor visit the Java class of this {@link ClassReader}.
+ * This class is the one specified in the constructor (see
+ * {@link #ClassReader(byte[]) ClassReader}).
+ *
+ * @param classVisitor the visitor that must visit this class.
+ * @param flags option flags that can be used to modify the default behavior
+ * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
+ * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+ */
+ public void accept(final ClassVisitor classVisitor, final int flags) {
+ accept(classVisitor, new Attribute[0], flags);
+ }
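+
+ // A minimal end-to-end sketch, assuming the ASM 4.x ClassVisitor
+ // constructor that takes an api level (Opcodes.ASM4) and a hypothetical
+ // "bytes" array:
+ //
+ //   new ClassReader(bytes).accept(new ClassVisitor(Opcodes.ASM4) {
+ //       @Override
+ //       public MethodVisitor visitMethod(int access, String name,
+ //               String desc, String signature, String[] exceptions) {
+ //           System.out.println(name + desc);
+ //           return null; // skip the method body
+ //       }
+ //   }, ClassReader.SKIP_FRAMES);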
+
+ /**
+ * Makes the given visitor visit the Java class of this {@link ClassReader}.
+ * This class is the one specified in the constructor (see
+ * {@link #ClassReader(byte[]) ClassReader}).
+ *
+ * @param classVisitor the visitor that must visit this class.
+ * @param attrs prototypes of the attributes that must be parsed during the
+ * visit of the class. Any attribute whose type is not equal to the
+ * type of one of the prototypes will not be parsed: its byte array
+ * value will be passed unchanged to the ClassWriter. <i>This may
+ * corrupt it if this value contains references to the constant pool,
+ * or has syntactic or semantic links with a class element that has
+ * been transformed by a class adapter between the reader and the
+ * writer</i>.
+ * @param flags option flags that can be used to modify the default behavior
+ * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
+ * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+ */
+ public void accept(
+ final ClassVisitor classVisitor,
+ final Attribute[] attrs,
+ final int flags)
+ {
+ byte[] b = this.b; // the bytecode array
+ char[] c = new char[maxStringLength]; // buffer used to read strings
+ int i, j, k; // loop variables
+ int u, v, w; // indexes in b
+ Attribute attr;
+
+ int access;
+ String name;
+ String desc;
+ String attrName;
+ String signature;
+ int anns = 0;
+ int ianns = 0;
+ Attribute cattrs = null;
+
+ // visits the header
+ u = header;
+ access = readUnsignedShort(u);
+ name = readClass(u + 2, c);
+ v = items[readUnsignedShort(u + 4)];
+ String superClassName = v == 0 ? null : readUTF8(v, c);
+ String[] implementedItfs = new String[readUnsignedShort(u + 6)];
+ w = 0;
+ u += 8;
+ for (i = 0; i < implementedItfs.length; ++i) {
+ implementedItfs[i] = readClass(u, c);
+ u += 2;
+ }
+
+ boolean skipCode = (flags & SKIP_CODE) != 0;
+ boolean skipDebug = (flags & SKIP_DEBUG) != 0;
+ boolean unzip = (flags & EXPAND_FRAMES) != 0;
+
+ // skips fields and methods
+ v = u;
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ j = readUnsignedShort(v + 6);
+ v += 8;
+ for (; j > 0; --j) {
+ v += 6 + readInt(v + 2);
+ }
+ }
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ j = readUnsignedShort(v + 6);
+ v += 8;
+ for (; j > 0; --j) {
+ v += 6 + readInt(v + 2);
+ }
+ }
+ // reads the class's attributes
+ signature = null;
+ String sourceFile = null;
+ String sourceDebug = null;
+ String enclosingOwner = null;
+ String enclosingName = null;
+ String enclosingDesc = null;
+ int[] bootstrapMethods = null; // start indexes of the bootstrap methods
+
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ attrName = readUTF8(v, c);
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("SourceFile".equals(attrName)) {
+ sourceFile = readUTF8(v + 6, c);
+ } else if ("InnerClasses".equals(attrName)) {
+ w = v + 6;
+ } else if ("EnclosingMethod".equals(attrName)) {
+ enclosingOwner = readClass(v + 6, c);
+ int item = readUnsignedShort(v + 8);
+ if (item != 0) {
+ enclosingName = readUTF8(items[item], c);
+ enclosingDesc = readUTF8(items[item] + 2, c);
+ }
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(v + 6, c);
+ } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = v + 6;
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if ("SourceDebugExtension".equals(attrName)) {
+ int len = readInt(v + 2);
+ sourceDebug = readUTF(v + 6, len, new char[len]);
+ } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = v + 6;
+ } else if ("BootstrapMethods".equals(attrName)) {
+ int boostrapMethodCount = readUnsignedShort(v + 6);
+ bootstrapMethods = new int[boostrapMethodCount];
+ int x = v + 8;
+ for (j = 0; j < boostrapMethodCount; j++) {
+ bootstrapMethods[j] = x;
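+ // each entry is bootstrap_method_ref (u2), num_bootstrap_arguments (u2)
+ // and 2 bytes per argument, i.e. (2 + argCount) << 1 == 4 + 2 * argCount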
+ x += 2 + readUnsignedShort(x + 2) << 1;
+ }
+ } else {
+ attr = readAttribute(attrs,
+ attrName,
+ v + 6,
+ readInt(v + 2),
+ c,
+ -1,
+ null);
+ if (attr != null) {
+ attr.next = cattrs;
+ cattrs = attr;
+ }
+ }
+ v += 6 + readInt(v + 2);
+ }
+ // calls the visit method
+ classVisitor.visit(readInt(4),
+ access,
+ name,
+ signature,
+ superClassName,
+ implementedItfs);
+
+ // calls the visitSource method
+ if (!skipDebug && (sourceFile != null || sourceDebug != null)) {
+ classVisitor.visitSource(sourceFile, sourceDebug);
+ }
+
+ // calls the visitOuterClass method
+ if (enclosingOwner != null) {
+ classVisitor.visitOuterClass(enclosingOwner,
+ enclosingName,
+ enclosingDesc);
+ }
+
+ // visits the class annotations
+ if (ANNOTATIONS) {
+ for (i = 1; i >= 0; --i) {
+ v = i == 0 ? ianns : anns;
+ if (v != 0) {
+ j = readUnsignedShort(v);
+ v += 2;
+ for (; j > 0; --j) {
+ v = readAnnotationValues(v + 2,
+ c,
+ true,
+ classVisitor.visitAnnotation(readUTF8(v, c), i != 0));
+ }
+ }
+ }
+ }
+
+ // visits the class attributes
+ while (cattrs != null) {
+ attr = cattrs.next;
+ cattrs.next = null;
+ classVisitor.visitAttribute(cattrs);
+ cattrs = attr;
+ }
+
+ // calls the visitInnerClass method
+ if (w != 0) {
+ i = readUnsignedShort(w);
+ w += 2;
+ for (; i > 0; --i) {
+ classVisitor.visitInnerClass(readUnsignedShort(w) == 0
+ ? null
+ : readClass(w, c), readUnsignedShort(w + 2) == 0
+ ? null
+ : readClass(w + 2, c), readUnsignedShort(w + 4) == 0
+ ? null
+ : readUTF8(w + 4, c), readUnsignedShort(w + 6));
+ w += 8;
+ }
+ }
+
+ // visits the fields
+ i = readUnsignedShort(u);
+ u += 2;
+ for (; i > 0; --i) {
+ access = readUnsignedShort(u);
+ name = readUTF8(u + 2, c);
+ desc = readUTF8(u + 4, c);
+ // visits the field's attributes and looks for a ConstantValue
+ // attribute
+ int fieldValueItem = 0;
+ signature = null;
+ anns = 0;
+ ianns = 0;
+ cattrs = null;
+
+ j = readUnsignedShort(u + 6);
+ u += 8;
+ for (; j > 0; --j) {
+ attrName = readUTF8(u, c);
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("ConstantValue".equals(attrName)) {
+ fieldValueItem = readUnsignedShort(u + 6);
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(u + 6, c);
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u + 6;
+ } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u + 6;
+ } else {
+ attr = readAttribute(attrs,
+ attrName,
+ u + 6,
+ readInt(u + 2),
+ c,
+ -1,
+ null);
+ if (attr != null) {
+ attr.next = cattrs;
+ cattrs = attr;
+ }
+ }
+ u += 6 + readInt(u + 2);
+ }
+ // visits the field
+ FieldVisitor fv = classVisitor.visitField(access,
+ name,
+ desc,
+ signature,
+ fieldValueItem == 0 ? null : readConst(fieldValueItem, c));
+ // visits the field annotations and attributes
+ if (fv != null) {
+ if (ANNOTATIONS) {
+ for (j = 1; j >= 0; --j) {
+ v = j == 0 ? ianns : anns;
+ if (v != 0) {
+ k = readUnsignedShort(v);
+ v += 2;
+ for (; k > 0; --k) {
+ v = readAnnotationValues(v + 2,
+ c,
+ true,
+ fv.visitAnnotation(readUTF8(v, c), j != 0));
+ }
+ }
+ }
+ }
+ while (cattrs != null) {
+ attr = cattrs.next;
+ cattrs.next = null;
+ fv.visitAttribute(cattrs);
+ cattrs = attr;
+ }
+ fv.visitEnd();
+ }
+ }
+
+ // visits the methods
+ i = readUnsignedShort(u);
+ u += 2;
+ for (; i > 0; --i) {
+ int u0 = u + 6;
+ access = readUnsignedShort(u);
+ name = readUTF8(u + 2, c);
+ desc = readUTF8(u + 4, c);
+ signature = null;
+ anns = 0;
+ ianns = 0;
+ int dann = 0;
+ int mpanns = 0;
+ int impanns = 0;
+ cattrs = null;
+ v = 0;
+ w = 0;
+
+ // looks for Code and Exceptions attributes
+ j = readUnsignedShort(u + 6);
+ u += 8;
+ for (; j > 0; --j) {
+ attrName = readUTF8(u, c);
+ int attrSize = readInt(u + 2);
+ u += 6;
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("Code".equals(attrName)) {
+ if (!skipCode) {
+ v = u;
+ }
+ } else if ("Exceptions".equals(attrName)) {
+ w = u;
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(u, c);
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u;
+ } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) {
+ dann = u;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u;
+ } else if (ANNOTATIONS && "RuntimeVisibleParameterAnnotations".equals(attrName))
+ {
+ mpanns = u;
+ } else if (ANNOTATIONS && "RuntimeInvisibleParameterAnnotations".equals(attrName))
+ {
+ impanns = u;
+ } else {
+ attr = readAttribute(attrs,
+ attrName,
+ u,
+ attrSize,
+ c,
+ -1,
+ null);
+ if (attr != null) {
+ attr.next = cattrs;
+ cattrs = attr;
+ }
+ }
+ u += attrSize;
+ }
+ // reads declared exceptions
+ String[] exceptions;
+ if (w == 0) {
+ exceptions = null;
+ } else {
+ exceptions = new String[readUnsignedShort(w)];
+ w += 2;
+ for (j = 0; j < exceptions.length; ++j) {
+ exceptions[j] = readClass(w, c);
+ w += 2;
+ }
+ }
+
+ // visits the method's code, if any
+ MethodVisitor mv = classVisitor.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+
+ if (mv != null) {
+ /*
+ * if the returned MethodVisitor is in fact a MethodWriter, it
+ * means there is no method adapter between the reader and the
+ * writer. If, in addition, the writer's constant pool was
+ * copied from this reader (mw.cw.cr == this), and the signature
+ * and exceptions of the method have not been changed, then it
+ * is possible to skip all visit events and just copy the
+ * original code of the method to the writer (the access, name
+ * and descriptor may have been changed; this is not important
+ * since they are not copied as is from the reader).
+ */
+ if (WRITER && mv instanceof MethodWriter) {
+ MethodWriter mw = (MethodWriter) mv;
+ if (mw.cw.cr == this) {
+ if (signature == mw.signature) {
+ boolean sameExceptions = false;
+ if (exceptions == null) {
+ sameExceptions = mw.exceptionCount == 0;
+ } else {
+ if (exceptions.length == mw.exceptionCount) {
+ sameExceptions = true;
+ for (j = exceptions.length - 1; j >= 0; --j)
+ {
+ w -= 2;
+ if (mw.exceptions[j] != readUnsignedShort(w))
+ {
+ sameExceptions = false;
+ break;
+ }
+ }
+ }
+ }
+ if (sameExceptions) {
+ /*
+ * we do not copy the code directly into
+ * MethodWriter to save a byte array copy
+ * operation. The real copy will be done in
+ * ClassWriter.toByteArray().
+ */
+ mw.classReaderOffset = u0;
+ mw.classReaderLength = u - u0;
+ continue;
+ }
+ }
+ }
+ }
+
+ if (ANNOTATIONS && dann != 0) {
+ AnnotationVisitor dv = mv.visitAnnotationDefault();
+ readAnnotationValue(dann, c, null, dv);
+ if (dv != null) {
+ dv.visitEnd();
+ }
+ }
+ if (ANNOTATIONS) {
+ for (j = 1; j >= 0; --j) {
+ w = j == 0 ? ianns : anns;
+ if (w != 0) {
+ k = readUnsignedShort(w);
+ w += 2;
+ for (; k > 0; --k) {
+ w = readAnnotationValues(w + 2,
+ c,
+ true,
+ mv.visitAnnotation(readUTF8(w, c), j != 0));
+ }
+ }
+ }
+ }
+ if (ANNOTATIONS && mpanns != 0) {
+ readParameterAnnotations(mpanns, desc, c, true, mv);
+ }
+ if (ANNOTATIONS && impanns != 0) {
+ readParameterAnnotations(impanns, desc, c, false, mv);
+ }
+ while (cattrs != null) {
+ attr = cattrs.next;
+ cattrs.next = null;
+ mv.visitAttribute(cattrs);
+ cattrs = attr;
+ }
+ }
+
+ if (mv != null && v != 0) {
+ int maxStack = readUnsignedShort(v);
+ int maxLocals = readUnsignedShort(v + 2);
+ int codeLength = readInt(v + 4);
+ v += 8;
+
+ int codeStart = v;
+ int codeEnd = v + codeLength;
+
+ mv.visitCode();
+
+ // 1st phase: finds the labels
+ int label;
+ Label[] labels = new Label[codeLength + 2];
+ readLabel(codeLength + 1, labels);
+ while (v < codeEnd) {
+ w = v - codeStart;
+ int opcode = b[v] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ case ClassWriter.IMPLVAR_INSN:
+ v += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ readLabel(w + readShort(v + 1), labels);
+ v += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ readLabel(w + readInt(v + 1), labels);
+ v += 5;
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[v + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ v += 6;
+ } else {
+ v += 4;
+ }
+ break;
+ case ClassWriter.TABL_INSN:
+ // skips 0 to 3 padding bytes
+ v = v + 4 - (w & 3);
+ // reads instruction
+ readLabel(w + readInt(v), labels);
+ j = readInt(v + 8) - readInt(v + 4) + 1;
+ v += 12;
+ for (; j > 0; --j) {
+ readLabel(w + readInt(v), labels);
+ v += 4;
+ }
+ break;
+ case ClassWriter.LOOK_INSN:
+ // skips 0 to 3 padding bytes
+ v = v + 4 - (w & 3);
+ // reads instruction
+ readLabel(w + readInt(v), labels);
+ j = readInt(v + 4);
+ v += 8;
+ for (; j > 0; --j) {
+ readLabel(w + readInt(v + 4), labels);
+ v += 8;
+ }
+ break;
+ case ClassWriter.VAR_INSN:
+ case ClassWriter.SBYTE_INSN:
+ case ClassWriter.LDC_INSN:
+ v += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ case ClassWriter.LDCW_INSN:
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.TYPE_INSN:
+ case ClassWriter.IINC_INSN:
+ v += 3;
+ break;
+ case ClassWriter.ITFMETH_INSN:
+ case ClassWriter.INDYMETH_INSN:
+ v += 5;
+ break;
+ // case MANA_INSN:
+ default:
+ v += 4;
+ break;
+ }
+ }
+ // parses the try catch entries
+ j = readUnsignedShort(v);
+ v += 2;
+ for (; j > 0; --j) {
+ Label start = readLabel(readUnsignedShort(v), labels);
+ Label end = readLabel(readUnsignedShort(v + 2), labels);
+ Label handler = readLabel(readUnsignedShort(v + 4), labels);
+ int type = readUnsignedShort(v + 6);
+ if (type == 0) {
+ mv.visitTryCatchBlock(start, end, handler, null);
+ } else {
+ mv.visitTryCatchBlock(start,
+ end,
+ handler,
+ readUTF8(items[type], c));
+ }
+ v += 8;
+ }
+ // parses the local variable, line number tables, and code
+ // attributes
+ int varTable = 0;
+ int varTypeTable = 0;
+ int stackMap = 0;
+ int stackMapSize = 0;
+ int frameCount = 0;
+ int frameMode = 0;
+ int frameOffset = 0;
+ int frameLocalCount = 0;
+ int frameLocalDiff = 0;
+ int frameStackCount = 0;
+ Object[] frameLocal = null;
+ Object[] frameStack = null;
+ boolean zip = true;
+ cattrs = null;
+ j = readUnsignedShort(v);
+ v += 2;
+ for (; j > 0; --j) {
+ attrName = readUTF8(v, c);
+ if ("LocalVariableTable".equals(attrName)) {
+ if (!skipDebug) {
+ varTable = v + 6;
+ k = readUnsignedShort(v + 6);
+ w = v + 8;
+ for (; k > 0; --k) {
+ label = readUnsignedShort(w);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
+ }
+ label += readUnsignedShort(w + 2);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
+ }
+ w += 10;
+ }
+ }
+ } else if ("LocalVariableTypeTable".equals(attrName)) {
+ varTypeTable = v + 6;
+ } else if ("LineNumberTable".equals(attrName)) {
+ if (!skipDebug) {
+ k = readUnsignedShort(v + 6);
+ w = v + 8;
+ for (; k > 0; --k) {
+ label = readUnsignedShort(w);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
+ }
+ labels[label].line = readUnsignedShort(w + 2);
+ w += 4;
+ }
+ }
+ } else if (FRAMES && "StackMapTable".equals(attrName)) {
+ if ((flags & SKIP_FRAMES) == 0) {
+ stackMap = v + 8;
+ stackMapSize = readInt(v + 2);
+ frameCount = readUnsignedShort(v + 6);
+ }
+ /*
+ * here we do not extract the labels corresponding to
+ * the attribute content. This would require a full
+ * parsing of the attribute, which would need to be
+ * repeated in the second phase (see below). Instead the
+ * content of the attribute is read one frame at a time
+ * (i.e. after a frame has been visited, the next frame
+ * is read), and the labels it contains are also
+ * extracted one frame at a time. Thanks to the ordering
+ * of frames, having only a "one frame lookahead" is not
+ * a problem, i.e. it is not possible to see an offset
+ * smaller than the offset of the current insn and for
+ * which no Label exists.
+ */
+ /*
+ * This is not true for UNINITIALIZED type offsets. We
+ * solve this by parsing the stack map table without a
+ * full decoding (see below).
+ */
+ } else if (FRAMES && "StackMap".equals(attrName)) {
+ if ((flags & SKIP_FRAMES) == 0) {
+ stackMap = v + 8;
+ stackMapSize = readInt(v + 2);
+ frameCount = readUnsignedShort(v + 6);
+ zip = false;
+ }
+ /*
+ * IMPORTANT! here we assume that the frames are
+ * ordered, as in the StackMapTable attribute, although
+ * this is not guaranteed by the attribute format.
+ */
+ } else {
+ for (k = 0; k < attrs.length; ++k) {
+ if (attrs[k].type.equals(attrName)) {
+ attr = attrs[k].read(this,
+ v + 6,
+ readInt(v + 2),
+ c,
+ codeStart - 8,
+ labels);
+ if (attr != null) {
+ attr.next = cattrs;
+ cattrs = attr;
+ }
+ }
+ }
+ }
+ v += 6 + readInt(v + 2);
+ }
+
+ // 2nd phase: visits each instruction
+ if (FRAMES && stackMap != 0) {
+ // creates the very first (implicit) frame from the method
+ // descriptor
+ frameLocal = new Object[maxLocals];
+ frameStack = new Object[maxStack];
+ if (unzip) {
+ int local = 0;
+ if ((access & Opcodes.ACC_STATIC) == 0) {
+ if ("<init>".equals(name)) {
+ frameLocal[local++] = Opcodes.UNINITIALIZED_THIS;
+ } else {
+ frameLocal[local++] = readClass(header + 2, c);
+ }
+ }
+ j = 1;
+ loop: while (true) {
+ k = j;
+ switch (desc.charAt(j++)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ frameLocal[local++] = Opcodes.INTEGER;
+ break;
+ case 'F':
+ frameLocal[local++] = Opcodes.FLOAT;
+ break;
+ case 'J':
+ frameLocal[local++] = Opcodes.LONG;
+ break;
+ case 'D':
+ frameLocal[local++] = Opcodes.DOUBLE;
+ break;
+ case '[':
+ while (desc.charAt(j) == '[') {
+ ++j;
+ }
+ if (desc.charAt(j) == 'L') {
+ ++j;
+ while (desc.charAt(j) != ';') {
+ ++j;
+ }
+ }
+ frameLocal[local++] = desc.substring(k, ++j);
+ break;
+ case 'L':
+ while (desc.charAt(j) != ';') {
+ ++j;
+ }
+ frameLocal[local++] = desc.substring(k + 1,
+ j++);
+ break;
+ default:
+ break loop;
+ }
+ }
+ frameLocalCount = local;
+ }
+ /*
+ * for the first explicit frame the offset is not
+ * offset_delta + 1 but only offset_delta; setting the
+ * implicit frame offset to -1 allows the use of the
+ * "offset_delta + 1" rule in all cases
+ */
+ frameOffset = -1;
+ /*
+ * Finds labels for UNINITIALIZED frame types. Instead of
+ * decoding each element of the stack map table, we look
+ * for 3 consecutive bytes that "look like" an UNINITIALIZED
+ * type (tag 8, offset within code bounds, NEW instruction
+ * at this offset). We may find false positives (i.e. not
+ * real UNINITIALIZED types), but this should be rare, and
+ * the only consequence will be the creation of an unneeded
+ * label. This is better than creating a label for each NEW
+ * instruction, and faster than fully decoding the whole
+ * stack map table.
+ */
+ for (j = stackMap; j < stackMap + stackMapSize - 2; ++j) {
+ if (b[j] == 8) { // UNINITIALIZED FRAME TYPE
+ k = readUnsignedShort(j + 1);
+ if (k >= 0 && k < codeLength) { // potential offset
+ if ((b[codeStart + k] & 0xFF) == Opcodes.NEW) { // NEW at this offset
+ readLabel(k, labels);
+ }
+ }
+ }
+ }
+ }
+ v = codeStart;
+ Label l;
+ while (v < codeEnd) {
+ w = v - codeStart;
+
+ l = labels[w];
+ if (l != null) {
+ mv.visitLabel(l);
+ if (!skipDebug && l.line > 0) {
+ mv.visitLineNumber(l.line, l);
+ }
+ }
+
+ while (FRAMES && frameLocal != null
+ && (frameOffset == w || frameOffset == -1))
+ {
+ // if there is a frame for this offset,
+ // makes the visitor visit it,
+ // and reads the next frame if there is one.
+ if (!zip || unzip) {
+ mv.visitFrame(Opcodes.F_NEW,
+ frameLocalCount,
+ frameLocal,
+ frameStackCount,
+ frameStack);
+ } else if (frameOffset != -1) {
+ mv.visitFrame(frameMode,
+ frameLocalDiff,
+ frameLocal,
+ frameStackCount,
+ frameStack);
+ }
+
+ if (frameCount > 0) {
+ int tag, delta, n;
+ if (zip) {
+ tag = b[stackMap++] & 0xFF;
+ } else {
+ tag = MethodWriter.FULL_FRAME;
+ frameOffset = -1;
+ }
+ frameLocalDiff = 0;
+ if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME)
+ {
+ delta = tag;
+ frameMode = Opcodes.F_SAME;
+ frameStackCount = 0;
+ } else if (tag < MethodWriter.RESERVED) {
+ delta = tag
+ - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME;
+ stackMap = readFrameType(frameStack,
+ 0,
+ stackMap,
+ c,
+ labels);
+ frameMode = Opcodes.F_SAME1;
+ frameStackCount = 1;
+ } else {
+ delta = readUnsignedShort(stackMap);
+ stackMap += 2;
+ if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
+ {
+ stackMap = readFrameType(frameStack,
+ 0,
+ stackMap,
+ c,
+ labels);
+ frameMode = Opcodes.F_SAME1;
+ frameStackCount = 1;
+ } else if (tag >= MethodWriter.CHOP_FRAME
+ && tag < MethodWriter.SAME_FRAME_EXTENDED)
+ {
+ frameMode = Opcodes.F_CHOP;
+ frameLocalDiff = MethodWriter.SAME_FRAME_EXTENDED
+ - tag;
+ frameLocalCount -= frameLocalDiff;
+ frameStackCount = 0;
+ } else if (tag == MethodWriter.SAME_FRAME_EXTENDED)
+ {
+ frameMode = Opcodes.F_SAME;
+ frameStackCount = 0;
+ } else if (tag < MethodWriter.FULL_FRAME) {
+ j = unzip ? frameLocalCount : 0;
+ for (k = tag
+ - MethodWriter.SAME_FRAME_EXTENDED; k > 0; k--)
+ {
+ stackMap = readFrameType(frameLocal,
+ j++,
+ stackMap,
+ c,
+ labels);
+ }
+ frameMode = Opcodes.F_APPEND;
+ frameLocalDiff = tag
+ - MethodWriter.SAME_FRAME_EXTENDED;
+ frameLocalCount += frameLocalDiff;
+ frameStackCount = 0;
+ } else { // if (tag == FULL_FRAME) {
+ frameMode = Opcodes.F_FULL;
+ n = frameLocalDiff = frameLocalCount = readUnsignedShort(stackMap);
+ stackMap += 2;
+ for (j = 0; n > 0; n--) {
+ stackMap = readFrameType(frameLocal,
+ j++,
+ stackMap,
+ c,
+ labels);
+ }
+ n = frameStackCount = readUnsignedShort(stackMap);
+ stackMap += 2;
+ for (j = 0; n > 0; n--) {
+ stackMap = readFrameType(frameStack,
+ j++,
+ stackMap,
+ c,
+ labels);
+ }
+ }
+ }
+ frameOffset += delta + 1;
+ readLabel(frameOffset, labels);
+
+ --frameCount;
+ } else {
+ frameLocal = null;
+ }
+ }
+
+ int opcode = b[v] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ mv.visitInsn(opcode);
+ v += 1;
+ break;
+ case ClassWriter.IMPLVAR_INSN:
+ if (opcode > Opcodes.ISTORE) {
+ opcode -= 59; // ISTORE_0
+ mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2),
+ opcode & 0x3);
+ } else {
+ opcode -= 26; // ILOAD_0
+ mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2),
+ opcode & 0x3);
+ }
+ v += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ mv.visitJumpInsn(opcode, labels[w
+ + readShort(v + 1)]);
+ v += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ mv.visitJumpInsn(opcode - 33, labels[w
+ + readInt(v + 1)]);
+ v += 5;
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[v + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ mv.visitIincInsn(readUnsignedShort(v + 2),
+ readShort(v + 4));
+ v += 6;
+ } else {
+ mv.visitVarInsn(opcode,
+ readUnsignedShort(v + 2));
+ v += 4;
+ }
+ break;
+ case ClassWriter.TABL_INSN:
+ // skips 0 to 3 padding bytes
+ v = v + 4 - (w & 3);
+ // reads instruction
+ label = w + readInt(v);
+ int min = readInt(v + 4);
+ int max = readInt(v + 8);
+ v += 12;
+ Label[] table = new Label[max - min + 1];
+ for (j = 0; j < table.length; ++j) {
+ table[j] = labels[w + readInt(v)];
+ v += 4;
+ }
+ mv.visitTableSwitchInsn(min,
+ max,
+ labels[label],
+ table);
+ break;
+ case ClassWriter.LOOK_INSN:
+ // skips 0 to 3 padding bytes
+ v = v + 4 - (w & 3);
+ // reads instruction
+ label = w + readInt(v);
+ j = readInt(v + 4);
+ v += 8;
+ int[] keys = new int[j];
+ Label[] values = new Label[j];
+ for (j = 0; j < keys.length; ++j) {
+ keys[j] = readInt(v);
+ values[j] = labels[w + readInt(v + 4)];
+ v += 8;
+ }
+ mv.visitLookupSwitchInsn(labels[label],
+ keys,
+ values);
+ break;
+ case ClassWriter.VAR_INSN:
+ mv.visitVarInsn(opcode, b[v + 1] & 0xFF);
+ v += 2;
+ break;
+ case ClassWriter.SBYTE_INSN:
+ mv.visitIntInsn(opcode, b[v + 1]);
+ v += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ mv.visitIntInsn(opcode, readShort(v + 1));
+ v += 3;
+ break;
+ case ClassWriter.LDC_INSN:
+ mv.visitLdcInsn(readConst(b[v + 1] & 0xFF, c));
+ v += 2;
+ break;
+ case ClassWriter.LDCW_INSN:
+ mv.visitLdcInsn(readConst(readUnsignedShort(v + 1),
+ c));
+ v += 3;
+ break;
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.ITFMETH_INSN: {
+ int cpIndex = items[readUnsignedShort(v + 1)];
+ String iowner = readClass(cpIndex, c);
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String iname = readUTF8(cpIndex, c);
+ String idesc = readUTF8(cpIndex + 2, c);
+ if (opcode < Opcodes.INVOKEVIRTUAL) {
+ mv.visitFieldInsn(opcode, iowner, iname, idesc);
+ } else {
+ mv.visitMethodInsn(opcode, iowner, iname, idesc);
+ }
+ if (opcode == Opcodes.INVOKEINTERFACE) {
+ v += 5;
+ } else {
+ v += 3;
+ }
+ break;
+ }
+ case ClassWriter.INDYMETH_INSN: {
+ int cpIndex = items[readUnsignedShort(v + 1)];
+ int bsmIndex = bootstrapMethods[readUnsignedShort(cpIndex)];
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String iname = readUTF8(cpIndex, c);
+ String idesc = readUTF8(cpIndex + 2, c);
+
+ int mhIndex = readUnsignedShort(bsmIndex);
+ Handle bsm = (Handle) readConst(mhIndex, c);
+ int bsmArgCount = readUnsignedShort(bsmIndex + 2);
+ Object[] bsmArgs = new Object[bsmArgCount];
+ bsmIndex += 4;
+ for(int a = 0; a < bsmArgCount; a++) {
+ int argIndex = readUnsignedShort(bsmIndex);
+ bsmArgs[a] = readConst(argIndex, c);
+ bsmIndex += 2;
+ }
+ mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs);
+
+ v += 5;
+ break;
+ }
+ case ClassWriter.TYPE_INSN:
+ mv.visitTypeInsn(opcode, readClass(v + 1, c));
+ v += 3;
+ break;
+ case ClassWriter.IINC_INSN:
+ mv.visitIincInsn(b[v + 1] & 0xFF, b[v + 2]);
+ v += 3;
+ break;
+ // case MANA_INSN:
+ default:
+ mv.visitMultiANewArrayInsn(readClass(v + 1, c),
+ b[v + 3] & 0xFF);
+ v += 4;
+ break;
+ }
+ }
+ l = labels[codeEnd - codeStart];
+ if (l != null) {
+ mv.visitLabel(l);
+ }
+ // visits the local variable tables
+ if (!skipDebug && varTable != 0) {
+ int[] typeTable = null;
+ if (varTypeTable != 0) {
+ k = readUnsignedShort(varTypeTable) * 3;
+ w = varTypeTable + 2;
+ typeTable = new int[k];
+ while (k > 0) {
+ typeTable[--k] = w + 6; // signature
+ typeTable[--k] = readUnsignedShort(w + 8); // index
+ typeTable[--k] = readUnsignedShort(w); // start
+ w += 10;
+ }
+ }
+ k = readUnsignedShort(varTable);
+ w = varTable + 2;
+ for (; k > 0; --k) {
+ int start = readUnsignedShort(w);
+ int length = readUnsignedShort(w + 2);
+ int index = readUnsignedShort(w + 8);
+ String vsignature = null;
+ if (typeTable != null) {
+ for (int a = 0; a < typeTable.length; a += 3) {
+ if (typeTable[a] == start
+ && typeTable[a + 1] == index)
+ {
+ vsignature = readUTF8(typeTable[a + 2], c);
+ break;
+ }
+ }
+ }
+ mv.visitLocalVariable(readUTF8(w + 4, c),
+ readUTF8(w + 6, c),
+ vsignature,
+ labels[start],
+ labels[start + length],
+ index);
+ w += 10;
+ }
+ }
+ // visits the other attributes
+ while (cattrs != null) {
+ attr = cattrs.next;
+ cattrs.next = null;
+ mv.visitAttribute(cattrs);
+ cattrs = attr;
+ }
+ // visits the max stack and max locals values
+ mv.visitMaxs(maxStack, maxLocals);
+ }
+
+ if (mv != null) {
+ mv.visitEnd();
+ }
+ }
+
+ // visits the end of the class
+ classVisitor.visitEnd();
+ }
+
+ /**
+ * Reads parameter annotations and makes the given visitor visit them.
+ *
+ * @param v start offset in {@link #b b} of the annotations to be read.
+ * @param desc the method descriptor.
+ * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or
+ * {@link #readConst readConst}.
+ * @param visible <tt>true</tt> if the annotations to be read are visible
+ * at runtime.
+ * @param mv the visitor that must visit the annotations.
+ */
+ private void readParameterAnnotations(
+ int v,
+ final String desc,
+ final char[] buf,
+ final boolean visible,
+ final MethodVisitor mv)
+ {
+ int i;
+ int n = b[v++] & 0xFF;
+ // workaround for a bug in javac (javac compiler generates a parameter
+ // annotation array whose size is equal to the number of parameters in
+ // the Java source file, while it should generate an array whose size is
+ // equal to the number of parameters in the method descriptor - which
+ // includes the synthetic parameters added by the compiler). This work-
+ // around assumes that the synthetic parameters are the first ones.
+ int synthetics = Type.getArgumentTypes(desc).length - n;
+ AnnotationVisitor av;
+ for (i = 0; i < synthetics; ++i) {
+ // virtual annotation to detect synthetic parameters in MethodWriter
+ av = mv.visitParameterAnnotation(i, "Ljava/lang/Synthetic;", false);
+ if (av != null) {
+ av.visitEnd();
+ }
+ }
+ for (; i < n + synthetics; ++i) {
+ int j = readUnsignedShort(v);
+ v += 2;
+ for (; j > 0; --j) {
+ av = mv.visitParameterAnnotation(i, readUTF8(v, buf), visible);
+ v = readAnnotationValues(v + 2, buf, true, av);
+ }
+ }
+ }
+
+ /**
+ * Reads the values of an annotation and makes the given visitor visit them.
+ *
+ * @param v the start offset in {@link #b b} of the values to be read
+ * (including the unsigned short that gives the number of values).
+ * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or
+ * {@link #readConst readConst}.
+ * @param named if the annotation values are named or not.
+ * @param av the visitor that must visit the values.
+ * @return the end offset of the annotation values.
+ */
+ private int readAnnotationValues(
+ int v,
+ final char[] buf,
+ final boolean named,
+ final AnnotationVisitor av)
+ {
+ int i = readUnsignedShort(v);
+ v += 2;
+ if (named) {
+ for (; i > 0; --i) {
+ v = readAnnotationValue(v + 2, buf, readUTF8(v, buf), av);
+ }
+ } else {
+ for (; i > 0; --i) {
+ v = readAnnotationValue(v, buf, null, av);
+ }
+ }
+ if (av != null) {
+ av.visitEnd();
+ }
+ return v;
+ }
+
+ /**
+ * Reads a value of an annotation and makes the given visitor visit it.
+ *
+ * @param v the start offset in {@link #b b} of the value to be read (<i>not
+ * including the value name constant pool index</i>).
+ * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or
+ * {@link #readConst readConst}.
+ * @param name the name of the value to be read.
+ * @param av the visitor that must visit the value.
+ * @return the end offset of the annotation value.
+ */
+ private int readAnnotationValue(
+ int v,
+ final char[] buf,
+ final String name,
+ final AnnotationVisitor av)
+ {
+ int i;
+ if (av == null) {
+ switch (b[v] & 0xFF) {
+ case 'e': // enum_const_value
+ return v + 5;
+ case '@': // annotation_value
+ return readAnnotationValues(v + 3, buf, true, null);
+ case '[': // array_value
+ return readAnnotationValues(v + 1, buf, false, null);
+ default:
+ return v + 3;
+ }
+ }
+ switch (b[v++] & 0xFF) {
+ case 'I': // pointer to CONSTANT_Integer
+ case 'J': // pointer to CONSTANT_Long
+ case 'F': // pointer to CONSTANT_Float
+ case 'D': // pointer to CONSTANT_Double
+ av.visit(name, readConst(readUnsignedShort(v), buf));
+ v += 2;
+ break;
+ case 'B': // pointer to CONSTANT_Byte
+ av.visit(name,
+ new Byte((byte) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 'Z': // pointer to CONSTANT_Boolean
+ av.visit(name, readInt(items[readUnsignedShort(v)]) == 0
+ ? Boolean.FALSE
+ : Boolean.TRUE);
+ v += 2;
+ break;
+ case 'S': // pointer to CONSTANT_Short
+ av.visit(name,
+ new Short((short) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 'C': // pointer to CONSTANT_Char
+ av.visit(name,
+ new Character((char) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 's': // pointer to CONSTANT_Utf8
+ av.visit(name, readUTF8(v, buf));
+ v += 2;
+ break;
+ case 'e': // enum_const_value
+ av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf));
+ v += 4;
+ break;
+ case 'c': // class_info
+ av.visit(name, Type.getType(readUTF8(v, buf)));
+ v += 2;
+ break;
+ case '@': // annotation_value
+ v = readAnnotationValues(v + 2,
+ buf,
+ true,
+ av.visitAnnotation(name, readUTF8(v, buf)));
+ break;
+ case '[': // array_value
+ int size = readUnsignedShort(v);
+ v += 2;
+ if (size == 0) {
+ return readAnnotationValues(v - 2,
+ buf,
+ false,
+ av.visitArray(name));
+ }
+ switch (this.b[v++] & 0xFF) {
+ case 'B':
+ byte[] bv = new byte[size];
+ for (i = 0; i < size; i++) {
+ bv[i] = (byte) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, bv);
+ --v;
+ break;
+ case 'Z':
+ boolean[] zv = new boolean[size];
+ for (i = 0; i < size; i++) {
+ zv[i] = readInt(items[readUnsignedShort(v)]) != 0;
+ v += 3;
+ }
+ av.visit(name, zv);
+ --v;
+ break;
+ case 'S':
+ short[] sv = new short[size];
+ for (i = 0; i < size; i++) {
+ sv[i] = (short) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, sv);
+ --v;
+ break;
+ case 'C':
+ char[] cv = new char[size];
+ for (i = 0; i < size; i++) {
+ cv[i] = (char) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, cv);
+ --v;
+ break;
+ case 'I':
+ int[] iv = new int[size];
+ for (i = 0; i < size; i++) {
+ iv[i] = readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, iv);
+ --v;
+ break;
+ case 'J':
+ long[] lv = new long[size];
+ for (i = 0; i < size; i++) {
+ lv[i] = readLong(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, lv);
+ --v;
+ break;
+ case 'F':
+ float[] fv = new float[size];
+ for (i = 0; i < size; i++) {
+ fv[i] = Float.intBitsToFloat(readInt(items[readUnsignedShort(v)]));
+ v += 3;
+ }
+ av.visit(name, fv);
+ --v;
+ break;
+ case 'D':
+ double[] dv = new double[size];
+ for (i = 0; i < size; i++) {
+ dv[i] = Double.longBitsToDouble(readLong(items[readUnsignedShort(v)]));
+ v += 3;
+ }
+ av.visit(name, dv);
+ --v;
+ break;
+ default:
+ v = readAnnotationValues(v - 3,
+ buf,
+ false,
+ av.visitArray(name));
+ }
+ }
+ return v;
+ }
+
+ private int readFrameType(
+ final Object[] frame,
+ final int index,
+ int v,
+ final char[] buf,
+ final Label[] labels)
+ {
+ int type = b[v++] & 0xFF;
+ switch (type) {
+ case 0:
+ frame[index] = Opcodes.TOP;
+ break;
+ case 1:
+ frame[index] = Opcodes.INTEGER;
+ break;
+ case 2:
+ frame[index] = Opcodes.FLOAT;
+ break;
+ case 3:
+ frame[index] = Opcodes.DOUBLE;
+ break;
+ case 4:
+ frame[index] = Opcodes.LONG;
+ break;
+ case 5:
+ frame[index] = Opcodes.NULL;
+ break;
+ case 6:
+ frame[index] = Opcodes.UNINITIALIZED_THIS;
+ break;
+ case 7: // Object
+ frame[index] = readClass(v, buf);
+ v += 2;
+ break;
+ default: // Uninitialized
+ frame[index] = readLabel(readUnsignedShort(v), labels);
+ v += 2;
+ }
+ return v;
+ }
+
+ /**
+ * Returns the label corresponding to the given offset. The default
+ * implementation of this method creates a label for the given offset if it
+ * has not been already created.
+ *
+ * @param offset a bytecode offset in a method.
+ * @param labels the already created labels, indexed by their offset. If a
+ * label already exists for offset this method must not create a new
+ * one. Otherwise it must store the new label in this array.
+ * @return a non null Label, which must be equal to labels[offset].
+ */
+ protected Label readLabel(int offset, Label[] labels) {
+ if (labels[offset] == null) {
+ labels[offset] = new Label();
+ }
+ return labels[offset];
+ }
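+
+ // Sketch of how a subclass could override this hook, e.g. to reuse
+ // pre-allocated labels (myLabels is a hypothetical Label[] indexed by
+ // bytecode offset):
+ //
+ //   @Override
+ //   protected Label readLabel(int offset, Label[] labels) {
+ //       if (labels[offset] == null) {
+ //           labels[offset] = myLabels[offset];
+ //       }
+ //       return labels[offset];
+ //   }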
+
+ /**
+ * Reads an attribute in {@link #b b}.
+ *
+ * @param attrs prototypes of the attributes that must be parsed during the
+ * visit of the class. Any attribute whose type is not equal to the
+ * type of one of the prototypes is ignored (i.e. an empty
+ * {@link Attribute} instance is returned).
+ * @param type the type of the attribute.
+ * @param off index of the first byte of the attribute's content in
+ * {@link #b b}. The 6 attribute header bytes, containing the type
+ * and the length of the attribute, are not taken into account here
+ * (they have already been read).
+ * @param len the length of the attribute's content.
+ * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or
+ * {@link #readConst readConst}.
+ * @param codeOff index of the first byte of code's attribute content in
+ * {@link #b b}, or -1 if the attribute to be read is not a code
+ * attribute. The 6 attribute header bytes, containing the type and
+ * the length of the attribute, are not taken into account here.
+ * @param labels the labels of the method's code, or <tt>null</tt> if the
+ * attribute to be read is not a code attribute.
+ * @return the attribute that has been read, or <tt>null</tt> to skip this
+ * attribute.
+ */
+ private Attribute readAttribute(
+ final Attribute[] attrs,
+ final String type,
+ final int off,
+ final int len,
+ final char[] buf,
+ final int codeOff,
+ final Label[] labels)
+ {
+ for (int i = 0; i < attrs.length; ++i) {
+ if (attrs[i].type.equals(type)) {
+ return attrs[i].read(this, off, len, buf, codeOff, labels);
+ }
+ }
+ return new Attribute(type).read(this, off, len, null, -1, null);
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: low level parsing
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the number of constant pool items in {@link #b b}.
+ *
+ * @return the number of constant pool items in {@link #b b}.
+ */
+ public int getItemCount() {
+ return items.length;
+ }
+
+ /**
+ * Returns the start index of the constant pool item in {@link #b b}, plus
+ * one. <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param item the index of a constant pool item.
+ * @return the start index of the constant pool item in {@link #b b}, plus
+ * one.
+ */
+ public int getItem(final int item) {
+ return items[item];
+ }
+
+ /**
+ * Returns the maximum length of the strings contained in the constant pool
+ * of the class.
+ *
+ * @return the maximum length of the strings contained in the constant pool
+ * of the class.
+ */
+ public int getMaxStringLength() {
+ return maxStringLength;
+ }
+
+ /**
+ * Reads a byte value in {@link #b b}. <i>This method is intended for
+ * {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public int readByte(final int index) {
+ return b[index] & 0xFF;
+ }
+
+ /**
+ * Reads an unsigned short value in {@link #b b}. <i>This method is
+ * intended for {@link Attribute} sub classes, and is normally not needed by
+ * class generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public int readUnsignedShort(final int index) {
+ byte[] b = this.b;
+ return ((b[index] & 0xFF) << 8) | (b[index + 1] & 0xFF);
+ }
+
+ /**
+ * Reads a signed short value in {@link #b b}. <i>This method is intended
+ * for {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public short readShort(final int index) {
+ byte[] b = this.b;
+ return (short) (((b[index] & 0xFF) << 8) | (b[index + 1] & 0xFF));
+ }
+
+ /**
+ * Reads a signed int value in {@link #b b}. <i>This method is intended for
+ * {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public int readInt(final int index) {
+ byte[] b = this.b;
+ return ((b[index] & 0xFF) << 24) | ((b[index + 1] & 0xFF) << 16)
+ | ((b[index + 2] & 0xFF) << 8) | (b[index + 3] & 0xFF);
+ }
+
+ /**
+ * Reads a signed long value in {@link #b b}. <i>This method is intended
+ * for {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public long readLong(final int index) {
+ long l1 = readInt(index);
+ long l0 = readInt(index + 4) & 0xFFFFFFFFL;
+ return (l1 << 32) | l0;
+ }
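+
+ // Worked example for the big-endian readers above: if the two bytes at
+ // index i are 0xCA and 0xFE, then readUnsignedShort(i) returns 0xCAFE
+ // (51966), while readShort(i) returns (short) 0xCAFE, a negative value,
+ // since the sign bit is set.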
+
+ /**
+ * Reads an UTF8 string constant pool item in {@link #b b}. <i>This method
+ * is intended for {@link Attribute} sub classes, and is normally not needed
+ * by class generators or adapters.</i>
+ *
+ * @param index the start index of an unsigned short value in {@link #b b},
+ * whose value is the index of an UTF8 constant pool item.
+ * @param buf buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
+ * @return the String corresponding to the specified UTF8 item.
+ */
+ public String readUTF8(int index, final char[] buf) {
+ int item = readUnsignedShort(index);
+ String s = strings[item];
+ if (s != null) {
+ return s;
+ }
+ index = items[item];
+ return strings[item] = readUTF(index + 2, readUnsignedShort(index), buf);
+ }
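+
+ // Illustrative sketch: the char buffer passed to readUTF8, readClass and
+ // readConst is never resized, so callers allocate it once from
+ // getMaxStringLength() (cr and index are hypothetical):
+ //
+ //   char[] buf = new char[cr.getMaxStringLength()];
+ //   String s = cr.readUTF8(index, buf);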
+
+ /**
+ * Reads UTF8 string in {@link #b b}.
+ *
+ * @param index start offset of the UTF8 string to be read.
+ * @param utfLen length of the UTF8 string to be read.
+ * @param buf buffer to be used to read the string. This buffer must be
+ * sufficiently large. It is not automatically resized.
+ * @return the String corresponding to the specified UTF8 string.
+ */
+ private String readUTF(int index, final int utfLen, final char[] buf) {
+ int endIndex = index + utfLen;
+ byte[] b = this.b;
+ int strLen = 0;
+ int c;
+ int st = 0;
+ char cc = 0;
+ while (index < endIndex) {
+ c = b[index++];
+ switch (st) {
+ case 0:
+ c = c & 0xFF;
+ if (c < 0x80) { // 0xxxxxxx
+ buf[strLen++] = (char) c;
+ } else if (c < 0xE0 && c > 0xBF) { // 110x xxxx 10xx xxxx
+ cc = (char) (c & 0x1F);
+ st = 1;
+ } else { // 1110 xxxx 10xx xxxx 10xx xxxx
+ cc = (char) (c & 0x0F);
+ st = 2;
+ }
+ break;
+
+ case 1: // byte 2 of 2-byte char or byte 3 of 3-byte char
+ buf[strLen++] = (char) ((cc << 6) | (c & 0x3F));
+ st = 0;
+ break;
+
+ case 2: // byte 2 of 3-byte char
+ cc = (char) ((cc << 6) | (c & 0x3F));
+ st = 1;
+ break;
+ }
+ }
+ return new String(buf, 0, strLen);
+ }
+
+ /**
+ * Reads a class constant pool item in {@link #b b}. <i>This method is
+ * intended for {@link Attribute} sub classes, and is normally not needed by
+ * class generators or adapters.</i>
+ *
+ * @param index the start index of an unsigned short value in {@link #b b},
+ * whose value is the index of a class constant pool item.
+ * @param buf buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
+ * @return the String corresponding to the specified class item.
+ */
+ public String readClass(final int index, final char[] buf) {
+ // computes the start index of the CONSTANT_Class item in b
+ // and reads the CONSTANT_Utf8 item designated by
+ // the first two bytes of this CONSTANT_Class item
+ return readUTF8(items[readUnsignedShort(index)], buf);
+ }
+
+ /**
+ * Reads a numeric or string constant pool item in {@link #b b}. <i>This
+ * method is intended for {@link Attribute} sub classes, and is normally not
+ * needed by class generators or adapters.</i>
+ *
+ * @param item the index of a constant pool item.
+ * @param buf buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
+ * @return the {@link Integer}, {@link Float}, {@link Long}, {@link Double},
+ * {@link String}, {@link Type} or {@link Handle} corresponding to
+ * the given constant pool item.
+ */
+ public Object readConst(final int item, final char[] buf) {
+ int index = items[item];
+ switch (b[index - 1]) {
+ case ClassWriter.INT:
+ return new Integer(readInt(index));
+ case ClassWriter.FLOAT:
+ return new Float(Float.intBitsToFloat(readInt(index)));
+ case ClassWriter.LONG:
+ return new Long(readLong(index));
+ case ClassWriter.DOUBLE:
+ return new Double(Double.longBitsToDouble(readLong(index)));
+ case ClassWriter.CLASS:
+ return Type.getObjectType(readUTF8(index, buf));
+ case ClassWriter.STR:
+ return readUTF8(index, buf);
+ case ClassWriter.MTYPE:
+ return Type.getMethodType(readUTF8(index, buf));
+
+ //case ClassWriter.HANDLE_BASE + [1..9]:
+ default: {
+ int tag = readByte(index);
+ int[] items = this.items;
+ int cpIndex = items[readUnsignedShort(index + 1)];
+ String owner = readClass(cpIndex, buf);
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String name = readUTF8(cpIndex, buf);
+ String desc = readUTF8(cpIndex + 2, buf);
+ return new Handle(tag, owner, name, desc);
+ }
+ }
+ }
+}
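
Together with the ClassWriter added later in this patch, these low-level readers support the usual read-transform-write round trip. A minimal sketch, assuming only the ClassReader(byte[]), ClassWriter(ClassReader, int), accept and toByteArray entry points that appear elsewhere in this patch (the helper class name RoundTrip is illustrative):

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassWriter;

    class RoundTrip {
        // Re-serializes a class file, reusing the original constant pool via the
        // ClassWriter(ClassReader, int) "mostly add" constructor.
        static byte[] rewrite(byte[] classFile) {
            ClassReader cr = new ClassReader(classFile);
            ClassWriter cw = new ClassWriter(cr, 0); // 0 = no automatic recomputation
            cr.accept(cw, 0);                        // forward all class events unchanged
            return cw.toByteArray();
        }
    }
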
diff --git a/src/asm/scala/tools/asm/ClassVisitor.java b/src/asm/scala/tools/asm/ClassVisitor.java
new file mode 100644
index 0000000000..ae38ae0ab9
--- /dev/null
+++ b/src/asm/scala/tools/asm/ClassVisitor.java
@@ -0,0 +1,277 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A visitor to visit a Java class. The methods of this class must be called
+ * in the following order: <tt>visit</tt> [ <tt>visitSource</tt> ] [
+ * <tt>visitOuterClass</tt> ] ( <tt>visitAnnotation</tt> |
+ * <tt>visitAttribute</tt> )* ( <tt>visitInnerClass</tt> |
+ * <tt>visitField</tt> | <tt>visitMethod</tt> )* <tt>visitEnd</tt>.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class ClassVisitor {
+
+ /**
+ * The ASM API version implemented by this visitor. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * The class visitor to which this visitor must delegate method calls. May
+ * be null.
+ */
+ protected ClassVisitor cv;
+
+ /**
+ * Constructs a new {@link ClassVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public ClassVisitor(final int api) {
+ this(api, null);
+ }
+
+ /**
+ * Constructs a new {@link ClassVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param cv the class visitor to which this visitor must delegate method
+ * calls. May be null.
+ */
+ public ClassVisitor(final int api, final ClassVisitor cv) {
+ /*if (api != Opcodes.ASM4) {
+ throw new IllegalArgumentException();
+ }*/
+ this.api = api;
+ this.cv = cv;
+ }
+
+ /**
+ * Visits the header of the class.
+ *
+ * @param version the class version.
+ * @param access the class's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the class is deprecated.
+ * @param name the internal name of the class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param signature the signature of this class. May be <tt>null</tt> if
+ * the class is not a generic one, and does not extend or implement
+ * generic classes or interfaces.
+ * @param superName the internal name of the super class (see
+ * {@link Type#getInternalName() getInternalName}). For interfaces,
+ * the super class is {@link Object}. May be <tt>null</tt>, but
+ * only for the {@link Object} class.
+ * @param interfaces the internal names of the class's interfaces (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
+ */
+ public void visit(
+ int version,
+ int access,
+ String name,
+ String signature,
+ String superName,
+ String[] interfaces)
+ {
+ if (cv != null) {
+ cv.visit(version, access, name, signature, superName, interfaces);
+ }
+ }
+
+ /**
+ * Visits the source of the class.
+ *
+ * @param source the name of the source file from which the class was
+ * compiled. May be <tt>null</tt>.
+ * @param debug additional debug information to compute the correspondence
+ * between source and compiled elements of the class. May be
+ * <tt>null</tt>.
+ */
+ public void visitSource(String source, String debug) {
+ if (cv != null) {
+ cv.visitSource(source, debug);
+ }
+ }
+
+ /**
+ * Visits the enclosing class of the class. This method must be called only
+ * if the class has an enclosing class.
+ *
+ * @param owner internal name of the enclosing class of the class.
+ * @param name the name of the method that contains the class, or
+ * <tt>null</tt> if the class is not enclosed in a method of its
+ * enclosing class.
+ * @param desc the descriptor of the method that contains the class, or
+ * <tt>null</tt> if the class is not enclosed in a method of its
+ * enclosing class.
+ */
+ public void visitOuterClass(String owner, String name, String desc) {
+ if (cv != null) {
+ cv.visitOuterClass(owner, name, desc);
+ }
+ }
+
+ /**
+ * Visits an annotation of the class.
+ *
+ * @param desc the class descriptor of the annotation class.
+ * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
+ if (cv != null) {
+ return cv.visitAnnotation(desc, visible);
+ }
+ return null;
+ }
+
+ /**
+ * Visits a non standard attribute of the class.
+ *
+ * @param attr an attribute.
+ */
+ public void visitAttribute(Attribute attr) {
+ if (cv != null) {
+ cv.visitAttribute(attr);
+ }
+ }
+
+ /**
+ * Visits information about an inner class. This inner class is not
+ * necessarily a member of the class being visited.
+ *
+ * @param name the internal name of an inner class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param outerName the internal name of the class to which the inner class
+ * belongs (see {@link Type#getInternalName() getInternalName}). May
+ * be <tt>null</tt> for non-member classes.
+ * @param innerName the (simple) name of the inner class inside its
+ * enclosing class. May be <tt>null</tt> for anonymous inner
+ * classes.
+ * @param access the access flags of the inner class as originally declared
+ * in the enclosing class.
+ */
+ public void visitInnerClass(
+ String name,
+ String outerName,
+ String innerName,
+ int access)
+ {
+ if (cv != null) {
+ cv.visitInnerClass(name, outerName, innerName, access);
+ }
+ }
+
+ /**
+ * Visits a field of the class.
+ *
+ * @param access the field's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the field is synthetic and/or
+ * deprecated.
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link Type Type}).
+ * @param signature the field's signature. May be <tt>null</tt> if the
+ * field's type does not use generic types.
+ * @param value the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value, must
+ * be an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String} (for <tt>int</tt>,
+ * <tt>float</tt>, <tt>long</tt>, <tt>double</tt> or <tt>String</tt> fields
+ * respectively). <i>This parameter is only used for static fields</i>.
+ * Its value is ignored for non static fields, which must be
+ * initialized through bytecode instructions in constructors or
+ * methods.
+ * @return a visitor to visit field annotations and attributes, or
+ * <tt>null</tt> if this class visitor is not interested in
+ * visiting these annotations and attributes.
+ */
+ public FieldVisitor visitField(
+ int access,
+ String name,
+ String desc,
+ String signature,
+ Object value)
+ {
+ if (cv != null) {
+ return cv.visitField(access, name, desc, signature, value);
+ }
+ return null;
+ }
+
+ /**
+ * Visits a method of the class. This method <i>must</i> return a new
+ * {@link MethodVisitor} instance (or <tt>null</tt>) each time it is
+ * called, i.e., it should not return a previously returned visitor.
+ *
+ * @param access the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type Type}).
+ * @param signature the method's signature. May be <tt>null</tt> if the
+ * method parameters, return type and exceptions do not use generic
+ * types.
+ * @param exceptions the internal names of the method's exception classes
+ * (see {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
+ * @return an object to visit the byte code of the method, or <tt>null</tt>
+ * if this class visitor is not interested in visiting the code of
+ * this method.
+ */
+ public MethodVisitor visitMethod(
+ int access,
+ String name,
+ String desc,
+ String signature,
+ String[] exceptions)
+ {
+ if (cv != null) {
+ return cv.visitMethod(access, name, desc, signature, exceptions);
+ }
+ return null;
+ }
+
+ /**
+ * Visits the end of the class. This method, which is the last one to be
+ * called, is used to inform the visitor that all the fields and methods of
+ * the class have been visited.
+ */
+ public void visitEnd() {
+ if (cv != null) {
+ cv.visitEnd();
+ }
+ }
+}
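
The delegation pattern above makes adapters cheap to write: override only the events of interest and forward everything else through cv. A minimal sketch, relying only on the ClassVisitor, MethodVisitor and Opcodes types added in this patch (the class name MethodLister is illustrative):

    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class MethodLister extends ClassVisitor {
        MethodLister(ClassVisitor next) {
            super(Opcodes.ASM4, next); // every unoverridden event is delegated to 'next'
        }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                String signature, String[] exceptions) {
            System.out.println(name + desc); // observe the method, then forward it unchanged
            return super.visitMethod(access, name, desc, signature, exceptions);
        }
    }
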
diff --git a/src/asm/scala/tools/asm/ClassWriter.java b/src/asm/scala/tools/asm/ClassWriter.java
new file mode 100644
index 0000000000..c7a0736b51
--- /dev/null
+++ b/src/asm/scala/tools/asm/ClassWriter.java
@@ -0,0 +1,1672 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A {@link ClassVisitor} that generates classes in bytecode form. More
+ * precisely this visitor generates a byte array conforming to the Java class
+ * file format. It can be used alone, to generate a Java class "from scratch",
+ * or with one or more {@link ClassReader ClassReader} and adapter class visitors
+ * to generate a modified class from one or more existing Java classes.
+ *
+ * @author Eric Bruneton
+ */
+public class ClassWriter extends ClassVisitor {
+
+ /**
+ * Flag to automatically compute the maximum stack size and the maximum
+ * number of local variables of methods. If this flag is set, then the
+ * arguments of the {@link MethodVisitor#visitMaxs visitMaxs} method of the
+ * {@link MethodVisitor} returned by the {@link #visitMethod visitMethod}
+ * method will be ignored, and computed automatically from the signature and
+ * the bytecode of each method.
+ *
+ * @see #ClassWriter(int)
+ */
+ public static final int COMPUTE_MAXS = 1;
+
+ /**
+ * Flag to automatically compute the stack map frames of methods from
+ * scratch. If this flag is set, then the calls to the
+ * {@link MethodVisitor#visitFrame} method are ignored, and the stack map
+ * frames are recomputed from the methods bytecode. The arguments of the
+ * {@link MethodVisitor#visitMaxs visitMaxs} method are also ignored and
+ * recomputed from the bytecode. In other words, computeFrames implies
+ * computeMaxs.
+ *
+ * @see #ClassWriter(int)
+ */
+ public static final int COMPUTE_FRAMES = 2;
+
+ /**
+ * Pseudo access flag to distinguish between the synthetic attribute and
+ * the synthetic access flag.
+ */
+ static final int ACC_SYNTHETIC_ATTRIBUTE = 0x40000;
+
+ /**
+ * The type of instructions without any argument.
+ */
+ static final int NOARG_INSN = 0;
+
+ /**
+ * The type of instructions with a signed byte argument.
+ */
+ static final int SBYTE_INSN = 1;
+
+ /**
+ * The type of instructions with a signed short argument.
+ */
+ static final int SHORT_INSN = 2;
+
+ /**
+ * The type of instructions with a local variable index argument.
+ */
+ static final int VAR_INSN = 3;
+
+ /**
+ * The type of instructions with an implicit local variable index argument.
+ */
+ static final int IMPLVAR_INSN = 4;
+
+ /**
+ * The type of instructions with a type descriptor argument.
+ */
+ static final int TYPE_INSN = 5;
+
+ /**
+ * The type of field and method invocation instructions.
+ */
+ static final int FIELDORMETH_INSN = 6;
+
+ /**
+ * The type of the INVOKEINTERFACE/INVOKEDYNAMIC instruction.
+ */
+ static final int ITFMETH_INSN = 7;
+
+ /**
+ * The type of the INVOKEDYNAMIC instruction.
+ */
+ static final int INDYMETH_INSN = 8;
+
+ /**
+ * The type of instructions with a 2-byte bytecode offset label.
+ */
+ static final int LABEL_INSN = 9;
+
+ /**
+ * The type of instructions with a 4-byte bytecode offset label.
+ */
+ static final int LABELW_INSN = 10;
+
+ /**
+ * The type of the LDC instruction.
+ */
+ static final int LDC_INSN = 11;
+
+ /**
+ * The type of the LDC_W and LDC2_W instructions.
+ */
+ static final int LDCW_INSN = 12;
+
+ /**
+ * The type of the IINC instruction.
+ */
+ static final int IINC_INSN = 13;
+
+ /**
+ * The type of the TABLESWITCH instruction.
+ */
+ static final int TABL_INSN = 14;
+
+ /**
+ * The type of the LOOKUPSWITCH instruction.
+ */
+ static final int LOOK_INSN = 15;
+
+ /**
+ * The type of the MULTIANEWARRAY instruction.
+ */
+ static final int MANA_INSN = 16;
+
+ /**
+ * The type of the WIDE instruction.
+ */
+ static final int WIDE_INSN = 17;
+
+ /**
+ * The instruction types of all JVM opcodes.
+ */
+ static final byte[] TYPE;
+
+ /**
+ * The type of CONSTANT_Class constant pool items.
+ */
+ static final int CLASS = 7;
+
+ /**
+ * The type of CONSTANT_Fieldref constant pool items.
+ */
+ static final int FIELD = 9;
+
+ /**
+ * The type of CONSTANT_Methodref constant pool items.
+ */
+ static final int METH = 10;
+
+ /**
+ * The type of CONSTANT_InterfaceMethodref constant pool items.
+ */
+ static final int IMETH = 11;
+
+ /**
+ * The type of CONSTANT_String constant pool items.
+ */
+ static final int STR = 8;
+
+ /**
+ * The type of CONSTANT_Integer constant pool items.
+ */
+ static final int INT = 3;
+
+ /**
+ * The type of CONSTANT_Float constant pool items.
+ */
+ static final int FLOAT = 4;
+
+ /**
+ * The type of CONSTANT_Long constant pool items.
+ */
+ static final int LONG = 5;
+
+ /**
+ * The type of CONSTANT_Double constant pool items.
+ */
+ static final int DOUBLE = 6;
+
+ /**
+ * The type of CONSTANT_NameAndType constant pool items.
+ */
+ static final int NAME_TYPE = 12;
+
+ /**
+ * The type of CONSTANT_Utf8 constant pool items.
+ */
+ static final int UTF8 = 1;
+
+ /**
+ * The type of CONSTANT_MethodType constant pool items.
+ */
+ static final int MTYPE = 16;
+
+ /**
+ * The type of CONSTANT_MethodHandle constant pool items.
+ */
+ static final int HANDLE = 15;
+
+ /**
+ * The type of CONSTANT_InvokeDynamic constant pool items.
+ */
+ static final int INDY = 18;
+
+ /**
+ * The base value for all CONSTANT_MethodHandle constant pool items.
+ * Internally, ASM stores the 9 variations of CONSTANT_MethodHandle into
+ * 9 different items.
+ */
+ static final int HANDLE_BASE = 20;
+
+ /**
+ * Normal type Item stored in the ClassWriter {@link ClassWriter#typeTable},
+ * instead of the constant pool, in order to avoid clashes with normal
+ * constant pool items in the ClassWriter constant pool's hash table.
+ */
+ static final int TYPE_NORMAL = 30;
+
+ /**
+ * Uninitialized type Item stored in the ClassWriter
+ * {@link ClassWriter#typeTable}, instead of the constant pool, in order to
+ * avoid clashes with normal constant pool items in the ClassWriter constant
+ * pool's hash table.
+ */
+ static final int TYPE_UNINIT = 31;
+
+ /**
+ * Merged type Item stored in the ClassWriter {@link ClassWriter#typeTable},
+ * instead of the constant pool, in order to avoid clashes with normal
+ * constant pool items in the ClassWriter constant pool's hash table.
+ */
+ static final int TYPE_MERGED = 32;
+
+ /**
+ * The type of BootstrapMethods items. These items are stored in a
+ * special class attribute named BootstrapMethods and
+ * not in the constant pool.
+ */
+ static final int BSM = 33;
+
+ /**
+ * The class reader from which this class writer was constructed, if any.
+ */
+ ClassReader cr;
+
+ /**
+ * Minor and major version numbers of the class to be generated.
+ */
+ int version;
+
+ /**
+ * Index of the next item to be added in the constant pool.
+ */
+ int index;
+
+ /**
+ * The constant pool of this class.
+ */
+ final ByteVector pool;
+
+ /**
+ * The constant pool's hash table data.
+ */
+ Item[] items;
+
+ /**
+ * The threshold of the constant pool's hash table.
+ */
+ int threshold;
+
+ /**
+ * A reusable key used to look for items in the {@link #items} hash table.
+ */
+ final Item key;
+
+ /**
+ * A reusable key used to look for items in the {@link #items} hash table.
+ */
+ final Item key2;
+
+ /**
+ * A reusable key used to look for items in the {@link #items} hash table.
+ */
+ final Item key3;
+
+ /**
+ * A reusable key used to look for items in the {@link #items} hash table.
+ */
+ final Item key4;
+
+ /**
+ * A type table used to temporarily store internal names that will not
+ * necessarily be stored in the constant pool. This type table is used by
+ * the control flow and data flow analysis algorithm used to compute stack
+ * map frames from scratch. This array associates to each index <tt>i</tt>
+ * the Item whose index is <tt>i</tt>. All Item objects stored in this
+ * array are also stored in the {@link #items} hash table. These two arrays
+ * allow to retrieve an Item from its index or, conversely, to get the index
+ * of an Item from its value. Each Item stores an internal name in its
+ * {@link Item#strVal1} field.
+ */
+ Item[] typeTable;
+
+ /**
+ * Number of elements in the {@link #typeTable} array.
+ */
+ private short typeCount;
+
+ /**
+ * The access flags of this class.
+ */
+ private int access;
+
+ /**
+ * The constant pool item that contains the internal name of this class.
+ */
+ private int name;
+
+ /**
+ * The internal name of this class.
+ */
+ String thisName;
+
+ /**
+ * The constant pool item that contains the signature of this class.
+ */
+ private int signature;
+
+ /**
+ * The constant pool item that contains the internal name of the super class
+ * of this class.
+ */
+ private int superName;
+
+ /**
+ * Number of interfaces implemented or extended by this class or interface.
+ */
+ private int interfaceCount;
+
+ /**
+ * The interfaces implemented or extended by this class or interface. More
+ * precisely, this array contains the indexes of the constant pool items
+ * that contain the internal names of these interfaces.
+ */
+ private int[] interfaces;
+
+ /**
+ * The index of the constant pool item that contains the name of the source
+ * file from which this class was compiled.
+ */
+ private int sourceFile;
+
+ /**
+ * The SourceDebug attribute of this class.
+ */
+ private ByteVector sourceDebug;
+
+ /**
+ * The constant pool item that contains the name of the enclosing class of
+ * this class.
+ */
+ private int enclosingMethodOwner;
+
+ /**
+ * The constant pool item that contains the name and descriptor of the
+ * enclosing method of this class.
+ */
+ private int enclosingMethod;
+
+ /**
+ * The runtime visible annotations of this class.
+ */
+ private AnnotationWriter anns;
+
+ /**
+ * The runtime invisible annotations of this class.
+ */
+ private AnnotationWriter ianns;
+
+ /**
+ * The non standard attributes of this class.
+ */
+ private Attribute attrs;
+
+ /**
+ * The number of entries in the InnerClasses attribute.
+ */
+ private int innerClassesCount;
+
+ /**
+ * The InnerClasses attribute.
+ */
+ private ByteVector innerClasses;
+
+ /**
+ * The number of entries in the BootstrapMethods attribute.
+ */
+ int bootstrapMethodsCount;
+
+ /**
+ * The BootstrapMethods attribute.
+ */
+ ByteVector bootstrapMethods;
+
+ /**
+ * The fields of this class. These fields are stored in a linked list of
+ * {@link FieldWriter} objects, linked to each other by their
+ * {@link FieldWriter#fv} field. This field stores the first element of
+ * this list.
+ */
+ FieldWriter firstField;
+
+ /**
+ * The fields of this class. These fields are stored in a linked list of
+ * {@link FieldWriter} objects, linked to each other by their
+ * {@link FieldWriter#fv} field. This field stores the last element of
+ * this list.
+ */
+ FieldWriter lastField;
+
+ /**
+ * The methods of this class. These methods are stored in a linked list of
+ * {@link MethodWriter} objects, linked to each other by their
+ * {@link MethodWriter#mv} field. This field stores the first element of
+ * this list.
+ */
+ MethodWriter firstMethod;
+
+ /**
+ * The methods of this class. These methods are stored in a linked list of
+ * {@link MethodWriter} objects, linked to each other by their
+ * {@link MethodWriter#mv} field. This field stores the last element of
+ * this list.
+ */
+ MethodWriter lastMethod;
+
+ /**
+ * <tt>true</tt> if the maximum stack size and number of local variables
+ * must be automatically computed.
+ */
+ private final boolean computeMaxs;
+
+ /**
+ * <tt>true</tt> if the stack map frames must be recomputed from scratch.
+ */
+ private final boolean computeFrames;
+
+ /**
+ * <tt>true</tt> if the stack map tables of this class are invalid. The
+ * {@link MethodWriter#resizeInstructions} method cannot transform existing
+ * stack map tables, and so produces potentially invalid classes when it is
+ * executed. In this case the class is reread and rewritten with the
+ * {@link #COMPUTE_FRAMES} option (the resizeInstructions method can resize
+ * stack map tables when this option is used).
+ */
+ boolean invalidFrames;
+
+ // ------------------------------------------------------------------------
+ // Static initializer
+ // ------------------------------------------------------------------------
+
+ /**
+ * Computes the instruction types of JVM opcodes.
+ */
+ static {
+ int i;
+ byte[] b = new byte[220];
+ String s = "AAAAAAAAAAAAAAAABCLMMDDDDDEEEEEEEEEEEEEEEEEEEEAAAAAAAADD"
+ + "DDDEEEEEEEEEEEEEEEEEEEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ + "AAAAAAAAAAAAAAAAANAAAAAAAAAAAAAAAAAAAAJJJJJJJJJJJJJJJJDOPAA"
+ + "AAAAGGGGGGGHIFBFAAFFAARQJJKKJJJJJJJJJJJJJJJJJJ";
+ for (i = 0; i < b.length; ++i) {
+ b[i] = (byte) (s.charAt(i) - 'A');
+ }
+ TYPE = b;
+
+ // code to generate the above string
+ //
+ // // SBYTE_INSN instructions
+ // b[Constants.NEWARRAY] = SBYTE_INSN;
+ // b[Constants.BIPUSH] = SBYTE_INSN;
+ //
+ // // SHORT_INSN instructions
+ // b[Constants.SIPUSH] = SHORT_INSN;
+ //
+ // // (IMPL)VAR_INSN instructions
+ // b[Constants.RET] = VAR_INSN;
+ // for (i = Constants.ILOAD; i <= Constants.ALOAD; ++i) {
+ // b[i] = VAR_INSN;
+ // }
+ // for (i = Constants.ISTORE; i <= Constants.ASTORE; ++i) {
+ // b[i] = VAR_INSN;
+ // }
+ // for (i = 26; i <= 45; ++i) { // ILOAD_0 to ALOAD_3
+ // b[i] = IMPLVAR_INSN;
+ // }
+ // for (i = 59; i <= 78; ++i) { // ISTORE_0 to ASTORE_3
+ // b[i] = IMPLVAR_INSN;
+ // }
+ //
+ // // TYPE_INSN instructions
+ // b[Constants.NEW] = TYPE_INSN;
+ // b[Constants.ANEWARRAY] = TYPE_INSN;
+ // b[Constants.CHECKCAST] = TYPE_INSN;
+ // b[Constants.INSTANCEOF] = TYPE_INSN;
+ //
+ // // (Set)FIELDORMETH_INSN instructions
+ // for (i = Constants.GETSTATIC; i <= Constants.INVOKESTATIC; ++i) {
+ // b[i] = FIELDORMETH_INSN;
+ // }
+ // b[Constants.INVOKEINTERFACE] = ITFMETH_INSN;
+ // b[Constants.INVOKEDYNAMIC] = INDYMETH_INSN;
+ //
+ // // LABEL(W)_INSN instructions
+ // for (i = Constants.IFEQ; i <= Constants.JSR; ++i) {
+ // b[i] = LABEL_INSN;
+ // }
+ // b[Constants.IFNULL] = LABEL_INSN;
+ // b[Constants.IFNONNULL] = LABEL_INSN;
+ // b[200] = LABELW_INSN; // GOTO_W
+ // b[201] = LABELW_INSN; // JSR_W
+ // // temporary opcodes used internally by ASM - see Label and
+ // MethodWriter
+ // for (i = 202; i < 220; ++i) {
+ // b[i] = LABEL_INSN;
+ // }
+ //
+ // // LDC(_W) instructions
+ // b[Constants.LDC] = LDC_INSN;
+ // b[19] = LDCW_INSN; // LDC_W
+ // b[20] = LDCW_INSN; // LDC2_W
+ //
+ // // special instructions
+ // b[Constants.IINC] = IINC_INSN;
+ // b[Constants.TABLESWITCH] = TABL_INSN;
+ // b[Constants.LOOKUPSWITCH] = LOOK_INSN;
+ // b[Constants.MULTIANEWARRAY] = MANA_INSN;
+ // b[196] = WIDE_INSN; // WIDE
+ //
+ // for (i = 0; i < b.length; ++i) {
+ // System.err.print((char)('A' + b[i]));
+ // }
+ // System.err.println();
+ }
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new {@link ClassWriter} object.
+ *
+ * @param flags option flags that can be used to modify the default behavior
+ * of this class. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}.
+ */
+ public ClassWriter(final int flags) {
+ super(Opcodes.ASM4);
+ index = 1;
+ pool = new ByteVector();
+ items = new Item[256];
+ threshold = (int) (0.75d * items.length);
+ key = new Item();
+ key2 = new Item();
+ key3 = new Item();
+ key4 = new Item();
+ this.computeMaxs = (flags & COMPUTE_MAXS) != 0;
+ this.computeFrames = (flags & COMPUTE_FRAMES) != 0;
+ }
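+
+ // For example, new ClassWriter(COMPUTE_MAXS | COMPUTE_FRAMES) asks the writer
+ // to recompute both max stack/locals and stack map frames from the bytecode;
+ // as documented above, COMPUTE_FRAMES alone already implies COMPUTE_MAXS.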
+
+ /**
+ * Constructs a new {@link ClassWriter} object and enables optimizations for
+ * "mostly add" bytecode transformations. These optimizations are the
+ * following:
+ *
+ * <ul> <li>The constant pool from the original class is copied as is in the
+ * new class, which saves time. New constant pool entries will be added at
+ * the end if necessary, but unused constant pool entries <i>won't be
+ * removed</i>.</li> <li>Methods that are not transformed are copied as is
+ * in the new class, directly from the original class bytecode (i.e. without
+ * emitting visit events for all the method instructions), which saves a
+ * <i>lot</i> of time. Untransformed methods are detected by the fact that
+ * the {@link ClassReader} receives {@link MethodVisitor} objects that come
+ * from a {@link ClassWriter} (and not from any other {@link ClassVisitor}
+ * instance).</li> </ul>
+ *
+ * @param classReader the {@link ClassReader} used to read the original
+ * class. It will be used to copy the entire constant pool from the
+ * original class and also to copy other fragments of original
+ * bytecode where applicable.
+ * @param flags option flags that can be used to modify the default behavior
+ * of this class. <i>These option flags do not affect methods that
+ * are copied as is in the new class. This means that neither the maximum
+ * stack size nor the stack frames will be computed for these
+ * methods</i>. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}.
+ */
+ public ClassWriter(final ClassReader classReader, final int flags) {
+ this(flags);
+ classReader.copyPool(this);
+ this.cr = classReader;
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the ClassVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public final void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ this.version = version;
+ this.access = access;
+ this.name = newClass(name);
+ thisName = name;
+ if (ClassReader.SIGNATURES && signature != null) {
+ this.signature = newUTF8(signature);
+ }
+ this.superName = superName == null ? 0 : newClass(superName);
+ if (interfaces != null && interfaces.length > 0) {
+ interfaceCount = interfaces.length;
+ this.interfaces = new int[interfaceCount];
+ for (int i = 0; i < interfaceCount; ++i) {
+ this.interfaces[i] = newClass(interfaces[i]);
+ }
+ }
+ }
+
+ @Override
+ public final void visitSource(final String file, final String debug) {
+ if (file != null) {
+ sourceFile = newUTF8(file);
+ }
+ if (debug != null) {
+ sourceDebug = new ByteVector().putUTF8(debug);
+ }
+ }
+
+ @Override
+ public final void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ enclosingMethodOwner = newClass(owner);
+ if (name != null && desc != null) {
+ enclosingMethod = newNameType(name, desc);
+ }
+ }
+
+ @Override
+ public final AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write type, and reserve space for values count
+ bv.putShort(newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(this, true, bv, bv, 2);
+ if (visible) {
+ aw.next = anns;
+ anns = aw;
+ } else {
+ aw.next = ianns;
+ ianns = aw;
+ }
+ return aw;
+ }
+
+ @Override
+ public final void visitAttribute(final Attribute attr) {
+ attr.next = attrs;
+ attrs = attr;
+ }
+
+ @Override
+ public final void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ if (innerClasses == null) {
+ innerClasses = new ByteVector();
+ }
+ ++innerClassesCount;
+ innerClasses.putShort(name == null ? 0 : newClass(name));
+ innerClasses.putShort(outerName == null ? 0 : newClass(outerName));
+ innerClasses.putShort(innerName == null ? 0 : newUTF8(innerName));
+ innerClasses.putShort(access);
+ }
+
+ @Override
+ public final FieldVisitor visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ return new FieldWriter(this, access, name, desc, signature, value);
+ }
+
+ @Override
+ public final MethodVisitor visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ return new MethodWriter(this,
+ access,
+ name,
+ desc,
+ signature,
+ exceptions,
+ computeMaxs,
+ computeFrames);
+ }
+
+ @Override
+ public final void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Other public methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the bytecode of the class that was built with this class writer.
+ *
+ * @return the bytecode of the class that was built with this class writer.
+ */
+ public byte[] toByteArray() {
+ if (index > Short.MAX_VALUE) {
+ throw new RuntimeException("Class file too large!");
+ }
+ // computes the real size of the bytecode of this class
+ int size = 24 + 2 * interfaceCount;
+ int nbFields = 0;
+ FieldWriter fb = firstField;
+ while (fb != null) {
+ ++nbFields;
+ size += fb.getSize();
+ fb = (FieldWriter) fb.fv;
+ }
+ int nbMethods = 0;
+ MethodWriter mb = firstMethod;
+ while (mb != null) {
+ ++nbMethods;
+ size += mb.getSize();
+ mb = (MethodWriter) mb.mv;
+ }
+ int attributeCount = 0;
+ if (bootstrapMethods != null) { // we put it as the first attribute in order
+ // to improve ClassReader.copyBootstrapMethods a bit
+ ++attributeCount;
+ size += 8 + bootstrapMethods.length;
+ newUTF8("BootstrapMethods");
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ ++attributeCount;
+ size += 8;
+ newUTF8("Signature");
+ }
+ if (sourceFile != 0) {
+ ++attributeCount;
+ size += 8;
+ newUTF8("SourceFile");
+ }
+ if (sourceDebug != null) {
+ ++attributeCount;
+ size += sourceDebug.length + 4;
+ newUTF8("SourceDebugExtension");
+ }
+ if (enclosingMethodOwner != 0) {
+ ++attributeCount;
+ size += 10;
+ newUTF8("EnclosingMethod");
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ ++attributeCount;
+ size += 6;
+ newUTF8("Deprecated");
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ ++attributeCount;
+ size += 6;
+ newUTF8("Synthetic");
+ }
+ if (innerClasses != null) {
+ ++attributeCount;
+ size += 8 + innerClasses.length;
+ newUTF8("InnerClasses");
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ ++attributeCount;
+ size += 8 + anns.getSize();
+ newUTF8("RuntimeVisibleAnnotations");
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ ++attributeCount;
+ size += 8 + ianns.getSize();
+ newUTF8("RuntimeInvisibleAnnotations");
+ }
+ if (attrs != null) {
+ attributeCount += attrs.getCount();
+ size += attrs.getSize(this, null, 0, -1, -1);
+ }
+ size += pool.length;
+ // allocates a byte vector of this size, in order to avoid unnecessary
+ // arraycopy operations in the ByteVector.enlarge() method
+ ByteVector out = new ByteVector(size);
+ out.putInt(0xCAFEBABE).putInt(version);
+ out.putShort(index).putByteArray(pool.data, 0, pool.length);
+ int mask = Opcodes.ACC_DEPRECATED
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ out.putShort(access & ~mask).putShort(name).putShort(superName);
+ out.putShort(interfaceCount);
+ for (int i = 0; i < interfaceCount; ++i) {
+ out.putShort(interfaces[i]);
+ }
+ out.putShort(nbFields);
+ fb = firstField;
+ while (fb != null) {
+ fb.put(out);
+ fb = (FieldWriter) fb.fv;
+ }
+ out.putShort(nbMethods);
+ mb = firstMethod;
+ while (mb != null) {
+ mb.put(out);
+ mb = (MethodWriter) mb.mv;
+ }
+ out.putShort(attributeCount);
+ if (bootstrapMethods != null) { // should be the first class attribute ?
+ out.putShort(newUTF8("BootstrapMethods"));
+ out.putInt(bootstrapMethods.length + 2).putShort(bootstrapMethodsCount);
+ out.putByteArray(bootstrapMethods.data, 0, bootstrapMethods.length);
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ out.putShort(newUTF8("Signature")).putInt(2).putShort(signature);
+ }
+ if (sourceFile != 0) {
+ out.putShort(newUTF8("SourceFile")).putInt(2).putShort(sourceFile);
+ }
+ if (sourceDebug != null) {
+ int len = sourceDebug.length - 2;
+ out.putShort(newUTF8("SourceDebugExtension")).putInt(len);
+ out.putByteArray(sourceDebug.data, 2, len);
+ }
+ if (enclosingMethodOwner != 0) {
+ out.putShort(newUTF8("EnclosingMethod")).putInt(4);
+ out.putShort(enclosingMethodOwner).putShort(enclosingMethod);
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ out.putShort(newUTF8("Deprecated")).putInt(0);
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ out.putShort(newUTF8("Synthetic")).putInt(0);
+ }
+ if (innerClasses != null) {
+ out.putShort(newUTF8("InnerClasses"));
+ out.putInt(innerClasses.length + 2).putShort(innerClassesCount);
+ out.putByteArray(innerClasses.data, 0, innerClasses.length);
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ out.putShort(newUTF8("RuntimeVisibleAnnotations"));
+ anns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ out.putShort(newUTF8("RuntimeInvisibleAnnotations"));
+ ianns.put(out);
+ }
+ if (attrs != null) {
+ attrs.put(this, null, 0, -1, -1, out);
+ }
+ if (invalidFrames) {
+ ClassWriter cw = new ClassWriter(COMPUTE_FRAMES);
+ new ClassReader(out.data).accept(cw, ClassReader.SKIP_FRAMES);
+ return cw.toByteArray();
+ }
+ return out.data;
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: constant pool management
+ // ------------------------------------------------------------------------
+
+ /**
+ * Adds a number or string constant to the constant pool of the class being
+ * built. Does nothing if the constant pool already contains a similar item.
+ *
+ * @param cst the value of the constant to be added to the constant pool.
+ * This parameter must be an {@link Integer}, a {@link Byte}, a
+ * {@link Character}, a {@link Short}, a {@link Boolean}, a {@link Float},
+ * a {@link Long}, a {@link Double}, a {@link String}, a {@link Type} or a
+ * {@link Handle}.
+ * @return a new or already existing constant item with the given value.
+ */
+ Item newConstItem(final Object cst) {
+ if (cst instanceof Integer) {
+ int val = ((Integer) cst).intValue();
+ return newInteger(val);
+ } else if (cst instanceof Byte) {
+ int val = ((Byte) cst).intValue();
+ return newInteger(val);
+ } else if (cst instanceof Character) {
+ int val = ((Character) cst).charValue();
+ return newInteger(val);
+ } else if (cst instanceof Short) {
+ int val = ((Short) cst).intValue();
+ return newInteger(val);
+ } else if (cst instanceof Boolean) {
+ int val = ((Boolean) cst).booleanValue() ? 1 : 0;
+ return newInteger(val);
+ } else if (cst instanceof Float) {
+ float val = ((Float) cst).floatValue();
+ return newFloat(val);
+ } else if (cst instanceof Long) {
+ long val = ((Long) cst).longValue();
+ return newLong(val);
+ } else if (cst instanceof Double) {
+ double val = ((Double) cst).doubleValue();
+ return newDouble(val);
+ } else if (cst instanceof String) {
+ return newString((String) cst);
+ } else if (cst instanceof Type) {
+ Type t = (Type) cst;
+ int s = t.getSort();
+ if (s == Type.ARRAY) {
+ return newClassItem(t.getDescriptor());
+ } else if (s == Type.OBJECT) {
+ return newClassItem(t.getInternalName());
+ } else { // s == Type.METHOD
+ return newMethodTypeItem(t.getDescriptor());
+ }
+ } else if (cst instanceof Handle) {
+ Handle h = (Handle) cst;
+ return newHandleItem(h.tag, h.owner, h.name, h.desc);
+ } else {
+ throw new IllegalArgumentException("value " + cst);
+ }
+ }
+
+ /**
+ * Adds a number or string constant to the constant pool of the class being
+ * built. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param cst the value of the constant to be added to the constant pool.
+ * This parameter must be an {@link Integer}, a {@link Float}, a
+ * {@link Long}, a {@link Double} or a {@link String}.
+ * @return the index of a new or already existing constant item with the
+ * given value.
+ */
+ public int newConst(final Object cst) {
+ return newConstItem(cst).index;
+ }
+
+ /**
+ * Adds a UTF8 string to the constant pool of the class being built. Does
+ * nothing if the constant pool already contains a similar item. <i>This
+ * method is intended for {@link Attribute} sub classes, and is normally not
+ * needed by class generators or adapters.</i>
+ *
+ * @param value the String value.
+ * @return the index of a new or already existing UTF8 item.
+ */
+ public int newUTF8(final String value) {
+ key.set(UTF8, value, null, null);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(UTF8).putUTF8(value);
+ result = new Item(index++, key);
+ put(result);
+ }
+ return result.index;
+ }
+
+ /**
+ * Adds a class reference to the constant pool of the class being built.
+ * Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param value the internal name of the class.
+ * @return a new or already existing class reference item.
+ */
+ Item newClassItem(final String value) {
+ key2.set(CLASS, value, null, null);
+ Item result = get(key2);
+ if (result == null) {
+ pool.put12(CLASS, newUTF8(value));
+ result = new Item(index++, key2);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a class reference to the constant pool of the class being built.
+ * Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param value the internal name of the class.
+ * @return the index of a new or already existing class reference item.
+ */
+ public int newClass(final String value) {
+ return newClassItem(value).index;
+ }
+
+ /**
+ * Adds a method type reference to the constant pool of the class being
+ * built. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param methodDesc method descriptor of the method type.
+ * @return a new or already existing method type reference item.
+ */
+ Item newMethodTypeItem(final String methodDesc) {
+ key2.set(MTYPE, methodDesc, null, null);
+ Item result = get(key2);
+ if (result == null) {
+ pool.put12(MTYPE, newUTF8(methodDesc));
+ result = new Item(index++, key2);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a method type reference to the constant pool of the class being
+ * built. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param methodDesc method descriptor of the method type.
+ * @return the index of a new or already existing method type reference
+ * item.
+ */
+ public int newMethodType(final String methodDesc) {
+ return newMethodTypeItem(methodDesc).index;
+ }
+
+ /**
+ * Adds a handle to the constant pool of the class being built. Does nothing
+ * if the constant pool already contains a similar item. <i>This method is
+ * intended for {@link Attribute} sub classes, and is normally not needed by
+ * class generators or adapters.</i>
+ *
+ * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+ * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+ * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner the internal name of the field or method owner class.
+ * @param name the name of the field or method.
+ * @param desc the descriptor of the field or method.
+ * @return a new or an already existing method handle item.
+ */
+ Item newHandleItem(
+ final int tag,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ key4.set(HANDLE_BASE + tag, owner, name, desc);
+ Item result = get(key4);
+ if (result == null) {
+ if (tag <= Opcodes.H_PUTSTATIC) {
+ put112(HANDLE, tag, newField(owner, name, desc));
+ } else {
+ put112(HANDLE, tag, newMethod(owner,
+ name,
+ desc,
+ tag == Opcodes.H_INVOKEINTERFACE));
+ }
+ result = new Item(index++, key4);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a handle to the constant pool of the class being
+ * built. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+ * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+ * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner the internal name of the field or method owner class.
+ * @param name the name of the field or method.
+ * @param desc the descriptor of the field or method.
+ * @return the index of a new or already existing method handle item.
+ */
+ public int newHandle(
+ final int tag,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ return newHandleItem(tag, owner, name, desc).index;
+ }
+
+ /**
+ * Adds an invokedynamic reference to the constant pool of the class being
+ * built. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param name name of the invoked method.
+ * @param desc descriptor of the invoked method.
+ * @param bsm the bootstrap method.
+ * @param bsmArgs the bootstrap method constant arguments.
+ *
+ * @return a new or an already existing invokedynamic type reference item.
+ */
+ Item newInvokeDynamicItem(
+ final String name,
+ final String desc,
+ final Handle bsm,
+ final Object... bsmArgs)
+ {
+ // cache for performance
+ ByteVector bootstrapMethods = this.bootstrapMethods;
+ if (bootstrapMethods == null) {
+ bootstrapMethods = this.bootstrapMethods = new ByteVector();
+ }
+
+ int position = bootstrapMethods.length; // record current position
+
+ int hashCode = bsm.hashCode();
+ bootstrapMethods.putShort(newHandle(bsm.tag,
+ bsm.owner,
+ bsm.name,
+ bsm.desc));
+
+ int argsLength = bsmArgs.length;
+ bootstrapMethods.putShort(argsLength);
+
+ for (int i = 0; i < argsLength; i++) {
+ Object bsmArg = bsmArgs[i];
+ hashCode ^= bsmArg.hashCode();
+ bootstrapMethods.putShort(newConst(bsmArg));
+ }
+
+ byte[] data = bootstrapMethods.data;
+ int length = (1 + 1 + argsLength) << 1; // (bsm + argCount + arguments)
+ hashCode &= 0x7FFFFFFF;
+ Item result = items[hashCode % items.length];
+ loop: while (result != null) {
+ if (result.type != BSM || result.hashCode != hashCode) {
+ result = result.next;
+ continue;
+ }
+
+ // because the data encodes the size of the arguments,
+ // we don't need to test whether these sizes are equal
+ int resultPosition = result.intVal;
+ for (int p = 0; p < length; p++) {
+ if (data[position + p] != data[resultPosition + p]) {
+ result = result.next;
+ continue loop;
+ }
+ }
+ break;
+ }
+
+ int bootstrapMethodIndex;
+ if (result != null) {
+ bootstrapMethodIndex = result.index;
+ bootstrapMethods.length = position; // revert to old position
+ } else {
+ bootstrapMethodIndex = bootstrapMethodsCount++;
+ result = new Item(bootstrapMethodIndex);
+ result.set(position, hashCode);
+ put(result);
+ }
+
+ // now, create the InvokeDynamic constant
+ key3.set(name, desc, bootstrapMethodIndex);
+ result = get(key3);
+ if (result == null) {
+ put122(INDY, bootstrapMethodIndex, newNameType(name, desc));
+ result = new Item(index++, key3);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds an invokedynamic reference to the constant pool of the class being
+ * built. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param name name of the invoked method.
+ * @param desc descriptor of the invoked method.
+ * @param bsm the bootstrap method.
+ * @param bsmArgs the bootstrap method constant arguments.
+ *
+ * @return the index of a new or already existing invokedynamic
+ * reference item.
+ */
+ public int newInvokeDynamic(
+ final String name,
+ final String desc,
+ final Handle bsm,
+ final Object... bsmArgs)
+ {
+ return newInvokeDynamicItem(name, desc, bsm, bsmArgs).index;
+ }
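+
+ // Illustrative use, with made-up owner and method names: register an
+ // invokedynamic constant whose bootstrap method has the standard
+ // (Lookup, String, MethodType) -> CallSite shape; the returned value is the
+ // index of the CONSTANT_InvokeDynamic entry in the constant pool.
+ //
+ // Handle bsm = new Handle(Opcodes.H_INVOKESTATIC, "pkg/Bootstraps", "bootstrap",
+ //     "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
+ //     + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
+ // int indyIndex = newInvokeDynamic("apply", "()Ljava/lang/Runnable;", bsm);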
+
+ /**
+ * Adds a field reference to the constant pool of the class being built.
+ * Does nothing if the constant pool already contains a similar item.
+ *
+ * @param owner the internal name of the field's owner class.
+ * @param name the field's name.
+ * @param desc the field's descriptor.
+ * @return a new or already existing field reference item.
+ */
+ Item newFieldItem(final String owner, final String name, final String desc)
+ {
+ key3.set(FIELD, owner, name, desc);
+ Item result = get(key3);
+ if (result == null) {
+ put122(FIELD, newClass(owner), newNameType(name, desc));
+ result = new Item(index++, key3);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a field reference to the constant pool of the class being built.
+ * Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param owner the internal name of the field's owner class.
+ * @param name the field's name.
+ * @param desc the field's descriptor.
+ * @return the index of a new or already existing field reference item.
+ */
+ public int newField(final String owner, final String name, final String desc)
+ {
+ return newFieldItem(owner, name, desc).index;
+ }
+
+ /**
+ * Adds a method reference to the constant pool of the class being built.
+ * Does nothing if the constant pool already contains a similar item.
+ *
+ * @param owner the internal name of the method's owner class.
+ * @param name the method's name.
+ * @param desc the method's descriptor.
+ * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+ * @return a new or already existing method reference item.
+ */
+ Item newMethodItem(
+ final String owner,
+ final String name,
+ final String desc,
+ final boolean itf)
+ {
+ int type = itf ? IMETH : METH;
+ key3.set(type, owner, name, desc);
+ Item result = get(key3);
+ if (result == null) {
+ put122(type, newClass(owner), newNameType(name, desc));
+ result = new Item(index++, key3);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a method reference to the constant pool of the class being built.
+ * Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param owner the internal name of the method's owner class.
+ * @param name the method's name.
+ * @param desc the method's descriptor.
+ * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+ * @return the index of a new or already existing method reference item.
+ */
+ public int newMethod(
+ final String owner,
+ final String name,
+ final String desc,
+ final boolean itf)
+ {
+ return newMethodItem(owner, name, desc, itf).index;
+ }
+
+ /**
+ * Adds an integer to the constant pool of the class being built. Does
+ * nothing if the constant pool already contains a similar item.
+ *
+ * @param value the int value.
+ * @return a new or already existing int item.
+ */
+ Item newInteger(final int value) {
+ key.set(value);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(INT).putInt(value);
+ result = new Item(index++, key);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a float to the constant pool of the class being built. Does nothing
+ * if the constant pool already contains a similar item.
+ *
+ * @param value the float value.
+ * @return a new or already existing float item.
+ */
+ Item newFloat(final float value) {
+ key.set(value);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(FLOAT).putInt(key.intVal);
+ result = new Item(index++, key);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a long to the constant pool of the class being built. Does nothing
+ * if the constant pool already contains a similar item.
+ *
+ * @param value the long value.
+ * @return a new or already existing long item.
+ */
+ Item newLong(final long value) {
+ key.set(value);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(LONG).putLong(value);
+ result = new Item(index, key);
+ index += 2;
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a double to the constant pool of the class being built. Does nothing
+ * if the constant pool already contains a similar item.
+ *
+ * @param value the double value.
+ * @return a new or already existing double item.
+ */
+ Item newDouble(final double value) {
+ key.set(value);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(DOUBLE).putLong(key.longVal);
+ result = new Item(index, key);
+ index += 2;
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a string to the constant pool of the class being built. Does nothing
+ * if the constant pool already contains a similar item.
+ *
+ * @param value the String value.
+ * @return a new or already existing string item.
+ */
+ private Item newString(final String value) {
+ key2.set(STR, value, null, null);
+ Item result = get(key2);
+ if (result == null) {
+ pool.put12(STR, newUTF8(value));
+ result = new Item(index++, key2);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a name and type to the constant pool of the class being built. Does
+ * nothing if the constant pool already contains a similar item. <i>This
+ * method is intended for {@link Attribute} sub classes, and is normally not
+ * needed by class generators or adapters.</i>
+ *
+ * @param name a name.
+ * @param desc a type descriptor.
+ * @return the index of a new or already existing name and type item.
+ */
+ public int newNameType(final String name, final String desc) {
+ return newNameTypeItem(name, desc).index;
+ }
+
+ /**
+ * Adds a name and type to the constant pool of the class being built. Does
+ * nothing if the constant pool already contains a similar item.
+ *
+ * @param name a name.
+ * @param desc a type descriptor.
+ * @return a new or already existing name and type item.
+ */
+ Item newNameTypeItem(final String name, final String desc) {
+ key2.set(NAME_TYPE, name, desc, null);
+ Item result = get(key2);
+ if (result == null) {
+ put122(NAME_TYPE, newUTF8(name), newUTF8(desc));
+ result = new Item(index++, key2);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds the given internal name to {@link #typeTable} and returns its index.
+ * Does nothing if the type table already contains this internal name.
+ *
+ * @param type the internal name to be added to the type table.
+ * @return the index of this internal name in the type table.
+ */
+ int addType(final String type) {
+ key.set(TYPE_NORMAL, type, null, null);
+ Item result = get(key);
+ if (result == null) {
+ result = addType(key);
+ }
+ return result.index;
+ }
+
+ /**
+ * Adds the given "uninitialized" type to {@link #typeTable} and returns its
+ * index. This method is used for UNINITIALIZED types, made of an internal
+ * name and a bytecode offset.
+ *
+ * @param type the internal name to be added to the type table.
+ * @param offset the bytecode offset of the NEW instruction that created
+ * this UNINITIALIZED type value.
+ * @return the index of this internal name in the type table.
+ */
+ int addUninitializedType(final String type, final int offset) {
+ key.type = TYPE_UNINIT;
+ key.intVal = offset;
+ key.strVal1 = type;
+ key.hashCode = 0x7FFFFFFF & (TYPE_UNINIT + type.hashCode() + offset);
+ Item result = get(key);
+ if (result == null) {
+ result = addType(key);
+ }
+ return result.index;
+ }
+
+ /**
+ * Adds the given Item to {@link #typeTable}.
+ *
+ * @param item the value to be added to the type table.
+ * @return the added Item, which is a new Item instance with the same value
+ * as the given Item.
+ */
+ private Item addType(final Item item) {
+ ++typeCount;
+ Item result = new Item(typeCount, key);
+ put(result);
+ if (typeTable == null) {
+ typeTable = new Item[16];
+ }
+ if (typeCount == typeTable.length) {
+ Item[] newTable = new Item[2 * typeTable.length];
+ System.arraycopy(typeTable, 0, newTable, 0, typeTable.length);
+ typeTable = newTable;
+ }
+ typeTable[typeCount] = result;
+ return result;
+ }
+
+ /**
+ * Returns the index of the common super type of the two given types. This
+ * method calls {@link #getCommonSuperClass} and caches the result in the
+ * {@link #items} hash table to speed up future calls with the same
+ * parameters.
+ *
+ * @param type1 index of an internal name in {@link #typeTable}.
+ * @param type2 index of an internal name in {@link #typeTable}.
+ * @return the index of the common super type of the two given types.
+ */
+ int getMergedType(final int type1, final int type2) {
+ key2.type = TYPE_MERGED;
+ key2.longVal = type1 | (((long) type2) << 32);
+ key2.hashCode = 0x7FFFFFFF & (TYPE_MERGED + type1 + type2);
+ Item result = get(key2);
+ if (result == null) {
+ String t = typeTable[type1].strVal1;
+ String u = typeTable[type2].strVal1;
+ key2.intVal = addType(getCommonSuperClass(t, u));
+ result = new Item((short) 0, key2);
+ put(result);
+ }
+ return result.intVal;
+ }
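+
+ // For illustration: for type table indices 3 and 5 the cache key above is
+ // key2.longVal = 3 | (5L << 32), and the merged type index is stored in the
+ // cached Item's intVal, so later calls with the same pair of indices skip
+ // getCommonSuperClass entirely.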
+
+ /**
+ * Returns the common super type of the two given types. The default
+ * implementation of this method <i>loads</i> the two given classes and uses
+ * the java.lang.Class methods to find the common super class. It can be
+ * overridden to compute this common super type in other ways, in particular
+ * without actually loading any class, or to take into account the class
+ * that is currently being generated by this ClassWriter, which can of
+ * course not be loaded since it is under construction.
+ *
+ * @param type1 the internal name of a class.
+ * @param type2 the internal name of another class.
+ * @return the internal name of the common super class of the two given
+ * classes.
+ */
+ protected String getCommonSuperClass(final String type1, final String type2)
+ {
+ Class<?> c, d;
+ ClassLoader classLoader = getClass().getClassLoader();
+ try {
+ c = Class.forName(type1.replace('/', '.'), false, classLoader);
+ d = Class.forName(type2.replace('/', '.'), false, classLoader);
+ } catch (Exception e) {
+ throw new RuntimeException(e.toString());
+ }
+ if (c.isAssignableFrom(d)) {
+ return type1;
+ }
+ if (d.isAssignableFrom(c)) {
+ return type2;
+ }
+ if (c.isInterface() || d.isInterface()) {
+ return "java/lang/Object";
+ } else {
+ do {
+ c = c.getSuperclass();
+ } while (!c.isAssignableFrom(d));
+ return c.getName().replace('.', '/');
+ }
+ }
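+
+ // A minimal sketch of the kind of override mentioned above, avoiding class
+ // loading at the cost of precision (the blanket fallback to java/lang/Object
+ // is an assumption of this sketch, not part of ASM):
+ //
+ //   @Override
+ //   protected String getCommonSuperClass(String type1, String type2) {
+ //       if (type1.equals(type2)) {
+ //           return type1;
+ //       }
+ //       // without loading the classes the hierarchy is unknown, so widen
+ //       // conservatively to java/lang/Object
+ //       return "java/lang/Object";
+ //   }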
+
+ /**
+ * Returns the constant pool's hash table item which is equal to the given
+ * item.
+ *
+ * @param key a constant pool item.
+ * @return the constant pool's hash table item which is equal to the given
+ * item, or <tt>null</tt> if there is no such item.
+ */
+ private Item get(final Item key) {
+ Item i = items[key.hashCode % items.length];
+ while (i != null && (i.type != key.type || !key.isEqualTo(i))) {
+ i = i.next;
+ }
+ return i;
+ }
+
+ /**
+ * Puts the given item in the constant pool's hash table. The hash table
+ * <i>must</i> not already contain this item.
+ *
+ * @param i the item to be added to the constant pool's hash table.
+ */
+ private void put(final Item i) {
+ if (index + typeCount > threshold) {
+ int ll = items.length;
+ int nl = ll * 2 + 1;
+ Item[] newItems = new Item[nl];
+ for (int l = ll - 1; l >= 0; --l) {
+ Item j = items[l];
+ while (j != null) {
+ int index = j.hashCode % newItems.length;
+ Item k = j.next;
+ j.next = newItems[index];
+ newItems[index] = j;
+ j = k;
+ }
+ }
+ items = newItems;
+ threshold = (int) (nl * 0.75);
+ }
+ int index = i.hashCode % items.length;
+ i.next = items[index];
+ items[index] = i;
+ }
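+
+ // Note that constant pool items and type table items share this hash table,
+ // which is why the resize check above uses index + typeCount; the threshold
+ // corresponds to a load factor of 0.75.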
+
+ /**
+ * Puts one byte and two shorts into the constant pool.
+ *
+ * @param b a byte.
+ * @param s1 a short.
+ * @param s2 another short.
+ */
+ private void put122(final int b, final int s1, final int s2) {
+ pool.put12(b, s1).putShort(s2);
+ }
+
+ /**
+ * Puts two bytes and one short into the constant pool.
+ *
+ * @param b1 a byte.
+ * @param b2 another byte.
+ * @param s a short.
+ */
+ private void put112(final int b1, final int b2, final int s) {
+ pool.put11(b1, b2).putShort(s);
+ }
+}
diff --git a/src/asm/scala/tools/asm/CustomAttr.java b/src/asm/scala/tools/asm/CustomAttr.java
new file mode 100644
index 0000000000..5ecfd283d0
--- /dev/null
+++ b/src/asm/scala/tools/asm/CustomAttr.java
@@ -0,0 +1,20 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ */
+
+package scala.tools.asm;
+
+import scala.tools.asm.Attribute;
+
+/**
+ * A subclass of ASM's Attribute for the sole purpose of accessing a protected field there.
+ *
+ */
+public class CustomAttr extends Attribute {
+
+ public CustomAttr(final String type, final byte[] value) {
+ super(type);
+ super.value = value;
+ }
+
+}
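+
+// Usage sketch (illustrative, names are assumptions): an attribute built this
+// way is attached through the regular visitor API, e.g.
+//   classVisitor.visitAttribute(new CustomAttr("SomeAttr", bytes));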
diff --git a/src/asm/scala/tools/asm/Edge.java b/src/asm/scala/tools/asm/Edge.java
new file mode 100644
index 0000000000..daac1f7bb0
--- /dev/null
+++ b/src/asm/scala/tools/asm/Edge.java
@@ -0,0 +1,75 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * An edge in the control flow graph of a method body. See {@link Label Label}.
+ *
+ * @author Eric Bruneton
+ */
+class Edge {
+
+ /**
+ * Denotes a normal control flow graph edge.
+ */
+ static final int NORMAL = 0;
+
+ /**
+ * Denotes a control flow graph edge corresponding to an exception handler.
+ * More precisely any {@link Edge} whose {@link #info} is strictly positive
+ * corresponds to an exception handler. The actual value of {@link #info} is
+ * the index, in the {@link ClassWriter} type table, of the exception that
+ * is caught.
+ */
+ static final int EXCEPTION = 0x7FFFFFFF;
+
+ /**
+ * Information about this control flow graph edge. If
+ * {@link ClassWriter#COMPUTE_MAXS} is used this field is the (relative)
+ * stack size in the basic block from which this edge originates. This size
+ * is equal to the stack size at the "jump" instruction to which this edge
+ * corresponds, relatively to the stack size at the beginning of the
+ * originating basic block. If {@link ClassWriter#COMPUTE_FRAMES} is used,
+ * this field is the kind of this control flow graph edge (i.e. NORMAL or
+ * EXCEPTION).
+ */
+ int info;
+
+ /**
+ * The successor block of the basic block from which this edge originates.
+ */
+ Label successor;
+
+ /**
+ * The next edge in the list of successors of the originating basic block.
+ * See {@link Label#successors successors}.
+ */
+ Edge next;
+}
diff --git a/src/asm/scala/tools/asm/FieldVisitor.java b/src/asm/scala/tools/asm/FieldVisitor.java
new file mode 100644
index 0000000000..9ac0f6236f
--- /dev/null
+++ b/src/asm/scala/tools/asm/FieldVisitor.java
@@ -0,0 +1,115 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A visitor to visit a Java field. The methods of this class must be called
+ * in the following order: ( <tt>visitAnnotation</tt> |
+ * <tt>visitAttribute</tt> )* <tt>visitEnd</tt>.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class FieldVisitor {
+
+ /**
+ * The ASM API version implemented by this visitor. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * The field visitor to which this visitor must delegate method calls. May
+ * be null.
+ */
+ protected FieldVisitor fv;
+
+ /**
+ * Constructs a new {@link FieldVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public FieldVisitor(final int api) {
+ this(api, null);
+ }
+
+ /**
+ * Constructs a new {@link FieldVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param fv the field visitor to which this visitor must delegate method
+ * calls. May be null.
+ */
+ public FieldVisitor(final int api, final FieldVisitor fv) {
+ /*if (api != Opcodes.ASM4) {
+ throw new IllegalArgumentException();
+ }*/
+ this.api = api;
+ this.fv = fv;
+ }
+
+ /**
+ * Visits an annotation of the field.
+ *
+ * @param desc the class descriptor of the annotation class.
+ * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
+ if (fv != null) {
+ return fv.visitAnnotation(desc, visible);
+ }
+ return null;
+ }
+
+ /**
+ * Visits a non-standard attribute of the field.
+ *
+ * @param attr an attribute.
+ */
+ public void visitAttribute(Attribute attr) {
+ if (fv != null) {
+ fv.visitAttribute(attr);
+ }
+ }
+
+ /**
+ * Visits the end of the field. This method, which is the last one to be
+ * called, is used to inform the visitor that all the annotations and
+ * attributes of the field have been visited.
+ */
+ public void visitEnd() {
+ if (fv != null) {
+ fv.visitEnd();
+ }
+ }
+}
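+
+// A minimal usage sketch (illustrative, the class name is an assumption): a
+// FieldVisitor that delegates to fv and merely counts annotations could be
+//
+//   class CountingFieldVisitor extends FieldVisitor {
+//       int annotations;
+//       CountingFieldVisitor(FieldVisitor fv) { super(Opcodes.ASM4, fv); }
+//       @Override
+//       public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
+//           annotations++;
+//           return super.visitAnnotation(desc, visible);
+//       }
+//   }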
diff --git a/src/asm/scala/tools/asm/FieldWriter.java b/src/asm/scala/tools/asm/FieldWriter.java
new file mode 100644
index 0000000000..45ef6d0df3
--- /dev/null
+++ b/src/asm/scala/tools/asm/FieldWriter.java
@@ -0,0 +1,271 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A {@link FieldVisitor} that generates Java fields in bytecode form.
+ *
+ * @author Eric Bruneton
+ */
+final class FieldWriter extends FieldVisitor {
+
+ /**
+ * The class writer to which this field must be added.
+ */
+ private final ClassWriter cw;
+
+ /**
+ * Access flags of this field.
+ */
+ private final int access;
+
+ /**
+ * The index of the constant pool item that contains the name of this
+ * field.
+ */
+ private final int name;
+
+ /**
+ * The index of the constant pool item that contains the descriptor of this
+ * field.
+ */
+ private final int desc;
+
+ /**
+ * The index of the constant pool item that contains the signature of this
+ * field.
+ */
+ private int signature;
+
+ /**
+ * The index of the constant pool item that contains the constant value of
+ * this field.
+ */
+ private int value;
+
+ /**
+ * The runtime visible annotations of this field. May be <tt>null</tt>.
+ */
+ private AnnotationWriter anns;
+
+ /**
+ * The runtime invisible annotations of this field. May be <tt>null</tt>.
+ */
+ private AnnotationWriter ianns;
+
+ /**
+ * The non-standard attributes of this field. May be <tt>null</tt>.
+ */
+ private Attribute attrs;
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new {@link FieldWriter}.
+ *
+ * @param cw the class writer to which this field must be added.
+ * @param access the field's access flags (see {@link Opcodes}).
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link Type}).
+ * @param signature the field's signature. May be <tt>null</tt>.
+ * @param value the field's constant value. May be <tt>null</tt>.
+ */
+ FieldWriter(
+ final ClassWriter cw,
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ super(Opcodes.ASM4);
+ if (cw.firstField == null) {
+ cw.firstField = this;
+ } else {
+ cw.lastField.fv = this;
+ }
+ cw.lastField = this;
+ this.cw = cw;
+ this.access = access;
+ this.name = cw.newUTF8(name);
+ this.desc = cw.newUTF8(desc);
+ if (ClassReader.SIGNATURES && signature != null) {
+ this.signature = cw.newUTF8(signature);
+ }
+ if (value != null) {
+ this.value = cw.newConstItem(value).index;
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the FieldVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
+ if (visible) {
+ aw.next = anns;
+ anns = aw;
+ } else {
+ aw.next = ianns;
+ ianns = aw;
+ }
+ return aw;
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ attr.next = attrs;
+ attrs = attr;
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the size of this field.
+ *
+ * @return the size of this field.
+ */
+ int getSize() {
+ int size = 8;
+ if (value != 0) {
+ cw.newUTF8("ConstantValue");
+ size += 8;
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ cw.newUTF8("Synthetic");
+ size += 6;
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ cw.newUTF8("Deprecated");
+ size += 6;
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ cw.newUTF8("Signature");
+ size += 8;
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ cw.newUTF8("RuntimeVisibleAnnotations");
+ size += 8 + anns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ cw.newUTF8("RuntimeInvisibleAnnotations");
+ size += 8 + ianns.getSize();
+ }
+ if (attrs != null) {
+ size += attrs.getSize(cw, null, 0, -1, -1);
+ }
+ return size;
+ }
+
+ /**
+ * Puts the content of this field into the given byte vector.
+ *
+ * @param out where the content of this field must be put.
+ */
+ void put(final ByteVector out) {
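+ // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE is a pseudo access flag; the division
+ // below scales it down to Opcodes.ACC_SYNTHETIC so that, when the pseudo
+ // flag is set, the real synthetic flag is dropped from the written access
+ // flags and expressed through the "Synthetic" attribute emitted further down.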
+ int mask = Opcodes.ACC_DEPRECATED
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ out.putShort(access & ~mask).putShort(name).putShort(desc);
+ int attributeCount = 0;
+ if (value != 0) {
+ ++attributeCount;
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ ++attributeCount;
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ ++attributeCount;
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ ++attributeCount;
+ }
+ if (attrs != null) {
+ attributeCount += attrs.getCount();
+ }
+ out.putShort(attributeCount);
+ if (value != 0) {
+ out.putShort(cw.newUTF8("ConstantValue"));
+ out.putInt(2).putShort(value);
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ out.putShort(cw.newUTF8("Deprecated")).putInt(0);
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ out.putShort(cw.newUTF8("Signature"));
+ out.putInt(2).putShort(signature);
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ out.putShort(cw.newUTF8("RuntimeVisibleAnnotations"));
+ anns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations"));
+ ianns.put(out);
+ }
+ if (attrs != null) {
+ attrs.put(cw, null, 0, -1, -1, out);
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/Frame.java b/src/asm/scala/tools/asm/Frame.java
new file mode 100644
index 0000000000..387b56796d
--- /dev/null
+++ b/src/asm/scala/tools/asm/Frame.java
@@ -0,0 +1,1435 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * Information about the input and output stack map frames of a basic block.
+ *
+ * @author Eric Bruneton
+ */
+final class Frame {
+
+ /*
+ * Frames are computed in a two-step process: during the visit of each
+ * instruction, the state of the frame at the end of the current basic block
+ * is updated by simulating the action of the instruction on the previous
+ * state of this so-called "output frame". In visitMaxs, a fixed point algorithm is
+ * used to compute the "input frame" of each basic block, i.e. the stack map
+ * frame at the beginning of the basic block, starting from the input frame
+ * of the first basic block (which is computed from the method descriptor),
+ * and by using the previously computed output frames to compute the input
+ * state of the other blocks.
+ *
+ * All output and input frames are stored as arrays of integers. Reference
+ * and array types are represented by an index into a type table (which is
+ * not the same as the constant pool of the class, in order to avoid adding
+ * unnecessary constants in the pool - not all computed frames will end up
+ * being stored in the stack map table). This allows very fast type
+ * comparisons.
+ *
+ * Output stack map frames are computed relatively to the input frame of the
+ * basic block, which is not yet known when output frames are computed. It
+ * is therefore necessary to be able to represent abstract types such as
+ * "the type at position x in the input frame locals" or "the type at
+ * position x from the top of the input frame stack" or even "the type at
+ * position x in the input frame, with y more (or less) array dimensions".
+ * This explains the rather complicated type format used in output frames.
+ *
+ * This format is the following: DIM KIND VALUE (4, 4 and 24 bits). DIM is a
+ * signed number of array dimensions (from -8 to 7). KIND is either BASE,
+ * LOCAL or STACK. BASE is used for types that are not relative to the input
+ * frame. LOCAL is used for types that are relative to the input local
+ * variable types. STACK is used for types that are relative to the input
+ * stack types. VALUE depends on KIND. For LOCAL types, it is an index in
+ * the input local variable types. For STACK types, it is a position
+ * relative to the top of the input frame stack. For BASE types, it is either
+ * one of the constants defined in FrameVisitor, or for OBJECT and
+ * UNINITIALIZED types, a tag and an index in the type table.
+ *
+ * Output frames can contain types of any kind and with a positive or
+ * negative dimension (and even unassigned types, represented by 0 - which
+ * does not correspond to any valid type value). Input frames can only
+ * contain BASE types of positive or zero dimension. In all cases the type
+ * table contains only internal type names (array type descriptors are
+ * forbidden - dimensions must be represented through the DIM field).
+ *
+ * The LONG and DOUBLE types are always represented by using two slots (LONG +
+ * TOP or DOUBLE + TOP), for local variable types as well as in the operand
+ * stack. This is necessary to be able to simulate DUPx_y instructions,
+ * whose effect would be dependent on the actual type values if types were
+ * always represented by a single slot in the stack (and this is not
+ * possible, since actual type values are not always known - cf LOCAL and
+ * STACK type kinds).
+ */
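+
+ /*
+ * A few examples of this encoding, for illustration: LOCAL | 2 stands for
+ * "the type of input local variable 2", STACK | 1 stands for "the type on
+ * top of the input stack", and adding ARRAY_OF to either of them yields the
+ * same type with one more array dimension.
+ */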
+
+ /**
+ * Mask to get the dimension of a frame type. This dimension is a signed
+ * integer between -8 and 7.
+ */
+ static final int DIM = 0xF0000000;
+
+ /**
+ * Constant to be added to a type to get a type with one more dimension.
+ */
+ static final int ARRAY_OF = 0x10000000;
+
+ /**
+ * Constant to be added to a type to get a type with one less dimension.
+ */
+ static final int ELEMENT_OF = 0xF0000000;
+
+ /**
+ * Mask to get the kind of a frame type.
+ *
+ * @see #BASE
+ * @see #LOCAL
+ * @see #STACK
+ */
+ static final int KIND = 0xF000000;
+
+ /**
+ * Flag used for LOCAL and STACK types. Indicates that if this type happens
+ * to be a long or double type (during the computations of input frames),
+ * then it must be set to TOP because the second word of this value has
+ * been reused to store other data in the basic block. Hence the first word
+ * no longer stores a valid long or double value.
+ */
+ static final int TOP_IF_LONG_OR_DOUBLE = 0x800000;
+
+ /**
+ * Mask to get the value of a frame type.
+ */
+ static final int VALUE = 0x7FFFFF;
+
+ /**
+ * Mask to get the kind of base types.
+ */
+ static final int BASE_KIND = 0xFF00000;
+
+ /**
+ * Mask to get the value of base types.
+ */
+ static final int BASE_VALUE = 0xFFFFF;
+
+ /**
+ * Kind of the types that are not relative to an input stack map frame.
+ */
+ static final int BASE = 0x1000000;
+
+ /**
+ * Base kind of the base reference types. The BASE_VALUE of such types is an
+ * index into the type table.
+ */
+ static final int OBJECT = BASE | 0x700000;
+
+ /**
+ * Base kind of the uninitialized base types. The BASE_VALUE of such types
+ * is an index into the type table (the Item at that index contains both an
+ * instruction offset and an internal class name).
+ */
+ static final int UNINITIALIZED = BASE | 0x800000;
+
+ /**
+ * Kind of the types that are relative to the local variable types of an
+ * input stack map frame. The value of such types is a local variable index.
+ */
+ private static final int LOCAL = 0x2000000;
+
+ /**
+ * Kind of the types that are relative to the stack of an input stack
+ * map frame. The value of such types is a position relative to the top of
+ * this stack.
+ */
+ private static final int STACK = 0x3000000;
+
+ /**
+ * The TOP type. This is a BASE type.
+ */
+ static final int TOP = BASE | 0;
+
+ /**
+ * The BOOLEAN type. This is a BASE type mainly used for array types.
+ */
+ static final int BOOLEAN = BASE | 9;
+
+ /**
+ * The BYTE type. This is a BASE type mainly used for array types.
+ */
+ static final int BYTE = BASE | 10;
+
+ /**
+ * The CHAR type. This is a BASE type mainly used for array types.
+ */
+ static final int CHAR = BASE | 11;
+
+ /**
+ * The SHORT type. This is a BASE type mainly used for array types.
+ */
+ static final int SHORT = BASE | 12;
+
+ /**
+ * The INTEGER type. This is a BASE type.
+ */
+ static final int INTEGER = BASE | 1;
+
+ /**
+ * The FLOAT type. This is a BASE type.
+ */
+ static final int FLOAT = BASE | 2;
+
+ /**
+ * The DOUBLE type. This is a BASE type.
+ */
+ static final int DOUBLE = BASE | 3;
+
+ /**
+ * The LONG type. This is a BASE type.
+ */
+ static final int LONG = BASE | 4;
+
+ /**
+ * The NULL type. This is a BASE type.
+ */
+ static final int NULL = BASE | 5;
+
+ /**
+ * The UNINITIALIZED_THIS type. This is a BASE type.
+ */
+ static final int UNINITIALIZED_THIS = BASE | 6;
+
+ /**
+ * The stack size variation corresponding to each JVM instruction. This
+ * stack variation is equal to the size of the values produced by an
+ * instruction, minus the size of the values consumed by this instruction.
+ */
+ static final int[] SIZE;
+
+ /**
+ * Computes the stack size variation corresponding to each JVM instruction.
+ */
+ static {
+ int i;
+ int[] b = new int[202];
+ String s = "EFFFFFFFFGGFFFGGFFFEEFGFGFEEEEEEEEEEEEEEEEEEEEDEDEDDDDD"
+ + "CDCDEEEEEEEEEEEEEEEEEEEEBABABBBBDCFFFGGGEDCDCDCDCDCDCDCDCD"
+ + "CDCEEEEDDDDDDDCDCDCEFEFDDEEFFDEDEEEBDDBBDDDDDDCCCCCCCCEFED"
+ + "DDCDCDEEEEEEEEEEFEEEEEEDDEEDDEE";
+ for (i = 0; i < b.length; ++i) {
+ b[i] = s.charAt(i) - 'E';
+ }
+ SIZE = b;
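+
+ // For example, SIZE[Opcodes.NOP] is 0 and SIZE[Opcodes.IADD] is -1 (IADD
+ // pops two ints and pushes one), matching the commented table below.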
+
+ // code to generate the above string
+ //
+ // int NA = 0; // not applicable (unused opcode or variable size opcode)
+ //
+ // b = new int[] {
+ // 0, //NOP, // visitInsn
+ // 1, //ACONST_NULL, // -
+ // 1, //ICONST_M1, // -
+ // 1, //ICONST_0, // -
+ // 1, //ICONST_1, // -
+ // 1, //ICONST_2, // -
+ // 1, //ICONST_3, // -
+ // 1, //ICONST_4, // -
+ // 1, //ICONST_5, // -
+ // 2, //LCONST_0, // -
+ // 2, //LCONST_1, // -
+ // 1, //FCONST_0, // -
+ // 1, //FCONST_1, // -
+ // 1, //FCONST_2, // -
+ // 2, //DCONST_0, // -
+ // 2, //DCONST_1, // -
+ // 1, //BIPUSH, // visitIntInsn
+ // 1, //SIPUSH, // -
+ // 1, //LDC, // visitLdcInsn
+ // NA, //LDC_W, // -
+ // NA, //LDC2_W, // -
+ // 1, //ILOAD, // visitVarInsn
+ // 2, //LLOAD, // -
+ // 1, //FLOAD, // -
+ // 2, //DLOAD, // -
+ // 1, //ALOAD, // -
+ // NA, //ILOAD_0, // -
+ // NA, //ILOAD_1, // -
+ // NA, //ILOAD_2, // -
+ // NA, //ILOAD_3, // -
+ // NA, //LLOAD_0, // -
+ // NA, //LLOAD_1, // -
+ // NA, //LLOAD_2, // -
+ // NA, //LLOAD_3, // -
+ // NA, //FLOAD_0, // -
+ // NA, //FLOAD_1, // -
+ // NA, //FLOAD_2, // -
+ // NA, //FLOAD_3, // -
+ // NA, //DLOAD_0, // -
+ // NA, //DLOAD_1, // -
+ // NA, //DLOAD_2, // -
+ // NA, //DLOAD_3, // -
+ // NA, //ALOAD_0, // -
+ // NA, //ALOAD_1, // -
+ // NA, //ALOAD_2, // -
+ // NA, //ALOAD_3, // -
+ // -1, //IALOAD, // visitInsn
+ // 0, //LALOAD, // -
+ // -1, //FALOAD, // -
+ // 0, //DALOAD, // -
+ // -1, //AALOAD, // -
+ // -1, //BALOAD, // -
+ // -1, //CALOAD, // -
+ // -1, //SALOAD, // -
+ // -1, //ISTORE, // visitVarInsn
+ // -2, //LSTORE, // -
+ // -1, //FSTORE, // -
+ // -2, //DSTORE, // -
+ // -1, //ASTORE, // -
+ // NA, //ISTORE_0, // -
+ // NA, //ISTORE_1, // -
+ // NA, //ISTORE_2, // -
+ // NA, //ISTORE_3, // -
+ // NA, //LSTORE_0, // -
+ // NA, //LSTORE_1, // -
+ // NA, //LSTORE_2, // -
+ // NA, //LSTORE_3, // -
+ // NA, //FSTORE_0, // -
+ // NA, //FSTORE_1, // -
+ // NA, //FSTORE_2, // -
+ // NA, //FSTORE_3, // -
+ // NA, //DSTORE_0, // -
+ // NA, //DSTORE_1, // -
+ // NA, //DSTORE_2, // -
+ // NA, //DSTORE_3, // -
+ // NA, //ASTORE_0, // -
+ // NA, //ASTORE_1, // -
+ // NA, //ASTORE_2, // -
+ // NA, //ASTORE_3, // -
+ // -3, //IASTORE, // visitInsn
+ // -4, //LASTORE, // -
+ // -3, //FASTORE, // -
+ // -4, //DASTORE, // -
+ // -3, //AASTORE, // -
+ // -3, //BASTORE, // -
+ // -3, //CASTORE, // -
+ // -3, //SASTORE, // -
+ // -1, //POP, // -
+ // -2, //POP2, // -
+ // 1, //DUP, // -
+ // 1, //DUP_X1, // -
+ // 1, //DUP_X2, // -
+ // 2, //DUP2, // -
+ // 2, //DUP2_X1, // -
+ // 2, //DUP2_X2, // -
+ // 0, //SWAP, // -
+ // -1, //IADD, // -
+ // -2, //LADD, // -
+ // -1, //FADD, // -
+ // -2, //DADD, // -
+ // -1, //ISUB, // -
+ // -2, //LSUB, // -
+ // -1, //FSUB, // -
+ // -2, //DSUB, // -
+ // -1, //IMUL, // -
+ // -2, //LMUL, // -
+ // -1, //FMUL, // -
+ // -2, //DMUL, // -
+ // -1, //IDIV, // -
+ // -2, //LDIV, // -
+ // -1, //FDIV, // -
+ // -2, //DDIV, // -
+ // -1, //IREM, // -
+ // -2, //LREM, // -
+ // -1, //FREM, // -
+ // -2, //DREM, // -
+ // 0, //INEG, // -
+ // 0, //LNEG, // -
+ // 0, //FNEG, // -
+ // 0, //DNEG, // -
+ // -1, //ISHL, // -
+ // -1, //LSHL, // -
+ // -1, //ISHR, // -
+ // -1, //LSHR, // -
+ // -1, //IUSHR, // -
+ // -1, //LUSHR, // -
+ // -1, //IAND, // -
+ // -2, //LAND, // -
+ // -1, //IOR, // -
+ // -2, //LOR, // -
+ // -1, //IXOR, // -
+ // -2, //LXOR, // -
+ // 0, //IINC, // visitIincInsn
+ // 1, //I2L, // visitInsn
+ // 0, //I2F, // -
+ // 1, //I2D, // -
+ // -1, //L2I, // -
+ // -1, //L2F, // -
+ // 0, //L2D, // -
+ // 0, //F2I, // -
+ // 1, //F2L, // -
+ // 1, //F2D, // -
+ // -1, //D2I, // -
+ // 0, //D2L, // -
+ // -1, //D2F, // -
+ // 0, //I2B, // -
+ // 0, //I2C, // -
+ // 0, //I2S, // -
+ // -3, //LCMP, // -
+ // -1, //FCMPL, // -
+ // -1, //FCMPG, // -
+ // -3, //DCMPL, // -
+ // -3, //DCMPG, // -
+ // -1, //IFEQ, // visitJumpInsn
+ // -1, //IFNE, // -
+ // -1, //IFLT, // -
+ // -1, //IFGE, // -
+ // -1, //IFGT, // -
+ // -1, //IFLE, // -
+ // -2, //IF_ICMPEQ, // -
+ // -2, //IF_ICMPNE, // -
+ // -2, //IF_ICMPLT, // -
+ // -2, //IF_ICMPGE, // -
+ // -2, //IF_ICMPGT, // -
+ // -2, //IF_ICMPLE, // -
+ // -2, //IF_ACMPEQ, // -
+ // -2, //IF_ACMPNE, // -
+ // 0, //GOTO, // -
+ // 1, //JSR, // -
+ // 0, //RET, // visitVarInsn
+ // -1, //TABLESWITCH, // visitTableSwitchInsn
+ // -1, //LOOKUPSWITCH, // visitLookupSwitch
+ // -1, //IRETURN, // visitInsn
+ // -2, //LRETURN, // -
+ // -1, //FRETURN, // -
+ // -2, //DRETURN, // -
+ // -1, //ARETURN, // -
+ // 0, //RETURN, // -
+ // NA, //GETSTATIC, // visitFieldInsn
+ // NA, //PUTSTATIC, // -
+ // NA, //GETFIELD, // -
+ // NA, //PUTFIELD, // -
+ // NA, //INVOKEVIRTUAL, // visitMethodInsn
+ // NA, //INVOKESPECIAL, // -
+ // NA, //INVOKESTATIC, // -
+ // NA, //INVOKEINTERFACE, // -
+ // NA, //INVOKEDYNAMIC, // visitInvokeDynamicInsn
+ // 1, //NEW, // visitTypeInsn
+ // 0, //NEWARRAY, // visitIntInsn
+ // 0, //ANEWARRAY, // visitTypeInsn
+ // 0, //ARRAYLENGTH, // visitInsn
+ // NA, //ATHROW, // -
+ // 0, //CHECKCAST, // visitTypeInsn
+ // 0, //INSTANCEOF, // -
+ // -1, //MONITORENTER, // visitInsn
+ // -1, //MONITOREXIT, // -
+ // NA, //WIDE, // NOT VISITED
+ // NA, //MULTIANEWARRAY, // visitMultiANewArrayInsn
+ // -1, //IFNULL, // visitJumpInsn
+ // -1, //IFNONNULL, // -
+ // NA, //GOTO_W, // -
+ // NA, //JSR_W, // -
+ // };
+ // for (i = 0; i < b.length; ++i) {
+ // System.err.print((char)('E' + b[i]));
+ // }
+ // System.err.println();
+ }
+
+ /**
+ * The label (i.e. basic block) to which these input and output stack map
+ * frames correspond.
+ */
+ Label owner;
+
+ /**
+ * The input stack map frame locals.
+ */
+ int[] inputLocals;
+
+ /**
+ * The input stack map frame stack.
+ */
+ int[] inputStack;
+
+ /**
+ * The output stack map frame locals.
+ */
+ private int[] outputLocals;
+
+ /**
+ * The output stack map frame stack.
+ */
+ private int[] outputStack;
+
+ /**
+ * Relative size of the output stack. The exact semantics of this field
+ * depends on the algorithm that is used.
+ *
+ * When only the maximum stack size is computed, this field is the size of
+ * the output stack relative to the top of the input stack.
+ *
+ * When the stack map frames are completely computed, this field is the
+ * actual number of types in {@link #outputStack}.
+ */
+ private int outputStackTop;
+
+ /**
+ * Number of types that are initialized in the basic block.
+ *
+ * @see #initializations
+ */
+ private int initializationCount;
+
+ /**
+ * The types that are initialized in the basic block. A constructor
+ * invocation on an UNINITIALIZED or UNINITIALIZED_THIS type must replace
+ * <i>every occurrence</i> of this type in the local variables and in the
+ * operand stack. This cannot be done during the first phase of the
+ * algorithm since, during this phase, the local variables and the operand
+ * stack are not completely computed. It is therefore necessary to store the
+ * types on which constructors are invoked in the basic block, in order to
+ * do this replacement during the second phase of the algorithm, where the
+ * frames are fully computed. Note that this array can contain types that
+ * are relative to input locals or to the input stack (see below for the
+ * description of the algorithm).
+ */
+ private int[] initializations;
+
+ /**
+ * Returns the output frame local variable type at the given index.
+ *
+ * @param local the index of the local that must be returned.
+ * @return the output frame local variable type at the given index.
+ */
+ private int get(final int local) {
+ if (outputLocals == null || local >= outputLocals.length) {
+ // this local has never been assigned in this basic block,
+ // so it is still equal to its value in the input frame
+ return LOCAL | local;
+ } else {
+ int type = outputLocals[local];
+ if (type == 0) {
+ // this local has never been assigned in this basic block,
+ // so it is still equal to its value in the input frame
+ type = outputLocals[local] = LOCAL | local;
+ }
+ return type;
+ }
+ }
+
+ /**
+ * Sets the output frame local variable type at the given index.
+ *
+ * @param local the index of the local that must be set.
+ * @param type the value of the local that must be set.
+ */
+ private void set(final int local, final int type) {
+ // creates and/or resizes the output local variables array if necessary
+ if (outputLocals == null) {
+ outputLocals = new int[10];
+ }
+ int n = outputLocals.length;
+ if (local >= n) {
+ int[] t = new int[Math.max(local + 1, 2 * n)];
+ System.arraycopy(outputLocals, 0, t, 0, n);
+ outputLocals = t;
+ }
+ // sets the local variable
+ outputLocals[local] = type;
+ }
+
+ /**
+ * Pushes a new type onto the output frame stack.
+ *
+ * @param type the type that must be pushed.
+ */
+ private void push(final int type) {
+ // creates and/or resizes the output stack array if necessary
+ if (outputStack == null) {
+ outputStack = new int[10];
+ }
+ int n = outputStack.length;
+ if (outputStackTop >= n) {
+ int[] t = new int[Math.max(outputStackTop + 1, 2 * n)];
+ System.arraycopy(outputStack, 0, t, 0, n);
+ outputStack = t;
+ }
+ // pushes the type on the output stack
+ outputStack[outputStackTop++] = type;
+ // updates the maximum height reached by the output stack, if needed
+ int top = owner.inputStackTop + outputStackTop;
+ if (top > owner.outputStackMax) {
+ owner.outputStackMax = top;
+ }
+ }
+
+ /**
+ * Pushes a new type onto the output frame stack.
+ *
+ * @param cw the ClassWriter to which this label belongs.
+ * @param desc the descriptor of the type to be pushed. Can also be a method
+ * descriptor (in this case this method pushes its return type onto
+ * the output frame stack).
+ */
+ private void push(final ClassWriter cw, final String desc) {
+ int type = type(cw, desc);
+ if (type != 0) {
+ push(type);
+ if (type == LONG || type == DOUBLE) {
+ push(TOP);
+ }
+ }
+ }
+
+ /**
+ * Returns the int encoding of the given type.
+ *
+ * @param cw the ClassWriter to which this label belongs.
+ * @param desc a type descriptor.
+ * @return the int encoding of the given type.
+ */
+ private static int type(final ClassWriter cw, final String desc) {
+ String t;
+ int index = desc.charAt(0) == '(' ? desc.indexOf(')') + 1 : 0;
+ switch (desc.charAt(index)) {
+ case 'V':
+ return 0;
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ return INTEGER;
+ case 'F':
+ return FLOAT;
+ case 'J':
+ return LONG;
+ case 'D':
+ return DOUBLE;
+ case 'L':
+ // stores the internal name, not the descriptor!
+ t = desc.substring(index + 1, desc.length() - 1);
+ return OBJECT | cw.addType(t);
+ // case '[':
+ default:
+ // extracts the dimensions and the element type
+ int data;
+ int dims = index + 1;
+ while (desc.charAt(dims) == '[') {
+ ++dims;
+ }
+ switch (desc.charAt(dims)) {
+ case 'Z':
+ data = BOOLEAN;
+ break;
+ case 'C':
+ data = CHAR;
+ break;
+ case 'B':
+ data = BYTE;
+ break;
+ case 'S':
+ data = SHORT;
+ break;
+ case 'I':
+ data = INTEGER;
+ break;
+ case 'F':
+ data = FLOAT;
+ break;
+ case 'J':
+ data = LONG;
+ break;
+ case 'D':
+ data = DOUBLE;
+ break;
+ // case 'L':
+ default:
+ // stores the internal name, not the descriptor
+ t = desc.substring(dims + 1, desc.length() - 1);
+ data = OBJECT | cw.addType(t);
+ }
+ return (dims - index) << 28 | data;
+ }
+ }
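+
+ // For illustration: type(cw, "I") yields INTEGER, type(cw, "[[I") yields
+ // (2 << 28) | INTEGER (two array dimensions in the DIM field), and for a
+ // method descriptor such as "(J)Ljava/lang/String;" the return type is
+ // encoded, here OBJECT | cw.addType("java/lang/String").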
+
+ /**
+ * Pops a type from the output frame stack and returns its value.
+ *
+ * @return the type that has been popped from the output frame stack.
+ */
+ private int pop() {
+ if (outputStackTop > 0) {
+ return outputStack[--outputStackTop];
+ } else {
+ // if the output frame stack is empty, pops from the input stack
+ return STACK | -(--owner.inputStackTop);
+ }
+ }
+
+ /**
+ * Pops the given number of types from the output frame stack.
+ *
+ * @param elements the number of types that must be popped.
+ */
+ private void pop(final int elements) {
+ if (outputStackTop >= elements) {
+ outputStackTop -= elements;
+ } else {
+ // if the number of elements to be popped is greater than the number
+ // of elements in the output stack, clear it, and pop the remaining
+ // elements from the input stack.
+ owner.inputStackTop -= elements - outputStackTop;
+ outputStackTop = 0;
+ }
+ }
+
+ /**
+ * Pops a type from the output frame stack.
+ *
+ * @param desc the descriptor of the type to be popped. Can also be a method
+ * descriptor (in this case this method pops the types corresponding
+ * to the method arguments).
+ */
+ private void pop(final String desc) {
+ char c = desc.charAt(0);
+ if (c == '(') {
+ pop((Type.getArgumentsAndReturnSizes(desc) >> 2) - 1);
+ } else if (c == 'J' || c == 'D') {
+ pop(2);
+ } else {
+ pop(1);
+ }
+ }
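+
+ // For illustration: for a method descriptor such as "(IJ)V",
+ // (Type.getArgumentsAndReturnSizes(desc) >> 2) - 1 evaluates to 3 (one slot
+ // for the int, two for the long; the -1 removes the implicit "this" slot
+ // counted by getArgumentsAndReturnSizes), so exactly the argument slots are
+ // popped from the stack.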
+
+ /**
+ * Adds a new type to the list of types on which a constructor is invoked in
+ * the basic block.
+ *
+ * @param var a type on which a constructor is invoked.
+ */
+ private void init(final int var) {
+ // creates and/or resizes the initializations array if necessary
+ if (initializations == null) {
+ initializations = new int[2];
+ }
+ int n = initializations.length;
+ if (initializationCount >= n) {
+ int[] t = new int[Math.max(initializationCount + 1, 2 * n)];
+ System.arraycopy(initializations, 0, t, 0, n);
+ initializations = t;
+ }
+ // stores the type to be initialized
+ initializations[initializationCount++] = var;
+ }
+
+ /**
+ * Replaces the given type with the appropriate type if it is one of the
+ * types on which a constructor is invoked in the basic block.
+ *
+ * @param cw the ClassWriter to which this label belongs.
+ * @param t a type
+ * @return t or, if t is one of the types on which a constructor is invoked
+ * in the basic block, the type corresponding to this constructor.
+ */
+ private int init(final ClassWriter cw, final int t) {
+ int s;
+ if (t == UNINITIALIZED_THIS) {
+ s = OBJECT | cw.addType(cw.thisName);
+ } else if ((t & (DIM | BASE_KIND)) == UNINITIALIZED) {
+ String type = cw.typeTable[t & BASE_VALUE].strVal1;
+ s = OBJECT | cw.addType(type);
+ } else {
+ return t;
+ }
+ for (int j = 0; j < initializationCount; ++j) {
+ int u = initializations[j];
+ int dim = u & DIM;
+ int kind = u & KIND;
+ if (kind == LOCAL) {
+ u = dim + inputLocals[u & VALUE];
+ } else if (kind == STACK) {
+ u = dim + inputStack[inputStack.length - (u & VALUE)];
+ }
+ if (t == u) {
+ return s;
+ }
+ }
+ return t;
+ }
+
+ /**
+ * Initializes the input frame of the first basic block from the method
+ * descriptor.
+ *
+ * @param cw the ClassWriter to which this label belongs.
+ * @param access the access flags of the method to which this label belongs.
+ * @param args the formal parameter types of this method.
+ * @param maxLocals the maximum number of local variables of this method.
+ */
+ void initInputFrame(
+ final ClassWriter cw,
+ final int access,
+ final Type[] args,
+ final int maxLocals)
+ {
+ inputLocals = new int[maxLocals];
+ inputStack = new int[0];
+ int i = 0;
+ if ((access & Opcodes.ACC_STATIC) == 0) {
+ if ((access & MethodWriter.ACC_CONSTRUCTOR) == 0) {
+ inputLocals[i++] = OBJECT | cw.addType(cw.thisName);
+ } else {
+ inputLocals[i++] = UNINITIALIZED_THIS;
+ }
+ }
+ for (int j = 0; j < args.length; ++j) {
+ int t = type(cw, args[j].getDescriptor());
+ inputLocals[i++] = t;
+ if (t == LONG || t == DOUBLE) {
+ inputLocals[i++] = TOP;
+ }
+ }
+ while (i < maxLocals) {
+ inputLocals[i++] = TOP;
+ }
+ }
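+
+ // For illustration: for a non-static, non-constructor method "(IJ)V" the
+ // input locals start as [OBJECT | cw.addType(cw.thisName), INTEGER, LONG,
+ // TOP, ...], with any remaining slots up to maxLocals filled with TOP.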
+
+ /**
+ * Simulates the action of the given instruction on the output stack frame.
+ *
+ * @param opcode the opcode of the instruction.
+ * @param arg the operand of the instruction, if any.
+ * @param cw the class writer to which this label belongs.
+ * @param item the operand of the instruction, if any.
+ */
+ void execute(
+ final int opcode,
+ final int arg,
+ final ClassWriter cw,
+ final Item item)
+ {
+ int t1, t2, t3, t4;
+ switch (opcode) {
+ case Opcodes.NOP:
+ case Opcodes.INEG:
+ case Opcodes.LNEG:
+ case Opcodes.FNEG:
+ case Opcodes.DNEG:
+ case Opcodes.I2B:
+ case Opcodes.I2C:
+ case Opcodes.I2S:
+ case Opcodes.GOTO:
+ case Opcodes.RETURN:
+ break;
+ case Opcodes.ACONST_NULL:
+ push(NULL);
+ break;
+ case Opcodes.ICONST_M1:
+ case Opcodes.ICONST_0:
+ case Opcodes.ICONST_1:
+ case Opcodes.ICONST_2:
+ case Opcodes.ICONST_3:
+ case Opcodes.ICONST_4:
+ case Opcodes.ICONST_5:
+ case Opcodes.BIPUSH:
+ case Opcodes.SIPUSH:
+ case Opcodes.ILOAD:
+ push(INTEGER);
+ break;
+ case Opcodes.LCONST_0:
+ case Opcodes.LCONST_1:
+ case Opcodes.LLOAD:
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.FCONST_0:
+ case Opcodes.FCONST_1:
+ case Opcodes.FCONST_2:
+ case Opcodes.FLOAD:
+ push(FLOAT);
+ break;
+ case Opcodes.DCONST_0:
+ case Opcodes.DCONST_1:
+ case Opcodes.DLOAD:
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.LDC:
+ switch (item.type) {
+ case ClassWriter.INT:
+ push(INTEGER);
+ break;
+ case ClassWriter.LONG:
+ push(LONG);
+ push(TOP);
+ break;
+ case ClassWriter.FLOAT:
+ push(FLOAT);
+ break;
+ case ClassWriter.DOUBLE:
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case ClassWriter.CLASS:
+ push(OBJECT | cw.addType("java/lang/Class"));
+ break;
+ case ClassWriter.STR:
+ push(OBJECT | cw.addType("java/lang/String"));
+ break;
+ case ClassWriter.MTYPE:
+ push(OBJECT | cw.addType("java/lang/invoke/MethodType"));
+ break;
+ // case ClassWriter.HANDLE_BASE + [1..9]:
+ default:
+ push(OBJECT | cw.addType("java/lang/invoke/MethodHandle"));
+ }
+ break;
+ case Opcodes.ALOAD:
+ push(get(arg));
+ break;
+ case Opcodes.IALOAD:
+ case Opcodes.BALOAD:
+ case Opcodes.CALOAD:
+ case Opcodes.SALOAD:
+ pop(2);
+ push(INTEGER);
+ break;
+ case Opcodes.LALOAD:
+ case Opcodes.D2L:
+ pop(2);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.FALOAD:
+ pop(2);
+ push(FLOAT);
+ break;
+ case Opcodes.DALOAD:
+ case Opcodes.L2D:
+ pop(2);
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.AALOAD:
+ pop(1);
+ t1 = pop();
+ push(ELEMENT_OF + t1);
+ break;
+ case Opcodes.ISTORE:
+ case Opcodes.FSTORE:
+ case Opcodes.ASTORE:
+ t1 = pop();
+ set(arg, t1);
+ if (arg > 0) {
+ t2 = get(arg - 1);
+ // if t2 is of kind STACK or LOCAL we cannot know its size!
+ if (t2 == LONG || t2 == DOUBLE) {
+ set(arg - 1, TOP);
+ } else if ((t2 & KIND) != BASE) {
+ set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
+ }
+ }
+ break;
+ case Opcodes.LSTORE:
+ case Opcodes.DSTORE:
+ pop(1);
+ t1 = pop();
+ set(arg, t1);
+ set(arg + 1, TOP);
+ if (arg > 0) {
+ t2 = get(arg - 1);
+ // if t2 is of kind STACK or LOCAL we cannot know its size!
+ if (t2 == LONG || t2 == DOUBLE) {
+ set(arg - 1, TOP);
+ } else if ((t2 & KIND) != BASE) {
+ set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
+ }
+ }
+ break;
+ case Opcodes.IASTORE:
+ case Opcodes.BASTORE:
+ case Opcodes.CASTORE:
+ case Opcodes.SASTORE:
+ case Opcodes.FASTORE:
+ case Opcodes.AASTORE:
+ pop(3);
+ break;
+ case Opcodes.LASTORE:
+ case Opcodes.DASTORE:
+ pop(4);
+ break;
+ case Opcodes.POP:
+ case Opcodes.IFEQ:
+ case Opcodes.IFNE:
+ case Opcodes.IFLT:
+ case Opcodes.IFGE:
+ case Opcodes.IFGT:
+ case Opcodes.IFLE:
+ case Opcodes.IRETURN:
+ case Opcodes.FRETURN:
+ case Opcodes.ARETURN:
+ case Opcodes.TABLESWITCH:
+ case Opcodes.LOOKUPSWITCH:
+ case Opcodes.ATHROW:
+ case Opcodes.MONITORENTER:
+ case Opcodes.MONITOREXIT:
+ case Opcodes.IFNULL:
+ case Opcodes.IFNONNULL:
+ pop(1);
+ break;
+ case Opcodes.POP2:
+ case Opcodes.IF_ICMPEQ:
+ case Opcodes.IF_ICMPNE:
+ case Opcodes.IF_ICMPLT:
+ case Opcodes.IF_ICMPGE:
+ case Opcodes.IF_ICMPGT:
+ case Opcodes.IF_ICMPLE:
+ case Opcodes.IF_ACMPEQ:
+ case Opcodes.IF_ACMPNE:
+ case Opcodes.LRETURN:
+ case Opcodes.DRETURN:
+ pop(2);
+ break;
+ case Opcodes.DUP:
+ t1 = pop();
+ push(t1);
+ push(t1);
+ break;
+ case Opcodes.DUP_X1:
+ t1 = pop();
+ t2 = pop();
+ push(t1);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP_X2:
+ t1 = pop();
+ t2 = pop();
+ t3 = pop();
+ push(t1);
+ push(t3);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP2:
+ t1 = pop();
+ t2 = pop();
+ push(t2);
+ push(t1);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP2_X1:
+ t1 = pop();
+ t2 = pop();
+ t3 = pop();
+ push(t2);
+ push(t1);
+ push(t3);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP2_X2:
+ t1 = pop();
+ t2 = pop();
+ t3 = pop();
+ t4 = pop();
+ push(t2);
+ push(t1);
+ push(t4);
+ push(t3);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.SWAP:
+ t1 = pop();
+ t2 = pop();
+ push(t1);
+ push(t2);
+ break;
+ case Opcodes.IADD:
+ case Opcodes.ISUB:
+ case Opcodes.IMUL:
+ case Opcodes.IDIV:
+ case Opcodes.IREM:
+ case Opcodes.IAND:
+ case Opcodes.IOR:
+ case Opcodes.IXOR:
+ case Opcodes.ISHL:
+ case Opcodes.ISHR:
+ case Opcodes.IUSHR:
+ case Opcodes.L2I:
+ case Opcodes.D2I:
+ case Opcodes.FCMPL:
+ case Opcodes.FCMPG:
+ pop(2);
+ push(INTEGER);
+ break;
+ case Opcodes.LADD:
+ case Opcodes.LSUB:
+ case Opcodes.LMUL:
+ case Opcodes.LDIV:
+ case Opcodes.LREM:
+ case Opcodes.LAND:
+ case Opcodes.LOR:
+ case Opcodes.LXOR:
+ pop(4);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.FADD:
+ case Opcodes.FSUB:
+ case Opcodes.FMUL:
+ case Opcodes.FDIV:
+ case Opcodes.FREM:
+ case Opcodes.L2F:
+ case Opcodes.D2F:
+ pop(2);
+ push(FLOAT);
+ break;
+ case Opcodes.DADD:
+ case Opcodes.DSUB:
+ case Opcodes.DMUL:
+ case Opcodes.DDIV:
+ case Opcodes.DREM:
+ pop(4);
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.LSHL:
+ case Opcodes.LSHR:
+ case Opcodes.LUSHR:
+ pop(3);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.IINC:
+ set(arg, INTEGER);
+ break;
+ case Opcodes.I2L:
+ case Opcodes.F2L:
+ pop(1);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.I2F:
+ pop(1);
+ push(FLOAT);
+ break;
+ case Opcodes.I2D:
+ case Opcodes.F2D:
+ pop(1);
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.F2I:
+ case Opcodes.ARRAYLENGTH:
+ case Opcodes.INSTANCEOF:
+ pop(1);
+ push(INTEGER);
+ break;
+ case Opcodes.LCMP:
+ case Opcodes.DCMPL:
+ case Opcodes.DCMPG:
+ pop(4);
+ push(INTEGER);
+ break;
+ case Opcodes.JSR:
+ case Opcodes.RET:
+ throw new RuntimeException("JSR/RET are not supported with computeFrames option");
+ case Opcodes.GETSTATIC:
+ push(cw, item.strVal3);
+ break;
+ case Opcodes.PUTSTATIC:
+ pop(item.strVal3);
+ break;
+ case Opcodes.GETFIELD:
+ pop(1);
+ push(cw, item.strVal3);
+ break;
+ case Opcodes.PUTFIELD:
+ pop(item.strVal3);
+ pop();
+ break;
+ case Opcodes.INVOKEVIRTUAL:
+ case Opcodes.INVOKESPECIAL:
+ case Opcodes.INVOKESTATIC:
+ case Opcodes.INVOKEINTERFACE:
+ pop(item.strVal3);
+ if (opcode != Opcodes.INVOKESTATIC) {
+ t1 = pop();
+ if (opcode == Opcodes.INVOKESPECIAL
+ && item.strVal2.charAt(0) == '<')
+ {
+ init(t1);
+ }
+ }
+ push(cw, item.strVal3);
+ break;
+ case Opcodes.INVOKEDYNAMIC:
+ pop(item.strVal2);
+ push(cw, item.strVal2);
+ break;
+ case Opcodes.NEW:
+ push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg));
+ break;
+ case Opcodes.NEWARRAY:
+ pop();
+ switch (arg) {
+ case Opcodes.T_BOOLEAN:
+ push(ARRAY_OF | BOOLEAN);
+ break;
+ case Opcodes.T_CHAR:
+ push(ARRAY_OF | CHAR);
+ break;
+ case Opcodes.T_BYTE:
+ push(ARRAY_OF | BYTE);
+ break;
+ case Opcodes.T_SHORT:
+ push(ARRAY_OF | SHORT);
+ break;
+ case Opcodes.T_INT:
+ push(ARRAY_OF | INTEGER);
+ break;
+ case Opcodes.T_FLOAT:
+ push(ARRAY_OF | FLOAT);
+ break;
+ case Opcodes.T_DOUBLE:
+ push(ARRAY_OF | DOUBLE);
+ break;
+ // case Opcodes.T_LONG:
+ default:
+ push(ARRAY_OF | LONG);
+ break;
+ }
+ break;
+ case Opcodes.ANEWARRAY:
+ String s = item.strVal1;
+ pop();
+ if (s.charAt(0) == '[') {
+ push(cw, '[' + s);
+ } else {
+ push(ARRAY_OF | OBJECT | cw.addType(s));
+ }
+ break;
+ case Opcodes.CHECKCAST:
+ s = item.strVal1;
+ pop();
+ if (s.charAt(0) == '[') {
+ push(cw, s);
+ } else {
+ push(OBJECT | cw.addType(s));
+ }
+ break;
+ // case Opcodes.MULTIANEWARRAY:
+ default:
+ pop(arg);
+ push(cw, item.strVal1);
+ break;
+ }
+ }
+
+ /**
+ * Merges the input frame of the given basic block with the input and output
+ * frames of this basic block. Returns <tt>true</tt> if the input frame of
+ * the given label has been changed by this operation.
+ *
+ * @param cw the ClassWriter to which this label belongs.
+ * @param frame the basic block whose input frame must be updated.
+ * @param edge the kind of the {@link Edge} between this label and the label of 'frame'.
+ * See {@link Edge#info}.
+ * @return <tt>true</tt> if the input frame of the given label has been
+ * changed by this operation.
+ */
+ boolean merge(final ClassWriter cw, final Frame frame, final int edge) {
+ boolean changed = false;
+ int i, s, dim, kind, t;
+
+ int nLocal = inputLocals.length;
+ int nStack = inputStack.length;
+ if (frame.inputLocals == null) {
+ frame.inputLocals = new int[nLocal];
+ changed = true;
+ }
+
+ for (i = 0; i < nLocal; ++i) {
+ if (outputLocals != null && i < outputLocals.length) {
+ s = outputLocals[i];
+ if (s == 0) {
+ t = inputLocals[i];
+ } else {
+ dim = s & DIM;
+ kind = s & KIND;
+ if (kind == BASE) {
+ t = s;
+ } else {
+ if (kind == LOCAL) {
+ t = dim + inputLocals[s & VALUE];
+ } else {
+ t = dim + inputStack[nStack - (s & VALUE)];
+ }
+ if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
+ t = TOP;
+ }
+ }
+ }
+ } else {
+ t = inputLocals[i];
+ }
+ if (initializations != null) {
+ t = init(cw, t);
+ }
+ changed |= merge(cw, t, frame.inputLocals, i);
+ }
+
+ if (edge > 0) {
+ for (i = 0; i < nLocal; ++i) {
+ t = inputLocals[i];
+ changed |= merge(cw, t, frame.inputLocals, i);
+ }
+ if (frame.inputStack == null) {
+ frame.inputStack = new int[1];
+ changed = true;
+ }
+ changed |= merge(cw, edge, frame.inputStack, 0);
+ return changed;
+ }
+
+ int nInputStack = inputStack.length + owner.inputStackTop;
+ if (frame.inputStack == null) {
+ frame.inputStack = new int[nInputStack + outputStackTop];
+ changed = true;
+ }
+
+ for (i = 0; i < nInputStack; ++i) {
+ t = inputStack[i];
+ if (initializations != null) {
+ t = init(cw, t);
+ }
+ changed |= merge(cw, t, frame.inputStack, i);
+ }
+ for (i = 0; i < outputStackTop; ++i) {
+ s = outputStack[i];
+ dim = s & DIM;
+ kind = s & KIND;
+ if (kind == BASE) {
+ t = s;
+ } else {
+ if (kind == LOCAL) {
+ t = dim + inputLocals[s & VALUE];
+ } else {
+ t = dim + inputStack[nStack - (s & VALUE)];
+ }
+ if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
+ t = TOP;
+ }
+ }
+ if (initializations != null) {
+ t = init(cw, t);
+ }
+ changed |= merge(cw, t, frame.inputStack, nInputStack + i);
+ }
+ return changed;
+ }
+
+ /**
+ * Merges the type at the given index in the given type array with the given
+ * type. Returns <tt>true</tt> if the type array has been modified by this
+ * operation.
+ *
+ * @param cw the ClassWriter to which this label belongs.
+ * @param t the type with which the type array element must be merged.
+ * @param types an array of types.
+ * @param index the index of the type that must be merged in 'types'.
+ * @return <tt>true</tt> if the type array has been modified by this
+ * operation.
+ */
+ private static boolean merge(
+ final ClassWriter cw,
+ int t,
+ final int[] types,
+ final int index)
+ {
+ int u = types[index];
+ if (u == t) {
+ // if the types are equal, merge(u,t)=u, so there is no change
+ return false;
+ }
+ if ((t & ~DIM) == NULL) {
+ if (u == NULL) {
+ return false;
+ }
+ t = NULL;
+ }
+ if (u == 0) {
+ // if types[index] has never been assigned, merge(u,t)=t
+ types[index] = t;
+ return true;
+ }
+ int v;
+ if ((u & BASE_KIND) == OBJECT || (u & DIM) != 0) {
+ // if u is a reference type of any dimension
+ if (t == NULL) {
+ // if t is the NULL type, merge(u,t)=u, so there is no change
+ return false;
+ } else if ((t & (DIM | BASE_KIND)) == (u & (DIM | BASE_KIND))) {
+ if ((u & BASE_KIND) == OBJECT) {
+ // if t is also a reference type, and if u and t have the
+ // same dimension merge(u,t) = dim(t) | common parent of the
+ // element types of u and t
+ v = (t & DIM) | OBJECT
+ | cw.getMergedType(t & BASE_VALUE, u & BASE_VALUE);
+ } else {
+ // if u and t are array types, but not with the same element
+ // type, merge(u,t)=java/lang/Object
+ v = OBJECT | cw.addType("java/lang/Object");
+ }
+ } else if ((t & BASE_KIND) == OBJECT || (t & DIM) != 0) {
+ // if t is any other reference or array type,
+ // merge(u,t)=java/lang/Object
+ v = OBJECT | cw.addType("java/lang/Object");
+ } else {
+ // if t is any other type, merge(u,t)=TOP
+ v = TOP;
+ }
+ } else if (u == NULL) {
+ // if u is the NULL type, merge(u,t)=t,
+ // or TOP if t is not a reference type
+ v = (t & BASE_KIND) == OBJECT || (t & DIM) != 0 ? t : TOP;
+ } else {
+ // if u is any other type, merge(u,t)=TOP whatever t
+ v = TOP;
+ }
+ if (u != v) {
+ types[index] = v;
+ return true;
+ }
+ return false;
+ }
+}
diff --git a/src/asm/scala/tools/asm/Handle.java b/src/asm/scala/tools/asm/Handle.java
new file mode 100644
index 0000000000..be8f334192
--- /dev/null
+++ b/src/asm/scala/tools/asm/Handle.java
@@ -0,0 +1,159 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package scala.tools.asm;
+
+/**
+ * A reference to a field or a method.
+ *
+ * @author Remi Forax
+ * @author Eric Bruneton
+ */
+public final class Handle {
+
+ /**
+ * The kind of field or method designated by this Handle. Should be
+ * {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC},
+ * {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC},
+ * {@link Opcodes#H_INVOKEVIRTUAL}, {@link Opcodes#H_INVOKESTATIC},
+ * {@link Opcodes#H_INVOKESPECIAL}, {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ */
+ final int tag;
+
+ /**
+ * The internal name of the class that owns the field or method designated
+ * by this handle.
+ */
+ final String owner;
+
+ /**
+ * The name of the field or method designated by this handle.
+ */
+ final String name;
+
+ /**
+ * The descriptor of the field or method designated by this handle.
+ */
+ final String desc;
+
+ /**
+ * Constructs a new field or method handle.
+ *
+ * @param tag the kind of field or method designated by this Handle. Must be
+ * {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC},
+ * {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC},
+ * {@link Opcodes#H_INVOKEVIRTUAL}, {@link Opcodes#H_INVOKESTATIC},
+ * {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner the internal name of the class that owns the field or method
+ * designated by this handle.
+ * @param name the name of the field or method designated by this handle.
+ * @param desc the descriptor of the field or method designated by this
+ * handle.
+ */
+ public Handle(int tag, String owner, String name, String desc) {
+ this.tag = tag;
+ this.owner = owner;
+ this.name = name;
+ this.desc = desc;
+ }
+
+ /**
+ * Returns the kind of field or method designated by this handle.
+ *
+ * @return {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC},
+ * {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC},
+ * {@link Opcodes#H_INVOKEVIRTUAL}, {@link Opcodes#H_INVOKESTATIC},
+ * {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ */
+ public int getTag() {
+ return tag;
+ }
+
+ /**
+ * Returns the internal name of the class that owns the field or method
+ * designated by this handle.
+ *
+ * @return the internal name of the class that owns the field or method
+ * designated by this handle.
+ */
+ public String getOwner() {
+ return owner;
+ }
+
+ /**
+ * Returns the name of the field or method designated by this handle.
+ *
+ * @return the name of the field or method designated by this handle.
+ */
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * Returns the descriptor of the field or method designated by this handle.
+ *
+ * @return the descriptor of the field or method designated by this handle.
+ */
+ public String getDesc() {
+ return desc;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof Handle)) {
+ return false;
+ }
+ Handle h = (Handle) obj;
+ return tag == h.tag && owner.equals(h.owner)
+ && name.equals(h.name) && desc.equals(h.desc);
+ }
+
+ @Override
+ public int hashCode() {
+ return tag + owner.hashCode() * name.hashCode() * desc.hashCode();
+ }
+
+ /**
+ * Returns the textual representation of this handle. The textual
+ * representation is: <pre>owner '.' name desc ' ' '(' tag ')'</pre>. As
+ * this format is unambiguous, it can be parsed if necessary.
+ */
+ @Override
+ public String toString() {
+ return owner + '.' + name + desc + " (" + tag + ')';
+ }
+}
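Handle instances are what a code generator passes to visitInvokeDynamicInsn (see MethodVisitor.java further down in this diff) to describe the bootstrap method of an invokedynamic call site. A minimal sketch of building and inspecting one, assuming only the Handle and Opcodes classes added by this diff; the owner class pkg/Bootstraps and its bootstrap method are hypothetical:

import scala.tools.asm.Handle;
import scala.tools.asm.Opcodes;

public class HandleSketch {
    public static void main(String[] args) {
        // Hypothetical bootstrap method: a static method on pkg/Bootstraps with
        // the usual (Lookup, String, MethodType) -> CallSite descriptor.
        Handle bsm = new Handle(
            Opcodes.H_INVOKESTATIC,
            "pkg/Bootstraps",
            "bootstrap",
            "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");

        // The accessors simply expose the four immutable fields.
        System.out.println(bsm.getTag() == Opcodes.H_INVOKESTATIC); // true
        System.out.println(bsm.getOwner());                         // pkg/Bootstraps
        System.out.println(bsm);           // owner '.' name desc followed by the tag
    }
}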
diff --git a/src/asm/scala/tools/asm/Handler.java b/src/asm/scala/tools/asm/Handler.java
new file mode 100644
index 0000000000..9e92bb98be
--- /dev/null
+++ b/src/asm/scala/tools/asm/Handler.java
@@ -0,0 +1,118 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * Information about an exception handler block.
+ *
+ * @author Eric Bruneton
+ */
+class Handler {
+
+ /**
+ * Beginning of the exception handler's scope (inclusive).
+ */
+ Label start;
+
+ /**
+ * End of the exception handler's scope (exclusive).
+ */
+ Label end;
+
+ /**
+ * Beginning of the exception handler's code.
+ */
+ Label handler;
+
+ /**
+ * Internal name of the type of exceptions handled by this handler, or
+ * <tt>null</tt> to catch any exceptions.
+ */
+ String desc;
+
+ /**
+ * Constant pool index of the internal name of the type of exceptions
+ * handled by this handler, or 0 to catch any exceptions.
+ */
+ int type;
+
+ /**
+ * Next exception handler block info.
+ */
+ Handler next;
+
+ /**
+ * Removes the range between start and end from the given exception
+ * handlers.
+ *
+ * @param h an exception handler list.
+ * @param start the start of the range to be removed.
+ * @param end the end of the range to be removed. May be <tt>null</tt>.
+ * @return the exception handler list with the start-end range removed.
+ */
+ static Handler remove(Handler h, Label start, Label end) {
+ if (h == null) {
+ return null;
+ } else {
+ h.next = remove(h.next, start, end);
+ }
+ int hstart = h.start.position;
+ int hend = h.end.position;
+ int s = start.position;
+ int e = end == null ? Integer.MAX_VALUE : end.position;
+ // if [hstart,hend[ and [s,e[ intervals intersect...
+ if (s < hend && e > hstart) {
+ if (s <= hstart) {
+ if (e >= hend) {
+ // [hstart,hend[ fully included in [s,e[, h removed
+ h = h.next;
+ } else {
+ // [hstart,hend[ minus [s,e[ = [e,hend[
+ h.start = end;
+ }
+ } else if (e >= hend) {
+ // [hstart,hend[ minus [s,e[ = [hstart,s[
+ h.end = start;
+ } else {
+ // [hstart,hend[ minus [s,e[ = [hstart,s[ + [e,hend[
+ Handler g = new Handler();
+ g.start = end;
+ g.end = h.end;
+ g.handler = h.handler;
+ g.desc = h.desc;
+ g.type = h.type;
+ g.next = h.next;
+ h.end = start;
+ h.next = g;
+ }
+ }
+ return h;
+ }
+}
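Handler.remove performs interval subtraction on the half-open range [start,end[ protected by each handler, with four outcomes: no overlap, complete removal, trimming one end, or splitting the handler in two. Since Handler is package private, the following standalone sketch mirrors that case analysis on plain int ranges; it is purely illustrative and shares no code with the class above:

public class RangeSubtractSketch {
    // Subtracts [s,e[ from [hstart,hend[ and returns the surviving pieces,
    // mirroring the branches of Handler.remove.
    static int[][] subtract(int hstart, int hend, int s, int e) {
        if (s >= hend || e <= hstart) {
            return new int[][] { { hstart, hend } };       // intervals do not intersect
        }
        if (s <= hstart && e >= hend) {
            return new int[0][];                           // handler range fully removed
        }
        if (s <= hstart) {
            return new int[][] { { e, hend } };            // left part removed
        }
        if (e >= hend) {
            return new int[][] { { hstart, s } };          // right part removed
        }
        return new int[][] { { hstart, s }, { e, hend } }; // middle removed: split in two
    }

    public static void main(String[] args) {
        // [10,50[ minus [20,30[ leaves [10,20[ and [30,50[, the split case above.
        for (int[] r : subtract(10, 50, 20, 30)) {
            System.out.println(r[0] + ".." + r[1]);
        }
    }
}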
diff --git a/src/asm/scala/tools/asm/Item.java b/src/asm/scala/tools/asm/Item.java
new file mode 100644
index 0000000000..021a0b11d3
--- /dev/null
+++ b/src/asm/scala/tools/asm/Item.java
@@ -0,0 +1,297 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A constant pool item. Constant pool items can be created with the 'newXXX'
+ * methods in the {@link ClassWriter} class.
+ *
+ * @author Eric Bruneton
+ */
+final class Item {
+
+ /**
+ * Index of this item in the constant pool.
+ */
+ int index;
+
+ /**
+ * Type of this constant pool item. A single class is used to represent all
+ * constant pool item types, in order to minimize the bytecode size of this
+ * package. The value of this field is one of {@link ClassWriter#INT},
+ * {@link ClassWriter#LONG}, {@link ClassWriter#FLOAT},
+ * {@link ClassWriter#DOUBLE}, {@link ClassWriter#UTF8},
+ * {@link ClassWriter#STR}, {@link ClassWriter#CLASS},
+ * {@link ClassWriter#NAME_TYPE}, {@link ClassWriter#FIELD},
+ * {@link ClassWriter#METH}, {@link ClassWriter#IMETH},
+ * {@link ClassWriter#MTYPE}, {@link ClassWriter#INDY}.
+ *
+ * The 9 variations of the MethodHandle constant are stored using a range
+ * of 9 values, from {@link ClassWriter#HANDLE_BASE} + 1 to
+ * {@link ClassWriter#HANDLE_BASE} + 9.
+ *
+ * Special Item types are used for Items that are stored in the ClassWriter
+ * {@link ClassWriter#typeTable}, instead of the constant pool, in order to
+ * avoid clashes with normal constant pool items in the ClassWriter constant
+ * pool's hash table. These special item types are
+ * {@link ClassWriter#TYPE_NORMAL}, {@link ClassWriter#TYPE_UNINIT} and
+ * {@link ClassWriter#TYPE_MERGED}.
+ */
+ int type;
+
+ /**
+ * Value of this item, for an integer item.
+ */
+ int intVal;
+
+ /**
+ * Value of this item, for a long item.
+ */
+ long longVal;
+
+ /**
+ * First part of the value of this item, for items that do not hold a
+ * primitive value.
+ */
+ String strVal1;
+
+ /**
+ * Second part of the value of this item, for items that do not hold a
+ * primitive value.
+ */
+ String strVal2;
+
+ /**
+ * Third part of the value of this item, for items that do not hold a
+ * primitive value.
+ */
+ String strVal3;
+
+ /**
+ * The hash code value of this constant pool item.
+ */
+ int hashCode;
+
+ /**
+ * Link to another constant pool item, used for collision lists in the
+ * constant pool's hash table.
+ */
+ Item next;
+
+ /**
+ * Constructs an uninitialized {@link Item}.
+ */
+ Item() {
+ }
+
+ /**
+ * Constructs an uninitialized {@link Item} for the constant pool element at
+ * the given position.
+ *
+ * @param index index of the item to be constructed.
+ */
+ Item(final int index) {
+ this.index = index;
+ }
+
+ /**
+ * Constructs a copy of the given item.
+ *
+ * @param index index of the item to be constructed.
+ * @param i the item that must be copied into the item to be constructed.
+ */
+ Item(final int index, final Item i) {
+ this.index = index;
+ type = i.type;
+ intVal = i.intVal;
+ longVal = i.longVal;
+ strVal1 = i.strVal1;
+ strVal2 = i.strVal2;
+ strVal3 = i.strVal3;
+ hashCode = i.hashCode;
+ }
+
+ /**
+ * Sets this item to an integer item.
+ *
+ * @param intVal the value of this item.
+ */
+ void set(final int intVal) {
+ this.type = ClassWriter.INT;
+ this.intVal = intVal;
+ this.hashCode = 0x7FFFFFFF & (type + intVal);
+ }
+
+ /**
+ * Sets this item to a long item.
+ *
+ * @param longVal the value of this item.
+ */
+ void set(final long longVal) {
+ this.type = ClassWriter.LONG;
+ this.longVal = longVal;
+ this.hashCode = 0x7FFFFFFF & (type + (int) longVal);
+ }
+
+ /**
+ * Sets this item to a float item.
+ *
+ * @param floatVal the value of this item.
+ */
+ void set(final float floatVal) {
+ this.type = ClassWriter.FLOAT;
+ this.intVal = Float.floatToRawIntBits(floatVal);
+ this.hashCode = 0x7FFFFFFF & (type + (int) floatVal);
+ }
+
+ /**
+ * Sets this item to a double item.
+ *
+ * @param doubleVal the value of this item.
+ */
+ void set(final double doubleVal) {
+ this.type = ClassWriter.DOUBLE;
+ this.longVal = Double.doubleToRawLongBits(doubleVal);
+ this.hashCode = 0x7FFFFFFF & (type + (int) doubleVal);
+ }
+
+ /**
+ * Sets this item to an item that does not hold a primitive value.
+ *
+ * @param type the type of this item.
+ * @param strVal1 first part of the value of this item.
+ * @param strVal2 second part of the value of this item.
+ * @param strVal3 third part of the value of this item.
+ */
+ void set(
+ final int type,
+ final String strVal1,
+ final String strVal2,
+ final String strVal3)
+ {
+ this.type = type;
+ this.strVal1 = strVal1;
+ this.strVal2 = strVal2;
+ this.strVal3 = strVal3;
+ switch (type) {
+ case ClassWriter.UTF8:
+ case ClassWriter.STR:
+ case ClassWriter.CLASS:
+ case ClassWriter.MTYPE:
+ case ClassWriter.TYPE_NORMAL:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
+ return;
+ case ClassWriter.NAME_TYPE:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+ * strVal2.hashCode());
+ return;
+ // ClassWriter.FIELD:
+ // ClassWriter.METH:
+ // ClassWriter.IMETH:
+ // ClassWriter.HANDLE_BASE + 1..9
+ default:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+ * strVal2.hashCode() * strVal3.hashCode());
+ }
+ }
+
+ /**
+ * Sets the item to an InvokeDynamic item.
+ *
+ * @param name invokedynamic's name.
+ * @param desc invokedynamic's desc.
+ * @param bsmIndex zero-based index into the BootstrapMethods class attribute.
+ */
+ void set(String name, String desc, int bsmIndex) {
+ this.type = ClassWriter.INDY;
+ this.longVal = bsmIndex;
+ this.strVal1 = name;
+ this.strVal2 = desc;
+ this.hashCode = 0x7FFFFFFF & (ClassWriter.INDY + bsmIndex
+ * strVal1.hashCode() * strVal2.hashCode());
+ }
+
+ /**
+ * Sets the item to a BootstrapMethod item.
+ *
+ * @param position byte offset in the BootstrapMethods class attribute.
+ * @param hashCode hashcode of the item. This hashcode is computed from
+ * the hashcode of the bootstrap method and the hashcodes of
+ * all bootstrap arguments.
+ */
+ void set(int position, int hashCode) {
+ this.type = ClassWriter.BSM;
+ this.intVal = position;
+ this.hashCode = hashCode;
+ }
+
+ /**
+ * Indicates if the given item is equal to this one. <i>This method assumes
+ * that the two items have the same {@link #type}</i>.
+ *
+ * @param i the item to be compared to this one. Both items must have the
+ * same {@link #type}.
+ * @return <tt>true</tt> if the given item is equal to this one,
+ * <tt>false</tt> otherwise.
+ */
+ boolean isEqualTo(final Item i) {
+ switch (type) {
+ case ClassWriter.UTF8:
+ case ClassWriter.STR:
+ case ClassWriter.CLASS:
+ case ClassWriter.MTYPE:
+ case ClassWriter.TYPE_NORMAL:
+ return i.strVal1.equals(strVal1);
+ case ClassWriter.TYPE_MERGED:
+ case ClassWriter.LONG:
+ case ClassWriter.DOUBLE:
+ return i.longVal == longVal;
+ case ClassWriter.INT:
+ case ClassWriter.FLOAT:
+ return i.intVal == intVal;
+ case ClassWriter.TYPE_UNINIT:
+ return i.intVal == intVal && i.strVal1.equals(strVal1);
+ case ClassWriter.NAME_TYPE:
+ return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
+ case ClassWriter.INDY:
+ return i.longVal == longVal && i.strVal1.equals(strVal1)
+ && i.strVal2.equals(strVal2);
+
+ // case ClassWriter.FIELD:
+ // case ClassWriter.METH:
+ // case ClassWriter.IMETH:
+ // case ClassWriter.HANDLE_BASE + 1..9
+ default:
+ return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2)
+ && i.strVal3.equals(strVal3);
+ }
+ }
+
+}
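Each set(...) overload above masks its hash with 0x7FFFFFFF, clearing the sign bit so the value can be used directly as a non-negative index into the constant pool's hash table. A tiny standalone sketch of that convention; the type tag 3 used below is an arbitrary stand-in, not the real ClassWriter.INT constant:

public class ItemHashSketch {
    static int itemHash(int type, int intVal) {
        // Same masking trick as Item.set(int): the addition may overflow into a
        // negative int, but clearing the sign bit keeps the result >= 0.
        return 0x7FFFFFFF & (type + intVal);
    }

    public static void main(String[] args) {
        int hash = itemHash(3, Integer.MIN_VALUE + 5); // would be negative unmasked
        int bucket = hash % 256;                       // usable as a table index
        System.out.println(hash >= 0);                 // true
        System.out.println(bucket >= 0);               // true
    }
}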
diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java
new file mode 100644
index 0000000000..712c7f251f
--- /dev/null
+++ b/src/asm/scala/tools/asm/Label.java
@@ -0,0 +1,555 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A label represents a position in the bytecode of a method. Labels are used
+ * for jump, goto, and switch instructions, and for try catch blocks. A label
+ * designates the <i>instruction</i> that is just after it. Note however that
+ * there can be other elements between a label and the instruction it
+ * designates (such as other labels, stack map frames, line numbers, etc.).
+ *
+ * @author Eric Bruneton
+ */
+public class Label {
+
+ /**
+ * Indicates if this label is only used for debug attributes. Such a label
+ * is not the start of a basic block, the target of a jump instruction, or
+ * an exception handler. It can be safely ignored in control flow graph
+ * analysis algorithms (for optimization purposes).
+ */
+ static final int DEBUG = 1;
+
+ /**
+ * Indicates if the position of this label is known.
+ */
+ static final int RESOLVED = 2;
+
+ /**
+ * Indicates if this label has been updated, after instruction resizing.
+ */
+ static final int RESIZED = 4;
+
+ /**
+ * Indicates if this basic block has been pushed in the basic block stack.
+ * See {@link MethodWriter#visitMaxs visitMaxs}.
+ */
+ static final int PUSHED = 8;
+
+ /**
+ * Indicates if this label is the target of a jump instruction, or the start
+ * of an exception handler.
+ */
+ static final int TARGET = 16;
+
+ /**
+ * Indicates if a stack map frame must be stored for this label.
+ */
+ static final int STORE = 32;
+
+ /**
+ * Indicates if this label corresponds to a reachable basic block.
+ */
+ static final int REACHABLE = 64;
+
+ /**
+ * Indicates if this basic block ends with a JSR instruction.
+ */
+ static final int JSR = 128;
+
+ /**
+ * Indicates if this basic block ends with a RET instruction.
+ */
+ static final int RET = 256;
+
+ /**
+ * Indicates if this basic block is the start of a subroutine.
+ */
+ static final int SUBROUTINE = 512;
+
+ /**
+ * Indicates if this subroutine basic block has been visited by a
+ * visitSubroutine(null, ...) call.
+ */
+ static final int VISITED = 1024;
+
+ /**
+ * Indicates if this subroutine basic block has been visited by a
+ * visitSubroutine(!null, ...) call.
+ */
+ static final int VISITED2 = 2048;
+
+ /**
+ * Field used to associate user information with a label. Warning: this field
+ * is used by the ASM tree package. In order to use it with the ASM tree
+ * package you must override the
+ * {@link scala.tools.asm.tree.MethodNode#getLabelNode} method.
+ */
+ public Object info;
+
+ /**
+ * Flags that indicate the status of this label.
+ *
+ * @see #DEBUG
+ * @see #RESOLVED
+ * @see #RESIZED
+ * @see #PUSHED
+ * @see #TARGET
+ * @see #STORE
+ * @see #REACHABLE
+ * @see #JSR
+ * @see #RET
+ */
+ int status;
+
+ /**
+ * The line number corresponding to this label, if known.
+ */
+ int line;
+
+ /**
+ * The position of this label in the code, if known.
+ */
+ int position;
+
+ /**
+ * Number of forward references to this label, times two.
+ */
+ private int referenceCount;
+
+ /**
+ * Information about forward references. Each forward reference is
+ * described by two consecutive integers in this array: the first one is the
+ * position of the first byte of the bytecode instruction that contains the
+ * forward reference, while the second is the position of the first byte of
+ * the forward reference itself. In fact the sign of the first integer
+ * indicates if this reference uses 2 or 4 bytes, and its absolute value
+ * gives the position of the bytecode instruction. This array is also used
+ * as a bitset to store the subroutines to which a basic block belongs. This
+ * information is needed in {@link MethodWriter#visitMaxs}, after all
+ * forward references have been resolved. Hence the same array can be used
+ * for both purposes without problems.
+ */
+ private int[] srcAndRefPositions;
+
+ // ------------------------------------------------------------------------
+
+ /*
+ * Fields for the control flow and data flow graph analysis algorithms (used
+ * to compute the maximum stack size or the stack map frames). A control
+ * flow graph contains one node per "basic block", and one edge per "jump"
+ * from one basic block to another. Each node (i.e., each basic block) is
+ * represented by the Label object that corresponds to the first instruction
+ * of this basic block. Each node also stores the list of its successors in
+ * the graph, as a linked list of Edge objects.
+ *
+ * The control flow analysis algorithms used to compute the maximum stack
+ * size or the stack map frames are similar and use two steps. The first
+ * step, during the visit of each instruction, builds information about the
+ * state of the local variables and the operand stack at the end of each
+ * basic block, called the "output frame", <i>relatively</i> to the frame
+ * state at the beginning of the basic block, which is called the "input
+ * frame", and which is <i>unknown</i> during this step. The second step,
+ * in {@link MethodWriter#visitMaxs}, is a fix point algorithm that
+ * computes information about the input frame of each basic block, from the
+ * input state of the first basic block (known from the method signature),
+ * and by using the previously computed relative output frames.
+ *
+ * The algorithm used to compute the maximum stack size only computes the
+ * relative output and absolute input stack heights, while the algorithm
+ * used to compute stack map frames computes relative output frames and
+ * absolute input frames.
+ */
+
+ /**
+ * Start of the output stack relatively to the input stack. The exact
+ * semantics of this field depends on the algorithm that is used.
+ *
+ * When only the maximum stack size is computed, this field is the number of
+ * elements in the input stack.
+ *
+ * When the stack map frames are completely computed, this field is the
+ * offset of the first output stack element relatively to the top of the
+ * input stack. This offset is always negative or zero. A zero offset means
+ * that the output stack must be appended to the input stack. A -n offset
+ * means that the first n output stack elements must replace the top n input
+ * stack elements, and that the other elements must be appended to the input
+ * stack.
+ */
+ int inputStackTop;
+
+ /**
+ * Maximum height reached by the output stack, relatively to the top of the
+ * input stack. This maximum is always positive or zero.
+ */
+ int outputStackMax;
+
+ /**
+ * Information about the input and output stack map frames of this basic
+ * block. This field is only used when the {@link ClassWriter#COMPUTE_FRAMES}
+ * option is used.
+ */
+ Frame frame;
+
+ /**
+ * The successor of this label, in the order labels are visited. This linked
+ * list does not include labels used for debug info only. If the
+ * {@link ClassWriter#COMPUTE_FRAMES} option is used then, in addition, it
+ * does not contain successive labels that denote the same bytecode position
+ * (in this case only the first label appears in this list).
+ */
+ Label successor;
+
+ /**
+ * The successors of this node in the control flow graph. These successors
+ * are stored in a linked list of {@link Edge Edge} objects, linked to each
+ * other by their {@link Edge#next} field.
+ */
+ Edge successors;
+
+ /**
+ * The next basic block in the basic block stack. This stack is used in the
+ * main loop of the fix point algorithm used in the second step of the
+ * control flow analysis algorithms. It is also used in
+ * {@link #visitSubroutine} to avoid using a recursive method.
+ *
+ * @see MethodWriter#visitMaxs
+ */
+ Label next;
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new label.
+ */
+ public Label() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Methods to compute offsets and to manage forward references
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the offset corresponding to this label. This offset is computed
+ * from the start of the method's bytecode. <i>This method is intended for
+ * {@link Attribute} subclasses, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @return the offset corresponding to this label.
+ * @throws IllegalStateException if this label is not resolved yet.
+ */
+ public int getOffset() {
+ if ((status & RESOLVED) == 0) {
+ throw new IllegalStateException("Label offset position has not been resolved yet");
+ }
+ return position;
+ }
+
+ /**
+ * Puts a reference to this label in the bytecode of a method. If the
+ * position of the label is known, the offset is computed and written
+ * directly. Otherwise, a placeholder offset is written and a new forward reference
+ * is declared for this label.
+ *
+ * @param owner the code writer that calls this method.
+ * @param out the bytecode of the method.
+ * @param source the position of first byte of the bytecode instruction that
+ * contains this label.
+ * @param wideOffset <tt>true</tt> if the reference must be stored in 4
+ * bytes, or <tt>false</tt> if it must be stored with 2 bytes.
+ * @throws IllegalArgumentException if this label has not been created by
+ * the given code writer.
+ */
+ void put(
+ final MethodWriter owner,
+ final ByteVector out,
+ final int source,
+ final boolean wideOffset)
+ {
+ if ((status & RESOLVED) == 0) {
+ if (wideOffset) {
+ addReference(-1 - source, out.length);
+ out.putInt(-1);
+ } else {
+ addReference(source, out.length);
+ out.putShort(-1);
+ }
+ } else {
+ if (wideOffset) {
+ out.putInt(position - source);
+ } else {
+ out.putShort(position - source);
+ }
+ }
+ }
+
+ /**
+ * Adds a forward reference to this label. This method must be called only
+ * for a true forward reference, i.e. only if this label is not resolved
+ * yet. For backward references, the offset of the reference can be, and
+ * must be, computed and stored directly.
+ *
+ * @param sourcePosition the position of the referencing instruction. This
+ * position will be used to compute the offset of this forward
+ * reference.
+ * @param referencePosition the position where the offset for this forward
+ * reference must be stored.
+ */
+ private void addReference(
+ final int sourcePosition,
+ final int referencePosition)
+ {
+ if (srcAndRefPositions == null) {
+ srcAndRefPositions = new int[6];
+ }
+ if (referenceCount >= srcAndRefPositions.length) {
+ int[] a = new int[srcAndRefPositions.length + 6];
+ System.arraycopy(srcAndRefPositions,
+ 0,
+ a,
+ 0,
+ srcAndRefPositions.length);
+ srcAndRefPositions = a;
+ }
+ srcAndRefPositions[referenceCount++] = sourcePosition;
+ srcAndRefPositions[referenceCount++] = referencePosition;
+ }
+
+ /**
+ * Resolves all forward references to this label. This method must be called
+ * when this label is added to the bytecode of the method, i.e. when its
+ * position becomes known. This method fills in the blanks that were left
+ * in the bytecode by each forward reference previously added to this label.
+ *
+ * @param owner the code writer that calls this method.
+ * @param position the position of this label in the bytecode.
+ * @param data the bytecode of the method.
+ * @return <tt>true</tt> if a blank that was left for this label was too
+ * small to store the offset. In such a case the corresponding jump
+ * instruction is replaced with a pseudo instruction (using unused
+ * opcodes) using an unsigned two-byte offset. These pseudo
+ * instructions will need to be replaced with true instructions with
+ * wider offsets (4 bytes instead of 2). This is done in
+ * {@link MethodWriter#resizeInstructions}.
+ * @throws IllegalArgumentException if this label has already been resolved,
+ * or if it has not been created by the given code writer.
+ */
+ boolean resolve(
+ final MethodWriter owner,
+ final int position,
+ final byte[] data)
+ {
+ boolean needUpdate = false;
+ this.status |= RESOLVED;
+ this.position = position;
+ int i = 0;
+ while (i < referenceCount) {
+ int source = srcAndRefPositions[i++];
+ int reference = srcAndRefPositions[i++];
+ int offset;
+ if (source >= 0) {
+ offset = position - source;
+ if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE) {
+ /*
+ * changes the opcode of the jump instruction, in order to
+ * be able to find it later (see resizeInstructions in
+ * MethodWriter). These temporary opcodes are similar to
+ * jump instruction opcodes, except that the 2 bytes offset
+ * is unsigned (and can therefore represent values from 0 to
+ * 65535, which is sufficient since the size of a method is
+ * limited to 65535 bytes).
+ */
+ int opcode = data[reference - 1] & 0xFF;
+ if (opcode <= Opcodes.JSR) {
+ // changes IFEQ ... JSR to opcodes 202 to 217
+ data[reference - 1] = (byte) (opcode + 49);
+ } else {
+ // changes IFNULL and IFNONNULL to opcodes 218 and 219
+ data[reference - 1] = (byte) (opcode + 20);
+ }
+ needUpdate = true;
+ }
+ data[reference++] = (byte) (offset >>> 8);
+ data[reference] = (byte) offset;
+ } else {
+ offset = position + source + 1;
+ data[reference++] = (byte) (offset >>> 24);
+ data[reference++] = (byte) (offset >>> 16);
+ data[reference++] = (byte) (offset >>> 8);
+ data[reference] = (byte) offset;
+ }
+ }
+ return needUpdate;
+ }
+
+ /**
+ * Returns the first label of the series to which this label belongs. For an
+ * isolated label or for the first label in a series of successive labels,
+ * this method returns the label itself. For other labels it returns the
+ * first label of the series.
+ *
+ * @return the first label of the series to which this label belongs.
+ */
+ Label getFirst() {
+ return !ClassReader.FRAMES || frame == null ? this : frame.owner;
+ }
+
+ // ------------------------------------------------------------------------
+ // Methods related to subroutines
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns true if this basic block belongs to the given subroutine.
+ *
+ * @param id a subroutine id.
+ * @return true if this basic block belongs to the given subroutine.
+ */
+ boolean inSubroutine(final long id) {
+ if ((status & Label.VISITED) != 0) {
+ return (srcAndRefPositions[(int) (id >>> 32)] & (int) id) != 0;
+ }
+ return false;
+ }
+
+ /**
+ * Returns true if this basic block and the given one belong to a common
+ * subroutine.
+ *
+ * @param block another basic block.
+ * @return true if this basic block and the given one belong to a common
+ * subroutine.
+ */
+ boolean inSameSubroutine(final Label block) {
+ if ((status & VISITED) == 0 || (block.status & VISITED) == 0) {
+ return false;
+ }
+ for (int i = 0; i < srcAndRefPositions.length; ++i) {
+ if ((srcAndRefPositions[i] & block.srcAndRefPositions[i]) != 0) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Marks this basic block as belonging to the given subroutine.
+ *
+ * @param id a subroutine id.
+ * @param nbSubroutines the total number of subroutines in the method.
+ */
+ void addToSubroutine(final long id, final int nbSubroutines) {
+ if ((status & VISITED) == 0) {
+ status |= VISITED;
+ srcAndRefPositions = new int[(nbSubroutines - 1) / 32 + 1];
+ }
+ srcAndRefPositions[(int) (id >>> 32)] |= (int) id;
+ }
+
+ /**
+ * Finds the basic blocks that belong to a given subroutine, and marks these
+ * blocks as belonging to this subroutine. This method follows the control
+ * flow graph to find all the blocks that are reachable from the current
+ * block WITHOUT following any JSR target.
+ *
+ * @param JSR a JSR block that jumps to this subroutine. If this JSR is not
+ * null it is added to the successors of the RET blocks found in the
+ * subroutine.
+ * @param id the id of this subroutine.
+ * @param nbSubroutines the total number of subroutines in the method.
+ */
+ void visitSubroutine(final Label JSR, final long id, final int nbSubroutines)
+ {
+ // user managed stack of labels, to avoid using a recursive method
+ // (recursion can lead to stack overflow with very large methods)
+ Label stack = this;
+ while (stack != null) {
+ // removes a label l from the stack
+ Label l = stack;
+ stack = l.next;
+ l.next = null;
+
+ if (JSR != null) {
+ if ((l.status & VISITED2) != 0) {
+ continue;
+ }
+ l.status |= VISITED2;
+ // adds JSR to the successors of l, if it is a RET block
+ if ((l.status & RET) != 0) {
+ if (!l.inSameSubroutine(JSR)) {
+ Edge e = new Edge();
+ e.info = l.inputStackTop;
+ e.successor = JSR.successors.successor;
+ e.next = l.successors;
+ l.successors = e;
+ }
+ }
+ } else {
+ // if the l block already belongs to subroutine 'id', continue
+ if (l.inSubroutine(id)) {
+ continue;
+ }
+ // marks the l block as belonging to subroutine 'id'
+ l.addToSubroutine(id, nbSubroutines);
+ }
+ // pushes each successor of l on the stack, except JSR targets
+ Edge e = l.successors;
+ while (e != null) {
+ // if the l block is a JSR block, then 'l.successors.next' leads
+ // to the JSR target (see {@link #visitJumpInsn}) and must
+ // therefore not be followed
+ if ((l.status & Label.JSR) == 0 || e != l.successors.next) {
+ // pushes e.successor on the stack if it is not already added
+ if (e.successor.next == null) {
+ e.successor.next = stack;
+ stack = e.successor;
+ }
+ }
+ e = e.next;
+ }
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Overridden Object methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns a string representation of this label.
+ *
+ * @return a string representation of this label.
+ */
+ @Override
+ public String toString() {
+ return "L" + System.identityHashCode(this);
+ }
+}
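As the class documentation explains, a Label names a bytecode position and serves as the operand of jump and switch instructions; visiting the label later resolves any forward references to it. A minimal sketch of that usage through the MethodVisitor API declared later in this diff, assuming mv writes the body of a static (I)I method (the helper name emitIsZero is hypothetical):

import scala.tools.asm.Label;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

public class LabelSketch {
    // Emits the body of "static int isZero(int x) { return x == 0 ? 1 : 0; }".
    static void emitIsZero(MethodVisitor mv) {
        Label notZero = new Label();
        Label done = new Label();
        mv.visitCode();
        mv.visitVarInsn(Opcodes.ILOAD, 0);       // x, first slot of a static method
        mv.visitJumpInsn(Opcodes.IFNE, notZero); // forward reference, patched later
        mv.visitInsn(Opcodes.ICONST_1);
        mv.visitJumpInsn(Opcodes.GOTO, done);
        mv.visitLabel(notZero);                  // resolves the IFNE offset
        mv.visitInsn(Opcodes.ICONST_0);
        mv.visitLabel(done);                     // resolves the GOTO offset
        mv.visitInsn(Opcodes.IRETURN);
        mv.visitMaxs(1, 1);                      // recomputed if maxs/frames are computed
        mv.visitEnd();
    }
}

In practice mv would be obtained from a ClassWriter, and with COMPUTE_MAXS or COMPUTE_FRAMES enabled the visitMaxs arguments are recomputed anyway.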
diff --git a/src/asm/scala/tools/asm/MethodVisitor.java b/src/asm/scala/tools/asm/MethodVisitor.java
new file mode 100644
index 0000000000..a8a859a6a9
--- /dev/null
+++ b/src/asm/scala/tools/asm/MethodVisitor.java
@@ -0,0 +1,588 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A visitor to visit a Java method. The methods of this class must be
+ * called in the following order: [ <tt>visitAnnotationDefault</tt> ] (
+ * <tt>visitAnnotation</tt> | <tt>visitParameterAnnotation</tt> |
+ * <tt>visitAttribute</tt> )* [ <tt>visitCode</tt> ( <tt>visitFrame</tt> |
+ * <tt>visit<i>X</i>Insn</tt> | <tt>visitLabel</tt> | <tt>visitTryCatchBlock</tt> |
+ * <tt>visitLocalVariable</tt> | <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ]
+ * <tt>visitEnd</tt>. In addition, the <tt>visit<i>X</i>Insn</tt>
+ * and <tt>visitLabel</tt> methods must be called in the sequential order of
+ * the bytecode instructions of the visited code, <tt>visitTryCatchBlock</tt>
+ * must be called <i>before</i> the labels passed as arguments have been
+ * visited, and the <tt>visitLocalVariable</tt> and <tt>visitLineNumber</tt>
+ * methods must be called <i>after</i> the labels passed as arguments have been
+ * visited.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class MethodVisitor {
+
+ /**
+ * The ASM API version implemented by this visitor. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * The method visitor to which this visitor must delegate method calls. May
+ * be null.
+ */
+ protected MethodVisitor mv;
+
+ /**
+ * Constructs a new {@link MethodVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public MethodVisitor(final int api) {
+ this(api, null);
+ }
+
+ /**
+ * Constructs a new {@link MethodVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param mv the method visitor to which this visitor must delegate method
+ * calls. May be null.
+ */
+ public MethodVisitor(final int api, final MethodVisitor mv) {
+ /*if (api != Opcodes.ASM4) {
+ throw new IllegalArgumentException();
+ }*/
+ this.api = api;
+ this.mv = mv;
+ }
+
+ // -------------------------------------------------------------------------
+ // Annotations and non standard attributes
+ // -------------------------------------------------------------------------
+
+ /**
+ * Visits the default value of this annotation interface method.
+ *
+ * @return a visitor to visit the actual default value of this
+ * annotation interface method, or <tt>null</tt> if this visitor
+ * is not interested in visiting this default value. The 'name'
+ * parameters passed to the methods of this annotation visitor are
+ * ignored. Moreover, exactly one visit method must be called on this
+ * annotation visitor, followed by visitEnd.
+ */
+ public AnnotationVisitor visitAnnotationDefault() {
+ if (mv != null) {
+ return mv.visitAnnotationDefault();
+ }
+ return null;
+ }
+
+ /**
+ * Visits an annotation of this method.
+ *
+ * @param desc the class descriptor of the annotation class.
+ * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
+ if (mv != null) {
+ return mv.visitAnnotation(desc, visible);
+ }
+ return null;
+ }
+
+ /**
+ * Visits an annotation of a parameter of this method.
+ *
+ * @param parameter the parameter index.
+ * @param desc the class descriptor of the annotation class.
+ * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitParameterAnnotation(
+ int parameter,
+ String desc,
+ boolean visible)
+ {
+ if (mv != null) {
+ return mv.visitParameterAnnotation(parameter, desc, visible);
+ }
+ return null;
+ }
+
+ /**
+ * Visits a non standard attribute of this method.
+ *
+ * @param attr an attribute.
+ */
+ public void visitAttribute(Attribute attr) {
+ if (mv != null) {
+ mv.visitAttribute(attr);
+ }
+ }
+
+ /**
+ * Starts the visit of the method's code, if any (i.e. for a non-abstract method).
+ */
+ public void visitCode() {
+ if (mv != null) {
+ mv.visitCode();
+ }
+ }
+
+ /**
+ * Visits the current state of the local variables and operand stack
+ * elements. This method must(*) be called <i>just before</i> any
+ * instruction <b>i</b> that follows an unconditional branch instruction
+ * such as GOTO or THROW, that is the target of a jump instruction, or that
+ * starts an exception handler block. The visited types must describe the
+ * values of the local variables and of the operand stack elements <i>just
+ * before</i> <b>i</b> is executed. <br> <br> (*) this is mandatory only
+ * for classes whose version is greater than or equal to
+ * {@link Opcodes#V1_6 V1_6}. <br> <br> Packed frames are basically
+ * "deltas" from the state of the previous frame (very first frame is
+ * implicitly defined by the method's parameters and access flags): <ul>
+ * <li>{@link Opcodes#F_SAME} representing frame with exactly the same
+ * locals as the previous frame and with the empty stack.</li> <li>{@link Opcodes#F_SAME1}
+ * representing frame with exactly the same locals as the previous frame and
+ * with single value on the stack (<code>nStack</code> is 1 and
+ * <code>stack[0]</code> contains value for the type of the stack item).</li>
+ * <li>{@link Opcodes#F_APPEND} representing frame with current locals are
+ * the same as the locals in the previous frame, except that additional
+ * locals are defined (<code>nLocal</code> is 1, 2 or 3 and
+ * <code>local</code> elements contains values representing added types).</li>
+ * <li>{@link Opcodes#F_CHOP} representing frame with current locals are
+ * the same as the locals in the previous frame, except that the last 1-3
+ * locals are absent and with the empty stack (<code>nLocals</code> is 1,
+ * 2 or 3). </li> <li>{@link Opcodes#F_FULL} representing complete frame
+ * data.</li> </li> </ul>
+ *
+ * @param type the type of this stack map frame. Must be
+ * {@link Opcodes#F_NEW} for expanded frames, or
+ * {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
+ * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
+ * {@link Opcodes#F_SAME1} for compressed
+ * frames.
+ * @param nLocal the number of local variables in the visited frame.
+ * @param local the local variable types in this frame. This array must not
+ * be modified. Primitive types are represented by
+ * {@link Opcodes#TOP}, {@link Opcodes#INTEGER},
+ * {@link Opcodes#FLOAT}, {@link Opcodes#LONG},
+ * {@link Opcodes#DOUBLE}, {@link Opcodes#NULL} or
+ * {@link Opcodes#UNINITIALIZED_THIS} (long and double are
+ * represented by a single element). Reference types are represented
+ * by String objects (representing internal names), and uninitialized
+ * types by Label objects (this label designates the NEW instruction
+ * that created this uninitialized value).
+ * @param nStack the number of operand stack elements in the visited frame.
+ * @param stack the operand stack types in this frame. This array must not
+ * be modified. Its content has the same format as the "local" array.
+ * @throws IllegalStateException if a frame is visited just after another
+ * one, without any instruction between the two (unless this frame
+ * is an {@link Opcodes#F_SAME} frame, in which case it is silently ignored).
+ */
+ public void visitFrame(
+ int type,
+ int nLocal,
+ Object[] local,
+ int nStack,
+ Object[] stack)
+ {
+ if (mv != null) {
+ mv.visitFrame(type, nLocal, local, nStack, stack);
+ }
+ }
+
+ // -------------------------------------------------------------------------
+ // Normal instructions
+ // -------------------------------------------------------------------------
+
+ /**
+ * Visits a zero operand instruction.
+ *
+ * @param opcode the opcode of the instruction to be visited. This opcode is
+ * either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, ICONST_2,
+ * ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, FCONST_0,
+ * FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD, FALOAD,
+ * DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE, FASTORE,
+ * DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2, DUP,
+ * DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD, FADD,
+ * DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV, LDIV,
+ * FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL,
+ * LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR,
+ * I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B,
+ * I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
+ * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
+ * MONITORENTER, or MONITOREXIT.
+ */
+ public void visitInsn(int opcode) {
+ if (mv != null) {
+ mv.visitInsn(opcode);
+ }
+ }
+
+ /**
+ * Visits an instruction with a single int operand.
+ *
+ * @param opcode the opcode of the instruction to be visited. This opcode is
+ * either BIPUSH, SIPUSH or NEWARRAY.
+ * @param operand the operand of the instruction to be visited.<br> When
+ * opcode is BIPUSH, operand value should be between Byte.MIN_VALUE
+ * and Byte.MAX_VALUE.<br> When opcode is SIPUSH, operand value
+ * should be between Short.MIN_VALUE and Short.MAX_VALUE.<br> When
+ * opcode is NEWARRAY, operand value should be one of
+ * {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR},
+ * {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE},
+ * {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT},
+ * {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}.
+ */
+ public void visitIntInsn(int opcode, int operand) {
+ if (mv != null) {
+ mv.visitIntInsn(opcode, operand);
+ }
+ }
+
+ /**
+ * Visits a local variable instruction. A local variable instruction is an
+ * instruction that loads or stores the value of a local variable.
+ *
+ * @param opcode the opcode of the local variable instruction to be visited.
+ * This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE,
+ * LSTORE, FSTORE, DSTORE, ASTORE or RET.
+ * @param var the operand of the instruction to be visited. This operand is
+ * the index of a local variable.
+ */
+ public void visitVarInsn(int opcode, int var) {
+ if (mv != null) {
+ mv.visitVarInsn(opcode, var);
+ }
+ }
+
+ /**
+ * Visits a type instruction. A type instruction is an instruction that
+ * takes the internal name of a class as parameter.
+ *
+ * @param opcode the opcode of the type instruction to be visited. This
+ * opcode is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param type the operand of the instruction to be visited. This operand
+ * must be the internal name of an object or array class (see {@link
+ * Type#getInternalName() getInternalName}).
+ */
+ public void visitTypeInsn(int opcode, String type) {
+ if (mv != null) {
+ mv.visitTypeInsn(opcode, type);
+ }
+ }
+
+ /**
+ * Visits a field instruction. A field instruction is an instruction that
+ * loads or stores the value of a field of an object.
+ *
+ * @param opcode the opcode of the type instruction to be visited. This
+ * opcode is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param owner the internal name of the field's owner class (see {@link
+ * Type#getInternalName() getInternalName}).
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link Type Type}).
+ */
+ public void visitFieldInsn(int opcode, String owner, String name, String desc) {
+ if (mv != null) {
+ mv.visitFieldInsn(opcode, owner, name, desc);
+ }
+ }
+
+ /**
+ * Visits a method instruction. A method instruction is an instruction that
+ * invokes a method.
+ *
+ * @param opcode the opcode of the type instruction to be visited. This
+ * opcode is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC
+ * or INVOKEINTERFACE.
+ * @param owner the internal name of the method's owner class (see {@link
+ * Type#getInternalName() getInternalName}).
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type Type}).
+ */
+ public void visitMethodInsn(int opcode, String owner, String name, String desc) {
+ if (mv != null) {
+ mv.visitMethodInsn(opcode, owner, name, desc);
+ }
+ }
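Because each visit method above forwards to the wrapped visitor only when mv is non-null, a subclass can override just the callbacks it cares about and inherit pass-through behaviour for the rest. A minimal adapter sketch; the class name and the call-counting behaviour are illustrative only:

import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

public class CallCountingVisitor extends MethodVisitor {
    int calls;

    public CallCountingVisitor(MethodVisitor next) {
        super(Opcodes.ASM4, next);  // everything not overridden is delegated to `next`
    }

    @Override
    public void visitMethodInsn(int opcode, String owner, String name, String desc) {
        calls++;                                           // extra behaviour
        super.visitMethodInsn(opcode, owner, name, desc);  // then forward unchanged
    }
}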
+
+ /**
+ * Visits an invokedynamic instruction.
+ *
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type Type}).
+ * @param bsm the bootstrap method.
+ * @param bsmArgs the bootstrap method constant arguments. Each argument
+ * must be an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double}, {@link String}, {@link Type} or {@link Handle}
+ * value. This method is allowed to modify the content of the array
+ * so a caller should expect that this array may change.
+ */
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
+ if (mv != null) {
+ mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ }
+ }
+
+ /**
+ * Visits a jump instruction. A jump instruction is an instruction that may
+ * jump to another instruction.
+ *
+ * @param opcode the opcode of the type instruction to be visited. This
+ * opcode is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+ * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
+ * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+ * @param label the operand of the instruction to be visited. This operand
+ * is a label that designates the instruction to which the jump
+ * instruction may jump.
+ */
+ public void visitJumpInsn(int opcode, Label label) {
+ if (mv != null) {
+ mv.visitJumpInsn(opcode, label);
+ }
+ }
+
+ /**
+ * Visits a label. A label designates the instruction that will be visited
+ * just after it.
+ *
+ * @param label a {@link Label Label} object.
+ */
+ public void visitLabel(Label label) {
+ if (mv != null) {
+ mv.visitLabel(label);
+ }
+ }
+
+ // -------------------------------------------------------------------------
+ // Special instructions
+ // -------------------------------------------------------------------------
+
+ /**
+ * Visits a LDC instruction. Note that new constant types may be added in
+ * future versions of the Java Virtual Machine. To easily detect new
+ * constant types, implementations of this method should check for
+ * unexpected constant types, like this:
+ * <pre>
+ * if (cst instanceof Integer) {
+ * // ...
+ * } else if (cst instanceof Float) {
+ * // ...
+ * } else if (cst instanceof Long) {
+ * // ...
+ * } else if (cst instanceof Double) {
+ * // ...
+ * } else if (cst instanceof String) {
+ * // ...
+ * } else if (cst instanceof Type) {
+ * int sort = ((Type) cst).getSort();
+ * if (sort == Type.OBJECT) {
+ * // ...
+ * } else if (sort == Type.ARRAY) {
+ * // ...
+ * } else if (sort == Type.METHOD) {
+ * // ...
+ * } else {
+ * // throw an exception
+ * }
+ * } else if (cst instanceof Handle) {
+ * // ...
+ * } else {
+ * // throw an exception
+ * }</pre>
+ *
+ * @param cst the constant to be loaded on the stack. This parameter must be
+ * a non-null {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double}, a {@link String}, a {@link Type} of OBJECT or ARRAY
+ * sort for <tt>.class</tt> constants (for classes whose version is
+ * 49.0 or higher), or a {@link Type} of METHOD sort or a {@link Handle}
+ * for MethodType and MethodHandle constants (for classes whose version
+ * is 51.0 or higher).
+ */
+ public void visitLdcInsn(Object cst) {
+ if (mv != null) {
+ mv.visitLdcInsn(cst);
+ }
+ }
+
+ /**
+ * Visits an IINC instruction.
+ *
+ * @param var index of the local variable to be incremented.
+ * @param increment amount to increment the local variable by.
+ */
+ public void visitIincInsn(int var, int increment) {
+ if (mv != null) {
+ mv.visitIincInsn(var, increment);
+ }
+ }
+
+ /**
+ * Visits a TABLESWITCH instruction.
+ *
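+ * For example, a switch on the keys 0, 1 and 2 could be visited as:
+ *
+ * <pre>
+ * Label dflt = new Label();
+ * Label[] cases = { new Label(), new Label(), new Label() };
+ * visitTableSwitchInsn(0, 2, dflt, cases);
+ * </pre>
+ *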
+ * @param min the minimum key value.
+ * @param max the maximum key value.
+ * @param dflt beginning of the default handler block.
+ * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
+ * the beginning of the handler block for the <tt>min + i</tt> key.
+ */
+ public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) {
+ if (mv != null) {
+ mv.visitTableSwitchInsn(min, max, dflt, labels);
+ }
+ }
+
+ /**
+ * Visits a LOOKUPSWITCH instruction.
+ *
+ * @param dflt beginning of the default handler block.
+ * @param keys the values of the keys.
+ * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
+ * the beginning of the handler block for the <tt>keys[i]</tt> key.
+ */
+ public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {
+ if (mv != null) {
+ mv.visitLookupSwitchInsn(dflt, keys, labels);
+ }
+ }
+
+ /**
+ * Visits a MULTIANEWARRAY instruction.
+ *
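+ * For example, once the two dimension sizes have been pushed on the stack,
+ * the allocation of a two dimensional <tt>int</tt> array could be visited as:
+ *
+ * <pre>
+ * visitMultiANewArrayInsn("[[I", 2);
+ * </pre>
+ *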
+ * @param desc an array type descriptor (see {@link Type Type}).
+ * @param dims number of dimensions of the array to allocate.
+ */
+ public void visitMultiANewArrayInsn(String desc, int dims) {
+ if (mv != null) {
+ mv.visitMultiANewArrayInsn(desc, dims);
+ }
+ }
+
+ // -------------------------------------------------------------------------
+ // Exceptions table entries, debug information, max stack and max locals
+ // -------------------------------------------------------------------------
+
+ /**
+ * Visits a try catch block.
+ *
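+ * For example, a handler for <tt>java.io.IOException</tt> could be visited
+ * as follows, before any of the three labels is visited:
+ *
+ * <pre>
+ * Label start = new Label(), end = new Label(), handler = new Label();
+ * visitTryCatchBlock(start, end, handler, "java/io/IOException");
+ * visitLabel(start);
+ * // ... protected instructions ...
+ * visitLabel(end);
+ * // ... instructions executed when no exception is thrown ...
+ * visitLabel(handler);
+ * // ... exception handler instructions ...
+ * </pre>
+ *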
+ * @param start beginning of the exception handler's scope (inclusive).
+ * @param end end of the exception handler's scope (exclusive).
+ * @param handler beginning of the exception handler's code.
+ * @param type internal name of the type of exceptions handled by the
+ * handler, or <tt>null</tt> to catch any exceptions (for "finally"
+ * blocks).
+ * @throws IllegalArgumentException if one of the labels has already been
+ * visited by this visitor (by the {@link #visitLabel visitLabel}
+ * method).
+ */
+ public void visitTryCatchBlock(Label start, Label end, Label handler, String type) {
+ if (mv != null) {
+ mv.visitTryCatchBlock(start, end, handler, type);
+ }
+ }
+
+ /**
+ * Visits a local variable declaration.
+ *
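+ * For example, an <tt>int</tt> local stored in slot 1 could be visited as
+ * follows, where <tt>start</tt> and <tt>end</tt> are labels that have
+ * already been visited (the name and index are arbitrary):
+ *
+ * <pre>
+ * visitLocalVariable("count", "I", null, start, end, 1);
+ * </pre>
+ *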
+ * @param name the name of a local variable.
+ * @param desc the type descriptor of this local variable.
+ * @param signature the type signature of this local variable. May be
+ * <tt>null</tt> if the local variable type does not use generic
+ * types.
+ * @param start the first instruction corresponding to the scope of this
+ * local variable (inclusive).
+ * @param end the last instruction corresponding to the scope of this local
+ * variable (exclusive).
+ * @param index the local variable's index.
+ * @throws IllegalArgumentException if one of the labels has not already
+ * been visited by this visitor (by the
+ * {@link #visitLabel visitLabel} method).
+ */
+ public void visitLocalVariable(
+ String name,
+ String desc,
+ String signature,
+ Label start,
+ Label end,
+ int index)
+ {
+ if (mv != null) {
+ mv.visitLocalVariable(name, desc, signature, start, end, index);
+ }
+ }
+
+ /**
+ * Visits a line number declaration.
+ *
+ * @param line a line number. This number refers to the source file from
+ * which the class was compiled.
+ * @param start the first instruction corresponding to this line number.
+ * @throws IllegalArgumentException if <tt>start</tt> has not already been
+ * visited by this visitor (by the {@link #visitLabel visitLabel}
+ * method).
+ */
+ public void visitLineNumber(int line, Label start) {
+ if (mv != null) {
+ mv.visitLineNumber(line, start);
+ }
+ }
+
+ /**
+ * Visits the maximum stack size and the maximum number of local variables
+ * of the method.
+ *
+ * @param maxStack maximum stack size of the method.
+ * @param maxLocals maximum number of local variables for the method.
+ */
+ public void visitMaxs(int maxStack, int maxLocals) {
+ if (mv != null) {
+ mv.visitMaxs(maxStack, maxLocals);
+ }
+ }
+
+ /**
+ * Visits the end of the method. This method, which is the last one to be
+ * called, is used to inform the visitor that all the annotations and
+ * attributes of the method have been visited.
+ */
+ public void visitEnd() {
+ if (mv != null) {
+ mv.visitEnd();
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/MethodWriter.java b/src/asm/scala/tools/asm/MethodWriter.java
new file mode 100644
index 0000000000..321bacb6fc
--- /dev/null
+++ b/src/asm/scala/tools/asm/MethodWriter.java
@@ -0,0 +1,2666 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A {@link MethodVisitor} that generates methods in bytecode form. Each visit
+ * method of this class appends the bytecode corresponding to the visited
+ * instruction to a byte vector, in the order these methods are called.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+class MethodWriter extends MethodVisitor {
+
+ /**
+ * Pseudo access flag used to denote constructors.
+ */
+ static final int ACC_CONSTRUCTOR = 262144;
+
+ /**
+ * Frame has exactly the same locals as the previous stack map frame and
+ * number of stack items is zero.
+ */
+ static final int SAME_FRAME = 0; // to 63 (0-3f)
+
+ /**
+ * Frame has exactly the same locals as the previous stack map frame and
+ * number of stack items is 1.
+ */
+ static final int SAME_LOCALS_1_STACK_ITEM_FRAME = 64; // to 127 (40-7f)
+
+ /**
+ * Reserved for future use
+ */
+ static final int RESERVED = 128;
+
+ /**
+ * Frame has exactly the same locals as the previous stack map frame and
+ * number of stack items is 1. Offset is bigger than 63.
+ */
+ static final int SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED = 247; // f7
+
+ /**
+ * Frame where current locals are the same as the locals in the previous
+ * frame, except that the k last locals are absent. The value of k is given
+ * by the formula 251-frame_type.
+ */
+ static final int CHOP_FRAME = 248; // to 250 (f8-fA)
+
+ /**
+ * Frame has exactly the same locals as the previous stack map frame and
+ * number of stack items is zero. Offset is bigger than 63.
+ */
+ static final int SAME_FRAME_EXTENDED = 251; // fb
+
+ /**
+ * Frame where current locals are the same as the locals in the previous
+ * frame, except that k additional locals are defined. The value of k is
+ * given by the formula frame_type-251.
+ */
+ static final int APPEND_FRAME = 252; // to 254 // fc-fe
+
+ /**
+ * Full frame
+ */
+ static final int FULL_FRAME = 255; // ff
+
+ /**
+ * Indicates that the stack map frames must be recomputed from scratch. In
+ * this case the maximum stack size and number of local variables are also
+ * recomputed from scratch.
+ *
+ * @see #compute
+ */
+ private static final int FRAMES = 0;
+
+ /**
+ * Indicates that the maximum stack size and number of local variables must
+ * be automatically computed.
+ *
+ * @see #compute
+ */
+ private static final int MAXS = 1;
+
+ /**
+ * Indicates that nothing must be automatically computed.
+ *
+ * @see #compute
+ */
+ private static final int NOTHING = 2;
+
+ /**
+ * The class writer to which this method must be added.
+ */
+ final ClassWriter cw;
+
+ /**
+ * Access flags of this method.
+ */
+ private int access;
+
+ /**
+ * The index of the constant pool item that contains the name of this
+ * method.
+ */
+ private final int name;
+
+ /**
+ * The index of the constant pool item that contains the descriptor of this
+ * method.
+ */
+ private final int desc;
+
+ /**
+ * The descriptor of this method.
+ */
+ private final String descriptor;
+
+ /**
+ * The signature of this method.
+ */
+ String signature;
+
+ /**
+ * If not zero, indicates that the code of this method must be copied from
+ * the ClassReader associated to this writer in <code>cw.cr</code>. More
+ * precisely, this field gives the index of the first byte to be copied from
+ * <code>cw.cr.b</code>.
+ */
+ int classReaderOffset;
+
+ /**
+ * If not zero, indicates that the code of this method must be copied from
+ * the ClassReader associated to this writer in <code>cw.cr</code>. More
+ * precisely, this field gives the number of bytes to be copied from
+ * <code>cw.cr.b</code>.
+ */
+ int classReaderLength;
+
+ /**
+ * Number of exceptions that can be thrown by this method.
+ */
+ int exceptionCount;
+
+ /**
+ * The exceptions that can be thrown by this method. More precisely, this
+ * array contains the indexes of the constant pool items that contain the
+ * internal names of these exception classes.
+ */
+ int[] exceptions;
+
+ /**
+ * The annotation default attribute of this method. May be <tt>null</tt>.
+ */
+ private ByteVector annd;
+
+ /**
+ * The runtime visible annotations of this method. May be <tt>null</tt>.
+ */
+ private AnnotationWriter anns;
+
+ /**
+ * The runtime invisible annotations of this method. May be <tt>null</tt>.
+ */
+ private AnnotationWriter ianns;
+
+ /**
+ * The runtime visible parameter annotations of this method. May be
+ * <tt>null</tt>.
+ */
+ private AnnotationWriter[] panns;
+
+ /**
+ * The runtime invisible parameter annotations of this method. May be
+ * <tt>null</tt>.
+ */
+ private AnnotationWriter[] ipanns;
+
+ /**
+ * The number of synthetic parameters of this method.
+ */
+ private int synthetics;
+
+ /**
+ * The non standard attributes of the method.
+ */
+ private Attribute attrs;
+
+ /**
+ * The bytecode of this method.
+ */
+ private ByteVector code = new ByteVector();
+
+ /**
+ * Maximum stack size of this method.
+ */
+ private int maxStack;
+
+ /**
+ * Maximum number of local variables for this method.
+ */
+ private int maxLocals;
+
+ /**
+ * Number of local variables in the current stack map frame.
+ */
+ private int currentLocals;
+
+ /**
+ * Number of stack map frames in the StackMapTable attribute.
+ */
+ private int frameCount;
+
+ /**
+ * The StackMapTable attribute.
+ */
+ private ByteVector stackMap;
+
+ /**
+ * The offset of the last frame that was written in the StackMapTable
+ * attribute.
+ */
+ private int previousFrameOffset;
+
+ /**
+ * The last frame that was written in the StackMapTable attribute.
+ *
+ * @see #frame
+ */
+ private int[] previousFrame;
+
+ /**
+ * Index of the next element to be added in {@link #frame}.
+ */
+ private int frameIndex;
+
+ /**
+ * The current stack map frame. The first element contains the offset of the
+ * instruction to which the frame corresponds, the second element is the
+ * number of locals and the third one is the number of stack elements. The
+ * local variables start at index 3 and are followed by the operand stack
+ * values. In summary frame[0] = offset, frame[1] = nLocal, frame[2] = nStack,
+ * and the types themselves start at frame[3]. All types are encoded as integers, with the
+ * same format as the one used in {@link Label}, but limited to BASE types.
+ */
+ private int[] frame;
+
+ /**
+ * Number of elements in the exception handler list.
+ */
+ private int handlerCount;
+
+ /**
+ * The first element in the exception handler list.
+ */
+ private Handler firstHandler;
+
+ /**
+ * The last element in the exception handler list.
+ */
+ private Handler lastHandler;
+
+ /**
+ * Number of entries in the LocalVariableTable attribute.
+ */
+ private int localVarCount;
+
+ /**
+ * The LocalVariableTable attribute.
+ */
+ private ByteVector localVar;
+
+ /**
+ * Number of entries in the LocalVariableTypeTable attribute.
+ */
+ private int localVarTypeCount;
+
+ /**
+ * The LocalVariableTypeTable attribute.
+ */
+ private ByteVector localVarType;
+
+ /**
+ * Number of entries in the LineNumberTable attribute.
+ */
+ private int lineNumberCount;
+
+ /**
+ * The LineNumberTable attribute.
+ */
+ private ByteVector lineNumber;
+
+ /**
+ * The non standard attributes of the method's code.
+ */
+ private Attribute cattrs;
+
+ /**
+ * Indicates if some jump instructions are too small and need to be resized.
+ */
+ private boolean resize;
+
+ /**
+ * The number of subroutines in this method.
+ */
+ private int subroutines;
+
+ // ------------------------------------------------------------------------
+
+ /*
+ * Fields for the control flow graph analysis algorithm (used to compute the
+ * maximum stack size). A control flow graph contains one node per "basic
+ * block", and one edge per "jump" from one basic block to another. Each
+ * node (i.e., each basic block) is represented by the Label object that
+ * corresponds to the first instruction of this basic block. Each node also
+ * stores the list of its successors in the graph, as a linked list of Edge
+ * objects.
+ */
+
+ /**
+ * Indicates what must be automatically computed.
+ *
+ * @see #FRAMES
+ * @see #MAXS
+ * @see #NOTHING
+ */
+ private final int compute;
+
+ /**
+ * A list of labels. This list is the list of basic blocks in the method,
+ * i.e. a list of Label objects linked to each other by their
+ * {@link Label#successor} field, in the order they are visited by
+ * {@link MethodVisitor#visitLabel}, and starting with the first basic block.
+ */
+ private Label labels;
+
+ /**
+ * The previous basic block.
+ */
+ private Label previousBlock;
+
+ /**
+ * The current basic block.
+ */
+ private Label currentBlock;
+
+ /**
+ * The (relative) stack size after the last visited instruction. This size
+ * is relative to the beginning of the current basic block, i.e., the true
+ * stack size after the last visited instruction is equal to the
+ * {@link Label#inputStackTop beginStackSize} of the current basic block
+ * plus <tt>stackSize</tt>.
+ */
+ private int stackSize;
+
+ /**
+ * The (relative) maximum stack size after the last visited instruction.
+ * This size is relative to the beginning of the current basic block, i.e.,
+ * the true maximum stack size after the last visited instruction is equal
+ * to the {@link Label#inputStackTop beginStackSize} of the current basic
+ * block plus <tt>maxStackSize</tt>.
+ */
+ private int maxStackSize;
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new {@link MethodWriter}.
+ *
+ * @param cw the class writer in which the method must be added.
+ * @param access the method's access flags (see {@link Opcodes}).
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type}).
+ * @param signature the method's signature. May be <tt>null</tt>.
+ * @param exceptions the internal names of the method's exceptions. May be
+ * <tt>null</tt>.
+ * @param computeMaxs <tt>true</tt> if the maximum stack size and number
+ * of local variables must be automatically computed.
+ * @param computeFrames <tt>true</tt> if the stack map tables must be
+ * recomputed from scratch.
+ */
+ MethodWriter(
+ final ClassWriter cw,
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions,
+ final boolean computeMaxs,
+ final boolean computeFrames)
+ {
+ super(Opcodes.ASM4);
+ if (cw.firstMethod == null) {
+ cw.firstMethod = this;
+ } else {
+ cw.lastMethod.mv = this;
+ }
+ cw.lastMethod = this;
+ this.cw = cw;
+ this.access = access;
+ this.name = cw.newUTF8(name);
+ this.desc = cw.newUTF8(desc);
+ this.descriptor = desc;
+ if (ClassReader.SIGNATURES) {
+ this.signature = signature;
+ }
+ if (exceptions != null && exceptions.length > 0) {
+ exceptionCount = exceptions.length;
+ this.exceptions = new int[exceptionCount];
+ for (int i = 0; i < exceptionCount; ++i) {
+ this.exceptions[i] = cw.newClass(exceptions[i]);
+ }
+ }
+ this.compute = computeFrames ? FRAMES : (computeMaxs ? MAXS : NOTHING);
+ if (computeMaxs || computeFrames) {
+ if (computeFrames && "<init>".equals(name)) {
+ this.access |= ACC_CONSTRUCTOR;
+ }
+ // updates maxLocals
+ int size = Type.getArgumentsAndReturnSizes(descriptor) >> 2;
+ if ((access & Opcodes.ACC_STATIC) != 0) {
+ --size;
+ }
+ maxLocals = size;
+ currentLocals = size;
+ // creates and visits the label for the first basic block
+ labels = new Label();
+ labels.status |= Label.PUSHED;
+ visitLabel(labels);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the MethodVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public AnnotationVisitor visitAnnotationDefault() {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ annd = new ByteVector();
+ return new AnnotationWriter(cw, false, annd, null, 0);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
+ if (visible) {
+ aw.next = anns;
+ anns = aw;
+ } else {
+ aw.next = ianns;
+ ianns = aw;
+ }
+ return aw;
+ }
+
+ @Override
+ public AnnotationVisitor visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ if ("Ljava/lang/Synthetic;".equals(desc)) {
+ // workaround for a bug in javac with synthetic parameters
+ // see ClassReader.readParameterAnnotations
+ synthetics = Math.max(synthetics, parameter + 1);
+ return new AnnotationWriter(cw, false, bv, null, 0);
+ }
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
+ if (visible) {
+ if (panns == null) {
+ panns = new AnnotationWriter[Type.getArgumentTypes(descriptor).length];
+ }
+ aw.next = panns[parameter];
+ panns[parameter] = aw;
+ } else {
+ if (ipanns == null) {
+ ipanns = new AnnotationWriter[Type.getArgumentTypes(descriptor).length];
+ }
+ aw.next = ipanns[parameter];
+ ipanns[parameter] = aw;
+ }
+ return aw;
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ if (attr.isCodeAttribute()) {
+ attr.next = cattrs;
+ cattrs = attr;
+ } else {
+ attr.next = attrs;
+ attrs = attr;
+ }
+ }
+
+ @Override
+ public void visitCode() {
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ if (!ClassReader.FRAMES || compute == FRAMES) {
+ return;
+ }
+
+ if (type == Opcodes.F_NEW) {
+ currentLocals = nLocal;
+ startFrame(code.length, nLocal, nStack);
+ for (int i = 0; i < nLocal; ++i) {
+ if (local[i] instanceof String) {
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType((String) local[i]);
+ } else if (local[i] instanceof Integer) {
+ frame[frameIndex++] = ((Integer) local[i]).intValue();
+ } else {
+ frame[frameIndex++] = Frame.UNINITIALIZED
+ | cw.addUninitializedType("",
+ ((Label) local[i]).position);
+ }
+ }
+ for (int i = 0; i < nStack; ++i) {
+ if (stack[i] instanceof String) {
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType((String) stack[i]);
+ } else if (stack[i] instanceof Integer) {
+ frame[frameIndex++] = ((Integer) stack[i]).intValue();
+ } else {
+ frame[frameIndex++] = Frame.UNINITIALIZED
+ | cw.addUninitializedType("",
+ ((Label) stack[i]).position);
+ }
+ }
+ endFrame();
+ } else {
+ int delta;
+ if (stackMap == null) {
+ stackMap = new ByteVector();
+ delta = code.length;
+ } else {
+ delta = code.length - previousFrameOffset - 1;
+ if (delta < 0) {
+ if (type == Opcodes.F_SAME) {
+ return;
+ } else {
+ throw new IllegalStateException();
+ }
+ }
+ }
+
+ switch (type) {
+ case Opcodes.F_FULL:
+ currentLocals = nLocal;
+ stackMap.putByte(FULL_FRAME)
+ .putShort(delta)
+ .putShort(nLocal);
+ for (int i = 0; i < nLocal; ++i) {
+ writeFrameType(local[i]);
+ }
+ stackMap.putShort(nStack);
+ for (int i = 0; i < nStack; ++i) {
+ writeFrameType(stack[i]);
+ }
+ break;
+ case Opcodes.F_APPEND:
+ currentLocals += nLocal;
+ stackMap.putByte(SAME_FRAME_EXTENDED + nLocal)
+ .putShort(delta);
+ for (int i = 0; i < nLocal; ++i) {
+ writeFrameType(local[i]);
+ }
+ break;
+ case Opcodes.F_CHOP:
+ currentLocals -= nLocal;
+ stackMap.putByte(SAME_FRAME_EXTENDED - nLocal)
+ .putShort(delta);
+ break;
+ case Opcodes.F_SAME:
+ if (delta < 64) {
+ stackMap.putByte(delta);
+ } else {
+ stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
+ }
+ break;
+ case Opcodes.F_SAME1:
+ if (delta < 64) {
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
+ } else {
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
+ .putShort(delta);
+ }
+ writeFrameType(stack[0]);
+ break;
+ }
+
+ previousFrameOffset = code.length;
+ ++frameCount;
+ }
+
+ maxStack = Math.max(maxStack, nStack);
+ maxLocals = Math.max(maxLocals, currentLocals);
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ // adds the instruction to the bytecode of the method
+ code.putByte(opcode);
+ // update currentBlock
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, 0, null, null);
+ } else {
+ // updates current and max stack sizes
+ int size = stackSize + Frame.SIZE[opcode];
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ // if opcode == ATHROW or xRETURN, ends current block (no successor)
+ if ((opcode >= Opcodes.IRETURN && opcode <= Opcodes.RETURN)
+ || opcode == Opcodes.ATHROW)
+ {
+ noSuccessor();
+ }
+ }
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, operand, null, null);
+ } else if (opcode != Opcodes.NEWARRAY) {
+ // updates current and max stack sizes only for BIPUSH and SIPUSH,
+ // which push one value (stack size variation = 0 for NEWARRAY)
+ int size = stackSize + 1;
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if (opcode == Opcodes.SIPUSH) {
+ code.put12(opcode, operand);
+ } else { // BIPUSH or NEWARRAY
+ code.put11(opcode, operand);
+ }
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, var, null, null);
+ } else {
+ // updates current and max stack sizes
+ if (opcode == Opcodes.RET) {
+ // no stack change, but end of current block (no successor)
+ currentBlock.status |= Label.RET;
+ // save 'stackSize' here for future use
+ // (see {@link #findSubroutineSuccessors})
+ currentBlock.inputStackTop = stackSize;
+ noSuccessor();
+ } else { // xLOAD or xSTORE
+ int size = stackSize + Frame.SIZE[opcode];
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ }
+ if (compute != NOTHING) {
+ // updates max locals
+ int n;
+ if (opcode == Opcodes.LLOAD || opcode == Opcodes.DLOAD
+ || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE)
+ {
+ n = var + 2;
+ } else {
+ n = var + 1;
+ }
+ if (n > maxLocals) {
+ maxLocals = n;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if (var < 4 && opcode != Opcodes.RET) {
+ int opt;
+ if (opcode < Opcodes.ISTORE) {
+ /* ILOAD_0 */
+ opt = 26 + ((opcode - Opcodes.ILOAD) << 2) + var;
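+ // e.g. ILOAD (21) with var 1 gives 26 + 0 + 1 = 27, i.e. ILOAD_1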
+ } else {
+ /* ISTORE_0 */
+ opt = 59 + ((opcode - Opcodes.ISTORE) << 2) + var;
+ }
+ code.putByte(opt);
+ } else if (var >= 256) {
+ code.putByte(196 /* WIDE */).put12(opcode, var);
+ } else {
+ code.put11(opcode, var);
+ }
+ if (opcode >= Opcodes.ISTORE && compute == FRAMES && handlerCount > 0) {
+ visitLabel(new Label());
+ }
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ Item i = cw.newClassItem(type);
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, code.length, cw, i);
+ } else if (opcode == Opcodes.NEW) {
+ // updates current and max stack sizes only if opcode == NEW
+ // (no stack change for ANEWARRAY, CHECKCAST, INSTANCEOF)
+ int size = stackSize + 1;
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ code.put12(opcode, i.index);
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ Item i = cw.newFieldItem(owner, name, desc);
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, 0, cw, i);
+ } else {
+ int size;
+ // computes the stack size variation
+ char c = desc.charAt(0);
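+ // 'J' (long) and 'D' (double) descriptors occupy two stack slots,
+ // all other field types occupy one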
+ switch (opcode) {
+ case Opcodes.GETSTATIC:
+ size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
+ break;
+ case Opcodes.PUTSTATIC:
+ size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
+ break;
+ case Opcodes.GETFIELD:
+ size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
+ break;
+ // case Constants.PUTFIELD:
+ default:
+ size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
+ break;
+ }
+ // updates current and max stack sizes
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ code.put12(opcode, i.index);
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ boolean itf = opcode == Opcodes.INVOKEINTERFACE;
+ Item i = cw.newMethodItem(owner, name, desc, itf);
+ int argSize = i.intVal;
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, 0, cw, i);
+ } else {
+ /*
+ * computes the stack size variation. In order not to recompute
+ * several times this variation for the same Item, we use the
+ * intVal field of this item to store this variation, once it
+ * has been computed. More precisely this intVal field stores
+ * the sizes of the arguments and of the return value
+ * corresponding to desc.
+ */
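+ // e.g. for desc "(IJ)D" the stored value is (4 << 2) | 2: four
+ // argument slots (the receiver, the int and the long) and two
+ // slots for the double return value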
+ if (argSize == 0) {
+ // the above sizes have not been computed yet,
+ // so we compute them...
+ argSize = Type.getArgumentsAndReturnSizes(desc);
+ // ... and we save them in order
+ // not to recompute them in the future
+ i.intVal = argSize;
+ }
+ int size;
+ if (opcode == Opcodes.INVOKESTATIC) {
+ size = stackSize - (argSize >> 2) + (argSize & 0x03) + 1;
+ } else {
+ size = stackSize - (argSize >> 2) + (argSize & 0x03);
+ }
+ // updates current and max stack sizes
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if (itf) {
+ if (argSize == 0) {
+ argSize = Type.getArgumentsAndReturnSizes(desc);
+ i.intVal = argSize;
+ }
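+ // INVOKEINTERFACE takes two extra operand bytes: the number of
+ // argument slots, including the receiver (argSize >> 2), and a
+ // mandatory zero byte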
+ code.put12(Opcodes.INVOKEINTERFACE, i.index).put11(argSize >> 2, 0);
+ } else {
+ code.put12(opcode, i.index);
+ }
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ final String name,
+ final String desc,
+ final Handle bsm,
+ final Object... bsmArgs)
+ {
+ Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs);
+ int argSize = i.intVal;
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.INVOKEDYNAMIC, 0, cw, i);
+ } else {
+ /*
+ * computes the stack size variation. In order not to recompute
+ * several times this variation for the same Item, we use the
+ * intVal field of this item to store this variation, once it
+ * has been computed. More precisely this intVal field stores
+ * the sizes of the arguments and of the return value
+ * corresponding to desc.
+ */
+ if (argSize == 0) {
+ // the above sizes have not been computed yet,
+ // so we compute them...
+ argSize = Type.getArgumentsAndReturnSizes(desc);
+ // ... and we save them in order
+ // not to recompute them in the future
+ i.intVal = argSize;
+ }
+ int size = stackSize - (argSize >> 2) + (argSize & 0x03) + 1;
+
+ // updates current and max stack sizes
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ code.put12(Opcodes.INVOKEDYNAMIC, i.index);
+ code.putShort(0);
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ Label nextInsn = null;
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, 0, null, null);
+ // 'label' is the target of a jump instruction
+ label.getFirst().status |= Label.TARGET;
+ // adds 'label' as a successor of this basic block
+ addSuccessor(Edge.NORMAL, label);
+ if (opcode != Opcodes.GOTO) {
+ // creates a Label for the next basic block
+ nextInsn = new Label();
+ }
+ } else {
+ if (opcode == Opcodes.JSR) {
+ if ((label.status & Label.SUBROUTINE) == 0) {
+ label.status |= Label.SUBROUTINE;
+ ++subroutines;
+ }
+ currentBlock.status |= Label.JSR;
+ addSuccessor(stackSize + 1, label);
+ // creates a Label for the next basic block
+ nextInsn = new Label();
+ /*
+ * note that, by construction in this method, a JSR block
+ * has at least two successors in the control flow graph:
+ * the first one leads to the next instruction after the JSR,
+ * while the second one leads to the JSR target.
+ */
+ } else {
+ // updates current stack size (max stack size unchanged
+ // because stack size variation always negative in this
+ // case)
+ stackSize += Frame.SIZE[opcode];
+ addSuccessor(stackSize, label);
+ }
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if ((label.status & Label.RESOLVED) != 0
+ && label.position - code.length < Short.MIN_VALUE)
+ {
+ /*
+ * case of a backward jump with an offset < -32768. In this case we
+ * automatically replace GOTO with GOTO_W, JSR with JSR_W and IFxxx
+ * <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is the
+ * "opposite" opcode of IFxxx (i.e., IFNE for IFEQ) and where <l'>
+ * designates the instruction just after the GOTO_W.
+ */
+ if (opcode == Opcodes.GOTO) {
+ code.putByte(200); // GOTO_W
+ } else if (opcode == Opcodes.JSR) {
+ code.putByte(201); // JSR_W
+ } else {
+ // if the IF instruction is transformed into IFNOT GOTO_W the
+ // next instruction becomes the target of the IFNOT instruction
+ if (nextInsn != null) {
+ nextInsn.status |= Label.TARGET;
+ }
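+ // replaces the conditional opcode with its "opposite" (e.g. IFNE
+ // for IFEQ): for IFEQ..IF_ACMPNE (153..166) the two opcodes of a
+ // pair are swapped via ((opcode + 1) ^ 1) - 1, for IFNULL and
+ // IFNONNULL via opcode ^ 1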
+ code.putByte(opcode <= 166
+ ? ((opcode + 1) ^ 1) - 1
+ : opcode ^ 1);
+ code.putShort(8); // jump offset
+ code.putByte(200); // GOTO_W
+ }
+ label.put(this, code, code.length - 1, true);
+ } else {
+ /*
+ * case of a backward jump with an offset >= -32768, or of a forward
+ * jump with, of course, an unknown offset. In these cases we store
+ * the offset in 2 bytes (which will be increased in
+ * resizeInstructions, if needed).
+ */
+ code.putByte(opcode);
+ label.put(this, code, code.length - 1, false);
+ }
+ if (currentBlock != null) {
+ if (nextInsn != null) {
+ // if the jump instruction is not a GOTO, the next instruction
+ // is also a successor of this instruction. Calling visitLabel
+ // adds the label of this next instruction as a successor of the
+ // current block, and starts a new basic block
+ visitLabel(nextInsn);
+ }
+ if (opcode == Opcodes.GOTO) {
+ noSuccessor();
+ }
+ }
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ // resolves previous forward references to label, if any
+ resize |= label.resolve(this, code.length, code.data);
+ // updates currentBlock
+ if ((label.status & Label.DEBUG) != 0) {
+ return;
+ }
+ if (compute == FRAMES) {
+ if (currentBlock != null) {
+ if (label.position == currentBlock.position) {
+ // successive labels, do not start a new basic block
+ currentBlock.status |= (label.status & Label.TARGET);
+ label.frame = currentBlock.frame;
+ return;
+ }
+ // ends current block (with one new successor)
+ addSuccessor(Edge.NORMAL, label);
+ }
+ // begins a new current block
+ currentBlock = label;
+ if (label.frame == null) {
+ label.frame = new Frame();
+ label.frame.owner = label;
+ }
+ // updates the basic block list
+ if (previousBlock != null) {
+ if (label.position == previousBlock.position) {
+ previousBlock.status |= (label.status & Label.TARGET);
+ label.frame = previousBlock.frame;
+ currentBlock = previousBlock;
+ return;
+ }
+ previousBlock.successor = label;
+ }
+ previousBlock = label;
+ } else if (compute == MAXS) {
+ if (currentBlock != null) {
+ // ends current block (with one new successor)
+ currentBlock.outputStackMax = maxStackSize;
+ addSuccessor(stackSize, label);
+ }
+ // begins a new current block
+ currentBlock = label;
+ // resets the relative current and max stack sizes
+ stackSize = 0;
+ maxStackSize = 0;
+ // updates the basic block list
+ if (previousBlock != null) {
+ previousBlock.successor = label;
+ }
+ previousBlock = label;
+ }
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ Item i = cw.newConstItem(cst);
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.LDC, 0, cw, i);
+ } else {
+ int size;
+ // computes the stack size variation
+ if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE)
+ {
+ size = stackSize + 2;
+ } else {
+ size = stackSize + 1;
+ }
+ // updates current and max stack sizes
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ int index = i.index;
+ if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) {
+ code.put12(20 /* LDC2_W */, index);
+ } else if (index >= 256) {
+ code.put12(19 /* LDC_W */, index);
+ } else {
+ code.put11(Opcodes.LDC, index);
+ }
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.IINC, var, null, null);
+ }
+ }
+ if (compute != NOTHING) {
+ // updates max locals
+ int n = var + 1;
+ if (n > maxLocals) {
+ maxLocals = n;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if ((var > 255) || (increment > 127) || (increment < -128)) {
+ code.putByte(196 /* WIDE */)
+ .put12(Opcodes.IINC, var)
+ .putShort(increment);
+ } else {
+ code.putByte(Opcodes.IINC).put11(var, increment);
+ }
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ // adds the instruction to the bytecode of the method
+ int source = code.length;
+ code.putByte(Opcodes.TABLESWITCH);
+ code.putByteArray(null, 0, (4 - code.length % 4) % 4);
+ dflt.put(this, code, source, true);
+ code.putInt(min).putInt(max);
+ for (int i = 0; i < labels.length; ++i) {
+ labels[i].put(this, code, source, true);
+ }
+ // updates currentBlock
+ visitSwitchInsn(dflt, labels);
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ // adds the instruction to the bytecode of the method
+ int source = code.length;
+ code.putByte(Opcodes.LOOKUPSWITCH);
+ code.putByteArray(null, 0, (4 - code.length % 4) % 4);
+ dflt.put(this, code, source, true);
+ code.putInt(labels.length);
+ for (int i = 0; i < labels.length; ++i) {
+ code.putInt(keys[i]);
+ labels[i].put(this, code, source, true);
+ }
+ // updates currentBlock
+ visitSwitchInsn(dflt, labels);
+ }
+
+ private void visitSwitchInsn(final Label dflt, final Label[] labels) {
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.LOOKUPSWITCH, 0, null, null);
+ // adds current block successors
+ addSuccessor(Edge.NORMAL, dflt);
+ dflt.getFirst().status |= Label.TARGET;
+ for (int i = 0; i < labels.length; ++i) {
+ addSuccessor(Edge.NORMAL, labels[i]);
+ labels[i].getFirst().status |= Label.TARGET;
+ }
+ } else {
+ // updates current stack size (max stack size unchanged)
+ --stackSize;
+ // adds current block successors
+ addSuccessor(stackSize, dflt);
+ for (int i = 0; i < labels.length; ++i) {
+ addSuccessor(stackSize, labels[i]);
+ }
+ }
+ // ends current block
+ noSuccessor();
+ }
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ Item i = cw.newClassItem(desc);
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.MULTIANEWARRAY, dims, cw, i);
+ } else {
+ // updates current stack size (max stack size unchanged because
+ // stack size variation always negative or zero)
+ stackSize += 1 - dims;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ code.put12(Opcodes.MULTIANEWARRAY, i.index).putByte(dims);
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ ++handlerCount;
+ Handler h = new Handler();
+ h.start = start;
+ h.end = end;
+ h.handler = handler;
+ h.desc = type;
+ h.type = type != null ? cw.newClass(type) : 0;
+ if (lastHandler == null) {
+ firstHandler = h;
+ } else {
+ lastHandler.next = h;
+ }
+ lastHandler = h;
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ if (signature != null) {
+ if (localVarType == null) {
+ localVarType = new ByteVector();
+ }
+ ++localVarTypeCount;
+ localVarType.putShort(start.position)
+ .putShort(end.position - start.position)
+ .putShort(cw.newUTF8(name))
+ .putShort(cw.newUTF8(signature))
+ .putShort(index);
+ }
+ if (localVar == null) {
+ localVar = new ByteVector();
+ }
+ ++localVarCount;
+ localVar.putShort(start.position)
+ .putShort(end.position - start.position)
+ .putShort(cw.newUTF8(name))
+ .putShort(cw.newUTF8(desc))
+ .putShort(index);
+ if (compute != NOTHING) {
+ // updates max locals
+ char c = desc.charAt(0);
+ int n = index + (c == 'J' || c == 'D' ? 2 : 1);
+ if (n > maxLocals) {
+ maxLocals = n;
+ }
+ }
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ if (lineNumber == null) {
+ lineNumber = new ByteVector();
+ }
+ ++lineNumberCount;
+ lineNumber.putShort(start.position);
+ lineNumber.putShort(line);
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ if (ClassReader.FRAMES && compute == FRAMES) {
+ // completes the control flow graph with exception handler blocks
+ Handler handler = firstHandler;
+ while (handler != null) {
+ Label l = handler.start.getFirst();
+ Label h = handler.handler.getFirst();
+ Label e = handler.end.getFirst();
+ // computes the kind of the edges to 'h'
+ String t = handler.desc == null
+ ? "java/lang/Throwable"
+ : handler.desc;
+ int kind = Frame.OBJECT | cw.addType(t);
+ // h is an exception handler
+ h.status |= Label.TARGET;
+ // adds 'h' as a successor of labels between 'start' and 'end'
+ while (l != e) {
+ // creates an edge to 'h'
+ Edge b = new Edge();
+ b.info = kind;
+ b.successor = h;
+ // adds it to the successors of 'l'
+ b.next = l.successors;
+ l.successors = b;
+ // goes to the next label
+ l = l.successor;
+ }
+ handler = handler.next;
+ }
+
+ // creates and visits the first (implicit) frame
+ Frame f = labels.frame;
+ Type[] args = Type.getArgumentTypes(descriptor);
+ f.initInputFrame(cw, access, args, this.maxLocals);
+ visitFrame(f);
+
+ /*
+ * fix point algorithm: mark the first basic block as 'changed'
+ * (i.e. put it in the 'changed' list) and, while there are changed
+ * basic blocks, choose one, mark it as unchanged, and update its
+ * successors (which can be changed in the process).
+ */
+ int max = 0;
+ Label changed = labels;
+ while (changed != null) {
+ // removes a basic block from the list of changed basic blocks
+ Label l = changed;
+ changed = changed.next;
+ l.next = null;
+ f = l.frame;
+ // a reachable jump target must be stored in the stack map
+ if ((l.status & Label.TARGET) != 0) {
+ l.status |= Label.STORE;
+ }
+ // all visited labels are reachable, by definition
+ l.status |= Label.REACHABLE;
+ // updates the (absolute) maximum stack size
+ int blockMax = f.inputStack.length + l.outputStackMax;
+ if (blockMax > max) {
+ max = blockMax;
+ }
+ // updates the successors of the current basic block
+ Edge e = l.successors;
+ while (e != null) {
+ Label n = e.successor.getFirst();
+ boolean change = f.merge(cw, n.frame, e.info);
+ if (change && n.next == null) {
+ // if n has changed and is not already in the 'changed'
+ // list, adds it to this list
+ n.next = changed;
+ changed = n;
+ }
+ e = e.next;
+ }
+ }
+
+ // visits all the frames that must be stored in the stack map
+ Label l = labels;
+ while (l != null) {
+ f = l.frame;
+ if ((l.status & Label.STORE) != 0) {
+ visitFrame(f);
+ }
+ if ((l.status & Label.REACHABLE) == 0) {
+ // finds start and end of dead basic block
+ Label k = l.successor;
+ int start = l.position;
+ int end = (k == null ? code.length : k.position) - 1;
+ // if non empty basic block
+ if (end >= start) {
+ max = Math.max(max, 1);
+ // replaces instructions with NOP ... NOP ATHROW
+ for (int i = start; i < end; ++i) {
+ code.data[i] = Opcodes.NOP;
+ }
+ code.data[end] = (byte) Opcodes.ATHROW;
+ // emits a frame for this unreachable block
+ startFrame(start, 0, 1);
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType("java/lang/Throwable");
+ endFrame();
+ // removes the start-end range from the exception handlers
+ firstHandler = Handler.remove(firstHandler, l, k);
+ }
+ }
+ l = l.successor;
+ }
+
+ handler = firstHandler;
+ handlerCount = 0;
+ while (handler != null) {
+ handlerCount += 1;
+ handler = handler.next;
+ }
+
+ this.maxStack = max;
+ } else if (compute == MAXS) {
+ // completes the control flow graph with exception handler blocks
+ Handler handler = firstHandler;
+ while (handler != null) {
+ Label l = handler.start;
+ Label h = handler.handler;
+ Label e = handler.end;
+ // adds 'h' as a successor of labels between 'start' and 'end'
+ while (l != e) {
+ // creates an edge to 'h'
+ Edge b = new Edge();
+ b.info = Edge.EXCEPTION;
+ b.successor = h;
+ // adds it to the successors of 'l'
+ if ((l.status & Label.JSR) == 0) {
+ b.next = l.successors;
+ l.successors = b;
+ } else {
+ // if l is a JSR block, adds b after the first two edges
+ // to preserve the hypothesis about JSR block successors
+ // order (see {@link #visitJumpInsn})
+ b.next = l.successors.next.next;
+ l.successors.next.next = b;
+ }
+ // goes to the next label
+ l = l.successor;
+ }
+ handler = handler.next;
+ }
+
+ if (subroutines > 0) {
+ // completes the control flow graph with the RET successors
+ /*
+ * first step: finds the subroutines. This step determines, for
+ * each basic block, to which subroutine(s) it belongs.
+ */
+ // finds the basic blocks that belong to the "main" subroutine
+ int id = 0;
+ labels.visitSubroutine(null, 1, subroutines);
+ // finds the basic blocks that belong to the real subroutines
+ Label l = labels;
+ while (l != null) {
+ if ((l.status & Label.JSR) != 0) {
+ // the subroutine is defined by l's TARGET, not by l
+ Label subroutine = l.successors.next.successor;
+ // if this subroutine has not been visited yet...
+ if ((subroutine.status & Label.VISITED) == 0) {
+ // ...assigns it a new id and finds its basic blocks
+ id += 1;
+ subroutine.visitSubroutine(null, (id / 32L) << 32
+ | (1L << (id % 32)), subroutines);
+ }
+ }
+ l = l.successor;
+ }
+ // second step: finds the successors of RET blocks
+ l = labels;
+ while (l != null) {
+ if ((l.status & Label.JSR) != 0) {
+ Label L = labels;
+ while (L != null) {
+ L.status &= ~Label.VISITED2;
+ L = L.successor;
+ }
+ // the subroutine is defined by l's TARGET, not by l
+ Label subroutine = l.successors.next.successor;
+ subroutine.visitSubroutine(l, 0, subroutines);
+ }
+ l = l.successor;
+ }
+ }
+
+ /*
+ * control flow analysis algorithm: while the block stack is not
+ * empty, pop a block from this stack, update the max stack size,
+ * compute the true (non relative) begin stack size of the
+ * successors of this block, and push these successors onto the
+ * stack (unless they have already been pushed onto the stack).
+ * Note: by hypothesis, the {@link Label#inputStackTop} of the
+ * blocks in the block stack are the true (non relative) beginning
+ * stack sizes of these blocks.
+ */
+ int max = 0;
+ Label stack = labels;
+ while (stack != null) {
+ // pops a block from the stack
+ Label l = stack;
+ stack = stack.next;
+ // computes the true (non relative) max stack size of this block
+ int start = l.inputStackTop;
+ int blockMax = start + l.outputStackMax;
+ // updates the global max stack size
+ if (blockMax > max) {
+ max = blockMax;
+ }
+ // analyzes the successors of the block
+ Edge b = l.successors;
+ if ((l.status & Label.JSR) != 0) {
+ // ignores the first edge of JSR blocks (virtual successor)
+ b = b.next;
+ }
+ while (b != null) {
+ l = b.successor;
+ // if this successor has not already been pushed...
+ if ((l.status & Label.PUSHED) == 0) {
+ // computes its true beginning stack size...
+ l.inputStackTop = b.info == Edge.EXCEPTION ? 1 : start
+ + b.info;
+ // ...and pushes it onto the stack
+ l.status |= Label.PUSHED;
+ l.next = stack;
+ stack = l;
+ }
+ b = b.next;
+ }
+ }
+ this.maxStack = Math.max(maxStack, max);
+ } else {
+ this.maxStack = maxStack;
+ this.maxLocals = maxLocals;
+ }
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: control flow analysis algorithm
+ // ------------------------------------------------------------------------
+
+ /**
+ * Adds a successor to the {@link #currentBlock currentBlock} block.
+ *
+ * @param info information about the control flow edge to be added.
+ * @param successor the successor block to be added to the current block.
+ */
+ private void addSuccessor(final int info, final Label successor) {
+ // creates and initializes an Edge object...
+ Edge b = new Edge();
+ b.info = info;
+ b.successor = successor;
+ // ...and adds it to the successor list of the currentBlock block
+ b.next = currentBlock.successors;
+ currentBlock.successors = b;
+ }
+
+ /**
+ * Ends the current basic block. This method must be used in the case where
+ * the current basic block does not have any successor.
+ */
+ private void noSuccessor() {
+ if (compute == FRAMES) {
+ Label l = new Label();
+ l.frame = new Frame();
+ l.frame.owner = l;
+ l.resolve(this, code.length, code.data);
+ previousBlock.successor = l;
+ previousBlock = l;
+ } else {
+ currentBlock.outputStackMax = maxStackSize;
+ }
+ currentBlock = null;
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: stack map frames
+ // ------------------------------------------------------------------------
+
+ /**
+ * Visits a frame that has been computed from scratch.
+ *
+ * @param f the frame that must be visited.
+ */
+ private void visitFrame(final Frame f) {
+ int i, t;
+ int nTop = 0;
+ int nLocal = 0;
+ int nStack = 0;
+ int[] locals = f.inputLocals;
+ int[] stacks = f.inputStack;
+ // computes the number of locals (ignores TOP types that are just after
+ // a LONG or a DOUBLE, and all trailing TOP types)
+ for (i = 0; i < locals.length; ++i) {
+ t = locals[i];
+ if (t == Frame.TOP) {
+ ++nTop;
+ } else {
+ nLocal += nTop + 1;
+ nTop = 0;
+ }
+ if (t == Frame.LONG || t == Frame.DOUBLE) {
+ ++i;
+ }
+ }
+ // computes the stack size (ignores TOP types that are just after
+ // a LONG or a DOUBLE)
+ for (i = 0; i < stacks.length; ++i) {
+ t = stacks[i];
+ ++nStack;
+ if (t == Frame.LONG || t == Frame.DOUBLE) {
+ ++i;
+ }
+ }
+ // visits the frame and its content
+ startFrame(f.owner.position, nLocal, nStack);
+ for (i = 0; nLocal > 0; ++i, --nLocal) {
+ t = locals[i];
+ frame[frameIndex++] = t;
+ if (t == Frame.LONG || t == Frame.DOUBLE) {
+ ++i;
+ }
+ }
+ for (i = 0; i < stacks.length; ++i) {
+ t = stacks[i];
+ frame[frameIndex++] = t;
+ if (t == Frame.LONG || t == Frame.DOUBLE) {
+ ++i;
+ }
+ }
+ endFrame();
+ }
+
+ /**
+ * Starts the visit of a stack map frame.
+ *
+ * @param offset the offset of the instruction to which the frame
+ * corresponds.
+ * @param nLocal the number of local variables in the frame.
+ * @param nStack the number of stack elements in the frame.
+ */
+ private void startFrame(final int offset, final int nLocal, final int nStack)
+ {
+ int n = 3 + nLocal + nStack;
+ if (frame == null || frame.length < n) {
+ frame = new int[n];
+ }
+ frame[0] = offset;
+ frame[1] = nLocal;
+ frame[2] = nStack;
+ frameIndex = 3;
+ }
+
+ /**
+ * Checks if the visit of the current frame {@link #frame} is finished, and
+ * if yes, writes it in the StackMapTable attribute.
+ */
+ private void endFrame() {
+ if (previousFrame != null) { // do not write the first frame
+ if (stackMap == null) {
+ stackMap = new ByteVector();
+ }
+ writeFrame();
+ ++frameCount;
+ }
+ previousFrame = frame;
+ frame = null;
+ }
+
+ /**
+ * Compresses and writes the current frame {@link #frame} in the StackMapTable
+ * attribute.
+ */
+ private void writeFrame() {
+ int clocalsSize = frame[1];
+ int cstackSize = frame[2];
+ if ((cw.version & 0xFFFF) < Opcodes.V1_6) {
+ stackMap.putShort(frame[0]).putShort(clocalsSize);
+ writeFrameTypes(3, 3 + clocalsSize);
+ stackMap.putShort(cstackSize);
+ writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
+ return;
+ }
+ int localsSize = previousFrame[1];
+ int type = FULL_FRAME;
+ int k = 0;
+ int delta;
+ if (frameCount == 0) {
+ delta = frame[0];
+ } else {
+ delta = frame[0] - previousFrame[0] - 1;
+ }
+ if (cstackSize == 0) {
+ k = clocalsSize - localsSize;
+ switch (k) {
+ case -3:
+ case -2:
+ case -1:
+ type = CHOP_FRAME;
+ localsSize = clocalsSize;
+ break;
+ case 0:
+ type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
+ break;
+ case 1:
+ case 2:
+ case 3:
+ type = APPEND_FRAME;
+ break;
+ }
+ } else if (clocalsSize == localsSize && cstackSize == 1) {
+ type = delta < 63
+ ? SAME_LOCALS_1_STACK_ITEM_FRAME
+ : SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED;
+ }
+ if (type != FULL_FRAME) {
+ // verify if locals are the same
+ int l = 3;
+ for (int j = 0; j < localsSize; j++) {
+ if (frame[l] != previousFrame[l]) {
+ type = FULL_FRAME;
+ break;
+ }
+ l++;
+ }
+ }
+ switch (type) {
+ case SAME_FRAME:
+ stackMap.putByte(delta);
+ break;
+ case SAME_LOCALS_1_STACK_ITEM_FRAME:
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
+ writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
+ break;
+ case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
+ .putShort(delta);
+ writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
+ break;
+ case SAME_FRAME_EXTENDED:
+ stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
+ break;
+ case CHOP_FRAME:
+ stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
+ break;
+ case APPEND_FRAME:
+ stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
+ writeFrameTypes(3 + localsSize, 3 + clocalsSize);
+ break;
+ // case FULL_FRAME:
+ default:
+ stackMap.putByte(FULL_FRAME)
+ .putShort(delta)
+ .putShort(clocalsSize);
+ writeFrameTypes(3, 3 + clocalsSize);
+ stackMap.putShort(cstackSize);
+ writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
+ }
+ }
+
+ /**
+ * Writes some types of the current frame {@link #frame} into the
+ * StackMapTableAttribute. This method converts types from the format used
+ * in {@link Label} to the format used in StackMapTable attributes. In
+ * particular, it converts type table indexes to constant pool indexes.
+ *
+ * @param start index of the first type in {@link #frame} to write.
+ * @param end index of last type in {@link #frame} to write (exclusive).
+ */
+ private void writeFrameTypes(final int start, final int end) {
+ for (int i = start; i < end; ++i) {
+ int t = frame[i];
+ int d = t & Frame.DIM;
+ if (d == 0) {
+ int v = t & Frame.BASE_VALUE;
+ switch (t & Frame.BASE_KIND) {
+ case Frame.OBJECT:
+ stackMap.putByte(7)
+ .putShort(cw.newClass(cw.typeTable[v].strVal1));
+ break;
+ case Frame.UNINITIALIZED:
+ stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
+ break;
+ default:
+ stackMap.putByte(v);
+ }
+ } else {
+ StringBuffer buf = new StringBuffer();
+ d >>= 28;
+ while (d-- > 0) {
+ buf.append('[');
+ }
+ if ((t & Frame.BASE_KIND) == Frame.OBJECT) {
+ buf.append('L');
+ buf.append(cw.typeTable[t & Frame.BASE_VALUE].strVal1);
+ buf.append(';');
+ } else {
+ switch (t & 0xF) {
+ case 1:
+ buf.append('I');
+ break;
+ case 2:
+ buf.append('F');
+ break;
+ case 3:
+ buf.append('D');
+ break;
+ case 9:
+ buf.append('Z');
+ break;
+ case 10:
+ buf.append('B');
+ break;
+ case 11:
+ buf.append('C');
+ break;
+ case 12:
+ buf.append('S');
+ break;
+ default:
+ buf.append('J');
+ }
+ }
+ stackMap.putByte(7).putShort(cw.newClass(buf.toString()));
+ }
+ }
+ }
+
+ private void writeFrameType(final Object type) {
+ if (type instanceof String) {
+ stackMap.putByte(7).putShort(cw.newClass((String) type));
+ } else if (type instanceof Integer) {
+ stackMap.putByte(((Integer) type).intValue());
+ } else {
+ stackMap.putByte(8).putShort(((Label) type).position);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: dump bytecode array
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the size of the bytecode of this method.
+ *
+ * @return the size of the bytecode of this method.
+ */
+ final int getSize() {
+ if (classReaderOffset != 0) {
+ return 6 + classReaderLength;
+ }
+ if (resize) {
+ // replaces the temporary jump opcodes introduced by Label.resolve.
+ if (ClassReader.RESIZE) {
+ resizeInstructions();
+ } else {
+ throw new RuntimeException("Method code too large!");
+ }
+ }
+ int size = 8;
+ if (code.length > 0) {
+ if (code.length > 65536) {
+ throw new RuntimeException("Method code too large!");
+ }
+ cw.newUTF8("Code");
+ size += 18 + code.length + 8 * handlerCount;
+ if (localVar != null) {
+ cw.newUTF8("LocalVariableTable");
+ size += 8 + localVar.length;
+ }
+ if (localVarType != null) {
+ cw.newUTF8("LocalVariableTypeTable");
+ size += 8 + localVarType.length;
+ }
+ if (lineNumber != null) {
+ cw.newUTF8("LineNumberTable");
+ size += 8 + lineNumber.length;
+ }
+ if (stackMap != null) {
+ boolean zip = (cw.version & 0xFFFF) >= Opcodes.V1_6;
+ cw.newUTF8(zip ? "StackMapTable" : "StackMap");
+ size += 8 + stackMap.length;
+ }
+ if (cattrs != null) {
+ size += cattrs.getSize(cw,
+ code.data,
+ code.length,
+ maxStack,
+ maxLocals);
+ }
+ }
+ if (exceptionCount > 0) {
+ cw.newUTF8("Exceptions");
+ size += 8 + 2 * exceptionCount;
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ cw.newUTF8("Synthetic");
+ size += 6;
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ cw.newUTF8("Deprecated");
+ size += 6;
+ }
+ if (ClassReader.SIGNATURES && signature != null) {
+ cw.newUTF8("Signature");
+ cw.newUTF8(signature);
+ size += 8;
+ }
+ if (ClassReader.ANNOTATIONS && annd != null) {
+ cw.newUTF8("AnnotationDefault");
+ size += 6 + annd.length;
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ cw.newUTF8("RuntimeVisibleAnnotations");
+ size += 8 + anns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ cw.newUTF8("RuntimeInvisibleAnnotations");
+ size += 8 + ianns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && panns != null) {
+ cw.newUTF8("RuntimeVisibleParameterAnnotations");
+ size += 7 + 2 * (panns.length - synthetics);
+ for (int i = panns.length - 1; i >= synthetics; --i) {
+ size += panns[i] == null ? 0 : panns[i].getSize();
+ }
+ }
+ if (ClassReader.ANNOTATIONS && ipanns != null) {
+ cw.newUTF8("RuntimeInvisibleParameterAnnotations");
+ size += 7 + 2 * (ipanns.length - synthetics);
+ for (int i = ipanns.length - 1; i >= synthetics; --i) {
+ size += ipanns[i] == null ? 0 : ipanns[i].getSize();
+ }
+ }
+ if (attrs != null) {
+ size += attrs.getSize(cw, null, 0, -1, -1);
+ }
+ return size;
+ }
+
+ /**
+ * Puts the bytecode of this method in the given byte vector.
+ *
+ * @param out the byte vector into which the bytecode of this method must be
+ * copied.
+ */
+ final void put(final ByteVector out) {
+ int mask = Opcodes.ACC_DEPRECATED
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ out.putShort(access & ~mask).putShort(name).putShort(desc);
+ if (classReaderOffset != 0) {
+ out.putByteArray(cw.cr.b, classReaderOffset, classReaderLength);
+ return;
+ }
+ int attributeCount = 0;
+ if (code.length > 0) {
+ ++attributeCount;
+ }
+ if (exceptionCount > 0) {
+ ++attributeCount;
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ ++attributeCount;
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ ++attributeCount;
+ }
+ if (ClassReader.SIGNATURES && signature != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && annd != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && panns != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && ipanns != null) {
+ ++attributeCount;
+ }
+ if (attrs != null) {
+ attributeCount += attrs.getCount();
+ }
+ out.putShort(attributeCount);
+ if (code.length > 0) {
+ int size = 12 + code.length + 8 * handlerCount;
+ if (localVar != null) {
+ size += 8 + localVar.length;
+ }
+ if (localVarType != null) {
+ size += 8 + localVarType.length;
+ }
+ if (lineNumber != null) {
+ size += 8 + lineNumber.length;
+ }
+ if (stackMap != null) {
+ size += 8 + stackMap.length;
+ }
+ if (cattrs != null) {
+ size += cattrs.getSize(cw,
+ code.data,
+ code.length,
+ maxStack,
+ maxLocals);
+ }
+ out.putShort(cw.newUTF8("Code")).putInt(size);
+ out.putShort(maxStack).putShort(maxLocals);
+ out.putInt(code.length).putByteArray(code.data, 0, code.length);
+ out.putShort(handlerCount);
+ if (handlerCount > 0) {
+ Handler h = firstHandler;
+ while (h != null) {
+ out.putShort(h.start.position)
+ .putShort(h.end.position)
+ .putShort(h.handler.position)
+ .putShort(h.type);
+ h = h.next;
+ }
+ }
+ attributeCount = 0;
+ if (localVar != null) {
+ ++attributeCount;
+ }
+ if (localVarType != null) {
+ ++attributeCount;
+ }
+ if (lineNumber != null) {
+ ++attributeCount;
+ }
+ if (stackMap != null) {
+ ++attributeCount;
+ }
+ if (cattrs != null) {
+ attributeCount += cattrs.getCount();
+ }
+ out.putShort(attributeCount);
+ if (localVar != null) {
+ out.putShort(cw.newUTF8("LocalVariableTable"));
+ out.putInt(localVar.length + 2).putShort(localVarCount);
+ out.putByteArray(localVar.data, 0, localVar.length);
+ }
+ if (localVarType != null) {
+ out.putShort(cw.newUTF8("LocalVariableTypeTable"));
+ out.putInt(localVarType.length + 2).putShort(localVarTypeCount);
+ out.putByteArray(localVarType.data, 0, localVarType.length);
+ }
+ if (lineNumber != null) {
+ out.putShort(cw.newUTF8("LineNumberTable"));
+ out.putInt(lineNumber.length + 2).putShort(lineNumberCount);
+ out.putByteArray(lineNumber.data, 0, lineNumber.length);
+ }
+ if (stackMap != null) {
+ boolean zip = (cw.version & 0xFFFF) >= Opcodes.V1_6;
+ out.putShort(cw.newUTF8(zip ? "StackMapTable" : "StackMap"));
+ out.putInt(stackMap.length + 2).putShort(frameCount);
+ out.putByteArray(stackMap.data, 0, stackMap.length);
+ }
+ if (cattrs != null) {
+ cattrs.put(cw, code.data, code.length, maxLocals, maxStack, out);
+ }
+ }
+ if (exceptionCount > 0) {
+ out.putShort(cw.newUTF8("Exceptions"))
+ .putInt(2 * exceptionCount + 2);
+ out.putShort(exceptionCount);
+ for (int i = 0; i < exceptionCount; ++i) {
+ out.putShort(exceptions[i]);
+ }
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ out.putShort(cw.newUTF8("Deprecated")).putInt(0);
+ }
+ if (ClassReader.SIGNATURES && signature != null) {
+ out.putShort(cw.newUTF8("Signature"))
+ .putInt(2)
+ .putShort(cw.newUTF8(signature));
+ }
+ if (ClassReader.ANNOTATIONS && annd != null) {
+ out.putShort(cw.newUTF8("AnnotationDefault"));
+ out.putInt(annd.length);
+ out.putByteArray(annd.data, 0, annd.length);
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ out.putShort(cw.newUTF8("RuntimeVisibleAnnotations"));
+ anns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations"));
+ ianns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && panns != null) {
+ out.putShort(cw.newUTF8("RuntimeVisibleParameterAnnotations"));
+ AnnotationWriter.put(panns, synthetics, out);
+ }
+ if (ClassReader.ANNOTATIONS && ipanns != null) {
+ out.putShort(cw.newUTF8("RuntimeInvisibleParameterAnnotations"));
+ AnnotationWriter.put(ipanns, synthetics, out);
+ }
+ if (attrs != null) {
+ attrs.put(cw, null, 0, -1, -1, out);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: instruction resizing (used to handle GOTO_W and JSR_W)
+ // ------------------------------------------------------------------------
+
+ /**
+ * Resizes and replaces the temporary instructions inserted by
+ * {@link Label#resolve} for wide forward jumps, while keeping jump offsets
+     * and instruction addresses consistent. This may require resizing other
+     * existing instructions, or even introducing new instructions: for
+     * example, increasing the size of an instruction by 2 in the middle of a
+     * method can increase the offset of an IFEQ instruction from 32766 to
+     * 32768, in which case IFEQ 32766 must be replaced with IFNE 8 GOTO_W
+     * 32765. This, in turn, may require increasing the size of another jump
+     * instruction, and so on... All these operations are handled automatically
+     * by this method. <p> <i>This method must be called once all the
+     * instructions of the method being built have been visited</i>. In particular, the
+ * {@link Label Label} objects used to construct the method are no longer
+ * valid after this method has been called.
+ */
+ private void resizeInstructions() {
+ byte[] b = code.data; // bytecode of the method
+ int u, v, label; // indexes in b
+ int i, j; // loop indexes
+ /*
+ * 1st step: As explained above, resizing an instruction may require to
+ * resize another one, which may require to resize yet another one, and
+ * so on. The first step of the algorithm consists in finding all the
+ * instructions that need to be resized, without modifying the code.
+ * This is done by the following "fix point" algorithm:
+ *
+ * Parse the code to find the jump instructions whose offset will need
+ * more than 2 bytes to be stored (the future offset is computed from
+ * the current offset and from the number of bytes that will be inserted
+     * or removed between the source and target instructions). For each such
+     * instruction, add an entry to (a copy of) the indexes and sizes
+     * arrays (unless this has already been done in a previous iteration).
+ *
+ * If at least one entry has been added during the previous step, go
+ * back to the beginning, otherwise stop.
+ *
+ * In fact the real algorithm is complicated by the fact that the size
+ * of TABLESWITCH and LOOKUPSWITCH instructions depends on their
+ * position in the bytecode (because of padding). In order to ensure the
+ * convergence of the algorithm, the number of bytes to be added or
+     * removed from these instructions is overestimated during the previous
+ * loop, and computed exactly only after the loop is finished (this
+ * requires another pass to parse the bytecode of the method).
+ */
+ int[] allIndexes = new int[0]; // copy of indexes
+ int[] allSizes = new int[0]; // copy of sizes
+ boolean[] resize; // instructions to be resized
+ int newOffset; // future offset of a jump instruction
+
+ resize = new boolean[code.length];
+
+ // 3 = loop again, 2 = loop ended, 1 = last pass, 0 = done
+ int state = 3;
+ do {
+ if (state == 3) {
+ state = 2;
+ }
+ u = 0;
+ while (u < b.length) {
+ int opcode = b[u] & 0xFF; // opcode of current instruction
+ int insert = 0; // bytes to be added after this instruction
+
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ case ClassWriter.IMPLVAR_INSN:
+ u += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ if (opcode > 201) {
+                        // converts temporary opcodes 202 to 217 (inclusive), 218
+                        // and 219 to IFEQ ... JSR (inclusive), IFNULL and
+                        // IFNONNULL
+ opcode = opcode < 218 ? opcode - 49 : opcode - 20;
+ label = u + readUnsignedShort(b, u + 1);
+ } else {
+ label = u + readShort(b, u + 1);
+ }
+ newOffset = getNewOffset(allIndexes, allSizes, u, label);
+ if (newOffset < Short.MIN_VALUE
+ || newOffset > Short.MAX_VALUE)
+ {
+ if (!resize[u]) {
+ if (opcode == Opcodes.GOTO
+ || opcode == Opcodes.JSR)
+ {
+ // two additional bytes will be required to
+ // replace this GOTO or JSR instruction with
+ // a GOTO_W or a JSR_W
+ insert = 2;
+ } else {
+ // five additional bytes will be required to
+ // replace this IFxxx <l> instruction with
+ // IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx
+ // is the "opposite" opcode of IFxxx (i.e.,
+ // IFNE for IFEQ) and where <l'> designates
+ // the instruction just after the GOTO_W.
+ insert = 5;
+ }
+ resize[u] = true;
+ }
+ }
+ u += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ u += 5;
+ break;
+ case ClassWriter.TABL_INSN:
+ if (state == 1) {
+ // true number of bytes to be added (or removed)
+ // from this instruction = (future number of padding
+                        // bytes - current number of padding bytes) -
+                        // previously overestimated variation =
+ // = ((3 - newOffset%4) - (3 - u%4)) - u%4
+ // = (-newOffset%4 + u%4) - u%4
+ // = -(newOffset & 3)
+ newOffset = getNewOffset(allIndexes, allSizes, 0, u);
+ insert = -(newOffset & 3);
+ } else if (!resize[u]) {
+                        // overestimation of the number of bytes to be
+ // added to this instruction = 3 - current number
+ // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3
+ insert = u & 3;
+ resize[u] = true;
+ }
+ // skips instruction
+ u = u + 4 - (u & 3);
+ u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12;
+ break;
+ case ClassWriter.LOOK_INSN:
+ if (state == 1) {
+ // like TABL_INSN
+ newOffset = getNewOffset(allIndexes, allSizes, 0, u);
+ insert = -(newOffset & 3);
+ } else if (!resize[u]) {
+ // like TABL_INSN
+ insert = u & 3;
+ resize[u] = true;
+ }
+ // skips instruction
+ u = u + 4 - (u & 3);
+ u += 8 * readInt(b, u + 4) + 8;
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[u + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ u += 6;
+ } else {
+ u += 4;
+ }
+ break;
+ case ClassWriter.VAR_INSN:
+ case ClassWriter.SBYTE_INSN:
+ case ClassWriter.LDC_INSN:
+ u += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ case ClassWriter.LDCW_INSN:
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.TYPE_INSN:
+ case ClassWriter.IINC_INSN:
+ u += 3;
+ break;
+ case ClassWriter.ITFMETH_INSN:
+ case ClassWriter.INDYMETH_INSN:
+ u += 5;
+ break;
+ // case ClassWriter.MANA_INSN:
+ default:
+ u += 4;
+ break;
+ }
+ if (insert != 0) {
+ // adds a new (u, insert) entry in the allIndexes and
+ // allSizes arrays
+ int[] newIndexes = new int[allIndexes.length + 1];
+ int[] newSizes = new int[allSizes.length + 1];
+ System.arraycopy(allIndexes,
+ 0,
+ newIndexes,
+ 0,
+ allIndexes.length);
+ System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length);
+ newIndexes[allIndexes.length] = u;
+ newSizes[allSizes.length] = insert;
+ allIndexes = newIndexes;
+ allSizes = newSizes;
+ if (insert > 0) {
+ state = 3;
+ }
+ }
+ }
+ if (state < 3) {
+ --state;
+ }
+ } while (state != 0);
+
+ // 2nd step:
+ // copies the bytecode of the method into a new bytevector, updates the
+ // offsets, and inserts (or removes) bytes as requested.
+
+ ByteVector newCode = new ByteVector(code.length);
+
+ u = 0;
+ while (u < code.length) {
+ int opcode = b[u] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ case ClassWriter.IMPLVAR_INSN:
+ newCode.putByte(opcode);
+ u += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ if (opcode > 201) {
+ // changes temporary opcodes 202 to 217 (inclusive), 218
+ // and 219 to IFEQ ... JSR (inclusive), IFNULL and
+ // IFNONNULL
+ opcode = opcode < 218 ? opcode - 49 : opcode - 20;
+ label = u + readUnsignedShort(b, u + 1);
+ } else {
+ label = u + readShort(b, u + 1);
+ }
+ newOffset = getNewOffset(allIndexes, allSizes, u, label);
+ if (resize[u]) {
+ // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx
+ // <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is
+ // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ)
+ // and where <l'> designates the instruction just after
+ // the GOTO_W.
+ if (opcode == Opcodes.GOTO) {
+ newCode.putByte(200); // GOTO_W
+ } else if (opcode == Opcodes.JSR) {
+ newCode.putByte(201); // JSR_W
+ } else {
+ newCode.putByte(opcode <= 166
+ ? ((opcode + 1) ^ 1) - 1
+ : opcode ^ 1);
+ newCode.putShort(8); // jump offset
+ newCode.putByte(200); // GOTO_W
+ // newOffset now computed from start of GOTO_W
+ newOffset -= 3;
+ }
+ newCode.putInt(newOffset);
+ } else {
+ newCode.putByte(opcode);
+ newCode.putShort(newOffset);
+ }
+ u += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ label = u + readInt(b, u + 1);
+ newOffset = getNewOffset(allIndexes, allSizes, u, label);
+ newCode.putByte(opcode);
+ newCode.putInt(newOffset);
+ u += 5;
+ break;
+ case ClassWriter.TABL_INSN:
+ // skips 0 to 3 padding bytes
+ v = u;
+ u = u + 4 - (v & 3);
+ // reads and copies instruction
+ newCode.putByte(Opcodes.TABLESWITCH);
+ newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ j = readInt(b, u);
+ u += 4;
+ newCode.putInt(j);
+ j = readInt(b, u) - j + 1;
+ u += 4;
+ newCode.putInt(readInt(b, u - 4));
+ for (; j > 0; --j) {
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ }
+ break;
+ case ClassWriter.LOOK_INSN:
+ // skips 0 to 3 padding bytes
+ v = u;
+ u = u + 4 - (v & 3);
+ // reads and copies instruction
+ newCode.putByte(Opcodes.LOOKUPSWITCH);
+ newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ j = readInt(b, u);
+ u += 4;
+ newCode.putInt(j);
+ for (; j > 0; --j) {
+ newCode.putInt(readInt(b, u));
+ u += 4;
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ }
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[u + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ newCode.putByteArray(b, u, 6);
+ u += 6;
+ } else {
+ newCode.putByteArray(b, u, 4);
+ u += 4;
+ }
+ break;
+ case ClassWriter.VAR_INSN:
+ case ClassWriter.SBYTE_INSN:
+ case ClassWriter.LDC_INSN:
+ newCode.putByteArray(b, u, 2);
+ u += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ case ClassWriter.LDCW_INSN:
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.TYPE_INSN:
+ case ClassWriter.IINC_INSN:
+ newCode.putByteArray(b, u, 3);
+ u += 3;
+ break;
+ case ClassWriter.ITFMETH_INSN:
+ case ClassWriter.INDYMETH_INSN:
+ newCode.putByteArray(b, u, 5);
+ u += 5;
+ break;
+ // case MANA_INSN:
+ default:
+ newCode.putByteArray(b, u, 4);
+ u += 4;
+ break;
+ }
+ }
+
+ // recomputes the stack map frames
+ if (frameCount > 0) {
+ if (compute == FRAMES) {
+ frameCount = 0;
+ stackMap = null;
+ previousFrame = null;
+ frame = null;
+ Frame f = new Frame();
+ f.owner = labels;
+ Type[] args = Type.getArgumentTypes(descriptor);
+ f.initInputFrame(cw, access, args, maxLocals);
+ visitFrame(f);
+ Label l = labels;
+ while (l != null) {
+ /*
+ * here we need the original label position. getNewOffset
+ * must therefore never have been called for this label.
+ */
+ u = l.position - 3;
+ if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u]))
+ {
+ getNewOffset(allIndexes, allSizes, l);
+ // TODO update offsets in UNINITIALIZED values
+ visitFrame(l.frame);
+ }
+ l = l.successor;
+ }
+ } else {
+ /*
+ * Resizing an existing stack map frame table is really hard.
+                 * Not only must the table be parsed to update the offsets, but
+ * new frames may be needed for jump instructions that were
+ * inserted by this method. And updating the offsets or
+ * inserting frames can change the format of the following
+ * frames, in case of packed frames. In practice the whole table
+ * must be recomputed. For this the frames are marked as
+ * potentially invalid. This will cause the whole class to be
+ * reread and rewritten with the COMPUTE_FRAMES option (see the
+ * ClassWriter.toByteArray method). This is not very efficient
+ * but is much easier and requires much less code than any other
+ * method I can think of.
+ */
+ cw.invalidFrames = true;
+ }
+ }
+ // updates the exception handler block labels
+ Handler h = firstHandler;
+ while (h != null) {
+ getNewOffset(allIndexes, allSizes, h.start);
+ getNewOffset(allIndexes, allSizes, h.end);
+ getNewOffset(allIndexes, allSizes, h.handler);
+ h = h.next;
+ }
+ // updates the instructions addresses in the
+ // local var and line number tables
+ for (i = 0; i < 2; ++i) {
+ ByteVector bv = i == 0 ? localVar : localVarType;
+ if (bv != null) {
+ b = bv.data;
+ u = 0;
+ while (u < bv.length) {
+ label = readUnsignedShort(b, u);
+ newOffset = getNewOffset(allIndexes, allSizes, 0, label);
+ writeShort(b, u, newOffset);
+ label += readUnsignedShort(b, u + 2);
+ newOffset = getNewOffset(allIndexes, allSizes, 0, label)
+ - newOffset;
+ writeShort(b, u + 2, newOffset);
+ u += 10;
+ }
+ }
+ }
+ if (lineNumber != null) {
+ b = lineNumber.data;
+ u = 0;
+ while (u < lineNumber.length) {
+ writeShort(b, u, getNewOffset(allIndexes,
+ allSizes,
+ 0,
+ readUnsignedShort(b, u)));
+ u += 4;
+ }
+ }
+ // updates the labels of the other attributes
+ Attribute attr = cattrs;
+ while (attr != null) {
+ Label[] labels = attr.getLabels();
+ if (labels != null) {
+ for (i = labels.length - 1; i >= 0; --i) {
+ getNewOffset(allIndexes, allSizes, labels[i]);
+ }
+ }
+ attr = attr.next;
+ }
+
+ // replaces old bytecodes with new ones
+ code = newCode;
+ }
+
+ /**
+ * Reads an unsigned short value in the given byte array.
+ *
+ * @param b a byte array.
+ * @param index the start index of the value to be read.
+ * @return the read value.
+ */
+ static int readUnsignedShort(final byte[] b, final int index) {
+ return ((b[index] & 0xFF) << 8) | (b[index + 1] & 0xFF);
+ }
+
+ /**
+ * Reads a signed short value in the given byte array.
+ *
+ * @param b a byte array.
+ * @param index the start index of the value to be read.
+ * @return the read value.
+ */
+ static short readShort(final byte[] b, final int index) {
+ return (short) (((b[index] & 0xFF) << 8) | (b[index + 1] & 0xFF));
+ }
+
+ /**
+ * Reads a signed int value in the given byte array.
+ *
+ * @param b a byte array.
+ * @param index the start index of the value to be read.
+ * @return the read value.
+ */
+ static int readInt(final byte[] b, final int index) {
+ return ((b[index] & 0xFF) << 24) | ((b[index + 1] & 0xFF) << 16)
+ | ((b[index + 2] & 0xFF) << 8) | (b[index + 3] & 0xFF);
+ }
+
+ /**
+ * Writes a short value in the given byte array.
+ *
+ * @param b a byte array.
+ * @param index where the first byte of the short value must be written.
+ * @param s the value to be written in the given byte array.
+ */
+ static void writeShort(final byte[] b, final int index, final int s) {
+ b[index] = (byte) (s >>> 8);
+ b[index + 1] = (byte) s;
+ }
+
+ /**
+ * Computes the future value of a bytecode offset. <p> Note: it is possible
+     * to have several entries for the same instruction in the <tt>indexes</tt>
+     * and <tt>sizes</tt> arrays: two entries (index=a,size=b) and (index=a,size=b')
+ * are equivalent to a single entry (index=a,size=b+b').
+ *
+ * @param indexes current positions of the instructions to be resized. Each
+ * instruction must be designated by the index of its <i>last</i>
+ * byte, plus one (or, in other words, by the index of the <i>first</i>
+ * byte of the <i>next</i> instruction).
+ * @param sizes the number of bytes to be <i>added</i> to the above
+     *        instructions. More precisely, for each i < <tt>indexes.length</tt>,
+ * <tt>sizes</tt>[i] bytes will be added at the end of the
+ * instruction designated by <tt>indexes</tt>[i] or, if
+ * <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
+ * bytes of the instruction will be removed (the instruction size
+     *        <i>must not</i> become negative or zero).
+ * @param begin index of the first byte of the source instruction.
+ * @param end index of the first byte of the target instruction.
+ * @return the future value of the given bytecode offset.
+ */
+ static int getNewOffset(
+ final int[] indexes,
+ final int[] sizes,
+ final int begin,
+ final int end)
+ {
+ int offset = end - begin;
+ for (int i = 0; i < indexes.length; ++i) {
+ if (begin < indexes[i] && indexes[i] <= end) {
+ // forward jump
+ offset += sizes[i];
+ } else if (end < indexes[i] && indexes[i] <= begin) {
+ // backward jump
+ offset -= sizes[i];
+ }
+ }
+ return offset;
+ }
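+
+    // Worked example (hypothetical values): with indexes = {10} and sizes = {2},
+    // i.e. 2 bytes inserted at the end of the instruction ending just before
+    // index 10, a forward jump from begin = 4 to end = 20 sees its offset grow
+    // from 16 to 18 (since 4 < 10 <= 20), while a backward jump from begin = 20
+    // to end = 4 sees its offset change from -16 to -18.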
+
+ /**
+ * Updates the offset of the given label.
+ *
+ * @param indexes current positions of the instructions to be resized. Each
+ * instruction must be designated by the index of its <i>last</i>
+ * byte, plus one (or, in other words, by the index of the <i>first</i>
+ * byte of the <i>next</i> instruction).
+ * @param sizes the number of bytes to be <i>added</i> to the above
+     *        instructions. More precisely, for each i < <tt>indexes.length</tt>,
+ * <tt>sizes</tt>[i] bytes will be added at the end of the
+ * instruction designated by <tt>indexes</tt>[i] or, if
+ * <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
+ * bytes of the instruction will be removed (the instruction size
+     *        <i>must not</i> become negative or zero).
+ * @param label the label whose offset must be updated.
+ */
+ static void getNewOffset(
+ final int[] indexes,
+ final int[] sizes,
+ final Label label)
+ {
+ if ((label.status & Label.RESIZED) == 0) {
+ label.position = getNewOffset(indexes, sizes, 0, label.position);
+ label.status |= Label.RESIZED;
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/Opcodes.java b/src/asm/scala/tools/asm/Opcodes.java
new file mode 100644
index 0000000000..809e5ae590
--- /dev/null
+++ b/src/asm/scala/tools/asm/Opcodes.java
@@ -0,0 +1,358 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * Defines the JVM opcodes, access flags and array type codes. This interface
+ * does not define all the JVM opcodes because some opcodes are automatically
+ * handled. For example, the xLOAD and xSTORE opcodes are automatically replaced
+ * by xLOAD_n and xSTORE_n opcodes when possible. The xLOAD_n and xSTORE_n
+ * opcodes are therefore not defined in this interface. The same applies to LDC
+ * (automatically replaced by LDC_W or LDC2_W when necessary), WIDE, GOTO_W and
+ * JSR_W.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public interface Opcodes {
+
+ // ASM API versions
+
+ int ASM4 = 4 << 16 | 0 << 8 | 0;
+
+ // versions
+
+ int V1_1 = 3 << 16 | 45;
+ int V1_2 = 0 << 16 | 46;
+ int V1_3 = 0 << 16 | 47;
+ int V1_4 = 0 << 16 | 48;
+ int V1_5 = 0 << 16 | 49;
+ int V1_6 = 0 << 16 | 50;
+ int V1_7 = 0 << 16 | 51;
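+
+    // These constants pack a class file version as minorVersion << 16 | majorVersion:
+    // V1_1 is minor 3, major 45, and the others have minor 0, which is why a check
+    // such as (version & 0xFFFF) >= Opcodes.V1_6 compares major versions only.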
+
+ // access flags
+
+ int ACC_PUBLIC = 0x0001; // class, field, method
+ int ACC_PRIVATE = 0x0002; // class, field, method
+ int ACC_PROTECTED = 0x0004; // class, field, method
+ int ACC_STATIC = 0x0008; // field, method
+ int ACC_FINAL = 0x0010; // class, field, method
+ int ACC_SUPER = 0x0020; // class
+ int ACC_SYNCHRONIZED = 0x0020; // method
+ int ACC_VOLATILE = 0x0040; // field
+ int ACC_BRIDGE = 0x0040; // method
+ int ACC_VARARGS = 0x0080; // method
+ int ACC_TRANSIENT = 0x0080; // field
+ int ACC_NATIVE = 0x0100; // method
+ int ACC_INTERFACE = 0x0200; // class
+ int ACC_ABSTRACT = 0x0400; // class, method
+ int ACC_STRICT = 0x0800; // method
+ int ACC_SYNTHETIC = 0x1000; // class, field, method
+ int ACC_ANNOTATION = 0x2000; // class
+ int ACC_ENUM = 0x4000; // class(?) field inner
+
+ // ASM specific pseudo access flags
+
+ int ACC_DEPRECATED = 0x20000; // class, field, method
+
+ // types for NEWARRAY
+
+ int T_BOOLEAN = 4;
+ int T_CHAR = 5;
+ int T_FLOAT = 6;
+ int T_DOUBLE = 7;
+ int T_BYTE = 8;
+ int T_SHORT = 9;
+ int T_INT = 10;
+ int T_LONG = 11;
+
+ // tags for Handle
+
+ int H_GETFIELD = 1;
+ int H_GETSTATIC = 2;
+ int H_PUTFIELD = 3;
+ int H_PUTSTATIC = 4;
+ int H_INVOKEVIRTUAL = 5;
+ int H_INVOKESTATIC = 6;
+ int H_INVOKESPECIAL = 7;
+ int H_NEWINVOKESPECIAL = 8;
+ int H_INVOKEINTERFACE = 9;
+
+ // stack map frame types
+
+ /**
+ * Represents an expanded frame. See {@link ClassReader#EXPAND_FRAMES}.
+ */
+ int F_NEW = -1;
+
+ /**
+ * Represents a compressed frame with complete frame data.
+ */
+ int F_FULL = 0;
+
+ /**
+ * Represents a compressed frame where locals are the same as the locals in
+ * the previous frame, except that additional 1-3 locals are defined, and
+ * with an empty stack.
+ */
+ int F_APPEND = 1;
+
+ /**
+ * Represents a compressed frame where locals are the same as the locals in
+ * the previous frame, except that the last 1-3 locals are absent and with
+ * an empty stack.
+ */
+ int F_CHOP = 2;
+
+ /**
+ * Represents a compressed frame with exactly the same locals as the
+ * previous frame and with an empty stack.
+ */
+ int F_SAME = 3;
+
+ /**
+ * Represents a compressed frame with exactly the same locals as the
+ * previous frame and with a single value on the stack.
+ */
+ int F_SAME1 = 4;
+
+ Integer TOP = new Integer(0);
+ Integer INTEGER = new Integer(1);
+ Integer FLOAT = new Integer(2);
+ Integer DOUBLE = new Integer(3);
+ Integer LONG = new Integer(4);
+ Integer NULL = new Integer(5);
+ Integer UNINITIALIZED_THIS = new Integer(6);
+
+ // opcodes // visit method (- = idem)
+
+ int NOP = 0; // visitInsn
+ int ACONST_NULL = 1; // -
+ int ICONST_M1 = 2; // -
+ int ICONST_0 = 3; // -
+ int ICONST_1 = 4; // -
+ int ICONST_2 = 5; // -
+ int ICONST_3 = 6; // -
+ int ICONST_4 = 7; // -
+ int ICONST_5 = 8; // -
+ int LCONST_0 = 9; // -
+ int LCONST_1 = 10; // -
+ int FCONST_0 = 11; // -
+ int FCONST_1 = 12; // -
+ int FCONST_2 = 13; // -
+ int DCONST_0 = 14; // -
+ int DCONST_1 = 15; // -
+ int BIPUSH = 16; // visitIntInsn
+ int SIPUSH = 17; // -
+ int LDC = 18; // visitLdcInsn
+ // int LDC_W = 19; // -
+ // int LDC2_W = 20; // -
+ int ILOAD = 21; // visitVarInsn
+ int LLOAD = 22; // -
+ int FLOAD = 23; // -
+ int DLOAD = 24; // -
+ int ALOAD = 25; // -
+ // int ILOAD_0 = 26; // -
+ // int ILOAD_1 = 27; // -
+ // int ILOAD_2 = 28; // -
+ // int ILOAD_3 = 29; // -
+ // int LLOAD_0 = 30; // -
+ // int LLOAD_1 = 31; // -
+ // int LLOAD_2 = 32; // -
+ // int LLOAD_3 = 33; // -
+ // int FLOAD_0 = 34; // -
+ // int FLOAD_1 = 35; // -
+ // int FLOAD_2 = 36; // -
+ // int FLOAD_3 = 37; // -
+ // int DLOAD_0 = 38; // -
+ // int DLOAD_1 = 39; // -
+ // int DLOAD_2 = 40; // -
+ // int DLOAD_3 = 41; // -
+ // int ALOAD_0 = 42; // -
+ // int ALOAD_1 = 43; // -
+ // int ALOAD_2 = 44; // -
+ // int ALOAD_3 = 45; // -
+ int IALOAD = 46; // visitInsn
+ int LALOAD = 47; // -
+ int FALOAD = 48; // -
+ int DALOAD = 49; // -
+ int AALOAD = 50; // -
+ int BALOAD = 51; // -
+ int CALOAD = 52; // -
+ int SALOAD = 53; // -
+ int ISTORE = 54; // visitVarInsn
+ int LSTORE = 55; // -
+ int FSTORE = 56; // -
+ int DSTORE = 57; // -
+ int ASTORE = 58; // -
+ // int ISTORE_0 = 59; // -
+ // int ISTORE_1 = 60; // -
+ // int ISTORE_2 = 61; // -
+ // int ISTORE_3 = 62; // -
+ // int LSTORE_0 = 63; // -
+ // int LSTORE_1 = 64; // -
+ // int LSTORE_2 = 65; // -
+ // int LSTORE_3 = 66; // -
+ // int FSTORE_0 = 67; // -
+ // int FSTORE_1 = 68; // -
+ // int FSTORE_2 = 69; // -
+ // int FSTORE_3 = 70; // -
+ // int DSTORE_0 = 71; // -
+ // int DSTORE_1 = 72; // -
+ // int DSTORE_2 = 73; // -
+ // int DSTORE_3 = 74; // -
+ // int ASTORE_0 = 75; // -
+ // int ASTORE_1 = 76; // -
+ // int ASTORE_2 = 77; // -
+ // int ASTORE_3 = 78; // -
+ int IASTORE = 79; // visitInsn
+ int LASTORE = 80; // -
+ int FASTORE = 81; // -
+ int DASTORE = 82; // -
+ int AASTORE = 83; // -
+ int BASTORE = 84; // -
+ int CASTORE = 85; // -
+ int SASTORE = 86; // -
+ int POP = 87; // -
+ int POP2 = 88; // -
+ int DUP = 89; // -
+ int DUP_X1 = 90; // -
+ int DUP_X2 = 91; // -
+ int DUP2 = 92; // -
+ int DUP2_X1 = 93; // -
+ int DUP2_X2 = 94; // -
+ int SWAP = 95; // -
+ int IADD = 96; // -
+ int LADD = 97; // -
+ int FADD = 98; // -
+ int DADD = 99; // -
+ int ISUB = 100; // -
+ int LSUB = 101; // -
+ int FSUB = 102; // -
+ int DSUB = 103; // -
+ int IMUL = 104; // -
+ int LMUL = 105; // -
+ int FMUL = 106; // -
+ int DMUL = 107; // -
+ int IDIV = 108; // -
+ int LDIV = 109; // -
+ int FDIV = 110; // -
+ int DDIV = 111; // -
+ int IREM = 112; // -
+ int LREM = 113; // -
+ int FREM = 114; // -
+ int DREM = 115; // -
+ int INEG = 116; // -
+ int LNEG = 117; // -
+ int FNEG = 118; // -
+ int DNEG = 119; // -
+ int ISHL = 120; // -
+ int LSHL = 121; // -
+ int ISHR = 122; // -
+ int LSHR = 123; // -
+ int IUSHR = 124; // -
+ int LUSHR = 125; // -
+ int IAND = 126; // -
+ int LAND = 127; // -
+ int IOR = 128; // -
+ int LOR = 129; // -
+ int IXOR = 130; // -
+ int LXOR = 131; // -
+ int IINC = 132; // visitIincInsn
+ int I2L = 133; // visitInsn
+ int I2F = 134; // -
+ int I2D = 135; // -
+ int L2I = 136; // -
+ int L2F = 137; // -
+ int L2D = 138; // -
+ int F2I = 139; // -
+ int F2L = 140; // -
+ int F2D = 141; // -
+ int D2I = 142; // -
+ int D2L = 143; // -
+ int D2F = 144; // -
+ int I2B = 145; // -
+ int I2C = 146; // -
+ int I2S = 147; // -
+ int LCMP = 148; // -
+ int FCMPL = 149; // -
+ int FCMPG = 150; // -
+ int DCMPL = 151; // -
+ int DCMPG = 152; // -
+ int IFEQ = 153; // visitJumpInsn
+ int IFNE = 154; // -
+ int IFLT = 155; // -
+ int IFGE = 156; // -
+ int IFGT = 157; // -
+ int IFLE = 158; // -
+ int IF_ICMPEQ = 159; // -
+ int IF_ICMPNE = 160; // -
+ int IF_ICMPLT = 161; // -
+ int IF_ICMPGE = 162; // -
+ int IF_ICMPGT = 163; // -
+ int IF_ICMPLE = 164; // -
+ int IF_ACMPEQ = 165; // -
+ int IF_ACMPNE = 166; // -
+ int GOTO = 167; // -
+ int JSR = 168; // -
+ int RET = 169; // visitVarInsn
+    int TABLESWITCH = 170; // visitTableSwitchInsn
+    int LOOKUPSWITCH = 171; // visitLookupSwitchInsn
+ int IRETURN = 172; // visitInsn
+ int LRETURN = 173; // -
+ int FRETURN = 174; // -
+ int DRETURN = 175; // -
+ int ARETURN = 176; // -
+ int RETURN = 177; // -
+ int GETSTATIC = 178; // visitFieldInsn
+ int PUTSTATIC = 179; // -
+ int GETFIELD = 180; // -
+ int PUTFIELD = 181; // -
+ int INVOKEVIRTUAL = 182; // visitMethodInsn
+ int INVOKESPECIAL = 183; // -
+ int INVOKESTATIC = 184; // -
+ int INVOKEINTERFACE = 185; // -
+ int INVOKEDYNAMIC = 186; // visitInvokeDynamicInsn
+ int NEW = 187; // visitTypeInsn
+ int NEWARRAY = 188; // visitIntInsn
+ int ANEWARRAY = 189; // visitTypeInsn
+ int ARRAYLENGTH = 190; // visitInsn
+ int ATHROW = 191; // -
+ int CHECKCAST = 192; // visitTypeInsn
+ int INSTANCEOF = 193; // -
+ int MONITORENTER = 194; // visitInsn
+ int MONITOREXIT = 195; // -
+ // int WIDE = 196; // NOT VISITED
+ int MULTIANEWARRAY = 197; // visitMultiANewArrayInsn
+ int IFNULL = 198; // visitJumpInsn
+ int IFNONNULL = 199; // -
+ // int GOTO_W = 200; // -
+ // int JSR_W = 201; // -
+}
diff --git a/src/asm/scala/tools/asm/Type.java b/src/asm/scala/tools/asm/Type.java
new file mode 100644
index 0000000000..bf1107182a
--- /dev/null
+++ b/src/asm/scala/tools/asm/Type.java
@@ -0,0 +1,865 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Method;
+
+/**
+ * A Java field or method type. This class can be used to make it easier to
+ * manipulate type and method descriptors.
+ *
+ * @author Eric Bruneton
+ * @author Chris Nokleberg
+ */
+public class Type {
+
+ /**
+ * The sort of the <tt>void</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int VOID = 0;
+
+ /**
+ * The sort of the <tt>boolean</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int BOOLEAN = 1;
+
+ /**
+ * The sort of the <tt>char</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int CHAR = 2;
+
+ /**
+ * The sort of the <tt>byte</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int BYTE = 3;
+
+ /**
+ * The sort of the <tt>short</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int SHORT = 4;
+
+ /**
+ * The sort of the <tt>int</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int INT = 5;
+
+ /**
+ * The sort of the <tt>float</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int FLOAT = 6;
+
+ /**
+ * The sort of the <tt>long</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int LONG = 7;
+
+ /**
+ * The sort of the <tt>double</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int DOUBLE = 8;
+
+ /**
+ * The sort of array reference types. See {@link #getSort getSort}.
+ */
+ public static final int ARRAY = 9;
+
+ /**
+ * The sort of object reference types. See {@link #getSort getSort}.
+ */
+ public static final int OBJECT = 10;
+
+ /**
+ * The sort of method types. See {@link #getSort getSort}.
+ */
+ public static final int METHOD = 11;
+
+ /**
+ * The <tt>void</tt> type.
+ */
+ public static final Type VOID_TYPE = new Type(VOID, null, ('V' << 24)
+ | (5 << 16) | (0 << 8) | 0, 1);
+
+ /**
+ * The <tt>boolean</tt> type.
+ */
+ public static final Type BOOLEAN_TYPE = new Type(BOOLEAN, null, ('Z' << 24)
+ | (0 << 16) | (5 << 8) | 1, 1);
+
+ /**
+ * The <tt>char</tt> type.
+ */
+ public static final Type CHAR_TYPE = new Type(CHAR, null, ('C' << 24)
+ | (0 << 16) | (6 << 8) | 1, 1);
+
+ /**
+ * The <tt>byte</tt> type.
+ */
+ public static final Type BYTE_TYPE = new Type(BYTE, null, ('B' << 24)
+ | (0 << 16) | (5 << 8) | 1, 1);
+
+ /**
+ * The <tt>short</tt> type.
+ */
+ public static final Type SHORT_TYPE = new Type(SHORT, null, ('S' << 24)
+ | (0 << 16) | (7 << 8) | 1, 1);
+
+ /**
+ * The <tt>int</tt> type.
+ */
+ public static final Type INT_TYPE = new Type(INT, null, ('I' << 24)
+ | (0 << 16) | (0 << 8) | 1, 1);
+
+ /**
+ * The <tt>float</tt> type.
+ */
+ public static final Type FLOAT_TYPE = new Type(FLOAT, null, ('F' << 24)
+ | (2 << 16) | (2 << 8) | 1, 1);
+
+ /**
+ * The <tt>long</tt> type.
+ */
+ public static final Type LONG_TYPE = new Type(LONG, null, ('J' << 24)
+ | (1 << 16) | (1 << 8) | 2, 1);
+
+ /**
+ * The <tt>double</tt> type.
+ */
+ public static final Type DOUBLE_TYPE = new Type(DOUBLE, null, ('D' << 24)
+ | (3 << 16) | (3 << 8) | 2, 1);
+
+ // ------------------------------------------------------------------------
+ // Fields
+ // ------------------------------------------------------------------------
+
+ /**
+ * The sort of this Java type.
+ */
+ private final int sort;
+
+ /**
+ * A buffer containing the internal name of this Java type. This field is
+ * only used for reference types.
+ */
+ private final char[] buf;
+
+ /**
+ * The offset of the internal name of this Java type in {@link #buf buf} or,
+ * for primitive types, the size, descriptor and getOpcode offsets for this
+     * type (byte 0 contains the size, byte 1 the offset for IALOAD or IASTORE,
+     * byte 2 the offset for all other instructions, byte 3 the descriptor
+     * character).
+ */
+ private final int off;
+
+ /**
+ * The length of the internal name of this Java type.
+ */
+ private final int len;
+
+ // ------------------------------------------------------------------------
+ // Constructors
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a reference type.
+ *
+ * @param sort the sort of the reference type to be constructed.
+ * @param buf a buffer containing the descriptor of the previous type.
+ * @param off the offset of this descriptor in the previous buffer.
+ * @param len the length of this descriptor.
+ */
+ private Type(final int sort, final char[] buf, final int off, final int len)
+ {
+ this.sort = sort;
+ this.buf = buf;
+ this.off = off;
+ this.len = len;
+ }
+
+ /**
+ * Returns the Java type corresponding to the given type descriptor.
+ *
+ * @param typeDescriptor a field or method type descriptor.
+ * @return the Java type corresponding to the given type descriptor.
+ */
+ public static Type getType(final String typeDescriptor) {
+ return getType(typeDescriptor.toCharArray(), 0);
+ }
+
+ /**
+ * Returns the Java type corresponding to the given internal name.
+ *
+ * @param internalName an internal name.
+ * @return the Java type corresponding to the given internal name.
+ */
+ public static Type getObjectType(final String internalName) {
+ char[] buf = internalName.toCharArray();
+ return new Type(buf[0] == '[' ? ARRAY : OBJECT, buf, 0, buf.length);
+ }
+
+ /**
+ * Returns the Java type corresponding to the given method descriptor.
+ * Equivalent to <code>Type.getType(methodDescriptor)</code>.
+ *
+ * @param methodDescriptor a method descriptor.
+ * @return the Java type corresponding to the given method descriptor.
+ */
+ public static Type getMethodType(final String methodDescriptor) {
+ return getType(methodDescriptor.toCharArray(), 0);
+ }
+
+ /**
+ * Returns the Java method type corresponding to the given argument and
+ * return types.
+ *
+ * @param returnType the return type of the method.
+ * @param argumentTypes the argument types of the method.
+ * @return the Java type corresponding to the given argument and return types.
+ */
+ public static Type getMethodType(final Type returnType, final Type... argumentTypes) {
+ return getType(getMethodDescriptor(returnType, argumentTypes));
+ }
+
+ /**
+ * Returns the Java type corresponding to the given class.
+ *
+ * @param c a class.
+ * @return the Java type corresponding to the given class.
+ */
+ public static Type getType(final Class<?> c) {
+ if (c.isPrimitive()) {
+ if (c == Integer.TYPE) {
+ return INT_TYPE;
+ } else if (c == Void.TYPE) {
+ return VOID_TYPE;
+ } else if (c == Boolean.TYPE) {
+ return BOOLEAN_TYPE;
+ } else if (c == Byte.TYPE) {
+ return BYTE_TYPE;
+ } else if (c == Character.TYPE) {
+ return CHAR_TYPE;
+ } else if (c == Short.TYPE) {
+ return SHORT_TYPE;
+ } else if (c == Double.TYPE) {
+ return DOUBLE_TYPE;
+ } else if (c == Float.TYPE) {
+ return FLOAT_TYPE;
+ } else /* if (c == Long.TYPE) */{
+ return LONG_TYPE;
+ }
+ } else {
+ return getType(getDescriptor(c));
+ }
+ }
+
+ /**
+ * Returns the Java method type corresponding to the given constructor.
+ *
+ * @param c a {@link Constructor Constructor} object.
+ * @return the Java method type corresponding to the given constructor.
+ */
+ public static Type getType(final Constructor<?> c) {
+ return getType(getConstructorDescriptor(c));
+ }
+
+ /**
+ * Returns the Java method type corresponding to the given method.
+ *
+ * @param m a {@link Method Method} object.
+ * @return the Java method type corresponding to the given method.
+ */
+ public static Type getType(final Method m) {
+ return getType(getMethodDescriptor(m));
+ }
+
+ /**
+ * Returns the Java types corresponding to the argument types of the given
+ * method descriptor.
+ *
+ * @param methodDescriptor a method descriptor.
+ * @return the Java types corresponding to the argument types of the given
+ * method descriptor.
+ */
+ public static Type[] getArgumentTypes(final String methodDescriptor) {
+ char[] buf = methodDescriptor.toCharArray();
+ int off = 1;
+ int size = 0;
+ while (true) {
+ char car = buf[off++];
+ if (car == ')') {
+ break;
+ } else if (car == 'L') {
+ while (buf[off++] != ';') {
+ }
+ ++size;
+ } else if (car != '[') {
+ ++size;
+ }
+ }
+ Type[] args = new Type[size];
+ off = 1;
+ size = 0;
+ while (buf[off] != ')') {
+ args[size] = getType(buf, off);
+ off += args[size].len + (args[size].sort == OBJECT ? 2 : 0);
+ size += 1;
+ }
+ return args;
+ }
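+
+    // Worked example: getArgumentTypes("(I[JLjava/lang/String;)V") should return
+    // three types: INT_TYPE, the array type "[J" and the object type
+    // "java/lang/String"; the '[' and the 'L'...';' of an object descriptor are
+    // consumed without counting extra arguments.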
+
+ /**
+ * Returns the Java types corresponding to the argument types of the given
+ * method.
+ *
+ * @param method a method.
+ * @return the Java types corresponding to the argument types of the given
+ * method.
+ */
+ public static Type[] getArgumentTypes(final Method method) {
+ Class<?>[] classes = method.getParameterTypes();
+ Type[] types = new Type[classes.length];
+ for (int i = classes.length - 1; i >= 0; --i) {
+ types[i] = getType(classes[i]);
+ }
+ return types;
+ }
+
+ /**
+ * Returns the Java type corresponding to the return type of the given
+ * method descriptor.
+ *
+ * @param methodDescriptor a method descriptor.
+ * @return the Java type corresponding to the return type of the given
+ * method descriptor.
+ */
+ public static Type getReturnType(final String methodDescriptor) {
+ char[] buf = methodDescriptor.toCharArray();
+ return getType(buf, methodDescriptor.indexOf(')') + 1);
+ }
+
+ /**
+ * Returns the Java type corresponding to the return type of the given
+ * method.
+ *
+ * @param method a method.
+ * @return the Java type corresponding to the return type of the given
+ * method.
+ */
+ public static Type getReturnType(final Method method) {
+ return getType(method.getReturnType());
+ }
+
+ /**
+ * Computes the size of the arguments and of the return value of a method.
+ *
+ * @param desc the descriptor of a method.
+ * @return the size of the arguments of the method (plus one for the
+ * implicit this argument), argSize, and the size of its return
+ * value, retSize, packed into a single int i =
+ * <tt>(argSize << 2) | retSize</tt> (argSize is therefore equal
+ * to <tt>i >> 2</tt>, and retSize to <tt>i & 0x03</tt>).
+ */
+ public static int getArgumentsAndReturnSizes(final String desc) {
+ int n = 1;
+ int c = 1;
+ while (true) {
+ char car = desc.charAt(c++);
+ if (car == ')') {
+ car = desc.charAt(c);
+ return n << 2
+ | (car == 'V' ? 0 : (car == 'D' || car == 'J' ? 2 : 1));
+ } else if (car == 'L') {
+ while (desc.charAt(c++) != ';') {
+ }
+ n += 1;
+ } else if (car == '[') {
+ while ((car = desc.charAt(c)) == '[') {
+ ++c;
+ }
+ if (car == 'D' || car == 'J') {
+ n -= 1;
+ }
+ } else if (car == 'D' || car == 'J') {
+ n += 2;
+ } else {
+ n += 1;
+ }
+ }
+ }
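+
+    // Worked example of the packing described above: for desc = "(IJ)D", n starts
+    // at 1 (the implicit this), 'I' adds 1 and 'J' adds 2, so n = 4; the return
+    // type 'D' has size 2, giving (4 << 2) | 2 = 18, i.e. argSize = 18 >> 2 = 4
+    // and retSize = 18 & 0x03 = 2.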
+
+ /**
+ * Returns the Java type corresponding to the given type descriptor. For
+ * method descriptors, buf is supposed to contain nothing more than the
+ * descriptor itself.
+ *
+ * @param buf a buffer containing a type descriptor.
+ * @param off the offset of this descriptor in the previous buffer.
+ * @return the Java type corresponding to the given type descriptor.
+ */
+ private static Type getType(final char[] buf, final int off) {
+ int len;
+ switch (buf[off]) {
+ case 'V':
+ return VOID_TYPE;
+ case 'Z':
+ return BOOLEAN_TYPE;
+ case 'C':
+ return CHAR_TYPE;
+ case 'B':
+ return BYTE_TYPE;
+ case 'S':
+ return SHORT_TYPE;
+ case 'I':
+ return INT_TYPE;
+ case 'F':
+ return FLOAT_TYPE;
+ case 'J':
+ return LONG_TYPE;
+ case 'D':
+ return DOUBLE_TYPE;
+ case '[':
+ len = 1;
+ while (buf[off + len] == '[') {
+ ++len;
+ }
+ if (buf[off + len] == 'L') {
+ ++len;
+ while (buf[off + len] != ';') {
+ ++len;
+ }
+ }
+ return new Type(ARRAY, buf, off, len + 1);
+ case 'L':
+ len = 1;
+ while (buf[off + len] != ';') {
+ ++len;
+ }
+ return new Type(OBJECT, buf, off + 1, len - 1);
+ // case '(':
+ default:
+ return new Type(METHOD, buf, 0, buf.length);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Accessors
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the sort of this Java type.
+ *
+ * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN},
+ * {@link #CHAR CHAR}, {@link #BYTE BYTE}, {@link #SHORT SHORT},
+ * {@link #INT INT}, {@link #FLOAT FLOAT}, {@link #LONG LONG},
+ * {@link #DOUBLE DOUBLE}, {@link #ARRAY ARRAY},
+ * {@link #OBJECT OBJECT} or {@link #METHOD METHOD}.
+ */
+ public int getSort() {
+ return sort;
+ }
+
+ /**
+ * Returns the number of dimensions of this array type. This method should
+ * only be used for an array type.
+ *
+ * @return the number of dimensions of this array type.
+ */
+ public int getDimensions() {
+ int i = 1;
+ while (buf[off + i] == '[') {
+ ++i;
+ }
+ return i;
+ }
+
+ /**
+ * Returns the type of the elements of this array type. This method should
+ * only be used for an array type.
+ *
+     * @return the type of the elements of this array type.
+ */
+ public Type getElementType() {
+ return getType(buf, off + getDimensions());
+ }
+
+ /**
+ * Returns the binary name of the class corresponding to this type. This
+ * method must not be used on method types.
+ *
+ * @return the binary name of the class corresponding to this type.
+ */
+ public String getClassName() {
+ switch (sort) {
+ case VOID:
+ return "void";
+ case BOOLEAN:
+ return "boolean";
+ case CHAR:
+ return "char";
+ case BYTE:
+ return "byte";
+ case SHORT:
+ return "short";
+ case INT:
+ return "int";
+ case FLOAT:
+ return "float";
+ case LONG:
+ return "long";
+ case DOUBLE:
+ return "double";
+ case ARRAY:
+ StringBuffer b = new StringBuffer(getElementType().getClassName());
+ for (int i = getDimensions(); i > 0; --i) {
+ b.append("[]");
+ }
+ return b.toString();
+ case OBJECT:
+ return new String(buf, off, len).replace('/', '.');
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Returns the internal name of the class corresponding to this object or
+     * array type. The internal name of a class is its fully qualified name (as
+     * returned by Class.getName(), with '.' replaced by '/'). This method
+ * should only be used for an object or array type.
+ *
+ * @return the internal name of the class corresponding to this object type.
+ */
+ public String getInternalName() {
+ return new String(buf, off, len);
+ }
+
+ /**
+ * Returns the argument types of methods of this type. This method should
+ * only be used for method types.
+ *
+ * @return the argument types of methods of this type.
+ */
+ public Type[] getArgumentTypes() {
+ return getArgumentTypes(getDescriptor());
+ }
+
+ /**
+ * Returns the return type of methods of this type. This method should only
+ * be used for method types.
+ *
+ * @return the return type of methods of this type.
+ */
+ public Type getReturnType() {
+ return getReturnType(getDescriptor());
+ }
+
+ /**
+ * Returns the size of the arguments and of the return value of methods of
+ * this type. This method should only be used for method types.
+ *
+ * @return the size of the arguments (plus one for the implicit this
+ * argument), argSize, and the size of the return value, retSize,
+ * packed into a single int i = <tt>(argSize << 2) | retSize</tt>
+ * (argSize is therefore equal to <tt>i >> 2</tt>, and retSize to
+ * <tt>i & 0x03</tt>).
+ */
+ public int getArgumentsAndReturnSizes() {
+ return getArgumentsAndReturnSizes(getDescriptor());
+ }
+
+ // ------------------------------------------------------------------------
+ // Conversion to type descriptors
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the descriptor corresponding to this Java type.
+ *
+ * @return the descriptor corresponding to this Java type.
+ */
+ public String getDescriptor() {
+ StringBuffer buf = new StringBuffer();
+ getDescriptor(buf);
+ return buf.toString();
+ }
+
+ /**
+ * Returns the descriptor corresponding to the given argument and return
+ * types.
+ *
+ * @param returnType the return type of the method.
+ * @param argumentTypes the argument types of the method.
+ * @return the descriptor corresponding to the given argument and return
+ * types.
+ */
+ public static String getMethodDescriptor(
+ final Type returnType,
+ final Type... argumentTypes)
+ {
+ StringBuffer buf = new StringBuffer();
+ buf.append('(');
+ for (int i = 0; i < argumentTypes.length; ++i) {
+ argumentTypes[i].getDescriptor(buf);
+ }
+ buf.append(')');
+ returnType.getDescriptor(buf);
+ return buf.toString();
+ }
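+
+    // Usage sketch: Type.getMethodDescriptor(Type.VOID_TYPE, Type.INT_TYPE,
+    //     Type.getObjectType("java/lang/String"))
+    // should return "(ILjava/lang/String;)V".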
+
+ /**
+ * Appends the descriptor corresponding to this Java type to the given
+ * string buffer.
+ *
+ * @param buf the string buffer to which the descriptor must be appended.
+ */
+ private void getDescriptor(final StringBuffer buf) {
+ if (this.buf == null) {
+ // descriptor is in byte 3 of 'off' for primitive types (buf == null)
+ buf.append((char) ((off & 0xFF000000) >>> 24));
+ } else if (sort == OBJECT) {
+ buf.append('L');
+ buf.append(this.buf, off, len);
+ buf.append(';');
+ } else { // sort == ARRAY || sort == METHOD
+ buf.append(this.buf, off, len);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Direct conversion from classes to type descriptors,
+ // without intermediate Type objects
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the internal name of the given class. The internal name of a
+ * class is its fully qualified name, as returned by Class.getName(), where
+ * '.' are replaced by '/'.
+ *
+ * @param c an object or array class.
+ * @return the internal name of the given class.
+ */
+ public static String getInternalName(final Class<?> c) {
+ return c.getName().replace('.', '/');
+ }
+
+ /**
+ * Returns the descriptor corresponding to the given Java type.
+ *
+ * @param c an object class, a primitive class or an array class.
+ * @return the descriptor corresponding to the given class.
+ */
+ public static String getDescriptor(final Class<?> c) {
+ StringBuffer buf = new StringBuffer();
+ getDescriptor(buf, c);
+ return buf.toString();
+ }
+
+ /**
+ * Returns the descriptor corresponding to the given constructor.
+ *
+ * @param c a {@link Constructor Constructor} object.
+ * @return the descriptor of the given constructor.
+ */
+ public static String getConstructorDescriptor(final Constructor<?> c) {
+ Class<?>[] parameters = c.getParameterTypes();
+ StringBuffer buf = new StringBuffer();
+ buf.append('(');
+ for (int i = 0; i < parameters.length; ++i) {
+ getDescriptor(buf, parameters[i]);
+ }
+ return buf.append(")V").toString();
+ }
+
+ /**
+ * Returns the descriptor corresponding to the given method.
+ *
+ * @param m a {@link Method Method} object.
+ * @return the descriptor of the given method.
+ */
+ public static String getMethodDescriptor(final Method m) {
+ Class<?>[] parameters = m.getParameterTypes();
+ StringBuffer buf = new StringBuffer();
+ buf.append('(');
+ for (int i = 0; i < parameters.length; ++i) {
+ getDescriptor(buf, parameters[i]);
+ }
+ buf.append(')');
+ getDescriptor(buf, m.getReturnType());
+ return buf.toString();
+ }
+
+ /**
+ * Appends the descriptor of the given class to the given string buffer.
+ *
+ * @param buf the string buffer to which the descriptor must be appended.
+ * @param c the class whose descriptor must be computed.
+ */
+ private static void getDescriptor(final StringBuffer buf, final Class<?> c) {
+ Class<?> d = c;
+ while (true) {
+ if (d.isPrimitive()) {
+ char car;
+ if (d == Integer.TYPE) {
+ car = 'I';
+ } else if (d == Void.TYPE) {
+ car = 'V';
+ } else if (d == Boolean.TYPE) {
+ car = 'Z';
+ } else if (d == Byte.TYPE) {
+ car = 'B';
+ } else if (d == Character.TYPE) {
+ car = 'C';
+ } else if (d == Short.TYPE) {
+ car = 'S';
+ } else if (d == Double.TYPE) {
+ car = 'D';
+ } else if (d == Float.TYPE) {
+ car = 'F';
+ } else /* if (d == Long.TYPE) */{
+ car = 'J';
+ }
+ buf.append(car);
+ return;
+ } else if (d.isArray()) {
+ buf.append('[');
+ d = d.getComponentType();
+ } else {
+ buf.append('L');
+ String name = d.getName();
+ int len = name.length();
+ for (int i = 0; i < len; ++i) {
+ char car = name.charAt(i);
+ buf.append(car == '.' ? '/' : car);
+ }
+ buf.append(';');
+ return;
+ }
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Corresponding size and opcodes
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the size of values of this type. This method must not be used for
+ * method types.
+ *
+ * @return the size of values of this type, i.e., 2 for <tt>long</tt> and
+ * <tt>double</tt>, 0 for <tt>void</tt> and 1 otherwise.
+ */
+ public int getSize() {
+ // the size is in byte 0 of 'off' for primitive types (buf == null)
+ return buf == null ? (off & 0xFF) : 1;
+ }
+
+ /**
+ * Returns a JVM instruction opcode adapted to this Java type. This method
+ * must not be used for method types.
+ *
+ * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
+ * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
+ * ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
+ * @return an opcode that is similar to the given opcode, but adapted to
+ * this Java type. For example, if this type is <tt>float</tt> and
+ * <tt>opcode</tt> is IRETURN, this method returns FRETURN.
+ */
+ public int getOpcode(final int opcode) {
+ if (opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE) {
+ // the offset for IALOAD or IASTORE is in byte 1 of 'off' for
+ // primitive types (buf == null)
+ return opcode + (buf == null ? (off & 0xFF00) >> 8 : 4);
+ } else {
+ // the offset for other instructions is in byte 2 of 'off' for
+ // primitive types (buf == null)
+ return opcode + (buf == null ? (off & 0xFF0000) >> 16 : 4);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Equals, hashCode and toString
+ // ------------------------------------------------------------------------
+
+ /**
+ * Tests if the given object is equal to this type.
+ *
+ * @param o the object to be compared to this type.
+ * @return <tt>true</tt> if the given object is equal to this type.
+ */
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof Type)) {
+ return false;
+ }
+ Type t = (Type) o;
+ if (sort != t.sort) {
+ return false;
+ }
+ if (sort >= ARRAY) {
+ if (len != t.len) {
+ return false;
+ }
+ for (int i = off, j = t.off, end = i + len; i < end; i++, j++) {
+ if (buf[i] != t.buf[j]) {
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Returns a hash code value for this type.
+ *
+ * @return a hash code value for this type.
+ */
+ @Override
+ public int hashCode() {
+ int hc = 13 * sort;
+ if (sort >= ARRAY) {
+ for (int i = off, end = i + len; i < end; i++) {
+ hc = 17 * (hc + buf[i]);
+ }
+ }
+ return hc;
+ }
+
+ /**
+ * Returns a string representation of this type.
+ *
+ * @return the descriptor of this type.
+ */
+ @Override
+ public String toString() {
+ return getDescriptor();
+ }
+}
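
A minimal usage sketch of the descriptor helpers defined in Type above; the classes and members used are arbitrary examples, and the BOOLEAN_TYPE/INT_TYPE constants and getType(Class) factory are assumed to be present earlier in this file, as in upstream ASM:

import scala.tools.asm.Type;

public class TypeDescriptorDemo {
    public static void main(String[] args) throws Exception {
        // "(Ljava/lang/String;I)Z" : boolean f(String, int)
        String methodDesc = Type.getMethodDescriptor(
                Type.BOOLEAN_TYPE,
                Type.getType(String.class),
                Type.INT_TYPE);

        // "java/lang/Thread" : '.' replaced by '/'
        String internal = Type.getInternalName(Thread.class);

        // "[Ljava/lang/Object;" : array classes keep their JVM form
        String arrayDesc = Type.getDescriptor(Object[].class);

        // "(Ljava/lang/String;)V" : constructors always return void
        String ctorDesc = Type.getConstructorDescriptor(
                Thread.class.getConstructor(String.class));

        System.out.println(methodDesc + " " + internal + " "
                + arrayDesc + " " + ctorDesc);
    }
}
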
diff --git a/src/asm/scala/tools/asm/signature/SignatureReader.java b/src/asm/scala/tools/asm/signature/SignatureReader.java
new file mode 100644
index 0000000000..22e6427e63
--- /dev/null
+++ b/src/asm/scala/tools/asm/signature/SignatureReader.java
@@ -0,0 +1,229 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.signature;
+
+/**
+ * A type signature parser to make a signature visitor visit an existing
+ * signature.
+ *
+ * @author Thomas Hallgren
+ * @author Eric Bruneton
+ */
+public class SignatureReader {
+
+ /**
+ * The signature to be read.
+ */
+ private final String signature;
+
+ /**
+ * Constructs a {@link SignatureReader} for the given signature.
+ *
+ * @param signature A <i>ClassSignature</i>, <i>MethodTypeSignature</i>,
+ * or <i>FieldTypeSignature</i>.
+ */
+ public SignatureReader(final String signature) {
+ this.signature = signature;
+ }
+
+ /**
+ * Makes the given visitor visit the signature of this
+ * {@link SignatureReader}. This signature is the one specified in the
+ * constructor (see {@link #SignatureReader(String) SignatureReader}). This
+ * method is intended to be called on a {@link SignatureReader} that was
+ * created using a <i>ClassSignature</i> (such as the
+ * <code>signature</code> parameter of the
+ * {@link org.objectweb.asm.ClassVisitor#visit ClassVisitor.visit} method)
+ * or a <i>MethodTypeSignature</i> (such as the <code>signature</code>
+ * parameter of the
+ * {@link org.objectweb.asm.ClassVisitor#visitMethod ClassVisitor.visitMethod}
+ * method).
+ *
+ * @param v the visitor that must visit this signature.
+ */
+ public void accept(final SignatureVisitor v) {
+ String signature = this.signature;
+ int len = signature.length();
+ int pos;
+ char c;
+
+ if (signature.charAt(0) == '<') {
+ pos = 2;
+ do {
+ int end = signature.indexOf(':', pos);
+ v.visitFormalTypeParameter(signature.substring(pos - 1, end));
+ pos = end + 1;
+
+ c = signature.charAt(pos);
+ if (c == 'L' || c == '[' || c == 'T') {
+ pos = parseType(signature, pos, v.visitClassBound());
+ }
+
+ while ((c = signature.charAt(pos++)) == ':') {
+ pos = parseType(signature, pos, v.visitInterfaceBound());
+ }
+ } while (c != '>');
+ } else {
+ pos = 0;
+ }
+
+ if (signature.charAt(pos) == '(') {
+ pos++;
+ while (signature.charAt(pos) != ')') {
+ pos = parseType(signature, pos, v.visitParameterType());
+ }
+ pos = parseType(signature, pos + 1, v.visitReturnType());
+ while (pos < len) {
+ pos = parseType(signature, pos + 1, v.visitExceptionType());
+ }
+ } else {
+ pos = parseType(signature, pos, v.visitSuperclass());
+ while (pos < len) {
+ pos = parseType(signature, pos, v.visitInterface());
+ }
+ }
+ }
+
+ /**
+ * Makes the given visitor visit the signature of this
+ * {@link SignatureReader}. This signature is the one specified in the
+ * constructor (see {@link #SignatureReader(String) SignatureReader}). This
+ * method is intended to be called on a {@link SignatureReader} that was
+ * created using a <i>FieldTypeSignature</i>, such as the
+ * <code>signature</code> parameter of the
+ * {@link org.objectweb.asm.ClassVisitor#visitField
+ * ClassVisitor.visitField} or {@link
+ * org.objectweb.asm.MethodVisitor#visitLocalVariable
+ * MethodVisitor.visitLocalVariable} methods.
+ *
+ * @param v the visitor that must visit this signature.
+ */
+ public void acceptType(final SignatureVisitor v) {
+ parseType(this.signature, 0, v);
+ }
+
+ /**
+ * Parses a field type signature and makes the given visitor visit it.
+ *
+ * @param signature a string containing the signature that must be parsed.
+ * @param pos index of the first character of the signature to be parsed.
+ * @param v the visitor that must visit this signature.
+ * @return the index of the first character after the parsed signature.
+ */
+ private static int parseType(
+ final String signature,
+ int pos,
+ final SignatureVisitor v)
+ {
+ char c;
+ int start, end;
+ boolean visited, inner;
+ String name;
+
+ switch (c = signature.charAt(pos++)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ case 'V':
+ v.visitBaseType(c);
+ return pos;
+
+ case '[':
+ return parseType(signature, pos, v.visitArrayType());
+
+ case 'T':
+ end = signature.indexOf(';', pos);
+ v.visitTypeVariable(signature.substring(pos, end));
+ return end + 1;
+
+ default: // case 'L':
+ start = pos;
+ visited = false;
+ inner = false;
+ for (;;) {
+ switch (c = signature.charAt(pos++)) {
+ case '.':
+ case ';':
+ if (!visited) {
+ name = signature.substring(start, pos - 1);
+ if (inner) {
+ v.visitInnerClassType(name);
+ } else {
+ v.visitClassType(name);
+ }
+ }
+ if (c == ';') {
+ v.visitEnd();
+ return pos;
+ }
+ start = pos;
+ visited = false;
+ inner = true;
+ break;
+
+ case '<':
+ name = signature.substring(start, pos - 1);
+ if (inner) {
+ v.visitInnerClassType(name);
+ } else {
+ v.visitClassType(name);
+ }
+ visited = true;
+ top: for (;;) {
+ switch (c = signature.charAt(pos)) {
+ case '>':
+ break top;
+ case '*':
+ ++pos;
+ v.visitTypeArgument();
+ break;
+ case '+':
+ case '-':
+ pos = parseType(signature,
+ pos + 1,
+ v.visitTypeArgument(c));
+ break;
+ default:
+ pos = parseType(signature,
+ pos,
+ v.visitTypeArgument('='));
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+}
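
A quick round-trip sketch for the reader above, pairing it with the SignatureWriter added below; the generic class signature is just an illustrative example:

import scala.tools.asm.signature.SignatureReader;
import scala.tools.asm.signature.SignatureWriter;

public class SignatureRoundTrip {
    public static void main(String[] args) {
        // Signature of: class Box<E extends Comparable<E>> implements Iterable<E>
        String sig = "<E::Ljava/lang/Comparable<TE;>;>"
                + "Ljava/lang/Object;Ljava/lang/Iterable<TE;>;";

        // The writer rebuilds the string from the visit events, so reading
        // and then writing should reproduce the original signature.
        SignatureWriter writer = new SignatureWriter();
        new SignatureReader(sig).accept(writer);
        System.out.println(sig.equals(writer.toString())); // prints true
    }
}
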
diff --git a/src/asm/scala/tools/asm/signature/SignatureVisitor.java b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
new file mode 100644
index 0000000000..2fc364e374
--- /dev/null
+++ b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
@@ -0,0 +1,228 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.signature;
+
+import scala.tools.asm.Opcodes;
+
+/**
+ * A visitor to visit a generic signature. The methods of this interface must be
+ * called in one of the three following orders (the last one is the only valid
+ * order for a {@link SignatureVisitor} that is returned by a method of this
+ * interface): <ul> <li><i>ClassSignature</i> = (
+ * <tt>visitFormalTypeParameter</tt>
+ * <tt>visitClassBound</tt>?
+ * <tt>visitInterfaceBound</tt>* )* ( <tt>visitSuperclass</tt>
+ * <tt>visitInterface</tt>* )</li>
+ * <li><i>MethodSignature</i> = ( <tt>visitFormalTypeParameter</tt>
+ * <tt>visitClassBound</tt>?
+ * <tt>visitInterfaceBound</tt>* )* ( <tt>visitParameterType</tt>*
+ * <tt>visitReturnType</tt>
+ * <tt>visitExceptionType</tt>* )</li> <li><i>TypeSignature</i> =
+ * <tt>visitBaseType</tt> | <tt>visitTypeVariable</tt> |
+ * <tt>visitArrayType</tt> | (
+ * <tt>visitClassType</tt> <tt>visitTypeArgument</tt>* (
+ * <tt>visitInnerClassType</tt> <tt>visitTypeArgument</tt>* )*
+ * <tt>visitEnd</tt> )</li> </ul>
+ *
+ * @author Thomas Hallgren
+ * @author Eric Bruneton
+ */
+public abstract class SignatureVisitor {
+
+ /**
+ * Wildcard for an "extends" type argument.
+ */
+ public final static char EXTENDS = '+';
+
+ /**
+ * Wildcard for a "super" type argument.
+ */
+ public final static char SUPER = '-';
+
+ /**
+ * Wildcard for a normal type argument.
+ */
+ public final static char INSTANCEOF = '=';
+
+ /**
+ * The ASM API version implemented by this visitor. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * Constructs a new {@link SignatureVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public SignatureVisitor(final int api) {
+ this.api = api;
+ }
+
+ /**
+ * Visits a formal type parameter.
+ *
+ * @param name the name of the formal parameter.
+ */
+ public void visitFormalTypeParameter(String name) {
+ }
+
+ /**
+ * Visits the class bound of the last visited formal type parameter.
+ *
+ * @return a non null visitor to visit the signature of the class bound.
+ */
+ public SignatureVisitor visitClassBound() {
+ return this;
+ }
+
+ /**
+ * Visits an interface bound of the last visited formal type parameter.
+ *
+ * @return a non null visitor to visit the signature of the interface bound.
+ */
+ public SignatureVisitor visitInterfaceBound() {
+ return this;
+ }
+
+ /**
+ * Visits the type of the super class.
+ *
+ * @return a non null visitor to visit the signature of the super class
+ * type.
+ */
+ public SignatureVisitor visitSuperclass() {
+ return this;
+ }
+
+ /**
+ * Visits the type of an interface implemented by the class.
+ *
+ * @return a non null visitor to visit the signature of the interface type.
+ */
+ public SignatureVisitor visitInterface() {
+ return this;
+ }
+
+ /**
+ * Visits the type of a method parameter.
+ *
+ * @return a non null visitor to visit the signature of the parameter type.
+ */
+ public SignatureVisitor visitParameterType() {
+ return this;
+ }
+
+ /**
+ * Visits the return type of the method.
+ *
+ * @return a non null visitor to visit the signature of the return type.
+ */
+ public SignatureVisitor visitReturnType() {
+ return this;
+ }
+
+ /**
+ * Visits the type of a method exception.
+ *
+ * @return a non null visitor to visit the signature of the exception type.
+ */
+ public SignatureVisitor visitExceptionType() {
+ return this;
+ }
+
+ /**
+ * Visits a signature corresponding to a primitive type.
+ *
+ * @param descriptor the descriptor of the primitive type, or 'V' for
+ * <tt>void</tt>.
+ */
+ public void visitBaseType(char descriptor) {
+ }
+
+ /**
+ * Visits a signature corresponding to a type variable.
+ *
+ * @param name the name of the type variable.
+ */
+ public void visitTypeVariable(String name) {
+ }
+
+ /**
+ * Visits a signature corresponding to an array type.
+ *
+ * @return a non null visitor to visit the signature of the array element
+ * type.
+ */
+ public SignatureVisitor visitArrayType() {
+ return this;
+ }
+
+ /**
+ * Starts the visit of a signature corresponding to a class or interface
+ * type.
+ *
+ * @param name the internal name of the class or interface.
+ */
+ public void visitClassType(String name) {
+ }
+
+ /**
+ * Visits an inner class.
+ *
+ * @param name the local name of the inner class in its enclosing class.
+ */
+ public void visitInnerClassType(String name) {
+ }
+
+ /**
+ * Visits an unbounded type argument of the last visited class or inner
+ * class type.
+ */
+ public void visitTypeArgument() {
+ }
+
+ /**
+ * Visits a type argument of the last visited class or inner class type.
+ *
+ * @param wildcard '+', '-' or '='.
+ * @return a non null visitor to visit the signature of the type argument.
+ */
+ public SignatureVisitor visitTypeArgument(char wildcard) {
+ return this;
+ }
+
+ /**
+ * Ends the visit of a signature corresponding to a class or interface type.
+ */
+ public void visitEnd() {
+ }
+}
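
Since every method here has a no-op (or this-returning) default implementation, a subclass only needs to override the events it cares about. A hypothetical collector of class types referenced by a signature:

import java.util.ArrayList;
import java.util.List;

import scala.tools.asm.Opcodes;
import scala.tools.asm.signature.SignatureReader;
import scala.tools.asm.signature.SignatureVisitor;

public class ClassTypeCollector extends SignatureVisitor {

    // Internal names of the class types mentioned in the signature.
    public final List<String> classTypes = new ArrayList<String>();

    public ClassTypeCollector() {
        super(Opcodes.ASM4);
    }

    @Override
    public void visitClassType(String name) {
        classTypes.add(name);
    }

    public static void main(String[] args) {
        ClassTypeCollector c = new ClassTypeCollector();
        new SignatureReader("Ljava/util/Map<TK;Ljava/util/List<TV;>;>;").acceptType(c);
        System.out.println(c.classTypes); // [java/util/Map, java/util/List]
    }
}
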
diff --git a/src/asm/scala/tools/asm/signature/SignatureWriter.java b/src/asm/scala/tools/asm/signature/SignatureWriter.java
new file mode 100644
index 0000000000..a59fdfde2b
--- /dev/null
+++ b/src/asm/scala/tools/asm/signature/SignatureWriter.java
@@ -0,0 +1,227 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.signature;
+
+import scala.tools.asm.Opcodes;
+
+/**
+ * A signature visitor that generates signatures in string format.
+ *
+ * @author Thomas Hallgren
+ * @author Eric Bruneton
+ */
+public class SignatureWriter extends SignatureVisitor {
+
+ /**
+ * Buffer used to construct the signature.
+ */
+ private final StringBuffer buf = new StringBuffer();
+
+ /**
+ * Indicates if the signature contains formal type parameters.
+ */
+ private boolean hasFormals;
+
+ /**
+ * Indicates if the signature contains method parameter types.
+ */
+ private boolean hasParameters;
+
+ /**
+ * Stack used to keep track of class types that have arguments. Each element
+ * of this stack is a boolean encoded in one bit. The top of the stack is
+ * the lowest order bit. Pushing false = *2, pushing true = *2+1, popping =
+ * /2.
+ */
+ private int argumentStack;
+
+ /**
+ * Constructs a new {@link SignatureWriter} object.
+ */
+ public SignatureWriter() {
+ super(Opcodes.ASM4);
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the SignatureVisitor interface
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visitFormalTypeParameter(final String name) {
+ if (!hasFormals) {
+ hasFormals = true;
+ buf.append('<');
+ }
+ buf.append(name);
+ buf.append(':');
+ }
+
+ @Override
+ public SignatureVisitor visitClassBound() {
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitInterfaceBound() {
+ buf.append(':');
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitSuperclass() {
+ endFormals();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitInterface() {
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitParameterType() {
+ endFormals();
+ if (!hasParameters) {
+ hasParameters = true;
+ buf.append('(');
+ }
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitReturnType() {
+ endFormals();
+ if (!hasParameters) {
+ buf.append('(');
+ }
+ buf.append(')');
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitExceptionType() {
+ buf.append('^');
+ return this;
+ }
+
+ @Override
+ public void visitBaseType(final char descriptor) {
+ buf.append(descriptor);
+ }
+
+ @Override
+ public void visitTypeVariable(final String name) {
+ buf.append('T');
+ buf.append(name);
+ buf.append(';');
+ }
+
+ @Override
+ public SignatureVisitor visitArrayType() {
+ buf.append('[');
+ return this;
+ }
+
+ @Override
+ public void visitClassType(final String name) {
+ buf.append('L');
+ buf.append(name);
+ argumentStack *= 2;
+ }
+
+ @Override
+ public void visitInnerClassType(final String name) {
+ endArguments();
+ buf.append('.');
+ buf.append(name);
+ argumentStack *= 2;
+ }
+
+ @Override
+ public void visitTypeArgument() {
+ if (argumentStack % 2 == 0) {
+ ++argumentStack;
+ buf.append('<');
+ }
+ buf.append('*');
+ }
+
+ @Override
+ public SignatureVisitor visitTypeArgument(final char wildcard) {
+ if (argumentStack % 2 == 0) {
+ ++argumentStack;
+ buf.append('<');
+ }
+ if (wildcard != '=') {
+ buf.append(wildcard);
+ }
+ return this;
+ }
+
+ @Override
+ public void visitEnd() {
+ endArguments();
+ buf.append(';');
+ }
+
+ /**
+ * Returns the signature that was built by this signature writer.
+ *
+ * @return the signature that was built by this signature writer.
+ */
+ @Override
+ public String toString() {
+ return buf.toString();
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Ends the formal type parameters section of the signature.
+ */
+ private void endFormals() {
+ if (hasFormals) {
+ hasFormals = false;
+ buf.append('>');
+ }
+ }
+
+ /**
+ * Ends the type arguments of a class or inner class type.
+ */
+ private void endArguments() {
+ if (argumentStack % 2 != 0) {
+ buf.append('>');
+ }
+ argumentStack /= 2;
+ }
+} \ No newline at end of file
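
Going the other way, a sketch of emitting a class signature event by event with the writer above; the class in question, Box<E> implements Iterable<E>, is only an example:

import scala.tools.asm.signature.SignatureVisitor;
import scala.tools.asm.signature.SignatureWriter;

public class BuildSignature {
    public static void main(String[] args) {
        SignatureWriter sw = new SignatureWriter();

        // <E:Ljava/lang/Object;>
        sw.visitFormalTypeParameter("E");
        SignatureVisitor bound = sw.visitClassBound();
        bound.visitClassType("java/lang/Object");
        bound.visitEnd();

        // Ljava/lang/Object;
        SignatureVisitor sup = sw.visitSuperclass();
        sup.visitClassType("java/lang/Object");
        sup.visitEnd();

        // Ljava/lang/Iterable<TE;>;
        SignatureVisitor itf = sw.visitInterface();
        itf.visitClassType("java/lang/Iterable");
        itf.visitTypeArgument('=').visitTypeVariable("E");
        itf.visitEnd();

        // <E:Ljava/lang/Object;>Ljava/lang/Object;Ljava/lang/Iterable<TE;>;
        System.out.println(sw);
    }
}
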
diff --git a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
new file mode 100644
index 0000000000..471f842ffc
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
@@ -0,0 +1,238 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a bytecode instruction. <i>An instruction can appear
+ * at most once in at most one {@link InsnList} at a time</i>.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class AbstractInsnNode {
+
+ /**
+ * The type of {@link InsnNode} instructions.
+ */
+ public static final int INSN = 0;
+
+ /**
+ * The type of {@link IntInsnNode} instructions.
+ */
+ public static final int INT_INSN = 1;
+
+ /**
+ * The type of {@link VarInsnNode} instructions.
+ */
+ public static final int VAR_INSN = 2;
+
+ /**
+ * The type of {@link TypeInsnNode} instructions.
+ */
+ public static final int TYPE_INSN = 3;
+
+ /**
+ * The type of {@link FieldInsnNode} instructions.
+ */
+ public static final int FIELD_INSN = 4;
+
+ /**
+ * The type of {@link MethodInsnNode} instructions.
+ */
+ public static final int METHOD_INSN = 5;
+
+ /**
+ * The type of {@link InvokeDynamicInsnNode} instructions.
+ */
+ public static final int INVOKE_DYNAMIC_INSN = 6;
+
+ /**
+ * The type of {@link JumpInsnNode} instructions.
+ */
+ public static final int JUMP_INSN = 7;
+
+ /**
+ * The type of {@link LabelNode} "instructions".
+ */
+ public static final int LABEL = 8;
+
+ /**
+ * The type of {@link LdcInsnNode} instructions.
+ */
+ public static final int LDC_INSN = 9;
+
+ /**
+ * The type of {@link IincInsnNode} instructions.
+ */
+ public static final int IINC_INSN = 10;
+
+ /**
+ * The type of {@link TableSwitchInsnNode} instructions.
+ */
+ public static final int TABLESWITCH_INSN = 11;
+
+ /**
+ * The type of {@link LookupSwitchInsnNode} instructions.
+ */
+ public static final int LOOKUPSWITCH_INSN = 12;
+
+ /**
+ * The type of {@link MultiANewArrayInsnNode} instructions.
+ */
+ public static final int MULTIANEWARRAY_INSN = 13;
+
+ /**
+ * The type of {@link FrameNode} "instructions".
+ */
+ public static final int FRAME = 14;
+
+ /**
+ * The type of {@link LineNumberNode} "instructions".
+ */
+ public static final int LINE = 15;
+
+ /**
+ * The opcode of this instruction.
+ */
+ protected int opcode;
+
+ /**
+ * Previous instruction in the list to which this instruction belongs.
+ */
+ AbstractInsnNode prev;
+
+ /**
+ * Next instruction in the list to which this instruction belongs.
+ */
+ AbstractInsnNode next;
+
+ /**
+ * Index of this instruction in the list to which it belongs. The value of
+ * this field is correct only when {@link InsnList#cache} is not null. A
+ * value of -1 indicates that this instruction does not belong to any
+ * {@link InsnList}.
+ */
+ int index;
+
+ /**
+ * Constructs a new {@link AbstractInsnNode}.
+ *
+ * @param opcode the opcode of the instruction to be constructed.
+ */
+ protected AbstractInsnNode(final int opcode) {
+ this.opcode = opcode;
+ this.index = -1;
+ }
+
+ /**
+ * Returns the opcode of this instruction.
+ *
+ * @return the opcode of this instruction.
+ */
+ public int getOpcode() {
+ return opcode;
+ }
+
+ /**
+ * Returns the type of this instruction.
+ *
+ * @return the type of this instruction, i.e. one of the constants defined in
+ * this class.
+ */
+ public abstract int getType();
+
+ /**
+ * Returns the previous instruction in the list to which this instruction
+ * belongs, if any.
+ *
+ * @return the previous instruction in the list to which this instruction
+ * belongs, if any. May be <tt>null</tt>.
+ */
+ public AbstractInsnNode getPrevious() {
+ return prev;
+ }
+
+ /**
+ * Returns the next instruction in the list to which this instruction
+ * belongs, if any.
+ *
+ * @return the next instruction in the list to which this instruction
+ * belongs, if any. May be <tt>null</tt>.
+ */
+ public AbstractInsnNode getNext() {
+ return next;
+ }
+
+ /**
+ * Makes the given code visitor visit this instruction.
+ *
+ * @param cv a code visitor.
+ */
+ public abstract void accept(final MethodVisitor cv);
+
+ /**
+ * Returns a copy of this instruction.
+ *
+ * @param labels a map from LabelNodes to cloned LabelNodes.
+ * @return a copy of this instruction. The returned instruction does not
+ * belong to any {@link InsnList}.
+ */
+ public abstract AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels);
+
+ /**
+ * Returns the clone of the given label.
+ *
+ * @param label a label.
+ * @param map a map from LabelNodes to cloned LabelNodes.
+ * @return the clone of the given label.
+ */
+ static LabelNode clone(final LabelNode label, final Map<LabelNode, LabelNode> map) {
+ return map.get(label);
+ }
+
+ /**
+ * Returns the clones of the given labels.
+ *
+ * @param labels a list of labels.
+ * @param map a map from LabelNodes to cloned LabelNodes.
+ * @return the clones of the given labels.
+ */
+ static LabelNode[] clone(final List<LabelNode> labels, final Map<LabelNode, LabelNode> map) {
+ LabelNode[] clones = new LabelNode[labels.size()];
+ for (int i = 0; i < clones.length; ++i) {
+ clones[i] = map.get(labels.get(i));
+ }
+ return clones;
+ }
+}
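
A sketch of walking the doubly linked instruction list that these nodes form; it assumes the MethodNode, InsnList, InsnNode and VarInsnNode classes added elsewhere in this patch, with their usual ASM 4 API:

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.AbstractInsnNode;
import scala.tools.asm.tree.InsnNode;
import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.VarInsnNode;

public class WalkInsns {
    public static void main(String[] args) {
        // Body of a hypothetical 'int inc(int x) { return x + 1; }'
        MethodNode mn = new MethodNode(Opcodes.ACC_PUBLIC, "inc", "(I)I", null, null);
        mn.instructions.add(new VarInsnNode(Opcodes.ILOAD, 1));
        mn.instructions.add(new InsnNode(Opcodes.ICONST_1));
        mn.instructions.add(new InsnNode(Opcodes.IADD));
        mn.instructions.add(new InsnNode(Opcodes.IRETURN));

        // Each node knows its type constant and its opcode.
        for (AbstractInsnNode insn = mn.instructions.getFirst();
                insn != null; insn = insn.getNext()) {
            System.out.println(insn.getType() + " " + insn.getOpcode());
        }
    }
}
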
diff --git a/src/asm/scala/tools/asm/tree/AnnotationNode.java b/src/asm/scala/tools/asm/tree/AnnotationNode.java
new file mode 100644
index 0000000000..9f132550e6
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/AnnotationNode.java
@@ -0,0 +1,224 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents an annotation.
+ *
+ * @author Eric Bruneton
+ */
+public class AnnotationNode extends AnnotationVisitor {
+
+ /**
+ * The class descriptor of the annotation class.
+ */
+ public String desc;
+
+ /**
+ * The name value pairs of this annotation. Each name value pair is stored
+ * as two consecutive elements in the list. The name is a {@link String},
+ * and the value may be a {@link Byte}, {@link Boolean}, {@link Character},
+ * {@link Short}, {@link Integer}, {@link Long}, {@link Float},
+ * {@link Double}, {@link String} or {@link org.objectweb.asm.Type}, or a
+ * two-element String array (for enumeration values), an
+ * {@link AnnotationNode}, or a {@link List} of values of one of the
+ * preceding types. The list may be <tt>null</tt> if there is no name
+ * value pair.
+ */
+ public List<Object> values;
+
+ /**
+ * Constructs a new {@link AnnotationNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #AnnotationNode(int, String)} version.
+ *
+ * @param desc the class descriptor of the annotation class.
+ */
+ public AnnotationNode(final String desc) {
+ this(Opcodes.ASM4, desc);
+ }
+
+ /**
+ * Constructs a new {@link AnnotationNode}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param desc the class descriptor of the annotation class.
+ */
+ public AnnotationNode(final int api, final String desc) {
+ super(api);
+ this.desc = desc;
+ }
+
+ /**
+ * Constructs a new {@link AnnotationNode} to visit an array value.
+ *
+ * @param values where the visited values must be stored.
+ */
+ AnnotationNode(final List<Object> values) {
+ super(Opcodes.ASM4);
+ this.values = values;
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the AnnotationVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(final String name, final Object value) {
+ if (values == null) {
+ values = new ArrayList<Object>(this.desc != null ? 2 : 1);
+ }
+ if (this.desc != null) {
+ values.add(name);
+ }
+ values.add(value);
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ if (values == null) {
+ values = new ArrayList<Object>(this.desc != null ? 2 : 1);
+ }
+ if (this.desc != null) {
+ values.add(name);
+ }
+ values.add(new String[] { desc, value });
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ if (values == null) {
+ values = new ArrayList<Object>(this.desc != null ? 2 : 1);
+ }
+ if (this.desc != null) {
+ values.add(name);
+ }
+ AnnotationNode annotation = new AnnotationNode(desc);
+ values.add(annotation);
+ return annotation;
+ }
+
+ @Override
+ public AnnotationVisitor visitArray(final String name) {
+ if (values == null) {
+ values = new ArrayList<Object>(this.desc != null ? 2 : 1);
+ }
+ if (this.desc != null) {
+ values.add(name);
+ }
+ List<Object> array = new ArrayList<Object>();
+ values.add(array);
+ return new AnnotationNode(array);
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Accept methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that this annotation node is compatible with the given ASM API
+ * version. This method checks that this node, and all its nodes
+ * recursively, do not contain elements that were introduced in more recent
+ * versions of the ASM API than the given version.
+ *
+ * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ */
+ public void check(final int api) {
+ // nothing to do
+ }
+
+ /**
+ * Makes the given visitor visit this annotation.
+ *
+ * @param av an annotation visitor. May be <tt>null</tt>.
+ */
+ public void accept(final AnnotationVisitor av) {
+ if (av != null) {
+ if (values != null) {
+ for (int i = 0; i < values.size(); i += 2) {
+ String name = (String) values.get(i);
+ Object value = values.get(i + 1);
+ accept(av, name, value);
+ }
+ }
+ av.visitEnd();
+ }
+ }
+
+ /**
+ * Makes the given visitor visit a given annotation value.
+ *
+ * @param av an annotation visitor. May be <tt>null</tt>.
+ * @param name the value name.
+ * @param value the actual value.
+ */
+ static void accept(
+ final AnnotationVisitor av,
+ final String name,
+ final Object value)
+ {
+ if (av != null) {
+ if (value instanceof String[]) {
+ String[] typeconst = (String[]) value;
+ av.visitEnum(name, typeconst[0], typeconst[1]);
+ } else if (value instanceof AnnotationNode) {
+ AnnotationNode an = (AnnotationNode) value;
+ an.accept(av.visitAnnotation(name, an.desc));
+ } else if (value instanceof List) {
+ AnnotationVisitor v = av.visitArray(name);
+ List<?> array = (List<?>) value;
+ for (int j = 0; j < array.size(); ++j) {
+ accept(v, null, array.get(j));
+ }
+ v.visitEnd();
+ } else {
+ av.visit(name, value);
+ }
+ }
+ }
+}
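
A short sketch of how the flat values list described above ends up laid out; the annotation type com.example.Sample and its members are hypothetical:

import scala.tools.asm.tree.AnnotationNode;

public class AnnotationValuesDemo {
    public static void main(String[] args) {
        AnnotationNode an = new AnnotationNode("Lcom/example/Sample;");
        an.visit("name", "widget");
        an.visit("count", Integer.valueOf(3));
        an.visitEnd();

        // Member names sit at even indices, their values at the odd ones.
        System.out.println(an.values); // [name, widget, count, 3]
    }
}
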
diff --git a/src/asm/scala/tools/asm/tree/ClassNode.java b/src/asm/scala/tools/asm/tree/ClassNode.java
new file mode 100644
index 0000000000..64effae698
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/ClassNode.java
@@ -0,0 +1,371 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a class.
+ *
+ * @author Eric Bruneton
+ */
+public class ClassNode extends ClassVisitor {
+
+ /**
+ * The class version.
+ */
+ public int version;
+
+ /**
+ * The class's access flags (see {@link org.objectweb.asm.Opcodes}). This
+ * field also indicates if the class is deprecated.
+ */
+ public int access;
+
+ /**
+ * The internal name of the class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ */
+ public String name;
+
+ /**
+ * The signature of the class. May be <tt>null</tt>.
+ */
+ public String signature;
+
+ /**
+ * The internal name of the super class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). For
+ * interfaces, the super class is {@link Object}. May be <tt>null</tt>,
+ * but only for the {@link Object} class.
+ */
+ public String superName;
+
+ /**
+ * The internal names of the class's interfaces (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). This
+ * list is a list of {@link String} objects.
+ */
+ public List<String> interfaces;
+
+ /**
+ * The name of the source file from which this class was compiled. May be
+ * <tt>null</tt>.
+ */
+ public String sourceFile;
+
+ /**
+ * Debug information to compute the correspondence between source and
+ * compiled elements of the class. May be <tt>null</tt>.
+ */
+ public String sourceDebug;
+
+ /**
+ * The internal name of the enclosing class of the class. May be
+ * <tt>null</tt>.
+ */
+ public String outerClass;
+
+ /**
+ * The name of the method that contains the class, or <tt>null</tt> if the
+ * class is not enclosed in a method.
+ */
+ public String outerMethod;
+
+ /**
+ * The descriptor of the method that contains the class, or <tt>null</tt>
+ * if the class is not enclosed in a method.
+ */
+ public String outerMethodDesc;
+
+ /**
+ * The runtime visible annotations of this class. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label visible
+ */
+ public List<AnnotationNode> visibleAnnotations;
+
+ /**
+ * The runtime invisible annotations of this class. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label invisible
+ */
+ public List<AnnotationNode> invisibleAnnotations;
+
+ /**
+ * The non-standard attributes of this class. This list is a list of
+ * {@link Attribute} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.Attribute
+ */
+ public List<Attribute> attrs;
+
+ /**
+ * Information about the inner classes of this class. This list is a list
+ * of {@link InnerClassNode} objects.
+ *
+ * @associates org.objectweb.asm.tree.InnerClassNode
+ */
+ public List<InnerClassNode> innerClasses;
+
+ /**
+ * The fields of this class. This list is a list of {@link FieldNode}
+ * objects.
+ *
+ * @associates org.objectweb.asm.tree.FieldNode
+ */
+ public List<FieldNode> fields;
+
+ /**
+ * The methods of this class. This list is a list of {@link MethodNode}
+ * objects.
+ *
+ * @associates org.objectweb.asm.tree.MethodNode
+ */
+ public List<MethodNode> methods;
+
+ /**
+ * Constructs a new {@link ClassNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the {@link #ClassNode(int)}
+ * version.
+ */
+ public ClassNode() {
+ this(Opcodes.ASM4);
+ }
+
+ /**
+ * Constructs a new {@link ClassNode}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public ClassNode(final int api) {
+ super(api);
+ this.interfaces = new ArrayList<String>();
+ this.innerClasses = new ArrayList<InnerClassNode>();
+ this.fields = new ArrayList<FieldNode>();
+ this.methods = new ArrayList<MethodNode>();
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the ClassVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ this.version = version;
+ this.access = access;
+ this.name = name;
+ this.signature = signature;
+ this.superName = superName;
+ if (interfaces != null) {
+ this.interfaces.addAll(Arrays.asList(interfaces));
+ }
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ sourceFile = file;
+ sourceDebug = debug;
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ outerClass = owner;
+ outerMethod = name;
+ outerMethodDesc = desc;
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ AnnotationNode an = new AnnotationNode(desc);
+ if (visible) {
+ if (visibleAnnotations == null) {
+ visibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ visibleAnnotations.add(an);
+ } else {
+ if (invisibleAnnotations == null) {
+ invisibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ invisibleAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ if (attrs == null) {
+ attrs = new ArrayList<Attribute>(1);
+ }
+ attrs.add(attr);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ InnerClassNode icn = new InnerClassNode(name,
+ outerName,
+ innerName,
+ access);
+ innerClasses.add(icn);
+ }
+
+ @Override
+ public FieldVisitor visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ FieldNode fn = new FieldNode(access, name, desc, signature, value);
+ fields.add(fn);
+ return fn;
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ MethodNode mn = new MethodNode(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+ methods.add(mn);
+ return mn;
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Accept method
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that this class node is compatible with the given ASM API version.
+ * This method checks that this node, and all its nodes recursively, do not
+ * contain elements that were introduced in more recent versions of the ASM
+ * API than the given version.
+ *
+ * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ */
+ public void check(final int api) {
+ // nothing to do
+ }
+
+ /**
+ * Makes the given class visitor visit this class.
+ *
+ * @param cv a class visitor.
+ */
+ public void accept(final ClassVisitor cv) {
+ // visits header
+ String[] interfaces = new String[this.interfaces.size()];
+ this.interfaces.toArray(interfaces);
+ cv.visit(version, access, name, signature, superName, interfaces);
+ // visits source
+ if (sourceFile != null || sourceDebug != null) {
+ cv.visitSource(sourceFile, sourceDebug);
+ }
+ // visits outer class
+ if (outerClass != null) {
+ cv.visitOuterClass(outerClass, outerMethod, outerMethodDesc);
+ }
+ // visits attributes
+ int i, n;
+ n = visibleAnnotations == null ? 0 : visibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = visibleAnnotations.get(i);
+ an.accept(cv.visitAnnotation(an.desc, true));
+ }
+ n = invisibleAnnotations == null ? 0 : invisibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = invisibleAnnotations.get(i);
+ an.accept(cv.visitAnnotation(an.desc, false));
+ }
+ n = attrs == null ? 0 : attrs.size();
+ for (i = 0; i < n; ++i) {
+ cv.visitAttribute(attrs.get(i));
+ }
+ // visits inner classes
+ for (i = 0; i < innerClasses.size(); ++i) {
+ innerClasses.get(i).accept(cv);
+ }
+ // visits fields
+ for (i = 0; i < fields.size(); ++i) {
+ fields.get(i).accept(cv);
+ }
+ // visits methods
+ for (i = 0; i < methods.size(); ++i) {
+ methods.get(i).accept(cv);
+ }
+ // visits end
+ cv.visitEnd();
+ }
+}
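
The usual read, transform, write round trip with ClassNode looks roughly like this; the sketch assumes the ClassReader and ClassWriter classes added earlier in this patch (with the standard ASM 4 API) and a class file already loaded into a byte array:

import scala.tools.asm.ClassReader;
import scala.tools.asm.ClassWriter;
import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.ClassNode;
import scala.tools.asm.tree.MethodNode;

public class MakeMethodsFinal {
    // 'original' holds the raw .class file contents, obtained elsewhere.
    public static byte[] transform(byte[] original) {
        ClassNode cn = new ClassNode();
        new ClassReader(original).accept(cn, 0);

        // Mutate the tree in place: here, mark every method as final.
        for (MethodNode mn : cn.methods) {
            mn.access |= Opcodes.ACC_FINAL;
        }

        ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
        cn.accept(cw);
        return cw.toByteArray();
    }
}
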
diff --git a/src/asm/scala/tools/asm/tree/FieldInsnNode.java b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
new file mode 100644
index 0000000000..6b7a6a142a
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
@@ -0,0 +1,106 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a field instruction. A field instruction is an
+ * instruction that loads or stores the value of a field of an object.
+ *
+ * @author Eric Bruneton
+ */
+public class FieldInsnNode extends AbstractInsnNode {
+
+ /**
+ * The internal name of the field's owner class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ */
+ public String owner;
+
+ /**
+ * The field's name.
+ */
+ public String name;
+
+ /**
+ * The field's descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * Constructs a new {@link FieldInsnNode}.
+ *
+ * @param opcode the opcode of the field instruction to be constructed. This
+ * opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param owner the internal name of the field's owner class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public FieldInsnNode(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ super(opcode);
+ this.owner = owner;
+ this.name = name;
+ this.desc = desc;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be GETSTATIC,
+ * PUTSTATIC, GETFIELD or PUTFIELD.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return FIELD_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor cv) {
+ cv.visitFieldInsn(opcode, owner, name, desc);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new FieldInsnNode(opcode, owner, name, desc);
+ }
+}
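
For instance, a GETFIELD on a hypothetical field com.example.Point.x of type int is modelled as:

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.FieldInsnNode;

public class FieldInsnDemo {
    public static void main(String[] args) {
        // getfield com/example/Point.x : I
        FieldInsnNode get = new FieldInsnNode(
                Opcodes.GETFIELD, "com/example/Point", "x", "I");
        System.out.println(get.owner + "." + get.name + " " + get.desc);
    }
}
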
diff --git a/src/asm/scala/tools/asm/tree/FieldNode.java b/src/asm/scala/tools/asm/tree/FieldNode.java
new file mode 100644
index 0000000000..9a1e17033c
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/FieldNode.java
@@ -0,0 +1,243 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a field.
+ *
+ * @author Eric Bruneton
+ */
+public class FieldNode extends FieldVisitor {
+
+ /**
+ * The field's access flags (see {@link org.objectweb.asm.Opcodes}). This
+ * field also indicates if the field is synthetic and/or deprecated.
+ */
+ public int access;
+
+ /**
+ * The field's name.
+ */
+ public String name;
+
+ /**
+ * The field's descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * The field's signature. May be <tt>null</tt>.
+ */
+ public String signature;
+
+ /**
+ * The field's initial value. This field, which may be <tt>null</tt> if
+ * the field does not have an initial value, must be an {@link Integer}, a
+ * {@link Float}, a {@link Long}, a {@link Double} or a {@link String}.
+ */
+ public Object value;
+
+ /**
+ * The runtime visible annotations of this field. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label visible
+ */
+ public List<AnnotationNode> visibleAnnotations;
+
+ /**
+ * The runtime invisible annotations of this field. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label invisible
+ */
+ public List<AnnotationNode> invisibleAnnotations;
+
+ /**
+ * The non-standard attributes of this field. This list is a list of
+ * {@link Attribute} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.Attribute
+ */
+ public List<Attribute> attrs;
+
+ /**
+ * Constructs a new {@link FieldNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #FieldNode(int, int, String, String, String, Object)} version.
+ *
+ * @param access the field's access flags (see
+ * {@link org.objectweb.asm.Opcodes}). This parameter also indicates
+ * if the field is synthetic and/or deprecated.
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
+ * Type}).
+ * @param signature the field's signature.
+ * @param value the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value, must be
+ * an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
+ */
+ public FieldNode(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ this(Opcodes.ASM4, access, name, desc, signature, value);
+ }
+
+ /**
+ * Constructs a new {@link FieldNode}. This variant takes the ASM API
+ * version as an explicit argument and is therefore the constructor that
+ * subclasses must use.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param access the field's access flags (see
+ * {@link org.objectweb.asm.Opcodes}). This parameter also indicates
+ * if the field is synthetic and/or deprecated.
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
+ * Type}).
+ * @param signature the field's signature.
+ * @param value the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value, must be
+ * an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
+ */
+ public FieldNode(
+ final int api,
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ super(api);
+ this.access = access;
+ this.name = name;
+ this.desc = desc;
+ this.signature = signature;
+ this.value = value;
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the FieldVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ AnnotationNode an = new AnnotationNode(desc);
+ if (visible) {
+ if (visibleAnnotations == null) {
+ visibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ visibleAnnotations.add(an);
+ } else {
+ if (invisibleAnnotations == null) {
+ invisibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ invisibleAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ if (attrs == null) {
+ attrs = new ArrayList<Attribute>(1);
+ }
+ attrs.add(attr);
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Accept methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that this field node is compatible with the given ASM API version.
+ * This method checks that this node, and all its nodes recursively, do not
+ * contain elements that were introduced in more recent versions of the ASM
+ * API than the given version.
+ *
+ * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ */
+ public void check(final int api) {
+ // nothing to do
+ }
+
+ /**
+ * Makes the given class visitor visit this field.
+ *
+ * @param cv a class visitor.
+ */
+ public void accept(final ClassVisitor cv) {
+ FieldVisitor fv = cv.visitField(access, name, desc, signature, value);
+ if (fv == null) {
+ return;
+ }
+ int i, n;
+ n = visibleAnnotations == null ? 0 : visibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = visibleAnnotations.get(i);
+ an.accept(fv.visitAnnotation(an.desc, true));
+ }
+ n = invisibleAnnotations == null ? 0 : invisibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = invisibleAnnotations.get(i);
+ an.accept(fv.visitAnnotation(an.desc, false));
+ }
+ n = attrs == null ? 0 : attrs.size();
+ for (i = 0; i < n; ++i) {
+ fv.visitAttribute(attrs.get(i));
+ }
+ fv.visitEnd();
+ }
+}
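Not part of the commit: a minimal sketch of how a FieldNode from this package might be attached to a ClassNode (also added in this diff). The class name "demo/Example" and the field itself are hypothetical.

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.ClassNode;
import scala.tools.asm.tree.FieldNode;

public class FieldNodeSketch {
    public static void main(String[] args) {
        // Hypothetical class "demo/Example" with "private static final int MAX = 10".
        ClassNode cn = new ClassNode();
        cn.version = Opcodes.V1_6;
        cn.access = Opcodes.ACC_PUBLIC;
        cn.name = "demo/Example";
        cn.superName = "java/lang/Object";

        FieldNode max = new FieldNode(
            Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL,
            "MAX",                 // field name
            "I",                   // descriptor: int
            null,                  // no generic signature
            Integer.valueOf(10));  // initial value
        cn.fields.add(max);        // cn.accept(ClassVisitor) emits it later
    }
}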
diff --git a/src/asm/scala/tools/asm/tree/FrameNode.java b/src/asm/scala/tools/asm/tree/FrameNode.java
new file mode 100644
index 0000000000..66825de0ac
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/FrameNode.java
@@ -0,0 +1,211 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a stack map frame. These nodes are pseudo instruction
+ * nodes in order to be inserted in an instruction list. In fact these nodes
+ * must(*) be inserted <i>just before</i> any instruction node <b>i</b> that
+ * follows an unconditional branch instruction such as GOTO or THROW, that is
+ * the target of a jump instruction, or that starts an exception handler block.
+ * The stack map frame types must describe the values of the local variables and
+ * of the operand stack elements <i>just before</i> <b>i</b> is executed. <br>
+ * <br> (*) this is mandatory only for classes whose version is greater than or
+ * equal to {@link Opcodes#V1_6 V1_6}.
+ *
+ * @author Eric Bruneton
+ */
+public class FrameNode extends AbstractInsnNode {
+
+ /**
+ * The type of this frame. Must be {@link Opcodes#F_NEW} for expanded
+ * frames, or {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
+ * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
+ * {@link Opcodes#F_SAME1} for compressed frames.
+ */
+ public int type;
+
+ /**
+ * The types of the local variables of this stack map frame. Elements of
+ * this list can be Integer, String or LabelNode objects (for primitive,
+ * reference and uninitialized types respectively - see
+ * {@link MethodVisitor}).
+ */
+ public List<Object> local;
+
+ /**
+ * The types of the operand stack elements of this stack map frame. Elements
+ * of this list can be Integer, String or LabelNode objects (for primitive,
+ * reference and uninitialized types respectively - see
+ * {@link MethodVisitor}).
+ */
+ public List<Object> stack;
+
+ private FrameNode() {
+ super(-1);
+ }
+
+ /**
+ * Constructs a new {@link FrameNode}.
+ *
+ * @param type the type of this frame. Must be {@link Opcodes#F_NEW} for
+ * expanded frames, or {@link Opcodes#F_FULL},
+ * {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP},
+ * {@link Opcodes#F_SAME} or {@link Opcodes#F_SAME1} for
+ * compressed frames.
+ * @param nLocal number of local variables of this stack map frame.
+ * @param local the types of the local variables of this stack map frame.
+ * Elements of this list can be Integer, String or LabelNode objects
+ * (for primitive, reference and uninitialized types respectively -
+ * see {@link MethodVisitor}).
+ * @param nStack number of operand stack elements of this stack map frame.
+ * @param stack the types of the operand stack elements of this stack map
+ * frame. Elements of this list can be Integer, String or LabelNode
+ * objects (for primitive, reference and uninitialized types
+ * respectively - see {@link MethodVisitor}).
+ */
+ public FrameNode(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ super(-1);
+ this.type = type;
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ this.local = asList(nLocal, local);
+ this.stack = asList(nStack, stack);
+ break;
+ case Opcodes.F_APPEND:
+ this.local = asList(nLocal, local);
+ break;
+ case Opcodes.F_CHOP:
+ this.local = Arrays.asList(new Object[nLocal]);
+ break;
+ case Opcodes.F_SAME:
+ break;
+ case Opcodes.F_SAME1:
+ this.stack = asList(1, stack);
+ break;
+ }
+ }
+
+ @Override
+ public int getType() {
+ return FRAME;
+ }
+
+ /**
+ * Makes the given visitor visit this stack map frame.
+ *
+ * @param mv a method visitor.
+ */
+ @Override
+ public void accept(final MethodVisitor mv) {
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ mv.visitFrame(type,
+ local.size(),
+ asArray(local),
+ stack.size(),
+ asArray(stack));
+ break;
+ case Opcodes.F_APPEND:
+ mv.visitFrame(type, local.size(), asArray(local), 0, null);
+ break;
+ case Opcodes.F_CHOP:
+ mv.visitFrame(type, local.size(), null, 0, null);
+ break;
+ case Opcodes.F_SAME:
+ mv.visitFrame(type, 0, null, 0, null);
+ break;
+ case Opcodes.F_SAME1:
+ mv.visitFrame(type, 0, null, 1, asArray(stack));
+ break;
+ }
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ FrameNode clone = new FrameNode();
+ clone.type = type;
+ if (local != null) {
+ clone.local = new ArrayList<Object>();
+ for (int i = 0; i < local.size(); ++i) {
+ Object l = local.get(i);
+ if (l instanceof LabelNode) {
+ l = labels.get(l);
+ }
+ clone.local.add(l);
+ }
+ }
+ if (stack != null) {
+ clone.stack = new ArrayList<Object>();
+ for (int i = 0; i < stack.size(); ++i) {
+ Object s = stack.get(i);
+ if (s instanceof LabelNode) {
+ s = labels.get(s);
+ }
+ clone.stack.add(s);
+ }
+ }
+ return clone;
+ }
+
+ // ------------------------------------------------------------------------
+
+ private static List<Object> asList(final int n, final Object[] o) {
+ return Arrays.asList(o).subList(0, n);
+ }
+
+ private static Object[] asArray(final List<Object> l) {
+ Object[] objs = new Object[l.size()];
+ for (int i = 0; i < objs.length; ++i) {
+ Object o = l.get(i);
+ if (o instanceof LabelNode) {
+ o = ((LabelNode) o).getLabel();
+ }
+ objs[i] = o;
+ }
+ return objs;
+ }
+}
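Again not part of the commit: a rough sketch, with a hypothetical set of locals, of how an expanded frame node is placed right after a jump target as the class comment above requires.

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.FrameNode;
import scala.tools.asm.tree.InsnList;
import scala.tools.asm.tree.InsnNode;
import scala.tools.asm.tree.LabelNode;

public class FrameNodeSketch {
    public static void main(String[] args) {
        InsnList insns = new InsnList();
        LabelNode target = new LabelNode();  // some jump target
        insns.add(target);
        // Expanded (F_NEW) frame: one int local, empty operand stack,
        // describing the state just before the instruction that follows.
        insns.add(new FrameNode(
            Opcodes.F_NEW,
            1, new Object[] { Opcodes.INTEGER },
            0, new Object[] {}));
        insns.add(new InsnNode(Opcodes.RETURN));
    }
}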
diff --git a/src/asm/scala/tools/asm/tree/IincInsnNode.java b/src/asm/scala/tools/asm/tree/IincInsnNode.java
new file mode 100644
index 0000000000..75ac40884d
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/IincInsnNode.java
@@ -0,0 +1,80 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents an IINC instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class IincInsnNode extends AbstractInsnNode {
+
+ /**
+ * Index of the local variable to be incremented.
+ */
+ public int var;
+
+ /**
+ * Amount to increment the local variable by.
+ */
+ public int incr;
+
+ /**
+ * Constructs a new {@link IincInsnNode}.
+ *
+ * @param var index of the local variable to be incremented.
+ * @param incr the amount to increment the local variable by.
+ */
+ public IincInsnNode(final int var, final int incr) {
+ super(Opcodes.IINC);
+ this.var = var;
+ this.incr = incr;
+ }
+
+ @Override
+ public int getType() {
+ return IINC_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitIincInsn(var, incr);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new IincInsnNode(var, incr);
+ }
+}
\ No newline at end of file
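A one-line illustration, not in the patch: an IincInsnNode for the common i++ pattern, assuming the local happens to sit in slot 1.

import scala.tools.asm.tree.IincInsnNode;
import scala.tools.asm.tree.InsnList;

public class IincSketch {
    public static void main(String[] args) {
        InsnList insns = new InsnList();
        insns.add(new IincInsnNode(1, 1)); // iinc slot 1 by 1, i.e. i++
    }
}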
diff --git a/src/asm/scala/tools/asm/tree/InnerClassNode.java b/src/asm/scala/tools/asm/tree/InnerClassNode.java
new file mode 100644
index 0000000000..4579488921
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/InnerClassNode.java
@@ -0,0 +1,101 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import scala.tools.asm.ClassVisitor;
+
+/**
+ * A node that represents an inner class.
+ *
+ * @author Eric Bruneton
+ */
+public class InnerClassNode {
+
+ /**
+ * The internal name of an inner class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ */
+ public String name;
+
+ /**
+ * The internal name of the class to which the inner class belongs (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). May
+ * be <tt>null</tt>.
+ */
+ public String outerName;
+
+ /**
+ * The (simple) name of the inner class inside its enclosing class. May be
+ * <tt>null</tt> for anonymous inner classes.
+ */
+ public String innerName;
+
+ /**
+ * The access flags of the inner class as originally declared in the
+ * enclosing class.
+ */
+ public int access;
+
+ /**
+ * Constructs a new {@link InnerClassNode}.
+ *
+ * @param name the internal name of an inner class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * @param outerName the internal name of the class to which the inner class
+ * belongs (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * May be <tt>null</tt>.
+ * @param innerName the (simple) name of the inner class inside its
+ * enclosing class. May be <tt>null</tt> for anonymous inner
+ * classes.
+ * @param access the access flags of the inner class as originally declared
+ * in the enclosing class.
+ */
+ public InnerClassNode(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ this.name = name;
+ this.outerName = outerName;
+ this.innerName = innerName;
+ this.access = access;
+ }
+
+ /**
+ * Makes the given class visitor visit this inner class.
+ *
+ * @param cv a class visitor.
+ */
+ public void accept(final ClassVisitor cv) {
+ cv.visitInnerClass(name, outerName, innerName, access);
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java
new file mode 100644
index 0000000000..dedd3bba73
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/InsnList.java
@@ -0,0 +1,578 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ListIterator;
+import java.util.NoSuchElementException;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A doubly linked list of {@link AbstractInsnNode} objects. <i>This
+ * implementation is not thread safe</i>.
+ */
+public class InsnList {
+
+ /**
+ * The number of instructions in this list.
+ */
+ private int size;
+
+ /**
+ * The first instruction in this list. May be <tt>null</tt>.
+ */
+ private AbstractInsnNode first;
+
+ /**
+ * The last instruction in this list. May be <tt>null</tt>.
+ */
+ private AbstractInsnNode last;
+
+ /**
+ * A cache of the instructions of this list. This cache is used to improve
+ * the performance of the {@link #get} method.
+ */
+ AbstractInsnNode[] cache;
+
+ /**
+ * Returns the number of instructions in this list.
+ *
+ * @return the number of instructions in this list.
+ */
+ public int size() {
+ return size;
+ }
+
+ /**
+ * Returns the first instruction in this list.
+ *
+ * @return the first instruction in this list, or <tt>null</tt> if the
+ * list is empty.
+ */
+ public AbstractInsnNode getFirst() {
+ return first;
+ }
+
+ /**
+ * Returns the last instruction in this list.
+ *
+ * @return the last instruction in this list, or <tt>null</tt> if the list
+ * is empty.
+ */
+ public AbstractInsnNode getLast() {
+ return last;
+ }
+
+ /**
+ * Returns the instruction whose index is given. This method builds a cache
+ * of the instructions in this list to avoid scanning the whole list each
+ * time it is called. Once the cache is built, this method runs in constant
+ * time. This cache is invalidated by all the methods that modify the list.
+ *
+ * @param index the index of the instruction that must be returned.
+ * @return the instruction whose index is given.
+ * @throws IndexOutOfBoundsException if (index < 0 || index >= size()).
+ */
+ public AbstractInsnNode get(final int index) {
+ if (index < 0 || index >= size) {
+ throw new IndexOutOfBoundsException();
+ }
+ if (cache == null) {
+ cache = toArray();
+ }
+ return cache[index];
+ }
+
+ /**
+ * Returns <tt>true</tt> if the given instruction belongs to this list.
+ * This method always scans the instructions of this list until it finds the
+ * given instruction or reaches the end of the list.
+ *
+ * @param insn an instruction.
+ * @return <tt>true</tt> if the given instruction belongs to this list.
+ */
+ public boolean contains(final AbstractInsnNode insn) {
+ AbstractInsnNode i = first;
+ while (i != null && i != insn) {
+ i = i.next;
+ }
+ return i != null;
+ }
+
+ /**
+ * Returns the index of the given instruction in this list. This method
+ * builds a cache of the instruction indexes to avoid scanning the whole
+ * list each time it is called. Once the cache is built, this method runs in
+ * constant time. The cache is invalidated by all the methods that modify
+ * the list.
+ *
+ * @param insn an instruction <i>of this list</i>.
+ * @return the index of the given instruction in this list. <i>The result of
+ * this method is undefined if the given instruction does not belong
+ * to this list</i>. Use {@link #contains contains} to test if an
+ * instruction belongs to an instruction list or not.
+ */
+ public int indexOf(final AbstractInsnNode insn) {
+ if (cache == null) {
+ cache = toArray();
+ }
+ return insn.index;
+ }
+
+ /**
+ * Makes the given visitor visit all of the instructions in this list.
+ *
+ * @param mv the method visitor that must visit the instructions.
+ */
+ public void accept(final MethodVisitor mv) {
+ AbstractInsnNode insn = first;
+ while (insn != null) {
+ insn.accept(mv);
+ insn = insn.next;
+ }
+ }
+
+ /**
+ * Returns an iterator over the instructions in this list.
+ *
+ * @return an iterator over the instructions in this list.
+ */
+ public ListIterator<AbstractInsnNode> iterator() {
+ return iterator(0);
+ }
+
+ /**
+ * Returns an iterator over the instructions in this list.
+ *
+ * @return an iterator over the instructions in this list.
+ */
+ @SuppressWarnings("unchecked")
+ public ListIterator<AbstractInsnNode> iterator(int index) {
+ return new InsnListIterator(index);
+ }
+
+ /**
+ * Returns an array containing all of the instructions in this list.
+ *
+ * @return an array containing all of the instructions in this list.
+ */
+ public AbstractInsnNode[] toArray() {
+ int i = 0;
+ AbstractInsnNode elem = first;
+ AbstractInsnNode[] insns = new AbstractInsnNode[size];
+ while (elem != null) {
+ insns[i] = elem;
+ elem.index = i++;
+ elem = elem.next;
+ }
+ return insns;
+ }
+
+ /**
+ * Replaces an instruction of this list with another instruction.
+ *
+ * @param location an instruction <i>of this list</i>.
+ * @param insn another instruction, <i>which must not belong to any
+ * {@link InsnList}</i>.
+ */
+ public void set(final AbstractInsnNode location, final AbstractInsnNode insn) {
+ AbstractInsnNode next = location.next;
+ insn.next = next;
+ if (next != null) {
+ next.prev = insn;
+ } else {
+ last = insn;
+ }
+ AbstractInsnNode prev = location.prev;
+ insn.prev = prev;
+ if (prev != null) {
+ prev.next = insn;
+ } else {
+ first = insn;
+ }
+ if (cache != null) {
+ int index = location.index;
+ cache[index] = insn;
+ insn.index = index;
+ } else {
+ insn.index = 0; // insn now belongs to an InsnList
+ }
+ location.index = -1; // location no longer belongs to an InsnList
+ location.prev = null;
+ location.next = null;
+ }
+
+ /**
+ * Adds the given instruction to the end of this list.
+ *
+ * @param insn an instruction, <i>which must not belong to any
+ * {@link InsnList}</i>.
+ */
+ public void add(final AbstractInsnNode insn) {
+ ++size;
+ if (last == null) {
+ first = insn;
+ last = insn;
+ } else {
+ last.next = insn;
+ insn.prev = last;
+ }
+ last = insn;
+ cache = null;
+ insn.index = 0; // insn now belongs to an InsnList
+ }
+
+ /**
+ * Adds the given instructions to the end of this list.
+ *
+ * @param insns an instruction list, which is cleared during the process.
+ * This list must be different from 'this'.
+ */
+ public void add(final InsnList insns) {
+ if (insns.size == 0) {
+ return;
+ }
+ size += insns.size;
+ if (last == null) {
+ first = insns.first;
+ last = insns.last;
+ } else {
+ AbstractInsnNode elem = insns.first;
+ last.next = elem;
+ elem.prev = last;
+ last = insns.last;
+ }
+ cache = null;
+ insns.removeAll(false);
+ }
+
+ /**
+ * Inserts the given instruction at the beginning of this list.
+ *
+ * @param insn an instruction, <i>which must not belong to any
+ * {@link InsnList}</i>.
+ */
+ public void insert(final AbstractInsnNode insn) {
+ ++size;
+ if (first == null) {
+ first = insn;
+ last = insn;
+ } else {
+ first.prev = insn;
+ insn.next = first;
+ }
+ first = insn;
+ cache = null;
+ insn.index = 0; // insn now belongs to an InsnList
+ }
+
+ /**
+ * Inserts the given instructions at the beginning of this list.
+ *
+ * @param insns an instruction list, which is cleared during the process.
+ * This list must be different from 'this'.
+ */
+ public void insert(final InsnList insns) {
+ if (insns.size == 0) {
+ return;
+ }
+ size += insns.size;
+ if (first == null) {
+ first = insns.first;
+ last = insns.last;
+ } else {
+ AbstractInsnNode elem = insns.last;
+ first.prev = elem;
+ elem.next = first;
+ first = insns.first;
+ }
+ cache = null;
+ insns.removeAll(false);
+ }
+
+ /**
+ * Inserts the given instruction after the specified instruction.
+ *
+ * @param location an instruction <i>of this list</i> after which insn must be
+ * inserted.
+ * @param insn the instruction to be inserted, <i>which must not belong to
+ * any {@link InsnList}</i>.
+ */
+ public void insert(final AbstractInsnNode location, final AbstractInsnNode insn) {
+ ++size;
+ AbstractInsnNode next = location.next;
+ if (next == null) {
+ last = insn;
+ } else {
+ next.prev = insn;
+ }
+ location.next = insn;
+ insn.next = next;
+ insn.prev = location;
+ cache = null;
+ insn.index = 0; // insn now belongs to an InsnList
+ }
+
+ /**
+ * Inserts the given instructions after the specified instruction.
+ *
+ * @param location an instruction <i>of this list</i> after which the
+ * instructions must be inserted.
+ * @param insns the instruction list to be inserted, which is cleared during
+ * the process. This list must be different from 'this'.
+ */
+ public void insert(final AbstractInsnNode location, final InsnList insns) {
+ if (insns.size == 0) {
+ return;
+ }
+ size += insns.size;
+ AbstractInsnNode ifirst = insns.first;
+ AbstractInsnNode ilast = insns.last;
+ AbstractInsnNode next = location.next;
+ if (next == null) {
+ last = ilast;
+ } else {
+ next.prev = ilast;
+ }
+ location.next = ifirst;
+ ilast.next = next;
+ ifirst.prev = location;
+ cache = null;
+ insns.removeAll(false);
+ }
+
+ /**
+ * Inserts the given instruction before the specified instruction.
+ *
+ * @param location an instruction <i>of this list</i> before which insn must be
+ * inserted.
+ * @param insn the instruction to be inserted, <i>which must not belong to
+ * any {@link InsnList}</i>.
+ */
+ public void insertBefore(final AbstractInsnNode location, final AbstractInsnNode insn) {
+ ++size;
+ AbstractInsnNode prev = location.prev;
+ if (prev == null) {
+ first = insn;
+ } else {
+ prev.next = insn;
+ }
+ location.prev = insn;
+ insn.next = location;
+ insn.prev = prev;
+ cache = null;
+ insn.index = 0; // insn now belongs to an InsnList
+ }
+
+ /**
+ * Inserts the given instructions before the specified instruction.
+ *
+ * @param location an instruction <i>of this list</i> before which the instructions
+ * must be inserted.
+ * @param insns the instruction list to be inserted, which is cleared during
+ * the process. This list must be different from 'this'.
+ */
+ public void insertBefore(final AbstractInsnNode location, final InsnList insns) {
+ if (insns.size == 0) {
+ return;
+ }
+ size += insns.size;
+ AbstractInsnNode ifirst = insns.first;
+ AbstractInsnNode ilast = insns.last;
+ AbstractInsnNode prev = location.prev;
+ if (prev == null) {
+ first = ifirst;
+ } else {
+ prev.next = ifirst;
+ }
+ location.prev = ilast;
+ ilast.next = location;
+ ifirst.prev = prev;
+ cache = null;
+ insns.removeAll(false);
+ }
+
+
+
+ /**
+ * Removes the given instruction from this list.
+ *
+ * @param insn the instruction <i>of this list</i> that must be removed.
+ */
+ public void remove(final AbstractInsnNode insn) {
+ --size;
+ AbstractInsnNode next = insn.next;
+ AbstractInsnNode prev = insn.prev;
+ if (next == null) {
+ if (prev == null) {
+ first = null;
+ last = null;
+ } else {
+ prev.next = null;
+ last = prev;
+ }
+ } else {
+ if (prev == null) {
+ first = next;
+ next.prev = null;
+ } else {
+ prev.next = next;
+ next.prev = prev;
+ }
+ }
+ cache = null;
+ insn.index = -1; // insn no longer belongs to an InsnList
+ insn.prev = null;
+ insn.next = null;
+ }
+
+ /**
+ * Removes all of the instructions of this list.
+ *
+ * @param mark if the instructions must be marked as no longer belonging to
+ * any {@link InsnList}.
+ */
+ void removeAll(final boolean mark) {
+ if (mark) {
+ AbstractInsnNode insn = first;
+ while (insn != null) {
+ AbstractInsnNode next = insn.next;
+ insn.index = -1; // insn no longer belongs to an InsnList
+ insn.prev = null;
+ insn.next = null;
+ insn = next;
+ }
+ }
+ size = 0;
+ first = null;
+ last = null;
+ cache = null;
+ }
+
+ /**
+ * Removes all of the instructions of this list.
+ */
+ public void clear() {
+ removeAll(false);
+ }
+
+ /**
+ * Resets all labels in the instruction list. This method should be called
+ * before reusing the same instruction list between several
+ * <code>ClassWriter</code>s.
+ */
+ public void resetLabels() {
+ AbstractInsnNode insn = first;
+ while (insn != null) {
+ if (insn instanceof LabelNode) {
+ ((LabelNode) insn).resetLabel();
+ }
+ insn = insn.next;
+ }
+ }
+
+ // this class is not generified because it will create bridges
+ private final class InsnListIterator implements ListIterator/*<AbstractInsnNode>*/ {
+
+ AbstractInsnNode next;
+
+ AbstractInsnNode prev;
+
+ InsnListIterator(int index) {
+ if (index == size()) {
+ next = null;
+ prev = getLast();
+ } else {
+ next = get(index);
+ prev = next.prev;
+ }
+ }
+
+ public boolean hasNext() {
+ return next != null;
+ }
+
+ public Object next() {
+ if (next == null) {
+ throw new NoSuchElementException();
+ }
+ AbstractInsnNode result = next;
+ prev = result;
+ next = result.next;
+ return result;
+ }
+
+ public void remove() {
+ InsnList.this.remove(prev);
+ prev = prev.prev;
+ }
+
+ public boolean hasPrevious() {
+ return prev != null;
+ }
+
+ public Object previous() {
+ AbstractInsnNode result = prev;
+ next = result;
+ prev = result.prev;
+ return result;
+ }
+
+ public int nextIndex() {
+ if (next == null) {
+ return size();
+ }
+ if (cache == null) {
+ cache = toArray();
+ }
+ return next.index;
+ }
+
+ public int previousIndex() {
+ if (prev == null) {
+ return -1;
+ }
+ if (cache == null) {
+ cache = toArray();
+ }
+ return prev.index;
+ }
+
+ public void add(Object o) {
+ InsnList.this.insertBefore(next, (AbstractInsnNode) o);
+ prev = (AbstractInsnNode) o;
+ }
+
+ public void set(Object o) {
+ InsnList.this.set(next.prev, (AbstractInsnNode) o);
+ prev = (AbstractInsnNode) o;
+ }
+ }
+}
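For orientation only: a small sketch that builds the body of a hypothetical "return x + 1" method as an InsnList, using node classes added elsewhere in this commit.

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.InsnList;
import scala.tools.asm.tree.InsnNode;
import scala.tools.asm.tree.VarInsnNode;

public class InsnListSketch {
    public static void main(String[] args) {
        // return x + 1;  (x is the first argument of an instance method, slot 1)
        InsnList insns = new InsnList();
        insns.add(new VarInsnNode(Opcodes.ILOAD, 1));
        insns.add(new InsnNode(Opcodes.ICONST_1));
        insns.add(new InsnNode(Opcodes.IADD));
        insns.add(new InsnNode(Opcodes.IRETURN));
        System.out.println(insns.size()); // 4
    }
}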
diff --git a/src/asm/scala/tools/asm/tree/InsnNode.java b/src/asm/scala/tools/asm/tree/InsnNode.java
new file mode 100644
index 0000000000..d4664d23c2
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/InsnNode.java
@@ -0,0 +1,84 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a zero operand instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class InsnNode extends AbstractInsnNode {
+
+ /**
+ * Constructs a new {@link InsnNode}.
+ *
+ * @param opcode the opcode of the instruction to be constructed. This
+ * opcode must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
+ * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
+ * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD,
+ * FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE,
+ * FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2,
+ * DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD,
+ * FADD, DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV,
+ * LDIV, FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG,
+ * ISHL, LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR,
+ * LXOR, I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F,
+ * I2B, I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
+ * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
+ * MONITORENTER, or MONITOREXIT.
+ */
+ public InsnNode(final int opcode) {
+ super(opcode);
+ }
+
+ @Override
+ public int getType() {
+ return INSN;
+ }
+
+ /**
+ * Makes the given visitor visit this instruction.
+ *
+ * @param mv a method visitor.
+ */
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitInsn(opcode);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new InsnNode(opcode);
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/IntInsnNode.java b/src/asm/scala/tools/asm/tree/IntInsnNode.java
new file mode 100644
index 0000000000..b61270c786
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/IntInsnNode.java
@@ -0,0 +1,84 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents an instruction with a single int operand.
+ *
+ * @author Eric Bruneton
+ */
+public class IntInsnNode extends AbstractInsnNode {
+
+ /**
+ * The operand of this instruction.
+ */
+ public int operand;
+
+ /**
+ * Constructs a new {@link IntInsnNode}.
+ *
+ * @param opcode the opcode of the instruction to be constructed. This
+ * opcode must be BIPUSH, SIPUSH or NEWARRAY.
+ * @param operand the operand of the instruction to be constructed.
+ */
+ public IntInsnNode(final int opcode, final int operand) {
+ super(opcode);
+ this.operand = operand;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be BIPUSH,
+ * SIPUSH or NEWARRAY.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return INT_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitIntInsn(opcode, operand);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new IntInsnNode(opcode, operand);
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
new file mode 100644
index 0000000000..d993b5a054
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
@@ -0,0 +1,100 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.Handle;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents an invokedynamic instruction.
+ *
+ * @author Remi Forax
+ */
+public class InvokeDynamicInsnNode extends AbstractInsnNode {
+
+ /**
+ * Invokedynamic name.
+ */
+ public String name;
+
+ /**
+ * Invokedynamic descriptor.
+ */
+ public String desc;
+
+ /**
+ * Bootstrap method
+ */
+ public Handle bsm;
+
+ /**
+ * Bootstrap constant arguments
+ */
+ public Object[] bsmArgs;
+
+ /**
+ * Constructs a new {@link InvokeDynamicInsnNode}.
+ *
+ * @param name invokedynamic name.
+ * @param desc invokedynamic descriptor (see {@link org.objectweb.asm.Type}).
+ * @param bsm the bootstrap method.
+ * @param bsmArgs the bootstrap constant arguments.
+ */
+ public InvokeDynamicInsnNode(
+ final String name,
+ final String desc,
+ final Handle bsm,
+ final Object... bsmArgs)
+ {
+ super(Opcodes.INVOKEDYNAMIC);
+ this.name = name;
+ this.desc = desc;
+ this.bsm = bsm;
+ this.bsmArgs = bsmArgs;
+ }
+
+ @Override
+ public int getType() {
+ return INVOKE_DYNAMIC_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs);
+ }
+}
\ No newline at end of file
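A hedged sketch, not from the patch: constructing an InvokeDynamicInsnNode against a purely hypothetical bootstrap class demo/Bootstraps; only the Handle tag and the minimal bootstrap method descriptor are standard.

import scala.tools.asm.Handle;
import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.InvokeDynamicInsnNode;

public class IndySketch {
    public static void main(String[] args) {
        // Hypothetical bootstrap: CallSite demo.Bootstraps.bootstrap(Lookup, String, MethodType)
        Handle bsm = new Handle(
            Opcodes.H_INVOKESTATIC,
            "demo/Bootstraps",
            "bootstrap",
            "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
        // invokedynamic greet()V, with no extra bootstrap arguments
        InvokeDynamicInsnNode indy = new InvokeDynamicInsnNode("greet", "()V", bsm);
        System.out.println(indy.name + indy.desc);
    }
}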
diff --git a/src/asm/scala/tools/asm/tree/JumpInsnNode.java b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
new file mode 100644
index 0000000000..339ebbd2d0
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
@@ -0,0 +1,92 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a jump instruction. A jump instruction is an
+ * instruction that may jump to another instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class JumpInsnNode extends AbstractInsnNode {
+
+ /**
+ * The operand of this instruction. This operand is a label that designates
+ * the instruction to which this instruction may jump.
+ */
+ public LabelNode label;
+
+ /**
+ * Constructs a new {@link JumpInsnNode}.
+ *
+ * @param opcode the opcode of the type instruction to be constructed. This
+ * opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+ * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
+ * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+ * @param label the operand of the instruction to be constructed. This
+ * operand is a label that designates the instruction to which the
+ * jump instruction may jump.
+ */
+ public JumpInsnNode(final int opcode, final LabelNode label) {
+ super(opcode);
+ this.label = label;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be IFEQ, IFNE,
+ * IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT,
+ * IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO, JSR,
+ * IFNULL or IFNONNULL.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return JUMP_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitJumpInsn(opcode, label.getLabel());
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new JumpInsnNode(opcode, clone(label, labels));
+ }
+}
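One more illustrative fragment, not part of the diff: a JumpInsnNode and its LabelNode target encoding a simple "return x != 0 ? 1 : 0" shape.

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.InsnList;
import scala.tools.asm.tree.InsnNode;
import scala.tools.asm.tree.JumpInsnNode;
import scala.tools.asm.tree.LabelNode;
import scala.tools.asm.tree.VarInsnNode;

public class JumpSketch {
    public static void main(String[] args) {
        LabelNode nonZero = new LabelNode();
        InsnList insns = new InsnList();
        insns.add(new VarInsnNode(Opcodes.ILOAD, 1));       // load x (slot 1)
        insns.add(new JumpInsnNode(Opcodes.IFNE, nonZero)); // if (x != 0) goto nonZero
        insns.add(new InsnNode(Opcodes.ICONST_0));
        insns.add(new InsnNode(Opcodes.IRETURN));
        insns.add(nonZero);
        insns.add(new InsnNode(Opcodes.ICONST_1));
        insns.add(new InsnNode(Opcodes.IRETURN));
    }
}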
diff --git a/src/asm/scala/tools/asm/tree/LabelNode.java b/src/asm/scala/tools/asm/tree/LabelNode.java
new file mode 100644
index 0000000000..523a8d6442
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LabelNode.java
@@ -0,0 +1,78 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * An {@link AbstractInsnNode} that encapsulates a {@link Label}.
+ */
+public class LabelNode extends AbstractInsnNode {
+
+ private Label label;
+
+ public LabelNode() {
+ super(-1);
+ }
+
+ public LabelNode(final Label label) {
+ super(-1);
+ this.label = label;
+ }
+
+ @Override
+ public int getType() {
+ return LABEL;
+ }
+
+ public Label getLabel() {
+ if (label == null) {
+ label = new Label();
+ }
+ return label;
+ }
+
+ @Override
+ public void accept(final MethodVisitor cv) {
+ cv.visitLabel(getLabel());
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return labels.get(this);
+ }
+
+ public void resetLabel() {
+ label = null;
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/LdcInsnNode.java b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
new file mode 100644
index 0000000000..f8d115acd5
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
@@ -0,0 +1,77 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents an LDC instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class LdcInsnNode extends AbstractInsnNode {
+
+ /**
+ * The constant to be loaded on the stack. This parameter must be a non null
+ * {@link Integer}, a {@link Float}, a {@link Long}, a {@link Double}, a
+ * {@link String} or a {@link org.objectweb.asm.Type}.
+ */
+ public Object cst;
+
+ /**
+ * Constructs a new {@link LdcInsnNode}.
+ *
+ * @param cst the constant to be loaded on the stack. This parameter must be
+ * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
+ */
+ public LdcInsnNode(final Object cst) {
+ super(Opcodes.LDC);
+ this.cst = cst;
+ }
+
+ @Override
+ public int getType() {
+ return LDC_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitLdcInsn(cst);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new LdcInsnNode(cst);
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/LineNumberNode.java b/src/asm/scala/tools/asm/tree/LineNumberNode.java
new file mode 100644
index 0000000000..acc83c8d30
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LineNumberNode.java
@@ -0,0 +1,82 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a line number declaration. These nodes are pseudo
+ * instruction nodes in order to be inserted in an instruction list.
+ *
+ * @author Eric Bruneton
+ */
+public class LineNumberNode extends AbstractInsnNode {
+
+ /**
+ * A line number. This number refers to the source file from which the class
+ * was compiled.
+ */
+ public int line;
+
+ /**
+ * The first instruction corresponding to this line number.
+ */
+ public LabelNode start;
+
+ /**
+ * Constructs a new {@link LineNumberNode}.
+ *
+ * @param line a line number. This number refers to the source file from
+ * which the class was compiled.
+ * @param start the first instruction corresponding to this line number.
+ */
+ public LineNumberNode(final int line, final LabelNode start) {
+ super(-1);
+ this.line = line;
+ this.start = start;
+ }
+
+ @Override
+ public int getType() {
+ return LINE;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitLineNumber(line, start.getLabel());
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new LineNumberNode(line, clone(start, labels));
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/LocalVariableNode.java b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
new file mode 100644
index 0000000000..51cbd3ca00
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
@@ -0,0 +1,115 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a local variable declaration.
+ *
+ * @author Eric Bruneton
+ */
+public class LocalVariableNode {
+
+ /**
+ * The name of a local variable.
+ */
+ public String name;
+
+ /**
+ * The type descriptor of this local variable.
+ */
+ public String desc;
+
+ /**
+ * The signature of this local variable. May be <tt>null</tt>.
+ */
+ public String signature;
+
+ /**
+ * The first instruction corresponding to the scope of this local variable
+ * (inclusive).
+ */
+ public LabelNode start;
+
+ /**
+ * The last instruction corresponding to the scope of this local variable
+ * (exclusive).
+ */
+ public LabelNode end;
+
+ /**
+ * The local variable's index.
+ */
+ public int index;
+
+ /**
+ * Constructs a new {@link LocalVariableNode}.
+ *
+ * @param name the name of a local variable.
+ * @param desc the type descriptor of this local variable.
+ * @param signature the signature of this local variable. May be
+ * <tt>null</tt>.
+ * @param start the first instruction corresponding to the scope of this
+ * local variable (inclusive).
+ * @param end the last instruction corresponding to the scope of this local
+ * variable (exclusive).
+ * @param index the local variable's index.
+ */
+ public LocalVariableNode(
+ final String name,
+ final String desc,
+ final String signature,
+ final LabelNode start,
+ final LabelNode end,
+ final int index)
+ {
+ this.name = name;
+ this.desc = desc;
+ this.signature = signature;
+ this.start = start;
+ this.end = end;
+ this.index = index;
+ }
+
+ /**
+ * Makes the given visitor visit this local variable declaration.
+ *
+ * @param mv a method visitor.
+ */
+ public void accept(final MethodVisitor mv) {
+ mv.visitLocalVariable(name,
+ desc,
+ signature,
+ start.getLabel(),
+ end.getLabel(),
+ index);
+ }
+}
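
As an illustration only, a sketch of how a LocalVariableNode is typically registered on a MethodNode so debuggers can see a named slot. The helper name addDebugEntry, the variable name "count" and slot 1 are invented; only the scala.tools.asm types added by this diff are assumed.

    import scala.tools.asm.tree.LabelNode;
    import scala.tools.asm.tree.LocalVariableNode;
    import scala.tools.asm.tree.MethodNode;

    class LocalVariableSketch {
        // assumes mn is a non-abstract method, so mn.localVariables is non-null
        static void addDebugEntry(MethodNode mn, LabelNode begin, LabelNode end) {
            // slot 1 holds an int named "count", visible from 'begin' (inclusive) to 'end' (exclusive)
            mn.localVariables.add(
                new LocalVariableNode("count", "I", null, begin, end, 1));
        }
    }
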
diff --git a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
new file mode 100644
index 0000000000..6d0f971c29
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
@@ -0,0 +1,116 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a LOOKUPSWITCH instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class LookupSwitchInsnNode extends AbstractInsnNode {
+
+ /**
+ * Beginning of the default handler block.
+ */
+ public LabelNode dflt;
+
+ /**
+ * The values of the keys. This list is a list of {@link Integer} objects.
+ */
+ public List<Integer> keys;
+
+ /**
+ * Beginnings of the handler blocks. This list is a list of
+ * {@link LabelNode} objects.
+ */
+ public List<LabelNode> labels;
+
+ /**
+ * Constructs a new {@link LookupSwitchInsnNode}.
+ *
+ * @param dflt beginning of the default handler block.
+ * @param keys the values of the keys.
+ * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
+ * the beginning of the handler block for the <tt>keys[i]</tt> key.
+ */
+ public LookupSwitchInsnNode(
+ final LabelNode dflt,
+ final int[] keys,
+ final LabelNode[] labels)
+ {
+ super(Opcodes.LOOKUPSWITCH);
+ this.dflt = dflt;
+ this.keys = new ArrayList<Integer>(keys == null ? 0 : keys.length);
+ this.labels = new ArrayList<LabelNode>(labels == null ? 0 : labels.length);
+ if (keys != null) {
+ for (int i = 0; i < keys.length; ++i) {
+ this.keys.add(new Integer(keys[i]));
+ }
+ }
+ if (labels != null) {
+ this.labels.addAll(Arrays.asList(labels));
+ }
+ }
+
+ @Override
+ public int getType() {
+ return LOOKUPSWITCH_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ int[] keys = new int[this.keys.size()];
+ for (int i = 0; i < keys.length; ++i) {
+ keys[i] = this.keys.get(i).intValue();
+ }
+ Label[] labels = new Label[this.labels.size()];
+ for (int i = 0; i < labels.length; ++i) {
+ labels[i] = this.labels.get(i).getLabel();
+ }
+ mv.visitLookupSwitchInsn(dflt.getLabel(), keys, labels);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ LookupSwitchInsnNode clone = new LookupSwitchInsnNode(clone(dflt,
+ labels), null, clone(this.labels, labels));
+ clone.keys.addAll(keys);
+ return clone;
+ }
+}
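
For illustration, a sketch of how the constructor above is typically fed; the method name dispatchOn is invented, and the LabelNode targets would still have to be added to the enclosing instruction list.

    import scala.tools.asm.tree.LabelNode;
    import scala.tools.asm.tree.LookupSwitchInsnNode;

    class LookupSwitchSketch {
        static LookupSwitchInsnNode dispatchOn(int[] keys) {
            LabelNode dflt = new LabelNode();
            LabelNode[] targets = new LabelNode[keys.length];
            for (int i = 0; i < targets.length; i++) {
                targets[i] = new LabelNode();  // targets[i] handles keys[i]
            }
            // the node copies keys and labels into parallel java.util.Lists
            return new LookupSwitchInsnNode(dflt, keys, targets);
        }
    }
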
diff --git a/src/asm/scala/tools/asm/tree/MethodInsnNode.java b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
new file mode 100644
index 0000000000..c3036bc6b4
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
@@ -0,0 +1,107 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a method instruction. A method instruction is an
+ * instruction that invokes a method.
+ *
+ * @author Eric Bruneton
+ */
+public class MethodInsnNode extends AbstractInsnNode {
+
+ /**
+ * The internal name of the method's owner class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ */
+ public String owner;
+
+ /**
+ * The method's name.
+ */
+ public String name;
+
+ /**
+ * The method's descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * Constructs a new {@link MethodInsnNode}.
+ *
+ * @param opcode the opcode of the method instruction to be constructed. This
+ * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+ * INVOKEINTERFACE.
+ * @param owner the internal name of the method's owner class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public MethodInsnNode(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ super(opcode);
+ this.owner = owner;
+ this.name = name;
+ this.desc = desc;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be
+ * INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return METHOD_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitMethodInsn(opcode, owner, name, desc);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new MethodInsnNode(opcode, owner, name, desc);
+ }
+}
\ No newline at end of file
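
As a usage sketch, not part of the patch: the ASM 4 four-argument constructor above is enough to record a plain virtual call. The helper name printlnHello is invented; only the scala.tools.asm types added by this diff are assumed.

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.FieldInsnNode;
    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.LdcInsnNode;
    import scala.tools.asm.tree.MethodInsnNode;

    class MethodInsnSketch {
        static InsnList printlnHello() {
            InsnList insns = new InsnList();
            // push System.out; the owner is an internal name, the desc a field descriptor
            insns.add(new FieldInsnNode(Opcodes.GETSTATIC,
                    "java/lang/System", "out", "Ljava/io/PrintStream;"));
            insns.add(new LdcInsnNode("hello"));
            // invoke PrintStream.println(String)
            insns.add(new MethodInsnNode(Opcodes.INVOKEVIRTUAL,
                    "java/io/PrintStream", "println", "(Ljava/lang/String;)V"));
            return insns;
        }
    }
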
diff --git a/src/asm/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java
new file mode 100644
index 0000000000..70ec39e058
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/MethodNode.java
@@ -0,0 +1,645 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+
+/**
+ * A node that represents a method.
+ *
+ * @author Eric Bruneton
+ */
+public class MethodNode extends MethodVisitor {
+
+ /**
+ * The method's access flags (see {@link Opcodes}). This field also
+ * indicates if the method is synthetic and/or deprecated.
+ */
+ public int access;
+
+ /**
+ * The method's name.
+ */
+ public String name;
+
+ /**
+ * The method's descriptor (see {@link Type}).
+ */
+ public String desc;
+
+ /**
+ * The method's signature. May be <tt>null</tt>.
+ */
+ public String signature;
+
+ /**
+ * The internal names of the method's exception classes (see
+ * {@link Type#getInternalName() getInternalName}). This list is a list of
+ * {@link String} objects.
+ */
+ public List<String> exceptions;
+
+ /**
+ * The runtime visible annotations of this method. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label visible
+ */
+ public List<AnnotationNode> visibleAnnotations;
+
+ /**
+ * The runtime invisible annotations of this method. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label invisible
+ */
+ public List<AnnotationNode> invisibleAnnotations;
+
+ /**
+ * The non-standard attributes of this method. This list is a list of
+ * {@link Attribute} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.Attribute
+ */
+ public List<Attribute> attrs;
+
+ /**
+ * The default value of this annotation interface method. This field must be
+ * a {@link Byte}, {@link Boolean}, {@link Character}, {@link Short},
+ * {@link Integer}, {@link Long}, {@link Float}, {@link Double},
+ * {@link String} or {@link Type}, or a two-element String array (for
+ * enumeration values), an {@link AnnotationNode}, or a {@link List} of
+ * values of one of the preceding types. May be <tt>null</tt>.
+ */
+ public Object annotationDefault;
+
+ /**
+ * The runtime visible parameter annotations of this method. These lists are
+ * lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label visible parameters
+ */
+ public List<AnnotationNode>[] visibleParameterAnnotations;
+
+ /**
+ * The runtime invisible parameter annotations of this method. These lists
+ * are lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label invisible parameters
+ */
+ public List<AnnotationNode>[] invisibleParameterAnnotations;
+
+ /**
+ * The instructions of this method. This list is a list of
+ * {@link AbstractInsnNode} objects.
+ *
+ * @associates org.objectweb.asm.tree.AbstractInsnNode
+ * @label instructions
+ */
+ public InsnList instructions;
+
+ /**
+ * The try catch blocks of this method. This list is a list of
+ * {@link TryCatchBlockNode} objects.
+ *
+ * @associates org.objectweb.asm.tree.TryCatchBlockNode
+ */
+ public List<TryCatchBlockNode> tryCatchBlocks;
+
+ /**
+ * The maximum stack size of this method.
+ */
+ public int maxStack;
+
+ /**
+ * The maximum number of local variables of this method.
+ */
+ public int maxLocals;
+
+ /**
+ * The local variables of this method. This list is a list of
+ * {@link LocalVariableNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.LocalVariableNode
+ */
+ public List<LocalVariableNode> localVariables;
+
+ /**
+ * Whether the accept method has been called on this object.
+ */
+ private boolean visited;
+
+ /**
+ * Constructs an uninitialized {@link MethodNode}. <i>Subclasses must not
+ * use this constructor</i>. Instead, they must use the
+ * {@link #MethodNode(int)} version.
+ */
+ public MethodNode() {
+ this(Opcodes.ASM4);
+ }
+
+ /**
+ * Constructs an uninitialized {@link MethodNode}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public MethodNode(final int api) {
+ super(api);
+ this.instructions = new InsnList();
+ }
+
+ /**
+ * Constructs a new {@link MethodNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #MethodNode(int, int, String, String, String, String[])} version.
+ *
+ * @param access the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type}).
+ * @param signature the method's signature. May be <tt>null</tt>.
+ * @param exceptions the internal names of the method's exception classes
+ * (see {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
+ */
+ public MethodNode(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ this(Opcodes.ASM4, access, name, desc, signature, exceptions);
+ }
+
+ /**
+ * Constructs a new {@link MethodNode}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param access the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type}).
+ * @param signature the method's signature. May be <tt>null</tt>.
+ * @param exceptions the internal names of the method's exception classes
+ * (see {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
+ */
+ public MethodNode(
+ final int api,
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ super(api);
+ this.access = access;
+ this.name = name;
+ this.desc = desc;
+ this.signature = signature;
+ this.exceptions = new ArrayList<String>(exceptions == null
+ ? 0
+ : exceptions.length);
+ boolean isAbstract = (access & Opcodes.ACC_ABSTRACT) != 0;
+ if (!isAbstract) {
+ this.localVariables = new ArrayList<LocalVariableNode>(5);
+ }
+ this.tryCatchBlocks = new ArrayList<TryCatchBlockNode>();
+ if (exceptions != null) {
+ this.exceptions.addAll(Arrays.asList(exceptions));
+ }
+ this.instructions = new InsnList();
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the MethodVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public AnnotationVisitor visitAnnotationDefault() {
+ return new AnnotationNode(new ArrayList<Object>(0) {
+ @Override
+ public boolean add(final Object o) {
+ annotationDefault = o;
+ return super.add(o);
+ }
+ });
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ AnnotationNode an = new AnnotationNode(desc);
+ if (visible) {
+ if (visibleAnnotations == null) {
+ visibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ visibleAnnotations.add(an);
+ } else {
+ if (invisibleAnnotations == null) {
+ invisibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ invisibleAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
+ public AnnotationVisitor visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ AnnotationNode an = new AnnotationNode(desc);
+ if (visible) {
+ if (visibleParameterAnnotations == null) {
+ int params = Type.getArgumentTypes(this.desc).length;
+ visibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+ }
+ if (visibleParameterAnnotations[parameter] == null) {
+ visibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+ }
+ visibleParameterAnnotations[parameter].add(an);
+ } else {
+ if (invisibleParameterAnnotations == null) {
+ int params = Type.getArgumentTypes(this.desc).length;
+ invisibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+ }
+ if (invisibleParameterAnnotations[parameter] == null) {
+ invisibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+ }
+ invisibleParameterAnnotations[parameter].add(an);
+ }
+ return an;
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ if (attrs == null) {
+ attrs = new ArrayList<Attribute>(1);
+ }
+ attrs.add(attr);
+ }
+
+ @Override
+ public void visitCode() {
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ instructions.add(new FrameNode(type, nLocal, local == null
+ ? null
+ : getLabelNodes(local), nStack, stack == null
+ ? null
+ : getLabelNodes(stack)));
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ instructions.add(new InsnNode(opcode));
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ instructions.add(new IntInsnNode(opcode, operand));
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ instructions.add(new VarInsnNode(opcode, var));
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ instructions.add(new TypeInsnNode(opcode, type));
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ instructions.add(new FieldInsnNode(opcode, owner, name, desc));
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ instructions.add(new MethodInsnNode(opcode, owner, name, desc));
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ instructions.add(new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs));
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ instructions.add(new JumpInsnNode(opcode, getLabelNode(label)));
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ instructions.add(getLabelNode(label));
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ instructions.add(new LdcInsnNode(cst));
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ instructions.add(new IincInsnNode(var, increment));
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ instructions.add(new TableSwitchInsnNode(min,
+ max,
+ getLabelNode(dflt),
+ getLabelNodes(labels)));
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt),
+ keys,
+ getLabelNodes(labels)));
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ instructions.add(new MultiANewArrayInsnNode(desc, dims));
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ tryCatchBlocks.add(new TryCatchBlockNode(getLabelNode(start),
+ getLabelNode(end),
+ getLabelNode(handler),
+ type));
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ localVariables.add(new LocalVariableNode(name,
+ desc,
+ signature,
+ getLabelNode(start),
+ getLabelNode(end),
+ index));
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ instructions.add(new LineNumberNode(line, getLabelNode(start)));
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ this.maxStack = maxStack;
+ this.maxLocals = maxLocals;
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ /**
+ * Returns the LabelNode corresponding to the given Label. Creates a new
+ * LabelNode if necessary. The default implementation of this method uses
+ * the {@link Label#info} field to store associations between labels and
+ * label nodes.
+ *
+ * @param l a Label.
+ * @return the LabelNode corresponding to l.
+ */
+ protected LabelNode getLabelNode(final Label l) {
+ if (!(l.info instanceof LabelNode)) {
+ l.info = new LabelNode(l);
+ }
+ return (LabelNode) l.info;
+ }
+
+ private LabelNode[] getLabelNodes(final Label[] l) {
+ LabelNode[] nodes = new LabelNode[l.length];
+ for (int i = 0; i < l.length; ++i) {
+ nodes[i] = getLabelNode(l[i]);
+ }
+ return nodes;
+ }
+
+ private Object[] getLabelNodes(final Object[] objs) {
+ Object[] nodes = new Object[objs.length];
+ for (int i = 0; i < objs.length; ++i) {
+ Object o = objs[i];
+ if (o instanceof Label) {
+ o = getLabelNode((Label) o);
+ }
+ nodes[i] = o;
+ }
+ return nodes;
+ }
+
+ // ------------------------------------------------------------------------
+ // Accept method
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that this method node is compatible with the given ASM API
+ * version. This method checks that this node, and all its nodes
+ * recursively, do not contain elements that were introduced in more recent
+ * versions of the ASM API than the given version.
+ *
+ * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ */
+ public void check(final int api) {
+ // nothing to do
+ }
+
+ /**
+ * Makes the given class visitor visit this method.
+ *
+ * @param cv a class visitor.
+ */
+ public void accept(final ClassVisitor cv) {
+ String[] exceptions = new String[this.exceptions.size()];
+ this.exceptions.toArray(exceptions);
+ MethodVisitor mv = cv.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+ if (mv != null) {
+ accept(mv);
+ }
+ }
+
+ /**
+ * Makes the given method visitor visit this method.
+ *
+ * @param mv a method visitor.
+ */
+ public void accept(final MethodVisitor mv) {
+ // visits the method attributes
+ int i, j, n;
+ if (annotationDefault != null) {
+ AnnotationVisitor av = mv.visitAnnotationDefault();
+ AnnotationNode.accept(av, null, annotationDefault);
+ if (av != null) {
+ av.visitEnd();
+ }
+ }
+ n = visibleAnnotations == null ? 0 : visibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = visibleAnnotations.get(i);
+ an.accept(mv.visitAnnotation(an.desc, true));
+ }
+ n = invisibleAnnotations == null ? 0 : invisibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = invisibleAnnotations.get(i);
+ an.accept(mv.visitAnnotation(an.desc, false));
+ }
+ n = visibleParameterAnnotations == null
+ ? 0
+ : visibleParameterAnnotations.length;
+ for (i = 0; i < n; ++i) {
+ List<?> l = visibleParameterAnnotations[i];
+ if (l == null) {
+ continue;
+ }
+ for (j = 0; j < l.size(); ++j) {
+ AnnotationNode an = (AnnotationNode) l.get(j);
+ an.accept(mv.visitParameterAnnotation(i, an.desc, true));
+ }
+ }
+ n = invisibleParameterAnnotations == null
+ ? 0
+ : invisibleParameterAnnotations.length;
+ for (i = 0; i < n; ++i) {
+ List<?> l = invisibleParameterAnnotations[i];
+ if (l == null) {
+ continue;
+ }
+ for (j = 0; j < l.size(); ++j) {
+ AnnotationNode an = (AnnotationNode) l.get(j);
+ an.accept(mv.visitParameterAnnotation(i, an.desc, false));
+ }
+ }
+ if (visited) {
+ instructions.resetLabels();
+ }
+ n = attrs == null ? 0 : attrs.size();
+ for (i = 0; i < n; ++i) {
+ mv.visitAttribute(attrs.get(i));
+ }
+ // visits the method's code
+ if (instructions.size() > 0) {
+ mv.visitCode();
+ // visits try catch blocks
+ n = tryCatchBlocks == null ? 0 : tryCatchBlocks.size();
+ for (i = 0; i < n; ++i) {
+ tryCatchBlocks.get(i).accept(mv);
+ }
+ // visits instructions
+ instructions.accept(mv);
+ // visits local variables
+ n = localVariables == null ? 0 : localVariables.size();
+ for (i = 0; i < n; ++i) {
+ localVariables.get(i).accept(mv);
+ }
+ // visits maxs
+ mv.visitMaxs(maxStack, maxLocals);
+ visited = true;
+ }
+ mv.visitEnd();
+ }
+}
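
To make the visitor round trip above concrete, a minimal illustrative sketch that records a one-instruction method as a MethodNode and replays it into a ClassWriter from the same scala.tools.asm package. The class name demo/Identity and the method id are invented; COMPUTE_MAXS is used so the default maxStack/maxLocals of 0 are recomputed.

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InsnNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.VarInsnNode;

    class MethodNodeSketch {
        static byte[] identityClass() {
            ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
            cw.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "demo/Identity", null,
                    "java/lang/Object", null);

            // public static int id(int x) { return x; }
            MethodNode mn = new MethodNode(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC,
                    "id", "(I)I", null, null);
            mn.instructions.add(new VarInsnNode(Opcodes.ILOAD, 0));
            mn.instructions.add(new InsnNode(Opcodes.IRETURN));
            mn.accept(cw);  // replays the recorded method into the writer

            cw.visitEnd();
            return cw.toByteArray();
        }
    }
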
diff --git a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
new file mode 100644
index 0000000000..9dfba77335
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
@@ -0,0 +1,81 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a MULTIANEWARRAY instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class MultiANewArrayInsnNode extends AbstractInsnNode {
+
+ /**
+ * An array type descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * Number of dimensions of the array to allocate.
+ */
+ public int dims;
+
+ /**
+ * Constructs a new {@link MultiANewArrayInsnNode}.
+ *
+ * @param desc an array type descriptor (see {@link org.objectweb.asm.Type}).
+ * @param dims number of dimensions of the array to allocate.
+ */
+ public MultiANewArrayInsnNode(final String desc, final int dims) {
+ super(Opcodes.MULTIANEWARRAY);
+ this.desc = desc;
+ this.dims = dims;
+ }
+
+ @Override
+ public int getType() {
+ return MULTIANEWARRAY_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitMultiANewArrayInsn(desc, dims);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new MultiANewArrayInsnNode(desc, dims);
+ }
+
+}
\ No newline at end of file
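
For illustration, with an invented helper name: MULTIANEWARRAY pops the dimension sizes from the operand stack, so the node itself only carries the array descriptor and the dimension count.

    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.MultiANewArrayInsnNode;

    class MultiANewArraySketch {
        static void newIntMatrix(InsnList insns) {
            // expects two int sizes already on the operand stack;
            // "[[I" is the descriptor of int[][] and 2 dimensions are allocated
            insns.add(new MultiANewArrayInsnNode("[[I", 2));
        }
    }
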
diff --git a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
new file mode 100644
index 0000000000..929ad9b32b
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
@@ -0,0 +1,115 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a TABLESWITCH instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class TableSwitchInsnNode extends AbstractInsnNode {
+
+ /**
+ * The minimum key value.
+ */
+ public int min;
+
+ /**
+ * The maximum key value.
+ */
+ public int max;
+
+ /**
+ * Beginning of the default handler block.
+ */
+ public LabelNode dflt;
+
+ /**
+ * Beginnings of the handler blocks. This list is a list of
+ * {@link LabelNode} objects.
+ */
+ public List<LabelNode> labels;
+
+ /**
+ * Constructs a new {@link TableSwitchInsnNode}.
+ *
+ * @param min the minimum key value.
+ * @param max the maximum key value.
+ * @param dflt beginning of the default handler block.
+ * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
+ * the beginning of the handler block for the <tt>min + i</tt> key.
+ */
+ public TableSwitchInsnNode(
+ final int min,
+ final int max,
+ final LabelNode dflt,
+ final LabelNode... labels)
+ {
+ super(Opcodes.TABLESWITCH);
+ this.min = min;
+ this.max = max;
+ this.dflt = dflt;
+ this.labels = new ArrayList<LabelNode>();
+ if (labels != null) {
+ this.labels.addAll(Arrays.asList(labels));
+ }
+ }
+
+ @Override
+ public int getType() {
+ return TABLESWITCH_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ Label[] labels = new Label[this.labels.size()];
+ for (int i = 0; i < labels.length; ++i) {
+ labels[i] = this.labels.get(i).getLabel();
+ }
+ mv.visitTableSwitchInsn(min, max, dflt.getLabel(), labels);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new TableSwitchInsnNode(min,
+ max,
+ clone(dflt, labels),
+ clone(this.labels, labels));
+ }
+}
\ No newline at end of file
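
An illustrative sketch with invented names and key range: unlike LOOKUPSWITCH, the keys form the contiguous range min..max, so only the bounds are stored and targets[i] handles min + i.

    import scala.tools.asm.tree.LabelNode;
    import scala.tools.asm.tree.TableSwitchInsnNode;

    class TableSwitchSketch {
        static TableSwitchInsnNode dispatchOnRange() {
            LabelNode dflt = new LabelNode();
            LabelNode[] targets = { new LabelNode(), new LabelNode(), new LabelNode() };
            // keys 10, 11, 12; targets[0] handles 10, targets[1] handles 11, targets[2] handles 12
            return new TableSwitchInsnNode(10, 12, dflt, targets);
        }
    }
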
diff --git a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
new file mode 100644
index 0000000000..375b4cfcb9
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
@@ -0,0 +1,94 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a try catch block.
+ *
+ * @author Eric Bruneton
+ */
+public class TryCatchBlockNode {
+
+ /**
+ * Beginning of the exception handler's scope (inclusive).
+ */
+ public LabelNode start;
+
+ /**
+ * End of the exception handler's scope (exclusive).
+ */
+ public LabelNode end;
+
+ /**
+ * Beginning of the exception handler's code.
+ */
+ public LabelNode handler;
+
+ /**
+ * Internal name of the type of exceptions handled by the handler. May be
+ * <tt>null</tt> to catch any exceptions (for "finally" blocks).
+ */
+ public String type;
+
+ /**
+ * Constructs a new {@link TryCatchBlockNode}.
+ *
+ * @param start beginning of the exception handler's scope (inclusive).
+ * @param end end of the exception handler's scope (exclusive).
+ * @param handler beginning of the exception handler's code.
+ * @param type internal name of the type of exceptions handled by the
+ * handler, or <tt>null</tt> to catch any exceptions (for "finally"
+ * blocks).
+ */
+ public TryCatchBlockNode(
+ final LabelNode start,
+ final LabelNode end,
+ final LabelNode handler,
+ final String type)
+ {
+ this.start = start;
+ this.end = end;
+ this.handler = handler;
+ this.type = type;
+ }
+
+ /**
+ * Makes the given visitor visit this try catch block.
+ *
+ * @param mv a method visitor.
+ */
+ public void accept(final MethodVisitor mv) {
+ mv.visitTryCatchBlock(start.getLabel(), end.getLabel(), handler == null
+ ? null
+ : handler.getLabel(), type);
+ }
+}
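
As an illustrative sketch only: try/catch regions are not instructions, so they are registered on the MethodNode's tryCatchBlocks list. The helper name guard and the choice of java/io/IOException are invented.

    import scala.tools.asm.tree.LabelNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.TryCatchBlockNode;

    class TryCatchSketch {
        static void guard(MethodNode mn, LabelNode start, LabelNode end, LabelNode handler) {
            // route any IOException thrown in [start, end) to 'handler';
            // passing null as the type would catch everything (finally-style)
            mn.tryCatchBlocks.add(
                new TryCatchBlockNode(start, end, handler, "java/io/IOException"));
        }
    }
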
diff --git a/src/asm/scala/tools/asm/tree/TypeInsnNode.java b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
new file mode 100644
index 0000000000..0b2666c498
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
@@ -0,0 +1,87 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a type instruction. A type instruction is an
+ * instruction that takes a type descriptor as parameter.
+ *
+ * @author Eric Bruneton
+ */
+public class TypeInsnNode extends AbstractInsnNode {
+
+ /**
+ * The operand of this instruction. This operand is an internal name (see
+ * {@link org.objectweb.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * Constructs a new {@link TypeInsnNode}.
+ *
+ * @param opcode the opcode of the type instruction to be constructed. This
+ * opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param desc the operand of the instruction to be constructed. This
+ * operand is an internal name (see {@link org.objectweb.asm.Type}).
+ */
+ public TypeInsnNode(final int opcode, final String desc) {
+ super(opcode);
+ this.desc = desc;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be NEW,
+ * ANEWARRAY, CHECKCAST or INSTANCEOF.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return TYPE_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitTypeInsn(opcode, desc);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new TypeInsnNode(opcode, desc);
+ }
+}
\ No newline at end of file
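
A small usage sketch with invented names, not part of the patch: the operand is an internal name, not a type descriptor.

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.TypeInsnNode;

    class TypeInsnSketch {
        static void castTopOfStackToString(InsnList insns) {
            // note "java/lang/String" (internal name), not "Ljava/lang/String;" (descriptor)
            insns.add(new TypeInsnNode(Opcodes.CHECKCAST, "java/lang/String"));
        }
    }
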
diff --git a/src/asm/scala/tools/asm/tree/VarInsnNode.java b/src/asm/scala/tools/asm/tree/VarInsnNode.java
new file mode 100644
index 0000000000..89f572db59
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/VarInsnNode.java
@@ -0,0 +1,90 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a local variable instruction. A local variable
+ * instruction is an instruction that loads or stores the value of a local
+ * variable.
+ *
+ * @author Eric Bruneton
+ */
+public class VarInsnNode extends AbstractInsnNode {
+
+ /**
+ * The operand of this instruction. This operand is the index of a local
+ * variable.
+ */
+ public int var;
+
+ /**
+ * Constructs a new {@link VarInsnNode}.
+ *
+ * @param opcode the opcode of the local variable instruction to be
+ * constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD,
+ * ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
+ * @param var the operand of the instruction to be constructed. This operand
+ * is the index of a local variable.
+ */
+ public VarInsnNode(final int opcode, final int var) {
+ super(opcode);
+ this.var = var;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be ILOAD,
+ * LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE
+ * or RET.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return VAR_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitVarInsn(opcode, var);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new VarInsnNode(opcode, var);
+ }
+}
\ No newline at end of file
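
Another illustrative sketch with invented names: copying one int local into another is just a load followed by a store.

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.VarInsnNode;

    class VarInsnSketch {
        static InsnList copyIntLocal(int from, int to) {
            InsnList insns = new InsnList();
            insns.add(new VarInsnNode(Opcodes.ILOAD, from));  // push local slot 'from'
            insns.add(new VarInsnNode(Opcodes.ISTORE, to));   // pop into local slot 'to'
            return insns;
        }
    }
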
diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
new file mode 100644
index 0000000000..df387b0b8e
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
@@ -0,0 +1,549 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.IincInsnNode;
+import scala.tools.asm.tree.InsnList;
+import scala.tools.asm.tree.JumpInsnNode;
+import scala.tools.asm.tree.LabelNode;
+import scala.tools.asm.tree.LookupSwitchInsnNode;
+import scala.tools.asm.tree.MethodNode;
+import scala.tools.asm.tree.TableSwitchInsnNode;
+import scala.tools.asm.tree.TryCatchBlockNode;
+import scala.tools.asm.tree.VarInsnNode;
+
+/**
+ * A semantic bytecode analyzer. <i>This class does not fully check that JSR and
+ * RET instructions are valid.</i>
+ *
+ * @param <V> type of the Value used for the analysis.
+ *
+ * @author Eric Bruneton
+ */
+public class Analyzer<V extends Value> implements Opcodes {
+
+ private final Interpreter<V> interpreter;
+
+ private int n;
+
+ private InsnList insns;
+
+ private List<TryCatchBlockNode>[] handlers;
+
+ private Frame<V>[] frames;
+
+ private Subroutine[] subroutines;
+
+ private boolean[] queued;
+
+ private int[] queue;
+
+ private int top;
+
+ /**
+ * Constructs a new {@link Analyzer}.
+ *
+ * @param interpreter the interpreter to be used to symbolically interpret
+ * the bytecode instructions.
+ */
+ public Analyzer(final Interpreter<V> interpreter) {
+ this.interpreter = interpreter;
+ }
+
+ /**
+ * Analyzes the given method.
+ *
+ * @param owner the internal name of the class to which the method belongs.
+ * @param m the method to be analyzed.
+ * @return the symbolic state of the execution stack frame at each bytecode
+ * instruction of the method. The size of the returned array is
+ * equal to the number of instructions (and labels) of the method. A
+ * given frame is <tt>null</tt> if and only if the corresponding
+ * instruction cannot be reached (dead code).
+ * @throws AnalyzerException if a problem occurs during the analysis.
+ */
+ public Frame<V>[] analyze(final String owner, final MethodNode m)
+ throws AnalyzerException
+ {
+ if ((m.access & (ACC_ABSTRACT | ACC_NATIVE)) != 0) {
+ frames = (Frame<V>[])new Frame<?>[0];
+ return frames;
+ }
+ n = m.instructions.size();
+ insns = m.instructions;
+ handlers = (List<TryCatchBlockNode>[])new List<?>[n];
+ frames = (Frame<V>[])new Frame<?>[n];
+ subroutines = new Subroutine[n];
+ queued = new boolean[n];
+ queue = new int[n];
+ top = 0;
+
+ // computes exception handlers for each instruction
+ for (int i = 0; i < m.tryCatchBlocks.size(); ++i) {
+ TryCatchBlockNode tcb = m.tryCatchBlocks.get(i);
+ int begin = insns.indexOf(tcb.start);
+ int end = insns.indexOf(tcb.end);
+ for (int j = begin; j < end; ++j) {
+ List<TryCatchBlockNode> insnHandlers = handlers[j];
+ if (insnHandlers == null) {
+ insnHandlers = new ArrayList<TryCatchBlockNode>();
+ handlers[j] = insnHandlers;
+ }
+ insnHandlers.add(tcb);
+ }
+ }
+
+ // computes the subroutine for each instruction:
+ Subroutine main = new Subroutine(null, m.maxLocals, null);
+ List<AbstractInsnNode> subroutineCalls = new ArrayList<AbstractInsnNode>();
+ Map<LabelNode, Subroutine> subroutineHeads = new HashMap<LabelNode, Subroutine>();
+ findSubroutine(0, main, subroutineCalls);
+ while (!subroutineCalls.isEmpty()) {
+ JumpInsnNode jsr = (JumpInsnNode) subroutineCalls.remove(0);
+ Subroutine sub = subroutineHeads.get(jsr.label);
+ if (sub == null) {
+ sub = new Subroutine(jsr.label, m.maxLocals, jsr);
+ subroutineHeads.put(jsr.label, sub);
+ findSubroutine(insns.indexOf(jsr.label), sub, subroutineCalls);
+ } else {
+ sub.callers.add(jsr);
+ }
+ }
+ for (int i = 0; i < n; ++i) {
+ if (subroutines[i] != null && subroutines[i].start == null) {
+ subroutines[i] = null;
+ }
+ }
+
+ // initializes the data structures for the control flow analysis
+ Frame<V> current = newFrame(m.maxLocals, m.maxStack);
+ Frame<V> handler = newFrame(m.maxLocals, m.maxStack);
+ current.setReturn(interpreter.newValue(Type.getReturnType(m.desc)));
+ Type[] args = Type.getArgumentTypes(m.desc);
+ int local = 0;
+ if ((m.access & ACC_STATIC) == 0) {
+ Type ctype = Type.getObjectType(owner);
+ current.setLocal(local++, interpreter.newValue(ctype));
+ }
+ for (int i = 0; i < args.length; ++i) {
+ current.setLocal(local++, interpreter.newValue(args[i]));
+ if (args[i].getSize() == 2) {
+ current.setLocal(local++, interpreter.newValue(null));
+ }
+ }
+ while (local < m.maxLocals) {
+ current.setLocal(local++, interpreter.newValue(null));
+ }
+ merge(0, current, null);
+
+ init(owner, m);
+
+ // control flow analysis
+ while (top > 0) {
+ int insn = queue[--top];
+ Frame<V> f = frames[insn];
+ Subroutine subroutine = subroutines[insn];
+ queued[insn] = false;
+
+ AbstractInsnNode insnNode = null;
+ try {
+ insnNode = m.instructions.get(insn);
+ int insnOpcode = insnNode.getOpcode();
+ int insnType = insnNode.getType();
+
+ if (insnType == AbstractInsnNode.LABEL
+ || insnType == AbstractInsnNode.LINE
+ || insnType == AbstractInsnNode.FRAME)
+ {
+ merge(insn + 1, f, subroutine);
+ newControlFlowEdge(insn, insn + 1);
+ } else {
+ current.init(f).execute(insnNode, interpreter);
+ subroutine = subroutine == null ? null : subroutine.copy();
+
+ if (insnNode instanceof JumpInsnNode) {
+ JumpInsnNode j = (JumpInsnNode) insnNode;
+ if (insnOpcode != GOTO && insnOpcode != JSR) {
+ merge(insn + 1, current, subroutine);
+ newControlFlowEdge(insn, insn + 1);
+ }
+ int jump = insns.indexOf(j.label);
+ if (insnOpcode == JSR) {
+ merge(jump, current, new Subroutine(j.label,
+ m.maxLocals,
+ j));
+ } else {
+ merge(jump, current, subroutine);
+ }
+ newControlFlowEdge(insn, jump);
+ } else if (insnNode instanceof LookupSwitchInsnNode) {
+ LookupSwitchInsnNode lsi = (LookupSwitchInsnNode) insnNode;
+ int jump = insns.indexOf(lsi.dflt);
+ merge(jump, current, subroutine);
+ newControlFlowEdge(insn, jump);
+ for (int j = 0; j < lsi.labels.size(); ++j) {
+ LabelNode label = lsi.labels.get(j);
+ jump = insns.indexOf(label);
+ merge(jump, current, subroutine);
+ newControlFlowEdge(insn, jump);
+ }
+ } else if (insnNode instanceof TableSwitchInsnNode) {
+ TableSwitchInsnNode tsi = (TableSwitchInsnNode) insnNode;
+ int jump = insns.indexOf(tsi.dflt);
+ merge(jump, current, subroutine);
+ newControlFlowEdge(insn, jump);
+ for (int j = 0; j < tsi.labels.size(); ++j) {
+ LabelNode label = tsi.labels.get(j);
+ jump = insns.indexOf(label);
+ merge(jump, current, subroutine);
+ newControlFlowEdge(insn, jump);
+ }
+ } else if (insnOpcode == RET) {
+ if (subroutine == null) {
+ throw new AnalyzerException(insnNode, "RET instruction outside of a sub routine");
+ }
+ for (int i = 0; i < subroutine.callers.size(); ++i) {
+ JumpInsnNode caller = subroutine.callers.get(i);
+ int call = insns.indexOf(caller);
+ if (frames[call] != null) {
+ merge(call + 1,
+ frames[call],
+ current,
+ subroutines[call],
+ subroutine.access);
+ newControlFlowEdge(insn, call + 1);
+ }
+ }
+ } else if (insnOpcode != ATHROW
+ && (insnOpcode < IRETURN || insnOpcode > RETURN))
+ {
+ if (subroutine != null) {
+ if (insnNode instanceof VarInsnNode) {
+ int var = ((VarInsnNode) insnNode).var;
+ subroutine.access[var] = true;
+ if (insnOpcode == LLOAD || insnOpcode == DLOAD
+ || insnOpcode == LSTORE
+ || insnOpcode == DSTORE)
+ {
+ subroutine.access[var + 1] = true;
+ }
+ } else if (insnNode instanceof IincInsnNode) {
+ int var = ((IincInsnNode) insnNode).var;
+ subroutine.access[var] = true;
+ }
+ }
+ merge(insn + 1, current, subroutine);
+ newControlFlowEdge(insn, insn + 1);
+ }
+ }
+
+ List<TryCatchBlockNode> insnHandlers = handlers[insn];
+ if (insnHandlers != null) {
+ for (int i = 0; i < insnHandlers.size(); ++i) {
+ TryCatchBlockNode tcb = insnHandlers.get(i);
+ Type type;
+ if (tcb.type == null) {
+ type = Type.getObjectType("java/lang/Throwable");
+ } else {
+ type = Type.getObjectType(tcb.type);
+ }
+ int jump = insns.indexOf(tcb.handler);
+ if (newControlFlowExceptionEdge(insn, tcb)) {
+ handler.init(f);
+ handler.clearStack();
+ handler.push(interpreter.newValue(type));
+ merge(jump, handler, subroutine);
+ }
+ }
+ }
+ } catch (AnalyzerException e) {
+ throw new AnalyzerException(e.node, "Error at instruction " + insn
+ + ": " + e.getMessage(), e);
+ } catch (Exception e) {
+ throw new AnalyzerException(insnNode, "Error at instruction " + insn
+ + ": " + e.getMessage(), e);
+ }
+ }
+
+ return frames;
+ }
+
+ private void findSubroutine(int insn, final Subroutine sub, final List<AbstractInsnNode> calls)
+ throws AnalyzerException
+ {
+ while (true) {
+ if (insn < 0 || insn >= n) {
+ throw new AnalyzerException(null, "Execution can fall off end of the code");
+ }
+ if (subroutines[insn] != null) {
+ return;
+ }
+ subroutines[insn] = sub.copy();
+ AbstractInsnNode node = insns.get(insn);
+
+ // calls findSubroutine recursively on normal successors
+ if (node instanceof JumpInsnNode) {
+ if (node.getOpcode() == JSR) {
+ // do not follow a JSR, it leads to another subroutine!
+ calls.add(node);
+ } else {
+ JumpInsnNode jnode = (JumpInsnNode) node;
+ findSubroutine(insns.indexOf(jnode.label), sub, calls);
+ }
+ } else if (node instanceof TableSwitchInsnNode) {
+ TableSwitchInsnNode tsnode = (TableSwitchInsnNode) node;
+ findSubroutine(insns.indexOf(tsnode.dflt), sub, calls);
+ for (int i = tsnode.labels.size() - 1; i >= 0; --i) {
+ LabelNode l = tsnode.labels.get(i);
+ findSubroutine(insns.indexOf(l), sub, calls);
+ }
+ } else if (node instanceof LookupSwitchInsnNode) {
+ LookupSwitchInsnNode lsnode = (LookupSwitchInsnNode) node;
+ findSubroutine(insns.indexOf(lsnode.dflt), sub, calls);
+ for (int i = lsnode.labels.size() - 1; i >= 0; --i) {
+ LabelNode l = lsnode.labels.get(i);
+ findSubroutine(insns.indexOf(l), sub, calls);
+ }
+ }
+
+ // calls findSubroutine recursively on exception handler successors
+ List<TryCatchBlockNode> insnHandlers = handlers[insn];
+ if (insnHandlers != null) {
+ for (int i = 0; i < insnHandlers.size(); ++i) {
+ TryCatchBlockNode tcb = insnHandlers.get(i);
+ findSubroutine(insns.indexOf(tcb.handler), sub, calls);
+ }
+ }
+
+            // if insn does not fall through to the next instruction, return.
+ switch (node.getOpcode()) {
+ case GOTO:
+ case RET:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case LRETURN:
+ case FRETURN:
+ case DRETURN:
+ case ARETURN:
+ case RETURN:
+ case ATHROW:
+ return;
+ }
+ insn++;
+ }
+ }
+
+ /**
+     * Returns the symbolic stack frame for each instruction of the most
+     * recently analyzed method.
+ *
+ * @return the symbolic state of the execution stack frame at each bytecode
+ * instruction of the method. The size of the returned array is
+ * equal to the number of instructions (and labels) of the method. A
+ * given frame is <tt>null</tt> if the corresponding instruction
+     *         cannot be reached, or if an error occurred during the analysis of
+ * the method.
+ */
+ public Frame<V>[] getFrames() {
+ return frames;
+ }
+
+ /**
+ * Returns the exception handlers for the given instruction.
+ *
+     * @param insn the index of an instruction of the most recently analyzed
+ * method.
+ * @return a list of {@link TryCatchBlockNode} objects.
+ */
+ public List<TryCatchBlockNode> getHandlers(final int insn) {
+ return handlers[insn];
+ }
+
+ /**
+     * Initializes this analyzer. This method is called just before the
+     * execution of the control flow analysis loop in #analyze. The default
+ * implementation of this method does nothing.
+ *
+ * @param owner the internal name of the class to which the method belongs.
+ * @param m the method to be analyzed.
+ * @throws AnalyzerException if a problem occurs.
+ */
+ protected void init(String owner, MethodNode m) throws AnalyzerException {
+ }
+
+ /**
+ * Constructs a new frame with the given size.
+ *
+ * @param nLocals the maximum number of local variables of the frame.
+ * @param nStack the maximum stack size of the frame.
+ * @return the created frame.
+ */
+ protected Frame<V> newFrame(final int nLocals, final int nStack) {
+ return new Frame<V>(nLocals, nStack);
+ }
+
+ /**
+ * Constructs a new frame that is identical to the given frame.
+ *
+ * @param src a frame.
+ * @return the created frame.
+ */
+ protected Frame<V> newFrame(final Frame<? extends V> src) {
+ return new Frame<V>(src);
+ }
+
+ /**
+ * Creates a control flow graph edge. The default implementation of this
+     * method does nothing. It can be overridden in order to construct the
+ * control flow graph of a method (this method is called by the
+ * {@link #analyze analyze} method during its visit of the method's code).
+ *
+ * @param insn an instruction index.
+ * @param successor index of a successor instruction.
+ */
+ protected void newControlFlowEdge(final int insn, final int successor) {
+ }
+
+ /**
+ * Creates a control flow graph edge corresponding to an exception handler.
+ * The default implementation of this method does nothing. It can be
+ * overridden in order to construct the control flow graph of a method (this
+ * method is called by the {@link #analyze analyze} method during its visit
+ * of the method's code).
+ *
+ * @param insn an instruction index.
+ * @param successor index of a successor instruction.
+ * @return true if this edge must be considered in the data flow analysis
+ * performed by this analyzer, or false otherwise. The default
+ * implementation of this method always returns true.
+ */
+ protected boolean newControlFlowExceptionEdge(
+ final int insn,
+ final int successor)
+ {
+ return true;
+ }
+
+ /**
+ * Creates a control flow graph edge corresponding to an exception handler.
+ * The default implementation of this method delegates to
+ * {@link #newControlFlowExceptionEdge(int, int)
+ * newControlFlowExceptionEdge(int, int)}. It can be overridden in order to
+ * construct the control flow graph of a method (this method is called by
+ * the {@link #analyze analyze} method during its visit of the method's
+ * code).
+ *
+ * @param insn an instruction index.
+ * @param tcb TryCatchBlockNode corresponding to this edge.
+ * @return true if this edge must be considered in the data flow analysis
+ * performed by this analyzer, or false otherwise. The default
+ * implementation of this method delegates to
+ * {@link #newControlFlowExceptionEdge(int, int)
+ * newControlFlowExceptionEdge(int, int)}.
+ */
+ protected boolean newControlFlowExceptionEdge(
+ final int insn,
+ final TryCatchBlockNode tcb)
+ {
+ return newControlFlowExceptionEdge(insn, insns.indexOf(tcb.handler));
+ }
+
+ // -------------------------------------------------------------------------
+
+ private void merge(
+ final int insn,
+ final Frame<V> frame,
+ final Subroutine subroutine) throws AnalyzerException
+ {
+ Frame<V> oldFrame = frames[insn];
+ Subroutine oldSubroutine = subroutines[insn];
+ boolean changes;
+
+ if (oldFrame == null) {
+ frames[insn] = newFrame(frame);
+ changes = true;
+ } else {
+ changes = oldFrame.merge(frame, interpreter);
+ }
+
+ if (oldSubroutine == null) {
+ if (subroutine != null) {
+ subroutines[insn] = subroutine.copy();
+ changes = true;
+ }
+ } else {
+ if (subroutine != null) {
+ changes |= oldSubroutine.merge(subroutine);
+ }
+ }
+ if (changes && !queued[insn]) {
+ queued[insn] = true;
+ queue[top++] = insn;
+ }
+ }
+
+ private void merge(
+ final int insn,
+ final Frame<V> beforeJSR,
+ final Frame<V> afterRET,
+ final Subroutine subroutineBeforeJSR,
+ final boolean[] access) throws AnalyzerException
+ {
+ Frame<V> oldFrame = frames[insn];
+ Subroutine oldSubroutine = subroutines[insn];
+ boolean changes;
+
+ afterRET.merge(beforeJSR, access);
+
+ if (oldFrame == null) {
+ frames[insn] = newFrame(afterRET);
+ changes = true;
+ } else {
+ changes = oldFrame.merge(afterRET, interpreter);
+ }
+
+ if (oldSubroutine != null && subroutineBeforeJSR != null) {
+ changes |= oldSubroutine.merge(subroutineBeforeJSR);
+ }
+ if (changes && !queued[insn]) {
+ queued[insn] = true;
+ queue[top++] = insn;
+ }
+ }
+}
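
The newControlFlowEdge and newControlFlowExceptionEdge hooks documented above are the intended extension points for building a control flow graph. The following minimal sketch is not part of the sources added by this patch; the class name CfgAnalyzer is invented, and it assumes the Analyzer(Interpreter) constructor declared earlier in Analyzer.java.

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;

    // Records, for every instruction index, the indices of its normal successors.
    public class CfgAnalyzer extends Analyzer<BasicValue> {
        private final Map<Integer, Set<Integer>> successors =
            new HashMap<Integer, Set<Integer>>();

        public CfgAnalyzer() {
            super(new BasicInterpreter());
        }

        @Override
        protected void newControlFlowEdge(final int insn, final int successor) {
            Set<Integer> s = successors.get(insn);
            if (s == null) {
                s = new HashSet<Integer>();
                successors.put(insn, s);
            }
            s.add(successor);
        }

        // Runs the data flow analysis and returns the recorded edges.
        public Map<Integer, Set<Integer>> edgesOf(String owner, MethodNode mn)
                throws AnalyzerException {
            successors.clear();
            analyze(owner, mn);
            return successors;
        }
    }

Exception edges could be recorded in the same way by also overriding newControlFlowExceptionEdge(int, int).
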
diff --git a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
new file mode 100644
index 0000000000..a89bb3513f
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
@@ -0,0 +1,64 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import scala.tools.asm.tree.AbstractInsnNode;
+
+/**
+ * Thrown if a problem occurs during the analysis of a method.
+ *
+ * @author Bing Ran
+ * @author Eric Bruneton
+ */
+public class AnalyzerException extends Exception {
+
+ public final AbstractInsnNode node;
+
+ public AnalyzerException(final AbstractInsnNode node, final String msg) {
+ super(msg);
+ this.node = node;
+ }
+
+ public AnalyzerException(final AbstractInsnNode node, final String msg, final Throwable exception) {
+ super(msg, exception);
+ this.node = node;
+ }
+
+ public AnalyzerException(
+ final AbstractInsnNode node,
+ final String msg,
+ final Object expected,
+ final Value encountered)
+ {
+ super((msg == null ? "Expected " : msg + ": expected ") + expected
+ + ", but found " + encountered);
+ this.node = node;
+ }
+}
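
The public node field is what lets callers map an analysis failure back to a concrete instruction. A small illustrative sketch (ErrorReportSketch and check are invented names; it relies on InsnList.indexOf from the tree package):

    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;

    public class ErrorReportSketch {
        // Analyzes one method and prints the index of the instruction that failed, if any.
        static void check(String owner, MethodNode mn) {
            try {
                new Analyzer<BasicValue>(new BasicInterpreter()).analyze(owner, mn);
            } catch (AnalyzerException e) {
                int index = e.node == null ? -1 : mn.instructions.indexOf(e.node);
                System.err.println(mn.name + mn.desc
                        + ": instruction " + index + ": " + e.getMessage());
            }
        }
    }
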
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
new file mode 100644
index 0000000000..64ddcc11e6
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
@@ -0,0 +1,365 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.List;
+
+import scala.tools.asm.Handle;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.FieldInsnNode;
+import scala.tools.asm.tree.IntInsnNode;
+import scala.tools.asm.tree.InvokeDynamicInsnNode;
+import scala.tools.asm.tree.LdcInsnNode;
+import scala.tools.asm.tree.MethodInsnNode;
+import scala.tools.asm.tree.MultiANewArrayInsnNode;
+import scala.tools.asm.tree.TypeInsnNode;
+
+/**
+ * An {@link Interpreter} for {@link BasicValue} values.
+ *
+ * @author Eric Bruneton
+ * @author Bing Ran
+ */
+public class BasicInterpreter extends Interpreter<BasicValue> implements
+ Opcodes
+{
+
+ public BasicInterpreter() {
+ super(ASM4);
+ }
+
+ protected BasicInterpreter(final int api) {
+ super(api);
+ }
+
+ @Override
+ public BasicValue newValue(final Type type) {
+ if (type == null) {
+ return BasicValue.UNINITIALIZED_VALUE;
+ }
+ switch (type.getSort()) {
+ case Type.VOID:
+ return null;
+ case Type.BOOLEAN:
+ case Type.CHAR:
+ case Type.BYTE:
+ case Type.SHORT:
+ case Type.INT:
+ return BasicValue.INT_VALUE;
+ case Type.FLOAT:
+ return BasicValue.FLOAT_VALUE;
+ case Type.LONG:
+ return BasicValue.LONG_VALUE;
+ case Type.DOUBLE:
+ return BasicValue.DOUBLE_VALUE;
+ case Type.ARRAY:
+ case Type.OBJECT:
+ return BasicValue.REFERENCE_VALUE;
+ default:
+ throw new Error("Internal error");
+ }
+ }
+
+ @Override
+ public BasicValue newOperation(final AbstractInsnNode insn)
+ throws AnalyzerException
+ {
+ switch (insn.getOpcode()) {
+ case ACONST_NULL:
+ return newValue(Type.getObjectType("null"));
+ case ICONST_M1:
+ case ICONST_0:
+ case ICONST_1:
+ case ICONST_2:
+ case ICONST_3:
+ case ICONST_4:
+ case ICONST_5:
+ return BasicValue.INT_VALUE;
+ case LCONST_0:
+ case LCONST_1:
+ return BasicValue.LONG_VALUE;
+ case FCONST_0:
+ case FCONST_1:
+ case FCONST_2:
+ return BasicValue.FLOAT_VALUE;
+ case DCONST_0:
+ case DCONST_1:
+ return BasicValue.DOUBLE_VALUE;
+ case BIPUSH:
+ case SIPUSH:
+ return BasicValue.INT_VALUE;
+ case LDC:
+ Object cst = ((LdcInsnNode) insn).cst;
+ if (cst instanceof Integer) {
+ return BasicValue.INT_VALUE;
+ } else if (cst instanceof Float) {
+ return BasicValue.FLOAT_VALUE;
+ } else if (cst instanceof Long) {
+ return BasicValue.LONG_VALUE;
+ } else if (cst instanceof Double) {
+ return BasicValue.DOUBLE_VALUE;
+ } else if (cst instanceof String) {
+ return newValue(Type.getObjectType("java/lang/String"));
+ } else if (cst instanceof Type) {
+ int sort = ((Type) cst).getSort();
+ if (sort == Type.OBJECT || sort == Type.ARRAY) {
+ return newValue(Type.getObjectType("java/lang/Class"));
+ } else if (sort == Type.METHOD) {
+ return newValue(Type.getObjectType("java/lang/invoke/MethodType"));
+ } else {
+ throw new IllegalArgumentException("Illegal LDC constant " + cst);
+ }
+ } else if (cst instanceof Handle) {
+ return newValue(Type.getObjectType("java/lang/invoke/MethodHandle"));
+ } else {
+ throw new IllegalArgumentException("Illegal LDC constant " + cst);
+ }
+ case JSR:
+ return BasicValue.RETURNADDRESS_VALUE;
+ case GETSTATIC:
+ return newValue(Type.getType(((FieldInsnNode) insn).desc));
+ case NEW:
+ return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
+ default:
+ throw new Error("Internal error.");
+ }
+ }
+
+ @Override
+ public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
+ throws AnalyzerException
+ {
+ return value;
+ }
+
+ @Override
+ public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
+ throws AnalyzerException
+ {
+ switch (insn.getOpcode()) {
+ case INEG:
+ case IINC:
+ case L2I:
+ case F2I:
+ case D2I:
+ case I2B:
+ case I2C:
+ case I2S:
+ return BasicValue.INT_VALUE;
+ case FNEG:
+ case I2F:
+ case L2F:
+ case D2F:
+ return BasicValue.FLOAT_VALUE;
+ case LNEG:
+ case I2L:
+ case F2L:
+ case D2L:
+ return BasicValue.LONG_VALUE;
+ case DNEG:
+ case I2D:
+ case L2D:
+ case F2D:
+ return BasicValue.DOUBLE_VALUE;
+ case IFEQ:
+ case IFNE:
+ case IFLT:
+ case IFGE:
+ case IFGT:
+ case IFLE:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case LRETURN:
+ case FRETURN:
+ case DRETURN:
+ case ARETURN:
+ case PUTSTATIC:
+ return null;
+ case GETFIELD:
+ return newValue(Type.getType(((FieldInsnNode) insn).desc));
+ case NEWARRAY:
+ switch (((IntInsnNode) insn).operand) {
+ case T_BOOLEAN:
+ return newValue(Type.getType("[Z"));
+ case T_CHAR:
+ return newValue(Type.getType("[C"));
+ case T_BYTE:
+ return newValue(Type.getType("[B"));
+ case T_SHORT:
+ return newValue(Type.getType("[S"));
+ case T_INT:
+ return newValue(Type.getType("[I"));
+ case T_FLOAT:
+ return newValue(Type.getType("[F"));
+ case T_DOUBLE:
+ return newValue(Type.getType("[D"));
+ case T_LONG:
+ return newValue(Type.getType("[J"));
+ default:
+ throw new AnalyzerException(insn, "Invalid array type");
+ }
+ case ANEWARRAY:
+ String desc = ((TypeInsnNode) insn).desc;
+ return newValue(Type.getType("[" + Type.getObjectType(desc)));
+ case ARRAYLENGTH:
+ return BasicValue.INT_VALUE;
+ case ATHROW:
+ return null;
+ case CHECKCAST:
+ desc = ((TypeInsnNode) insn).desc;
+ return newValue(Type.getObjectType(desc));
+ case INSTANCEOF:
+ return BasicValue.INT_VALUE;
+ case MONITORENTER:
+ case MONITOREXIT:
+ case IFNULL:
+ case IFNONNULL:
+ return null;
+ default:
+ throw new Error("Internal error.");
+ }
+ }
+
+ @Override
+ public BasicValue binaryOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value1,
+ final BasicValue value2) throws AnalyzerException
+ {
+ switch (insn.getOpcode()) {
+ case IALOAD:
+ case BALOAD:
+ case CALOAD:
+ case SALOAD:
+ case IADD:
+ case ISUB:
+ case IMUL:
+ case IDIV:
+ case IREM:
+ case ISHL:
+ case ISHR:
+ case IUSHR:
+ case IAND:
+ case IOR:
+ case IXOR:
+ return BasicValue.INT_VALUE;
+ case FALOAD:
+ case FADD:
+ case FSUB:
+ case FMUL:
+ case FDIV:
+ case FREM:
+ return BasicValue.FLOAT_VALUE;
+ case LALOAD:
+ case LADD:
+ case LSUB:
+ case LMUL:
+ case LDIV:
+ case LREM:
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ case LAND:
+ case LOR:
+ case LXOR:
+ return BasicValue.LONG_VALUE;
+ case DALOAD:
+ case DADD:
+ case DSUB:
+ case DMUL:
+ case DDIV:
+ case DREM:
+ return BasicValue.DOUBLE_VALUE;
+ case AALOAD:
+ return BasicValue.REFERENCE_VALUE;
+ case LCMP:
+ case FCMPL:
+ case FCMPG:
+ case DCMPL:
+ case DCMPG:
+ return BasicValue.INT_VALUE;
+ case IF_ICMPEQ:
+ case IF_ICMPNE:
+ case IF_ICMPLT:
+ case IF_ICMPGE:
+ case IF_ICMPGT:
+ case IF_ICMPLE:
+ case IF_ACMPEQ:
+ case IF_ACMPNE:
+ case PUTFIELD:
+ return null;
+ default:
+ throw new Error("Internal error.");
+ }
+ }
+
+ @Override
+ public BasicValue ternaryOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value1,
+ final BasicValue value2,
+ final BasicValue value3) throws AnalyzerException
+ {
+ return null;
+ }
+
+ @Override
+ public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
+ throws AnalyzerException
+ {
+ int opcode = insn.getOpcode();
+ if (opcode == MULTIANEWARRAY) {
+ return newValue(Type.getType(((MultiANewArrayInsnNode) insn).desc));
+ } else if (opcode == INVOKEDYNAMIC){
+ return newValue(Type.getReturnType(((InvokeDynamicInsnNode) insn).desc));
+ } else {
+ return newValue(Type.getReturnType(((MethodInsnNode) insn).desc));
+ }
+ }
+
+ @Override
+ public void returnOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value,
+ final BasicValue expected) throws AnalyzerException
+ {
+ }
+
+ @Override
+ public BasicValue merge(final BasicValue v, final BasicValue w) {
+ if (!v.equals(w)) {
+ return BasicValue.UNINITIALIZED_VALUE;
+ }
+ return v;
+ }
+}
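
Combined with the Analyzer above, BasicInterpreter is already enough for a coarse unreachable-code scan, because getFrames() leaves a null entry for every instruction the analysis never reached. A hedged sketch, assuming ASM 4's ClassReader(String) convenience constructor and a typed ClassNode.methods list; DeadCodeSketch is an invented name:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.Frame;

    public class DeadCodeSketch {
        public static void main(String[] args) throws Exception {
            // Any class reachable on the classpath will do.
            ClassReader cr = new ClassReader(args.length > 0 ? args[0] : "java.lang.String");
            ClassNode cn = new ClassNode();
            cr.accept(cn, 0);
            for (MethodNode mn : cn.methods) {
                Analyzer<BasicValue> a = new Analyzer<BasicValue>(new BasicInterpreter());
                Frame<BasicValue>[] frames = a.analyze(cn.name, mn);
                for (int i = 0; i < frames.length; ++i) {
                    if (frames[i] == null) {
                        System.out.println(mn.name + mn.desc
                                + ": unreachable instruction at index " + i);
                    }
                }
            }
        }
    }
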
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
new file mode 100644
index 0000000000..6c449db9b0
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
@@ -0,0 +1,108 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import scala.tools.asm.Type;
+
+/**
+ * A {@link Value} that is represented by its type in a seven-type type system.
+ * This type system distinguishes the UNINITIALIZED, INT, FLOAT, LONG, DOUBLE,
+ * REFERENCE and RETURNADDRESS types.
+ *
+ * @author Eric Bruneton
+ */
+public class BasicValue implements Value {
+
+ public static final BasicValue UNINITIALIZED_VALUE = new BasicValue(null);
+
+ public static final BasicValue INT_VALUE = new BasicValue(Type.INT_TYPE);
+
+ public static final BasicValue FLOAT_VALUE = new BasicValue(Type.FLOAT_TYPE);
+
+ public static final BasicValue LONG_VALUE = new BasicValue(Type.LONG_TYPE);
+
+ public static final BasicValue DOUBLE_VALUE = new BasicValue(Type.DOUBLE_TYPE);
+
+ public static final BasicValue REFERENCE_VALUE = new BasicValue(Type.getObjectType("java/lang/Object"));
+
+ public static final BasicValue RETURNADDRESS_VALUE = new BasicValue(Type.VOID_TYPE);
+
+ private final Type type;
+
+ public BasicValue(final Type type) {
+ this.type = type;
+ }
+
+ public Type getType() {
+ return type;
+ }
+
+ public int getSize() {
+ return type == Type.LONG_TYPE || type == Type.DOUBLE_TYPE ? 2 : 1;
+ }
+
+ public boolean isReference() {
+ return type != null
+ && (type.getSort() == Type.OBJECT || type.getSort() == Type.ARRAY);
+ }
+
+ @Override
+ public boolean equals(final Object value) {
+ if (value == this) {
+ return true;
+ } else if (value instanceof BasicValue) {
+ if (type == null) {
+ return ((BasicValue) value).type == null;
+ } else {
+ return type.equals(((BasicValue) value).type);
+ }
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ return type == null ? 0 : type.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ if (this == UNINITIALIZED_VALUE) {
+ return ".";
+ } else if (this == RETURNADDRESS_VALUE) {
+ return "A";
+ } else if (this == REFERENCE_VALUE) {
+ return "R";
+ } else {
+ return type.getDescriptor();
+ }
+ }
+}
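
BasicInterpreter.merge (shown above) treats these values as a flat lattice: merging a value with itself is the identity, while merging any two different values loses all information and yields UNINITIALIZED_VALUE. A small illustration (MergeSketch is an invented name):

    import scala.tools.asm.Type;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;

    public class MergeSketch {
        public static void main(String[] args) {
            BasicInterpreter interp = new BasicInterpreter();
            BasicValue string = new BasicValue(Type.getObjectType("java/lang/String"));

            // Equal values merge to themselves: prints the descriptor "I".
            System.out.println(interp.merge(BasicValue.INT_VALUE, BasicValue.INT_VALUE));
            // Two different values collapse to UNINITIALIZED_VALUE, printed as ".".
            System.out.println(interp.merge(string, BasicValue.INT_VALUE));
        }
    }
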
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
new file mode 100644
index 0000000000..9297dd9294
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
@@ -0,0 +1,459 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.List;
+
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.FieldInsnNode;
+import scala.tools.asm.tree.InvokeDynamicInsnNode;
+import scala.tools.asm.tree.MethodInsnNode;
+
+/**
+ * An extended {@link BasicInterpreter} that checks that bytecode instructions
+ * are correctly used.
+ *
+ * @author Eric Bruneton
+ * @author Bing Ran
+ */
+public class BasicVerifier extends BasicInterpreter {
+
+ public BasicVerifier() {
+ super(ASM4);
+ }
+
+ protected BasicVerifier(final int api) {
+ super(api);
+ }
+
+ @Override
+ public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
+ throws AnalyzerException
+ {
+ Value expected;
+ switch (insn.getOpcode()) {
+ case ILOAD:
+ case ISTORE:
+ expected = BasicValue.INT_VALUE;
+ break;
+ case FLOAD:
+ case FSTORE:
+ expected = BasicValue.FLOAT_VALUE;
+ break;
+ case LLOAD:
+ case LSTORE:
+ expected = BasicValue.LONG_VALUE;
+ break;
+ case DLOAD:
+ case DSTORE:
+ expected = BasicValue.DOUBLE_VALUE;
+ break;
+ case ALOAD:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn,
+ null,
+ "an object reference",
+ value);
+ }
+ return value;
+ case ASTORE:
+ if (!value.isReference()
+ && !BasicValue.RETURNADDRESS_VALUE.equals(value))
+ {
+ throw new AnalyzerException(insn,
+ null,
+ "an object reference or a return address",
+ value);
+ }
+ return value;
+ default:
+ return value;
+ }
+ if (!expected.equals(value)) {
+ throw new AnalyzerException(insn, null, expected, value);
+ }
+ return value;
+ }
+
+ @Override
+ public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
+ throws AnalyzerException
+ {
+ BasicValue expected;
+ switch (insn.getOpcode()) {
+ case INEG:
+ case IINC:
+ case I2F:
+ case I2L:
+ case I2D:
+ case I2B:
+ case I2C:
+ case I2S:
+ case IFEQ:
+ case IFNE:
+ case IFLT:
+ case IFGE:
+ case IFGT:
+ case IFLE:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case NEWARRAY:
+ case ANEWARRAY:
+ expected = BasicValue.INT_VALUE;
+ break;
+ case FNEG:
+ case F2I:
+ case F2L:
+ case F2D:
+ case FRETURN:
+ expected = BasicValue.FLOAT_VALUE;
+ break;
+ case LNEG:
+ case L2I:
+ case L2F:
+ case L2D:
+ case LRETURN:
+ expected = BasicValue.LONG_VALUE;
+ break;
+ case DNEG:
+ case D2I:
+ case D2F:
+ case D2L:
+ case DRETURN:
+ expected = BasicValue.DOUBLE_VALUE;
+ break;
+ case GETFIELD:
+ expected = newValue(Type.getObjectType(((FieldInsnNode) insn).owner));
+ break;
+ case CHECKCAST:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn,
+ null,
+ "an object reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case ARRAYLENGTH:
+ if (!isArrayValue(value)) {
+ throw new AnalyzerException(insn,
+ null,
+ "an array reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case ARETURN:
+ case ATHROW:
+ case INSTANCEOF:
+ case MONITORENTER:
+ case MONITOREXIT:
+ case IFNULL:
+ case IFNONNULL:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn,
+ null,
+ "an object reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case PUTSTATIC:
+ expected = newValue(Type.getType(((FieldInsnNode) insn).desc));
+ break;
+ default:
+ throw new Error("Internal error.");
+ }
+ if (!isSubTypeOf(value, expected)) {
+ throw new AnalyzerException(insn, null, expected, value);
+ }
+ return super.unaryOperation(insn, value);
+ }
+
+ @Override
+ public BasicValue binaryOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value1,
+ final BasicValue value2) throws AnalyzerException
+ {
+ BasicValue expected1;
+ BasicValue expected2;
+ switch (insn.getOpcode()) {
+ case IALOAD:
+ expected1 = newValue(Type.getType("[I"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case BALOAD:
+ if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+ expected1 = newValue(Type.getType("[Z"));
+ } else {
+ expected1 = newValue(Type.getType("[B"));
+ }
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case CALOAD:
+ expected1 = newValue(Type.getType("[C"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case SALOAD:
+ expected1 = newValue(Type.getType("[S"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case LALOAD:
+ expected1 = newValue(Type.getType("[J"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case FALOAD:
+ expected1 = newValue(Type.getType("[F"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case DALOAD:
+ expected1 = newValue(Type.getType("[D"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case AALOAD:
+ expected1 = newValue(Type.getType("[Ljava/lang/Object;"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case IADD:
+ case ISUB:
+ case IMUL:
+ case IDIV:
+ case IREM:
+ case ISHL:
+ case ISHR:
+ case IUSHR:
+ case IAND:
+ case IOR:
+ case IXOR:
+ case IF_ICMPEQ:
+ case IF_ICMPNE:
+ case IF_ICMPLT:
+ case IF_ICMPGE:
+ case IF_ICMPGT:
+ case IF_ICMPLE:
+ expected1 = BasicValue.INT_VALUE;
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case FADD:
+ case FSUB:
+ case FMUL:
+ case FDIV:
+ case FREM:
+ case FCMPL:
+ case FCMPG:
+ expected1 = BasicValue.FLOAT_VALUE;
+ expected2 = BasicValue.FLOAT_VALUE;
+ break;
+ case LADD:
+ case LSUB:
+ case LMUL:
+ case LDIV:
+ case LREM:
+ case LAND:
+ case LOR:
+ case LXOR:
+ case LCMP:
+ expected1 = BasicValue.LONG_VALUE;
+ expected2 = BasicValue.LONG_VALUE;
+ break;
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ expected1 = BasicValue.LONG_VALUE;
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case DADD:
+ case DSUB:
+ case DMUL:
+ case DDIV:
+ case DREM:
+ case DCMPL:
+ case DCMPG:
+ expected1 = BasicValue.DOUBLE_VALUE;
+ expected2 = BasicValue.DOUBLE_VALUE;
+ break;
+ case IF_ACMPEQ:
+ case IF_ACMPNE:
+ expected1 = BasicValue.REFERENCE_VALUE;
+ expected2 = BasicValue.REFERENCE_VALUE;
+ break;
+ case PUTFIELD:
+ FieldInsnNode fin = (FieldInsnNode) insn;
+ expected1 = newValue(Type.getObjectType(fin.owner));
+ expected2 = newValue(Type.getType(fin.desc));
+ break;
+ default:
+ throw new Error("Internal error.");
+ }
+ if (!isSubTypeOf(value1, expected1)) {
+ throw new AnalyzerException(insn, "First argument", expected1, value1);
+ } else if (!isSubTypeOf(value2, expected2)) {
+ throw new AnalyzerException(insn, "Second argument", expected2, value2);
+ }
+ if (insn.getOpcode() == AALOAD) {
+ return getElementValue(value1);
+ } else {
+ return super.binaryOperation(insn, value1, value2);
+ }
+ }
+
+ @Override
+ public BasicValue ternaryOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value1,
+ final BasicValue value2,
+ final BasicValue value3) throws AnalyzerException
+ {
+ BasicValue expected1;
+ BasicValue expected3;
+ switch (insn.getOpcode()) {
+ case IASTORE:
+ expected1 = newValue(Type.getType("[I"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case BASTORE:
+ if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+ expected1 = newValue(Type.getType("[Z"));
+ } else {
+ expected1 = newValue(Type.getType("[B"));
+ }
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case CASTORE:
+ expected1 = newValue(Type.getType("[C"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case SASTORE:
+ expected1 = newValue(Type.getType("[S"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case LASTORE:
+ expected1 = newValue(Type.getType("[J"));
+ expected3 = BasicValue.LONG_VALUE;
+ break;
+ case FASTORE:
+ expected1 = newValue(Type.getType("[F"));
+ expected3 = BasicValue.FLOAT_VALUE;
+ break;
+ case DASTORE:
+ expected1 = newValue(Type.getType("[D"));
+ expected3 = BasicValue.DOUBLE_VALUE;
+ break;
+ case AASTORE:
+ expected1 = value1;
+ expected3 = BasicValue.REFERENCE_VALUE;
+ break;
+ default:
+ throw new Error("Internal error.");
+ }
+ if (!isSubTypeOf(value1, expected1)) {
+ throw new AnalyzerException(insn, "First argument", "a " + expected1
+ + " array reference", value1);
+ } else if (!BasicValue.INT_VALUE.equals(value2)) {
+ throw new AnalyzerException(insn, "Second argument",
+ BasicValue.INT_VALUE,
+ value2);
+ } else if (!isSubTypeOf(value3, expected3)) {
+ throw new AnalyzerException(insn, "Third argument", expected3, value3);
+ }
+ return null;
+ }
+
+ @Override
+ public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
+ throws AnalyzerException
+ {
+ int opcode = insn.getOpcode();
+ if (opcode == MULTIANEWARRAY) {
+ for (int i = 0; i < values.size(); ++i) {
+ if (!BasicValue.INT_VALUE.equals(values.get(i))) {
+ throw new AnalyzerException(insn,
+ null,
+ BasicValue.INT_VALUE,
+ values.get(i));
+ }
+ }
+ } else {
+ int i = 0;
+ int j = 0;
+ if (opcode != INVOKESTATIC && opcode != INVOKEDYNAMIC) {
+ Type owner = Type.getObjectType(((MethodInsnNode) insn).owner);
+ if (!isSubTypeOf(values.get(i++), newValue(owner))) {
+ throw new AnalyzerException(insn, "Method owner",
+ newValue(owner),
+ values.get(0));
+ }
+ }
+ String desc = (opcode == INVOKEDYNAMIC)?
+ ((InvokeDynamicInsnNode) insn).desc:
+ ((MethodInsnNode) insn).desc;
+ Type[] args = Type.getArgumentTypes(desc);
+ while (i < values.size()) {
+ BasicValue expected = newValue(args[j++]);
+ BasicValue encountered = values.get(i++);
+ if (!isSubTypeOf(encountered, expected)) {
+ throw new AnalyzerException(insn,
+ "Argument " + j,
+ expected,
+ encountered);
+ }
+ }
+ }
+ return super.naryOperation(insn, values);
+ }
+
+ @Override
+ public void returnOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value,
+ final BasicValue expected) throws AnalyzerException
+ {
+ if (!isSubTypeOf(value, expected)) {
+ throw new AnalyzerException(insn,
+ "Incompatible return type",
+ expected,
+ value);
+ }
+ }
+
+ protected boolean isArrayValue(final BasicValue value) {
+ return value.isReference();
+ }
+
+ protected BasicValue getElementValue(final BasicValue objectArrayValue)
+ throws AnalyzerException
+ {
+ return BasicValue.REFERENCE_VALUE;
+ }
+
+ protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+ return value.equals(expected);
+ }
+}
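
Since isSubTypeOf here is plain equality, BasicVerifier catches mismatches only at the granularity of the seven basic value kinds; finer reference-type checks would come from a subclass that overrides the protected isSubTypeOf and getElementValue hooks. The sketch below feeds it a deliberately ill-typed method; VerifySketch is an invented name, and it assumes the usual MethodNode and InsnNode constructors from the tree package:

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InsnNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.BasicVerifier;

    public class VerifySketch {
        public static void main(String[] args) {
            // static Object broken() { ICONST_0; ARETURN; } -- returns an int where a reference is expected.
            MethodNode mn = new MethodNode(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC,
                    "broken", "()Ljava/lang/Object;", null, null);
            mn.instructions.add(new InsnNode(Opcodes.ICONST_0));
            mn.instructions.add(new InsnNode(Opcodes.ARETURN));
            mn.maxStack = 1;
            mn.maxLocals = 0;
            try {
                new Analyzer<BasicValue>(new BasicVerifier()).analyze("Example", mn);
                System.out.println("verified");
            } catch (AnalyzerException e) {
                // The verifier reports the expected and encountered values.
                System.out.println("rejected: " + e.getMessage());
            }
        }
    }
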
diff --git a/src/asm/scala/tools/asm/tree/analysis/Frame.java b/src/asm/scala/tools/asm/tree/analysis/Frame.java
new file mode 100644
index 0000000000..fe19c2c9ae
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Frame.java
@@ -0,0 +1,709 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.IincInsnNode;
+import scala.tools.asm.tree.InvokeDynamicInsnNode;
+import scala.tools.asm.tree.MethodInsnNode;
+import scala.tools.asm.tree.MultiANewArrayInsnNode;
+import scala.tools.asm.tree.VarInsnNode;
+
+/**
+ * A symbolic execution stack frame. A stack frame contains a set of local
+ * variable slots, and an operand stack. Warning: long and double values are
+ * represented by <i>two</i> slots in local variables, and by <i>one</i> slot
+ * in the operand stack.
+ *
+ * @param <V> type of the Value used for the analysis.
+ *
+ * @author Eric Bruneton
+ */
+public class Frame<V extends Value> {
+
+ /**
+ * The expected return type of the analyzed method, or <tt>null</tt> if the
+ * method returns void.
+ */
+ private V returnValue;
+
+ /**
+ * The local variables and operand stack of this frame.
+ */
+ private V[] values;
+
+ /**
+ * The number of local variables of this frame.
+ */
+ private int locals;
+
+ /**
+ * The number of elements in the operand stack.
+ */
+ private int top;
+
+ /**
+ * Constructs a new frame with the given size.
+ *
+ * @param nLocals the maximum number of local variables of the frame.
+ * @param nStack the maximum stack size of the frame.
+ */
+ public Frame(final int nLocals, final int nStack) {
+ this.values = (V[]) new Value[nLocals + nStack];
+ this.locals = nLocals;
+ }
+
+ /**
+ * Constructs a new frame that is identical to the given frame.
+ *
+ * @param src a frame.
+ */
+ public Frame(final Frame<? extends V> src) {
+ this(src.locals, src.values.length - src.locals);
+ init(src);
+ }
+
+ /**
+ * Copies the state of the given frame into this frame.
+ *
+ * @param src a frame.
+ * @return this frame.
+ */
+ public Frame<V> init(final Frame<? extends V> src) {
+ returnValue = src.returnValue;
+ System.arraycopy(src.values, 0, values, 0, values.length);
+ top = src.top;
+ return this;
+ }
+
+ /**
+ * Sets the expected return type of the analyzed method.
+ *
+ * @param v the expected return type of the analyzed method, or
+ * <tt>null</tt> if the method returns void.
+ */
+ public void setReturn(final V v) {
+ returnValue = v;
+ }
+
+ /**
+ * Returns the maximum number of local variables of this frame.
+ *
+ * @return the maximum number of local variables of this frame.
+ */
+ public int getLocals() {
+ return locals;
+ }
+
+ /**
+ * Returns the value of the given local variable.
+ *
+ * @param i a local variable index.
+ * @return the value of the given local variable.
+ * @throws IndexOutOfBoundsException if the variable does not exist.
+ */
+ public V getLocal(final int i) throws IndexOutOfBoundsException {
+ if (i >= locals) {
+            throw new IndexOutOfBoundsException("Trying to access a nonexistent local variable");
+ }
+ return values[i];
+ }
+
+ /**
+ * Sets the value of the given local variable.
+ *
+ * @param i a local variable index.
+ * @param value the new value of this local variable.
+ * @throws IndexOutOfBoundsException if the variable does not exist.
+ */
+ public void setLocal(final int i, final V value)
+ throws IndexOutOfBoundsException
+ {
+ if (i >= locals) {
+            throw new IndexOutOfBoundsException("Trying to access a nonexistent local variable "+i);
+ }
+ values[i] = value;
+ }
+
+ /**
+ * Returns the number of values in the operand stack of this frame. Long and
+ * double values are treated as single values.
+ *
+ * @return the number of values in the operand stack of this frame.
+ */
+ public int getStackSize() {
+ return top;
+ }
+
+ /**
+ * Returns the value of the given operand stack slot.
+ *
+ * @param i the index of an operand stack slot.
+ * @return the value of the given operand stack slot.
+ * @throws IndexOutOfBoundsException if the operand stack slot does not
+ * exist.
+ */
+ public V getStack(final int i) throws IndexOutOfBoundsException {
+ return values[i + locals];
+ }
+
+ /**
+ * Clears the operand stack of this frame.
+ */
+ public void clearStack() {
+ top = 0;
+ }
+
+ /**
+ * Pops a value from the operand stack of this frame.
+ *
+ * @return the value that has been popped from the stack.
+ * @throws IndexOutOfBoundsException if the operand stack is empty.
+ */
+ public V pop() throws IndexOutOfBoundsException {
+ if (top == 0) {
+ throw new IndexOutOfBoundsException("Cannot pop operand off an empty stack.");
+ }
+ return values[--top + locals];
+ }
+
+ /**
+ * Pushes a value into the operand stack of this frame.
+ *
+ * @param value the value that must be pushed into the stack.
+ * @throws IndexOutOfBoundsException if the operand stack is full.
+ */
+ public void push(final V value) throws IndexOutOfBoundsException {
+ if (top + locals >= values.length) {
+ throw new IndexOutOfBoundsException("Insufficient maximum stack size.");
+ }
+ values[top++ + locals] = value;
+ }
+
+ public void execute(
+ final AbstractInsnNode insn,
+ final Interpreter<V> interpreter) throws AnalyzerException
+ {
+ V value1, value2, value3, value4;
+ List<V> values;
+ int var;
+
+ switch (insn.getOpcode()) {
+ case Opcodes.NOP:
+ break;
+ case Opcodes.ACONST_NULL:
+ case Opcodes.ICONST_M1:
+ case Opcodes.ICONST_0:
+ case Opcodes.ICONST_1:
+ case Opcodes.ICONST_2:
+ case Opcodes.ICONST_3:
+ case Opcodes.ICONST_4:
+ case Opcodes.ICONST_5:
+ case Opcodes.LCONST_0:
+ case Opcodes.LCONST_1:
+ case Opcodes.FCONST_0:
+ case Opcodes.FCONST_1:
+ case Opcodes.FCONST_2:
+ case Opcodes.DCONST_0:
+ case Opcodes.DCONST_1:
+ case Opcodes.BIPUSH:
+ case Opcodes.SIPUSH:
+ case Opcodes.LDC:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.ILOAD:
+ case Opcodes.LLOAD:
+ case Opcodes.FLOAD:
+ case Opcodes.DLOAD:
+ case Opcodes.ALOAD:
+ push(interpreter.copyOperation(insn,
+ getLocal(((VarInsnNode) insn).var)));
+ break;
+ case Opcodes.IALOAD:
+ case Opcodes.LALOAD:
+ case Opcodes.FALOAD:
+ case Opcodes.DALOAD:
+ case Opcodes.AALOAD:
+ case Opcodes.BALOAD:
+ case Opcodes.CALOAD:
+ case Opcodes.SALOAD:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.ISTORE:
+ case Opcodes.LSTORE:
+ case Opcodes.FSTORE:
+ case Opcodes.DSTORE:
+ case Opcodes.ASTORE:
+ value1 = interpreter.copyOperation(insn, pop());
+ var = ((VarInsnNode) insn).var;
+ setLocal(var, value1);
+ if (value1.getSize() == 2) {
+ setLocal(var + 1, interpreter.newValue(null));
+ }
+ if (var > 0) {
+ Value local = getLocal(var - 1);
+ if (local != null && local.getSize() == 2) {
+ setLocal(var - 1, interpreter.newValue(null));
+ }
+ }
+ break;
+ case Opcodes.IASTORE:
+ case Opcodes.LASTORE:
+ case Opcodes.FASTORE:
+ case Opcodes.DASTORE:
+ case Opcodes.AASTORE:
+ case Opcodes.BASTORE:
+ case Opcodes.CASTORE:
+ case Opcodes.SASTORE:
+ value3 = pop();
+ value2 = pop();
+ value1 = pop();
+ interpreter.ternaryOperation(insn, value1, value2, value3);
+ break;
+ case Opcodes.POP:
+ if (pop().getSize() == 2) {
+ throw new AnalyzerException(insn, "Illegal use of POP");
+ }
+ break;
+ case Opcodes.POP2:
+ if (pop().getSize() == 1) {
+ if (pop().getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of POP2");
+ }
+ }
+ break;
+ case Opcodes.DUP:
+ value1 = pop();
+ if (value1.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of DUP");
+ }
+ push(value1);
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ case Opcodes.DUP_X1:
+ value1 = pop();
+ value2 = pop();
+ if (value1.getSize() != 1 || value2.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of DUP_X1");
+ }
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ case Opcodes.DUP_X2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ } else {
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP_X2");
+ case Opcodes.DUP2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ push(value2);
+ push(value1);
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ }
+ } else {
+ push(value1);
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2");
+ case Opcodes.DUP2_X1:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ } else {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2_X1");
+ case Opcodes.DUP2_X2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ value4 = pop();
+ if (value4.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ push(value4);
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ } else {
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ } else {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ } else {
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2_X2");
+ case Opcodes.SWAP:
+ value2 = pop();
+ value1 = pop();
+ if (value1.getSize() != 1 || value2.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of SWAP");
+ }
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ case Opcodes.IADD:
+ case Opcodes.LADD:
+ case Opcodes.FADD:
+ case Opcodes.DADD:
+ case Opcodes.ISUB:
+ case Opcodes.LSUB:
+ case Opcodes.FSUB:
+ case Opcodes.DSUB:
+ case Opcodes.IMUL:
+ case Opcodes.LMUL:
+ case Opcodes.FMUL:
+ case Opcodes.DMUL:
+ case Opcodes.IDIV:
+ case Opcodes.LDIV:
+ case Opcodes.FDIV:
+ case Opcodes.DDIV:
+ case Opcodes.IREM:
+ case Opcodes.LREM:
+ case Opcodes.FREM:
+ case Opcodes.DREM:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.INEG:
+ case Opcodes.LNEG:
+ case Opcodes.FNEG:
+ case Opcodes.DNEG:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.ISHL:
+ case Opcodes.LSHL:
+ case Opcodes.ISHR:
+ case Opcodes.LSHR:
+ case Opcodes.IUSHR:
+ case Opcodes.LUSHR:
+ case Opcodes.IAND:
+ case Opcodes.LAND:
+ case Opcodes.IOR:
+ case Opcodes.LOR:
+ case Opcodes.IXOR:
+ case Opcodes.LXOR:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.IINC:
+ var = ((IincInsnNode) insn).var;
+ setLocal(var, interpreter.unaryOperation(insn, getLocal(var)));
+ break;
+ case Opcodes.I2L:
+ case Opcodes.I2F:
+ case Opcodes.I2D:
+ case Opcodes.L2I:
+ case Opcodes.L2F:
+ case Opcodes.L2D:
+ case Opcodes.F2I:
+ case Opcodes.F2L:
+ case Opcodes.F2D:
+ case Opcodes.D2I:
+ case Opcodes.D2L:
+ case Opcodes.D2F:
+ case Opcodes.I2B:
+ case Opcodes.I2C:
+ case Opcodes.I2S:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.LCMP:
+ case Opcodes.FCMPL:
+ case Opcodes.FCMPG:
+ case Opcodes.DCMPL:
+ case Opcodes.DCMPG:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.IFEQ:
+ case Opcodes.IFNE:
+ case Opcodes.IFLT:
+ case Opcodes.IFGE:
+ case Opcodes.IFGT:
+ case Opcodes.IFLE:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.IF_ICMPEQ:
+ case Opcodes.IF_ICMPNE:
+ case Opcodes.IF_ICMPLT:
+ case Opcodes.IF_ICMPGE:
+ case Opcodes.IF_ICMPGT:
+ case Opcodes.IF_ICMPLE:
+ case Opcodes.IF_ACMPEQ:
+ case Opcodes.IF_ACMPNE:
+ value2 = pop();
+ value1 = pop();
+ interpreter.binaryOperation(insn, value1, value2);
+ break;
+ case Opcodes.GOTO:
+ break;
+ case Opcodes.JSR:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.RET:
+ break;
+ case Opcodes.TABLESWITCH:
+ case Opcodes.LOOKUPSWITCH:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.IRETURN:
+ case Opcodes.LRETURN:
+ case Opcodes.FRETURN:
+ case Opcodes.DRETURN:
+ case Opcodes.ARETURN:
+ value1 = pop();
+ interpreter.unaryOperation(insn, value1);
+ interpreter.returnOperation(insn, value1, returnValue);
+ break;
+ case Opcodes.RETURN:
+ if (returnValue != null) {
+ throw new AnalyzerException(insn, "Incompatible return type");
+ }
+ break;
+ case Opcodes.GETSTATIC:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.PUTSTATIC:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.GETFIELD:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.PUTFIELD:
+ value2 = pop();
+ value1 = pop();
+ interpreter.binaryOperation(insn, value1, value2);
+ break;
+ case Opcodes.INVOKEVIRTUAL:
+ case Opcodes.INVOKESPECIAL:
+ case Opcodes.INVOKESTATIC:
+ case Opcodes.INVOKEINTERFACE: {
+ values = new ArrayList<V>();
+ String desc = ((MethodInsnNode) insn).desc;
+ for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+ values.add(0, pop());
+ }
+ if (insn.getOpcode() != Opcodes.INVOKESTATIC) {
+ values.add(0, pop());
+ }
+ if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+ interpreter.naryOperation(insn, values);
+ } else {
+ push(interpreter.naryOperation(insn, values));
+ }
+ break;
+ }
+ case Opcodes.INVOKEDYNAMIC: {
+ values = new ArrayList<V>();
+ String desc = ((InvokeDynamicInsnNode) insn).desc;
+ for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+ values.add(0, pop());
+ }
+ if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+ interpreter.naryOperation(insn, values);
+ } else {
+ push(interpreter.naryOperation(insn, values));
+ }
+ break;
+ }
+ case Opcodes.NEW:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.NEWARRAY:
+ case Opcodes.ANEWARRAY:
+ case Opcodes.ARRAYLENGTH:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.ATHROW:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.CHECKCAST:
+ case Opcodes.INSTANCEOF:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.MONITORENTER:
+ case Opcodes.MONITOREXIT:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.MULTIANEWARRAY:
+ values = new ArrayList<V>();
+ for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) {
+ values.add(0, pop());
+ }
+ push(interpreter.naryOperation(insn, values));
+ break;
+ case Opcodes.IFNULL:
+ case Opcodes.IFNONNULL:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ default:
+ throw new RuntimeException("Illegal opcode "+insn.getOpcode());
+ }
+ }
+
+ /**
+ * Merges this frame with the given frame.
+ *
+ * @param frame a frame.
+ * @param interpreter the interpreter used to merge values.
+ * @return <tt>true</tt> if this frame has been changed as a result of the
+ * merge operation, or <tt>false</tt> otherwise.
+ * @throws AnalyzerException if the frames have incompatible sizes.
+ */
+ public boolean merge(final Frame<? extends V> frame, final Interpreter<V> interpreter)
+ throws AnalyzerException
+ {
+ if (top != frame.top) {
+ throw new AnalyzerException(null, "Incompatible stack heights");
+ }
+ boolean changes = false;
+ for (int i = 0; i < locals + top; ++i) {
+ V v = interpreter.merge(values[i], frame.values[i]);
+ if (v != values[i]) {
+ values[i] = v;
+ changes = true;
+ }
+ }
+ return changes;
+ }
+
+ /**
+ * Merges this frame with the given frame (case of a RET instruction).
+ *
+ * @param frame a frame
+ * @param access the local variables that have been accessed by the
+ * subroutine to which the RET instruction corresponds.
+ * @return <tt>true</tt> if this frame has been changed as a result of the
+ * merge operation, or <tt>false</tt> otherwise.
+ */
+ public boolean merge(final Frame<? extends V> frame, final boolean[] access) {
+ boolean changes = false;
+ for (int i = 0; i < locals; ++i) {
+ if (!access[i] && !values[i].equals(frame.values[i])) {
+ values[i] = frame.values[i];
+ changes = true;
+ }
+ }
+ return changes;
+ }
+
+ /**
+ * Returns a string representation of this frame.
+ *
+ * @return a string representation of this frame.
+ */
+ @Override
+ public String toString() {
+ StringBuffer b = new StringBuffer();
+ for (int i = 0; i < getLocals(); ++i) {
+ b.append(getLocal(i));
+ }
+ b.append(' ');
+ for (int i = 0; i < getStackSize(); ++i) {
+ b.append(getStack(i).toString());
+ }
+ return b.toString();
+ }
+}
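
The getters above are all that is needed to inspect an analysis result instruction by instruction. A sketch that prints the computed locals and operand stack for each reachable instruction (FrameDumpSketch is an invented name):

    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.Frame;

    public class FrameDumpSketch {
        static void dump(String owner, MethodNode mn) throws AnalyzerException {
            Analyzer<BasicValue> a = new Analyzer<BasicValue>(new BasicInterpreter());
            Frame<BasicValue>[] frames = a.analyze(owner, mn);
            for (int i = 0; i < frames.length; ++i) {
                Frame<BasicValue> f = frames[i];
                if (f == null) {
                    continue; // instruction was never reached
                }
                StringBuilder line = new StringBuilder(i + ": locals=[");
                for (int l = 0; l < f.getLocals(); ++l) {
                    line.append(f.getLocal(l)).append(' ');
                }
                line.append("] stack=[");
                for (int s = 0; s < f.getStackSize(); ++s) {
                    line.append(f.getStack(s)).append(' ');
                }
                line.append(']');
                System.out.println(line);
            }
        }
    }
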
diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
new file mode 100644
index 0000000000..930c8f4af8
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
@@ -0,0 +1,204 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.List;
+
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+
+/**
+ * A semantic bytecode interpreter. More precisely, this interpreter only
+ * manages the computation of values from other values: it does not manage the
+ * transfer of values to or from the stack, and to or from the local variables.
+ * This separation allows a generic bytecode {@link Analyzer} to work with
+ * various semantic interpreters, without needing to duplicate the code to
+ * simulate the transfer of values.
+ *
+ * @param <V> type of the Value used for the analysis.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class Interpreter<V extends Value> {
+
+ protected final int api;
+
+ protected Interpreter(final int api) {
+ this.api = api;
+ }
+
+ /**
+ * Creates a new value that represents the given type.
+ *
+     * Called for method parameters (including <code>this</code>),
+     * exception handler variables, and with a <code>null</code> type
+     * for the variable slots reserved by long and double values.
+ *
+ * @param type a primitive or reference type, or <tt>null</tt> to
+ * represent an uninitialized value.
+ * @return a value that represents the given type. The size of the returned
+ * value must be equal to the size of the given type.
+ */
+ public abstract V newValue(Type type);
+
+ /**
+ * Interprets a bytecode instruction without arguments. This method is
+ * called for the following opcodes:
+ *
+ * ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, ICONST_2, ICONST_3, ICONST_4,
+ * ICONST_5, LCONST_0, LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0,
+ * DCONST_1, BIPUSH, SIPUSH, LDC, JSR, GETSTATIC, NEW
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occurred during the interpretation.
+ */
+ public abstract V newOperation(AbstractInsnNode insn)
+ throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction that moves a value on the stack or to
+ * or from local variables. This method is called for the following opcodes:
+ *
+ * ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE,
+ * ASTORE, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value the value that must be moved by the instruction.
+ * @return the result of the interpretation of the given instruction. The
+ * returned value must be <tt>equal</tt> to the given value.
+ * @throws AnalyzerException if an error occurred during the interpretation.
+ */
+ public abstract V copyOperation(AbstractInsnNode insn, V value)
+ throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction with a single argument. This method is
+ * called for the following opcodes:
+ *
+ * INEG, LNEG, FNEG, DNEG, IINC, I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L,
+ * F2D, D2I, D2L, D2F, I2B, I2C, I2S, IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE,
+ * TABLESWITCH, LOOKUPSWITCH, IRETURN, LRETURN, FRETURN, DRETURN, ARETURN,
+ * PUTSTATIC, GETFIELD, NEWARRAY, ANEWARRAY, ARRAYLENGTH, ATHROW, CHECKCAST,
+ * INSTANCEOF, MONITORENTER, MONITOREXIT, IFNULL, IFNONNULL
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value the argument of the instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occurred during the interpretation.
+ */
+ public abstract V unaryOperation(AbstractInsnNode insn, V value)
+ throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction with two arguments. This method is
+ * called for the following opcodes:
+ *
+ * IALOAD, LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IADD,
+ * LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV,
+ * LDIV, FDIV, DDIV, IREM, LREM, FREM, DREM, ISHL, LSHL, ISHR, LSHR, IUSHR,
+ * LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, LCMP, FCMPL, FCMPG, DCMPL,
+ * DCMPG, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
+ * IF_ACMPEQ, IF_ACMPNE, PUTFIELD
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value1 the first argument of the instruction to be interpreted.
+ * @param value2 the second argument of the instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occurred during the interpretation.
+ */
+ public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2)
+ throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction with three arguments. This method is
+ * called for the following opcodes:
+ *
+ * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value1 the first argument of the instruction to be interpreted.
+ * @param value2 the second argument of the instruction to be interpreted.
+ * @param value3 the third argument of the instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occurred during the interpretation.
+ */
+ public abstract V ternaryOperation(
+ AbstractInsnNode insn,
+ V value1,
+ V value2,
+ V value3) throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction with a variable number of arguments.
+ * This method is called for the following opcodes:
+ *
+ * INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC, INVOKEINTERFACE,
+ * MULTIANEWARRAY and INVOKEDYNAMIC
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param values the arguments of the instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occurred during the interpretation.
+ */
+ public abstract V naryOperation(
+ AbstractInsnNode insn,
+ List<? extends V> values) throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode return instruction. This method is called for the
+ * following opcodes:
+ *
+ * IRETURN, LRETURN, FRETURN, DRETURN, ARETURN
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value the argument of the instruction to be interpreted.
+ * @param expected the expected return type of the analyzed method.
+ * @throws AnalyzerException if an error occurred during the interpretation.
+ */
+ public abstract void returnOperation(
+ AbstractInsnNode insn,
+ V value,
+ V expected) throws AnalyzerException;
+
+ /**
+ * Merges two values. The merge operation must return a value that
+ * represents both values (for instance, if the two values are two types,
+ * the merged value must be a common super type of the two types. If the two
+ * values are integer intervals, the merged value must be an interval that
+ * contains the previous ones. Likewise for other types of values).
+ *
+ * @param v a value.
+ * @param w another value.
+ * @return the merged value. If the merged value is equal to <tt>v</tt>,
+ * this method <i>must</i> return <tt>v</tt>.
+ */
+ public abstract V merge(V v, V w);
+}
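The separation described in the class comment is easiest to see in use: the companion Analyzer class referenced by the javadoc simulates the transfer of values between the stack and the local variables, and delegates every value computation to the Interpreter it is constructed with. A minimal sketch, assuming the Analyzer/Frame API from this package and a hypothetical MethodNode named methodNode:

    // any concrete Interpreter works; SimpleVerifier is defined below in this patch
    Interpreter<BasicValue> interpreter = new SimpleVerifier();
    Analyzer<BasicValue> analyzer = new Analyzer<BasicValue>(interpreter);
    // analyze() runs the dataflow loop and returns one Frame per instruction
    // (null entries for unreachable code); it throws AnalyzerException on error.
    Frame<BasicValue>[] frames = analyzer.analyze("com/example/Owner", methodNode);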
diff --git a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
new file mode 100644
index 0000000000..c4f515d328
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
@@ -0,0 +1,329 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.List;
+
+import scala.tools.asm.Type;
+
+/**
+ * An extended {@link BasicVerifier} that performs more precise verifications.
+ * This verifier computes exact class types, instead of using a single "object
+ * reference" type (as done in the {@link BasicVerifier}).
+ *
+ * @author Eric Bruneton
+ * @author Bing Ran
+ */
+public class SimpleVerifier extends BasicVerifier {
+
+ /**
+ * The class that is verified.
+ */
+ private final Type currentClass;
+
+ /**
+ * The super class of the class that is verified.
+ */
+ private final Type currentSuperClass;
+
+ /**
+ * The interfaces implemented by the class that is verified.
+ */
+ private final List<Type> currentClassInterfaces;
+
+ /**
+ * Whether the class that is verified is an interface.
+ */
+ private final boolean isInterface;
+
+ /**
+ * The loader to use for referenced classes.
+ */
+ private ClassLoader loader = getClass().getClassLoader();
+
+ /**
+ * Constructs a new {@link SimpleVerifier}.
+ */
+ public SimpleVerifier() {
+ this(null, null, false);
+ }
+
+ /**
+ * Constructs a new {@link SimpleVerifier} to verify a specific class. This
+ * class will not be loaded into the JVM since it may be incorrect.
+ *
+ * @param currentClass the class that is verified.
+ * @param currentSuperClass the super class of the class that is verified.
+ * @param isInterface if the class that is verified is an interface.
+ */
+ public SimpleVerifier(
+ final Type currentClass,
+ final Type currentSuperClass,
+ final boolean isInterface)
+ {
+ this(currentClass, currentSuperClass, null, isInterface);
+ }
+
+ /**
+ * Constructs a new {@link SimpleVerifier} to verify a specific class. This
+ * class will not be loaded into the JVM since it may be incorrect.
+ *
+ * @param currentClass the class that is verified.
+ * @param currentSuperClass the super class of the class that is verified.
+ * @param currentClassInterfaces the interfaces implemented by the class
+ * that is verified.
+ * @param isInterface if the class that is verified is an interface.
+ */
+ public SimpleVerifier(
+ final Type currentClass,
+ final Type currentSuperClass,
+ final List<Type> currentClassInterfaces,
+ final boolean isInterface)
+ {
+ this(ASM4,
+ currentClass,
+ currentSuperClass,
+ currentClassInterfaces,
+ isInterface);
+ }
+
+ protected SimpleVerifier(
+ final int api,
+ final Type currentClass,
+ final Type currentSuperClass,
+ final List<Type> currentClassInterfaces,
+ final boolean isInterface)
+ {
+ super(api);
+ this.currentClass = currentClass;
+ this.currentSuperClass = currentSuperClass;
+ this.currentClassInterfaces = currentClassInterfaces;
+ this.isInterface = isInterface;
+ }
+
+ /**
+ * Sets the <code>ClassLoader</code> which will be used to load referenced
+ * classes. This is useful if you are verifying multiple interdependent
+ * classes.
+ *
+ * @param loader a <code>ClassLoader</code> to use
+ */
+ public void setClassLoader(final ClassLoader loader) {
+ this.loader = loader;
+ }
+
+ @Override
+ public BasicValue newValue(final Type type) {
+ if (type == null) {
+ return BasicValue.UNINITIALIZED_VALUE;
+ }
+
+ boolean isArray = type.getSort() == Type.ARRAY;
+ if (isArray) {
+ switch (type.getElementType().getSort()) {
+ case Type.BOOLEAN:
+ case Type.CHAR:
+ case Type.BYTE:
+ case Type.SHORT:
+ return new BasicValue(type);
+ }
+ }
+
+ BasicValue v = super.newValue(type);
+ if (BasicValue.REFERENCE_VALUE.equals(v)) {
+ if (isArray) {
+ v = newValue(type.getElementType());
+ String desc = v.getType().getDescriptor();
+ for (int i = 0; i < type.getDimensions(); ++i) {
+ desc = '[' + desc;
+ }
+ v = new BasicValue(Type.getType(desc));
+ } else {
+ v = new BasicValue(type);
+ }
+ }
+ return v;
+ }
+
+ @Override
+ protected boolean isArrayValue(final BasicValue value) {
+ Type t = value.getType();
+ return t != null
+ && ("Lnull;".equals(t.getDescriptor()) || t.getSort() == Type.ARRAY);
+ }
+
+ @Override
+ protected BasicValue getElementValue(final BasicValue objectArrayValue)
+ throws AnalyzerException
+ {
+ Type arrayType = objectArrayValue.getType();
+ if (arrayType != null) {
+ if (arrayType.getSort() == Type.ARRAY) {
+ return newValue(Type.getType(arrayType.getDescriptor()
+ .substring(1)));
+ } else if ("Lnull;".equals(arrayType.getDescriptor())) {
+ return objectArrayValue;
+ }
+ }
+ throw new Error("Internal error");
+ }
+
+ @Override
+ protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+ Type expectedType = expected.getType();
+ Type type = value.getType();
+ switch (expectedType.getSort()) {
+ case Type.INT:
+ case Type.FLOAT:
+ case Type.LONG:
+ case Type.DOUBLE:
+ return type.equals(expectedType);
+ case Type.ARRAY:
+ case Type.OBJECT:
+ if ("Lnull;".equals(type.getDescriptor())) {
+ return true;
+ } else if (type.getSort() == Type.OBJECT
+ || type.getSort() == Type.ARRAY)
+ {
+ return isAssignableFrom(expectedType, type);
+ } else {
+ return false;
+ }
+ default:
+ throw new Error("Internal error");
+ }
+ }
+
+ @Override
+ public BasicValue merge(final BasicValue v, final BasicValue w) {
+ if (!v.equals(w)) {
+ Type t = v.getType();
+ Type u = w.getType();
+ if (t != null
+ && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY))
+ {
+ if (u != null
+ && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY))
+ {
+ if ("Lnull;".equals(t.getDescriptor())) {
+ return w;
+ }
+ if ("Lnull;".equals(u.getDescriptor())) {
+ return v;
+ }
+ if (isAssignableFrom(t, u)) {
+ return v;
+ }
+ if (isAssignableFrom(u, t)) {
+ return w;
+ }
+ // TODO case of array classes of the same dimension
+ // TODO should we look also for a common super interface?
+ // problem: there may be several possible common super
+ // interfaces
+ do {
+ if (t == null || isInterface(t)) {
+ return BasicValue.REFERENCE_VALUE;
+ }
+ t = getSuperClass(t);
+ if (isAssignableFrom(t, u)) {
+ return newValue(t);
+ }
+ } while (true);
+ }
+ }
+ return BasicValue.UNINITIALIZED_VALUE;
+ }
+ return v;
+ }
+
+ protected boolean isInterface(final Type t) {
+ if (currentClass != null && t.equals(currentClass)) {
+ return isInterface;
+ }
+ return getClass(t).isInterface();
+ }
+
+ protected Type getSuperClass(final Type t) {
+ if (currentClass != null && t.equals(currentClass)) {
+ return currentSuperClass;
+ }
+ Class<?> c = getClass(t).getSuperclass();
+ return c == null ? null : Type.getType(c);
+ }
+
+ protected boolean isAssignableFrom(final Type t, final Type u) {
+ if (t.equals(u)) {
+ return true;
+ }
+ if (currentClass != null && t.equals(currentClass)) {
+ if (getSuperClass(u) == null) {
+ return false;
+ } else {
+ if (isInterface) {
+ return u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY;
+ }
+ return isAssignableFrom(t, getSuperClass(u));
+ }
+ }
+ if (currentClass != null && u.equals(currentClass)) {
+ if (isAssignableFrom(t, currentSuperClass)) {
+ return true;
+ }
+ if (currentClassInterfaces != null) {
+ for (int i = 0; i < currentClassInterfaces.size(); ++i) {
+ Type v = currentClassInterfaces.get(i);
+ if (isAssignableFrom(t, v)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+ Class<?> tc = getClass(t);
+ if (tc.isInterface()) {
+ tc = Object.class;
+ }
+ return tc.isAssignableFrom(getClass(u));
+ }
+
+ protected Class<?> getClass(final Type t) {
+ try {
+ if (t.getSort() == Type.ARRAY) {
+ return Class.forName(t.getDescriptor().replace('/', '.'),
+ false,
+ loader);
+ }
+ return Class.forName(t.getClassName(), false, loader);
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e.toString());
+ }
+ }
+}
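Because SimpleVerifier resolves referenced classes through a ClassLoader, a typical use is type-checking a method against classes that are already loadable. A sketch, again assuming the Analyzer API from this package and hypothetical classNode, methodNode and applicationLoader variables:

    SimpleVerifier verifier = new SimpleVerifier();
    verifier.setClassLoader(applicationLoader); // a loader able to see the referenced classes
    Analyzer<BasicValue> analyzer = new Analyzer<BasicValue>(verifier);
    try {
        analyzer.analyze(classNode.name, methodNode);
    } catch (AnalyzerException e) {
        // the bytecode of methodNode is not type-correct for this verifier
    }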
diff --git a/src/asm/scala/tools/asm/tree/analysis/SmallSet.java b/src/asm/scala/tools/asm/tree/analysis/SmallSet.java
new file mode 100644
index 0000000000..205878d18c
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/SmallSet.java
@@ -0,0 +1,134 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.AbstractSet;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
+/**
+ * A set of at most two elements.
+ *
+ * @author Eric Bruneton
+ */
+class SmallSet<E> extends AbstractSet<E> implements Iterator<E> {
+
+ // if e1 is null, e2 must be null; otherwise e2 must be different from e1
+
+ E e1, e2;
+
+ static final <T> Set<T> emptySet() {
+ return new SmallSet<T>(null, null);
+ }
+
+ SmallSet(final E e1, final E e2) {
+ this.e1 = e1;
+ this.e2 = e2;
+ }
+
+ // -------------------------------------------------------------------------
+ // Implementation of inherited abstract methods
+ // -------------------------------------------------------------------------
+
+ @Override
+ public Iterator<E> iterator() {
+ return new SmallSet<E>(e1, e2);
+ }
+
+ @Override
+ public int size() {
+ return e1 == null ? 0 : (e2 == null ? 1 : 2);
+ }
+
+ // -------------------------------------------------------------------------
+ // Implementation of the Iterator interface
+ // -------------------------------------------------------------------------
+
+ public boolean hasNext() {
+ return e1 != null;
+ }
+
+ public E next() {
+ if (e1 == null) {
+ throw new NoSuchElementException();
+ }
+ E e = e1;
+ e1 = e2;
+ e2 = null;
+ return e;
+ }
+
+ public void remove() {
+ }
+
+ // -------------------------------------------------------------------------
+ // Utility methods
+ // -------------------------------------------------------------------------
+
+ Set<E> union(final SmallSet<E> s) {
+ if ((s.e1 == e1 && s.e2 == e2) || (s.e1 == e2 && s.e2 == e1)) {
+ return this; // if the two sets are equal, return this
+ }
+ if (s.e1 == null) {
+ return this; // if s is empty, return this
+ }
+ if (e1 == null) {
+ return s; // if this is empty, return s
+ }
+ if (s.e2 == null) { // s contains exactly one element
+ if (e2 == null) {
+ return new SmallSet<E>(e1, s.e1); // necessarily e1 != s.e1
+ } else if (s.e1 == e1 || s.e1 == e2) { // s is included in this
+ return this;
+ }
+ }
+ if (e2 == null) { // this contains exactly one element
+ // if (s.e2 == null) { // cannot happen
+ // return new SmallSet(e1, s.e1); // necessarily e1 != s.e1
+ // } else
+ if (e1 == s.e1 || e1 == s.e2) { // this is included in s
+ return s;
+ }
+ }
+ // here we know that there are at least 3 distinct elements
+ HashSet<E> r = new HashSet<E>(4);
+ r.add(e1);
+ if (e2 != null) {
+ r.add(e2);
+ }
+ r.add(s.e1);
+ if (s.e2 != null) {
+ r.add(s.e2);
+ }
+ return r;
+ }
+}
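SmallSet is a package-private optimisation, so the sketch below is only meant to illustrate the union contract spelled out in the comments above: the receiver or the argument is reused whenever possible, and a HashSet is allocated only once more than two distinct elements are involved (elements are compared by reference, which is adequate for instruction nodes):

    AbstractInsnNode i1 = new InsnNode(Opcodes.NOP);
    AbstractInsnNode i2 = new InsnNode(Opcodes.NOP);
    AbstractInsnNode i3 = new InsnNode(Opcodes.NOP);
    SmallSet<AbstractInsnNode> s = new SmallSet<AbstractInsnNode>(i1, i2);
    Set<AbstractInsnNode> same = s.union(new SmallSet<AbstractInsnNode>(i1, null)); // {i1} is included in s: returns s
    Set<AbstractInsnNode> more = s.union(new SmallSet<AbstractInsnNode>(i3, null)); // three distinct elements: returns a HashSet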
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
new file mode 100644
index 0000000000..067200b51e
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
@@ -0,0 +1,206 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.FieldInsnNode;
+import scala.tools.asm.tree.InvokeDynamicInsnNode;
+import scala.tools.asm.tree.LdcInsnNode;
+import scala.tools.asm.tree.MethodInsnNode;
+
+/**
+ * An {@link Interpreter} for {@link SourceValue} values.
+ *
+ * @author Eric Bruneton
+ */
+public class SourceInterpreter extends Interpreter<SourceValue> implements
+ Opcodes
+{
+
+ public SourceInterpreter() {
+ super(ASM4);
+ }
+
+ protected SourceInterpreter(final int api) {
+ super(api);
+ }
+
+ @Override
+ public SourceValue newValue(final Type type) {
+ if (type == Type.VOID_TYPE) {
+ return null;
+ }
+ return new SourceValue(type == null ? 1 : type.getSize());
+ }
+
+ @Override
+ public SourceValue newOperation(final AbstractInsnNode insn) {
+ int size;
+ switch (insn.getOpcode()) {
+ case LCONST_0:
+ case LCONST_1:
+ case DCONST_0:
+ case DCONST_1:
+ size = 2;
+ break;
+ case LDC:
+ Object cst = ((LdcInsnNode) insn).cst;
+ size = cst instanceof Long || cst instanceof Double ? 2 : 1;
+ break;
+ case GETSTATIC:
+ size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+ break;
+ default:
+ size = 1;
+ }
+ return new SourceValue(size, insn);
+ }
+
+ @Override
+ public SourceValue copyOperation(final AbstractInsnNode insn, final SourceValue value) {
+ return new SourceValue(value.getSize(), insn);
+ }
+
+ @Override
+ public SourceValue unaryOperation(final AbstractInsnNode insn, final SourceValue value)
+ {
+ int size;
+ switch (insn.getOpcode()) {
+ case LNEG:
+ case DNEG:
+ case I2L:
+ case I2D:
+ case L2D:
+ case F2L:
+ case F2D:
+ case D2L:
+ size = 2;
+ break;
+ case GETFIELD:
+ size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+ break;
+ default:
+ size = 1;
+ }
+ return new SourceValue(size, insn);
+ }
+
+ @Override
+ public SourceValue binaryOperation(
+ final AbstractInsnNode insn,
+ final SourceValue value1,
+ final SourceValue value2)
+ {
+ int size;
+ switch (insn.getOpcode()) {
+ case LALOAD:
+ case DALOAD:
+ case LADD:
+ case DADD:
+ case LSUB:
+ case DSUB:
+ case LMUL:
+ case DMUL:
+ case LDIV:
+ case DDIV:
+ case LREM:
+ case DREM:
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ case LAND:
+ case LOR:
+ case LXOR:
+ size = 2;
+ break;
+ default:
+ size = 1;
+ }
+ return new SourceValue(size, insn);
+ }
+
+ @Override
+ public SourceValue ternaryOperation(
+ final AbstractInsnNode insn,
+ final SourceValue value1,
+ final SourceValue value2,
+ final SourceValue value3)
+ {
+ return new SourceValue(1, insn);
+ }
+
+ @Override
+ public SourceValue naryOperation(final AbstractInsnNode insn, final List<? extends SourceValue> values) {
+ int size;
+ int opcode = insn.getOpcode();
+ if (opcode == MULTIANEWARRAY) {
+ size = 1;
+ } else {
+ String desc = (opcode == INVOKEDYNAMIC)
+ ? ((InvokeDynamicInsnNode) insn).desc
+ : ((MethodInsnNode) insn).desc;
+ size = Type.getReturnType(desc).getSize();
+ }
+ return new SourceValue(size, insn);
+ }
+
+ @Override
+ public void returnOperation(
+ final AbstractInsnNode insn,
+ final SourceValue value,
+ final SourceValue expected)
+ {
+ }
+
+ @Override
+ public SourceValue merge(final SourceValue d, final SourceValue w) {
+ if (d.insns instanceof SmallSet && w.insns instanceof SmallSet) {
+ Set<AbstractInsnNode> s = ((SmallSet<AbstractInsnNode>) d.insns).union((SmallSet<AbstractInsnNode>) w.insns);
+ if (s == d.insns && d.size == w.size) {
+ return d;
+ } else {
+ return new SourceValue(Math.min(d.size, w.size), s);
+ }
+ }
+ if (d.size != w.size || !d.insns.containsAll(w.insns)) {
+ HashSet<AbstractInsnNode> s = new HashSet<AbstractInsnNode>();
+ s.addAll(d.insns);
+ s.addAll(w.insns);
+ return new SourceValue(Math.min(d.size, w.size), s);
+ }
+ return d;
+ }
+}
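Driven by the generic analyzer, this interpreter yields a simple def-use view of a method: every frame slot carries the set of instructions that may have produced its value. A sketch, assuming the Analyzer/Frame API from this package and hypothetical classNode, methodNode and i variables:

    Analyzer<SourceValue> analyzer = new Analyzer<SourceValue>(new SourceInterpreter());
    Frame<SourceValue>[] frames = analyzer.analyze(classNode.name, methodNode);
    // instructions that may have pushed the value on top of the stack at instruction i
    Frame<SourceValue> f = frames[i]; // null if instruction i is unreachable
    Set<AbstractInsnNode> producers = f.getStack(f.getStackSize() - 1).insns;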
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
new file mode 100644
index 0000000000..57ff212fb2
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
@@ -0,0 +1,97 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.Set;
+
+import scala.tools.asm.tree.AbstractInsnNode;
+
+/**
+ * A {@link Value} that is represented by its size (one or two words) and by
+ * the set of instructions that can produce it.
+ *
+ * @author Eric Bruneton
+ */
+public class SourceValue implements Value {
+
+ /**
+ * The size of this value.
+ */
+ public final int size;
+
+ /**
+ * The instructions that can produce this value. For example, for the Java
+ * code below, the instructions that can produce the value of <tt>i</tt>
+ * at line 5 are the two ISTORE instructions at lines 1 and 3:
+ *
+ * <pre>
+ * 1: i = 0;
+ * 2: if (...) {
+ * 3: i = 1;
+ * 4: }
+ * 5: return i;
+ * </pre>
+ *
+ * This field is a set of {@link AbstractInsnNode} objects.
+ */
+ public final Set<AbstractInsnNode> insns;
+
+ public SourceValue(final int size) {
+ this(size, SmallSet.<AbstractInsnNode>emptySet());
+ }
+
+ public SourceValue(final int size, final AbstractInsnNode insn) {
+ this.size = size;
+ this.insns = new SmallSet<AbstractInsnNode>(insn, null);
+ }
+
+ public SourceValue(final int size, final Set<AbstractInsnNode> insns) {
+ this.size = size;
+ this.insns = insns;
+ }
+
+ public int getSize() {
+ return size;
+ }
+
+ @Override
+ public boolean equals(final Object value) {
+ if (!(value instanceof SourceValue)) {
+ return false;
+ }
+ SourceValue v = (SourceValue) value;
+ return size == v.size && insns.equals(v.insns);
+ }
+
+ @Override
+ public int hashCode() {
+ return insns.hashCode();
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
new file mode 100644
index 0000000000..038880ddcd
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
@@ -0,0 +1,93 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.tree.JumpInsnNode;
+import scala.tools.asm.tree.LabelNode;
+
+/**
+ * A method subroutine (corresponds to a JSR instruction).
+ *
+ * @author Eric Bruneton
+ */
+class Subroutine {
+
+ LabelNode start;
+
+ boolean[] access;
+
+ List<JumpInsnNode> callers;
+
+ private Subroutine() {
+ }
+
+ Subroutine(
+ final LabelNode start,
+ final int maxLocals,
+ final JumpInsnNode caller)
+ {
+ this.start = start;
+ this.access = new boolean[maxLocals];
+ this.callers = new ArrayList<JumpInsnNode>();
+ callers.add(caller);
+ }
+
+ public Subroutine copy() {
+ Subroutine result = new Subroutine();
+ result.start = start;
+ result.access = new boolean[access.length];
+ System.arraycopy(access, 0, result.access, 0, access.length);
+ result.callers = new ArrayList<JumpInsnNode>(callers);
+ return result;
+ }
+
+ public boolean merge(final Subroutine subroutine) throws AnalyzerException {
+ boolean changes = false;
+ for (int i = 0; i < access.length; ++i) {
+ if (subroutine.access[i] && !access[i]) {
+ access[i] = true;
+ changes = true;
+ }
+ }
+ if (subroutine.start == start) {
+ for (int i = 0; i < subroutine.callers.size(); ++i) {
+ JumpInsnNode caller = subroutine.callers.get(i);
+ if (!callers.contains(caller)) {
+ callers.add(caller);
+ changes = true;
+ }
+ }
+ }
+ return changes;
+ }
+} \ No newline at end of file
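The analyzer builds and merges these records while following JSR/RET edges; merge() reports whether anything new was learned, so the caller can keep iterating until a fixpoint is reached. A package-internal sketch with hypothetical label and caller instructions:

    LabelNode start = new LabelNode();
    JumpInsnNode caller = new JumpInsnNode(Opcodes.JSR, start);
    Subroutine a = new Subroutine(start, 4, caller); // subroutine of a method with 4 local slots
    Subroutine b = a.copy();
    b.access[2] = true;                              // some path through the subroutine wrote local 2
    boolean changed = a.merge(b);                    // true: access[2] is now recorded in a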
diff --git a/src/asm/scala/tools/asm/tree/analysis/Value.java b/src/asm/scala/tools/asm/tree/analysis/Value.java
new file mode 100644
index 0000000000..1edf475ce7
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Value.java
@@ -0,0 +1,45 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+/**
+ * An immutable symbolic value for semantic interpretation of bytecode.
+ *
+ * @author Eric Bruneton
+ */
+public interface Value {
+
+ /**
+ * Returns the size of this value in words.
+ *
+ * @return either 1 or 2.
+ */
+ int getSize();
+}
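Any analysis-specific value only has to report its size in words. A minimal, hypothetical implementation (not part of this patch):

    import scala.tools.asm.tree.analysis.Value;

    // A value characterized only by its size: 1 for one-word types, 2 for long and double.
    final class SizedValue implements Value {
        private final int size;
        SizedValue(final int size) { this.size = size; }
        public int getSize() { return size; }
    }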
diff --git a/src/asm/scala/tools/asm/util/ASMifiable.java b/src/asm/scala/tools/asm/util/ASMifiable.java
new file mode 100644
index 0000000000..6a31dd508f
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/ASMifiable.java
@@ -0,0 +1,53 @@
+/**
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.util.Map;
+
+import scala.tools.asm.Label;
+
+/**
+ * An {@link scala.tools.asm.Attribute Attribute} that can print the ASM code
+ * to create an equivalent attribute.
+ *
+ * @author Eugene Kuleshov
+ */
+public interface ASMifiable {
+
+ /**
+ * Prints the ASM code to create an attribute equal to this attribute.
+ *
+ * @param buf a buffer used for printing Java code.
+ * @param varName name of the variable in a printed code used to store
+ * attribute instance.
+ * @param labelNames map of label instances to their names.
+ */
+ void asmify(StringBuffer buf, String varName, Map<Label, String> labelNames);
+}
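A non-standard attribute opts into ASMifier output by implementing this interface alongside Attribute. A sketch of a hypothetical attribute (the read/write plumbing of Attribute is omitted); by the convention used in ASMifier.visitAttribute below, asmify should declare a variable with the given varName that holds an equal attribute:

    import java.util.Map;

    import scala.tools.asm.Attribute;
    import scala.tools.asm.Label;

    class CommentAttribute extends Attribute implements ASMifiable {
        final String comment;
        CommentAttribute(final String comment) {
            super("Comment");
            this.comment = comment;
        }
        public void asmify(StringBuffer buf, String varName, Map<Label, String> labelNames) {
            // emit Java code that rebuilds an equal attribute into the named variable
            buf.append("Attribute ").append(varName)
               .append(" = new CommentAttribute(\"").append(comment).append("\");\n");
        }
    }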
diff --git a/src/asm/scala/tools/asm/util/ASMifier.java b/src/asm/scala/tools/asm/util/ASMifier.java
new file mode 100644
index 0000000000..5967c877d1
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/ASMifier.java
@@ -0,0 +1,1238 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.FileInputStream;
+import java.io.PrintWriter;
+import java.util.HashMap;
+import java.util.Map;
+
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+
+/**
+ * A {@link Printer} that prints the ASM code to generate the classes it visits.
+ *
+ * @author Eric Bruneton
+ */
+public class ASMifier extends Printer {
+
+ /**
+ * The name of the visitor variable in the produced code.
+ */
+ protected final String name;
+
+ /**
+ * Identifier of the annotation visitor variable in the produced code.
+ */
+ protected final int id;
+
+ /**
+ * The label names. This map associates String names to Label keys and is
+ * used only when method code is printed.
+ */
+ protected Map<Label, String> labelNames;
+
+ /**
+ * Pseudo access flag used to distinguish class access flags.
+ */
+ private static final int ACCESS_CLASS = 262144;
+
+ /**
+ * Pseudo access flag used to distinguish field access flags.
+ */
+ private static final int ACCESS_FIELD = 524288;
+
+ /**
+ * Pseudo access flag used to distinguish inner class flags.
+ */
+ private static final int ACCESS_INNER = 1048576;
+
+ /**
+ * Constructs a new {@link ASMifier}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #ASMifier(int, String, int)} version.
+ */
+ public ASMifier() {
+ this(Opcodes.ASM4, "cw", 0);
+ }
+
+ /**
+ * Constructs a new {@link ASMifier}.
+ *
+ * @param api the ASM API version implemented by this class. Must be one of
+ * {@link Opcodes#ASM4}.
+ * @param name the name of the visitor variable in the produced code.
+ * @param id identifier of the annotation visitor variable in the produced
+ * code.
+ */
+ protected ASMifier(final int api, final String name, final int id) {
+ super(api);
+ this.name = name;
+ this.id = id;
+ }
+
+ /**
+ * Prints the ASM source code to generate the given class to the standard
+ * output. <p> Usage: ASMifier [-debug] &lt;binary
+ * class name or class file name&gt;
+ *
+ * @param args the command line arguments.
+ *
+ * @throws Exception if the class cannot be found, or if an IO exception
+ * occurs.
+ */
+ public static void main(final String[] args) throws Exception {
+ int i = 0;
+ int flags = ClassReader.SKIP_DEBUG;
+
+ boolean ok = true;
+ if (args.length < 1 || args.length > 2) {
+ ok = false;
+ }
+ if (ok && "-debug".equals(args[0])) {
+ i = 1;
+ flags = 0;
+ if (args.length != 2) {
+ ok = false;
+ }
+ }
+ if (!ok) {
+ System.err.println("Prints the ASM code to generate the given class.");
+ System.err.println("Usage: ASMifier [-debug] "
+ + "<fully qualified class name or class file name>");
+ return;
+ }
+ ClassReader cr;
+ if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
+ || args[i].indexOf('/') > -1)
+ {
+ cr = new ClassReader(new FileInputStream(args[i]));
+ } else {
+ cr = new ClassReader(args[i]);
+ }
+ cr.accept(new TraceClassVisitor(null,
+ new ASMifier(),
+ new PrintWriter(System.out)), flags);
+ }
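+
+ // A programmatic equivalent of the command line above (sketch): wire an
+ // ASMifier into the TraceClassVisitor used in main() and feed it any class
+ // reachable from the classpath, e.g. java.lang.Runnable:
+ //
+ // ClassReader cr = new ClassReader("java.lang.Runnable");
+ // cr.accept(new TraceClassVisitor(null, new ASMifier(),
+ // new PrintWriter(System.out)), ClassReader.SKIP_DEBUG);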
+
+ // ------------------------------------------------------------------------
+ // Classes
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ String simpleName;
+ int n = name.lastIndexOf('/');
+ if (n == -1) {
+ simpleName = name;
+ } else {
+ text.add("package asm." + name.substring(0, n).replace('/', '.')
+ + ";\n");
+ simpleName = name.substring(n + 1);
+ }
+ text.add("import java.util.*;\n");
+ text.add("import org.objectweb.asm.*;\n");
+ text.add("import org.objectweb.asm.attrs.*;\n");
+ text.add("public class " + simpleName + "Dump implements Opcodes {\n\n");
+ text.add("public static byte[] dump () throws Exception {\n\n");
+ text.add("ClassWriter cw = new ClassWriter(0);\n");
+ text.add("FieldVisitor fv;\n");
+ text.add("MethodVisitor mv;\n");
+ text.add("AnnotationVisitor av0;\n\n");
+
+ buf.setLength(0);
+ buf.append("cw.visit(");
+ switch (version) {
+ case Opcodes.V1_1:
+ buf.append("V1_1");
+ break;
+ case Opcodes.V1_2:
+ buf.append("V1_2");
+ break;
+ case Opcodes.V1_3:
+ buf.append("V1_3");
+ break;
+ case Opcodes.V1_4:
+ buf.append("V1_4");
+ break;
+ case Opcodes.V1_5:
+ buf.append("V1_5");
+ break;
+ case Opcodes.V1_6:
+ buf.append("V1_6");
+ break;
+ case Opcodes.V1_7:
+ buf.append("V1_7");
+ break;
+ default:
+ buf.append(version);
+ break;
+ }
+ buf.append(", ");
+ appendAccess(access | ACCESS_CLASS);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(signature);
+ buf.append(", ");
+ appendConstant(superName);
+ buf.append(", ");
+ if (interfaces != null && interfaces.length > 0) {
+ buf.append("new String[] {");
+ for (int i = 0; i < interfaces.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendConstant(interfaces[i]);
+ }
+ buf.append(" }");
+ } else {
+ buf.append("null");
+ }
+ buf.append(");\n\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ buf.setLength(0);
+ buf.append("cw.visitSource(");
+ appendConstant(file);
+ buf.append(", ");
+ appendConstant(debug);
+ buf.append(");\n\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append("cw.visitOuterClass(");
+ appendConstant(owner);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(");\n\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public ASMifier visitClassAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public void visitClassAttribute(final Attribute attr) {
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ buf.setLength(0);
+ buf.append("cw.visitInnerClass(");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(outerName);
+ buf.append(", ");
+ appendConstant(innerName);
+ buf.append(", ");
+ appendAccess(access | ACCESS_INNER);
+ buf.append(");\n\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public ASMifier visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ buf.setLength(0);
+ buf.append("{\n");
+ buf.append("fv = cw.visitField(");
+ appendAccess(access | ACCESS_FIELD);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(", ");
+ appendConstant(signature);
+ buf.append(", ");
+ appendConstant(value);
+ buf.append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("fv", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public ASMifier visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ buf.setLength(0);
+ buf.append("{\n");
+ buf.append("mv = cw.visitMethod(");
+ appendAccess(access);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(", ");
+ appendConstant(signature);
+ buf.append(", ");
+ if (exceptions != null && exceptions.length > 0) {
+ buf.append("new String[] {");
+ for (int i = 0; i < exceptions.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendConstant(exceptions[i]);
+ }
+ buf.append(" }");
+ } else {
+ buf.append("null");
+ }
+ buf.append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("mv", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public void visitClassEnd() {
+ text.add("cw.visitEnd();\n\n");
+ text.add("return cw.toByteArray();\n");
+ text.add("}\n");
+ text.add("}\n");
+ }
+
+ // ------------------------------------------------------------------------
+ // Annotations
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(final String name, final Object value) {
+ buf.setLength(0);
+ buf.append("av").append(id).append(".visit(");
+ appendConstant(buf, name);
+ buf.append(", ");
+ appendConstant(buf, value);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ buf.setLength(0);
+ buf.append("av").append(id).append(".visitEnum(");
+ appendConstant(buf, name);
+ buf.append(", ");
+ appendConstant(buf, desc);
+ buf.append(", ");
+ appendConstant(buf, value);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public ASMifier visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append("{\n");
+ buf.append("AnnotationVisitor av").append(id + 1).append(" = av");
+ buf.append(id).append(".visitAnnotation(");
+ appendConstant(buf, name);
+ buf.append(", ");
+ appendConstant(buf, desc);
+ buf.append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", id + 1);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public ASMifier visitArray(final String name) {
+ buf.setLength(0);
+ buf.append("{\n");
+ buf.append("AnnotationVisitor av").append(id + 1).append(" = av");
+ buf.append(id).append(".visitArray(");
+ appendConstant(buf, name);
+ buf.append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", id + 1);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public void visitAnnotationEnd() {
+ buf.setLength(0);
+ buf.append("av").append(id).append(".visitEnd();\n");
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Fields
+ // ------------------------------------------------------------------------
+
+ @Override
+ public ASMifier visitFieldAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public void visitFieldAttribute(final Attribute attr) {
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitFieldEnd() {
+ buf.setLength(0);
+ buf.append(name).append(".visitEnd();\n");
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Methods
+ // ------------------------------------------------------------------------
+
+ @Override
+ public ASMifier visitAnnotationDefault() {
+ buf.setLength(0);
+ buf.append("{\n")
+ .append("av0 = ")
+ .append(name)
+ .append(".visitAnnotationDefault();\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public ASMifier visitMethodAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public ASMifier visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ buf.setLength(0);
+ buf.append("{\n")
+ .append("av0 = ").append(name).append(".visitParameterAnnotation(")
+ .append(parameter)
+ .append(", ");
+ appendConstant(desc);
+ buf.append(", ").append(visible).append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public void visitMethodAttribute(final Attribute attr) {
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitCode() {
+ text.add(name + ".visitCode();\n");
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ buf.setLength(0);
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ declareFrameTypes(nLocal, local);
+ declareFrameTypes(nStack, stack);
+ if (type == Opcodes.F_NEW) {
+ buf.append(name).append(".visitFrame(Opcodes.F_NEW, ");
+ } else {
+ buf.append(name).append(".visitFrame(Opcodes.F_FULL, ");
+ }
+ buf.append(nLocal).append(", new Object[] {");
+ appendFrameTypes(nLocal, local);
+ buf.append("}, ").append(nStack).append(", new Object[] {");
+ appendFrameTypes(nStack, stack);
+ buf.append('}');
+ break;
+ case Opcodes.F_APPEND:
+ declareFrameTypes(nLocal, local);
+ buf.append(name).append(".visitFrame(Opcodes.F_APPEND,")
+ .append(nLocal)
+ .append(", new Object[] {");
+ appendFrameTypes(nLocal, local);
+ buf.append("}, 0, null");
+ break;
+ case Opcodes.F_CHOP:
+ buf.append(name).append(".visitFrame(Opcodes.F_CHOP,")
+ .append(nLocal)
+ .append(", null, 0, null");
+ break;
+ case Opcodes.F_SAME:
+ buf.append(name).append(".visitFrame(Opcodes.F_SAME, 0, null, 0, null");
+ break;
+ case Opcodes.F_SAME1:
+ declareFrameTypes(1, stack);
+ buf.append(name).append(".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {");
+ appendFrameTypes(1, stack);
+ buf.append('}');
+ break;
+ }
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ buf.setLength(0);
+ buf.append(name).append(".visitInsn(").append(OPCODES[opcode]).append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ buf.setLength(0);
+ buf.append(name)
+ .append(".visitIntInsn(")
+ .append(OPCODES[opcode])
+ .append(", ")
+ .append(opcode == Opcodes.NEWARRAY
+ ? TYPES[operand]
+ : Integer.toString(operand))
+ .append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ buf.setLength(0);
+ buf.append(name)
+ .append(".visitVarInsn(")
+ .append(OPCODES[opcode])
+ .append(", ")
+ .append(var)
+ .append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ buf.setLength(0);
+ buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode]).append(", ");
+ appendConstant(type);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(this.name).append(".visitFieldInsn(").append(OPCODES[opcode]).append(", ");
+ appendConstant(owner);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(this.name).append(".visitMethodInsn(").append(OPCODES[opcode]).append(", ");
+ appendConstant(owner);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ buf.setLength(0);
+ buf.append(this.name).append(".visitInvokeDynamicInsn(");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(", ");
+ appendConstant(bsm);
+ buf.append(", new Object[]{");
+ for (int i = 0; i < bsmArgs.length; ++i) {
+ appendConstant(bsmArgs[i]);
+ if (i != bsmArgs.length - 1) {
+ buf.append(", ");
+ }
+ }
+ buf.append("});\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ buf.setLength(0);
+ declareLabel(label);
+ buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode]).append(", ");
+ appendLabel(label);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ buf.setLength(0);
+ declareLabel(label);
+ buf.append(name).append(".visitLabel(");
+ appendLabel(label);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ buf.setLength(0);
+ buf.append(name).append(".visitLdcInsn(");
+ appendConstant(cst);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ buf.setLength(0);
+ buf.append(name)
+ .append(".visitIincInsn(")
+ .append(var)
+ .append(", ")
+ .append(increment)
+ .append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ buf.setLength(0);
+ for (int i = 0; i < labels.length; ++i) {
+ declareLabel(labels[i]);
+ }
+ declareLabel(dflt);
+
+ buf.append(name)
+ .append(".visitTableSwitchInsn(")
+ .append(min)
+ .append(", ")
+ .append(max)
+ .append(", ");
+ appendLabel(dflt);
+ buf.append(", new Label[] {");
+ for (int i = 0; i < labels.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendLabel(labels[i]);
+ }
+ buf.append(" });\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ buf.setLength(0);
+ for (int i = 0; i < labels.length; ++i) {
+ declareLabel(labels[i]);
+ }
+ declareLabel(dflt);
+
+ buf.append(name).append(".visitLookupSwitchInsn(");
+ appendLabel(dflt);
+ buf.append(", new int[] {");
+ for (int i = 0; i < keys.length; ++i) {
+ buf.append(i == 0 ? " " : ", ").append(keys[i]);
+ }
+ buf.append(" }, new Label[] {");
+ for (int i = 0; i < labels.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendLabel(labels[i]);
+ }
+ buf.append(" });\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ buf.setLength(0);
+ buf.append(name).append(".visitMultiANewArrayInsn(");
+ appendConstant(desc);
+ buf.append(", ").append(dims).append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ buf.setLength(0);
+ declareLabel(start);
+ declareLabel(end);
+ declareLabel(handler);
+ buf.append(name).append(".visitTryCatchBlock(");
+ appendLabel(start);
+ buf.append(", ");
+ appendLabel(end);
+ buf.append(", ");
+ appendLabel(handler);
+ buf.append(", ");
+ appendConstant(type);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ buf.setLength(0);
+ buf.append(this.name).append(".visitLocalVariable(");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(", ");
+ appendConstant(signature);
+ buf.append(", ");
+ appendLabel(start);
+ buf.append(", ");
+ appendLabel(end);
+ buf.append(", ").append(index).append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ buf.setLength(0);
+ buf.append(name).append(".visitLineNumber(").append(line).append(", ");
+ appendLabel(start);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ buf.setLength(0);
+ buf.append(name)
+ .append(".visitMaxs(")
+ .append(maxStack)
+ .append(", ")
+ .append(maxLocals)
+ .append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMethodEnd() {
+ buf.setLength(0);
+ buf.append(name).append(".visitEnd();\n");
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Common methods
+ // ------------------------------------------------------------------------
+
+ public ASMifier visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ buf.setLength(0);
+ buf.append("{\n")
+ .append("av0 = ")
+ .append(name)
+ .append(".visitAnnotation(");
+ appendConstant(desc);
+ buf.append(", ").append(visible).append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ public void visitAttribute(final Attribute attr) {
+ buf.setLength(0);
+ buf.append("// ATTRIBUTE ").append(attr.type).append('\n');
+ if (attr instanceof ASMifiable) {
+ if (labelNames == null) {
+ labelNames = new HashMap<Label, String>();
+ }
+ buf.append("{\n");
+ ((ASMifiable) attr).asmify(buf, "attr", labelNames);
+ buf.append(name).append(".visitAttribute(attr);\n");
+ buf.append("}\n");
+ }
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ protected ASMifier createASMifier(final String name, final int id) {
+ return new ASMifier(Opcodes.ASM4, name, id);
+ }
+
+ /**
+ * Appends a string representation of the given access modifiers to {@link
+ * #buf buf}.
+ *
+ * @param access some access modifiers.
+ */
+ void appendAccess(final int access) {
+ boolean first = true;
+ if ((access & Opcodes.ACC_PUBLIC) != 0) {
+ buf.append("ACC_PUBLIC");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_PRIVATE) != 0) {
+ buf.append("ACC_PRIVATE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_PROTECTED) != 0) {
+ buf.append("ACC_PROTECTED");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_FINAL) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_FINAL");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_STATIC) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_STATIC");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_SYNCHRONIZED) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ if ((access & ACCESS_CLASS) == 0) {
+ buf.append("ACC_SYNCHRONIZED");
+ } else {
+ buf.append("ACC_SUPER");
+ }
+ first = false;
+ }
+ if ((access & Opcodes.ACC_VOLATILE) != 0
+ && (access & ACCESS_FIELD) != 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_VOLATILE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_BRIDGE) != 0 && (access & ACCESS_CLASS) == 0
+ && (access & ACCESS_FIELD) == 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_BRIDGE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_VARARGS) != 0 && (access & ACCESS_CLASS) == 0
+ && (access & ACCESS_FIELD) == 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_VARARGS");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_TRANSIENT) != 0
+ && (access & ACCESS_FIELD) != 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_TRANSIENT");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_NATIVE) != 0 && (access & ACCESS_CLASS) == 0
+ && (access & ACCESS_FIELD) == 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_NATIVE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_ENUM) != 0
+ && ((access & ACCESS_CLASS) != 0
+ || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0))
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_ENUM");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_ANNOTATION) != 0
+ && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0))
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_ANNOTATION");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_ABSTRACT) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_ABSTRACT");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_INTERFACE) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_INTERFACE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_STRICT) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_STRICT");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_SYNTHETIC");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_DEPRECATED");
+ first = false;
+ }
+ if (first) {
+ buf.append('0');
+ }
+ }
+
+ /**
+ * Appends a string representation of the given constant to {@link #buf
+ * buf}.
+ *
+ * @param cst an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double} or {@link String} object. May be <tt>null</tt>.
+ */
+ protected void appendConstant(final Object cst) {
+ appendConstant(buf, cst);
+ }
+
+ /**
+ * Appends a string representation of the given constant to the given
+ * buffer.
+ *
+ * @param buf a string buffer.
+ * @param cst an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double} or {@link String} object. May be <tt>null</tt>.
+ */
+ static void appendConstant(final StringBuffer buf, final Object cst) {
+ if (cst == null) {
+ buf.append("null");
+ } else if (cst instanceof String) {
+ appendString(buf, (String) cst);
+ } else if (cst instanceof Type) {
+ buf.append("Type.getType(\"");
+ buf.append(((Type) cst).getDescriptor());
+ buf.append("\")");
+ } else if (cst instanceof Handle) {
+ buf.append("new Handle(");
+ Handle h = (Handle) cst;
+ buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()]).append(", \"");
+ buf.append(h.getOwner()).append("\", \"");
+ buf.append(h.getName()).append("\", \"");
+ buf.append(h.getDesc()).append("\")");
+ } else if (cst instanceof Byte) {
+ buf.append("new Byte((byte)").append(cst).append(')');
+ } else if (cst instanceof Boolean) {
+ buf.append(((Boolean) cst).booleanValue() ? "Boolean.TRUE" : "Boolean.FALSE");
+ } else if (cst instanceof Short) {
+ buf.append("new Short((short)").append(cst).append(')');
+ } else if (cst instanceof Character) {
+ int c = ((Character) cst).charValue();
+ buf.append("new Character((char)").append(c).append(')');
+ } else if (cst instanceof Integer) {
+ buf.append("new Integer(").append(cst).append(')');
+ } else if (cst instanceof Float) {
+ buf.append("new Float(\"").append(cst).append("\")");
+ } else if (cst instanceof Long) {
+ buf.append("new Long(").append(cst).append("L)");
+ } else if (cst instanceof Double) {
+ buf.append("new Double(\"").append(cst).append("\")");
+ } else if (cst instanceof byte[]) {
+ byte[] v = (byte[]) cst;
+ buf.append("new byte[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof boolean[]) {
+ boolean[] v = (boolean[]) cst;
+ buf.append("new boolean[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof short[]) {
+ short[] v = (short[]) cst;
+ buf.append("new short[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append("(short)").append(v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof char[]) {
+ char[] v = (char[]) cst;
+ buf.append("new char[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",")
+ .append("(char)")
+ .append((int) v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof int[]) {
+ int[] v = (int[]) cst;
+ buf.append("new int[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof long[]) {
+ long[] v = (long[]) cst;
+ buf.append("new long[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]).append('L');
+ }
+ buf.append('}');
+ } else if (cst instanceof float[]) {
+ float[] v = (float[]) cst;
+ buf.append("new float[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]).append('f');
+ }
+ buf.append('}');
+ } else if (cst instanceof double[]) {
+ double[] v = (double[]) cst;
+ buf.append("new double[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]).append('d');
+ }
+ buf.append('}');
+ }
+ }
+
+ private void declareFrameTypes(final int n, final Object[] o) {
+ for (int i = 0; i < n; ++i) {
+ if (o[i] instanceof Label) {
+ declareLabel((Label) o[i]);
+ }
+ }
+ }
+
+ private void appendFrameTypes(final int n, final Object[] o) {
+ for (int i = 0; i < n; ++i) {
+ if (i > 0) {
+ buf.append(", ");
+ }
+ if (o[i] instanceof String) {
+ appendConstant(o[i]);
+ } else if (o[i] instanceof Integer) {
+ switch (((Integer) o[i]).intValue()) {
+ case 0:
+ buf.append("Opcodes.TOP");
+ break;
+ case 1:
+ buf.append("Opcodes.INTEGER");
+ break;
+ case 2:
+ buf.append("Opcodes.FLOAT");
+ break;
+ case 3:
+ buf.append("Opcodes.DOUBLE");
+ break;
+ case 4:
+ buf.append("Opcodes.LONG");
+ break;
+ case 5:
+ buf.append("Opcodes.NULL");
+ break;
+ case 6:
+ buf.append("Opcodes.UNINITIALIZED_THIS");
+ break;
+ }
+ } else {
+ appendLabel((Label) o[i]);
+ }
+ }
+ }
+
+ /**
+ * Appends a declaration of the given label to {@link #buf buf}. This
+ * declaration is of the form "Label lXXX = new Label();". Does nothing if
+ * the given label has already been declared.
+ *
+ * @param l a label.
+ */
+ protected void declareLabel(final Label l) {
+ if (labelNames == null) {
+ labelNames = new HashMap<Label, String>();
+ }
+ String name = labelNames.get(l);
+ if (name == null) {
+ name = "l" + labelNames.size();
+ labelNames.put(l, name);
+ buf.append("Label ").append(name).append(" = new Label();\n");
+ }
+ }
+
+ /**
+ * Appends the name of the given label to {@link #buf buf}. The given label
+ * <i>must</i> already have a name. One way to ensure this is to always
+ * call {@link #declareLabel declareLabel} before calling this method.
+ *
+ * @param l a label.
+ */
+ protected void appendLabel(final Label l) {
+ buf.append(labelNames.get(l));
+ }
+}
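For reference, the flag-to-string translation performed by appendAccess above can be illustrated with a small standalone sketch. AccessDecoder is hypothetical illustration code (the bit values are the standard JVM access flags), not part of ASM or of this patch:

// Hypothetical standalone sketch of the appendAccess pattern shown above:
// turn an access bit mask into an "ACC_X + ACC_Y" string, or "0" if no flag is set.
public class AccessDecoder {

    public static String decode(final int access) {
        StringBuilder buf = new StringBuilder();
        appendFlag(buf, access, 0x0001, "ACC_PUBLIC"); // standard JVM flag values
        appendFlag(buf, access, 0x0008, "ACC_STATIC");
        appendFlag(buf, access, 0x0010, "ACC_FINAL");
        return buf.length() == 0 ? "0" : buf.toString();
    }

    private static void appendFlag(StringBuilder buf, int access, int bit, String name) {
        if ((access & bit) != 0) {
            if (buf.length() > 0) {
                buf.append(" + ");
            }
            buf.append(name);
        }
    }

    public static void main(String[] args) {
        // Prints "ACC_PUBLIC + ACC_STATIC + ACC_FINAL".
        System.out.println(decode(0x0001 | 0x0008 | 0x0010));
    }
}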
diff --git a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
new file mode 100644
index 0000000000..8030c14f2e
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
@@ -0,0 +1,142 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+
+/**
+ * An {@link AnnotationVisitor} that checks that its methods are properly used.
+ *
+ * @author Eric Bruneton
+ */
+public class CheckAnnotationAdapter extends AnnotationVisitor {
+
+ private final boolean named;
+
+ private boolean end;
+
+ public CheckAnnotationAdapter(final AnnotationVisitor av) {
+ this(av, true);
+ }
+
+ CheckAnnotationAdapter(final AnnotationVisitor av, final boolean named) {
+ super(Opcodes.ASM4, av);
+ this.named = named;
+ }
+
+ @Override
+ public void visit(final String name, final Object value) {
+ checkEnd();
+ checkName(name);
+ if (!(value instanceof Byte || value instanceof Boolean
+ || value instanceof Character || value instanceof Short
+ || value instanceof Integer || value instanceof Long
+ || value instanceof Float || value instanceof Double
+ || value instanceof String || value instanceof Type
+ || value instanceof byte[] || value instanceof boolean[]
+ || value instanceof char[] || value instanceof short[]
+ || value instanceof int[] || value instanceof long[]
+ || value instanceof float[] || value instanceof double[]))
+ {
+ throw new IllegalArgumentException("Invalid annotation value");
+ }
+ if (value instanceof Type) {
+ int sort = ((Type) value).getSort();
+ if (sort != Type.OBJECT && sort != Type.ARRAY) {
+ throw new IllegalArgumentException("Invalid annotation value");
+ }
+ }
+ if (av != null) {
+ av.visit(name, value);
+ }
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ checkEnd();
+ checkName(name);
+ CheckMethodAdapter.checkDesc(desc, false);
+ if (value == null) {
+ throw new IllegalArgumentException("Invalid enum value");
+ }
+ if (av != null) {
+ av.visitEnum(name, desc, value);
+ }
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ checkEnd();
+ checkName(name);
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(av == null
+ ? null
+ : av.visitAnnotation(name, desc));
+ }
+
+ @Override
+ public AnnotationVisitor visitArray(final String name) {
+ checkEnd();
+ checkName(name);
+ return new CheckAnnotationAdapter(av == null
+ ? null
+ : av.visitArray(name), false);
+ }
+
+ @Override
+ public void visitEnd() {
+ checkEnd();
+ end = true;
+ if (av != null) {
+ av.visitEnd();
+ }
+ }
+
+ private void checkEnd() {
+ if (end) {
+ throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+ }
+ }
+
+ private void checkName(final String name) {
+ if (named && name == null) {
+ throw new IllegalArgumentException("Annotation value name must not be null");
+ }
+ }
+}
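A minimal usage sketch of the adapter added above, assuming the usual ASM convention that a null delegate is allowed (the checks then run without forwarding calls anywhere):

import scala.tools.asm.AnnotationVisitor;
import scala.tools.asm.util.CheckAnnotationAdapter;

public class CheckAnnotationExample {
    public static void main(String[] args) {
        AnnotationVisitor av = new CheckAnnotationAdapter(null); // checks only, no delegate
        av.visit("count", Integer.valueOf(42)); // boxed primitives are valid annotation values
        av.visitEnd();
        // Any further visit call is now rejected:
        // av.visit("late", "x"); // would throw IllegalStateException
    }
}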
diff --git a/src/asm/scala/tools/asm/util/CheckClassAdapter.java b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
new file mode 100644
index 0000000000..a455322531
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
@@ -0,0 +1,603 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.FileInputStream;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.ClassNode;
+import scala.tools.asm.tree.MethodNode;
+import scala.tools.asm.tree.analysis.Analyzer;
+import scala.tools.asm.tree.analysis.BasicValue;
+import scala.tools.asm.tree.analysis.Frame;
+import scala.tools.asm.tree.analysis.SimpleVerifier;
+
+/**
+ * A {@link ClassVisitor} that checks that its methods are properly used. More
+ * precisely this class adapter checks each method call individually, based
+ * <i>only</i> on its arguments, but does <i>not</i> check the <i>sequence</i>
+ * of method calls. For example, the invalid sequence
+ * <tt>visitField(ACC_PUBLIC, "i", "I", null)</tt> <tt>visitField(ACC_PUBLIC,
+ * "i", "D", null)</tt>
+ * will <i>not</i> be detected by this class adapter.
+ *
+ * <p><code>CheckClassAdapter</code> can also be used to verify bytecode
+ * transformations in order to make sure the transformed bytecode is sane. For
+ * example:
+ *
+ * <pre>
+ * InputStream is = ...; // get bytes for the source class
+ * ClassReader cr = new ClassReader(is);
+ * ClassWriter cw = new ClassWriter(cr, ClassWriter.COMPUTE_MAXS);
+ * ClassVisitor cv = new <b>MyClassAdapter</b>(new CheckClassAdapter(cw));
+ * cr.accept(cv, 0);
+ *
+ * StringWriter sw = new StringWriter();
+ * PrintWriter pw = new PrintWriter(sw);
+ * CheckClassAdapter.verify(new ClassReader(cw.toByteArray()), false, pw);
+ * assertTrue(sw.toString(), sw.toString().length()==0);
+ * </pre>
+ *
+ * The code above runs the transformed bytecode through the
+ * <code>CheckClassAdapter</code>. It is not exactly the same verification
+ * as the JVM performs, but it runs a data flow analysis for the code of each
+ * method and checks that expectations are met for each method instruction.
+ *
+ * <p>If the method bytecode has errors, the assertion text will show the
+ * erroneous instruction number and a dump of the failed method with
+ * information about locals and stack slots for each instruction. For example
+ * (the format is: insnNumber locals : stack):
+ *
+ * <pre>
+ * org.objectweb.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
+ * at org.objectweb.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
+ * at org.objectweb.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
+ * ...
+ * remove()V
+ * 00000 LinkedBlockingQueue$Itr . . . . . . . . :
+ * ICONST_0
+ * 00001 LinkedBlockingQueue$Itr . . . . . . . . : I
+ * ISTORE 2
+ * 00001 LinkedBlockingQueue$Itr <b>.</b> I . . . . . . :
+ * ...
+ *
+ * 00071 LinkedBlockingQueue$Itr <b>.</b> I . . . . . . :
+ * ILOAD 1
+ * 00072 <b>?</b>
+ * INVOKESPECIAL java/lang/Integer.<init> (I)V
+ * ...
+ * </pre>
+ *
+ * In the above output you can see that variable 1, loaded by the
+ * <code>ILOAD 1</code> instruction at position <code>00071</code>, is not
+ * initialized. You can also see that at the beginning of the method (code
+ * inserted by the transformation) variable 2 is initialized.
+ *
+ * <p>Note that when used like that, <code>CheckClassAdapter.verify()</code>
+ * can trigger additional class loading, because it is using
+ * <code>SimpleVerifier</code>.
+ *
+ * @author Eric Bruneton
+ */
+public class CheckClassAdapter extends ClassVisitor {
+
+ /**
+ * The class version number.
+ */
+ private int version;
+
+ /**
+ * <tt>true</tt> if the visit method has been called.
+ */
+ private boolean start;
+
+ /**
+ * <tt>true</tt> if the visitSource method has been called.
+ */
+ private boolean source;
+
+ /**
+ * <tt>true</tt> if the visitOuterClass method has been called.
+ */
+ private boolean outer;
+
+ /**
+ * <tt>true</tt> if the visitEnd method has been called.
+ */
+ private boolean end;
+
+ /**
+ * The already visited labels. This map associates Integer values to Label
+ * keys.
+ */
+ private Map<Label, Integer> labels;
+
+ /**
+ * <tt>true</tt> if the method code must be checked with a BasicVerifier.
+ */
+ private boolean checkDataFlow;
+
+ /**
+ * Checks a given class. <p> Usage: CheckClassAdapter &lt;binary
+ * class name or class file name&gt;
+ *
+ * @param args the command line arguments.
+ *
+ * @throws Exception if the class cannot be found, or if an IO exception
+ * occurs.
+ */
+ public static void main(final String[] args) throws Exception {
+ if (args.length != 1) {
+ System.err.println("Verifies the given class.");
+ System.err.println("Usage: CheckClassAdapter "
+ + "<fully qualified class name or class file name>");
+ return;
+ }
+ ClassReader cr;
+ if (args[0].endsWith(".class")) {
+ cr = new ClassReader(new FileInputStream(args[0]));
+ } else {
+ cr = new ClassReader(args[0]);
+ }
+
+ verify(cr, false, new PrintWriter(System.err));
+ }
+
+ /**
+ * Checks a given class.
+ *
+ * @param cr a <code>ClassReader</code> that contains bytecode for the
+ * analysis.
+ * @param loader a <code>ClassLoader</code> which will be used to load
+ * referenced classes. This is useful if you are verifying multiple
+ * interdependent classes.
+ * @param dump true if bytecode should be printed out, not only when errors
+ * are found.
+ * @param pw the writer to which the results are printed.
+ */
+ public static void verify(
+ final ClassReader cr,
+ final ClassLoader loader,
+ final boolean dump,
+ final PrintWriter pw)
+ {
+ ClassNode cn = new ClassNode();
+ cr.accept(new CheckClassAdapter(cn, false), ClassReader.SKIP_DEBUG);
+
+ Type superType = cn.superName == null
+ ? null
+ : Type.getObjectType(cn.superName);
+ List<MethodNode> methods = cn.methods;
+
+ List<Type> interfaces = new ArrayList<Type>();
+ for (Iterator<String> i = cn.interfaces.iterator(); i.hasNext();) {
+ interfaces.add(Type.getObjectType(i.next().toString()));
+ }
+
+ for (int i = 0; i < methods.size(); ++i) {
+ MethodNode method = methods.get(i);
+ SimpleVerifier verifier = new SimpleVerifier(Type.getObjectType(cn.name),
+ superType,
+ interfaces,
+ (cn.access & Opcodes.ACC_INTERFACE) != 0);
+ Analyzer<BasicValue> a = new Analyzer<BasicValue>(verifier);
+ if (loader != null) {
+ verifier.setClassLoader(loader);
+ }
+ try {
+ a.analyze(cn.name, method);
+ if (!dump) {
+ continue;
+ }
+ } catch (Exception e) {
+ e.printStackTrace(pw);
+ }
+ printAnalyzerResult(method, a, pw);
+ }
+ pw.flush();
+ }
+
+ /**
+ * Checks a given class.
+ *
+ * @param cr a <code>ClassReader</code> that contains bytecode for the
+ * analysis.
+ * @param dump true if bytecode should be printed out, not only when errors
+ * are found.
+ * @param pw the writer to which the results are printed.
+ */
+ public static void verify(
+ final ClassReader cr,
+ final boolean dump,
+ final PrintWriter pw)
+ {
+ verify(cr, null, dump, pw);
+ }
+
+ static void printAnalyzerResult(
+ MethodNode method,
+ Analyzer<BasicValue> a,
+ final PrintWriter pw)
+ {
+ Frame<BasicValue>[] frames = a.getFrames();
+ Textifier t = new Textifier();
+ TraceMethodVisitor mv = new TraceMethodVisitor(t);
+
+ pw.println(method.name + method.desc);
+ for (int j = 0; j < method.instructions.size(); ++j) {
+ method.instructions.get(j).accept(mv);
+
+ StringBuffer s = new StringBuffer();
+ Frame<BasicValue> f = frames[j];
+ if (f == null) {
+ s.append('?');
+ } else {
+ for (int k = 0; k < f.getLocals(); ++k) {
+ s.append(getShortName(f.getLocal(k).toString()))
+ .append(' ');
+ }
+ s.append(" : ");
+ for (int k = 0; k < f.getStackSize(); ++k) {
+ s.append(getShortName(f.getStack(k).toString()))
+ .append(' ');
+ }
+ }
+ while (s.length() < method.maxStack + method.maxLocals + 1) {
+ s.append(' ');
+ }
+ pw.print(Integer.toString(j + 100000).substring(1));
+ pw.print(" " + s + " : " + t.text.get(t.text.size() - 1));
+ }
+ for (int j = 0; j < method.tryCatchBlocks.size(); ++j) {
+ method.tryCatchBlocks.get(j).accept(mv);
+ pw.print(" " + t.text.get(t.text.size() - 1));
+ }
+ pw.println();
+ }
+
+ private static String getShortName(final String name) {
+ int n = name.lastIndexOf('/');
+ int k = name.length();
+ if (name.charAt(k - 1) == ';') {
+ k--;
+ }
+ return n == -1 ? name : name.substring(n + 1, k);
+ }
+
+ /**
+ * Constructs a new {@link CheckClassAdapter}. <i>Subclasses must not use
+ * this constructor</i>. Instead, they must use the
+ * {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
+ *
+ * @param cv the class visitor to which this adapter must delegate calls.
+ */
+ public CheckClassAdapter(final ClassVisitor cv) {
+ this(cv, true);
+ }
+
+ /**
+ * Constructs a new {@link CheckClassAdapter}. <i>Subclasses must not use
+ * this constructor</i>. Instead, they must use the
+ * {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
+ *
+ * @param cv the class visitor to which this adapter must delegate calls.
+ * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
+ * <tt>false</tt> to not perform any data flow check (see
+ * {@link CheckMethodAdapter}). This option requires valid maxLocals
+ * and maxStack values.
+ */
+ public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow)
+ {
+ this(Opcodes.ASM4, cv, checkDataFlow);
+ }
+
+ /**
+ * Constructs a new {@link CheckClassAdapter}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param cv the class visitor to which this adapter must delegate calls.
+ * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
+ * <tt>false</tt> to not perform any data flow check (see
+ * {@link CheckMethodAdapter}). This option requires valid maxLocals
+ * and maxStack values.
+ */
+ protected CheckClassAdapter(
+ final int api,
+ final ClassVisitor cv,
+ final boolean checkDataFlow)
+ {
+ super(api, cv);
+ this.labels = new HashMap<Label, Integer>();
+ this.checkDataFlow = checkDataFlow;
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the ClassVisitor interface
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ if (start) {
+ throw new IllegalStateException("visit must be called only once");
+ }
+ start = true;
+ checkState();
+ checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_FINAL
+ + Opcodes.ACC_SUPER + Opcodes.ACC_INTERFACE
+ + Opcodes.ACC_ABSTRACT + Opcodes.ACC_SYNTHETIC
+ + Opcodes.ACC_ANNOTATION + Opcodes.ACC_ENUM
+ + Opcodes.ACC_DEPRECATED
+ + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ if (name == null || !name.endsWith("package-info")) {
+ CheckMethodAdapter.checkInternalName(name, "class name");
+ }
+ if ("java/lang/Object".equals(name)) {
+ if (superName != null) {
+ throw new IllegalArgumentException("The super class name of the Object class must be 'null'");
+ }
+ } else {
+ CheckMethodAdapter.checkInternalName(superName, "super class name");
+ }
+ if (signature != null) {
+ CheckMethodAdapter.checkClassSignature(signature);
+ }
+ if ((access & Opcodes.ACC_INTERFACE) != 0) {
+ if (!"java/lang/Object".equals(superName)) {
+ throw new IllegalArgumentException("The super class name of interfaces must be 'java/lang/Object'");
+ }
+ }
+ if (interfaces != null) {
+ for (int i = 0; i < interfaces.length; ++i) {
+ CheckMethodAdapter.checkInternalName(interfaces[i],
+ "interface name at index " + i);
+ }
+ }
+ this.version = version;
+ super.visit(version, access, name, signature, superName, interfaces);
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ checkState();
+ if (source) {
+ throw new IllegalStateException("visitSource can be called only once.");
+ }
+ source = true;
+ super.visitSource(file, debug);
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ checkState();
+ if (outer) {
+ throw new IllegalStateException("visitOuterClass can be called only once.");
+ }
+ outer = true;
+ if (owner == null) {
+ throw new IllegalArgumentException("Illegal outer class owner");
+ }
+ if (desc != null) {
+ CheckMethodAdapter.checkMethodDesc(desc);
+ }
+ super.visitOuterClass(owner, name, desc);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ checkState();
+ CheckMethodAdapter.checkInternalName(name, "class name");
+ if (outerName != null) {
+ CheckMethodAdapter.checkInternalName(outerName, "outer class name");
+ }
+ if (innerName != null) {
+ CheckMethodAdapter.checkIdentifier(innerName, "inner class name");
+ }
+ checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ + Opcodes.ACC_FINAL + Opcodes.ACC_INTERFACE
+ + Opcodes.ACC_ABSTRACT + Opcodes.ACC_SYNTHETIC
+ + Opcodes.ACC_ANNOTATION + Opcodes.ACC_ENUM);
+ super.visitInnerClass(name, outerName, innerName, access);
+ }
+
+ @Override
+ public FieldVisitor visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ checkState();
+ checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ + Opcodes.ACC_FINAL + Opcodes.ACC_VOLATILE
+ + Opcodes.ACC_TRANSIENT + Opcodes.ACC_SYNTHETIC
+ + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED
+ + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ CheckMethodAdapter.checkUnqualifiedName(version, name, "field name");
+ CheckMethodAdapter.checkDesc(desc, false);
+ if (signature != null) {
+ CheckMethodAdapter.checkFieldSignature(signature);
+ }
+ if (value != null) {
+ CheckMethodAdapter.checkConstant(value);
+ }
+ FieldVisitor av = super.visitField(access, name, desc, signature, value);
+ return new CheckFieldAdapter(av);
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ checkState();
+ checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ + Opcodes.ACC_FINAL + Opcodes.ACC_SYNCHRONIZED
+ + Opcodes.ACC_BRIDGE + Opcodes.ACC_VARARGS + Opcodes.ACC_NATIVE
+ + Opcodes.ACC_ABSTRACT + Opcodes.ACC_STRICT
+ + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED
+ + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ CheckMethodAdapter.checkMethodIdentifier(version, name, "method name");
+ CheckMethodAdapter.checkMethodDesc(desc);
+ if (signature != null) {
+ CheckMethodAdapter.checkMethodSignature(signature);
+ }
+ if (exceptions != null) {
+ for (int i = 0; i < exceptions.length; ++i) {
+ CheckMethodAdapter.checkInternalName(exceptions[i],
+ "exception name at index " + i);
+ }
+ }
+ CheckMethodAdapter cma;
+ if (checkDataFlow) {
+ cma = new CheckMethodAdapter(access,
+ name,
+ desc,
+ super.visitMethod(access, name, desc, signature, exceptions),
+ labels);
+ } else {
+ cma = new CheckMethodAdapter(super.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions), labels);
+ }
+ cma.version = version;
+ return cma;
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ checkState();
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ checkState();
+ if (attr == null) {
+ throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ }
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitEnd() {
+ checkState();
+ end = true;
+ super.visitEnd();
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that the visit method has been called and that visitEnd has not
+ * been called.
+ */
+ private void checkState() {
+ if (!start) {
+ throw new IllegalStateException("Cannot visit member before visit has been called.");
+ }
+ if (end) {
+ throw new IllegalStateException("Cannot visit member after visitEnd has been called.");
+ }
+ }
+
+ /**
+ * Checks that the given access flags do not contain invalid flags. This
+ * method also checks that mutually incompatible flags are not set
+ * simultaneously.
+ *
+ * @param access the access flags to be checked
+ * @param possibleAccess the valid access flags.
+ */
+ static void checkAccess(final int access, final int possibleAccess) {
+ if ((access & ~possibleAccess) != 0) {
+ throw new IllegalArgumentException("Invalid access flags: "
+ + access);
+ }
+ int pub = (access & Opcodes.ACC_PUBLIC) == 0 ? 0 : 1;
+ int pri = (access & Opcodes.ACC_PRIVATE) == 0 ? 0 : 1;
+ int pro = (access & Opcodes.ACC_PROTECTED) == 0 ? 0 : 1;
+ if (pub + pri + pro > 1) {
+ throw new IllegalArgumentException("public private and protected are mutually exclusive: "
+ + access);
+ }
+ int fin = (access & Opcodes.ACC_FINAL) == 0 ? 0 : 1;
+ int abs = (access & Opcodes.ACC_ABSTRACT) == 0 ? 0 : 1;
+ if (fin + abs > 1) {
+ throw new IllegalArgumentException("final and abstract are mutually exclusive: "
+ + access);
+ }
+ }
+}
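A minimal sketch of the verify entry point defined above, mirroring the javadoc example; the class name used here (java.lang.Runnable) is an arbitrary choice and must be resolvable on the classpath:

import java.io.PrintWriter;
import java.io.StringWriter;

import scala.tools.asm.ClassReader;
import scala.tools.asm.util.CheckClassAdapter;

public class VerifyExample {
    public static void main(String[] args) throws Exception {
        // Read the class by name and run the SimpleVerifier-based data flow analysis.
        ClassReader cr = new ClassReader("java.lang.Runnable");
        StringWriter sw = new StringWriter();
        CheckClassAdapter.verify(cr, false, new PrintWriter(sw));
        // With dump == false, output is produced only for methods whose analysis failed.
        System.out.println(sw.toString().length() == 0 ? "verification OK" : sw.toString());
    }
}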
diff --git a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
new file mode 100644
index 0000000000..bdcbe14b16
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
@@ -0,0 +1,97 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A {@link FieldVisitor} that checks that its methods are properly used.
+ */
+public class CheckFieldAdapter extends FieldVisitor {
+
+ private boolean end;
+
+ /**
+ * Constructs a new {@link CheckFieldAdapter}. <i>Subclasses must not use
+ * this constructor</i>. Instead, they must use the
+ * {@link #CheckFieldAdapter(int, FieldVisitor)} version.
+ *
+ * @param fv the field visitor to which this adapter must delegate calls.
+ */
+ public CheckFieldAdapter(final FieldVisitor fv) {
+ this(Opcodes.ASM4, fv);
+ }
+
+ /**
+ * Constructs a new {@link CheckFieldAdapter}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param fv the field visitor to which this adapter must delegate calls.
+ */
+ protected CheckFieldAdapter(final int api, final FieldVisitor fv) {
+ super(api, fv);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ checkEnd();
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ checkEnd();
+ if (attr == null) {
+ throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ }
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitEnd() {
+ checkEnd();
+ end = true;
+ super.visitEnd();
+ }
+
+ private void checkEnd() {
+ if (end) {
+ throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+ }
+ }
+}
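A minimal sketch of the field-level checks, again assuming a null delegate is permitted; the descriptor "Ljava/lang/Deprecated;" is just an example annotation descriptor:

import scala.tools.asm.FieldVisitor;
import scala.tools.asm.util.CheckFieldAdapter;

public class CheckFieldExample {
    public static void main(String[] args) {
        FieldVisitor fv = new CheckFieldAdapter(null); // checks only, no delegate
        fv.visitAnnotation("Ljava/lang/Deprecated;", true).visitEnd();
        fv.visitEnd();
        // After visitEnd, further calls are rejected:
        // fv.visitAttribute(null); // would throw IllegalStateException
    }
}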
diff --git a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
new file mode 100644
index 0000000000..7549765421
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
@@ -0,0 +1,1668 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.MethodNode;
+import scala.tools.asm.tree.analysis.Analyzer;
+import scala.tools.asm.tree.analysis.BasicValue;
+import scala.tools.asm.tree.analysis.BasicVerifier;
+
+/**
+ * A {@link MethodVisitor} that checks that its methods are properly used. More
+ * precisely this method adapter checks each instruction individually, i.e.,
+ * each visit method checks some preconditions based <i>only</i> on its
+ * arguments - such as the fact that the given opcode is correct for a given
+ * visit method. This adapter can also perform some basic data flow checks (more
+ * precisely those that can be performed without the full class hierarchy - see
+ * {@link scala.tools.asm.tree.analysis.BasicVerifier}). For instance in a
+ * method whose signature is <tt>void m ()</tt>, the invalid instruction
+ * IRETURN, or the invalid sequence IADD L2I will be detected if the data flow
+ * checks are enabled. These checks are enabled by using the
+ * {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)} constructor.
+ * They are not performed if any other constructor is used.
+ *
+ * @author Eric Bruneton
+ */
+public class CheckMethodAdapter extends MethodVisitor {
+
+ /**
+ * The class version number.
+ */
+ public int version;
+
+ /**
+ * <tt>true</tt> if the visitCode method has been called.
+ */
+ private boolean startCode;
+
+ /**
+ * <tt>true</tt> if the visitMaxs method has been called.
+ */
+ private boolean endCode;
+
+ /**
+ * <tt>true</tt> if the visitEnd method has been called.
+ */
+ private boolean endMethod;
+
+ /**
+ * Number of visited instructions.
+ */
+ private int insnCount;
+
+ /**
+ * The already visited labels. This map associates Label keys to pseudo
+ * code offsets (Integer values).
+ */
+ private final Map<Label, Integer> labels;
+
+ /**
+ * The labels used in this method. Every used label must be visited with
+ * visitLabel before the end of the method (i.e. should be in #labels).
+ */
+ private Set<Label> usedLabels;
+
+ /**
+ * The exception handler ranges. Each pair of list elements contains the
+ * start and end labels of an exception handler block.
+ */
+ private List<Label> handlers;
+
+ /**
+ * Code of the visit method to be used for each opcode.
+ */
+ private static final int[] TYPE;
+
+ /**
+ * The Label.status field.
+ */
+ private static Field labelStatusField;
+
+ static {
+ String s = "BBBBBBBBBBBBBBBBCCIAADDDDDAAAAAAAAAAAAAAAAAAAABBBBBBBBDD"
+ + "DDDAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB"
+ + "BBBBBBBBBBBBBBBBBBBJBBBBBBBBBBBBBBBBBBBBHHHHHHHHHHHHHHHHD"
+ + "KLBBBBBBFFFFGGGGAECEBBEEBBAMHHAA";
+ TYPE = new int[s.length()];
+ for (int i = 0; i < TYPE.length; ++i) {
+ TYPE[i] = s.charAt(i) - 'A' - 1;
+ }
+ }
+
+ // code to generate the above string
+ // public static void main (String[] args) {
+ // int[] TYPE = new int[] {
+ // 0, //NOP
+ // 0, //ACONST_NULL
+ // 0, //ICONST_M1
+ // 0, //ICONST_0
+ // 0, //ICONST_1
+ // 0, //ICONST_2
+ // 0, //ICONST_3
+ // 0, //ICONST_4
+ // 0, //ICONST_5
+ // 0, //LCONST_0
+ // 0, //LCONST_1
+ // 0, //FCONST_0
+ // 0, //FCONST_1
+ // 0, //FCONST_2
+ // 0, //DCONST_0
+ // 0, //DCONST_1
+ // 1, //BIPUSH
+ // 1, //SIPUSH
+ // 7, //LDC
+ // -1, //LDC_W
+ // -1, //LDC2_W
+ // 2, //ILOAD
+ // 2, //LLOAD
+ // 2, //FLOAD
+ // 2, //DLOAD
+ // 2, //ALOAD
+ // -1, //ILOAD_0
+ // -1, //ILOAD_1
+ // -1, //ILOAD_2
+ // -1, //ILOAD_3
+ // -1, //LLOAD_0
+ // -1, //LLOAD_1
+ // -1, //LLOAD_2
+ // -1, //LLOAD_3
+ // -1, //FLOAD_0
+ // -1, //FLOAD_1
+ // -1, //FLOAD_2
+ // -1, //FLOAD_3
+ // -1, //DLOAD_0
+ // -1, //DLOAD_1
+ // -1, //DLOAD_2
+ // -1, //DLOAD_3
+ // -1, //ALOAD_0
+ // -1, //ALOAD_1
+ // -1, //ALOAD_2
+ // -1, //ALOAD_3
+ // 0, //IALOAD
+ // 0, //LALOAD
+ // 0, //FALOAD
+ // 0, //DALOAD
+ // 0, //AALOAD
+ // 0, //BALOAD
+ // 0, //CALOAD
+ // 0, //SALOAD
+ // 2, //ISTORE
+ // 2, //LSTORE
+ // 2, //FSTORE
+ // 2, //DSTORE
+ // 2, //ASTORE
+ // -1, //ISTORE_0
+ // -1, //ISTORE_1
+ // -1, //ISTORE_2
+ // -1, //ISTORE_3
+ // -1, //LSTORE_0
+ // -1, //LSTORE_1
+ // -1, //LSTORE_2
+ // -1, //LSTORE_3
+ // -1, //FSTORE_0
+ // -1, //FSTORE_1
+ // -1, //FSTORE_2
+ // -1, //FSTORE_3
+ // -1, //DSTORE_0
+ // -1, //DSTORE_1
+ // -1, //DSTORE_2
+ // -1, //DSTORE_3
+ // -1, //ASTORE_0
+ // -1, //ASTORE_1
+ // -1, //ASTORE_2
+ // -1, //ASTORE_3
+ // 0, //IASTORE
+ // 0, //LASTORE
+ // 0, //FASTORE
+ // 0, //DASTORE
+ // 0, //AASTORE
+ // 0, //BASTORE
+ // 0, //CASTORE
+ // 0, //SASTORE
+ // 0, //POP
+ // 0, //POP2
+ // 0, //DUP
+ // 0, //DUP_X1
+ // 0, //DUP_X2
+ // 0, //DUP2
+ // 0, //DUP2_X1
+ // 0, //DUP2_X2
+ // 0, //SWAP
+ // 0, //IADD
+ // 0, //LADD
+ // 0, //FADD
+ // 0, //DADD
+ // 0, //ISUB
+ // 0, //LSUB
+ // 0, //FSUB
+ // 0, //DSUB
+ // 0, //IMUL
+ // 0, //LMUL
+ // 0, //FMUL
+ // 0, //DMUL
+ // 0, //IDIV
+ // 0, //LDIV
+ // 0, //FDIV
+ // 0, //DDIV
+ // 0, //IREM
+ // 0, //LREM
+ // 0, //FREM
+ // 0, //DREM
+ // 0, //INEG
+ // 0, //LNEG
+ // 0, //FNEG
+ // 0, //DNEG
+ // 0, //ISHL
+ // 0, //LSHL
+ // 0, //ISHR
+ // 0, //LSHR
+ // 0, //IUSHR
+ // 0, //LUSHR
+ // 0, //IAND
+ // 0, //LAND
+ // 0, //IOR
+ // 0, //LOR
+ // 0, //IXOR
+ // 0, //LXOR
+ // 8, //IINC
+ // 0, //I2L
+ // 0, //I2F
+ // 0, //I2D
+ // 0, //L2I
+ // 0, //L2F
+ // 0, //L2D
+ // 0, //F2I
+ // 0, //F2L
+ // 0, //F2D
+ // 0, //D2I
+ // 0, //D2L
+ // 0, //D2F
+ // 0, //I2B
+ // 0, //I2C
+ // 0, //I2S
+ // 0, //LCMP
+ // 0, //FCMPL
+ // 0, //FCMPG
+ // 0, //DCMPL
+ // 0, //DCMPG
+ // 6, //IFEQ
+ // 6, //IFNE
+ // 6, //IFLT
+ // 6, //IFGE
+ // 6, //IFGT
+ // 6, //IFLE
+ // 6, //IF_ICMPEQ
+ // 6, //IF_ICMPNE
+ // 6, //IF_ICMPLT
+ // 6, //IF_ICMPGE
+ // 6, //IF_ICMPGT
+ // 6, //IF_ICMPLE
+ // 6, //IF_ACMPEQ
+ // 6, //IF_ACMPNE
+ // 6, //GOTO
+ // 6, //JSR
+ // 2, //RET
+ // 9, //TABLESWITCH
+ // 10, //LOOKUPSWITCH
+ // 0, //IRETURN
+ // 0, //LRETURN
+ // 0, //FRETURN
+ // 0, //DRETURN
+ // 0, //ARETURN
+ // 0, //RETURN
+ // 4, //GETSTATIC
+ // 4, //PUTSTATIC
+ // 4, //GETFIELD
+ // 4, //PUTFIELD
+ // 5, //INVOKEVIRTUAL
+ // 5, //INVOKESPECIAL
+ // 5, //INVOKESTATIC
+ // 5, //INVOKEINTERFACE
+ // -1, //INVOKEDYNAMIC
+ // 3, //NEW
+ // 1, //NEWARRAY
+ // 3, //ANEWARRAY
+ // 0, //ARRAYLENGTH
+ // 0, //ATHROW
+ // 3, //CHECKCAST
+ // 3, //INSTANCEOF
+ // 0, //MONITORENTER
+ // 0, //MONITOREXIT
+ // -1, //WIDE
+ // 11, //MULTIANEWARRAY
+ // 6, //IFNULL
+ // 6, //IFNONNULL
+ // -1, //GOTO_W
+ // -1 //JSR_W
+ // };
+ // for (int i = 0; i < TYPE.length; ++i) {
+ // System.out.print((char)(TYPE[i] + 1 + 'A'));
+ // }
+ // System.out.println();
+ // }
+
+ /**
+ * Constructs a new {@link CheckMethodAdapter} object. This method adapter
+ * will not perform any data flow check (see
+ * {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)}).
+ * <i>Subclasses must not use this constructor</i>. Instead, they must use
+ * the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
+ *
+ * @param mv the method visitor to which this adapter must delegate calls.
+ */
+ public CheckMethodAdapter(final MethodVisitor mv) {
+ this(mv, new HashMap<Label, Integer>());
+ }
+
+ /**
+ * Constructs a new {@link CheckMethodAdapter} object. This method adapter
+ * will not perform any data flow check (see
+ * {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)}).
+ * <i>Subclasses must not use this constructor</i>. Instead, they must use
+ * the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
+ *
+ * @param mv the method visitor to which this adapter must delegate calls.
+ * @param labels a map of already visited labels (in other methods).
+ */
+ public CheckMethodAdapter(
+ final MethodVisitor mv,
+ final Map<Label, Integer> labels)
+ {
+ this(Opcodes.ASM4, mv, labels);
+ }
+
+ /**
+ * Constructs a new {@link CheckMethodAdapter} object. This method adapter
+ * will not perform any data flow check (see
+ * {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)}).
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param mv the method visitor to which this adapter must delegate calls.
+ * @param labels a map of already visited labels (in other methods).
+ */
+ protected CheckMethodAdapter(
+ final int api,
+ final MethodVisitor mv,
+ final Map<Label, Integer> labels)
+ {
+ super(api, mv);
+ this.labels = labels;
+ this.usedLabels = new HashSet<Label>();
+ this.handlers = new ArrayList<Label>();
+ }
+
+ /**
+ * Constructs a new {@link CheckMethodAdapter} object. This method adapter
+ * will perform basic data flow checks. For instance in a method whose
+ * signature is <tt>void m ()</tt>, the invalid instruction IRETURN, or the
+ * invalid sequence IADD L2I will be detected.
+ *
+ * @param access the method's access flags.
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type Type}).
+ * @param cmv the method visitor to which this adapter must delegate calls.
+ * @param labels a map of already visited labels (in other methods).
+ */
+ public CheckMethodAdapter(
+ final int access,
+ final String name,
+ final String desc,
+ final MethodVisitor cmv,
+ final Map<Label, Integer> labels)
+ {
+ this(new MethodNode(access, name, desc, null, null) {
+ @Override
+ public void visitEnd() {
+ Analyzer<BasicValue> a = new Analyzer<BasicValue>(new BasicVerifier());
+ try {
+ a.analyze("dummy", this);
+ } catch (Exception e) {
+ if (e instanceof IndexOutOfBoundsException
+ && maxLocals == 0 && maxStack == 0)
+ {
+ throw new RuntimeException("Data flow checking option requires valid, non zero maxLocals and maxStack values.");
+ }
+ e.printStackTrace();
+ StringWriter sw = new StringWriter();
+ PrintWriter pw = new PrintWriter(sw, true);
+ CheckClassAdapter.printAnalyzerResult(this, a, pw);
+ pw.close();
+ throw new RuntimeException(e.getMessage() + ' '
+ + sw.toString());
+ }
+ accept(cmv);
+ }
+ },
+ labels);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ checkEndMethod();
+ checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotationDefault() {
+ checkEndMethod();
+ return new CheckAnnotationAdapter(super.visitAnnotationDefault(), false);
+ }
+
+ @Override
+ public AnnotationVisitor visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ checkEndMethod();
+ checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitParameterAnnotation(parameter,
+ desc,
+ visible));
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ checkEndMethod();
+ if (attr == null) {
+ throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ }
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitCode() {
+ startCode = true;
+ super.visitCode();
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ int mLocal;
+ int mStack;
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ mLocal = Integer.MAX_VALUE;
+ mStack = Integer.MAX_VALUE;
+ break;
+
+ case Opcodes.F_SAME:
+ mLocal = 0;
+ mStack = 0;
+ break;
+
+ case Opcodes.F_SAME1:
+ mLocal = 0;
+ mStack = 1;
+ break;
+
+ case Opcodes.F_APPEND:
+ case Opcodes.F_CHOP:
+ mLocal = 3;
+ mStack = 0;
+ break;
+
+ default:
+ throw new IllegalArgumentException("Invalid frame type " + type);
+ }
+
+ if (nLocal > mLocal) {
+ throw new IllegalArgumentException("Invalid nLocal=" + nLocal
+ + " for frame type " + type);
+ }
+ if (nStack > mStack) {
+ throw new IllegalArgumentException("Invalid nStack=" + nStack
+ + " for frame type " + type);
+ }
+
+ if (type != Opcodes.F_CHOP) {
+ if (nLocal > 0 && (local == null || local.length < nLocal)) {
+ throw new IllegalArgumentException("Array local[] is shorter than nLocal");
+ }
+ for (int i = 0; i < nLocal; ++i) {
+ checkFrameValue(local[i]);
+ }
+ }
+ if (nStack > 0 && (stack == null || stack.length < nStack)) {
+ throw new IllegalArgumentException("Array stack[] is shorter than nStack");
+ }
+ for (int i = 0; i < nStack; ++i) {
+ checkFrameValue(stack[i]);
+ }
+
+ super.visitFrame(type, nLocal, local, nStack, stack);
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ checkStartCode();
+ checkEndCode();
+ checkOpcode(opcode, 0);
+ super.visitInsn(opcode);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ checkStartCode();
+ checkEndCode();
+ checkOpcode(opcode, 1);
+ switch (opcode) {
+ case Opcodes.BIPUSH:
+ checkSignedByte(operand, "Invalid operand");
+ break;
+ case Opcodes.SIPUSH:
+ checkSignedShort(operand, "Invalid operand");
+ break;
+ // case Constants.NEWARRAY:
+ default:
+ if (operand < Opcodes.T_BOOLEAN || operand > Opcodes.T_LONG) {
+ throw new IllegalArgumentException("Invalid operand (must be an array type code T_...): "
+ + operand);
+ }
+ }
+ super.visitIntInsn(opcode, operand);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ checkStartCode();
+ checkEndCode();
+ checkOpcode(opcode, 2);
+ checkUnsignedShort(var, "Invalid variable index");
+ super.visitVarInsn(opcode, var);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ checkStartCode();
+ checkEndCode();
+ checkOpcode(opcode, 3);
+ checkInternalName(type, "type");
+ if (opcode == Opcodes.NEW && type.charAt(0) == '[') {
+ throw new IllegalArgumentException("NEW cannot be used to create arrays: "
+ + type);
+ }
+ super.visitTypeInsn(opcode, type);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ checkStartCode();
+ checkEndCode();
+ checkOpcode(opcode, 4);
+ checkInternalName(owner, "owner");
+ checkUnqualifiedName(version, name, "name");
+ checkDesc(desc, false);
+ super.visitFieldInsn(opcode, owner, name, desc);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ checkStartCode();
+ checkEndCode();
+ checkOpcode(opcode, 5);
+ checkMethodIdentifier(version, name, "name");
+ checkInternalName(owner, "owner");
+ checkMethodDesc(desc);
+ super.visitMethodInsn(opcode, owner, name, desc);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ checkStartCode();
+ checkEndCode();
+ checkMethodIdentifier(version, name, "name");
+ checkMethodDesc(desc);
+ if (bsm.getTag() != Opcodes.H_INVOKESTATIC
+ && bsm.getTag() != Opcodes.H_NEWINVOKESPECIAL)
+ {
+ throw new IllegalArgumentException("invalid handle tag "
+ + bsm.getTag());
+ }
+ for (int i = 0; i < bsmArgs.length; i++) {
+ checkLDCConstant(bsmArgs[i]);
+ }
+ super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ checkStartCode();
+ checkEndCode();
+ checkOpcode(opcode, 6);
+ checkLabel(label, false, "label");
+ checkNonDebugLabel(label);
+ super.visitJumpInsn(opcode, label);
+ usedLabels.add(label);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ checkStartCode();
+ checkEndCode();
+ checkLabel(label, false, "label");
+ if (labels.get(label) != null) {
+ throw new IllegalArgumentException("Already visited label");
+ }
+ labels.put(label, new Integer(insnCount));
+ super.visitLabel(label);
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ checkStartCode();
+ checkEndCode();
+ checkLDCConstant(cst);
+ super.visitLdcInsn(cst);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ checkStartCode();
+ checkEndCode();
+ checkUnsignedShort(var, "Invalid variable index");
+ checkSignedShort(increment, "Invalid increment");
+ super.visitIincInsn(var, increment);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ checkStartCode();
+ checkEndCode();
+ if (max < min) {
+ throw new IllegalArgumentException("Max = " + max
+ + " must be greater than or equal to min = " + min);
+ }
+ checkLabel(dflt, false, "default label");
+ checkNonDebugLabel(dflt);
+ if (labels == null || labels.length != max - min + 1) {
+ throw new IllegalArgumentException("There must be max - min + 1 labels");
+ }
+ for (int i = 0; i < labels.length; ++i) {
+ checkLabel(labels[i], false, "label at index " + i);
+ checkNonDebugLabel(labels[i]);
+ }
+ super.visitTableSwitchInsn(min, max, dflt, labels);
+ for (int i = 0; i < labels.length; ++i) {
+ usedLabels.add(labels[i]);
+ }
+ ++insnCount;
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ checkEndCode();
+ checkStartCode();
+ checkLabel(dflt, false, "default label");
+ checkNonDebugLabel(dflt);
+ if (keys == null || labels == null || keys.length != labels.length) {
+ throw new IllegalArgumentException("There must be the same number of keys and labels");
+ }
+ for (int i = 0; i < labels.length; ++i) {
+ checkLabel(labels[i], false, "label at index " + i);
+ checkNonDebugLabel(labels[i]);
+ }
+ super.visitLookupSwitchInsn(dflt, keys, labels);
+ usedLabels.add(dflt);
+ for (int i = 0; i < labels.length; ++i) {
+ usedLabels.add(labels[i]);
+ }
+ ++insnCount;
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ checkStartCode();
+ checkEndCode();
+ checkDesc(desc, false);
+ if (desc.charAt(0) != '[') {
+ throw new IllegalArgumentException("Invalid descriptor (must be an array type descriptor): "
+ + desc);
+ }
+ if (dims < 1) {
+ throw new IllegalArgumentException("Invalid dimensions (must be greater than 0): "
+ + dims);
+ }
+ if (dims > desc.lastIndexOf('[') + 1) {
+ throw new IllegalArgumentException("Invalid dimensions (must not be greater than dims(desc)): "
+ + dims);
+ }
+ super.visitMultiANewArrayInsn(desc, dims);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ checkStartCode();
+ checkEndCode();
+ checkLabel(start, false, "start label");
+ checkLabel(end, false, "end label");
+ checkLabel(handler, false, "handler label");
+ checkNonDebugLabel(start);
+ checkNonDebugLabel(end);
+ checkNonDebugLabel(handler);
+ if (labels.get(start) != null || labels.get(end) != null
+ || labels.get(handler) != null)
+ {
+ throw new IllegalStateException("Try catch blocks must be visited before their labels");
+ }
+ if (type != null) {
+ checkInternalName(type, "type");
+ }
+ super.visitTryCatchBlock(start, end, handler, type);
+ handlers.add(start);
+ handlers.add(end);
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ checkStartCode();
+ checkEndCode();
+ checkUnqualifiedName(version, name, "name");
+ checkDesc(desc, false);
+ checkLabel(start, true, "start label");
+ checkLabel(end, true, "end label");
+ checkUnsignedShort(index, "Invalid variable index");
+ int s = labels.get(start).intValue();
+ int e = labels.get(end).intValue();
+ if (e < s) {
+ throw new IllegalArgumentException("Invalid start and end labels (end must be greater than start)");
+ }
+ super.visitLocalVariable(name, desc, signature, start, end, index);
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ checkStartCode();
+ checkEndCode();
+ checkUnsignedShort(line, "Invalid line number");
+ checkLabel(start, true, "start label");
+ super.visitLineNumber(line, start);
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ checkStartCode();
+ checkEndCode();
+ endCode = true;
+ for (Label l : usedLabels) {
+ if (labels.get(l) == null) {
+ throw new IllegalStateException("Undefined label used");
+ }
+ }
+ for (int i = 0; i < handlers.size(); ) {
+ Integer start = labels.get(handlers.get(i++));
+ Integer end = labels.get(handlers.get(i++));
+ if (start == null || end == null) {
+ throw new IllegalStateException("Undefined try catch block labels");
+ }
+ if (end.intValue() <= start.intValue()) {
+ throw new IllegalStateException("Emty try catch block handler range");
+ }
+ }
+ checkUnsignedShort(maxStack, "Invalid max stack");
+ checkUnsignedShort(maxLocals, "Invalid max locals");
+ super.visitMaxs(maxStack, maxLocals);
+ }
+
+ @Override
+ public void visitEnd() {
+ checkEndMethod();
+ endMethod = true;
+ super.visitEnd();
+ }
+
+ // -------------------------------------------------------------------------
+
+ /**
+ * Checks that the visitCode method has been called.
+ */
+ void checkStartCode() {
+ if (!startCode) {
+ throw new IllegalStateException("Cannot visit instructions before visitCode has been called.");
+ }
+ }
+
+ /**
+ * Checks that the visitMaxs method has not been called.
+ */
+ void checkEndCode() {
+ if (endCode) {
+ throw new IllegalStateException("Cannot visit instructions after visitMaxs has been called.");
+ }
+ }
+
+ /**
+ * Checks that the visitEnd method has not been called.
+ */
+ void checkEndMethod() {
+ if (endMethod) {
+ throw new IllegalStateException("Cannot visit elements after visitEnd has been called.");
+ }
+ }
+
+ /**
+ * Checks a stack frame value.
+ *
+ * @param value the value to be checked.
+ */
+ void checkFrameValue(final Object value) {
+ if (value == Opcodes.TOP || value == Opcodes.INTEGER
+ || value == Opcodes.FLOAT || value == Opcodes.LONG
+ || value == Opcodes.DOUBLE || value == Opcodes.NULL
+ || value == Opcodes.UNINITIALIZED_THIS)
+ {
+ return;
+ }
+ if (value instanceof String) {
+ checkInternalName((String) value, "Invalid stack frame value");
+ return;
+ }
+ if (!(value instanceof Label)) {
+ throw new IllegalArgumentException("Invalid stack frame value: "
+ + value);
+ } else {
+ usedLabels.add((Label) value);
+ }
+ }
+
+ /**
+ * Checks that the type of the given opcode is equal to the given type.
+ *
+ * @param opcode the opcode to be checked.
+ * @param type the expected opcode type.
+ */
+ static void checkOpcode(final int opcode, final int type) {
+ if (opcode < 0 || opcode > 199 || TYPE[opcode] != type) {
+ throw new IllegalArgumentException("Invalid opcode: " + opcode);
+ }
+ }
+
+ /**
+ * Checks that the given value is a signed byte.
+ *
+ * @param value the value to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkSignedByte(final int value, final String msg) {
+ if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
+ throw new IllegalArgumentException(msg
+ + " (must be a signed byte): " + value);
+ }
+ }
+
+ /**
+ * Checks that the given value is a signed short.
+ *
+ * @param value the value to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkSignedShort(final int value, final String msg) {
+ if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
+ throw new IllegalArgumentException(msg
+ + " (must be a signed short): " + value);
+ }
+ }
+
+ /**
+ * Checks that the given value is an unsigned short.
+ *
+ * @param value the value to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkUnsignedShort(final int value, final String msg) {
+ if (value < 0 || value > 65535) {
+ throw new IllegalArgumentException(msg
+ + " (must be an unsigned short): " + value);
+ }
+ }
+
+ /**
+ * Checks that the given value is an {@link Integer}, a {@link Float}, a
+ * {@link Long}, a {@link Double} or a {@link String}.
+ *
+ * @param cst the value to be checked.
+ */
+ static void checkConstant(final Object cst) {
+ if (!(cst instanceof Integer) && !(cst instanceof Float)
+ && !(cst instanceof Long) && !(cst instanceof Double)
+ && !(cst instanceof String))
+ {
+ throw new IllegalArgumentException("Invalid constant: " + cst);
+ }
+ }
+
+ void checkLDCConstant(final Object cst) {
+ if (cst instanceof Type) {
+ int s = ((Type) cst).getSort();
+ if (s != Type.OBJECT && s != Type.ARRAY && s != Type.METHOD) {
+ throw new IllegalArgumentException("Illegal LDC constant value");
+ }
+ if (s != Type.METHOD && (version & 0xFFFF) < Opcodes.V1_5) {
+ throw new IllegalArgumentException("ldc of a constant class requires at least version 1.5");
+ }
+ if (s == Type.METHOD && (version & 0xFFFF) < Opcodes.V1_7) {
+ throw new IllegalArgumentException("ldc of a method type requires at least version 1.7");
+ }
+ } else if (cst instanceof Handle) {
+ if ((version & 0xFFFF) < Opcodes.V1_7) {
+ throw new IllegalArgumentException("ldc of a handle requires at least version 1.7");
+ }
+ int tag = ((Handle) cst).getTag();
+ if (tag < Opcodes.H_GETFIELD || tag > Opcodes.H_INVOKEINTERFACE) {
+ throw new IllegalArgumentException("invalid handle tag "
+ + tag);
+ }
+ } else {
+ checkConstant(cst);
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid unqualified name.
+ *
+ * @param version the class version.
+ * @param name the string to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkUnqualifiedName(
+ int version,
+ final String name,
+ final String msg)
+ {
+ if ((version & 0xFFFF) < Opcodes.V1_5) {
+ checkIdentifier(name, msg);
+ } else {
+ for (int i = 0; i < name.length(); ++i) {
+ if (".;[/".indexOf(name.charAt(i)) != -1) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be a valid unqualified name): " + name);
+ }
+ }
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid Java identifier.
+ *
+ * @param name the string to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkIdentifier(final String name, final String msg) {
+ checkIdentifier(name, 0, -1, msg);
+ }
+
+ /**
+ * Checks that the given substring is a valid Java identifier.
+ *
+ * @param name the string to be checked.
+ * @param start index of the first character of the identifier (inclusive).
+ * @param end index of the last character of the identifier (exclusive). -1
+ * is equivalent to <tt>name.length()</tt> if name is not
+ * <tt>null</tt>.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkIdentifier(
+ final String name,
+ final int start,
+ final int end,
+ final String msg)
+ {
+ if (name == null || (end == -1 ? name.length() <= start : end <= start))
+ {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must not be null or empty)");
+ }
+ if (!Character.isJavaIdentifierStart(name.charAt(start))) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be a valid Java identifier): " + name);
+ }
+ int max = end == -1 ? name.length() : end;
+ for (int i = start + 1; i < max; ++i) {
+ if (!Character.isJavaIdentifierPart(name.charAt(i))) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be a valid Java identifier): " + name);
+ }
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid Java identifier or is equal to
+ * '&lt;init&gt;' or '&lt;clinit&gt;'.
+ *
+ * @param version the class version.
+ * @param name the string to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkMethodIdentifier(
+ int version,
+ final String name,
+ final String msg)
+ {
+ if (name == null || name.length() == 0) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must not be null or empty)");
+ }
+ if ("<init>".equals(name) || "<clinit>".equals(name)) {
+ return;
+ }
+ if ((version & 0xFFFF) >= Opcodes.V1_5) {
+ for (int i = 0; i < name.length(); ++i) {
+ if (".;[/<>".indexOf(name.charAt(i)) != -1) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be a valid unqualified name): " + name);
+ }
+ }
+ return;
+ }
+ if (!Character.isJavaIdentifierStart(name.charAt(0))) {
+ throw new IllegalArgumentException("Invalid "
+ + msg
+ + " (must be a '<init>', '<clinit>' or a valid Java identifier): "
+ + name);
+ }
+ for (int i = 1; i < name.length(); ++i) {
+ if (!Character.isJavaIdentifierPart(name.charAt(i))) {
+ throw new IllegalArgumentException("Invalid "
+ + msg
+ + " (must be '<init>' or '<clinit>' or a valid Java identifier): "
+ + name);
+ }
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid internal class name.
+ *
+ * @param name the string to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkInternalName(final String name, final String msg) {
+ if (name == null || name.length() == 0) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must not be null or empty)");
+ }
+ if (name.charAt(0) == '[') {
+ checkDesc(name, false);
+ } else {
+ checkInternalName(name, 0, -1, msg);
+ }
+ }
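+
+ // Illustrative usage (not part of the original ASM sources): internal names
+ // use '/' as package separator, so
+ //
+ //   checkInternalName("scala/tools/asm/Label", "owner");   // accepted
+ //   checkInternalName("[Ljava/lang/String;", "owner");     // accepted (array descriptor)
+ //   checkInternalName("scala.tools.asm.Label", "owner");   // throws IllegalArgumentException
+ //
+ // The dotted form is rejected because '.' is not a valid character inside a
+ // segment of an internal class name.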
+
+ /**
+ * Checks that the given substring is a valid internal class name.
+ *
+ * @param name the string to be checked.
+ * @param start index of the first character of the identifier (inclusive).
+ * @param end index of the last character of the identifier (exclusive). -1
+ * is equivalent to <tt>name.length()</tt> if name is not
+ * <tt>null</tt>.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkInternalName(
+ final String name,
+ final int start,
+ final int end,
+ final String msg)
+ {
+ int max = end == -1 ? name.length() : end;
+ try {
+ int begin = start;
+ int slash;
+ do {
+ slash = name.indexOf('/', begin + 1);
+ if (slash == -1 || slash > max) {
+ slash = max;
+ }
+ checkIdentifier(name, begin, slash, null);
+ begin = slash + 1;
+ } while (slash != max);
+ } catch (IllegalArgumentException _) {
+ throw new IllegalArgumentException("Invalid "
+ + msg
+ + " (must be a fully qualified class name in internal form): "
+ + name);
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid type descriptor.
+ *
+ * @param desc the string to be checked.
+ * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+ */
+ static void checkDesc(final String desc, final boolean canBeVoid) {
+ int end = checkDesc(desc, 0, canBeVoid);
+ if (end != desc.length()) {
+ throw new IllegalArgumentException("Invalid descriptor: " + desc);
+ }
+ }
+
+ /**
+ * Checks that the given substring is a valid type descriptor.
+ *
+ * @param desc the string to be checked.
+ * @param start index of the first character of the identifier (inclusive).
+ * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+ * @return the index of the last character of the type descriptor, plus one.
+ */
+ static int checkDesc(
+ final String desc,
+ final int start,
+ final boolean canBeVoid)
+ {
+ if (desc == null || start >= desc.length()) {
+ throw new IllegalArgumentException("Invalid type descriptor (must not be null or empty)");
+ }
+ int index;
+ switch (desc.charAt(start)) {
+ case 'V':
+ if (canBeVoid) {
+ return start + 1;
+ } else {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ return start + 1;
+ case '[':
+ index = start + 1;
+ while (index < desc.length() && desc.charAt(index) == '[') {
+ ++index;
+ }
+ if (index < desc.length()) {
+ return checkDesc(desc, index, false);
+ } else {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ case 'L':
+ index = desc.indexOf(';', start);
+ if (index == -1 || index - start < 2) {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ try {
+ checkInternalName(desc, start + 1, index, null);
+ } catch (IllegalArgumentException _) {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ return index + 1;
+ default:
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid method descriptor.
+ *
+ * @param desc the string to be checked.
+ */
+ static void checkMethodDesc(final String desc) {
+ if (desc == null || desc.length() == 0) {
+ throw new IllegalArgumentException("Invalid method descriptor (must not be null or empty)");
+ }
+ if (desc.charAt(0) != '(' || desc.length() < 3) {
+ throw new IllegalArgumentException("Invalid descriptor: " + desc);
+ }
+ int start = 1;
+ if (desc.charAt(start) != ')') {
+ do {
+ if (desc.charAt(start) == 'V') {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ start = checkDesc(desc, start, false);
+ } while (start < desc.length() && desc.charAt(start) != ')');
+ }
+ start = checkDesc(desc, start + 1, true);
+ if (start != desc.length()) {
+ throw new IllegalArgumentException("Invalid descriptor: " + desc);
+ }
+ }
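+
+ // Illustrative usage (not part of the original ASM sources):
+ //
+ //   checkMethodDesc("()V");                      // accepted: no arguments, void return
+ //   checkMethodDesc("(I[Ljava/lang/String;)V");  // accepted: int and String[] arguments
+ //   checkMethodDesc("(V)I");                     // rejected: void is not a valid argument type
+ //
+ // Rejection is signalled by an IllegalArgumentException naming the offending
+ // descriptor.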
+
+ /**
+ * Checks a class signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ static void checkClassSignature(final String signature) {
+ // ClassSignature:
+ // FormalTypeParameters? ClassTypeSignature ClassTypeSignature*
+
+ int pos = 0;
+ if (getChar(signature, 0) == '<') {
+ pos = checkFormalTypeParameters(signature, pos);
+ }
+ pos = checkClassTypeSignature(signature, pos);
+ while (getChar(signature, pos) == 'L') {
+ pos = checkClassTypeSignature(signature, pos);
+ }
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
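+
+ // Illustrative usage (not part of the original ASM sources): the signature of
+ // a hypothetical "class Box<T extends Comparable<T>>" is accepted,
+ //
+ //   checkClassSignature("<T::Ljava/lang/Comparable<TT;>;>Ljava/lang/Object;");
+ //
+ // while a malformed signature makes this method throw an
+ // IllegalArgumentException reporting the index at which parsing failed.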
+
+ /**
+ * Checks a method signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ static void checkMethodSignature(final String signature) {
+ // MethodTypeSignature:
+ // FormalTypeParameters? ( TypeSignature* ) ( TypeSignature | V ) (
+ // ^ClassTypeSignature | ^TypeVariableSignature )*
+
+ int pos = 0;
+ if (getChar(signature, 0) == '<') {
+ pos = checkFormalTypeParameters(signature, pos);
+ }
+ pos = checkChar('(', signature, pos);
+ while ("ZCBSIFJDL[T".indexOf(getChar(signature, pos)) != -1) {
+ pos = checkTypeSignature(signature, pos);
+ }
+ pos = checkChar(')', signature, pos);
+ if (getChar(signature, pos) == 'V') {
+ ++pos;
+ } else {
+ pos = checkTypeSignature(signature, pos);
+ }
+ while (getChar(signature, pos) == '^') {
+ ++pos;
+ if (getChar(signature, pos) == 'L') {
+ pos = checkClassTypeSignature(signature, pos);
+ } else {
+ pos = checkTypeVariableSignature(signature, pos);
+ }
+ }
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
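+
+ // Illustrative usage (not part of the original ASM sources): the signature of
+ // a hypothetical generic method "<T> T identity(T t)" is accepted,
+ //
+ //   checkMethodSignature("<T:Ljava/lang/Object;>(TT;)TT;");
+ //
+ // whereas a missing ')' or ';' is reported together with the failing index.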
+
+ /**
+ * Checks a field signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ static void checkFieldSignature(final String signature) {
+ int pos = checkFieldTypeSignature(signature, 0);
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks the formal type parameters of a class or method signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFormalTypeParameters(final String signature, int pos)
+ {
+ // FormalTypeParameters:
+ // < FormalTypeParameter+ >
+
+ pos = checkChar('<', signature, pos);
+ pos = checkFormalTypeParameter(signature, pos);
+ while (getChar(signature, pos) != '>') {
+ pos = checkFormalTypeParameter(signature, pos);
+ }
+ return pos + 1;
+ }
+
+ /**
+ * Checks a formal type parameter of a class or method signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFormalTypeParameter(final String signature, int pos)
+ {
+ // FormalTypeParameter:
+ // Identifier : FieldTypeSignature? (: FieldTypeSignature)*
+
+ pos = checkIdentifier(signature, pos);
+ pos = checkChar(':', signature, pos);
+ if ("L[T".indexOf(getChar(signature, pos)) != -1) {
+ pos = checkFieldTypeSignature(signature, pos);
+ }
+ while (getChar(signature, pos) == ':') {
+ pos = checkFieldTypeSignature(signature, pos + 1);
+ }
+ return pos;
+ }
+
+ /**
+ * Checks a field type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFieldTypeSignature(final String signature, int pos)
+ {
+ // FieldTypeSignature:
+ // ClassTypeSignature | ArrayTypeSignature | TypeVariableSignature
+ //
+ // ArrayTypeSignature:
+ // [ TypeSignature
+
+ switch (getChar(signature, pos)) {
+ case 'L':
+ return checkClassTypeSignature(signature, pos);
+ case '[':
+ return checkTypeSignature(signature, pos + 1);
+ default:
+ return checkTypeVariableSignature(signature, pos);
+ }
+ }
+
+ /**
+ * Checks a class type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkClassTypeSignature(final String signature, int pos)
+ {
+ // ClassTypeSignature:
+ // L Identifier ( / Identifier )* TypeArguments? ( . Identifier
+ // TypeArguments? )* ;
+
+ pos = checkChar('L', signature, pos);
+ pos = checkIdentifier(signature, pos);
+ while (getChar(signature, pos) == '/') {
+ pos = checkIdentifier(signature, pos + 1);
+ }
+ if (getChar(signature, pos) == '<') {
+ pos = checkTypeArguments(signature, pos);
+ }
+ while (getChar(signature, pos) == '.') {
+ pos = checkIdentifier(signature, pos + 1);
+ if (getChar(signature, pos) == '<') {
+ pos = checkTypeArguments(signature, pos);
+ }
+ }
+ return checkChar(';', signature, pos);
+ }
+
+ /**
+ * Checks the type arguments in a class type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeArguments(final String signature, int pos) {
+ // TypeArguments:
+ // < TypeArgument+ >
+
+ pos = checkChar('<', signature, pos);
+ pos = checkTypeArgument(signature, pos);
+ while (getChar(signature, pos) != '>') {
+ pos = checkTypeArgument(signature, pos);
+ }
+ return pos + 1;
+ }
+
+ /**
+ * Checks a type argument in a class type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeArgument(final String signature, int pos) {
+ // TypeArgument:
+ // * | ( ( + | - )? FieldTypeSignature )
+
+ char c = getChar(signature, pos);
+ if (c == '*') {
+ return pos + 1;
+ } else if (c == '+' || c == '-') {
+ pos++;
+ }
+ return checkFieldTypeSignature(signature, pos);
+ }
+
+ /**
+ * Checks a type variable signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeVariableSignature(
+ final String signature,
+ int pos)
+ {
+ // TypeVariableSignature:
+ // T Identifier ;
+
+ pos = checkChar('T', signature, pos);
+ pos = checkIdentifier(signature, pos);
+ return checkChar(';', signature, pos);
+ }
+
+ /**
+ * Checks a type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeSignature(final String signature, int pos) {
+ // TypeSignature:
+ // Z | C | B | S | I | F | J | D | FieldTypeSignature
+
+ switch (getChar(signature, pos)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ return pos + 1;
+ default:
+ return checkFieldTypeSignature(signature, pos);
+ }
+ }
+
+ /**
+ * Checks an identifier.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkIdentifier(final String signature, int pos) {
+ if (!Character.isJavaIdentifierStart(getChar(signature, pos))) {
+ throw new IllegalArgumentException(signature
+ + ": identifier expected at index " + pos);
+ }
+ ++pos;
+ while (Character.isJavaIdentifierPart(getChar(signature, pos))) {
+ ++pos;
+ }
+ return pos;
+ }
+
+ /**
+ * Checks a single character.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkChar(final char c, final String signature, int pos)
+ {
+ if (getChar(signature, pos) == c) {
+ return pos + 1;
+ }
+ throw new IllegalArgumentException(signature + ": '" + c
+ + "' expected at index " + pos);
+ }
+
+ /**
+ * Returns the signature character at the given index.
+ *
+ * @param signature a signature.
+ * @param pos an index in signature.
+ * @return the character at the given index, or 0 if there is no such
+ * character.
+ */
+ private static char getChar(final String signature, int pos) {
+ return pos < signature.length() ? signature.charAt(pos) : (char) 0;
+ }
+
+ /**
+ * Checks that the given label is not null. This method can also check that
+ * the label has been visited.
+ *
+ * @param label the label to be checked.
+ * @param checkVisited <tt>true</tt> to check that the label has been
+ * visited.
+ * @param msg a message to be used in case of error.
+ */
+ void checkLabel(
+ final Label label,
+ final boolean checkVisited,
+ final String msg)
+ {
+ if (label == null) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must not be null)");
+ }
+ if (checkVisited && labels.get(label) == null) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be visited first)");
+ }
+ }
+
+ /**
+ * Checks that the given label is not a label used only for debug purposes.
+ *
+ * @param label the label to be checked.
+ */
+ private static void checkNonDebugLabel(final Label label) {
+ Field f = getLabelStatusField();
+ int status = 0;
+ try {
+ status = f == null ? 0 : ((Integer) f.get(label)).intValue();
+ } catch (IllegalAccessException e) {
+ throw new Error("Internal error");
+ }
+ if ((status & 0x01) != 0) {
+ throw new IllegalArgumentException("Labels used for debug info cannot be reused for control flow");
+ }
+ }
+
+ /**
+ * Returns the Field object corresponding to the Label.status field.
+ *
+ * @return the Field object corresponding to the Label.status field.
+ */
+ private static Field getLabelStatusField() {
+ if (labelStatusField == null) {
+ labelStatusField = getLabelField("a");
+ if (labelStatusField == null) {
+ labelStatusField = getLabelField("status");
+ }
+ }
+ return labelStatusField;
+ }
+
+ /**
+ * Returns the field of the Label class whose name is given.
+ *
+ * @param name a field name.
+ * @return the field of the Label class whose name is given, or null.
+ */
+ private static Field getLabelField(final String name) {
+ try {
+ Field f = Label.class.getDeclaredField(name);
+ f.setAccessible(true);
+ return f;
+ } catch (NoSuchFieldException e) {
+ return null;
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
new file mode 100644
index 0000000000..3a6c3e780f
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
@@ -0,0 +1,329 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.signature.SignatureVisitor;
+
+/**
+ * A {@link SignatureVisitor} that checks that its methods are properly used.
+ *
+ * @author Eric Bruneton
+ */
+public class CheckSignatureAdapter extends SignatureVisitor {
+
+ /**
+ * Type to be used to check class signatures. See
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ */
+ public static final int CLASS_SIGNATURE = 0;
+
+ /**
+ * Type to be used to check method signatures. See
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ */
+ public static final int METHOD_SIGNATURE = 1;
+
+ /**
+ * Type to be used to check type signatures. See
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ */
+ public static final int TYPE_SIGNATURE = 2;
+
+ private static final int EMPTY = 1;
+
+ private static final int FORMAL = 2;
+
+ private static final int BOUND = 4;
+
+ private static final int SUPER = 8;
+
+ private static final int PARAM = 16;
+
+ private static final int RETURN = 32;
+
+ private static final int SIMPLE_TYPE = 64;
+
+ private static final int CLASS_TYPE = 128;
+
+ private static final int END = 256;
+
+ /**
+ * Type of the signature to be checked.
+ */
+ private final int type;
+
+ /**
+ * State of the automaton used to check the order of method calls.
+ */
+ private int state;
+
+ /**
+ * <tt>true</tt> if the checked type signature can be 'V'.
+ */
+ private boolean canBeVoid;
+
+ /**
+ * The visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
+ */
+ private final SignatureVisitor sv;
+
+ /**
+ * Creates a new {@link CheckSignatureAdapter} object. <i>Subclasses must
+ * not use this constructor</i>. Instead, they must use the
+ * {@link #CheckSignatureAdapter(int, int, SignatureVisitor)} version.
+ *
+ * @param type the type of signature to be checked. See
+ * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+ * {@link #TYPE_SIGNATURE}.
+ * @param sv the visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
+ */
+ public CheckSignatureAdapter(final int type, final SignatureVisitor sv) {
+ this(Opcodes.ASM4, type, sv);
+ }
+
+ /**
+ * Creates a new {@link CheckSignatureAdapter} object.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param type the type of signature to be checked. See
+ * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+ * {@link #TYPE_SIGNATURE}.
+ * @param sv the visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
+ */
+ protected CheckSignatureAdapter(
+ final int api,
+ final int type,
+ final SignatureVisitor sv)
+ {
+ super(api);
+ this.type = type;
+ this.state = EMPTY;
+ this.sv = sv;
+ }
+
+ // class and method signatures
+
+ @Override
+ public void visitFormalTypeParameter(final String name) {
+ if (type == TYPE_SIGNATURE
+ || (state != EMPTY && state != FORMAL && state != BOUND))
+ {
+ throw new IllegalStateException();
+ }
+ CheckMethodAdapter.checkIdentifier(name, "formal type parameter");
+ state = FORMAL;
+ if (sv != null) {
+ sv.visitFormalTypeParameter(name);
+ }
+ }
+
+ @Override
+ public SignatureVisitor visitClassBound() {
+ if (state != FORMAL) {
+ throw new IllegalStateException();
+ }
+ state = BOUND;
+ SignatureVisitor v = sv == null ? null : sv.visitClassBound();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public SignatureVisitor visitInterfaceBound() {
+ if (state != FORMAL && state != BOUND) {
+ throw new IllegalArgumentException();
+ }
+ SignatureVisitor v = sv == null ? null : sv.visitInterfaceBound();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ // class signatures
+
+ @Override
+ public SignatureVisitor visitSuperclass() {
+ if (type != CLASS_SIGNATURE || (state & (EMPTY | FORMAL | BOUND)) == 0)
+ {
+ throw new IllegalArgumentException();
+ }
+ state = SUPER;
+ SignatureVisitor v = sv == null ? null : sv.visitSuperclass();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public SignatureVisitor visitInterface() {
+ if (state != SUPER) {
+ throw new IllegalStateException();
+ }
+ SignatureVisitor v = sv == null ? null : sv.visitInterface();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ // method signatures
+
+ @Override
+ public SignatureVisitor visitParameterType() {
+ if (type != METHOD_SIGNATURE
+ || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
+ {
+ throw new IllegalArgumentException();
+ }
+ state = PARAM;
+ SignatureVisitor v = sv == null ? null : sv.visitParameterType();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public SignatureVisitor visitReturnType() {
+ if (type != METHOD_SIGNATURE
+ || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
+ {
+ throw new IllegalArgumentException();
+ }
+ state = RETURN;
+ SignatureVisitor v = sv == null ? null : sv.visitReturnType();
+ CheckSignatureAdapter cv = new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ cv.canBeVoid = true;
+ return cv;
+ }
+
+ @Override
+ public SignatureVisitor visitExceptionType() {
+ if (state != RETURN) {
+ throw new IllegalStateException();
+ }
+ SignatureVisitor v = sv == null ? null : sv.visitExceptionType();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ // type signatures
+
+ @Override
+ public void visitBaseType(final char descriptor) {
+ if (type != TYPE_SIGNATURE || state != EMPTY) {
+ throw new IllegalStateException();
+ }
+ if (descriptor == 'V') {
+ if (!canBeVoid) {
+ throw new IllegalArgumentException();
+ }
+ } else {
+ if ("ZCBSIFJD".indexOf(descriptor) == -1) {
+ throw new IllegalArgumentException();
+ }
+ }
+ state = SIMPLE_TYPE;
+ if (sv != null) {
+ sv.visitBaseType(descriptor);
+ }
+ }
+
+ @Override
+ public void visitTypeVariable(final String name) {
+ if (type != TYPE_SIGNATURE || state != EMPTY) {
+ throw new IllegalStateException();
+ }
+ CheckMethodAdapter.checkIdentifier(name, "type variable");
+ state = SIMPLE_TYPE;
+ if (sv != null) {
+ sv.visitTypeVariable(name);
+ }
+ }
+
+ @Override
+ public SignatureVisitor visitArrayType() {
+ if (type != TYPE_SIGNATURE || state != EMPTY) {
+ throw new IllegalStateException();
+ }
+ state = SIMPLE_TYPE;
+ SignatureVisitor v = sv == null ? null : sv.visitArrayType();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public void visitClassType(final String name) {
+ if (type != TYPE_SIGNATURE || state != EMPTY) {
+ throw new IllegalStateException();
+ }
+ CheckMethodAdapter.checkInternalName(name, "class name");
+ state = CLASS_TYPE;
+ if (sv != null) {
+ sv.visitClassType(name);
+ }
+ }
+
+ @Override
+ public void visitInnerClassType(final String name) {
+ if (state != CLASS_TYPE) {
+ throw new IllegalStateException();
+ }
+ CheckMethodAdapter.checkIdentifier(name, "inner class name");
+ if (sv != null) {
+ sv.visitInnerClassType(name);
+ }
+ }
+
+ @Override
+ public void visitTypeArgument() {
+ if (state != CLASS_TYPE) {
+ throw new IllegalStateException();
+ }
+ if (sv != null) {
+ sv.visitTypeArgument();
+ }
+ }
+
+ @Override
+ public SignatureVisitor visitTypeArgument(final char wildcard) {
+ if (state != CLASS_TYPE) {
+ throw new IllegalStateException();
+ }
+ if ("+-=".indexOf(wildcard) == -1) {
+ throw new IllegalArgumentException();
+ }
+ SignatureVisitor v = sv == null ? null : sv.visitTypeArgument(wildcard);
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public void visitEnd() {
+ if (state != CLASS_TYPE) {
+ throw new IllegalStateException();
+ }
+ state = END;
+ if (sv != null) {
+ sv.visitEnd();
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java
new file mode 100644
index 0000000000..c39fd548ce
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/Printer.java
@@ -0,0 +1,558 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.Attribute;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.Opcodes;
+
+/**
+ * An abstract converter from visit events to text.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class Printer {
+
+ /**
+ * The names of the Java Virtual Machine opcodes.
+ */
+ public static final String[] OPCODES;
+
+ /**
+ * The names of the <code>operand</code> parameter values of the
+ * {@link org.objectweb.asm.MethodVisitor#visitIntInsn} method when
+ * <code>opcode</code> is <code>NEWARRAY</code>.
+ */
+ public static final String[] TYPES;
+
+ /**
+ * The names of the <code>tag</code> field values for
+ * {@link org.objectweb.asm.Handle}.
+ */
+ public static final String[] HANDLE_TAG;
+
+ static {
+ String s = "NOP,ACONST_NULL,ICONST_M1,ICONST_0,ICONST_1,ICONST_2,"
+ + "ICONST_3,ICONST_4,ICONST_5,LCONST_0,LCONST_1,FCONST_0,"
+ + "FCONST_1,FCONST_2,DCONST_0,DCONST_1,BIPUSH,SIPUSH,LDC,,,"
+ + "ILOAD,LLOAD,FLOAD,DLOAD,ALOAD,,,,,,,,,,,,,,,,,,,,,IALOAD,"
+ + "LALOAD,FALOAD,DALOAD,AALOAD,BALOAD,CALOAD,SALOAD,ISTORE,"
+ + "LSTORE,FSTORE,DSTORE,ASTORE,,,,,,,,,,,,,,,,,,,,,IASTORE,"
+ + "LASTORE,FASTORE,DASTORE,AASTORE,BASTORE,CASTORE,SASTORE,POP,"
+ + "POP2,DUP,DUP_X1,DUP_X2,DUP2,DUP2_X1,DUP2_X2,SWAP,IADD,LADD,"
+ + "FADD,DADD,ISUB,LSUB,FSUB,DSUB,IMUL,LMUL,FMUL,DMUL,IDIV,LDIV,"
+ + "FDIV,DDIV,IREM,LREM,FREM,DREM,INEG,LNEG,FNEG,DNEG,ISHL,LSHL,"
+ + "ISHR,LSHR,IUSHR,LUSHR,IAND,LAND,IOR,LOR,IXOR,LXOR,IINC,I2L,"
+ + "I2F,I2D,L2I,L2F,L2D,F2I,F2L,F2D,D2I,D2L,D2F,I2B,I2C,I2S,LCMP,"
+ + "FCMPL,FCMPG,DCMPL,DCMPG,IFEQ,IFNE,IFLT,IFGE,IFGT,IFLE,"
+ + "IF_ICMPEQ,IF_ICMPNE,IF_ICMPLT,IF_ICMPGE,IF_ICMPGT,IF_ICMPLE,"
+ + "IF_ACMPEQ,IF_ACMPNE,GOTO,JSR,RET,TABLESWITCH,LOOKUPSWITCH,"
+ + "IRETURN,LRETURN,FRETURN,DRETURN,ARETURN,RETURN,GETSTATIC,"
+ + "PUTSTATIC,GETFIELD,PUTFIELD,INVOKEVIRTUAL,INVOKESPECIAL,"
+ + "INVOKESTATIC,INVOKEINTERFACE,INVOKEDYNAMIC,NEW,NEWARRAY,"
+ + "ANEWARRAY,ARRAYLENGTH,ATHROW,CHECKCAST,INSTANCEOF,"
+ + "MONITORENTER,MONITOREXIT,,MULTIANEWARRAY,IFNULL,IFNONNULL,";
+ OPCODES = new String[200];
+ int i = 0;
+ int j = 0;
+ int l;
+ while ((l = s.indexOf(',', j)) > 0) {
+ OPCODES[i++] = j + 1 == l ? null : s.substring(j, l);
+ j = l + 1;
+ }
+
+ s = "T_BOOLEAN,T_CHAR,T_FLOAT,T_DOUBLE,T_BYTE,T_SHORT,T_INT,T_LONG,";
+ TYPES = new String[12];
+ j = 0;
+ i = 4;
+ while ((l = s.indexOf(',', j)) > 0) {
+ TYPES[i++] = s.substring(j, l);
+ j = l + 1;
+ }
+
+ s = "H_GETFIELD,H_GETSTATIC,H_PUTFIELD,H_PUTSTATIC,"
+ + "H_INVOKEVIRTUAL,H_INVOKESTATIC,H_INVOKESPECIAL,"
+ + "H_NEWINVOKESPECIAL,H_INVOKEINTERFACE,";
+ HANDLE_TAG = new String[10];
+ j = 0;
+ i = 1;
+ while ((l = s.indexOf(',', j)) > 0) {
+ HANDLE_TAG[i++] = s.substring(j, l);
+ j = l + 1;
+ }
+ }
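+
+ // Illustrative consequence of the initializer above (not part of the original
+ // ASM sources): entries are indexed by opcode / array type / handle tag value,
+ // with null entries for opcode numbers that have no mnemonic here, e.g.
+ //
+ //   OPCODES[Opcodes.ALOAD].equals("ALOAD");
+ //   TYPES[Opcodes.T_BOOLEAN].equals("T_BOOLEAN");
+ //   HANDLE_TAG[Opcodes.H_GETFIELD].equals("H_GETFIELD");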
+
+ /**
+ * The ASM API version implemented by this class. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * A buffer that can be used to create strings.
+ */
+ protected final StringBuffer buf;
+
+ /**
+ * The text to be printed. Since the code of methods is not necessarily
+ * visited in sequential order, one method after the other, but can be
+ * interlaced (some instructions from method one, then some instructions
+ * from method two, then some instructions from method one again...), it is
+ * not possible to print the visited instructions directly to a sequential
+ * stream. A class is therefore printed in a two-step process: a string
+ * tree is constructed during the visit, and printed to a sequential stream
+ * at the end of the visit. This string tree is stored in this field, as a
+ * string list that can contain other string lists, which can themselves
+ * contain other string lists, and so on.
+ */
+ public final List<Object> text;
+
+ /**
+ * Constructs a new {@link Printer}.
+ */
+ protected Printer(final int api) {
+ this.api = api;
+ this.buf = new StringBuffer();
+ this.text = new ArrayList<Object>();
+ }
+
+ /**
+ * Class header.
+ * See {@link org.objectweb.asm.ClassVisitor#visit}.
+ */
+ public abstract void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces);
+
+ /**
+ * Class source.
+ * See {@link org.objectweb.asm.ClassVisitor#visitSource}.
+ */
+ public abstract void visitSource(final String file, final String debug);
+
+ /**
+ * Class outer class.
+ * See {@link org.objectweb.asm.ClassVisitor#visitOuterClass}.
+ */
+ public abstract void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc);
+
+ /**
+ * Class annotation.
+ * See {@link org.objectweb.asm.ClassVisitor#visitAnnotation}.
+ */
+ public abstract Printer visitClassAnnotation(
+ final String desc,
+ final boolean visible);
+
+ /**
+ * Class attribute.
+ * See {@link org.objectweb.asm.ClassVisitor#visitAttribute}.
+ */
+ public abstract void visitClassAttribute(final Attribute attr);
+
+ /**
+ * Class inner name.
+ * See {@link org.objectweb.asm.ClassVisitor#visitInnerClass}.
+ */
+ public abstract void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access);
+
+ /**
+ * Class field.
+ * See {@link org.objectweb.asm.ClassVisitor#visitField}.
+ */
+ public abstract Printer visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value);
+
+ /**
+ * Class method.
+ * See {@link org.objectweb.asm.ClassVisitor#visitMethod}.
+ */
+ public abstract Printer visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions);
+
+ /**
+ * Class end.
+ * See {@link org.objectweb.asm.ClassVisitor#visitEnd}.
+ */
+ public abstract void visitClassEnd();
+
+ // ------------------------------------------------------------------------
+ // Annotations
+ // ------------------------------------------------------------------------
+
+ /**
+ * Annotation value.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visit}.
+ */
+ public abstract void visit(final String name, final Object value);
+
+ /**
+ * Annotation enum value.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visitEnum}.
+ */
+ public abstract void visitEnum(
+ final String name,
+ final String desc,
+ final String value);
+
+ /**
+ * Nested annotation value.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visitAnnotation}.
+ */
+ public abstract Printer visitAnnotation(
+ final String name,
+ final String desc);
+
+ /**
+ * Annotation array value.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visitArray}.
+ */
+ public abstract Printer visitArray(final String name);
+
+ /**
+ * Annotation end.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visitEnd}.
+ */
+ public abstract void visitAnnotationEnd();
+
+ // ------------------------------------------------------------------------
+ // Fields
+ // ------------------------------------------------------------------------
+
+ /**
+ * Field annotation.
+ * See {@link org.objectweb.asm.FieldVisitor#visitAnnotation}.
+ */
+ public abstract Printer visitFieldAnnotation(
+ final String desc,
+ final boolean visible);
+
+ /**
+ * Field attribute.
+ * See {@link org.objectweb.asm.FieldVisitor#visitAttribute}.
+ */
+ public abstract void visitFieldAttribute(final Attribute attr);
+
+ /**
+ * Field end.
+ * See {@link org.objectweb.asm.FieldVisitor#visitEnd}.
+ */
+ public abstract void visitFieldEnd();
+
+ // ------------------------------------------------------------------------
+ // Methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Method default annotation.
+ * See {@link org.objectweb.asm.MethodVisitor#visitAnnotationDefault}.
+ */
+ public abstract Printer visitAnnotationDefault();
+
+ /**
+ * Method annotation.
+ * See {@link org.objectweb.asm.MethodVisitor#visitAnnotation}.
+ */
+ public abstract Printer visitMethodAnnotation(
+ final String desc,
+ final boolean visible);
+
+ /**
+ * Method parameter annotation.
+ * See {@link org.objectweb.asm.MethodVisitor#visitParameterAnnotation}.
+ */
+ public abstract Printer visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible);
+
+ /**
+ * Method attribute.
+ * See {@link org.objectweb.asm.MethodVisitor#visitAttribute}.
+ */
+ public abstract void visitMethodAttribute(final Attribute attr);
+
+ /**
+ * Method start.
+ * See {@link org.objectweb.asm.MethodVisitor#visitCode}.
+ */
+ public abstract void visitCode();
+
+ /**
+ * Method stack frame.
+ * See {@link org.objectweb.asm.MethodVisitor#visitFrame}.
+ */
+ public abstract void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitInsn}.
+ */
+ public abstract void visitInsn(final int opcode);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitIntInsn}.
+ */
+ public abstract void visitIntInsn(final int opcode, final int operand);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitVarInsn}.
+ */
+ public abstract void visitVarInsn(final int opcode, final int var);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitTypeInsn}.
+ */
+ public abstract void visitTypeInsn(final int opcode, final String type);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitFieldInsn}.
+ */
+ public abstract void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitMethodInsn}.
+ */
+ public abstract void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitInvokeDynamicInsn}.
+ */
+ public abstract void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitJumpInsn}.
+ */
+ public abstract void visitJumpInsn(final int opcode, final Label label);
+
+ /**
+ * Method label.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLabel}.
+ */
+ public abstract void visitLabel(final Label label);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLdcInsn}.
+ */
+ public abstract void visitLdcInsn(final Object cst);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitIincInsn}.
+ */
+ public abstract void visitIincInsn(final int var, final int increment);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitTableSwitchInsn}.
+ */
+ public abstract void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLookupSwitchInsn}.
+ */
+ public abstract void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitMultiANewArrayInsn}.
+ */
+ public abstract void visitMultiANewArrayInsn(
+ final String desc,
+ final int dims);
+
+ /**
+ * Method exception handler.
+ * See {@link org.objectweb.asm.MethodVisitor#visitTryCatchBlock}.
+ */
+ public abstract void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type);
+
+ /**
+ * Method debug info.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLocalVariable}.
+ */
+ public abstract void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index);
+
+ /**
+ * Method debug info.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLineNumber}.
+ */
+ public abstract void visitLineNumber(final int line, final Label start);
+
+ /**
+ * Method max stack and max locals.
+ * See {@link org.objectweb.asm.MethodVisitor#visitMaxs}.
+ */
+ public abstract void visitMaxs(final int maxStack, final int maxLocals);
+
+ /**
+ * Method end.
+ * See {@link org.objectweb.asm.MethodVisitor#visitEnd}.
+ */
+ public abstract void visitMethodEnd();
+
+ /**
+ * Returns the text constructed by this visitor.
+ *
+ * @return the text constructed by this visitor.
+ */
+ public List<Object> getText() {
+ return text;
+ }
+
+ /**
+ * Prints the text constructed by this visitor.
+ *
+ * @param pw the print writer to be used.
+ */
+ public void print(final PrintWriter pw) {
+ printList(pw, text);
+ }
+
+ /**
+ * Appends a quoted string to a given buffer.
+ *
+ * @param buf the buffer where the string must be added.
+ * @param s the string to be added.
+ */
+ public static void appendString(final StringBuffer buf, final String s) {
+ buf.append('\"');
+ for (int i = 0; i < s.length(); ++i) {
+ char c = s.charAt(i);
+ if (c == '\n') {
+ buf.append("\\n");
+ } else if (c == '\r') {
+ buf.append("\\r");
+ } else if (c == '\\') {
+ buf.append("\\\\");
+ } else if (c == '"') {
+ buf.append("\\\"");
+ } else if (c < 0x20 || c > 0x7f) {
+ buf.append("\\u");
+ if (c < 0x10) {
+ buf.append("000");
+ } else if (c < 0x100) {
+ buf.append("00");
+ } else if (c < 0x1000) {
+ buf.append('0');
+ }
+ buf.append(Integer.toString(c, 16));
+ } else {
+ buf.append(c);
+ }
+ }
+ buf.append('\"');
+ }
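+
+ // Illustrative usage (not part of the original ASM sources): for the Java
+ // string "a\"b\n" (the four characters a, ", b, newline),
+ //
+ //   appendString(buf, "a\"b\n");
+ //
+ // appends the quoted, escaped form  "a\"b\n"  (eight characters, including
+ // the surrounding quotes) to the buffer.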
+
+ /**
+ * Prints the given string tree.
+ *
+ * @param pw the writer to be used to print the tree.
+ * @param l a string tree, i.e., a string list that can contain other string
+ * lists, and so on recursively.
+ */
+ static void printList(final PrintWriter pw, final List<?> l) {
+ for (int i = 0; i < l.size(); ++i) {
+ Object o = l.get(i);
+ if (o instanceof List) {
+ printList(pw, (List<?>) o);
+ } else {
+ pw.print(o.toString());
+ }
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/SignatureChecker.java b/src/asm/scala/tools/asm/util/SignatureChecker.java
new file mode 100644
index 0000000000..7b7eea4383
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/SignatureChecker.java
@@ -0,0 +1,47 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ */
+
+package scala.tools.asm.util;
+
+import scala.tools.asm.util.CheckMethodAdapter;
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A subclass of ASM's CheckMethodAdapter for the sole purpose of accessing some protected methods there.
+ *
+ */
+public class SignatureChecker extends CheckMethodAdapter {
+
+ public SignatureChecker(final MethodVisitor mv) {
+ super(mv);
+ }
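+
+ // Illustrative usage (a sketch, not taken from the compiler sources): the
+ // static wrappers below validate signature strings and throw
+ // IllegalArgumentException on malformed input, e.g.
+ //
+ //   SignatureChecker.checkMethodSignature("<T:Ljava/lang/Object;>(TT;)TT;");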
+
+ /**
+ * Checks a class signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ public static void checkClassSignature(final String signature) {
+ CheckMethodAdapter.checkClassSignature(signature);
+ }
+
+ /**
+ * Checks a method signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ public static void checkMethodSignature(final String signature) {
+ CheckMethodAdapter.checkMethodSignature(signature);
+ }
+
+ /**
+ * Checks a field signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ public static void checkFieldSignature(final String signature) {
+ CheckMethodAdapter.checkFieldSignature(signature);
+ }
+
+}
diff --git a/src/asm/scala/tools/asm/util/Textifiable.java b/src/asm/scala/tools/asm/util/Textifiable.java
new file mode 100644
index 0000000000..b80d0139db
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/Textifiable.java
@@ -0,0 +1,54 @@
+/**
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.util.Map;
+
+import scala.tools.asm.Label;
+
+/**
+ * An {@link scala.tools.asm.Attribute Attribute} that can print a readable
+ * representation of itself.
+ *
+ * Implementations should construct readable output from an attribute data
+ * structure. Such representation could be used in unit test assertions.
+ *
+ * @author Eugene Kuleshov
+ */
+public interface Textifiable {
+
+ /**
+ * Build a human readable representation of this attribute.
+ *
+ * @param buf a buffer used for printing Java code.
+ * @param labelNames map of label instances to their names.
+ */
+ void textify(StringBuffer buf, Map<Label, String> labelNames);
+}
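For illustration, a hedged sketch of a custom attribute implementing Textifiable, so that the Textifier added below prints its content instead of falling back to " : unknown"; the attribute name and payload are invented for this example:

    import java.util.Map;

    import scala.tools.asm.Attribute;
    import scala.tools.asm.Label;
    import scala.tools.asm.util.Textifiable;

    // Hypothetical attribute carrying a free-form comment string.
    public class CommentAttribute extends Attribute implements Textifiable {
        private final String comment;

        public CommentAttribute(String comment) {
            super("Comment"); // the attribute name as it would appear in the class file
            this.comment = comment;
        }

        public void textify(StringBuffer buf, Map<Label, String> labelNames) {
            buf.append(" : ").append(comment).append('\n');
        }
    }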
diff --git a/src/asm/scala/tools/asm/util/Textifier.java b/src/asm/scala/tools/asm/util/Textifier.java
new file mode 100644
index 0000000000..8d40ebd026
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/Textifier.java
@@ -0,0 +1,1286 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.FileInputStream;
+import java.io.PrintWriter;
+import java.util.HashMap;
+import java.util.Map;
+
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.signature.SignatureReader;
+
+/**
+ * A {@link Printer} that prints a disassembled view of the classes it visits.
+ *
+ * @author Eric Bruneton
+ */
+public class Textifier extends Printer {
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for internal
+ * type names in bytecode notation.
+ */
+ public static final int INTERNAL_NAME = 0;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for field
+ * descriptors, formatted in bytecode notation
+ */
+ public static final int FIELD_DESCRIPTOR = 1;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for field
+ * signatures, formatted in bytecode notation
+ */
+ public static final int FIELD_SIGNATURE = 2;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for method
+ * descriptors, formatted in bytecode notation
+ */
+ public static final int METHOD_DESCRIPTOR = 3;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for method
+ * signatures, formatted in bytecode notation
+ */
+ public static final int METHOD_SIGNATURE = 4;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for class
+ * signatures, formatted in bytecode notation
+ */
+ public static final int CLASS_SIGNATURE = 5;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for field or
+ * method return value signatures, formatted in default Java notation
+ * (non-bytecode)
+ */
+ public static final int TYPE_DECLARATION = 6;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for class
+ * signatures, formatted in default Java notation (non-bytecode)
+ */
+ public static final int CLASS_DECLARATION = 7;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for method
+ * parameter signatures, formatted in default Java notation (non-bytecode)
+ */
+ public static final int PARAMETERS_DECLARATION = 8;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for handle
+ * descriptors, formatted in bytecode notation
+ */
+ public static final int HANDLE_DESCRIPTOR = 9;
+
+ /**
+ * Tab for class members.
+ */
+ protected String tab = "  ";
+
+ /**
+ * Tab for bytecode instructions.
+ */
+ protected String tab2 = "    ";
+
+ /**
+ * Tab for table and lookup switch instructions.
+ */
+ protected String tab3 = "      ";
+
+ /**
+ * Tab for labels.
+ */
+ protected String ltab = "   ";
+
+ /**
+ * The label names. This map associates String values to Label keys.
+ */
+ protected Map<Label, String> labelNames;
+
+ private int valueNumber = 0;
+
+ /**
+ * Constructs a new {@link Textifier}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the {@link #Textifier(int)}
+ * version.
+ */
+ public Textifier() {
+ this(Opcodes.ASM4);
+ }
+
+ /**
+ * Constructs a new {@link Textifier}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ protected Textifier(final int api) {
+ super(api);
+ }
+
+ /**
+ * Prints a disassembled view of the given class to the standard output. <p>
+ * Usage: Textifier [-debug] &lt;binary class name or class
+ * file name &gt;
+ *
+ * @param args the command line arguments.
+ *
+ * @throws Exception if the class cannot be found, or if an IO exception
+ * occurs.
+ */
+ public static void main(final String[] args) throws Exception {
+ int i = 0;
+ int flags = ClassReader.SKIP_DEBUG;
+
+ boolean ok = true;
+ if (args.length < 1 || args.length > 2) {
+ ok = false;
+ }
+ if (ok && "-debug".equals(args[0])) {
+ i = 1;
+ flags = 0;
+ if (args.length != 2) {
+ ok = false;
+ }
+ }
+ if (!ok) {
+ System.err.println("Prints a disassembled view of the given class.");
+ System.err.println("Usage: Textifier [-debug] "
+ + "<fully qualified class name or class file name>");
+ return;
+ }
+ ClassReader cr;
+ if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
+ || args[i].indexOf('/') > -1)
+ {
+ cr = new ClassReader(new FileInputStream(args[i]));
+ } else {
+ cr = new ClassReader(args[i]);
+ }
+ cr.accept(new TraceClassVisitor(new PrintWriter(System.out)),
+ flags);
+ }
+
+ // ------------------------------------------------------------------------
+ // Classes
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ int major = version & 0xFFFF;
+ int minor = version >>> 16;
+ buf.setLength(0);
+ buf.append("// class version ")
+ .append(major)
+ .append('.')
+ .append(minor)
+ .append(" (")
+ .append(version)
+ .append(")\n");
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ buf.append("// DEPRECATED\n");
+ }
+ buf.append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+
+ appendDescriptor(CLASS_SIGNATURE, signature);
+ if (signature != null) {
+ TraceSignatureVisitor sv = new TraceSignatureVisitor(access);
+ SignatureReader r = new SignatureReader(signature);
+ r.accept(sv);
+ buf.append("// declaration: ")
+ .append(name)
+ .append(sv.getDeclaration())
+ .append('\n');
+ }
+
+ appendAccess(access & ~Opcodes.ACC_SUPER);
+ if ((access & Opcodes.ACC_ANNOTATION) != 0) {
+ buf.append("@interface ");
+ } else if ((access & Opcodes.ACC_INTERFACE) != 0) {
+ buf.append("interface ");
+ } else if ((access & Opcodes.ACC_ENUM) == 0) {
+ buf.append("class ");
+ }
+ appendDescriptor(INTERNAL_NAME, name);
+
+ if (superName != null && !"java/lang/Object".equals(superName)) {
+ buf.append(" extends ");
+ appendDescriptor(INTERNAL_NAME, superName);
+ buf.append(' ');
+ }
+ if (interfaces != null && interfaces.length > 0) {
+ buf.append(" implements ");
+ for (int i = 0; i < interfaces.length; ++i) {
+ appendDescriptor(INTERNAL_NAME, interfaces[i]);
+ buf.append(' ');
+ }
+ }
+ buf.append(" {\n\n");
+
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ buf.setLength(0);
+ if (file != null) {
+ buf.append(tab)
+ .append("// compiled from: ")
+ .append(file)
+ .append('\n');
+ }
+ if (debug != null) {
+ buf.append(tab)
+ .append("// debug info: ")
+ .append(debug)
+ .append('\n');
+ }
+ if (buf.length() > 0) {
+ text.add(buf.toString());
+ }
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(tab).append("OUTERCLASS ");
+ appendDescriptor(INTERNAL_NAME, owner);
+ buf.append(' ');
+ if (name != null) {
+ buf.append(name).append(' ');
+ }
+ appendDescriptor(METHOD_DESCRIPTOR, desc);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public Textifier visitClassAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ text.add("\n");
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public void visitClassAttribute(final Attribute attr) {
+ text.add("\n");
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ buf.setLength(0);
+ buf.append(tab).append("// access flags 0x");
+ buf.append(Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase()).append('\n');
+ buf.append(tab);
+ appendAccess(access);
+ buf.append("INNERCLASS ");
+ appendDescriptor(INTERNAL_NAME, name);
+ buf.append(' ');
+ appendDescriptor(INTERNAL_NAME, outerName);
+ buf.append(' ');
+ appendDescriptor(INTERNAL_NAME, innerName);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public Textifier visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ buf.setLength(0);
+ buf.append('\n');
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ buf.append(tab).append("// DEPRECATED\n");
+ }
+ buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+ if (signature != null) {
+ buf.append(tab);
+ appendDescriptor(FIELD_SIGNATURE, signature);
+
+ TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
+ SignatureReader r = new SignatureReader(signature);
+ r.acceptType(sv);
+ buf.append(tab)
+ .append("// declaration: ")
+ .append(sv.getDeclaration())
+ .append('\n');
+ }
+
+ buf.append(tab);
+ appendAccess(access);
+
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append(' ').append(name);
+ if (value != null) {
+ buf.append(" = ");
+ if (value instanceof String) {
+ buf.append('\"').append(value).append('\"');
+ } else {
+ buf.append(value);
+ }
+ }
+
+ buf.append('\n');
+ text.add(buf.toString());
+
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ return t;
+ }
+
+ @Override
+ public Textifier visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ buf.setLength(0);
+ buf.append('\n');
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ buf.append(tab).append("// DEPRECATED\n");
+ }
+ buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+
+ if (signature != null) {
+ buf.append(tab);
+ appendDescriptor(METHOD_SIGNATURE, signature);
+
+ TraceSignatureVisitor v = new TraceSignatureVisitor(0);
+ SignatureReader r = new SignatureReader(signature);
+ r.accept(v);
+ String genericDecl = v.getDeclaration();
+ String genericReturn = v.getReturnType();
+ String genericExceptions = v.getExceptions();
+
+ buf.append(tab)
+ .append("// declaration: ")
+ .append(genericReturn)
+ .append(' ')
+ .append(name)
+ .append(genericDecl);
+ if (genericExceptions != null) {
+ buf.append(" throws ").append(genericExceptions);
+ }
+ buf.append('\n');
+ }
+
+ buf.append(tab);
+ appendAccess(access);
+ if ((access & Opcodes.ACC_NATIVE) != 0) {
+ buf.append("native ");
+ }
+ if ((access & Opcodes.ACC_VARARGS) != 0) {
+ buf.append("varargs ");
+ }
+ if ((access & Opcodes.ACC_BRIDGE) != 0) {
+ buf.append("bridge ");
+ }
+
+ buf.append(name);
+ appendDescriptor(METHOD_DESCRIPTOR, desc);
+ if (exceptions != null && exceptions.length > 0) {
+ buf.append(" throws ");
+ for (int i = 0; i < exceptions.length; ++i) {
+ appendDescriptor(INTERNAL_NAME, exceptions[i]);
+ buf.append(' ');
+ }
+ }
+
+ buf.append('\n');
+ text.add(buf.toString());
+
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ return t;
+ }
+
+ @Override
+ public void visitClassEnd() {
+ text.add("}\n");
+ }
+
+ // ------------------------------------------------------------------------
+ // Annotations
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(final String name, final Object value) {
+ buf.setLength(0);
+ appendComa(valueNumber++);
+
+ if (name != null) {
+ buf.append(name).append('=');
+ }
+
+ if (value instanceof String) {
+ visitString((String) value);
+ } else if (value instanceof Type) {
+ visitType((Type) value);
+ } else if (value instanceof Byte) {
+ visitByte(((Byte) value).byteValue());
+ } else if (value instanceof Boolean) {
+ visitBoolean(((Boolean) value).booleanValue());
+ } else if (value instanceof Short) {
+ visitShort(((Short) value).shortValue());
+ } else if (value instanceof Character) {
+ visitChar(((Character) value).charValue());
+ } else if (value instanceof Integer) {
+ visitInt(((Integer) value).intValue());
+ } else if (value instanceof Float) {
+ visitFloat(((Float) value).floatValue());
+ } else if (value instanceof Long) {
+ visitLong(((Long) value).longValue());
+ } else if (value instanceof Double) {
+ visitDouble(((Double) value).doubleValue());
+ } else if (value.getClass().isArray()) {
+ buf.append('{');
+ if (value instanceof byte[]) {
+ byte[] v = (byte[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitByte(v[i]);
+ }
+ } else if (value instanceof boolean[]) {
+ boolean[] v = (boolean[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitBoolean(v[i]);
+ }
+ } else if (value instanceof short[]) {
+ short[] v = (short[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitShort(v[i]);
+ }
+ } else if (value instanceof char[]) {
+ char[] v = (char[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitChar(v[i]);
+ }
+ } else if (value instanceof int[]) {
+ int[] v = (int[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitInt(v[i]);
+ }
+ } else if (value instanceof long[]) {
+ long[] v = (long[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitLong(v[i]);
+ }
+ } else if (value instanceof float[]) {
+ float[] v = (float[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitFloat(v[i]);
+ }
+ } else if (value instanceof double[]) {
+ double[] v = (double[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitDouble(v[i]);
+ }
+ }
+ buf.append('}');
+ }
+
+ text.add(buf.toString());
+ }
+
+ private void visitInt(final int value) {
+ buf.append(value);
+ }
+
+ private void visitLong(final long value) {
+ buf.append(value).append('L');
+ }
+
+ private void visitFloat(final float value) {
+ buf.append(value).append('F');
+ }
+
+ private void visitDouble(final double value) {
+ buf.append(value).append('D');
+ }
+
+ private void visitChar(final char value) {
+ buf.append("(char)").append((int) value);
+ }
+
+ private void visitShort(final short value) {
+ buf.append("(short)").append(value);
+ }
+
+ private void visitByte(final byte value) {
+ buf.append("(byte)").append(value);
+ }
+
+ private void visitBoolean(final boolean value) {
+ buf.append(value);
+ }
+
+ private void visitString(final String value) {
+ appendString(buf, value);
+ }
+
+ private void visitType(final Type value) {
+ buf.append(value.getClassName()).append(".class");
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ buf.setLength(0);
+ appendComa(valueNumber++);
+ if (name != null) {
+ buf.append(name).append('=');
+ }
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('.').append(value);
+ text.add(buf.toString());
+ }
+
+ @Override
+ public Textifier visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ appendComa(valueNumber++);
+ if (name != null) {
+ buf.append(name).append('=');
+ }
+ buf.append('@');
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('(');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add(")");
+ return t;
+ }
+
+ @Override
+ public Textifier visitArray(
+ final String name)
+ {
+ buf.setLength(0);
+ appendComa(valueNumber++);
+ if (name != null) {
+ buf.append(name).append('=');
+ }
+ buf.append('{');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add("}");
+ return t;
+ }
+
+ @Override
+ public void visitAnnotationEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Fields
+ // ------------------------------------------------------------------------
+
+ @Override
+ public Textifier visitFieldAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public void visitFieldAttribute(final Attribute attr) {
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitFieldEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Methods
+ // ------------------------------------------------------------------------
+
+ @Override
+ public Textifier visitAnnotationDefault() {
+ text.add(tab2 + "default=");
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add("\n");
+ return t;
+ }
+
+ @Override
+ public Textifier visitMethodAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public Textifier visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append('@');
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('(');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add(visible ? ") // parameter " : ") // invisible, parameter ");
+ text.add(new Integer(parameter));
+ text.add("\n");
+ return t;
+ }
+
+ @Override
+ public void visitMethodAttribute(final Attribute attr) {
+ buf.setLength(0);
+ buf.append(tab).append("ATTRIBUTE ");
+ appendDescriptor(-1, attr.type);
+
+ if (attr instanceof Textifiable) {
+ ((Textifiable) attr).textify(buf, labelNames);
+ } else {
+ buf.append(" : unknown\n");
+ }
+
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitCode() {
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ buf.setLength(0);
+ buf.append(ltab);
+ buf.append("FRAME ");
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ buf.append("FULL [");
+ appendFrameTypes(nLocal, local);
+ buf.append("] [");
+ appendFrameTypes(nStack, stack);
+ buf.append(']');
+ break;
+ case Opcodes.F_APPEND:
+ buf.append("APPEND [");
+ appendFrameTypes(nLocal, local);
+ buf.append(']');
+ break;
+ case Opcodes.F_CHOP:
+ buf.append("CHOP ").append(nLocal);
+ break;
+ case Opcodes.F_SAME:
+ buf.append("SAME");
+ break;
+ case Opcodes.F_SAME1:
+ buf.append("SAME1 ");
+ appendFrameTypes(1, stack);
+ break;
+ }
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ buf.setLength(0);
+ buf.append(tab2)
+ .append(OPCODES[opcode])
+ .append(' ')
+ .append(opcode == Opcodes.NEWARRAY
+ ? TYPES[operand]
+ : Integer.toString(operand))
+ .append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ buf.setLength(0);
+ buf.append(tab2)
+ .append(OPCODES[opcode])
+ .append(' ')
+ .append(var)
+ .append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append(' ');
+ appendDescriptor(INTERNAL_NAME, type);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append(' ');
+ appendDescriptor(INTERNAL_NAME, owner);
+ buf.append('.').append(name).append(" : ");
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append(' ');
+ appendDescriptor(INTERNAL_NAME, owner);
+ buf.append('.').append(name).append(' ');
+ appendDescriptor(METHOD_DESCRIPTOR, desc);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("INVOKEDYNAMIC").append(' ');
+ buf.append(name);
+ appendDescriptor(METHOD_DESCRIPTOR, desc);
+ buf.append(" [");
+ appendHandle(bsm);
+ buf.append(tab3).append("// arguments:");
+ if(bsmArgs.length == 0) {
+ buf.append(" none");
+ } else {
+ buf.append('\n').append(tab3);
+ for(int i = 0; i < bsmArgs.length; i++) {
+ Object cst = bsmArgs[i];
+ if (cst instanceof String) {
+ Printer.appendString(buf, (String) cst);
+ } else if (cst instanceof Type) {
+ buf.append(((Type) cst).getDescriptor()).append(".class");
+ } else if (cst instanceof Handle) {
+ appendHandle((Handle) cst);
+ } else {
+ buf.append(cst);
+ }
+ buf.append(", ");
+ }
+ buf.setLength(buf.length() - 2);
+ }
+ buf.append('\n');
+ buf.append(tab2).append("]\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append(' ');
+ appendLabel(label);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ buf.setLength(0);
+ buf.append(ltab);
+ appendLabel(label);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ buf.setLength(0);
+ buf.append(tab2).append("LDC ");
+ if (cst instanceof String) {
+ Printer.appendString(buf, (String) cst);
+ } else if (cst instanceof Type) {
+ buf.append(((Type) cst).getDescriptor()).append(".class");
+ } else {
+ buf.append(cst);
+ }
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ buf.setLength(0);
+ buf.append(tab2)
+ .append("IINC ")
+ .append(var)
+ .append(' ')
+ .append(increment)
+ .append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("TABLESWITCH\n");
+ for (int i = 0; i < labels.length; ++i) {
+ buf.append(tab3).append(min + i).append(": ");
+ appendLabel(labels[i]);
+ buf.append('\n');
+ }
+ buf.append(tab3).append("default: ");
+ appendLabel(dflt);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("LOOKUPSWITCH\n");
+ for (int i = 0; i < labels.length; ++i) {
+ buf.append(tab3).append(keys[i]).append(": ");
+ appendLabel(labels[i]);
+ buf.append('\n');
+ }
+ buf.append(tab3).append("default: ");
+ appendLabel(dflt);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ buf.setLength(0);
+ buf.append(tab2).append("MULTIANEWARRAY ");
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append(' ').append(dims).append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("TRYCATCHBLOCK ");
+ appendLabel(start);
+ buf.append(' ');
+ appendLabel(end);
+ buf.append(' ');
+ appendLabel(handler);
+ buf.append(' ');
+ appendDescriptor(INTERNAL_NAME, type);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("LOCALVARIABLE ").append(name).append(' ');
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append(' ');
+ appendLabel(start);
+ buf.append(' ');
+ appendLabel(end);
+ buf.append(' ').append(index).append('\n');
+
+ if (signature != null) {
+ buf.append(tab2);
+ appendDescriptor(FIELD_SIGNATURE, signature);
+
+ TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
+ SignatureReader r = new SignatureReader(signature);
+ r.acceptType(sv);
+ buf.append(tab2)
+ .append("// declaration: ")
+ .append(sv.getDeclaration())
+ .append('\n');
+ }
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ buf.setLength(0);
+ buf.append(tab2).append("LINENUMBER ").append(line).append(' ');
+ appendLabel(start);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ buf.setLength(0);
+ buf.append(tab2).append("MAXSTACK = ").append(maxStack).append('\n');
+ text.add(buf.toString());
+
+ buf.setLength(0);
+ buf.append(tab2).append("MAXLOCALS = ").append(maxLocals).append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMethodEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Common methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Prints a disassembled view of the given annotation.
+ *
+ * @param desc the class descriptor of the annotation class.
+ * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values.
+ */
+ public Textifier visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ buf.setLength(0);
+ buf.append(tab).append('@');
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('(');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add(visible ? ")\n" : ") // invisible\n");
+ return t;
+ }
+
+ /**
+ * Prints a disassembled view of the given attribute.
+ *
+ * @param attr an attribute.
+ */
+ public void visitAttribute(final Attribute attr) {
+ buf.setLength(0);
+ buf.append(tab).append("ATTRIBUTE ");
+ appendDescriptor(-1, attr.type);
+
+ if (attr instanceof Textifiable) {
+ ((Textifiable) attr).textify(buf, null);
+ } else {
+ buf.append(" : unknown\n");
+ }
+
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Creates a new Textifier instance.
+ *
+ * @return a new Textifier.
+ */
+ protected Textifier createTextifier() {
+ return new Textifier();
+ }
+
+ /**
+ * Appends an internal name, a type descriptor or a type signature to
+ * {@link #buf buf}.
+ *
+ * @param type indicates if desc is an internal name, a field descriptor, a
+ * method descriptor, a class signature, ...
+ * @param desc an internal name, type descriptor, or type signature. May be
+ * <tt>null</tt>.
+ */
+ protected void appendDescriptor(final int type, final String desc) {
+ if (type == CLASS_SIGNATURE || type == FIELD_SIGNATURE
+ || type == METHOD_SIGNATURE)
+ {
+ if (desc != null) {
+ buf.append("// signature ").append(desc).append('\n');
+ }
+ } else {
+ buf.append(desc);
+ }
+ }
+
+ /**
+ * Appends the name of the given label to {@link #buf buf}. Creates a new
+ * label name if the given label does not yet have one.
+ *
+ * @param l a label.
+ */
+ protected void appendLabel(final Label l) {
+ if (labelNames == null) {
+ labelNames = new HashMap<Label, String>();
+ }
+ String name = labelNames.get(l);
+ if (name == null) {
+ name = "L" + labelNames.size();
+ labelNames.put(l, name);
+ }
+ buf.append(name);
+ }
+
+ /**
+ * Appends the information about the given handle to {@link #buf buf}.
+ *
+ * @param h a handle, non null.
+ */
+ protected void appendHandle(final Handle h) {
+ buf.append('\n').append(tab3);
+ int tag = h.getTag();
+ buf.append("// handle kind 0x").append(Integer.toHexString(tag)).append(" : ");
+ switch (tag) {
+ case Opcodes.H_GETFIELD:
+ buf.append("GETFIELD");
+ break;
+ case Opcodes.H_GETSTATIC:
+ buf.append("GETSTATIC");
+ break;
+ case Opcodes.H_PUTFIELD:
+ buf.append("PUTFIELD");
+ break;
+ case Opcodes.H_PUTSTATIC:
+ buf.append("PUTSTATIC");
+ break;
+ case Opcodes.H_INVOKEINTERFACE:
+ buf.append("INVOKEINTERFACE");
+ break;
+ case Opcodes.H_INVOKESPECIAL:
+ buf.append("INVOKESPECIAL");
+ break;
+ case Opcodes.H_INVOKESTATIC:
+ buf.append("INVOKESTATIC");
+ break;
+ case Opcodes.H_INVOKEVIRTUAL:
+ buf.append("INVOKEVIRTUAL");
+ break;
+ case Opcodes.H_NEWINVOKESPECIAL:
+ buf.append("NEWINVOKESPECIAL");
+ break;
+ }
+ buf.append('\n');
+ buf.append(tab3);
+ appendDescriptor(INTERNAL_NAME, h.getOwner());
+ buf.append('.');
+ buf.append(h.getName());
+ buf.append('(');
+ appendDescriptor(HANDLE_DESCRIPTOR, h.getDesc());
+ buf.append(')').append('\n');
+ }
+
+ /**
+ * Appends a string representation of the given access modifiers to {@link
+ * #buf buf}.
+ *
+ * @param access some access modifiers.
+ */
+ private void appendAccess(final int access) {
+ if ((access & Opcodes.ACC_PUBLIC) != 0) {
+ buf.append("public ");
+ }
+ if ((access & Opcodes.ACC_PRIVATE) != 0) {
+ buf.append("private ");
+ }
+ if ((access & Opcodes.ACC_PROTECTED) != 0) {
+ buf.append("protected ");
+ }
+ if ((access & Opcodes.ACC_FINAL) != 0) {
+ buf.append("final ");
+ }
+ if ((access & Opcodes.ACC_STATIC) != 0) {
+ buf.append("static ");
+ }
+ if ((access & Opcodes.ACC_SYNCHRONIZED) != 0) {
+ buf.append("synchronized ");
+ }
+ if ((access & Opcodes.ACC_VOLATILE) != 0) {
+ buf.append("volatile ");
+ }
+ if ((access & Opcodes.ACC_TRANSIENT) != 0) {
+ buf.append("transient ");
+ }
+ if ((access & Opcodes.ACC_ABSTRACT) != 0) {
+ buf.append("abstract ");
+ }
+ if ((access & Opcodes.ACC_STRICT) != 0) {
+ buf.append("strictfp ");
+ }
+ if ((access & Opcodes.ACC_ENUM) != 0) {
+ buf.append("enum ");
+ }
+ }
+
+ private void appendComa(final int i) {
+ if (i != 0) {
+ buf.append(", ");
+ }
+ }
+
+ private void appendFrameTypes(final int n, final Object[] o) {
+ for (int i = 0; i < n; ++i) {
+ if (i > 0) {
+ buf.append(' ');
+ }
+ if (o[i] instanceof String) {
+ String desc = (String) o[i];
+ if (desc.startsWith("[")) {
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ } else {
+ appendDescriptor(INTERNAL_NAME, desc);
+ }
+ } else if (o[i] instanceof Integer) {
+ switch (((Integer) o[i]).intValue()) {
+ case 0:
+ appendDescriptor(FIELD_DESCRIPTOR, "T");
+ break;
+ case 1:
+ appendDescriptor(FIELD_DESCRIPTOR, "I");
+ break;
+ case 2:
+ appendDescriptor(FIELD_DESCRIPTOR, "F");
+ break;
+ case 3:
+ appendDescriptor(FIELD_DESCRIPTOR, "D");
+ break;
+ case 4:
+ appendDescriptor(FIELD_DESCRIPTOR, "J");
+ break;
+ case 5:
+ appendDescriptor(FIELD_DESCRIPTOR, "N");
+ break;
+ case 6:
+ appendDescriptor(FIELD_DESCRIPTOR, "U");
+ break;
+ }
+ } else {
+ appendLabel((Label) o[i]);
+ }
+ }
+ }
+}
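Besides the command-line entry point in main, Textifier can be driven programmatically through the TraceClassVisitor added later in this patch; a short sketch, assuming the class being printed (java.lang.Runnable here) is on the classpath:

    import java.io.PrintWriter;

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.util.Textifier;
    import scala.tools.asm.util.TraceClassVisitor;

    public class TextifierExample {
        public static void main(String[] args) throws Exception {
            // Read the class from the classpath and print its disassembled form to stdout.
            ClassReader cr = new ClassReader("java.lang.Runnable");
            cr.accept(new TraceClassVisitor(null, new Textifier(), new PrintWriter(System.out)),
                      ClassReader.SKIP_DEBUG);
        }
    }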
diff --git a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
new file mode 100644
index 0000000000..f112609031
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
@@ -0,0 +1,96 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * An {@link AnnotationVisitor} that prints the annotations it visits with a
+ * {@link Printer}.
+ *
+ * @author Eric Bruneton
+ */
+public final class TraceAnnotationVisitor extends AnnotationVisitor {
+
+ private final Printer p;
+
+ public TraceAnnotationVisitor(final Printer p) {
+ this(null, p);
+ }
+
+ public TraceAnnotationVisitor(final AnnotationVisitor av, final Printer p) {
+ super(Opcodes.ASM4, av);
+ this.p = p;
+ }
+
+ @Override
+ public void visit(final String name, final Object value) {
+ p.visit(name, value);
+ super.visit(name, value);
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ p.visitEnum(name, desc, value);
+ super.visitEnum(name, desc, value);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ Printer p = this.p.visitAnnotation(name, desc);
+ AnnotationVisitor av = this.av == null
+ ? null
+ : this.av.visitAnnotation(name, desc);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public AnnotationVisitor visitArray(final String name) {
+ Printer p = this.p.visitArray(name);
+ AnnotationVisitor av = this.av == null
+ ? null
+ : this.av.visitArray(name);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitEnd() {
+ p.visitAnnotationEnd();
+ super.visitEnd();
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/TraceClassVisitor.java b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
new file mode 100644
index 0000000000..bb830b71ce
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
@@ -0,0 +1,232 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.PrintWriter;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A {@link ClassVisitor} that prints the classes it visits with a
+ * {@link Printer}. This class visitor can be used in the middle of a class
+ * visitor chain to trace the class that is visited at a given point in this
+ * chain. This may be useful for debugging purposes. <p> The trace printed when
+ * visiting the <tt>Hello</tt> class is the following: <p> <blockquote>
+ *
+ * <pre> // class version 49.0 (49) // access flags 0x21 public class Hello {
+ *
+ * // compiled from: Hello.java
+ *
+ * // access flags 0x1 public &lt;init&gt; ()V ALOAD 0 INVOKESPECIAL
+ * java/lang/Object &lt;init&gt; ()V RETURN MAXSTACK = 1 MAXLOCALS = 1
+ *
+ * // access flags 0x9 public static main ([Ljava/lang/String;)V GETSTATIC
+ * java/lang/System out Ljava/io/PrintStream; LDC &quot;hello&quot;
+ * INVOKEVIRTUAL java/io/PrintStream println (Ljava/lang/String;)V RETURN
+ * MAXSTACK = 2 MAXLOCALS = 1 } </pre>
+ *
+ * </blockquote> where <tt>Hello</tt> is defined by: <p> <blockquote>
+ *
+ * <pre> public class Hello {
+ *
+ * public static void main(String[] args) {
+ * System.out.println(&quot;hello&quot;); } } </pre>
+ *
+ * </blockquote>
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public final class TraceClassVisitor extends ClassVisitor {
+
+ /**
+ * The print writer to be used to print the class. May be null.
+ */
+ private final PrintWriter pw;
+
+ /**
+ * The object that actually converts visit events into text.
+ */
+ public final Printer p;
+
+ /**
+ * Constructs a new {@link TraceClassVisitor}.
+ *
+ * @param pw the print writer to be used to print the class.
+ */
+ public TraceClassVisitor(final PrintWriter pw) {
+ this(null, pw);
+ }
+
+ /**
+ * Constructs a new {@link TraceClassVisitor}.
+ *
+ * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
+ * May be <tt>null</tt>.
+ * @param pw the print writer to be used to print the class.
+ */
+ public TraceClassVisitor(final ClassVisitor cv, final PrintWriter pw) {
+ this(cv, new Textifier(), pw);
+ }
+
+ /**
+ * Constructs a new {@link TraceClassVisitor}.
+ *
+ * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
+ * May be <tt>null</tt>.
+ * @param p the object that actually converts visit events into text.
+ * @param pw the print writer to be used to print the class. May be null if
+ * you simply want to use the result via
+ * {@link Printer#getText()}, instead of printing it.
+ */
+ public TraceClassVisitor(
+ final ClassVisitor cv,
+ final Printer p,
+ final PrintWriter pw)
+ {
+ super(Opcodes.ASM4, cv);
+ this.pw = pw;
+ this.p = p;
+ }
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ p.visit(version, access, name, signature, superName, interfaces);
+ super.visit(version, access, name, signature, superName, interfaces);
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ p.visitSource(file, debug);
+ super.visitSource(file, debug);
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ p.visitOuterClass(owner, name, desc);
+ super.visitOuterClass(owner, name, desc);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ Printer p = this.p.visitClassAnnotation(desc, visible);
+ AnnotationVisitor av = cv == null ? null : cv.visitAnnotation(desc,
+ visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ p.visitClassAttribute(attr);
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ p.visitInnerClass(name, outerName, innerName, access);
+ super.visitInnerClass(name, outerName, innerName, access);
+ }
+
+ @Override
+ public FieldVisitor visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ Printer p = this.p.visitField(access,
+ name,
+ desc,
+ signature,
+ value);
+ FieldVisitor fv = cv == null ? null : cv.visitField(access,
+ name,
+ desc,
+ signature,
+ value);
+ return new TraceFieldVisitor(fv, p);
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ Printer p = this.p.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+ MethodVisitor mv = cv == null ? null : cv.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+ return new TraceMethodVisitor(mv, p);
+ }
+
+ @Override
+ public void visitEnd() {
+ p.visitClassEnd();
+ if (pw != null) {
+ p.print(pw);
+ pw.flush();
+ }
+ super.visitEnd();
+ }
+}
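As its Javadoc notes, TraceClassVisitor is usually inserted in the middle of a visitor chain; a minimal sketch that traces a class while copying it through a ClassWriter (the actual transformation is omitted and the flag values are the plain defaults):

    import java.io.PrintWriter;

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.util.TraceClassVisitor;

    public class TraceInChainExample {
        public static byte[] copyAndTrace(byte[] classBytes) {
            ClassWriter cw = new ClassWriter(0);
            // Every visit event is printed to stderr and then forwarded, unchanged, to the writer.
            TraceClassVisitor tracer = new TraceClassVisitor(cw, new PrintWriter(System.err));
            new ClassReader(classBytes).accept(tracer, 0);
            return cw.toByteArray();
        }
    }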
diff --git a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
new file mode 100644
index 0000000000..f537e83be1
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
@@ -0,0 +1,78 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A {@link FieldVisitor} that prints the fields it visits with a
+ * {@link Printer}.
+ *
+ * @author Eric Bruneton
+ */
+public final class TraceFieldVisitor extends FieldVisitor {
+
+ public final Printer p;
+
+ public TraceFieldVisitor(final Printer p) {
+ this(null, p);
+ }
+
+ public TraceFieldVisitor(final FieldVisitor fv, final Printer p) {
+ super(Opcodes.ASM4, fv);
+ this.p = p;
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ Printer p = this.p.visitFieldAnnotation(desc, visible);
+ AnnotationVisitor av = fv == null ? null : fv.visitAnnotation(desc,
+ visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ p.visitFieldAttribute(attr);
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitEnd() {
+ p.visitFieldEnd();
+ super.visitEnd();
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
new file mode 100644
index 0000000000..9aabf2079e
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
@@ -0,0 +1,264 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A {@link MethodVisitor} that prints the methods it visits with a
+ * {@link Printer}.
+ *
+ * @author Eric Bruneton
+ */
+public final class TraceMethodVisitor extends MethodVisitor {
+
+ public final Printer p;
+
+ public TraceMethodVisitor(final Printer p) {
+ this(null, p);
+ }
+
+ public TraceMethodVisitor(final MethodVisitor mv, final Printer p) {
+ super(Opcodes.ASM4, mv);
+ this.p = p;
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ Printer p = this.p.visitMethodAnnotation(desc, visible);
+ AnnotationVisitor av = mv == null ? null : mv.visitAnnotation(desc,
+ visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ p.visitMethodAttribute(attr);
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotationDefault() {
+ Printer p = this.p.visitAnnotationDefault();
+ AnnotationVisitor av = mv == null ? null : mv.visitAnnotationDefault();
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public AnnotationVisitor visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ Printer p = this.p.visitParameterAnnotation(parameter,
+ desc,
+ visible);
+ AnnotationVisitor av = mv == null
+ ? null
+ : mv.visitParameterAnnotation(parameter, desc, visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitCode() {
+ p.visitCode();
+ super.visitCode();
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ p.visitFrame(type, nLocal, local, nStack, stack);
+ super.visitFrame(type, nLocal, local, nStack, stack);
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ p.visitInsn(opcode);
+ super.visitInsn(opcode);
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ p.visitIntInsn(opcode, operand);
+ super.visitIntInsn(opcode, operand);
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ p.visitVarInsn(opcode, var);
+ super.visitVarInsn(opcode, var);
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ p.visitTypeInsn(opcode, type);
+ super.visitTypeInsn(opcode, type);
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ p.visitFieldInsn(opcode, owner, name, desc);
+ super.visitFieldInsn(opcode, owner, name, desc);
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ p.visitMethodInsn(opcode, owner, name, desc);
+ super.visitMethodInsn(opcode, owner, name, desc);
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ p.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ p.visitJumpInsn(opcode, label);
+ super.visitJumpInsn(opcode, label);
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ p.visitLabel(label);
+ super.visitLabel(label);
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ p.visitLdcInsn(cst);
+ super.visitLdcInsn(cst);
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ p.visitIincInsn(var, increment);
+ super.visitIincInsn(var, increment);
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ p.visitTableSwitchInsn(min, max, dflt, labels);
+ super.visitTableSwitchInsn(min, max, dflt, labels);
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ p.visitLookupSwitchInsn(dflt, keys, labels);
+ super.visitLookupSwitchInsn(dflt, keys, labels);
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ p.visitMultiANewArrayInsn(desc, dims);
+ super.visitMultiANewArrayInsn(desc, dims);
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ p.visitTryCatchBlock(start, end, handler, type);
+ super.visitTryCatchBlock(start, end, handler, type);
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ p.visitLocalVariable(name, desc, signature, start, end, index);
+ super.visitLocalVariable(name, desc, signature, start, end, index);
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ p.visitLineNumber(line, start);
+ super.visitLineNumber(line, start);
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ p.visitMaxs(maxStack, maxLocals);
+ super.visitMaxs(maxStack, maxLocals);
+ }
+
+ @Override
+ public void visitEnd() {
+ p.visitMethodEnd();
+ super.visitEnd();
+ }
+}
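TraceMethodVisitor also works without a delegate, which makes it convenient for capturing the text of a single method; a sketch that records a few hand-written instructions (the opcode sequence is an arbitrary example):

    import java.io.PrintWriter;
    import java.io.StringWriter;

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.Textifier;
    import scala.tools.asm.util.TraceMethodVisitor;

    public class TraceMethodExample {
        public static void main(String[] args) {
            Textifier printer = new Textifier();
            // No delegate MethodVisitor: the events are only recorded as text by the printer.
            TraceMethodVisitor tracer = new TraceMethodVisitor(printer);
            tracer.visitVarInsn(Opcodes.ILOAD, 1);
            tracer.visitVarInsn(Opcodes.ILOAD, 2);
            tracer.visitInsn(Opcodes.IADD);
            tracer.visitInsn(Opcodes.IRETURN);
            tracer.visitEnd();

            StringWriter out = new StringWriter();
            PrintWriter pw = new PrintWriter(out);
            printer.print(pw);
            pw.flush();
            System.out.println(out); // ILOAD 1, ILOAD 2, IADD, IRETURN, one instruction per line
        }
    }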
diff --git a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
new file mode 100644
index 0000000000..a37b759811
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
@@ -0,0 +1,318 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.signature.SignatureVisitor;
+
+/**
+ * A {@link SignatureVisitor} that prints a disassembled view of the signature
+ * it visits.
+ *
+ * @author Eugene Kuleshov
+ * @author Eric Bruneton
+ */
+public final class TraceSignatureVisitor extends SignatureVisitor {
+
+ private final StringBuffer declaration;
+
+ private boolean isInterface;
+
+ private boolean seenFormalParameter;
+
+ private boolean seenInterfaceBound;
+
+ private boolean seenParameter;
+
+ private boolean seenInterface;
+
+ private StringBuffer returnType;
+
+ private StringBuffer exceptions;
+
+ /**
+ * Stack used to keep track of class types that have arguments. Each element
+ * of this stack is a boolean encoded in one bit. The top of the stack is
+ * the lowest order bit. Pushing false = *2, pushing true = *2+1, popping =
+ * /2.
+ */
+ private int argumentStack;
+
+ /**
+ * Stack used to keep track of array class types. Each element of this stack
+ * is a boolean encoded in one bit. The top of the stack is the lowest order
+ * bit. Pushing false = *2, pushing true = *2+1, popping = /2.
+ */
+ private int arrayStack;
+
+ private String separator = "";
+
+ public TraceSignatureVisitor(final int access) {
+ super(Opcodes.ASM4);
+ isInterface = (access & Opcodes.ACC_INTERFACE) != 0;
+ this.declaration = new StringBuffer();
+ }
+
+ private TraceSignatureVisitor(final StringBuffer buf) {
+ super(Opcodes.ASM4);
+ this.declaration = buf;
+ }
+
+ @Override
+ public void visitFormalTypeParameter(final String name) {
+ declaration.append(seenFormalParameter ? ", " : "<").append(name);
+ seenFormalParameter = true;
+ seenInterfaceBound = false;
+ }
+
+ @Override
+ public SignatureVisitor visitClassBound() {
+ separator = " extends ";
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitInterfaceBound() {
+ separator = seenInterfaceBound ? ", " : " extends ";
+ seenInterfaceBound = true;
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitSuperclass() {
+ endFormals();
+ separator = " extends ";
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitInterface() {
+ separator = seenInterface ? ", " : isInterface
+ ? " extends "
+ : " implements ";
+ seenInterface = true;
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitParameterType() {
+ endFormals();
+ if (seenParameter) {
+ declaration.append(", ");
+ } else {
+ seenParameter = true;
+ declaration.append('(');
+ }
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitReturnType() {
+ endFormals();
+ if (seenParameter) {
+ seenParameter = false;
+ } else {
+ declaration.append('(');
+ }
+ declaration.append(')');
+ returnType = new StringBuffer();
+ return new TraceSignatureVisitor(returnType);
+ }
+
+ @Override
+ public SignatureVisitor visitExceptionType() {
+ if (exceptions == null) {
+ exceptions = new StringBuffer();
+ } else {
+ exceptions.append(", ");
+ }
+ // startType();
+ return new TraceSignatureVisitor(exceptions);
+ }
+
+ @Override
+ public void visitBaseType(final char descriptor) {
+ switch (descriptor) {
+ case 'V':
+ declaration.append("void");
+ break;
+ case 'B':
+ declaration.append("byte");
+ break;
+ case 'J':
+ declaration.append("long");
+ break;
+ case 'Z':
+ declaration.append("boolean");
+ break;
+ case 'I':
+ declaration.append("int");
+ break;
+ case 'S':
+ declaration.append("short");
+ break;
+ case 'C':
+ declaration.append("char");
+ break;
+ case 'F':
+ declaration.append("float");
+ break;
+ // case 'D':
+ default:
+ declaration.append("double");
+ break;
+ }
+ endType();
+ }
+
+ @Override
+ public void visitTypeVariable(final String name) {
+ declaration.append(name);
+ endType();
+ }
+
+ @Override
+ public SignatureVisitor visitArrayType() {
+ startType();
+ arrayStack |= 1;
+ return this;
+ }
+
+ @Override
+ public void visitClassType(final String name) {
+ if ("java/lang/Object".equals(name)) {
+ // Map<java.lang.Object,java.util.List>
+ // or
+ // abstract public V get(Object key); (seen in Dictionary.class)
+ // should have Object
+ // but java.lang.String extends java.lang.Object is unnecessary
+ boolean needObjectClass = argumentStack % 2 != 0 || seenParameter;
+ if (needObjectClass) {
+ declaration.append(separator).append(name.replace('/', '.'));
+ }
+ } else {
+ declaration.append(separator).append(name.replace('/', '.'));
+ }
+ separator = "";
+ argumentStack *= 2;
+ }
+
+ @Override
+ public void visitInnerClassType(final String name) {
+ if (argumentStack % 2 != 0) {
+ declaration.append('>');
+ }
+ argumentStack /= 2;
+ declaration.append('.');
+ declaration.append(separator).append(name.replace('/', '.'));
+ separator = "";
+ argumentStack *= 2;
+ }
+
+ @Override
+ public void visitTypeArgument() {
+ if (argumentStack % 2 == 0) {
+ ++argumentStack;
+ declaration.append('<');
+ } else {
+ declaration.append(", ");
+ }
+ declaration.append('?');
+ }
+
+ @Override
+ public SignatureVisitor visitTypeArgument(final char tag) {
+ if (argumentStack % 2 == 0) {
+ ++argumentStack;
+ declaration.append('<');
+ } else {
+ declaration.append(", ");
+ }
+
+ if (tag == EXTENDS) {
+ declaration.append("? extends ");
+ } else if (tag == SUPER) {
+ declaration.append("? super ");
+ }
+
+ startType();
+ return this;
+ }
+
+ @Override
+ public void visitEnd() {
+ if (argumentStack % 2 != 0) {
+ declaration.append('>');
+ }
+ argumentStack /= 2;
+ endType();
+ }
+
+ public String getDeclaration() {
+ return declaration.toString();
+ }
+
+ public String getReturnType() {
+ return returnType == null ? null : returnType.toString();
+ }
+
+ public String getExceptions() {
+ return exceptions == null ? null : exceptions.toString();
+ }
+
+ // -----------------------------------------------
+
+ private void endFormals() {
+ if (seenFormalParameter) {
+ declaration.append('>');
+ seenFormalParameter = false;
+ }
+ }
+
+ private void startType() {
+ arrayStack *= 2;
+ }
+
+ private void endType() {
+ if (arrayStack % 2 == 0) {
+ arrayStack /= 2;
+ } else {
+ while (arrayStack % 2 != 0) {
+ arrayStack /= 2;
+ declaration.append("[]");
+ }
+ }
+ }
+}
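
Aside: the argumentStack/arrayStack comments in the file above describe booleans packed into the bits of a single integer (push false = *2, push true = *2+1, pop = /2, top = lowest-order bit). A minimal, hypothetical Scala sketch of that encoding follows; the object and method names are illustrative only and are not part of the ASM sources.

    // Sketch of the bit-packed boolean stack used by TraceSignatureVisitor.
    object BitStackExample extends App {
      var stack = 0                                            // empty stack
      def push(b: Boolean) { stack = stack * 2 + (if (b) 1 else 0) }
      def top: Boolean = stack % 2 != 0                        // lowest-order bit
      def pop() { stack /= 2 }

      push(false); push(true)
      assert(top)      // true is on top
      pop()
      assert(!top)     // false is now on top
    }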
diff --git a/src/attic/README b/src/attic/README
deleted file mode 100644
index 9fb600ae57..0000000000
--- a/src/attic/README
+++ /dev/null
@@ -1,2 +0,0 @@
-This is a holding area for source files which aren't used in
-trunk anymore but which we're keeping available for a time.
\ No newline at end of file
diff --git a/src/attic/scala/tools/nsc/models/SemanticTokens.scala b/src/attic/scala/tools/nsc/models/SemanticTokens.scala
deleted file mode 100644
index a94188a3c1..0000000000
--- a/src/attic/scala/tools/nsc/models/SemanticTokens.scala
+++ /dev/null
@@ -1,701 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package models
-
-import java.lang.Character.isJavaIdentifierPart
-import java.lang.Thread
-
-import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.Global
-import scala.tools.nsc.symtab.{Flags, Names}
-import scala.tools.nsc.symtab.Flags.DEFERRED
-import scala.tools.nsc.util.{BatchSourceFile, SourceFile}
-
-class SemanticTokens(val compiler: Global) {
- import compiler._
- object walker extends symtab.SymbolWalker {
- lazy val global : compiler.type = compiler
- }
-
- abstract class Kind {}
- object OBJECT extends Kind
- object CLASS extends Kind
- object TRAIT extends Kind
- object DEF extends Kind
- object VAL extends Kind
- object VAR extends Kind
- object ARG extends Kind
- object TPARAM extends Kind
-
- type AnyClass = Class[_]
-
- // static constants here
-
- abstract class Token {
- def length: Int
- def prev: HasNext
- def next: HasPrev
- }
-
- def eatKeyword(source: BatchSourceFile, pos: Int, keywords: List[String]) : Int = {
- if (keywords.isEmpty)
- pos
- else if (pos == source.length)
- -1
- else if (source.beginsWith(pos, " "))
- eatKeywords(source, pos + 1)
- else if (source.beginsWith(pos, keywords.head + " "))
- eatKeywords(source, pos + keywords.head.length + 1)
- else
- eatKeyword(source, pos, keywords.tail)
- }
-
- def eatKeywords(source: BatchSourceFile, pos: Int): Int = {
- val keywords =
- "package" :: "val" :: "var" :: "def" :: "class" :: "trait" :: "override" :: "case" ::
- "object" :: "sealed" :: "private" :: "protected" :: Nil
- if (pos != -1) eatKeyword(source, pos, keywords)
- else pos
- }
-
- trait HasNext extends Token {
- var next0: HasPrev = _
- def next = next0
- }
-
- trait HasPrev extends Token {
- var prev0: HasNext = _
- def prev = prev0
- }
-
- abstract class Actual extends HasNext with HasPrev {
- def convertToGap: (Int, Actual) = {
- val nextGap = next.isInstanceOf[Gap]
- val prevGap = prev.isInstanceOf[Gap]
-
- if (prevGap) {
- val ret = prev.length
- val gap = prev.asInstanceOf[Gap]
- gap.setLength(gap.length + length)
- if (nextGap) {
- gap.setLength(gap.length + next.length)
- gap.next0 = next.next
- next.next.prev0 = gap
- } else {
- gap.next0 = next
- next.prev0 = gap
- }
- (ret, gap)
- }
- else if (nextGap) {
- val gap = next.asInstanceOf[Gap]
- gap.setLength(gap.length + length)
- gap.prev0 = prev
- prev.next0 = gap
- (0, gap)
- }
- else {
- prev.next0 = next
- next.prev0 = prev
- val gap = new Gap(prev)
- gap.setLength(length)
- (0, gap)
- }
-
- }
- def insert(prev1: HasNext) {
- next0 = prev1.next
- prev0 = prev1
- prev0.next0 = this
- next0.prev0 = this
- }
-
- } // Actual
-
- final class Gap extends Actual {
- def this(prev1: HasNext) = {
- this()
- insert(prev1)
- }
- override def toString() = "gap-" + length
-
- var length0: Int = -1
- def length: Int = length0
- def setLength(length1: Int) = length0 = length1
-
- // already gap
- override def convertToGap: (Int, Actual) = (0, this)
- }
-
- def Process(unit: CompilationUnit) = new Process(unit)
- class Process(val unit: CompilationUnit) {
- private var doLog = true
- def source = unit.source
-
- def dbg(tree: Tree) = {
- def treePos: Position = if (tree ne null) tree.pos else NoPosition;
- (
- "TREE=" + tree +
- (if (tree ne null) (" CLASS=" + tree.getClass()) else "") +
- " SYM=" + tree.symbol +
- " POS=" +
- treePos.dbgString
- )}
-
- val symbols = new HashMap[Symbol, Info]
-
- class Info(val symbol: Symbol) {
- var defined : Def = _
- val uses = new HashSet[Use]
- symbols.update(symbol, this)
- }
-
- def info(symbol: Symbol): Info =
- if (symbols.contains(symbol)) symbols(symbol)
- else new Info(symbol)
-
- abstract class Semantic(val symbol: Symbol) extends Actual {
- val name = symbol.name.decode.toString.trim
- assert(symbol != NoSymbol)
- def myOuter = Process.this
-
- def tpe: Type = symbol.tpe
-
- def length = name.length()
- def info: Info = if (symbols.contains(symbol)) symbols(symbol) else new Info(symbol)
-
- def kind = {
- val term0 = symbol
- if (false) null
- else if (term0.isVariable) VAR
- else if (term0.isValueParameter) ARG
- else if (term0.isMethod) DEF
- else if (term0.isClass) CLASS
- else if (term0.isModule) OBJECT
- else if (term0.isValue) VAL
- else if (term0.isTypeParameter) TPARAM
- else if (term0.isType ) TPARAM
- else {
- // Console.err.println("UNRECOGNIZED SYMBOL: " + term0 + " " + name);
- null
- }
- }
- }
-
- class Def(symbol0: Symbol) extends Semantic(symbol0) {
- info.defined = this
- override def toString() = "def-" + name + "-" + symbol.getClass()
- }
- class Use(symbol0: Symbol, tpe0: Type) extends Semantic(symbol0) {
- info.uses += this
-
- override def tpe : Type = if (tpe0 ne null) tpe0 else super.tpe;
- override def toString() = "use-" + name + "-" + symbol.getClass();
- }
- val list = new TokenList
-
- //build(unit.body)
- val map = new scala.collection.mutable.LinkedHashMap[Int,Symbol]
- map.clear // populate the map.
- class visitor extends walker.Visitor {
- def contains(pos : Position) = map.contains(pos.point)
- def apply(pos : Position) = map(pos.point)
- def update(pos : Position, sym : Symbol) : Unit = if (pos.isDefined) {
- val offset = pos.point
- map(offset) = sym
- val isDef = pos.point == sym.pos.point
- list.put(offset, (if (isDef) new Def(sym) else new Use(sym, NoType)));
- }
- }
- walker.walk(unit.body, new visitor)(offset => unit.source.identifier(offset, compiler))
-
-
- // ok start building....
- def build[T <: Tree](trees: List[T]) {
- for (tree <- trees) build(tree)
- }
-
- def build(tree0: Tree): Unit = try {
- /* if (tree0.pos != NoPosition) */ tree0 match {
- case tree: ImplDef =>
- val pos = eatKeywords(unit.source.asInstanceOf[BatchSourceFile], tree.pos.point)
- if (pos == -1) {
-
- } else buildDef(tree.symbol, eatKeywords(unit.source.asInstanceOf[BatchSourceFile], tree.pos.point));
- tree match {
- case cdef: ClassDef => build(cdef.tparams)
- case _ => ;
- }
- build(tree.impl.parents)
- build(tree.impl.body)
- case tree: ValOrDefDef =>
- if (!tree.symbol.hasAccessorFlag || tree.symbol.isDeferred) {
- // MO: I added !tree.symbol.hasFlag(DEFERRED) in a refactoring where
- // getters now can be abstract whereas before they could not.
- // Adding the condition thus keeps the old behavior.
- // todo: review whether this is correct, or whether abstract getters should be included.
- {
- val pos : Int = if (tree.name.toString().equals("<init>")) -1 else
- eatKeywords(unit.source.asInstanceOf[BatchSourceFile], tree.pos.point);
- if (false) Console.err.println("VALDEF: tree=" + tree + " sym=" + tree.symbol + " pos0=" +
- tree.symbol.pos + " alias=" + tree.symbol.alias + " pos1=" +
- pos + " pos2=" + tree.pos.dbgString + " " + tree.symbol.isSynthetic);
-
- if (pos != -1 && !tree.isSynthetic)
- buildDef(tree.symbol, pos);
- }
-
- if (tree.isInstanceOf[DefDef]) {
- val ddef = tree.asInstanceOf[DefDef];
- build(ddef.tparams);
-
- for (l0 <- ddef.vparamss; arg <- l0) {
- val pos0 : Int = if (!unit.source.beginsWith(arg.pos.point, "val ")) arg.pos.point;
- else unit.source.skipWhitespace(arg.pos.point + ("val ").length());
- buildDef(arg.symbol, pos0);
- build(arg.tpt);
- }
- }
- //TPT=scala.Iterator[DocGenerator.this.compiler0.CompilationUnit] 260 class scala.tools.nsc.ast.Trees$TypeTree scala.Iterator[DocGenerator.this.compiler0.CompilationUnit] class scala.tools.nsc.symtab.Types$$anon$5
- if ((tree.tpt eq null) || (tree.tpt.tpe eq null)) {
- //Console.err.println("BAD: " + tree.tpt + " in " + tree);
- } else {
- //Console.err.println("TPT=" + tree.tpt + " " + tree.tpt.pos + " " + tree.tpt.getClass() + " " + tree.tpt.tpe + " " + tree.tpt.tpe.getClass() + " " + tree.tpt.tpe.getClass().getSuperclass());
- build(tree.tpt);
- }
- //Console.err.println("RHS: " + tree.rhs + " " + tree.rhs.getClass() + " " + tree.rhs.getClass().getSuperclass());
- build(tree.rhs);
- }
- case tree: PackageDef =>
- //Console.err.println("PACKAGE: " + tree.name);
- if (false) {
- val pos = eatKeywords(unit.source.asInstanceOf[BatchSourceFile], tree.pos.pointOrElse(-1))
- if (pos != -1)
- buildDef(tree.symbol, pos)
- }
- build(tree.stats)
- case tree: Function =>
- for (arg <- tree.vparams if arg.pos != NoPosition) {
- val name = arg.name.toString().trim()
- val pos: Int =
- if (unit.source.beginsWith(arg.pos.pointOrElse(-1), "val "))
- unit.source.skipWhitespace(arg.pos.pointOrElse(-1) + ("val ").length())
- else if (unit.source.asInstanceOf[BatchSourceFile].content(arg.pos.point) == ':') {
- var posx : Int = arg.pos.point
- while (unit.source.asInstanceOf[BatchSourceFile].content(posx - 1).isWhitespace) posx = posx - 1
- posx - name.length()
- } else arg.pos.point
- buildDef(arg.symbol, pos)
- build(arg.tpt)
- }
- build(tree.body)
- case tree : TypeTree =>
- val treex = tree
- val tree1 = if (tree.original ne null) tree.original else tree
- def classes(clazz: AnyClass): List[AnyClass] =
- if (clazz eq null) Nil
- else clazz :: classes(clazz.getSuperclass())
- if (tree.original eq null) {
- if (false) Console.err.println("NO_ORIGINAL: " + tree + " " + tree.tpe + " " + classes(tree.tpe.getClass()));
- }
- if (tree.tpe ne null) buildT(tree1, tree.tpe);
- def buildT( tree : Tree, tpe : Type) : Unit = if (tree.pos != NoPosition) tpe match {
- case tpe0 : TypeRef => tree match {
- case apt : AppliedTypeTree =>
- buildUse(tpe.typeSymbol, apt.tpt.pos.pointOrElse(-1), tpe0);
- //Console.err.println("APT: " + treex + " vs. " + treex.original);
- //Console.err.println("APT: " + treex.pos + " vs. " + treex.original.pos + " " + unit.source.dbg(treex.original.pos));
- //Console.err.println("APT: " + apt.tpt + " sym0=" + apt.tpt.symbol + " sym1=" + tpe0.sym + " apt.args=" + apt.args + " tpe0.args=" + tpe0.args);
-
- buildTs (apt.args, tpe0.args);
- case ident : Ident => buildUse(tpe0.sym, ident.pos.pointOrElse(-1), tpe0);
- case select : Select =>
- if (select.symbol == NoSymbol)
- try {
- // build(select);
- buildUse(tpe0.typeSymbol, selectPos(select), tpe0);
- //Console.err.println("QUALIFIER: " + select.qualifier + " " + unit.source.dbg(select.qualifier.pos) + " " + tpe0.prefix + " " + tpe0.prefix.getClass() + " " + tpe0.prefix.getClass().getSuperclass() +" " + tpe0.prefix.widen + " " + tpe0.prefix.toLongString);
- buildT(select.qualifier, tpe0.prefix);
- } catch {
- case e : Error =>
- Console.err.println("BUILD_SELECT: " + select + " @ " + tpe0 + " " + (select.pos).dbgString);
- throw e;
- }
- case tpt : TypeTree =>
- if (tpt.symbol ne null) {
- Console.err.println("SYM0 " + tpt.symbol + " " + (tpt.pos).dbgString);
- buildUse(tpt.symbol, tpt.pos.pointOrElse(-1), tpe0);
- } else if (tpe0.typeSymbol ne null) {
- //Console.err.println("TYPE_SYM1 " + tpe0.symbol + " " + unit.source.dbg(tpt.pos));
- buildUse(tpe0.typeSymbol, tpt.pos.pointOrElse(-1), tpe0);
- } else {
- Console.err.println("UNKNOWN TPT0: " + (tpt.pos).dbgString + " tpt=" + tpt + " " + tpt.symbol + " tpe0="+ tpe0 + " " + tpe0.typeSymbol + " tpe0.args=" + tpe0.args);
- }
- case sft : SelectFromTypeTree =>
- build(sft.qualifier); // XXX: broken
- if (false) Console.err.println("SFTT: " + sft + " sym=" + sft.symbol + " name=" + sft.name + " qual=" + sft.qualifier + " qual.sym=" +
- sft.qualifier.symbol +
- " qual.pos=" + (sft.qualifier.pos).dbgString + " symbol=" + sft.symbol + " type=" + tpe0 +
- " type.sym=" + tpe0.typeSymbol);
- case _ => Console.err.println("UNKNOWN TPT2: " + tree + " vs. " + tpe0 + " " + tree.getClass() + " " + (tree.pos).dbgString);
- }
- case tpe0 : MethodType => tree match {
- case tpt: TypeTree =>
- if (tpt.original ne null) buildT(tpt.original, tpe);
- else {
- Console.err.println("UNKNOWN TPT3: " + tree + " vs. " + tpe0 + " " + (tree.pos).dbgString);
- }
- case ident : Ident => buildT(ident, tpe0.resultType);
- case select : Select => buildT(select, tpe0.resultType);
- case _ => Console.err.println("UNKNOWN TPE: " + tree + " vs. " + tpe0 + " " + tree.getClass());
- }
- case tpe0 : RefinedType => tree match {
- case cpt : CompoundTypeTree =>
- buildTs(cpt.templ.parents, tpe0.parents);
-
- case _ : TypeTree =>
- // Console.err.println("UNKNOWN TPE13: " + dbg(tree) + " tpe0=" + tpe0 + " " + tpe0.parents);
- case _ =>
- if (false) Console.err.println("UNKNOWN TPE5: " + dbg(tree) + " tpe0=" + tpe0 + " " + tpe0.parents);
- }
- case tpe0 : ThisType => tree match {
- case stt : SingletonTypeTree => stt.ref match {
- case ths : This => build(ths);
-
- case _ => Console.err.println("UNKNOWN TPE11: " + tpe0 + " " + stt + " " + stt.ref + " " + stt.ref.getClass() + " " + (tree.pos).dbgString);
- }
- case tt : This =>
- case _ : Ident =>
- case _ : Select =>
- case tt : TypeTree =>
- if (false) Console.err.println("UNKNOWN TPE12: " + tpe0 + " " + tree + " " + tree.getClass() + " " + (tree.pos).dbgString);
- case _ =>
- if (false) Console.err.println("UNKNOWN TPE10: " + tpe0 + " " + tree + " " + tree.getClass() + " " + (tree.pos).dbgString);
- }
- case tpe0 : SingleType => tree match {
- case ident : Ident => buildUse(tpe0.sym, ident.pos.pointOrElse(-1), tpe0);
- case select : Select =>
- buildUse(tpe0.termSymbol, selectPos(select), tpe0);
- //Console.err.println("QUALIFIER-0: " + select.qualifier + " " + unit.source.dbg(select.qualifier.pos) + " " + tpe0.prefix + " " + tpe0.prefix.getClass() + " " + tpe0.prefix.getClass().getSuperclass() +" " + tpe0.prefix.widen + " " + tpe0.prefix.toLongString);
- buildT(select.qualifier, tpe0.prefix);
-
- case _ =>
- if (false) Console.err.println("UNKNOWN TPE8: " + tree + " " + (tree.pos).dbgString + " TPE=" + tpe0 + " PRE=" + tpe0.pre + " SYM=" + tpe0.sym);
-
- }
- case ctype : ConstantType =>
- case ErrorType =>
- case _ => {
- if (false) Console.err.println("UNKNOWN TPE4: " + tree + " " + tpe + " " + tpe.getClass() + " " + (tree.pos).dbgString);
- }
- };
- def buildTs(trees : List[Tree], types : List[Type]): Unit = if (!trees.isEmpty && !types.isEmpty) {
- buildT (trees.head, types.head);
- buildTs(trees.tail, types.tail);
- } else if (trees.isEmpty != types.isEmpty) {
- if (false && doLog) {
- Console.println("" + treex + " vs. " + treex.original);
- if (treex.original ne null)
- Console.println("" + treex.tpe + " vs. " + treex.original.tpe);
- logError("Tree vs. Type mismatch: " + trees + " " + types + " " + (tree.pos).dbgString, null);
- doLog = false;
- }
- };
- case tree: Bind =>
- buildDef(tree.symbol, tree.pos.pointOrElse(-1))
- build(tree.body)
- case tree: Ident =>
- buildUse(tree.symbol, tree.pos.pointOrElse(-1), tree.tpe)
- case tree: Select =>
- try {
- build(tree.qualifier)
- } catch {
- case e : Error => Console.err.println("SELECTQ: " + tree + " " + tree.qualifier + " " + (tree.qualifier.pos).dbgString); throw e;
- }
- try {
- if (tree.pos.isDefined && tree.pos.point >= unit.source.length) {
- if (false) Console.err.println("BAD_SELECT_QUALIFIER " + tree + " @ " + (tree.pos).dbgString);
-
- } else {
- //Console.err.println("SELECT-0: " + tree.symbol + " " + tree.pos.dbgString + " " + (tree.pos - selectPos(tree)));
- buildUse(tree.symbol, selectPos(tree), tree.tpe);
- }
- } catch {
- case e : Error => Console.err.println("SELECTU: " + tree + " " + tree.symbol + " " + tree.pos.dbgString); throw e;
- }
- case tree: TypeApply =>
- //Console.err.println("TYPE_APPLY: " + tree + " " + tree.pos.dbgString);
- if (!tree.args.isEmpty) {
- //Console.err.println("ARGS: " + unit.source.dbg(tree.args0.head.pos));
- }
- build(tree.fun)
- build(tree.args)
- case tree: Apply =>
-
- build(tree.fun)
- build(tree.args)
- case tree: GenericApply =>
-
- build(tree.fun)
- build(tree.args)
- case tree: Typed =>
- build(tree.expr)
- build(tree.tpt)
- case tree: Block =>
- if (false) {
- if (!tree.stats.isEmpty)
- Console.err.println("BLOCKS: " + tree.stats.head + " " + tree.stats.head.getClass());
- Console.err.println("BLOCKE: " + tree.expr + " " + tree.expr.getClass())
- }
- build(tree.stats)
- build(tree.expr)
- case tree: CaseDef =>
- build(tree.pat)
- build(tree.guard)
- build(tree.body)
- case tree : Assign => build(tree.lhs); build(tree.rhs);
- case tree : If => build(tree.cond); build(tree.thenp); build(tree.elsep);
- case tree : New =>
- //Console.err.println("NEW: " + tree.tpt + " " + tree.tpt.getClass());
- build(tree.tpt);
- case tree : Match => build(tree.selector); build(tree.cases);
- case tree : Return => build(tree.expr);
- case tree : LabelDef => build(tree.rhs);
- case tree : Throw => build(tree.expr);
- case tree : Try => build(tree.block); build(tree.catches); build(tree.finalizer);
- case tree : Alternative => build(tree.trees);
- case tree : This =>
-
- if (tree.symbol ne null) buildUse(tree.symbol, tree.pos.pointOrElse(-1), tree.tpe);
- //Thread.dumpStack();
- case tree : TypeDef =>
- //Console.err.println("ALIAS: " + tree);
- build(tree.rhs); build(tree.tparams); buildDef(tree.symbol, tree.pos.pointOrElse(-1));
- case tree : DocDef => build(tree.definition);
- case tree: Import => build(tree.expr)
- case tree: AppliedTypeTree => ;
- case tree: Annotated => ;
- case tree: SingletonTypeTree => ;
- case tree: Super => ;
- case tree: Literal => ;
- case EmptyTree => ;
- case _ => ;
- Console.err.println("BAIL: " + (tree0.pos) + " " + tree0 + " " + tree0.getClass());
- }
- } catch {
- case t: Throwable =>
- logError("Error occured at " + (tree0.pos), t)
- }
-
- def buildUse(term: Symbol, pos: Int, tpe: Type) = buildSym(term, pos, false, tpe)
- def buildDef(term: Symbol, pos: Int) = buildSym(term, pos, true, null)
-
- def buildSym(term: Symbol, pos: Int, isDef: Boolean, tpe: Type): Unit =
- if (term.hasAccessorFlag)
- buildSym(analyzer.underlying(term), pos, isDef, tpe)
- else if (pos == -1) {
- //Console.err.println("NOPOS: " + term)
- //Thread.dumpStack()
- }
- else if (term != NoSymbol) {
- val name = term.name.decode.toString.trim
- val buf = unit.source.asInstanceOf[BatchSourceFile].content
- val cs = name.toChars
- var idx = 0
- if (cs.length + pos > buf.length) return
- else while (idx < cs.length) {
- if (buf(pos + idx) != cs(idx)) {
- //Console.err.println("MISMATCH: " + name + "[" + idx + "] " + unit.source.dbg(pos));
- //Thread.dumpStack();
- return;
- }
- else idx = idx + 1;
- }
- if (cs.length + pos + 1 < buf.length) {
- if (isJavaIdentifierPart(buf(pos + cs.length))) {
- //Console.err.println("MISMATCH: " + name + "[last] " + unit.source.dbg(pos));
- return;
- }
- }
- try {
- list.put(pos, (if (isDef) new Def(term) else new Use(term, tpe)));
- } catch {
- case e : Error => e.printStackTrace();
- }
- }
-
- def selectPos(tree: Select): Int = if (tree.pos == NoPosition) -1 else {
- val buf = unit.source.asInstanceOf[BatchSourceFile].content
- if (tree.pos.point >= buf.length) {
- if (false) {
- Console.err.println("" + tree + "@" + tree.pos + " not in " +
- unit.source.file.name + "[" + buf.length + "]");
- Thread.dumpStack()
- abort()
- }
- return 0
- }
-
- val pos : Int =
- if (buf(tree.pos.point) != '.') tree.pos.point
- else {
- def f(x : Int) : Int = {
- if (buf(x).isWhitespace) f(x + 1)
- else x
- }
- f(tree.pos.point + 1)
- }
- pos
- };
-
- class TokenList {
- object begin extends HasNext {
- def prev = this
- def length = 0
- }
- object end extends HasPrev {
- def next = this
- def length = 0
- }
- // initialize
- begin.next0 = end
- end.prev0 = begin
-
- def tokenAt(offset: Int) = {
- cursor.seek(offset)
- if (cursor.token.isInstanceOf[Semantic]) cursor.token.asInstanceOf[Semantic]
- else null
- }
-
- def put(offset: Int, tok: Actual): Unit = tok match {
- case tok0: Semantic => put(offset, tok0)
- case gap: Gap =>
- }
-
- def put(offset: Int, tok: Semantic) {
- cursor.seek(offset);
- if (cursor.token == end) {
- assert(offset >= cursor.offset);
- if (offset > cursor.offset) {
- // add a gap.
- val gap = new Gap(end.prev);
- gap.setLength(offset - cursor.offset);
- cursor.offset = offset;
- }
- // append.
- tok.insert(end.prev);
- cursor.offset = cursor.offset + tok.length;
- } else if (!cursor.token.isInstanceOf[Gap]) {
- val sem = cursor.token.asInstanceOf[Semantic];
- if (sem.symbol == tok.symbol) return;
- if (sem.symbol != tok.symbol &&
- sem.symbol.getClass() == tok.symbol.getClass() &&
- sem.symbol.pos == tok.symbol.pos) return;
- } else {
- val gap = cursor.token.asInstanceOf[Gap];
- if (!(offset - cursor.offset + tok.length <= gap.length)) {
- Console.err.println("LIST =" + this);
- Console.err.println("OFFSET=" + offset + " " + tok + " " + tok.length);
- Console.err.println(" " + cursor.offset + " " + gap.length);
- gap.length0 = offset - cursor.offset + tok.length
- //abort();
- }
- if (offset == cursor.offset) {
- // replace or prepend
- tok.prev0 = gap.prev0;
- if (tok.length == gap.length) { // replace gap
- tok.next0 = gap.next0;
- } else {
- gap.setLength(gap.length - tok.length);
- tok.next0 = gap;
- }
- tok.next0.prev0 = tok;
- tok.prev0.next0 = tok;
- cursor.token = tok;
- } else {
- // append
- val diff = (cursor.offset + gap.length) - (offset + tok.length);
-
- gap.setLength(gap.length - tok.length - diff);
- tok.prev0 = gap;
- tok.next0 = gap.next;
- tok.next0.prev0 = tok;
- tok.prev0.next0 = tok;
- if (diff != 0) {
- val gap0 = new Gap(tok);
- gap0.setLength(diff);
- }
- }
- }
- }
-
- override def toString(): String = {
- var node = begin.next
- var str = ""
- while (node != end) {
- str = str + " " + node
- node = node.next
- }
- str
- }
-
- object cursor {
- var token: Token = end
- var offset: Int = 0
-
- def next(): Unit = if (token == end) end else {
- offset = offset + token.length
- token = token.next
- }
- def prev(): Unit = if (token.prev == begin) token else {
- offset = offset - token.prev.length
- token = token.prev
- }
- def seek(soffset: Int): Unit = if (soffset == 0) {
- token = begin.next
- offset = 0
- } else {
- assert(soffset > 0)
- while (offset > soffset) prev;
- while (offset + token.length <= soffset && token != end) {
- val len0 = offset;
- next;
- }
- }
- def convertToGap = if (token.isInstanceOf[Actual]) {
- val ret = token.asInstanceOf[Actual].convertToGap;
- offset = offset - ret._1;
- token = ret._2;
- }
- }
-
- // add or delete characters
- def adjust(offset: Int, /* where */
- length: Int, /* how many characters are modified */
- to : Int /* length of new string */) = {
- cursor.seek(offset)
- if (cursor.token != end) {
- cursor.convertToGap
- while (cursor.offset + cursor.token.length < offset + length && cursor.token.next != end) {
- val save = cursor.offset
- cursor.next
- cursor.convertToGap
- assert(cursor.offset == save)
- }
- if (length != to && cursor.token != end) {
- val diff = to - length;
- val gap = cursor.token.asInstanceOf[Gap];
- gap.setLength(gap.length + diff);
- };
- }
- }
-
- } // TokenList
-
- }
-}
-
diff --git a/src/attic/scala/tools/nsc/models/Signatures.scala b/src/attic/scala/tools/nsc/models/Signatures.scala
deleted file mode 100644
index a5dfce6c56..0000000000
--- a/src/attic/scala/tools/nsc/models/Signatures.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package models
-
-import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.{Global => Compiler}
-import scala.tools.nsc.symtab.{Flags, Names}
-import scala.tools.nsc.util.{ Position, SourceFile }
-
-/** This class ...
- *
- * @author Sean McDirmid
- * @version 1.0
- */
-class Signatures(val compiler: Compiler) {
- import compiler._
-
- class Signature(val name: String, val children: List[Signature]) {
- def asString: String = name + "[" + asString0(children) + "]"
- }
-
- def sort(sigs: List[Signature]) = sigs sortBy (_.name) reverse
-
- def asString0(sigs: List[Signature]): String =
- sort(sigs) map (_.asString) mkString
-
- def signature(unit: CompilationUnit): String =
- asString0(signature(unit.body, Nil))
-
- def signature(trees: List[Tree]): List[Signature] = {
- var ret : List[Signature] = Nil
- for (tree <- trees) ret = signature(tree, ret)
- ret
- }
-
- /**
- * @param tree0 ...
- * @param rest ...
- * @return ...
- */
- def signature(tree0: Tree, rest: List[Signature]): List[Signature] = tree0 match {
- case tree: MemberDef => if (!tree.mods.isPrivate) {
- val name = "" + tree.name + "::" +
- (tree.mods &~ Flags.SYNTHETIC)
-
- val children: List[Signature] = tree match {
- case impl: ImplDef
- //if (!impl.name.toString.contains("$anonfun$")) =>
- if (impl.name.pos("$anonfun$") == name.length) =>
- val supers = new Signature("$$supers", signature(impl.impl.parents))
- val body = new Signature("$$body", signature(impl.impl.body))
- val ret = supers :: body :: Nil
- impl match {
- case cdef: ClassDef =>
- new Signature("$$tparams", signature(cdef.tparams)) :: ret
- case _ =>
- ret
- }
- case vdef: ValOrDefDef =>
- val ret = signature(vdef.tpt, Nil)
- vdef match {
- case ddef : DefDef =>
- val tparams = new Signature("$$tparams", signature(ddef.tparams))
- var vparamss : List[Signature] = Nil
- for (list <- ddef.vparamss)
- vparamss = signature(list) ::: vparamss
- new Signature("$$ret", ret) :: tparams :: vparamss
- case _ =>
- ret
- }
- case pdef: PackageDef => signature(pdef.stats)
- case _ => Nil
- }
- new Signature(name, children) :: rest
-
- } else rest
- case tree: TypeTree => new Signature("" + tree.tpe, Nil) :: rest
- case _ => rest
- }
-}
diff --git a/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala b/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala
deleted file mode 100644
index 01b5c76cdc..0000000000
--- a/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala
+++ /dev/null
@@ -1,252 +0,0 @@
-package scala.tools.nsc
-package symtab
-
-trait SymbolWalker {
- val global : Global
- import global._
- import scala.collection.mutable.LinkedHashSet
- trait Visitor {
- def update(pos : Position, sym : Symbol) : Unit
- def contains(pos : Position) : Boolean
- def apply(pos : Position) : Symbol
- def putDef(sym : Symbol, pos : Position) : Unit = ()
- }
- /*
- implicit def map2use(map : Map[Position,Symbol]) = new Visitor {
- def update(pos : Position, sym : Symbol) : Unit = map.update(pos, sym)
- def contains(pos : Position) : Boolean = map.contains(pos)
- def apply(pos : Position) : Symbol = map.apply(pos)
- }
- */
- private def validSym(t: Tree) = t.symbol != NoSymbol && t.symbol != null
- private def validSym(tp: Type) = tp != null && tp.typeSymbol != NoSymbol && tp.typeSymbol != null
- private def notNull(tp: Type) = tp.typeSymbol != null
- private def isNoSymbol(t: Tree) = t.symbol eq NoSymbol
-
- def walk(tree: Tree, visitor : Visitor)(fid : (util.Position) => Option[String]) : Unit = {
- val visited = new LinkedHashSet[Tree]
- def f(t : Tree) : Unit = {
- if (visited.add(t)) return
-
- def fs(l: List[Tree]) = l foreach f
- def fss(l: List[List[Tree]]) = l foreach fs
-
- val sym = (t, t.tpe) match {
- case (Super(_,_),SuperType(_,supertp)) if validSym(supertp) => supertp.typeSymbol
- case _ if validSym(t) => t.symbol
- case (t: TypeTree, tp) if validSym(tp) => tp.typeSymbol
- case (t: TypeTree, tp) if validSym(tp.resultType) => tp.resultType.typeSymbol
- case (t, tpe: Type) if isNoSymbol(t) && tpe.termSymbol != null =>
- if (t.isTerm) tpe.termSymbol
- else t.tpe match {
- case x: TypeRef => x.sym // XXX: looks like a bug
- case _ => tpe.typeSymbol
- }
- case _ => NoSymbol
- }
-
- if (sym != null && sym != NoSymbol /* && !sym.hasFlag(SYNTHETIC) */) {
- var id = fid(t.pos)
- val doAdd = if (id.isDefined) {
- if (id.get.charAt(0) == '`') id = Some(id.get.substring(1, id.get.length - 1))
- val name = sym.name.decode.trim
- if ((name startsWith id.get) || (id.get startsWith name)) true
- else {
- false
- }
- } else false
- if (doAdd) {
-
- if (!visitor.contains(t.pos)) {
- visitor(t.pos) = sym
- } else {
- val existing = visitor(t.pos)
- if (sym.sourceFile != existing.sourceFile || sym.pos != existing.pos) {
- (sym,existing) match {
- case (sym,existing) if sym.pos == existing.pos =>
- case (sym : TypeSymbol ,_ : ClassSymbol) => visitor(t.pos) = sym
- case (_ : ClassSymbol,_ : TypeSymbol) => // nothing
- case _ if sym.isModule && existing.isValue => // nothing
- case _ if sym.isClass && existing.isMethod => // nothing
- case _ =>
- assert(true)
- }
- }
- }}
- }
- t match {
- case t : DefTree if t.symbol != NoSymbol =>
- if (t.pos != NoPosition)
- visitor.putDef(t.symbol, t.pos)
- if (t.symbol.isClass) {
- val factory = NoSymbol // XXX: t.symbol.caseFactory
- if (factory != NoSymbol) {
- visitor.putDef(factory, t.pos)
- }
- }
- case t : TypeBoundsTree => f(t.lo); f(t.hi)
- case t : TypeTree if t.original != null =>
- def h(original : Tree, tpe : Type): Unit = try {
- if (original.tpe == null)
- original.tpe = tpe
- (original) match {
- case (AppliedTypeTree(_,trees)) if tpe.isInstanceOf[TypeRef] =>
- val types = tpe.asInstanceOf[TypeRef].args
- trees.zip(types).foreach{
- case (tree,tpe) => assert(tree != null && tpe != null); h(tree, tpe)
- }
- case _ =>
- }
- }
- if (t.original.tpe == null) {
- val dup = t.original.duplicate
- h(dup,t.tpe)
- f(dup)
- } else f(t.original)
- ()
- case _ =>
- }
- (t) match {
- case (t : MemberDef) if t.symbol != null && t.symbol != NoSymbol =>
- val annotated = if (sym.isModule) sym.moduleClass else sym
- val i = t.mods.annotations.iterator
- val j = annotated.annotations.iterator
- while (i.hasNext && j.hasNext) {
- val tree = i.next
- val ainfo = j.next
- val sym = ainfo.atp.typeSymbol
- tree.setType(ainfo.atp)
- tree.setSymbol(sym)
- f(tree)
- }
-
- case _ =>
- }
- t match {
- case tree: ImplDef =>
- fs(tree.impl.parents); f(tree.impl.self); fs(tree.impl.body)
- tree match {
- case tree : ClassDef => fs(tree.tparams)
- case _ =>
- }
- case tree: PackageDef => fs(tree.stats)
- case tree: ValOrDefDef =>
- f(tree.rhs);
- if (tree.tpt != null) {
- f(tree.tpt)
- }
- tree match {
- case tree : DefDef => fs(tree.tparams); fss(tree.vparamss)
- case _ =>
- }
- case tree: Function => fs(tree.vparams); f(tree.body)
- case tree : Bind => f(tree.body)
- case tree : Select =>
- val qualifier = if (tree.tpe != null && tree.qualifier.tpe == null) {
- val pre = tree.tpe.prefix
- val qualifier = tree.qualifier.duplicate
- qualifier.tpe = pre
- qualifier
- } else tree.qualifier
-
- f(qualifier)
- case tree : Annotated => f(tree.annot); f(tree.arg)
- case tree : GenericApply => f(tree.fun); fs(tree.args)
- case tree : UnApply => f(tree.fun); fs(tree.args)
- case tree : AppliedTypeTree =>
- if (tree.tpe != null) {
- val i = tree.tpe.typeArgs.iterator
- val j = tree.args.iterator
- while (i.hasNext && j.hasNext) {
- val tpe = i.next
- val arg = j.next
- if (arg.tpe == null) {
- arg.tpe = tpe
- }
- }
- if (tree.tpt.tpe == null) {
- tree.tpt.tpe = tree.tpe
- }
-
- }
- f(tree.tpt); fs(tree.args)
-
- case tree : ExistentialTypeTree=>
- if (tree.tpt.tpe == null) {
- tree.tpt.tpe = tree.tpe
- }
-
- f(tree.tpt)
- fs(tree.whereClauses)
- case tree : SingletonTypeTree =>
- if (tree.ref.tpe == null) {
- val dup = tree.ref.duplicate
- dup.tpe = tree.tpe
- f(dup)
- } else f(tree.ref)
- case tree : CompoundTypeTree =>
- if (tree.tpe != null && tree.tpe.typeSymbol != null && tree.tpe.typeSymbol.isRefinementClass) tree.tpe.typeSymbol.info match {
- case tpe : RefinedType =>
- tpe.parents.zip(tree.templ.parents).foreach{
- case (tpe,tree) =>
- if (tree.hasSymbol && (tree.symbol == NoSymbol || tree.symbol == null)) {
- tree.symbol = tpe.typeSymbol
- }
- }
-
- case _ =>
- }
-
- f(tree.templ)
- case tree : Template => fs(tree.parents); f(tree.self); fs(tree.body)
- case tree : SelectFromTypeTree => {
- if (tree.qualifier.tpe == null) tree.tpe match {
- case tpe : TypeRef =>
- // give it a type!
- tree.qualifier.tpe = tpe.prefix
- case _ =>
- // tree.tpe.pre
- }
- f(tree.qualifier)
- }
- case tree : Literal =>
- /*
- if (tree.tpe != null && tree.tpe.typeSymbol == definitions.ClassClass) {
- // nothing we can do without original tree.
- }
- */
-
- case tree : Typed => f(tree.expr); f(tree.tpt)
- case tree : Block => fs(tree.stats); f(tree.expr)
- case tree: CaseDef => f(tree.pat);f(tree.guard);f(tree.body)
- case tree : Assign => f(tree.lhs); f(tree.rhs);
- case tree : If => f(tree.cond); f(tree.thenp); f(tree.elsep);
- case tree : New => f(tree.tpt);
- case tree : Match => f(tree.selector); fs(tree.cases);
- case tree : Return => f(tree.expr);
- case tree : LabelDef => f(tree.rhs);
- case tree : Throw => f(tree.expr);
- case tree : Try => f(tree.block); fs(tree.catches); f(tree.finalizer);
- case tree : Alternative => fs(tree.trees);
- case tree : TypeDef =>
- (tree.tpe,sym) match {
- case (null,sym : TypeSymbol) if (sym.rawInfo.isComplete) =>
- if (tree.tparams.isEmpty) {
- if (tree.rhs.tpe == null) tree.rhs.tpe = sym.info
- f(tree.rhs)
- } else {
- val tree0 = AppliedTypeTree(tree.rhs, tree.tparams)
- tree0.tpe = sym.info
- f(tree0)
- }
- case _ => f(tree.rhs); fs(tree.tparams)
- }
- case tree : DocDef => f(tree.definition);
- case tree: Import => f(tree.expr)
- case _ =>
- }
- }
- f(tree)
- }
-
-}
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index 6016f6fb92..8c91128de0 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -130,7 +130,7 @@ object FunctionOne extends Function(1) {
* @param g a function A => T1
* @return a new function `f` such that `f(x) == apply(g(x))`
*/
- def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
+ @annotation.unspecialized def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
/** Composes two instances of Function1 in a new Function1, with this function applied first.
*
@@ -138,7 +138,7 @@ object FunctionOne extends Function(1) {
* @param g a function R => A
* @return a new function `f` such that `f(x) == g(apply(x))`
*/
- def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
+ @annotation.unspecialized def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
"""
}
@@ -169,19 +169,20 @@ object Function {
class Function(val i: Int) extends Group("Function") with Arity {
def descriptiveComment = ""
- def functionNTemplate = """
+ def functionNTemplate =
+"""
* In the following example, the definition of %s is a
* shorthand for the anonymous class definition %s:
*
* {{{
- * object Main extends App { %s }
+ * object Main extends App {%s}
* }}}
*
* Note that `Function1` does not define a total function, as might
* be suggested by the existence of [[scala.PartialFunction]]. The only
* distinction between `Function1` and `PartialFunction` is that the
* latter can specify inputs which it will not handle.
- """
+"""
def toStr() = "\"" + ("<function%d>" format i) + "\""
def apply() = {
@@ -195,7 +196,7 @@ class Function(val i: Int) extends Group("Function") with Arity {
* @return the result of function application.
*/
def apply({funArgs}): R
- {moreMethods}
+{moreMethods}
override def toString() = {toStr}
}}
</file>
@@ -218,15 +219,15 @@ class Function(val i: Int) extends Group("Function") with Arity {
// f(x1,x2,x3,x4,x5,x6) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)
def curryComment = {
-"""/** Creates a curried version of this function.
+""" /** Creates a curried version of this function.
*
* @return a function `f` such that `f%s == apply%s`
*/""".format(xdefs map ("(" + _ + ")") mkString, commaXs)
}
def tupleMethod = {
- def comment = """
- /** Creates a tupled version of this function: instead of %d arguments,
+ def comment =
+""" /** Creates a tupled version of this function: instead of %d arguments,
* it accepts a single [[scala.Tuple%d]] argument.
*
* @return a function `f` such that `f(%s) == f(Tuple%d%s) == apply%s`
@@ -234,14 +235,14 @@ class Function(val i: Int) extends Group("Function") with Arity {
""".format(i, i, commaXs, i, commaXs, commaXs)
def body = "case Tuple%d%s => apply%s".format(i, commaXs, commaXs)
- comment + " def tupled: Tuple%d%s => R = {\n %s\n }".format(i, invariantArgs, body)
+ comment + "\n @annotation.unspecialized def tupled: Tuple%d%s => R = {\n %s\n }".format(i, invariantArgs, body)
}
def curryMethod = {
val body = if (i < 5) shortCurry else longCurry
curryComment +
- " def curried: %s => R = {\n %s\n }\n".format(
+ "\n @annotation.unspecialized def curried: %s => R = {\n %s\n }\n".format(
targs mkString " => ", body
)
}
@@ -255,10 +256,7 @@ class Function(val i: Int) extends Group("Function") with Arity {
zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz */
object Tuple {
- val zipImports = """
-import scala.collection.{ TraversableLike => TLike, IterableLike => ILike }
-import scala.collection.generic.{ CanBuildFrom => CBF }
-"""
+ val zipImports = ""
def make(i: Int) = apply(i)()
def apply(i: Int) = i match {
@@ -284,230 +282,11 @@ object TupleTwo extends Tuple(2)
* second element is the first element of this Tuple.
*/
def swap: Tuple2[T2,T1] = Tuple2(_2, _1)
-
- @deprecated("Use `zipped` instead.", "2.9.0")
- def zip[Repr1, El1, El2, To](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => Iterable[El2],
- cbf1: CBF[Repr1, (El1, El2), To]): To = {
- zipped map ((x, y) => ((x, y)))
- }
-
- /** Wraps a tuple in a `Zipped`, which supports 2-ary generalisations of `map`, `flatMap`, `filter`, etc.
- * Note that there must be an implicit value to convert this tuple's types into a [[scala.collection.TraversableLike]]
- * or [[scala.collection.IterableLike]].
- * {{{
- * scala> val tuple = (List(1,2,3),List('a','b','c'))
- * tuple: (List[Int], List[Char]) = (List(1, 2, 3),List(a, b, c))
- *
- * scala> tuple.zipped map { (x,y) => x + ":" + y }
- * res6: List[java.lang.String] = List(1:a, 2:b, 3:c)
- * }}}
- *
- * @see Zipped
- * Note: will not terminate for infinite-sized collections.
- */
- def zipped[Repr1, El1, Repr2, El2](implicit w1: T1 => TLike[El1, Repr1], w2: T2 => ILike[El2, Repr2]): Zipped[Repr1, El1, Repr2, El2]
- = new Zipped[Repr1, El1, Repr2, El2](_1, _2)
-
- class Zipped[+Repr1, +El1, +Repr2, +El2](coll1: TLike[El1, Repr1], coll2: ILike[El2, Repr2]) { // coll2: ILike for filter
- def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- b.sizeHint(coll1)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- b += f(el1, elems2.next)
- else
- return b.result
- }
-
- b.result
- }
-
- def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- b ++= f(el1, elems2.next)
- else
- return b.result
- }
-
- b.result
- }
-
- def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext) {
- val el2 = elems2.next
- if (f(el1, el2)) {
- b1 += el1
- b2 += el2
- }
- }
- else return (b1.result, b2.result)
- }
-
- (b1.result, b2.result)
- }
-
- def exists(f: (El1, El2) => Boolean): Boolean = {
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext) {
- if (f(el1, elems2.next))
- return true
- }
- else return false
- }
- false
- }
-
- def forall(f: (El1, El2) => Boolean): Boolean =
- !exists((x, y) => !f(x, y))
-
- def foreach[U](f: (El1, El2) => U): Unit = {
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- f(el1, elems2.next)
- else
- return
- }
- }
- }
"""
}
object TupleThree extends Tuple(3) {
override def imports = Tuple.zipImports
- override def moreMethods = """
-
- @deprecated("Use `zipped` instead.", "2.9.0")
- def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => Iterable[El2],
- w3: T3 => Iterable[El3],
- cbf1: CBF[Repr1, (El1, El2, El3), To]): To = {
- zipped map ((x, y, z) => ((x, y, z)))
- }
-
- /** Wraps a tuple in a `Zipped`, which supports 3-ary generalisations of `map`, `flatMap`, `filter`, etc.
- * Note that there must be an implicit value to convert this tuple's types into a [[scala.collection.TraversableLike]]
- * or [[scala.collection.IterableLike]].
- * {{{
- * scala> val tuple = (List(1,2,3),List('a','b','c'),List("x","y","z"))
- * tuple: (List[Int], List[Char], List[java.lang.String]) = (List(1, 2, 3),List(a, b, c),List(x, y, z))
- *
- * scala> tuple.zipped map { (x,y,z) => x + ":" + y + ":" + z}
- * res8: List[java.lang.String] = List(1:a:x, 2:b:y, 3:c:z)
- * }}}
- *
- * @see Zipped
- * Note: will not terminate for infinite-sized collections.
- */
- def zipped[Repr1, El1, Repr2, El2, Repr3, El3](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => ILike[El2, Repr2],
- w3: T3 => ILike[El3, Repr3]): Zipped[Repr1, El1, Repr2, El2, Repr3, El3]
- = new Zipped[Repr1, El1, Repr2, El2, Repr3, El3](_1, _2, _3)
-
- class Zipped[+Repr1, +El1, +Repr2, +El2, +Repr3, +El3](coll1: TLike[El1, Repr1],
- coll2: ILike[El2, Repr2],
- coll3: ILike[El3, Repr3]) {
- def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- b += f(el1, elems2.next, elems3.next)
- else
- return b.result
- }
- b.result
- }
-
- def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- b ++= f(el1, elems2.next, elems3.next)
- else
- return b.result
- }
- b.result
- }
-
- def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
- implicit cbf1: CBF[Repr1, El1, To1],
- cbf2: CBF[Repr2, El2, To2],
- cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val b3 = cbf3(coll3.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
- def result = (b1.result, b2.result, b3.result)
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext) {
- val el2 = elems2.next
- val el3 = elems3.next
-
- if (f(el1, el2, el3)) {
- b1 += el1
- b2 += el2
- b3 += el3
- }
- }
- else return result
- }
-
- result
- }
-
- def exists(f: (El1, El2, El3) => Boolean): Boolean = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext) {
- if (f(el1, elems2.next, elems3.next))
- return true
- }
- else return false
- }
- false
- }
-
- def forall(f: (El1, El2, El3) => Boolean): Boolean =
- !exists((x, y, z) => !f(x, y, z))
-
- def foreach[U](f: (El1, El2, El3) => U): Unit = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- f(el1, elems2.next, elems3.next)
- else
- return
- }
- }
- }
-"""
}
class Tuple(val i: Int) extends Group("Tuple") with Arity {
@@ -578,7 +357,7 @@ class Product(val i: Int) extends Group("Product") with Arity {
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
"""
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 7f8343a84e..7ab54f81c3 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -112,7 +112,7 @@
<deploy-local name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
<deploy-local name="scala-actors" version="@{version}" repository="@{repository}" />
- <deploy-local name="scala-dbc" version="@{version}" repository="@{repository}" />
+ <deploy-local name="scala-actors-migration" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
@@ -171,9 +171,9 @@
</deploy-remote>
<deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
- <deploy-remote name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-actors-migration" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
<deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
@@ -239,9 +239,9 @@
<deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scala-compiler" version="@{version}" repository="@{repository}" />
- <deploy-remote-signed name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-remote-signed name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-actors-migration" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
</sequential>
diff --git a/src/build/maven/scala-actors-migration-pom.xml b/src/build/maven/scala-actors-migration-pom.xml
new file mode 100644
index 0000000000..93fc34ece9
--- /dev/null
+++ b/src/build/maven/scala-actors-migration-pom.xml
@@ -0,0 +1,66 @@
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors-migration</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Migration Kit</name>
+ <description>Migration kit that enables an easy transition from Scala Actors to Akka.</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2012</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html
+ </url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
diff --git a/src/build/maven/scala-dbc-pom.xml b/src/build/maven/scala-dbc-pom.xml
deleted file mode 100644
index aa3d050c1e..0000000000
--- a/src/build/maven/scala-dbc-pom.xml
+++ /dev/null
@@ -1,61 +0,0 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-dbc</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala Database Connectivity</name>
- <description>Connectivity for your DBs</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index c3f8a4531c..e8db512125 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -32,9 +32,9 @@
</issueManagement>
<dependencies>
<dependency>
- <groupId>org.skife.com.typesafe.config</groupId>
- <artifactId>typesafe-config</artifactId>
- <version>0.3.0</version>
+ <groupId>com.typesafe</groupId>
+ <artifactId>config</artifactId>
+ <version>0.4.0</version>
</dependency>
</dependencies>
<distributionManagement>
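The library POM now points at the upstream Typesafe coordinates instead of the org.skife repackaging. A build depending on the configuration library directly would switch accordingly (sbt sketch, coordinates taken from the hunk above):

  // old coordinates: "org.skife.com.typesafe.config" % "typesafe-config" % "0.3.0"
  libraryDependencies += "com.typesafe" % "config" % "0.4.0"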
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 3bd4d7a199..956beaef88 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -137,9 +137,9 @@ MAIN DISTRIBUTION PACKAGING
<mvn-copy-lib mvn.artifact.name="jline"/>
<mvn-copy-lib mvn.artifact.name="scala-library"/>
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
- <mvn-copy-lib mvn.artifact.name="scala-dbc"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
<mvn-copy-lib mvn.artifact.name="scala-actors"/>
+ <mvn-copy-lib mvn.artifact.name="scala-actors-migration"/>
<mvn-copy-lib mvn.artifact.name="scala-partest"/>
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
@@ -202,11 +202,14 @@ MAIN DISTRIBUTION PACKAGING
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
</jar>
- <!-- TODO - Scala swing, dbc and actors should maybe have thier own jar, but creating it is SLOW. -->
+ <jar destfile="${dists.dir}/maven/${version.number}/scala-actors-migration/scala-actors-migration-docs.jar"
+ basedir="${build-docs.dir}/actors-migration">
+ <include name="**/*"/>
+ </jar>
+
+ <!-- TODO - Scala swing and actors should maybe have their own jar, but creating it is SLOW. -->
<copy tofile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
- <copy tofile="${dists.dir}/maven/${version.number}/scala-dbc/scala-dbc-docs.jar"
- file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
<copy tofile="${dists.dir}/maven/${version.number}/scala-actors/scala-actors-docs.jar"
file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
</target>
diff --git a/src/compiler/scala/reflect/internal/AnnotationInfos.scala b/src/compiler/scala/reflect/internal/AnnotationInfos.scala
index b86c62661a..91e1c3d50d 100644
--- a/src/compiler/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/compiler/scala/reflect/internal/AnnotationInfos.scala
@@ -18,8 +18,6 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
// the Symbol's field directly. For Type, a new AnnotatedType is
// created which wraps the original type.
trait Annotatable[Self] {
- self: Self =>
-
/** The annotations on this type. */
def annotations: List[AnnotationInfo] // Annotations on this type.
def setAnnotations(annots: List[AnnotationInfo]): Self // Replace annotations with argument list.
@@ -76,13 +74,34 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
*/
case class ScalaSigBytes(bytes: Array[Byte]) extends ClassfileAnnotArg {
override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]")
- lazy val encodedBytes = ByteCodecs.encode(bytes)
- def isLong: Boolean = (encodedBytes.length > 65535)
+ lazy val encodedBytes = ByteCodecs.encode(bytes) // TODO remove after migration to ASM-based GenJVM complete
+ def isLong: Boolean = (encodedBytes.length > 65535) // TODO remove after migration to ASM-based GenJVM complete
+ lazy val sevenBitsMayBeZero: Array[Byte] = {
+ mapToNextModSevenBits(scala.reflect.internal.pickling.ByteCodecs.encode8to7(bytes))
+ }
+ def fitsInOneString: Boolean = {
+ val numZeros = (sevenBitsMayBeZero count { b => b == 0 })
+ val res = (sevenBitsMayBeZero.length + numZeros) <= 65535
+ assert(this.isLong == !res, "As things stand, can't just swap in `fitsInOneString()` for `isLong()`")
+ res
+ }
def sigAnnot: Type =
if (this.isLong)
definitions.ScalaLongSignatureAnnotation.tpe
else
definitions.ScalaSignatureAnnotation.tpe
+
+ private def mapToNextModSevenBits(src: Array[Byte]): Array[Byte] = {
+ var i = 0
+ val srclen = src.length
+ while (i < srclen) {
+ val in = src(i)
+ src(i) = (if (in == 0x7f) 0.toByte else (in + 1).toByte)
+ i += 1
+ }
+ src
+ }
+
}
/** Represents a nested classfile annotation */
@@ -119,7 +138,11 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
// necessary for reification, see Reifiers.scala for more info
private var orig: Tree = EmptyTree
def original = orig
- def setOriginal(t: Tree): this.type = { orig = t; this }
+ def setOriginal(t: Tree): this.type = {
+ orig = t
+ this setPos t.pos
+ this
+ }
override def toString = (
atp +
@@ -260,8 +283,7 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
}
}
- lazy val classfileAnnotArgManifest: ClassManifest[ClassfileAnnotArg] =
- reflect.ClassManifest[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ lazy val classfileAnnotArgTag: ArrayTag[ClassfileAnnotArg] = arrayTag[ClassfileAnnotArg]
object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
}
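The new fitsInOneString check above accounts for the JVM's modified UTF-8 encoding of the pickled signature string: a 0x00 byte is stored as two bytes in the constant pool, so the effective length is the array length plus the number of zeros, and the whole string must stay within the 65535-byte limit of a single constant. A standalone sketch of just that arithmetic (not the compiler's code; sevenBitBytes stands in for mapToNextModSevenBits(encode8to7(bytes))):

  object SigSizeSketch {
    def classfileLength(sevenBitBytes: Array[Byte]): Int = {
      val zeros = sevenBitBytes.count(_ == 0) // each 0x00 takes two bytes in modified UTF-8
      sevenBitBytes.length + zeros            // every value in 0x01..0x7f takes one byte
    }
    def fitsInOneString(sevenBitBytes: Array[Byte]): Boolean =
      classfileLength(sevenBitBytes) <= 65535
  }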
diff --git a/src/compiler/scala/reflect/internal/ClassfileConstants.scala b/src/compiler/scala/reflect/internal/ClassfileConstants.scala
index eec72d082d..3346e9cccb 100644
--- a/src/compiler/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/compiler/scala/reflect/internal/ClassfileConstants.scala
@@ -372,7 +372,7 @@ object ClassfileConstants {
}
def methodFlags(jflags: Int): Long = {
initFields(jflags)
- translateFlags(jflags, 0)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE else 0)
}
}
object FlagTranslation extends FlagTranslation { }
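The behavioural change in this hunk is that a classfile method carrying ACC_BRIDGE now contributes the compiler's BRIDGE flag during translation instead of 0. A minimal sketch of the added bit test (JAVA_ACC_BRIDGE shown with the standard JVM value 0x0040; BRIDGE stands for the compiler flag passed in):

  val JAVA_ACC_BRIDGE = 0x0040
  def bridgeContribution(jflags: Int, BRIDGE: Long): Long =
    if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE else 0L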
diff --git a/src/compiler/scala/reflect/internal/Constants.scala b/src/compiler/scala/reflect/internal/Constants.scala
index 135d18d5ad..861bc870a7 100644
--- a/src/compiler/scala/reflect/internal/Constants.scala
+++ b/src/compiler/scala/reflect/internal/Constants.scala
@@ -224,6 +224,7 @@ trait Constants extends api.Constants {
case ClazzTag => "classOf[" + signature(typeValue) + "]"
case CharTag => "'" + escapedChar(charValue) + "'"
case LongTag => longValue.toString() + "L"
+ case EnumTag => symbolValue.name.toString()
case _ => String.valueOf(value)
}
}
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index 26b6a3cd14..0cdef9e79a 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -6,7 +6,7 @@
package scala.reflect
package internal
-import annotation.{ switch }
+import annotation.{ switch, meta }
import scala.collection.{ mutable, immutable }
import Flags._
import PartialFunction._
@@ -15,6 +15,8 @@ import scala.reflect.{ mirror => rm }
trait Definitions extends reflect.api.StandardDefinitions {
self: SymbolTable =>
+ object definitions extends DefinitionsClass
+
// [Eugene] find a way to make these non-lazy
lazy val ByteTpe = definitions.ByteClass.asType
lazy val ShortTpe = definitions.ShortClass.asType
@@ -41,34 +43,21 @@ trait Definitions extends reflect.api.StandardDefinitions {
*/
private type PolyMethodCreator = List[Symbol] => (Option[List[Type]], Type)
- private def enterNewClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): Symbol = {
+ private def enterNewClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): ClassSymbol = {
val clazz = owner.newClassSymbol(name, NoPosition, flags)
clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz)
}
- private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): Symbol = {
+ private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol = {
val msym = owner.newMethod(name.encode, NoPosition, flags)
val params = msym.newSyntheticValueParams(formals)
msym setInfo MethodType(params, restpe)
}
- private def enterNewMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): Symbol =
+ private def enterNewMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol =
owner.info.decls enter newMethod(owner, name, formals, restpe, flags)
// the scala value classes
trait ValueClassDefinitions {
- self: definitions.type =>
-
- private[Definitions] def valueCache(name: Name) = {
- val res = (
- if (name.isTypeName) ScalaPackageClass.info member name
- else ScalaPackageClass.info member name suchThat (_ hasFlag MODULE)
- )
- if (res eq NoSymbol)
- abort("Could not find value classes! This is a catastrophic failure. scala " + scala.util.Properties.versionString)
- else res
- }
- private[Definitions] def valueModuleMethod(className: Name, methodName: Name): Symbol = {
- valueCache(className.toTermName).moduleClass.tpe member methodName
- }
+ self: DefinitionsClass =>
import ClassfileConstants._
@@ -94,8 +83,27 @@ trait Definitions extends reflect.api.StandardDefinitions {
tpnme.Unit -> VOID_TAG
)
+ private def catastrophicFailure() =
+ abort("Could not find value classes! This is a catastrophic failure. scala " +
+ scala.util.Properties.versionString)
+
+ private def valueClassSymbol(name: TypeName): ClassSymbol = {
+ getMember(ScalaPackageClass, name) match {
+ case x: ClassSymbol => x
+ case _ => catastrophicFailure()
+ }
+ }
+ private def valueClassCompanion(name: TermName): ModuleSymbol = {
+ getMember(ScalaPackageClass, name) match {
+ case x: ModuleSymbol => x
+ case _ => catastrophicFailure()
+ }
+ }
+ private def valueCompanionMember(className: Name, methodName: TermName): MethodSymbol =
+ getMemberMethod(valueClassCompanion(className.toTermName).moduleClass, methodName)
+
private def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f)
- private def symbolsMap[T](syms: List[Symbol], f: Name => T): Map[Symbol, T] = syms zip (syms map (x => f(x.name))) toMap
+ private def symbolsMap[T](syms: List[Symbol], f: Name => T): Map[Symbol, T] = mapFrom(syms)(x => f(x.name))
private def symbolsMapFilt[T](syms: List[Symbol], p: Name => Boolean, f: Name => T) = symbolsMap(syms filter (x => p(x.name)), f)
private def boxedName(name: Name) = sn.Boxed(name.toTypeName)
@@ -106,8 +114,8 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val boxedClass = classesMap(x => getClass(boxedName(x)))
lazy val refClass = classesMap(x => getRequiredClass("scala.runtime." + x + "Ref"))
lazy val volatileRefClass = classesMap(x => getRequiredClass("scala.runtime.Volatile" + x + "Ref"))
- lazy val boxMethod = classesMap(x => valueModuleMethod(x, nme.box))
- lazy val unboxMethod = classesMap(x => valueModuleMethod(x, nme.unbox))
+ lazy val boxMethod = classesMap(x => valueCompanionMember(x, nme.box))
+ lazy val unboxMethod = classesMap(x => valueCompanionMember(x, nme.unbox))
def isNumericSubClass(sub: Symbol, sup: Symbol) = (
(numericWeight contains sub)
@@ -116,27 +124,28 @@ trait Definitions extends reflect.api.StandardDefinitions {
)
/** Is symbol a numeric value class? */
- def isNumericValueClass(sym: Symbol): Boolean =
- numericWeight contains sym
+ def isNumericValueClass(sym: Symbol) = ScalaNumericValueClasses contains sym
def isGetClass(sym: Symbol) =
- (sym.name == nme.getClass_) && (sym.paramss.isEmpty || sym.paramss.head.isEmpty)
-
- lazy val UnitClass = valueCache(tpnme.Unit)
- lazy val ByteClass = valueCache(tpnme.Byte)
- lazy val ShortClass = valueCache(tpnme.Short)
- lazy val CharClass = valueCache(tpnme.Char)
- lazy val IntClass = valueCache(tpnme.Int)
- lazy val LongClass = valueCache(tpnme.Long)
- lazy val FloatClass = valueCache(tpnme.Float)
- lazy val DoubleClass = valueCache(tpnme.Double)
- lazy val BooleanClass = valueCache(tpnme.Boolean)
+ (sym.name == nme.getClass_) && flattensToEmpty(sym.paramss)
+
+ lazy val UnitClass = valueClassSymbol(tpnme.Unit)
+ lazy val ByteClass = valueClassSymbol(tpnme.Byte)
+ lazy val ShortClass = valueClassSymbol(tpnme.Short)
+ lazy val CharClass = valueClassSymbol(tpnme.Char)
+ lazy val IntClass = valueClassSymbol(tpnme.Int)
+ lazy val LongClass = valueClassSymbol(tpnme.Long)
+ lazy val FloatClass = valueClassSymbol(tpnme.Float)
+ lazy val DoubleClass = valueClassSymbol(tpnme.Double)
+ lazy val BooleanClass = valueClassSymbol(tpnme.Boolean)
lazy val Boolean_and = getMember(BooleanClass, nme.ZAND)
lazy val Boolean_or = getMember(BooleanClass, nme.ZOR)
lazy val Boolean_not = getMember(BooleanClass, nme.UNARY_!)
+ lazy val ScalaNumericValueClasses = ScalaValueClasses filterNot Set[Symbol](UnitClass, BooleanClass)
+
def ScalaValueClassesNoUnit = ScalaValueClasses filterNot (_ eq UnitClass)
- def ScalaValueClasses: List[Symbol] = List(
+ def ScalaValueClasses: List[ClassSymbol] = List(
UnitClass,
BooleanClass,
ByteClass,
@@ -148,10 +157,10 @@ trait Definitions extends reflect.api.StandardDefinitions {
DoubleClass
)
def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol)
- def ScalaPrimitiveValueClasses: List[Symbol] = ScalaValueClasses
+ def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses
}
- object definitions extends AbsDefinitions with ValueClassDefinitions {
+ abstract class DefinitionsClass extends AbsDefinitions with ValueClassDefinitions {
private var isInitialized = false
def isDefinitionsInitialized = isInitialized
@@ -207,15 +216,14 @@ trait Definitions extends reflect.api.StandardDefinitions {
}
// It becomes tricky to create dedicated objects for other symbols because
// of initialization order issues.
- lazy val JavaLangPackage = getModule(sn.JavaLang)
+ lazy val JavaLangPackage = getRequiredPackage(sn.JavaLang)
lazy val JavaLangPackageClass = JavaLangPackage.moduleClass
- lazy val ScalaPackage = getModule(nme.scala_)
+ lazy val ScalaPackage = getRequiredPackage(nme.scala_)
lazy val ScalaPackageClass = ScalaPackage.moduleClass
-
- lazy val RuntimePackage = getRequiredModule("scala.runtime")
+ lazy val RuntimePackage = getRequiredPackage("scala.runtime")
lazy val RuntimePackageClass = RuntimePackage.moduleClass
- lazy val JavaLangEnumClass = getRequiredClass("java.lang.Enum")
+ lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]]
// convenient one-argument parameter lists
lazy val anyparam = List(AnyClass.tpe)
@@ -268,9 +276,9 @@ trait Definitions extends reflect.api.StandardDefinitions {
}
// top types
- lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
- lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.tpe)
- lazy val ObjectClass = getClass(sn.Object)
+ lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
+ lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.tpe)
+ lazy val ObjectClass = getRequiredClass(sn.Object.toString)
// Note: this is not the type alias AnyRef, it's a companion-like
// object used by the @specialize annotation.
@@ -278,12 +286,13 @@ trait Definitions extends reflect.api.StandardDefinitions {
@deprecated("Use AnyRefModule", "2.10.0")
def Predef_AnyRef = AnyRefModule
- lazy val AnyValClass = ScalaPackageClass.info member tpnme.AnyVal orElse {
+ lazy val AnyValClass: ClassSymbol = (ScalaPackageClass.info member tpnme.AnyVal orElse {
val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, List(AnyClass.tpe, NotNullClass.tpe), ABSTRACT)
val av_constr = anyval.newClassConstructor(NoPosition)
anyval.info.decls enter av_constr
anyval
- }
+ }).asInstanceOf[ClassSymbol]
+
lazy val AnyVal_getClass = enterNewMethod(AnyValClass, nme.getClass_, Nil, getClassReturnType(AnyValClass.tpe))
// bottom types
@@ -292,7 +301,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
sealed abstract class BottomClassSymbol(name: TypeName, parent: Symbol) extends ClassSymbol(ScalaPackageClass, NoPosition, name) {
locally {
- this initFlags ABSTRACT | TRAIT | FINAL
+ this initFlags ABSTRACT | FINAL
this setInfoAndEnter ClassInfoType(List(parent.tpe), newScope, this)
}
final override def isBottomClass = true
@@ -308,24 +317,24 @@ trait Definitions extends reflect.api.StandardDefinitions {
}
// exceptions and other throwables
- lazy val ClassCastExceptionClass = getRequiredClass("java.lang.ClassCastException")
+ lazy val ClassCastExceptionClass = requiredClass[ClassCastException]
lazy val IndexOutOfBoundsExceptionClass = getClass(sn.IOOBException)
lazy val InvocationTargetExceptionClass = getClass(sn.InvTargetException)
- lazy val MatchErrorClass = getRequiredClass("scala.MatchError")
- lazy val NonLocalReturnControlClass = getRequiredClass("scala.runtime.NonLocalReturnControl")
+ lazy val MatchErrorClass = requiredClass[MatchError]
+ lazy val NonLocalReturnControlClass = requiredClass[scala.runtime.NonLocalReturnControl[_]]
lazy val NullPointerExceptionClass = getClass(sn.NPException)
lazy val ThrowableClass = getClass(sn.Throwable)
- lazy val UninitializedErrorClass = getRequiredClass("scala.UninitializedFieldError")
+ lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError]
// fundamental reference classes
- lazy val PartialFunctionClass = getRequiredClass("scala.PartialFunction")
- lazy val AbstractPartialFunctionClass = getRequiredClass("scala.runtime.AbstractPartialFunction")
- lazy val SymbolClass = getRequiredClass("scala.Symbol")
- lazy val StringClass = getClass(sn.String)
+ lazy val PartialFunctionClass = requiredClass[PartialFunction[_,_]]
+ lazy val AbstractPartialFunctionClass = requiredClass[scala.runtime.AbstractPartialFunction[_,_]]
+ lazy val SymbolClass = requiredClass[scala.Symbol]
+ lazy val StringClass = requiredClass[java.lang.String]
lazy val StringModule = StringClass.linkedClassOfClass
- lazy val ClassClass = getClass(sn.Class)
+ lazy val ClassClass = requiredClass[java.lang.Class[_]]
def Class_getMethod = getMember(ClassClass, nme.getMethod_)
- lazy val DynamicClass = getRequiredClass("scala.Dynamic")
+ lazy val DynamicClass = requiredClass[Dynamic]
// fundamental modules
lazy val SysPackage = getPackageObject("scala.sys")
@@ -336,14 +345,14 @@ trait Definitions extends reflect.api.StandardDefinitions {
// Those modules and their module classes
lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass)
- lazy val PredefModule: Symbol = getRequiredModule("scala.Predef")
+ lazy val PredefModule = requiredModule[scala.Predef.type]
lazy val PredefModuleClass = PredefModule.moduleClass
- def Predef_classOf = getMember(PredefModule, nme.classOf)
- def Predef_identity = getMember(PredefModule, nme.identity)
- def Predef_conforms = getMember(PredefModule, nme.conforms)
+ def Predef_classOf = getMember(PredefModule, nme.classOf)
+ def Predef_identity = getMember(PredefModule, nme.identity)
+ def Predef_conforms = getMember(PredefModule, nme.conforms)
def Predef_wrapRefArray = getMember(PredefModule, nme.wrapRefArray)
- def Predef_??? = getMember(PredefModule, nme.???)
+ def Predef_??? = getMember(PredefModule, nme.???)
/** Is `sym` a member of Predef with the given name?
* Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
@@ -356,12 +365,12 @@ trait Definitions extends reflect.api.StandardDefinitions {
/** Specialization.
*/
- lazy val SpecializableModule = getRequiredModule("scala.Specializable")
- lazy val GroupOfSpecializable = SpecializableModule.info.member(newTypeName("Group"))
+ lazy val SpecializableModule = requiredModule[Specializable]
+ lazy val GroupOfSpecializable = getMember(SpecializableModule, tpnme.Group)
- lazy val ConsoleModule: Symbol = getRequiredModule("scala.Console")
- lazy val ScalaRunTimeModule: Symbol = getRequiredModule("scala.runtime.ScalaRunTime")
- lazy val SymbolModule: Symbol = getRequiredModule("scala.Symbol")
+ lazy val ConsoleModule: Symbol = requiredModule[scala.Console.type]
+ lazy val ScalaRunTimeModule: Symbol = requiredModule[scala.runtime.ScalaRunTime.type]
+ lazy val SymbolModule: Symbol = requiredModule[scala.Symbol.type]
lazy val Symbol_apply = SymbolModule.info decl nme.apply
def SeqFactory = getMember(ScalaRunTimeModule, nme.Seq)
@@ -371,15 +380,17 @@ trait Definitions extends reflect.api.StandardDefinitions {
def arrayCloneMethod = getMember(ScalaRunTimeModule, nme.array_clone)
def ensureAccessibleMethod = getMember(ScalaRunTimeModule, nme.ensureAccessible)
def scalaRuntimeSameElements = getMember(ScalaRunTimeModule, nme.sameElements)
+ def arrayClassMethod = getMember(ScalaRunTimeModule, nme.arrayClass)
+ def arrayElementClassMethod = getMember(ScalaRunTimeModule, nme.arrayElementClass)
// classes with special meanings
- lazy val StringAddClass = getRequiredClass("scala.runtime.StringAdd")
- lazy val ArrowAssocClass = getRequiredClass("scala.Predef.ArrowAssoc")
- lazy val StringAdd_+ = getMember(StringAddClass, nme.PLUS)
- lazy val NotNullClass = getRequiredClass("scala.NotNull")
- lazy val ScalaNumberClass = getRequiredClass("scala.math.ScalaNumber")
- lazy val TraitSetterAnnotationClass = getRequiredClass("scala.runtime.TraitSetter")
- lazy val DelayedInitClass = getRequiredClass("scala.DelayedInit")
+ lazy val StringAddClass = requiredClass[scala.runtime.StringAdd]
+ lazy val ArrowAssocClass = getRequiredClass("scala.Predef.ArrowAssoc") // SI-5731
+ lazy val StringAdd_+ = getMember(StringAddClass, nme.PLUS)
+ lazy val NotNullClass = getRequiredClass("scala.NotNull")
+ lazy val ScalaNumberClass = requiredClass[scala.math.ScalaNumber]
+ lazy val TraitSetterAnnotationClass = requiredClass[scala.runtime.TraitSetter]
+ lazy val DelayedInitClass = requiredClass[scala.DelayedInit]
def delayedInitMethod = getMember(DelayedInitClass, nme.delayedInit)
// a dummy value that communicates that a delayedInit call is compiler-generated
// from phase UnCurry to phase Constructors
@@ -387,14 +398,15 @@ trait Definitions extends reflect.api.StandardDefinitions {
// def delayedInitArgVal = EmptyPackageClass.newValue(NoPosition, nme.delayedInitArg)
// .setInfo(UnitClass.tpe)
- lazy val TypeConstraintClass = getRequiredClass("scala.annotation.TypeConstraint")
+ lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint]
lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, anyparam, ABSTRACT | TRAIT | FINAL)
- lazy val SerializableClass = getRequiredClass("scala.Serializable")
- lazy val JavaSerializableClass = getClass(sn.JavaSerializable) modifyInfo fixupAsAnyTrait
- lazy val ComparableClass = getRequiredClass("java.lang.Comparable") modifyInfo fixupAsAnyTrait
- lazy val JavaCloneableClass = getRequiredClass("java.lang.Cloneable")
- lazy val RemoteInterfaceClass = getRequiredClass("java.rmi.Remote")
- lazy val RemoteExceptionClass = getRequiredClass("java.rmi.RemoteException")
+ lazy val SerializableClass = requiredClass[scala.Serializable]
+ lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait
+ lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
+ lazy val JavaCloneableClass = requiredClass[java.lang.Cloneable]
+ lazy val JavaNumberClass = requiredClass[java.lang.Number]
+ lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote]
+ lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException]
lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyClass.tpe)
lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN_NAME, 0L)(_ => AnyClass.tpe)
@@ -423,6 +435,10 @@ trait Definitions extends reflect.api.StandardDefinitions {
case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol)
case _ => false
}
+ def isReferenceArray(tp: Type) = tp match {
+ case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefClass.tpe
+ case _ => false
+ }
def isArrayOfSymbol(tp: Type, elem: Symbol) = tp match {
case TypeRef(_, ArrayClass, arg :: Nil) => arg.typeSymbol == elem
case _ => false
@@ -431,90 +447,106 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy")
// collections classes
- lazy val ConsClass = getRequiredClass("scala.collection.immutable.$colon$colon")
- lazy val IterableClass = getRequiredClass("scala.collection.Iterable")
- lazy val IteratorClass = getRequiredClass("scala.collection.Iterator")
- lazy val ListClass = getRequiredClass("scala.collection.immutable.List")
- lazy val SeqClass = getRequiredClass("scala.collection.Seq")
- lazy val StringBuilderClass = getRequiredClass("scala.collection.mutable.StringBuilder")
- lazy val TraversableClass = getRequiredClass("scala.collection.Traversable")
-
- lazy val ListModule = getRequiredModule("scala.collection.immutable.List")
+ lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]]
+ lazy val IterableClass = requiredClass[scala.collection.Iterable[_]]
+ lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]]
+ lazy val ListClass = requiredClass[scala.collection.immutable.List[_]]
+ lazy val SeqClass = requiredClass[scala.collection.Seq[_]]
+ lazy val StringBuilderClass = requiredClass[scala.collection.mutable.StringBuilder]
+ lazy val TraversableClass = requiredClass[scala.collection.Traversable[_]]
+
+ lazy val ListModule = requiredModule[scala.collection.immutable.List.type]
lazy val List_apply = getMember(ListModule, nme.apply)
- lazy val NilModule = getRequiredModule("scala.collection.immutable.Nil")
- lazy val SeqModule = getRequiredModule("scala.collection.Seq")
- lazy val IteratorModule = getRequiredModule("scala.collection.Iterator")
+ lazy val NilModule = requiredModule[scala.collection.immutable.Nil.type]
+ lazy val SeqModule = requiredModule[scala.collection.Seq.type]
+ lazy val IteratorModule = requiredModule[scala.collection.Iterator.type]
lazy val Iterator_apply = getMember(IteratorModule, nme.apply)
// arrays and their members
- lazy val ArrayModule = getRequiredModule("scala.Array")
+ lazy val ArrayModule = requiredModule[scala.Array.type]
lazy val ArrayModule_overloadedApply = getMember(ArrayModule, nme.apply)
- lazy val ArrayClass = getRequiredClass("scala.Array")
+ lazy val ArrayClass = getRequiredClass("scala.Array") // requiredClass[scala.Array[_]]
lazy val Array_apply = getMember(ArrayClass, nme.apply)
lazy val Array_update = getMember(ArrayClass, nme.update)
lazy val Array_length = getMember(ArrayClass, nme.length)
lazy val Array_clone = getMember(ArrayClass, nme.clone_)
// reflection / structural types
- lazy val SoftReferenceClass = getRequiredClass("java.lang.ref.SoftReference")
- lazy val WeakReferenceClass = getRequiredClass("java.lang.ref.WeakReference")
+ lazy val SoftReferenceClass = requiredClass[java.lang.ref.SoftReference[_]]
+ lazy val WeakReferenceClass = requiredClass[java.lang.ref.WeakReference[_]]
lazy val MethodClass = getClass(sn.MethodAsObject)
def methodClass_setAccessible = getMember(MethodClass, nme.setAccessible)
- lazy val EmptyMethodCacheClass = getRequiredClass("scala.runtime.EmptyMethodCache")
- lazy val MethodCacheClass = getRequiredClass("scala.runtime.MethodCache")
+ lazy val EmptyMethodCacheClass = requiredClass[scala.runtime.EmptyMethodCache]
+ lazy val MethodCacheClass = requiredClass[scala.runtime.MethodCache]
def methodCache_find = getMember(MethodCacheClass, nme.find_)
def methodCache_add = getMember(MethodCacheClass, nme.add_)
// scala.reflect
lazy val ReflectPackageClass = getMember(ScalaPackageClass, nme.reflect)
- lazy val ReflectPackage = getPackageObject("scala.reflect")
- def Reflect_mirror = getMember(ReflectPackage, nme.mirror)
-
- lazy val ExprClass = getMember(getRequiredClass("scala.reflect.api.Exprs"), tpnme.Expr)
+ lazy val ReflectPackage = requiredModule[scala.reflect.`package`.type]
+ def ReflectMirror = getMember(ReflectPackage, nme.mirror)
+ // [Eugene] is this a good place for ReflectMirrorPrefix?
+ def ReflectMirrorPrefix = gen.mkAttributedRef(ReflectMirror) setType singleType(ReflectMirror.owner.thisPrefix, ReflectMirror)
+
+ lazy val PartialManifestClass = requiredClass[scala.reflect.ClassManifest[_]]
+ lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifest.type]
+ lazy val FullManifestClass = requiredClass[scala.reflect.Manifest[_]]
+ lazy val FullManifestModule = requiredModule[scala.reflect.Manifest.type]
+ lazy val OptManifestClass = requiredClass[scala.reflect.OptManifest[_]]
+ lazy val NoManifest = requiredModule[scala.reflect.NoManifest.type]
+
+ lazy val ExprClass = getMember(requiredClass[scala.reflect.api.Exprs], tpnme.Expr)
def ExprTree = getMember(ExprClass, nme.tree)
def ExprTpe = getMember(ExprClass, nme.tpe)
def ExprEval = getMember(ExprClass, nme.eval)
def ExprValue = getMember(ExprClass, nme.value)
- lazy val ExprModule = getMember(getRequiredClass("scala.reflect.api.Exprs"), nme.Expr)
-
- lazy val ClassTagClass = getRequiredClass("scala.reflect.ClassTag")
- def ClassTagErasure = getMember(ClassTagClass, nme.erasure)
- def ClassTagTpe = getMember(ClassTagClass, nme.tpe)
- lazy val ClassTagModule = getRequiredModule("scala.reflect.ClassTag")
- lazy val TypeTagsClass = getRequiredClass("scala.reflect.api.TypeTags")
- lazy val TypeTagClass = getMember(TypeTagsClass, tpnme.TypeTag)
- def TypeTagTpe = getMember(TypeTagClass, nme.tpe)
- lazy val TypeTagModule = getMember(TypeTagsClass, nme.TypeTag)
- lazy val ConcreteTypeTagClass = getMember(TypeTagsClass, tpnme.ConcreteTypeTag)
- lazy val ConcreteTypeTagModule = getMember(TypeTagsClass, nme.ConcreteTypeTag)
-
- lazy val MacroContextClass = getRequiredClass("scala.reflect.makro.Context")
+ lazy val ExprModule = getMember(requiredClass[scala.reflect.api.Exprs], nme.Expr)
+
+ lazy val ArrayTagClass = requiredClass[scala.reflect.ArrayTag[_]]
+ lazy val ErasureTagClass = requiredClass[scala.reflect.ErasureTag[_]]
+ lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]]
+ lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]]
+ lazy val TypeTagsClass = requiredClass[scala.reflect.api.TypeTags]
+ lazy val TypeTagClass = getMemberClass(TypeTagsClass, tpnme.TypeTag)
+ lazy val TypeTagModule = getMemberModule(TypeTagsClass, nme.TypeTag)
+ lazy val ConcreteTypeTagClass = getMemberClass(TypeTagsClass, tpnme.ConcreteTypeTag)
+ lazy val ConcreteTypeTagModule = getMemberModule(TypeTagsClass, nme.ConcreteTypeTag)
+
+ def ArrayTagWrap = getMemberMethod(ArrayTagClass, nme.wrap)
+ def ArrayTagNewArray = getMemberMethod(ArrayTagClass, nme.newArray)
+ def ErasureTagErasure = getMemberMethod(ErasureTagClass, nme.erasure)
+ def ClassTagTpe = getMemberMethod(ClassTagClass, nme.tpe)
+ def TypeTagTpe = getMemberMethod(TypeTagClass, nme.tpe)
+
+ lazy val MacroContextClass = requiredClass[scala.reflect.makro.Context]
def MacroContextPrefix = getMember(MacroContextClass, nme.prefix)
def MacroContextPrefixType = getMember(MacroContextClass, tpnme.PrefixType)
def MacroContextMirror = getMember(MacroContextClass, nme.mirror)
def MacroContextReify = getMember(MacroContextClass, nme.reify)
- lazy val MacroImplAnnotation = getRequiredClass("scala.reflect.makro.internal.macroImpl")
+ lazy val MacroImplAnnotation = requiredClass[scala.reflect.makro.internal.macroImpl]
lazy val MacroInternalPackage = getPackageObject("scala.reflect.makro.internal")
- def MacroInternal_materializeClassTag = getMember(MacroInternalPackage, nme.materializeClassTag)
- def MacroInternal_materializeTypeTag = getMember(MacroInternalPackage, nme.materializeTypeTag)
- def MacroInternal_materializeConcreteTypeTag = getMember(MacroInternalPackage, nme.materializeConcreteTypeTag)
+ def MacroInternal_materializeArrayTag = getMemberMethod(MacroInternalPackage, nme.materializeArrayTag)
+ def MacroInternal_materializeErasureTag = getMemberMethod(MacroInternalPackage, nme.materializeErasureTag)
+ def MacroInternal_materializeClassTag = getMemberMethod(MacroInternalPackage, nme.materializeClassTag)
+ def MacroInternal_materializeTypeTag = getMemberMethod(MacroInternalPackage, nme.materializeTypeTag)
+ def MacroInternal_materializeConcreteTypeTag = getMemberMethod(MacroInternalPackage, nme.materializeConcreteTypeTag)
- lazy val ScalaSignatureAnnotation = getRequiredClass("scala.reflect.ScalaSignature")
- lazy val ScalaLongSignatureAnnotation = getRequiredClass("scala.reflect.ScalaLongSignature")
+ lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
+ lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
// Option classes
- lazy val OptionClass: Symbol = getRequiredClass("scala.Option")
- lazy val SomeClass: Symbol = getRequiredClass("scala.Some")
- lazy val NoneModule: Symbol = getRequiredModule("scala.None")
- lazy val SomeModule: Symbol = getRequiredModule("scala.Some")
+ lazy val OptionClass: Symbol = requiredClass[Option[_]]
+ lazy val SomeClass: Symbol = requiredClass[Some[_]]
+ lazy val NoneModule: Symbol = requiredModule[scala.None.type]
+ lazy val SomeModule: Symbol = requiredModule[scala.Some.type]
// [Eugene] how do I make this work without casts?
// private lazy val importerFromRm = self.mkImporter(rm)
private lazy val importerFromRm = self.mkImporter(rm).asInstanceOf[self.Importer { val from: rm.type }]
- def manifestToType(m: Manifest[_]): Type = importerFromRm.importType(m.tpe)
+ def compilerTypeFromTag(t: rm.TypeTag[_]): Type = importerFromRm.importType(t.tpe)
- def manifestToSymbol(m: Manifest[_]): Symbol = importerFromRm.importSymbol(m.tpe.typeSymbol)
+ def compilerSymbolFromTag(t: rm.TypeTag[_]): Symbol = importerFromRm.importSymbol(t.sym)
// The given symbol represents either String.+ or StringAdd.+
def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
@@ -578,6 +610,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp)
def tupleField(n: Int, j: Int) = getMember(TupleClass(n), nme.productAccessorName(j))
+ // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intentional?
def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym)
def isProductNClass(sym: Symbol) = ProductClass contains sym
@@ -616,13 +649,13 @@ trait Definitions extends reflect.api.StandardDefinitions {
}
def isTupleType(tp: Type) = isTupleTypeDirect(tp.normalize)
- lazy val ProductRootClass: Symbol = getRequiredClass("scala.Product")
- def Product_productArity = getMember(ProductRootClass, nme.productArity)
- def Product_productElement = getMember(ProductRootClass, nme.productElement)
- // def Product_productElementName = getMember(ProductRootClass, nme.productElementName)
- def Product_iterator = getMember(ProductRootClass, nme.productIterator)
- def Product_productPrefix = getMember(ProductRootClass, nme.productPrefix)
- def Product_canEqual = getMember(ProductRootClass, nme.canEqual_)
+ lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
+ def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
+ def Product_productElement = getMemberMethod(ProductRootClass, nme.productElement)
+ def Product_iterator = getMemberMethod(ProductRootClass, nme.productIterator)
+ def Product_productPrefix = getMemberMethod(ProductRootClass, nme.productPrefix)
+ def Product_canEqual = getMemberMethod(ProductRootClass, nme.canEqual_)
+ // def Product_productElementName = getMemberMethod(ProductRootClass, nme.productElementName)
def productProj(z:Symbol, j: Int): Symbol = getMember(z, nme.productAccessorName(j))
def productProj(n: Int, j: Int): Symbol = productProj(ProductClass(n), j)
@@ -655,6 +688,11 @@ trait Definitions extends reflect.api.StandardDefinitions {
false
}
+ def isPartialFunctionType(tp: Type): Boolean = {
+ val sym = tp.typeSymbol
+ (sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass)
+ }
+
def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType
def elementType(container: Symbol, tp: Type): Type = tp match {
@@ -858,79 +896,92 @@ trait Definitions extends reflect.api.StandardDefinitions {
def Object_toString = getMember(ObjectClass, nme.toString_)
// boxed classes
- lazy val ObjectRefClass = getRequiredClass("scala.runtime.ObjectRef")
- lazy val VolatileObjectRefClass = getRequiredClass("scala.runtime.VolatileObjectRef")
+ lazy val ObjectRefClass = requiredClass[scala.runtime.ObjectRef[_]]
+ lazy val VolatileObjectRefClass = requiredClass[scala.runtime.VolatileObjectRef[_]]
+ lazy val RuntimeStaticsModule = getRequiredModule("scala.runtime.Statics")
lazy val BoxesRunTimeModule = getRequiredModule("scala.runtime.BoxesRunTime")
lazy val BoxesRunTimeClass = BoxesRunTimeModule.moduleClass
lazy val BoxedNumberClass = getClass(sn.BoxedNumber)
lazy val BoxedCharacterClass = getClass(sn.BoxedCharacter)
lazy val BoxedBooleanClass = getClass(sn.BoxedBoolean)
- lazy val BoxedByteClass = getRequiredClass("java.lang.Byte")
- lazy val BoxedShortClass = getRequiredClass("java.lang.Short")
- lazy val BoxedIntClass = getRequiredClass("java.lang.Integer")
- lazy val BoxedLongClass = getRequiredClass("java.lang.Long")
- lazy val BoxedFloatClass = getRequiredClass("java.lang.Float")
- lazy val BoxedDoubleClass = getRequiredClass("java.lang.Double")
+ lazy val BoxedByteClass = requiredClass[java.lang.Byte]
+ lazy val BoxedShortClass = requiredClass[java.lang.Short]
+ lazy val BoxedIntClass = requiredClass[java.lang.Integer]
+ lazy val BoxedLongClass = requiredClass[java.lang.Long]
+ lazy val BoxedFloatClass = requiredClass[java.lang.Float]
+ lazy val BoxedDoubleClass = requiredClass[java.lang.Double]
lazy val Boxes_isNumberOrBool = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean)
lazy val Boxes_isNumber = getDecl(BoxesRunTimeClass, nme.isBoxedNumber)
- lazy val BoxedUnitClass = getRequiredClass("scala.runtime.BoxedUnit")
+ lazy val BoxedUnitClass = requiredClass[scala.runtime.BoxedUnit]
lazy val BoxedUnitModule = getRequiredModule("scala.runtime.BoxedUnit")
def BoxedUnit_UNIT = getMember(BoxedUnitModule, nme.UNIT)
def BoxedUnit_TYPE = getMember(BoxedUnitModule, nme.TYPE_)
// Annotation base classes
- lazy val AnnotationClass = getRequiredClass("scala.annotation.Annotation")
- lazy val ClassfileAnnotationClass = getRequiredClass("scala.annotation.ClassfileAnnotation")
- lazy val StaticAnnotationClass = getRequiredClass("scala.annotation.StaticAnnotation")
+ lazy val AnnotationClass = requiredClass[scala.annotation.Annotation]
+ lazy val ClassfileAnnotationClass = requiredClass[scala.annotation.ClassfileAnnotation]
+ lazy val StaticAnnotationClass = requiredClass[scala.annotation.StaticAnnotation]
// Annotations
- lazy val BridgeClass = getRequiredClass("scala.annotation.bridge")
- lazy val ElidableMethodClass = getRequiredClass("scala.annotation.elidable")
- lazy val ImplicitNotFoundClass = getRequiredClass("scala.annotation.implicitNotFound")
- lazy val MigrationAnnotationClass = getRequiredClass("scala.annotation.migration")
- lazy val ScalaStrictFPAttr = getRequiredClass("scala.annotation.strictfp")
- lazy val SerializableAttr = getRequiredClass("scala.annotation.serializable") // @serializable is deprecated
- lazy val SwitchClass = getRequiredClass("scala.annotation.switch")
- lazy val TailrecClass = getRequiredClass("scala.annotation.tailrec")
- lazy val VarargsClass = getRequiredClass("scala.annotation.varargs")
- lazy val uncheckedStableClass = getRequiredClass("scala.annotation.unchecked.uncheckedStable")
- lazy val uncheckedVarianceClass = getRequiredClass("scala.annotation.unchecked.uncheckedVariance")
-
- lazy val BeanPropertyAttr = getRequiredClass("scala.beans.BeanProperty")
- lazy val BooleanBeanPropertyAttr = getRequiredClass("scala.beans.BooleanBeanProperty")
- lazy val CloneableAttr = getRequiredClass("scala.cloneable")
- lazy val DeprecatedAttr = getRequiredClass("scala.deprecated")
- lazy val DeprecatedNameAttr = getRequiredClass("scala.deprecatedName")
- lazy val NativeAttr = getRequiredClass("scala.native")
- lazy val RemoteAttr = getRequiredClass("scala.remote")
- lazy val ScalaInlineClass = getRequiredClass("scala.inline")
- lazy val ScalaNoInlineClass = getRequiredClass("scala.noinline")
- lazy val SerialVersionUIDAttr = getRequiredClass("scala.SerialVersionUID")
- lazy val SpecializedClass = getRequiredClass("scala.specialized")
- lazy val ThrowsClass = getRequiredClass("scala.throws")
- lazy val TransientAttr = getRequiredClass("scala.transient")
- lazy val UncheckedClass = getRequiredClass("scala.unchecked")
- lazy val VolatileAttr = getRequiredClass("scala.volatile")
+ lazy val BridgeClass = requiredClass[scala.annotation.bridge]
+ lazy val ElidableMethodClass = requiredClass[scala.annotation.elidable]
+ lazy val ImplicitNotFoundClass = requiredClass[scala.annotation.implicitNotFound]
+ lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration]
+ lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp]
+ lazy val SerializableAttr = requiredClass[scala.annotation.serializable] // @serializable is deprecated
+ lazy val SwitchClass = requiredClass[scala.annotation.switch]
+ lazy val TailrecClass = requiredClass[scala.annotation.tailrec]
+ lazy val VarargsClass = requiredClass[scala.annotation.varargs]
+ lazy val uncheckedStableClass = requiredClass[scala.annotation.unchecked.uncheckedStable]
+ lazy val uncheckedVarianceClass = requiredClass[scala.annotation.unchecked.uncheckedVariance]
+
+ lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
+ lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
+ lazy val CloneableAttr = requiredClass[scala.cloneable]
+ lazy val DeprecatedAttr = requiredClass[scala.deprecated]
+ lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
+ lazy val NativeAttr = requiredClass[scala.native]
+ lazy val RemoteAttr = requiredClass[scala.remote]
+ lazy val ScalaInlineClass = requiredClass[scala.inline]
+ lazy val ScalaNoInlineClass = requiredClass[scala.noinline]
+ lazy val SerialVersionUIDAttr = requiredClass[scala.SerialVersionUID]
+ lazy val SpecializedClass = requiredClass[scala.specialized]
+ lazy val ThrowsClass = requiredClass[scala.throws]
+ lazy val TransientAttr = requiredClass[scala.transient]
+ lazy val UncheckedClass = requiredClass[scala.unchecked]
+ lazy val UnspecializedClass = requiredClass[scala.annotation.unspecialized]
+ lazy val VolatileAttr = requiredClass[scala.volatile]
// Meta-annotations
- lazy val BeanGetterTargetClass = getMetaAnnotation("beanGetter")
- lazy val BeanSetterTargetClass = getMetaAnnotation("beanSetter")
- lazy val FieldTargetClass = getMetaAnnotation("field")
- lazy val GetterTargetClass = getMetaAnnotation("getter")
- lazy val ParamTargetClass = getMetaAnnotation("param")
- lazy val SetterTargetClass = getMetaAnnotation("setter")
- lazy val ClassTargetClass = getMetaAnnotation("companionClass")
- lazy val ObjectTargetClass = getMetaAnnotation("companionObject")
- lazy val MethodTargetClass = getMetaAnnotation("companionMethod") // TODO: module, moduleClass? package, packageObject?
-
- private def getMetaAnnotation(name: String) = getRequiredClass("scala.annotation.meta." + name)
+ lazy val BeanGetterTargetClass = requiredClass[meta.beanGetter]
+ lazy val BeanSetterTargetClass = requiredClass[meta.beanSetter]
+ lazy val FieldTargetClass = requiredClass[meta.field]
+ lazy val GetterTargetClass = requiredClass[meta.getter]
+ lazy val ParamTargetClass = requiredClass[meta.param]
+ lazy val SetterTargetClass = requiredClass[meta.setter]
+ lazy val ClassTargetClass = requiredClass[meta.companionClass]
+ lazy val ObjectTargetClass = requiredClass[meta.companionObject]
+ lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject?
+ lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature]
+
+ // Language features
+ lazy val languageFeatureModule = getRequiredModule("scala.languageFeature")
+ lazy val experimentalModule = getMember(languageFeatureModule, nme.experimental)
+ lazy val MacrosFeature = getLanguageFeature("macros", experimentalModule)
+ lazy val DynamicsFeature = getLanguageFeature("dynamics")
+ lazy val PostfixOpsFeature = getLanguageFeature("postfixOps")
+ lazy val ReflectiveCallsFeature = getLanguageFeature("reflectiveCalls")
+ lazy val ImplicitConversionsFeature = getLanguageFeature("implicitConversions")
+ lazy val HigherKindsFeature = getLanguageFeature("higherKinds")
+ lazy val ExistentialsFeature = getLanguageFeature("existentials")
+
def isMetaAnnotation(sym: Symbol): Boolean = metaAnnotations(sym) || (
// Trying to allow for deprecated locations
sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol)
)
- lazy val metaAnnotations = Set(
+ lazy val metaAnnotations = Set[Symbol](
FieldTargetClass, ParamTargetClass,
GetterTargetClass, SetterTargetClass,
BeanGetterTargetClass, BeanSetterTargetClass
@@ -949,31 +1000,77 @@ trait Definitions extends reflect.api.StandardDefinitions {
def getPackageObject(fullname: String): Symbol =
getModule(newTermName(fullname)).info member nme.PACKAGE
- def getModule(fullname: Name): Symbol =
- getModuleOrClass(fullname.toTermName)
+ def getModule(fullname: Name): ModuleSymbol =
+ getModuleOrClass(fullname.toTermName) match {
+ case x: ModuleSymbol => x
+ case _ => MissingRequirementError.notFound("object " + fullname)
+ }
- def getClass(fullname: Name): Symbol = {
- var result = getModuleOrClass(fullname.toTypeName)
- while (result.isAliasType) result = result.info.typeSymbol
- result
+ def getPackage(fullname: Name): PackageSymbol =
+ getModuleOrClass(fullname.toTermName) match {
+ case x: PackageSymbol => x
+ case _ => MissingRequirementError.notFound("package " + fullname)
+ }
+ @inline private def wrapMissing(body: => Symbol): Symbol =
+ try body
+ catch { case _: MissingRequirementError => NoSymbol }
+
+ private def fatalMissingSymbol(owner: Symbol, name: Name, what: String = "member") = {
+ throw new FatalError(owner + " does not have a " + what + " " + name)
}
- def getRequiredModule(fullname: String): Symbol =
+ @deprecated("Use getClassByName", "2.10.0")
+ def getClass(fullname: Name): Symbol = getClassByName(fullname)
+
+ def getRequiredPackage(fullname: String): PackageSymbol =
+ getPackage(newTermNameCached(fullname))
+
+ def getRequiredModule(fullname: String): ModuleSymbol =
getModule(newTermNameCached(fullname))
- def getRequiredClass(fullname: String): Symbol =
- getClass(newTypeNameCached(fullname))
+
+ def erasureName[T: ErasureTag] : String = {
+ /** We'd like the String representation to be a valid
+ * scala type, so we have to decode the jvm's secret language.
+ */
+ def erasureString(clazz: Class[_]): String = {
+ if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]"
+ else clazz.getName
+ }
+ erasureString(implicitly[ErasureTag[T]].erasure)
+ }
+
+ def requiredClass[T: ClassTag] : ClassSymbol = getRequiredClass(erasureName[T])
+
+ // TODO: What syntax do we think should work here? Say you have an object
+ // like scala.Predef. You can't say requiredModule[scala.Predef] since there's
+ // no accompanying Predef class, and if you say requiredModule[scala.Predef.type]
+ // the name found via the erasure is scala.Predef$. For now I am
+ // removing the trailing $, but I think that classTag should have
+ // a method which returns a usable name, one which doesn't expose this
+ // detail of the backend.
+ def requiredModule[T: ClassTag] : ModuleSymbol =
+ getRequiredModule(erasureName[T] stripSuffix "$")
+
+ def getRequiredClass(fullname: String): ClassSymbol =
+ getClassByName(newTypeNameCached(fullname)) match {
+ case x: ClassSymbol => x
+ case _ => MissingRequirementError.notFound("class " + fullname)
+ }
def getClassIfDefined(fullname: String): Symbol =
getClassIfDefined(newTypeName(fullname))
+
def getClassIfDefined(fullname: Name): Symbol =
- try getClass(fullname.toTypeName)
- catch { case _: MissingRequirementError => NoSymbol }
+ wrapMissing(getClass(fullname.toTypeName))
def getModuleIfDefined(fullname: String): Symbol =
getModuleIfDefined(newTermName(fullname))
+
def getModuleIfDefined(fullname: Name): Symbol =
- try getModule(fullname.toTermName)
- catch { case _: MissingRequirementError => NoSymbol }
+ wrapMissing(getModule(fullname.toTermName))
+
+ def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule) =
+ getMember(owner, newTypeName(name))
def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name))
def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name))
@@ -994,9 +1091,33 @@ trait Definitions extends reflect.api.StandardDefinitions {
def getMember(owner: Symbol, name: Name): Symbol = {
getMemberIfDefined(owner, name) orElse {
- throw new FatalError(owner + " does not have a member " + name)
+ if (phase.flatClasses && name.isTypeName && !owner.isPackageObjectOrClass) {
+ val pkg = owner.owner
+ val flatname = nme.flattenedName(owner.name, name)
+ getMember(pkg, flatname)
+ }
+ else fatalMissingSymbol(owner, name)
}
}
+ def getMemberModule(owner: Symbol, name: Name): ModuleSymbol = {
+ getMember(owner, name.toTermName) match {
+ case x: ModuleSymbol => x
+ case _ => fatalMissingSymbol(owner, name, "member object")
+ }
+ }
+ def getMemberClass(owner: Symbol, name: Name): ClassSymbol = {
+ getMember(owner, name.toTypeName) match {
+ case x: ClassSymbol => x
+ case _ => fatalMissingSymbol(owner, name, "member class")
+ }
+ }
+ def getMemberMethod(owner: Symbol, name: Name): MethodSymbol = {
+ getMember(owner, name.toTermName) match {
+ case x: MethodSymbol => x
+ case _ => fatalMissingSymbol(owner, name, "method")
+ }
+ }
+
def getMemberIfDefined(owner: Symbol, name: Name): Symbol =
owner.info.nonPrivateMember(name)
@@ -1005,13 +1126,11 @@ trait Definitions extends reflect.api.StandardDefinitions {
* know the method in question is uniquely declared in the given owner.
*/
def getDecl(owner: Symbol, name: Name): Symbol = {
- getDeclIfDefined(owner, name) orElse {
- throw new FatalError(owner + " does not have a decl " + name)
- }
+ getDeclIfDefined(owner, name) orElse fatalMissingSymbol(owner, name, "decl")
}
def getDeclIfDefined(owner: Symbol, name: Name): Symbol =
owner.info.nonPrivateDecl(name)
-
+
def packageExists(packageName: String): Boolean =
getModuleIfDefined(packageName).isPackage
@@ -1037,10 +1156,16 @@ trait Definitions extends reflect.api.StandardDefinitions {
*/
private def getModuleOrClass(path: Name): Symbol = getModuleOrClass(path, path.length)
- private def newAlias(owner: Symbol, name: TypeName, alias: Type): Symbol =
+ def getClassByName(fullname: Name): Symbol = {
+ var result = getModuleOrClass(fullname.toTypeName)
+ while (result.isAliasType) result = result.info.typeSymbol
+ result
+ }
+
+ private def newAlias(owner: Symbol, name: TypeName, alias: Type): AliasTypeSymbol =
owner.newAliasType(name) setInfoAndEnter alias
- private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): Symbol = {
+ private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): ClassSymbol = {
val clazz = enterNewClass(ScalaPackageClass, name, Nil)
val tparam = clazz.newSyntheticTypeParam("T0", flags)
val parents = List(AnyRefClass.tpe, parentFn(tparam))
@@ -1048,7 +1173,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz))
}
- def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): Symbol = {
+ def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): MethodSymbol = {
val msym = owner.newMethod(name.encode, NoPosition, flags)
val tparams = msym.newSyntheticTypeParams(typeParamCount)
val mtpe = createFn(tparams) match {
@@ -1061,16 +1186,16 @@ trait Definitions extends reflect.api.StandardDefinitions {
/** T1 means one type parameter.
*/
- def newT1NullaryMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): Symbol = {
+ def newT1NullaryMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): MethodSymbol = {
newPolyMethod(1, owner, name, flags)(tparams => (None, createFn(tparams.head)))
}
- def newT1NoParamsMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): Symbol = {
+ def newT1NoParamsMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): MethodSymbol = {
newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head)))
}
- lazy val boxedClassValues = boxedClass.values.toSet
- lazy val isUnbox = unboxMethod.values.toSet
- lazy val isBox = boxMethod.values.toSet
+ lazy val boxedClassValues = boxedClass.values.toSet[Symbol]
+ lazy val isUnbox = unboxMethod.values.toSet[Symbol]
+ lazy val isBox = boxMethod.values.toSet[Symbol]
/** Is symbol a phantom class for which no runtime representation exists? */
lazy val isPhantomClass = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
@@ -1078,14 +1203,13 @@ trait Definitions extends reflect.api.StandardDefinitions {
/** Is the symbol that of a parent which is added during parsing? */
lazy val isPossibleSyntheticParent = ProductClass.toSet[Symbol] + ProductRootClass + SerializableClass
- lazy val scalaValueClassesSet = ScalaValueClasses.toSet
- private lazy val boxedValueClassesSet = boxedClass.values.toSet + BoxedUnitClass
+ private lazy val boxedValueClassesSet = boxedClass.values.toSet[Symbol] + BoxedUnitClass
/** Is symbol a value class? */
def isPrimitiveValueClass(sym: Symbol) = ScalaValueClasses contains sym
def isNonUnitValueClass(sym: Symbol) = isPrimitiveValueClass(sym) && (sym != UnitClass)
def isSpecializableClass(sym: Symbol) = isPrimitiveValueClass(sym) || (sym == AnyRefClass)
- def isScalaValueType(tp: Type) = ScalaValueClasses contains tp.typeSymbol
+ def isPrimitiveValueType(tp: Type) = isPrimitiveValueClass(tp.typeSymbol)
/** Is symbol a boxed value class, e.g. java.lang.Integer? */
def isBoxedValueClass(sym: Symbol) = boxedValueClassesSet(sym)
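The requiredClass/requiredModule helpers introduced above resolve compiler symbols from statically known types via their erasure; for a module type such as scala.Predef.type the erasure is the class scala.Predef$, so the trailing '$' is stripped, as the TODO comment notes. A standalone sketch of that name derivation, independent of the compiler's Symbol types:

  def moduleNameFromErasure(clazz: Class[_]): String = {
    def erasureString(c: Class[_]): String =
      if (c.isArray) "Array[" + erasureString(c.getComponentType) + "]"
      else c.getName
    erasureString(clazz).stripSuffix("$")
  }
  // e.g. moduleNameFromErasure(scala.Predef.getClass) == "scala.Predef"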
diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala
index ce459bdd06..e6820cf78a 100644
--- a/src/compiler/scala/reflect/internal/Flags.scala
+++ b/src/compiler/scala/reflect/internal/Flags.scala
@@ -83,40 +83,40 @@ import scala.collection.{ mutable, immutable }
/** Flags set on Modifiers instances in the parsing stage.
*/
class ModifierFlags {
- final val IMPLICIT = 0x00000200
- final val FINAL = 0x00000020 // May not be overridden. Note that java final implies much more than scala final.
- final val PRIVATE = 0x00000004
- final val PROTECTED = 0x00000001
-
- final val SEALED = 0x00000400
- final val OVERRIDE = 0x00000002
- final val CASE = 0x00000800
- final val ABSTRACT = 0x00000008 // abstract class, or used in conjunction with abstract override.
+ final val IMPLICIT = 1 << 9
+ final val FINAL = 1 << 5 // May not be overridden. Note that java final implies much more than scala final.
+ final val PRIVATE = 1 << 2
+ final val PROTECTED = 1 << 0
+
+ final val SEALED = 1 << 10
+ final val OVERRIDE = 1 << 1
+ final val CASE = 1 << 11
+ final val ABSTRACT = 1 << 3 // abstract class, or used in conjunction with abstract override.
// Note difference to DEFERRED!
- final val DEFERRED = 0x00000010 // was `abstract' for members | trait is virtual
- final val INTERFACE = 0x00000080 // symbol is an interface (i.e. a trait which defines only abstract methods)
- final val MUTABLE = 0x00001000 // symbol is a mutable variable.
- final val PARAM = 0x00002000 // symbol is a (value or type) parameter to a method
- final val MACRO = 0x00008000 // symbol is a macro definition
-
- final val COVARIANT = 0x00010000 // symbol is a covariant type variable
- final val BYNAMEPARAM = 0x00010000 // parameter is by name
- final val CONTRAVARIANT = 0x00020000 // symbol is a contravariant type variable
- final val ABSOVERRIDE = 0x00040000 // combination of abstract & override
- final val LOCAL = 0x00080000 // symbol is local to current class (i.e. private[this] or protected[this]
+ final val DEFERRED = 1 << 4 // was `abstract' for members | trait is virtual
+ final val INTERFACE = 1 << 7 // symbol is an interface (i.e. a trait which defines only abstract methods)
+ final val MUTABLE = 1 << 12 // symbol is a mutable variable.
+ final val PARAM = 1 << 13 // symbol is a (value or type) parameter to a method
+ final val MACRO = 1 << 15 // symbol is a macro definition
+
+ final val COVARIANT = 1 << 16 // symbol is a covariant type variable
+ final val BYNAMEPARAM = 1 << 16 // parameter is by name
+ final val CONTRAVARIANT = 1 << 17 // symbol is a contravariant type variable
+ final val ABSOVERRIDE = 1 << 18 // combination of abstract & override
+ final val LOCAL = 1 << 19 // symbol is local to current class (i.e. private[this] or protected[this])
// pre: PRIVATE or PROTECTED are also set
- final val JAVA = 0x00100000 // symbol was defined by a Java class
- final val STATIC = 0x00800000 // static field, method or class
- final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor, or a GADT skolem)
- final val TRAIT = 0x02000000 // symbol is a trait
- final val DEFAULTPARAM = 0x02000000 // the parameter has a default value
- final val PARAMACCESSOR = 0x20000000 // for field definitions generated for primary constructor
+ final val JAVA = 1 << 20 // symbol was defined by a Java class
+ final val STATIC = 1 << 23 // static field, method or class
+ final val CASEACCESSOR = 1 << 24 // symbol is a case parameter (or its accessor, or a GADT skolem)
+ final val TRAIT = 1 << 25 // symbol is a trait
+ final val DEFAULTPARAM = 1 << 25 // the parameter has a default value
+ final val PARAMACCESSOR = 1 << 29 // for field definitions generated for primary constructor
// parameters (no matter if it's a 'val' parameter or not)
// for parameters of a primary constructor ('val' or not)
// for the accessor methods generated for 'val' or 'var' parameters
- final val LAZY = 0x80000000L // symbol is a lazy val. can't have MUTABLE unless transformed by typer
- final val PRESUPER = 0x2000000000L // value is evaluated before super call
- final val DEFAULTINIT = 0x20000000000L// symbol is initialized to the default value: used by -Xcheckinit
+ final val LAZY = 1L << 31 // symbol is a lazy val. can't have MUTABLE unless transformed by typer
+ final val PRESUPER = 1L << 37 // value is evaluated before super call
+ final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit
// Overridden.
def flagToString(flag: Long): String = ""
@@ -129,43 +129,43 @@ object ModifierFlags extends ModifierFlags
 /** All flags and associated operations */
class Flags extends ModifierFlags {
- final val METHOD = 0x00000040 // a method
- final val MODULE = 0x00000100 // symbol is module or class implementing a module
- final val PACKAGE = 0x00004000 // symbol is a java package
-
- final val CAPTURED = 0x00010000 // variable is accessed from nested function. Set by LambdaLift.
- final val LABEL = 0x00020000 // method symbol is a label. Set by TailCall
- final val INCONSTRUCTOR = 0x00020000 // class symbol is defined in this/superclass constructor.
- final val SYNTHETIC = 0x00200000 // symbol is compiler-generated
- final val STABLE = 0x00400000 // functions that are assumed to be stable
+ final val METHOD = 1 << 6 // a method
+ final val MODULE = 1 << 8 // symbol is module or class implementing a module
+ final val PACKAGE = 1 << 14 // symbol is a java package
+
+ final val CAPTURED = 1 << 16 // variable is accessed from nested function. Set by LambdaLift.
+ final val LABEL = 1 << 17 // method symbol is a label. Set by TailCall
+ final val INCONSTRUCTOR = 1 << 17 // class symbol is defined in this/superclass constructor.
+ final val SYNTHETIC = 1 << 21 // symbol is compiler-generated
+ final val STABLE = 1 << 22 // functions that are assumed to be stable
// (typically, access methods for valdefs)
// or classes that do not contain abstract types.
- final val BRIDGE = 0x04000000 // function is a bridge method. Set by Erasure
- final val ACCESSOR = 0x08000000 // a value or variable accessor (getter or setter)
+ final val BRIDGE = 1 << 26 // function is a bridge method. Set by Erasure
+ final val ACCESSOR = 1 << 27 // a value or variable accessor (getter or setter)
- final val SUPERACCESSOR = 0x10000000 // a super accessor
- final val MODULEVAR = 0x40000000 // for variables: is the variable caching a module value
+ final val SUPERACCESSOR = 1 << 28 // a super accessor
+ final val MODULEVAR = 1 << 30 // for variables: is the variable caching a module value
- final val IS_ERROR = 0x100000000L // symbol is an error symbol
- final val OVERLOADED = 0x200000000L // symbol is overloaded
- final val LIFTED = 0x400000000L // class has been lifted out to package level
+ final val IS_ERROR = 1L << 32 // symbol is an error symbol
+ final val OVERLOADED = 1L << 33 // symbol is overloaded
+ final val LIFTED = 1L << 34 // class has been lifted out to package level
// local value has been lifted out to class level
// todo: make LIFTED = latePRIVATE?
- final val MIXEDIN = 0x800000000L // term member has been mixed in
- final val EXISTENTIAL = 0x800000000L // type is an existential parameter or skolem
- final val EXPANDEDNAME = 0x1000000000L // name has been expanded with class suffix
- final val IMPLCLASS = 0x2000000000L // symbol is an implementation class
- final val TRANS_FLAG = 0x4000000000L // transient flag guaranteed to be reset after each phase.
+ final val MIXEDIN = 1L << 35 // term member has been mixed in
+ final val EXISTENTIAL = 1L << 35 // type is an existential parameter or skolem
+ final val EXPANDEDNAME = 1L << 36 // name has been expanded with class suffix
+ final val IMPLCLASS = 1L << 37 // symbol is an implementation class
+ final val TRANS_FLAG = 1L << 38 // transient flag guaranteed to be reset after each phase.
- final val LOCKED = 0x8000000000L // temporary flag to catch cyclic dependencies
- final val SPECIALIZED = 0x10000000000L// symbol is a generated specialized member
- final val VBRIDGE = 0x40000000000L// symbol is a varargs bridge
+ final val LOCKED = 1L << 39 // temporary flag to catch cyclic dependencies
+ final val SPECIALIZED = 1L << 40 // symbol is a generated specialized member
+ final val VBRIDGE = 1L << 42 // symbol is a varargs bridge
- final val VARARGS = 0x80000000000L// symbol is a Java-style varargs method
- final val TRIEDCOOKING = 0x100000000000L // ``Cooking'' has been tried on this symbol
- // A Java method's type is ``cooked'' by transforming raw types to existentials
+ final val VARARGS = 1L << 43 // symbol is a Java-style varargs method
+ final val TRIEDCOOKING = 1L << 44 // ``Cooking'' has been tried on this symbol
+ // A Java method's type is ``cooked'' by transforming raw types to existentials
- final val SYNCHRONIZED = 0x200000000000L // symbol is a method which should be marked ACC_SYNCHRONIZED
+ final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED
// ------- shift definitions -------------------------------------------------------
final val InitialFlags = 0x0001FFFFFFFFFFFFL // flags that are enabled from phase 1.
@@ -260,7 +260,7 @@ class Flags extends ModifierFlags {
/** When a symbol for a default getter is created, it inherits these
* flags from the method with the default. Other flags applied at creation
- * time are SYNTHETIC, DEFAULTPARAM, and possibly OVERRIDE.
+ * time are SYNTHETIC, DEFAULTPARAM, and possibly OVERRIDE, and maybe PRESUPER.
*/
final val DefaultGetterFlags = PRIVATE | PROTECTED | FINAL
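
The hunk above is a pure re-encoding: every hexadecimal flag literal is rewritten as a shift with the same value. A small standalone check (the object name below is made up for this sketch) confirms a few representative pairs and shows that derived masks such as DefaultGetterFlags are unchanged:

object FlagBitCheck extends App {
  // Old hexadecimal literal vs. new shift form for a few representative flags.
  assert((1 << 9)   == 0x00000200)       // IMPLICIT
  assert((1 << 5)   == 0x00000020)       // FINAL
  assert((1 << 29)  == 0x20000000)       // PARAMACCESSOR
  assert((1L << 31) == 0x80000000L)      // LAZY (1 << 31 as an Int would be negative, hence the Long)
  assert((1L << 37) == 0x2000000000L)    // PRESUPER
  assert((1L << 41) == 0x20000000000L)   // DEFAULTINIT

  // Derived masks are unaffected by the re-encoding.
  val PRIVATE   = 1L << 2
  val PROTECTED = 1L << 0
  val FINAL     = 1L << 5
  val DefaultGetterFlags = PRIVATE | PROTECTED | FINAL
  assert(DefaultGetterFlags == 0x25L)    // 0x4 | 0x1 | 0x20
  println("hex literals and shift forms agree")
}
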
diff --git a/src/compiler/scala/reflect/internal/FrontEnds.scala b/src/compiler/scala/reflect/internal/FrontEnds.scala
new file mode 100644
index 0000000000..74501c7686
--- /dev/null
+++ b/src/compiler/scala/reflect/internal/FrontEnds.scala
@@ -0,0 +1,75 @@
+package scala.reflect
+package internal
+
+trait FrontEnds { self: SymbolTable =>
+
+ import scala.tools.nsc.reporters._
+ import scala.tools.nsc.Settings
+
+ def mkConsoleFrontEnd(minSeverity: Int = 1): FrontEnd = {
+ val settings = new Settings()
+ if (minSeverity <= 0) settings.verbose.value = true
+ if (minSeverity > 1) settings.nowarn.value = true
+ wrapReporter(new ConsoleReporter(settings))
+ }
+
+ abstract class FrontEndToReporterProxy(val frontEnd: FrontEnd) extends AbstractReporter {
+ import frontEnd.{Severity => ApiSeverity}
+ val API_INFO = frontEnd.INFO
+ val API_WARNING = frontEnd.WARNING
+ val API_ERROR = frontEnd.ERROR
+
+ type NscSeverity = Severity
+ val NSC_INFO = INFO
+ val NSC_WARNING = WARNING
+ val NSC_ERROR = ERROR
+
+ def display(pos: Position, msg: String, nscSeverity: NscSeverity): Unit =
+ frontEnd.log(pos, msg, nscSeverity match {
+ case NSC_INFO => API_INFO
+ case NSC_WARNING => API_WARNING
+ case NSC_ERROR => API_ERROR
+ })
+
+ def displayPrompt(): Unit =
+ frontEnd.interactive()
+ }
+
+ def wrapFrontEnd(frontEnd: FrontEnd): Reporter = new FrontEndToReporterProxy(frontEnd) {
+ val settings = new Settings()
+ settings.verbose.value = true
+ settings.nowarn.value = false
+ }
+
+ class ReporterToFrontEndProxy(val reporter: Reporter) extends FrontEnd {
+ val API_INFO = INFO
+ val API_WARNING = WARNING
+ val API_ERROR = ERROR
+
+ override def hasErrors = reporter.hasErrors
+ override def hasWarnings = reporter.hasWarnings
+
+ def display(info: Info): Unit = info.severity match {
+ case API_INFO => reporter.info(info.pos, info.msg, false)
+ case API_WARNING => reporter.warning(info.pos, info.msg)
+ case API_ERROR => reporter.error(info.pos, info.msg)
+ }
+
+ def interactive(): Unit = reporter match {
+ case reporter: AbstractReporter => reporter.displayPrompt()
+ case _ => // do nothing
+ }
+
+ override def flush(): Unit = {
+ super.flush()
+ reporter.flush()
+ }
+
+ override def reset(): Unit = {
+ super.reset()
+ reporter.reset()
+ }
+ }
+
+ def wrapReporter(reporter: Reporter): FrontEnd = new ReporterToFrontEndProxy(reporter)
+}
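
FrontEnds.scala replaces the old Reporters.scala proxies (deleted further down in this patch). The heart of FrontEndToReporterProxy is a two-way severity translation; the sketch below models only that translation with invented MiniFrontEnd/MiniProxy types, so it is illustrative rather than the compiler's actual API:

object SeverityBridgeDemo extends App {
  // Invented stand-ins: these are NOT the compiler's FrontEnd/Reporter types.
  sealed trait ApiSeverity
  case object ApiInfo    extends ApiSeverity
  case object ApiWarning extends ApiSeverity
  case object ApiError   extends ApiSeverity

  sealed trait NscSeverity
  case object NscInfo    extends NscSeverity
  case object NscWarning extends NscSeverity
  case object NscError   extends NscSeverity

  trait MiniFrontEnd { def log(msg: String, severity: ApiSeverity): Unit }

  // Same shape as FrontEndToReporterProxy.display: translate one severity
  // enumeration into the other, then forward.
  class MiniProxy(frontEnd: MiniFrontEnd) {
    def display(msg: String, severity: NscSeverity): Unit =
      frontEnd.log(msg, severity match {
        case NscInfo    => ApiInfo
        case NscWarning => ApiWarning
        case NscError   => ApiError
      })
  }

  val fe = new MiniFrontEnd { def log(m: String, s: ApiSeverity) = println("[" + s + "] " + m) }
  new MiniProxy(fe).display("something went wrong", NscError)
}
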
diff --git a/src/compiler/scala/reflect/internal/HasFlags.scala b/src/compiler/scala/reflect/internal/HasFlags.scala
index 348f81c51d..0937577ca3 100644
--- a/src/compiler/scala/reflect/internal/HasFlags.scala
+++ b/src/compiler/scala/reflect/internal/HasFlags.scala
@@ -75,70 +75,41 @@ trait HasFlags {
* flag carrying entity.
*/
def resolveOverloadedFlag(flag: Long): String = Flags.flagToString(flag)
-
- def privateWithinString = if (hasAccessBoundary) privateWithin.toString else ""
-
- protected def isSetting(f: Long, mask: Long) = !hasFlag(f) && ((mask & f) != 0L)
- protected def isClearing(f: Long, mask: Long) = hasFlag(f) && ((mask & f) != 0L)
// Tests which come through cleanly: both Symbol and Modifiers use these
// identically, testing for a single flag.
- def isCase = hasFlag(CASE )
- def isFinal = hasFlag(FINAL )
- def isImplicit = hasFlag(IMPLICIT )
- def isLazy = hasFlag(LAZY )
- def isMutable = hasFlag(MUTABLE ) // in Modifiers, formerly isVariable
- def isOverride = hasFlag(OVERRIDE )
- def isPrivate = hasFlag(PRIVATE )
- def isProtected = hasFlag(PROTECTED)
- def isSynthetic = hasFlag(SYNTHETIC)
- def isInterface = hasFlag(INTERFACE)
-
- // Newly introduced based on having a reasonably obvious clean translation.
- def isPrivateLocal = hasAllFlags(PrivateLocal)
- def isProtectedLocal = hasAllFlags(ProtectedLocal)
- def isParamAccessor = hasFlag(PARAMACCESSOR)
- def isCaseAccessor = hasFlag(CASEACCESSOR)
- def isSuperAccessor = hasFlag(SUPERACCESSOR)
- def isLifted = hasFlag(LIFTED)
-
- // Formerly the Modifiers impl did not include the access boundary check,
- // which must have been a bug.
- def isPublic = hasNoFlags(PRIVATE | PROTECTED) && !hasAccessBoundary
-
- // Removed isClass qualification since the flag isn't overloaded and
- // sym.isClass is enforced in Namers#validate.
- def isSealed = hasFlag(SEALED)
-
- // Removed !isClass qualification since the flag isn't overloaded.
- def isDeferred = hasFlag(DEFERRED)
-
- // Dropped isTerm condition because flag isn't overloaded.
+ def hasAbstractFlag = hasFlag(ABSTRACT)
+ def hasAccessorFlag = hasFlag(ACCESSOR)
+ def hasDefault = hasAllFlags(DEFAULTPARAM | PARAM)
+ def hasLocalFlag = hasFlag(LOCAL)
+ def hasModuleFlag = hasFlag(MODULE)
+ def hasPackageFlag = hasFlag(PACKAGE)
+ def hasStableFlag = hasFlag(STABLE)
+ def hasStaticFlag = hasFlag(STATIC)
def isAbstractOverride = hasFlag(ABSOVERRIDE)
- def isAnyOverride = hasFlag(OVERRIDE | ABSOVERRIDE)
-
- // Disambiguating: DEFAULTPARAM, TRAIT
- def hasDefault = hasAllFlags(DEFAULTPARAM | PARAM)
- def isTrait = hasFlag(TRAIT) && !hasFlag(PARAM)
-
- // Straightforwardly named accessors already being used differently.
- // These names are most likely temporary.
- def hasAbstractFlag = hasFlag(ABSTRACT)
- def hasAccessorFlag = hasFlag(ACCESSOR)
- def hasLocalFlag = hasFlag(LOCAL)
- def hasModuleFlag = hasFlag(MODULE)
- def hasPackageFlag = hasFlag(PACKAGE)
- def hasStableFlag = hasFlag(STABLE)
- def hasStaticFlag = hasFlag(STATIC)
-
- // Disambiguating: LABEL, CONTRAVARIANT, INCONSTRUCTOR
- def isLabel = hasAllFlags(LABEL | METHOD) && !hasAccessorFlag
- // Cannot effectively disambiguate the others at this level.
- def hasContravariantFlag = hasFlag(CONTRAVARIANT)
- def hasInConstructorFlag = hasFlag(INCONSTRUCTOR)
-
- // Name
- def isJavaDefined = hasFlag(JAVA)
+ def isAnyOverride = hasFlag(OVERRIDE | ABSOVERRIDE)
+ def isCase = hasFlag(CASE)
+ def isCaseAccessor = hasFlag(CASEACCESSOR)
+ def isDeferred = hasFlag(DEFERRED)
+ def isFinal = hasFlag(FINAL)
+ def isImplicit = hasFlag(IMPLICIT)
+ def isInterface = hasFlag(INTERFACE)
+ def isJavaDefined = hasFlag(JAVA)
+ def isLabel = hasAllFlags(LABEL | METHOD) && !hasAccessorFlag
+ def isLazy = hasFlag(LAZY)
+ def isLifted = hasFlag(LIFTED)
+ def isMutable = hasFlag(MUTABLE)
+ def isOverride = hasFlag(OVERRIDE)
+ def isParamAccessor = hasFlag(PARAMACCESSOR)
+ def isPrivate = hasFlag(PRIVATE)
+ def isPrivateLocal = hasAllFlags(PrivateLocal)
+ def isProtected = hasFlag(PROTECTED)
+ def isProtectedLocal = hasAllFlags(ProtectedLocal)
+ def isPublic = hasNoFlags(PRIVATE | PROTECTED) && !hasAccessBoundary
+ def isSealed = hasFlag(SEALED)
+ def isSuperAccessor = hasFlag(SUPERACCESSOR)
+ def isSynthetic = hasFlag(SYNTHETIC)
+ def isTrait = hasFlag(TRAIT) && !hasFlag(PARAM)
def flagBitsToString(bits: Long): String = {
// Fast path for common case
@@ -162,7 +133,7 @@ trait HasFlags {
}
def accessString: String = {
- val pw = privateWithinString
+ val pw = if (hasAccessBoundary) privateWithin.toString else ""
if (pw == "") {
if (hasAllFlags(PrivateLocal)) "private[this]"
@@ -188,8 +159,6 @@ trait HasFlags {
def hasTraitFlag = hasFlag(TRAIT)
@deprecated("Use hasDefault", "2.10.0")
def hasDefaultFlag = hasFlag(DEFAULTPARAM)
- @deprecated("", "2.9.0")
- def isAbstract = hasFlag(ABSTRACT)
@deprecated("Use isValueParameter or isTypeParameter", "2.10.0")
def isParameter = hasFlag(PARAM)
@deprecated("Use flagString", "2.10.0")
diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala
index ab5e19fca9..6d6a0ec317 100644
--- a/src/compiler/scala/reflect/internal/Importers.scala
+++ b/src/compiler/scala/reflect/internal/Importers.scala
@@ -71,9 +71,9 @@ trait Importers { self: SymbolTable =>
case x: from.ModuleSymbol =>
linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
case x: from.FreeTerm =>
- newFreeTerm(importName(x.name).toTermName, importType(x.info), x.value, x.origin, myflags)
+ newFreeTermSymbol(importName(x.name).toTermName, importType(x.info), x.value, x.flags, x.origin)
case x: from.FreeType =>
- newFreeType(importName(x.name).toTypeName, importType(x.info), x.value, x.origin, myflags)
+ newFreeTypeSymbol(importName(x.name).toTypeName, importType(x.info), x.value, x.flags, x.origin)
case x: from.TermSymbol =>
linkReferenced(myowner.newValue(myname, mypos, myflags), x, importSymbol)
case x: from.TypeSkolem =>
@@ -326,7 +326,7 @@ trait Importers { self: SymbolTable =>
case from.ValDef(mods, name, tpt, rhs) =>
new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- new DefDef(importModifiers(mods), importName(name).toTermName, tparams map importTypeDef, vparamss map (_ map importValDef), importTree(tpt), importTree(rhs))
+ new DefDef(importModifiers(mods), importName(name).toTermName, tparams map importTypeDef, mmap(vparamss)(importValDef), importTree(tpt), importTree(rhs))
case from.TypeDef(mods, name, tparams, rhs) =>
new TypeDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTree(rhs))
case from.LabelDef(name, params, rhs) =>
@@ -389,12 +389,8 @@ trait Importers { self: SymbolTable =>
new This(importName(qual).toTypeName)
case from.Select(qual, name) =>
new Select(importTree(qual), importName(name))
- case from.Ident(name) => tree match {
- case _: from.BackQuotedIdent =>
- new BackQuotedIdent(importName(name))
- case _ =>
- new Ident(importName(name))
- }
+ case from.Ident(name) =>
+ new Ident(importName(name))
case from.ReferenceToBoxed(ident) =>
new ReferenceToBoxed(importTree(ident) match { case ident: Ident => ident })
case from.Literal(constant @ from.Constant(_)) =>
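
The DefDef case above swaps "vparamss map (_ map importValDef)" for "mmap(vparamss)(importValDef)". Assuming mmap is the usual nested-map helper from the compiler's collection utilities, the two forms are equivalent; a minimal standalone sketch of that equivalence:

object MMapSketch extends App {
  // Assumed definition of the helper: map a function over a list of lists.
  def mmap[A, B](xss: List[List[A]])(f: A => B): List[List[B]] =
    xss map (_ map f)

  val vparamss = List(List(1, 2), List(3))       // stands in for nested parameter lists
  val doubled1 = vparamss map (_ map (_ * 2))    // old spelling
  val doubled2 = mmap(vparamss)(_ * 2)           // new spelling
  assert(doubled1 == doubled2)
  println(doubled2)                              // List(List(2, 4), List(6))
}
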
diff --git a/src/compiler/scala/reflect/internal/InfoTransformers.scala b/src/compiler/scala/reflect/internal/InfoTransformers.scala
index 96d9d8f076..e53f714c0c 100644
--- a/src/compiler/scala/reflect/internal/InfoTransformers.scala
+++ b/src/compiler/scala/reflect/internal/InfoTransformers.scala
@@ -20,12 +20,14 @@ trait InfoTransformers {
def transform(sym: Symbol, tpe: Type): Type
def insert(that: InfoTransformer) {
- assert(this.pid != that.pid)
+ assert(this.pid != that.pid, this.pid)
+
if (that.pid < this.pid) {
prev insert that
} else if (next.pid <= that.pid && next.pid != NoPhase.id) {
next insert that
} else {
+ log("Inserting info transformer %s following %s".format(phaseOf(that.pid), phaseOf(this.pid)))
that.next = next
that.prev = this
next.prev = that
diff --git a/src/compiler/scala/reflect/internal/NameManglers.scala b/src/compiler/scala/reflect/internal/NameManglers.scala
deleted file mode 100644
index ac22017569..0000000000
--- a/src/compiler/scala/reflect/internal/NameManglers.scala
+++ /dev/null
@@ -1,216 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package internal
-
-import java.security.MessageDigest
-import scala.io.Codec
-import Chars.isOperatorPart
-
-/** A trait to encapsulate name mangling. It's intended for the
- * values and methods involved in assembling names out of other names,
- * and not for simple synthetically named locals.
- */
-trait NameManglers {
- self: SymbolTable =>
-
- trait NameManglingCommon {
- self: CommonNames =>
-
- val MODULE_SUFFIX_STRING = NameTransformer.MODULE_SUFFIX_STRING
- val NAME_JOIN_STRING = NameTransformer.NAME_JOIN_STRING
-
- val MODULE_SUFFIX_NAME: TermName = newTermName(MODULE_SUFFIX_STRING)
- val NAME_JOIN_NAME: TermName = newTermName(NAME_JOIN_STRING)
-
- def flattenedName(segments: Name*): NameType = compactedString(segments mkString NAME_JOIN_STRING)
-
- /**
- * COMPACTIFY
- *
- * The hashed name has the form (prefix + marker + md5 + marker + suffix), where
- * - prefix/suffix.length = MaxNameLength / 4
- * - md5.length = 32
- *
- * We obtain the formula:
- *
- * FileNameLength = 2*(MaxNameLength / 4) + 2.marker.length + 32 + 6
- *
- * (+6 for ".class"). MaxNameLength can therefore be computed as follows:
- */
- private final val marker = "$$$$"
- private final val MaxNameLength = math.min(
- settings.maxClassfileName.value - 6,
- 2 * (settings.maxClassfileName.value - 6 - 2*marker.length - 32)
- )
- private lazy val md5 = MessageDigest.getInstance("MD5")
- private def toMD5(s: String, edge: Int) = {
- val prefix = s take edge
- val suffix = s takeRight edge
-
- val cs = s.toArray
- val bytes = Codec toUTF8 cs
- md5 update bytes
- val md5chars = md5.digest() map (b => (b & 0xFF).toHexString) mkString
-
- prefix + marker + md5chars + marker + suffix
- }
- private def compactedString(s: String) =
- if (s.length <= MaxNameLength) s
- else toMD5(s, MaxNameLength / 4)
- }
-
- trait TypeNameMangling extends NameManglingCommon {
- self: tpnme.type =>
-
- }
-
- trait TermNameMangling extends NameManglingCommon {
- self: nme.type =>
-
- val IMPL_CLASS_SUFFIX = "$class"
- val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
- val PROTECTED_PREFIX = "protected$"
- val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
- val SINGLETON_SUFFIX = ".type"
- val SUPER_PREFIX_STRING = "super$"
- val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
- val SETTER_SUFFIX: TermName = encode("_=")
-
- @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
- def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
- @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
- def SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX.toTermName
-
- def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
- def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
- def isImplClassName(name: Name) = name endsWith IMPL_CLASS_SUFFIX
- def isLocalDummyName(name: Name) = name startsWith LOCALDUMMY_PREFIX
- def isLocalName(name: Name) = name endsWith LOCAL_SUFFIX_STRING
- def isLoopHeaderLabel(name: Name) = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
- def isProtectedAccessorName(name: Name) = name startsWith PROTECTED_PREFIX
- def isSuperAccessorName(name: Name) = name startsWith SUPER_PREFIX_STRING
- def isReplWrapperName(name: Name) = name containsName INTERPRETER_IMPORT_WRAPPER
- def isSetterName(name: Name) = name endsWith SETTER_SUFFIX
- def isTraitSetterName(name: Name) = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING)
- def isSingletonName(name: Name) = name endsWith SINGLETON_SUFFIX
- def isModuleName(name: Name) = name endsWith MODULE_SUFFIX_NAME
-
- def isOpAssignmentName(name: Name) = name match {
- case raw.NE | raw.LE | raw.GE | EMPTY => false
- case _ =>
- name.endChar == '=' && name.startChar != '=' && isOperatorPart(name.startChar)
- }
-
- /** The expanded setter name of `name` relative to this class `base`
- */
- def expandedSetterName(name: TermName, base: Symbol): TermName =
- expandedName(name, base, separator = TRAIT_SETTER_SEPARATOR_STRING)
-
- /** If `name` is an expandedName name, the original name.
- * Otherwise `name` itself.
- */
- def originalName(name: Name): Name = {
- var i = name.length
- while (i >= 2 && !(name(i - 1) == '$' && name(i - 2) == '$')) i -= 1
- if (i >= 2) {
- while (i >= 3 && name(i - 3) == '$') i -= 1
- name.subName(i, name.length)
- } else name
- }
-
- def unspecializedName(name: Name): Name = (
- if (name endsWith SPECIALIZED_SUFFIX)
- name.subName(0, name.lastIndexOf('m') - 1)
- else name
- )
-
- /** Return the original name and the types on which this name
- * is specialized. For example,
- * {{{
- * splitSpecializedName("foo$mIcD$sp") == ('foo', "I", "D")
- * }}}
- * `foo$mIcD$sp` is the name of a method specialized on two type
- * parameters, the first one belonging to the method itself, on Int,
- * and another one belonging to the enclosing class, on Double.
- */
- def splitSpecializedName(name: Name): (Name, String, String) =
- if (name endsWith SPECIALIZED_SUFFIX) {
- val name1 = name dropRight SPECIALIZED_SUFFIX.length
- val idxC = name1 lastIndexOf 'c'
- val idxM = name1 lastIndexOf 'm'
-
- (name1.subName(0, idxM - 1),
- name1.subName(idxC + 1, name1.length).toString,
- name1.subName(idxM + 1, idxC).toString)
- } else
- (name, "", "")
-
- def getterName(name: TermName): TermName = if (isLocalName(name)) localToGetter(name) else name
- def getterToLocal(name: TermName): TermName = name append LOCAL_SUFFIX_STRING
- def getterToSetter(name: TermName): TermName = name append SETTER_SUFFIX
- def localToGetter(name: TermName): TermName = name dropRight LOCAL_SUFFIX_STRING.length
-
- def dropLocalSuffix(name: Name): Name = if (name endsWith ' ') name dropRight 1 else name
-
- def setterToGetter(name: TermName): TermName = {
- val p = name.pos(TRAIT_SETTER_SEPARATOR_STRING)
- if (p < name.length)
- setterToGetter(name drop (p + TRAIT_SETTER_SEPARATOR_STRING.length))
- else
- name.subName(0, name.length - SETTER_SUFFIX.length)
- }
-
- def defaultGetterName(name: Name, pos: Int): TermName = {
- val prefix = if (isConstructorName(name)) "init" else name
- newTermName(prefix + DEFAULT_GETTER_STRING + pos)
- }
- def defaultGetterToMethod(name: Name): TermName = {
- val p = name.pos(DEFAULT_GETTER_STRING)
- if (p < name.length) name.toTermName.subName(0, p)
- else name.toTermName
- }
-
- // def anonNumberSuffix(name: Name): Name = {
- // ("" + name) lastIndexOf '$' match {
- // case -1 => nme.EMPTY
- // case idx =>
- // val s = name drop idx
- // if (s.toString forall (_.isDigit)) s
- // else nme.EMPTY
- // }
- // }
-
- // If the name ends with $nn where nn are
- // all digits, strip the $ and the digits.
- // Otherwise return the argument.
- def stripAnonNumberSuffix(name: Name): Name = {
- var pos = name.length
- while (pos > 0 && name(pos - 1).isDigit)
- pos -= 1
-
- if (pos <= 0 || pos == name.length || name(pos - 1) != '$') name
- else name.subName(0, pos - 1)
- }
-
- def stripModuleSuffix(name: Name): Name = (
- if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name
- )
-
- def dropSingletonName(name: Name): TypeName = name dropRight SINGLETON_SUFFIX.length toTypeName
- def singletonName(name: Name): TypeName = name append SINGLETON_SUFFIX toTypeName
- def implClassName(name: Name): TypeName = name append IMPL_CLASS_SUFFIX toTypeName
- def interfaceName(implname: Name): TypeName = implname dropRight IMPL_CLASS_SUFFIX.length toTypeName
- def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">")
- def superName(name: Name): TermName = newTermName(SUPER_PREFIX_STRING + name)
-
- /** The name of an accessor for protected symbols. */
- def protName(name: Name): TermName = newTermName(PROTECTED_PREFIX + name)
-
- /** The name of a setter for protected symbols. Used for inherited Java fields. */
- def protSetterName(name: Name): TermName = newTermName(PROTECTED_SET_PREFIX + name)
- }
-}
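
For reference, here is the COMPACTIFY arithmetic from the comment above worked through once. The 255 is an assumption (the customary default of -Xmax-classfile-name); everything else follows the deleted code, which reappears as StdNames#compactify later in this patch:

object CompactifyArithmetic extends App {
  val maxClassfileName = 255            // assumed default of -Xmax-classfile-name
  val marker           = "$$$$"

  val MaxNameLength = math.min(
    maxClassfileName - 6,
    2 * (maxClassfileName - 6 - 2 * marker.length - 32)
  )
  val edge = MaxNameLength / 4

  println("MaxNameLength           = " + MaxNameLength)   // 249
  println("prefix/suffix length    = " + edge)            // 62

  // A compactified name is prefix + marker + md5 + marker + suffix.
  val hashedLength = edge + marker.length + 32 + marker.length + edge
  println("hashed (compact) length = " + hashedLength)    // 164
  assert(hashedLength + ".class".length <= maxClassfileName)
}
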
diff --git a/src/compiler/scala/reflect/internal/Names.scala b/src/compiler/scala/reflect/internal/Names.scala
index 5f38374f20..17924f0c0c 100644
--- a/src/compiler/scala/reflect/internal/Names.scala
+++ b/src/compiler/scala/reflect/internal/Names.scala
@@ -8,6 +8,7 @@ package internal
import scala.io.Codec
import java.security.MessageDigest
+import language.implicitConversions
/** The class Names ...
*
@@ -355,8 +356,10 @@ trait Names extends api.Names {
final def endsWith(char: Char): Boolean = len > 0 && endChar == char
final def endsWith(name: String): Boolean = endsWith(newTermName(name))
- def dropRight(n: Int) = subName(0, len - n)
- def drop(n: Int) = subName(n, len)
+ def dropRight(n: Int): ThisNameType = subName(0, len - n)
+ def drop(n: Int): ThisNameType = subName(n, len)
+ def stripSuffix(suffix: Name): ThisNameType =
+ if (this endsWith suffix) dropRight(suffix.length) else thisName
def indexOf(ch: Char) = {
val idx = pos(ch)
@@ -394,14 +397,14 @@ trait Names extends api.Names {
// def decodedName: ThisNameType = newName(decoded)
def encodedName: ThisNameType = encode
- /** Replace operator symbols by corresponding $op_name. */
+ /** Replace operator symbols by corresponding \$op_name. */
def encode: ThisNameType = {
val str = toString
val res = NameTransformer.encode(str)
if (res == str) thisName else newName(res)
}
- /** Replace $op_name by corresponding operator symbol. */
+ /** Replace \$op_name by corresponding operator symbol. */
def decode: String = {
if (this containsChar '$') {
val str = toString
@@ -428,13 +431,16 @@ trait Names extends api.Names {
/** A name that contains no operator chars nor dollar signs.
* TODO - see if it's any faster to do something along these lines.
+ * Cute: now that exhaustivity kind of works, the mere presence of
+ * this trait causes TermName and TypeName to stop being exhaustive.
+ * Commented out.
*/
- trait AlphaNumName extends Name {
- final override def encode = thisName
- final override def decodedName = thisName
- final override def decode = toString
- final override def isOperatorName = false
- }
+ // trait AlphaNumName extends Name {
+ // final override def encode = thisName
+ // final override def decodedName = thisName
+ // final override def decode = toString
+ // final override def isOperatorName = false
+ // }
/** TermName_S and TypeName_S have fields containing the string version of the name.
* TermName_R and TypeName_R recreate it each time toString is called.
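
The new stripSuffix above is definable entirely in terms of endsWith and dropRight. A plain-String stand-in (String replaces Name purely for illustration; the real method returns ThisNameType, i.e. TermName or TypeName):

object StripSuffixSketch extends App {
  // Same logic as Name#stripSuffix: drop the suffix only when it is present.
  def stripSuffix(name: String, suffix: String): String =
    if (name endsWith suffix) name dropRight suffix.length else name

  assert(stripSuffix("Foo$class", "$class") == "Foo")
  assert(stripSuffix("Foo", "$class") == "Foo")   // no suffix: returned unchanged
  println("stripSuffix behaves as defined in Names.scala")
}
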
diff --git a/src/compiler/scala/reflect/internal/Phase.scala b/src/compiler/scala/reflect/internal/Phase.scala
index 89d643aacf..68dc5ce783 100644
--- a/src/compiler/scala/reflect/internal/Phase.scala
+++ b/src/compiler/scala/reflect/internal/Phase.scala
@@ -7,9 +7,10 @@ package scala.reflect
package internal
abstract class Phase(val prev: Phase) {
+ if ((prev ne null) && (prev ne NoPhase))
+ prev.nx = this
type Id = Int
-
val id: Id = if (prev eq null) 0 else prev.id + 1
/** New flags visible after this phase has completed */
@@ -18,12 +19,13 @@ abstract class Phase(val prev: Phase) {
/** New flags visible once this phase has started */
def newFlags: Long = 0l
- private var fmask: Long =
- if (prev eq null) Flags.InitialFlags else prev.flagMask | prev.nextFlags | newFlags
+ val fmask = (
+ if (prev eq null) Flags.InitialFlags
+ else prev.flagMask | prev.nextFlags | newFlags
+ )
def flagMask: Long = fmask
private var nx: Phase = this
- if ((prev ne null) && (prev ne NoPhase)) prev.nx = this
def next: Phase = nx
def hasNext = next != this
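
With fmask now an eagerly evaluated val, each phase's flag mask is fixed at construction as the union of its predecessor's mask, the predecessor's nextFlags, and its own newFlags. A toy model of that chaining (MiniPhase is invented; InitialFlags is the constant from Flags.scala above):

object PhaseFlagMaskSketch extends App {
  val InitialFlags = 0x0001FFFFFFFFFFFFL

  class MiniPhase(prev: MiniPhase, val newFlags: Long, val nextFlags: Long = 0L) {
    // Mirrors the eager fmask computation in internal.Phase.
    val flagMask: Long =
      if (prev == null) InitialFlags
      else prev.flagMask | prev.nextFlags | newFlags
  }

  val p1 = new MiniPhase(null, newFlags = 0L, nextFlags = 1L << 50)
  val p2 = new MiniPhase(p1,   newFlags = 1L << 52)

  // p2 sees everything p1 saw, plus p1's nextFlags, plus its own newFlags.
  assert((p2.flagMask & (1L << 50)) != 0L)
  assert((p2.flagMask & (1L << 52)) != 0L)
  assert((p2.flagMask & InitialFlags) == InitialFlags)
  println("flag masks accumulate monotonically across phases")
}
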
diff --git a/src/compiler/scala/reflect/internal/Reporters.scala b/src/compiler/scala/reflect/internal/Reporters.scala
deleted file mode 100644
index 20d4a1d026..0000000000
--- a/src/compiler/scala/reflect/internal/Reporters.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-package scala.reflect
-package internal
-
-trait Reporters { self: SymbolTable =>
-
- import self.{Reporter => ApiReporter}
- import scala.tools.nsc.reporters._
- import scala.tools.nsc.reporters.{Reporter => NscReporter}
- import scala.tools.nsc.Settings
-
- def mkConsoleReporter(minSeverity: Int = 1): ApiReporter = {
- val settings = new Settings()
- if (minSeverity <= 0) settings.verbose.value = true
- if (minSeverity > 1) settings.nowarn.value = true
- wrapNscReporter(new ConsoleReporter(settings))
- }
-
- abstract class ApiToNscReporterProxy(val apiReporter: ApiReporter) extends AbstractReporter {
- import apiReporter.{Severity => ApiSeverity}
- val API_INFO = apiReporter.INFO
- val API_WARNING = apiReporter.WARNING
- val API_ERROR = apiReporter.ERROR
-
- type NscSeverity = Severity
- val NSC_INFO = INFO
- val NSC_WARNING = WARNING
- val NSC_ERROR = ERROR
-
- def display(pos: Position, msg: String, nscSeverity: NscSeverity): Unit =
- apiReporter.log(pos, msg, nscSeverity match {
- case NSC_INFO => API_INFO
- case NSC_WARNING => API_WARNING
- case NSC_ERROR => API_ERROR
- })
-
- def displayPrompt(): Unit =
- apiReporter.interactive()
- }
-
- def wrapApiReporter(apiReporter: ApiReporter): NscReporter = new ApiToNscReporterProxy(apiReporter) {
- val settings = new Settings()
- settings.verbose.value = true
- settings.nowarn.value = false
- }
-
- class NscToApiReporterProxy(val nscReporter: NscReporter) extends ApiReporter {
- val API_INFO = INFO
- val API_WARNING = WARNING
- val API_ERROR = ERROR
-
- def display(info: Info): Unit = info.severity match {
- case API_INFO => nscReporter.info(info.pos, info.msg, false)
- case API_WARNING => nscReporter.warning(info.pos, info.msg)
- case API_ERROR => nscReporter.error(info.pos, info.msg)
- }
-
- def interactive(): Unit = nscReporter match {
- case nscReporter: AbstractReporter => nscReporter.displayPrompt()
- case _ => // do nothing
- }
-
- override def flush(): Unit = {
- super.flush()
- nscReporter.flush()
- }
-
- override def reset(): Unit = {
- super.reset()
- nscReporter.reset()
- }
- }
-
- def wrapNscReporter(nscReporter: NscReporter): ApiReporter = new NscToApiReporterProxy(nscReporter)
-}
diff --git a/src/compiler/scala/reflect/internal/Required.scala b/src/compiler/scala/reflect/internal/Required.scala
index ba6d65a306..6d146354a3 100644
--- a/src/compiler/scala/reflect/internal/Required.scala
+++ b/src/compiler/scala/reflect/internal/Required.scala
@@ -5,10 +5,7 @@ import settings.MutableSettings
trait Required { self: SymbolTable =>
- type AbstractFileType >: Null <: {
- def path: String
- def canonicalPath: String
- }
+ type AbstractFileType >: Null <: api.RequiredFile
def picklerPhase: Phase
diff --git a/src/compiler/scala/reflect/internal/Scopes.scala b/src/compiler/scala/reflect/internal/Scopes.scala
index ef48d6102f..36e8ebb212 100644
--- a/src/compiler/scala/reflect/internal/Scopes.scala
+++ b/src/compiler/scala/reflect/internal/Scopes.scala
@@ -113,7 +113,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
*
* @param sym ...
*/
- def enter(sym: Symbol): Symbol = { enter(newScopeEntry(sym, this)); sym }
+ def enter[T <: Symbol](sym: T): T = { enter(newScopeEntry(sym, this)); sym }
/** enter a symbol, asserting that no symbol with same name exists in scope
*
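
Making enter generic lets callers keep the precise static type of the symbol they enter instead of getting back a bare Symbol. A toy sketch with invented ToyScope/ToySymbol types, not the compiler's real Scope API:

object GenericEnterSketch extends App {
  class ToySymbol(val name: String)
  class ToyMethodSymbol(name: String) extends ToySymbol(name)

  class ToyScope {
    private var syms = List.empty[ToySymbol]
    // Before: def enter(sym: ToySymbol): ToySymbol -- the precise type was lost.
    def enter[T <: ToySymbol](sym: T): T = { syms ::= sym; sym }
  }

  val scope = new ToyScope
  // No cast needed to keep the ToyMethodSymbol type of the entered symbol.
  val m: ToyMethodSymbol = scope enter new ToyMethodSymbol("apply")
  println(m.name)
}
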
diff --git a/src/compiler/scala/reflect/internal/StdAttachments.scala b/src/compiler/scala/reflect/internal/StdAttachments.scala
new file mode 100644
index 0000000000..ae2ad87deb
--- /dev/null
+++ b/src/compiler/scala/reflect/internal/StdAttachments.scala
@@ -0,0 +1,10 @@
+package scala.reflect
+package internal
+
+import scala.reflect.makro.runtime.{Context => MacroContext}
+
+trait StdAttachments {
+ self: SymbolTable =>
+
+ case class ReifyAttachment(original: Symbol)
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala
index 1666887133..bd4d9a9f34 100644
--- a/src/compiler/scala/reflect/internal/StdNames.scala
+++ b/src/compiler/scala/reflect/internal/StdNames.scala
@@ -6,26 +6,141 @@
package scala.reflect
package internal
-import scala.collection.immutable
-import NameTransformer.MODULE_SUFFIX_STRING
+import java.security.MessageDigest
+import Chars.isOperatorPart
import annotation.switch
+import language.implicitConversions
+import scala.collection.immutable
+import scala.io.Codec
-trait StdNames extends NameManglers { self: SymbolTable =>
+trait StdNames {
+ self: SymbolTable =>
def encode(str: String): TermName = newTermNameCached(NameTransformer.encode(str))
- implicit def lowerTermNames(n: TermName): String = "" + n
-
- // implicit def stringToTermName(s: String): TermName = newTermName(s)
-
- /** This should be the first trait in the linearization. */
- trait Keywords {
+ implicit def lowerTermNames(n: TermName): String = n.toString
+
+ /** Tensions: would like the keywords to be the very first names entered into the names
+ * storage so their ids count from 0, which simplifies the parser. Switched to abstract
+ * classes to avoid all the indirection which is generated with implementation-containing
+ * traits. Since all these classes use eager vals, that means the constructor with the
+ * keywords must run first. If it's the top in the superclass chain, then CommonNames
+ * must inherit from it, which means TypeNames would inherit keywords as well.
+ *
+ * Solution: Keywords extends CommonNames and uses early defs to beat the
+ * CommonNames constructor out of the starting gate. This is its builder.
+ */
+ private class KeywordSetBuilder {
private var kws: Set[TermName] = Set()
- private def kw(s: String): TermName = {
+ def apply(s: String): TermName = {
val result = newTermNameCached(s)
kws = kws + result
result
}
+ def result: Set[TermName] = {
+ val result = kws
+ kws = null
+ result
+ }
+ }
+
+ private final object compactify extends (String => String) {
+ val md5 = MessageDigest.getInstance("MD5")
+
+ /**
+ * COMPACTIFY
+ *
+ * The hashed name has the form (prefix + marker + md5 + marker + suffix), where
+ * - prefix/suffix.length = MaxNameLength / 4
+ * - md5.length = 32
+ *
+ * We obtain the formula:
+ *
+ * FileNameLength = 2*(MaxNameLength / 4) + 2.marker.length + 32 + 6
+ *
+ * (+6 for ".class"). MaxNameLength can therefore be computed as follows:
+ */
+ val marker = "$$$$"
+ val MaxNameLength = math.min(
+ settings.maxClassfileName.value - 6,
+ 2 * (settings.maxClassfileName.value - 6 - 2*marker.length - 32)
+ )
+ def toMD5(s: String, edge: Int): String = {
+ val prefix = s take edge
+ val suffix = s takeRight edge
+
+ val cs = s.toArray
+ val bytes = Codec toUTF8 cs
+ md5 update bytes
+ val md5chars = md5.digest() map (b => (b & 0xFF).toHexString) mkString
+
+ prefix + marker + md5chars + marker + suffix
+ }
+ def apply(s: String): String = (
+ if (s.length <= MaxNameLength) s
+ else toMD5(s, MaxNameLength / 4)
+ )
+ }
+
+ abstract class CommonNames {
+ type NameType <: Name
+ protected implicit def createNameType(name: String): NameType
+
+ def flattenedName(segments: Name*): NameType =
+ compactify(segments mkString NAME_JOIN_STRING)
+
+ val MODULE_SUFFIX_STRING: String = NameTransformer.MODULE_SUFFIX_STRING
+ val NAME_JOIN_STRING: String = NameTransformer.NAME_JOIN_STRING
+ val SINGLETON_SUFFIX: String = ".type"
+
+ val ANON_CLASS_NAME: NameType = "$anon"
+ val ANON_FUN_NAME: NameType = "$anonfun"
+ val EMPTY: NameType = ""
+ val EMPTY_PACKAGE_NAME: NameType = "<empty>"
+ val IMPL_CLASS_SUFFIX = "$class"
+ val IMPORT: NameType = "<import>"
+ val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING
+ val MODULE_VAR_SUFFIX: NameType = "$module"
+ val NAME_JOIN_NAME: NameType = NAME_JOIN_STRING
+ val PACKAGE: NameType = "package"
+ val ROOT: NameType = "<root>"
+ val SPECIALIZED_SUFFIX: NameType = "$sp"
+
+ // value types (and AnyRef) are all used as terms as well
+ // as (at least) arguments to the @specialize annotation.
+ final val Boolean: NameType = "Boolean"
+ final val Byte: NameType = "Byte"
+ final val Char: NameType = "Char"
+ final val Double: NameType = "Double"
+ final val Float: NameType = "Float"
+ final val Int: NameType = "Int"
+ final val Long: NameType = "Long"
+ final val Short: NameType = "Short"
+ final val Unit: NameType = "Unit"
+
+ final val ScalaValueNames: scala.List[NameType] =
+ scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit)
+
+ // some types whose companions we utilize
+ final val AnyRef: NameType = "AnyRef"
+ final val Array: NameType = "Array"
+ final val List: NameType = "List"
+ final val Seq: NameType = "Seq"
+ final val Symbol: NameType = "Symbol"
+ final val ClassTag: NameType = "ClassTag"
+ final val TypeTag : NameType = "TypeTag"
+ final val ConcreteTypeTag: NameType = "ConcreteTypeTag"
+
+ // fictions we use as both types and terms
+ final val ERROR: NameType = "<error>"
+ final val NO_NAME: NameType = "<none>" // formerly NOSYMBOL
+ final val WILDCARD: NameType = "_"
+ }
+
+ /** This should be the first trait in the linearization. */
+ // abstract class Keywords extends CommonNames {
+ abstract class Keywords extends {
+ private val kw = new KeywordSetBuilder
final val ABSTRACTkw: TermName = kw("abstract")
final val CASEkw: TermName = kw("case")
@@ -56,6 +171,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val RETURNkw: TermName = kw("return")
final val SEALEDkw: TermName = kw("sealed")
final val SUPERkw: TermName = kw("super")
+ final val THENkw: TermName = kw("then")
final val THISkw: TermName = kw("this")
final val THROWkw: TermName = kw("throw")
final val TRAITkw: TermName = kw("trait")
@@ -79,66 +195,20 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val HASHkw: TermName = kw("#")
final val ATkw: TermName = kw("@")
- final val keywords = {
- val result = kws.toSet
- kws = null
- result
- }
-
+ final val keywords = kw.result
+ } with CommonNames {
final val javaKeywords = new JavaKeywords()
}
- trait CommonNames /*extends LibraryCommonNames*/ {
-
- type NameType <: Name
- protected implicit def createNameType(name: String): NameType
-
- val EMPTY: NameType = ""
- val ANON_FUN_NAME: NameType = "$anonfun"
- val ANON_CLASS_NAME: NameType = "$anon"
- val EMPTY_PACKAGE_NAME: NameType = "<empty>"
- val IMPORT: NameType = "<import>"
- val MODULE_VAR_SUFFIX: NameType = "$module"
- val ROOT: NameType = "<root>"
- val PACKAGE: NameType = "package"
- val SPECIALIZED_SUFFIX: NameType = "$sp"
-
- // value types (and AnyRef) are all used as terms as well
- // as (at least) arguments to the @specialize annotation.
- final val Boolean: NameType = "Boolean"
- final val Byte: NameType = "Byte"
- final val Char: NameType = "Char"
- final val Double: NameType = "Double"
- final val Float: NameType = "Float"
- final val Int: NameType = "Int"
- final val Long: NameType = "Long"
- final val Short: NameType = "Short"
- final val Unit: NameType = "Unit"
-
- final val ScalaValueNames: scala.List[NameType] =
- scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit)
-
- // some types whose companions we utilize
- final val AnyRef: NameType = "AnyRef"
- final val Array: NameType = "Array"
- final val List: NameType = "List"
- final val Seq: NameType = "Seq"
- final val Symbol: NameType = "Symbol"
- final val ClassTag: NameType = "ClassTag"
- final val TypeTag : NameType = "TypeTag"
- final val ConcreteTypeTag: NameType = "ConcreteTypeTag"
-
- // fictions we use as both types and terms
- final val ERROR: NameType = "<error>"
- final val NO_NAME: NameType = "<none>" // formerly NOSYMBOL
- final val WILDCARD: NameType = "_"
- }
+ abstract class TypeNames extends Keywords {
+ type NameType = TypeName
+ protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
- trait TypeNames extends CommonNames {
final val BYNAME_PARAM_CLASS_NAME: NameType = "<byname>"
final val EQUALS_PATTERN_NAME: NameType = "<equals>"
final val JAVA_REPEATED_PARAM_CLASS_NAME: NameType = "<repeated...>"
final val LOCAL_CHILD: NameType = "<local child>"
+ final val REFINE_CLASS_NAME: NameType = "<refinement>"
final val REPEATED_PARAM_CLASS_NAME: NameType = "<repeated>"
final val WILDCARD_STAR: NameType = "_*"
@@ -159,7 +229,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val Annotation: NameType = "Annotation"
final val ClassfileAnnotation: NameType = "ClassfileAnnotation"
final val Enum: NameType = "Enum"
-
+ final val Group: NameType = "Group"
final val Tree: NameType = "Tree"
final val TypeTree: NameType = "TypeTree"
@@ -177,7 +247,6 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val DeprecatedATTR: NameType = "Deprecated"
final val ExceptionsATTR: NameType = "Exceptions"
final val InnerClassesATTR: NameType = "InnerClasses"
- final val JacoMetaATTR: NameType = "JacoMeta"
final val LineNumberTableATTR: NameType = "LineNumberTable"
final val LocalVariableTableATTR: NameType = "LocalVariableTable"
final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME
@@ -187,33 +256,207 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val SignatureATTR: NameType = "Signature"
final val SourceFileATTR: NameType = "SourceFile"
final val SyntheticATTR: NameType = "Synthetic"
+
+ def dropSingletonName(name: Name): TypeName = name dropRight SINGLETON_SUFFIX.length toTypeName
+ def singletonName(name: Name): TypeName = name append SINGLETON_SUFFIX toTypeName
+ def implClassName(name: Name): TypeName = name append IMPL_CLASS_SUFFIX toTypeName
+ def interfaceName(implname: Name): TypeName = implname dropRight IMPL_CLASS_SUFFIX.length toTypeName
}
- trait TermNames extends Keywords with CommonNames {
+ abstract class TermNames extends Keywords {
+ type NameType = TermName
+ protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
+
+ /** Base strings from which synthetic names are derived. */
+ val BITMAP_PREFIX = "bitmap$"
+ val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
+ val DEFAULT_GETTER_STRING = "$default$"
+ val DEFAULT_GETTER_INIT_STRING = "$lessinit$greater" // CONSTRUCTOR.encoded, less is more
+ val DO_WHILE_PREFIX = "doWhile$"
+ val EVIDENCE_PARAM_PREFIX = "evidence$"
+ val EXCEPTION_RESULT_PREFIX = "exceptionResult"
+ val EXPAND_SEPARATOR_STRING = "$$"
+ val INTERPRETER_IMPORT_WRAPPER = "$iw"
+ val INTERPRETER_LINE_PREFIX = "line"
+ val INTERPRETER_VAR_PREFIX = "res"
+ val INTERPRETER_WRAPPER_SUFFIX = "$object"
+ val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
+ val PROTECTED_PREFIX = "protected$"
+ val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
+ val SUPER_PREFIX_STRING = "super$"
+ val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
+ val WHILE_PREFIX = "while$"
+
// Compiler internal names
- val EXPAND_SEPARATOR_STRING = "$$"
-
- val ANYNAME: NameType = "<anyname>"
- val CONSTRUCTOR: NameType = "<init>"
- val FAKE_LOCAL_THIS: NameType = "this$"
- val INITIALIZER: NameType = CONSTRUCTOR // Is this buying us something?
- val LAZY_LOCAL: NameType = "$lzy"
- val LOCAL_SUFFIX_STRING = " "
- val MIRROR_PREFIX: NameType = "$mr."
- val MIRROR_SHORT: NameType = "$mr"
- val MIRROR_FREE_PREFIX: NameType = "free$"
- val MIRROR_FREE_THIS_SUFFIX: NameType = "$this"
- val MIRROR_FREE_VALUE_SUFFIX: NameType = "$value"
- val MIXIN_CONSTRUCTOR: NameType = "$init$"
- val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
- val OUTER: NameType = "$outer"
- val OUTER_LOCAL: NameType = OUTER + LOCAL_SUFFIX_STRING // "$outer ", note the space
- val OUTER_SYNTH: NameType = "<outer>" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter
- val SELECTOR_DUMMY: NameType = "<unapply-selector>"
- val SELF: NameType = "$this"
- val SPECIALIZED_INSTANCE: NameType = "specInstance$"
- val STAR: NameType = "*"
- val THIS: NameType = "_$this"
+ val ANYNAME: NameType = "<anyname>"
+ val CONSTRUCTOR: NameType = "<init>"
+ val FAKE_LOCAL_THIS: NameType = "this$"
+ val INITIALIZER: NameType = CONSTRUCTOR // Is this buying us something?
+ val LAZY_LOCAL: NameType = "$lzy"
+ val LAZY_SLOW_SUFFIX: NameType = "$lzycompute"
+ val LOCAL_SUFFIX_STRING = " "
+ val MIRROR_FREE_PREFIX: NameType = "free$"
+ val MIRROR_FREE_THIS_SUFFIX: NameType = "$this"
+ val MIRROR_FREE_VALUE_SUFFIX: NameType = "$value"
+ val MIRROR_PREFIX: NameType = "$mr."
+ val MIRROR_SHORT: NameType = "$mr"
+ val MIRROR_SYMDEF_PREFIX: NameType = "symdef$"
+ val MIXIN_CONSTRUCTOR: NameType = "$init$"
+ val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
+ val OUTER: NameType = "$outer"
+ val OUTER_LOCAL: NameType = OUTER + LOCAL_SUFFIX_STRING // "$outer ", note the space
+ val OUTER_SYNTH: NameType = "<outer>" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter
+ val SELECTOR_DUMMY: NameType = "<unapply-selector>"
+ val SELF: NameType = "$this"
+ val SETTER_SUFFIX: NameType = encode("_=")
+ val SPECIALIZED_INSTANCE: NameType = "specInstance$"
+ val STAR: NameType = "*"
+ val THIS: NameType = "_$this"
+
+ @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
+ def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
+ @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
+ def SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX.toTermName
+
+ def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
+ def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
+ def isImplClassName(name: Name) = name endsWith IMPL_CLASS_SUFFIX
+ def isLocalDummyName(name: Name) = name startsWith LOCALDUMMY_PREFIX
+ def isLocalName(name: Name) = name endsWith LOCAL_SUFFIX_STRING
+ def isLoopHeaderLabel(name: Name) = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
+ def isProtectedAccessorName(name: Name) = name startsWith PROTECTED_PREFIX
+ def isSuperAccessorName(name: Name) = name startsWith SUPER_PREFIX_STRING
+ def isReplWrapperName(name: Name) = name containsName INTERPRETER_IMPORT_WRAPPER
+ def isSetterName(name: Name) = name endsWith SETTER_SUFFIX
+ def isTraitSetterName(name: Name) = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING)
+ def isSingletonName(name: Name) = name endsWith SINGLETON_SUFFIX
+ def isModuleName(name: Name) = name endsWith MODULE_SUFFIX_NAME
+
+ def isDeprecatedIdentifierName(name: Name) = name.toTermName match {
+ case nme.`then` | nme.`macro` => true
+ case _ => false
+ }
+
+ def isOpAssignmentName(name: Name) = name match {
+ case raw.NE | raw.LE | raw.GE | EMPTY => false
+ case _ =>
+ name.endChar == '=' && name.startChar != '=' && isOperatorPart(name.startChar)
+ }
+
+ /** The expanded name of `name` relative to this class `base` with given `separator`
+ */
+ def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName =
+ newTermNameCached(base.fullName('$') + separator + name)
+
+ /** The expanded setter name of `name` relative to this class `base`
+ */
+ def expandedSetterName(name: TermName, base: Symbol): TermName =
+ expandedName(name, base, separator = TRAIT_SETTER_SEPARATOR_STRING)
+
+ /** If `name` is an expandedName name, the original name.
+ * Otherwise `name` itself.
+ */
+ def originalName(name: Name): Name = {
+ var i = name.length
+ while (i >= 2 && !(name(i - 1) == '$' && name(i - 2) == '$')) i -= 1
+ if (i >= 2) {
+ while (i >= 3 && name(i - 3) == '$') i -= 1
+ name.subName(i, name.length)
+ } else name
+ }
+
+ def unspecializedName(name: Name): Name = (
+ if (name endsWith SPECIALIZED_SUFFIX)
+ name.subName(0, name.lastIndexOf('m') - 1)
+ else name
+ )
+
+ /*
+ def anonNumberSuffix(name: Name): Name = {
+ ("" + name) lastIndexOf '$' match {
+ case -1 => nme.EMPTY
+ case idx =>
+ val s = name drop idx
+ if (s.toString forall (_.isDigit)) s
+ else nme.EMPTY
+ }
+ }
+ */
+
+ /** Return the original name and the types on which this name
+ * is specialized. For example,
+ * {{{
+ * splitSpecializedName("foo$mIcD$sp") == ('foo', "I", "D")
+ * }}}
+ * `foo$mIcD$sp` is the name of a method specialized on two type
+ * parameters, the first one belonging to the method itself, on Int,
+ * and another one belonging to the enclosing class, on Double.
+ */
+ def splitSpecializedName(name: Name): (Name, String, String) =
+ if (name endsWith SPECIALIZED_SUFFIX) {
+ val name1 = name dropRight SPECIALIZED_SUFFIX.length
+ val idxC = name1 lastIndexOf 'c'
+ val idxM = name1 lastIndexOf 'm'
+
+ (name1.subName(0, idxM - 1),
+ name1.subName(idxC + 1, name1.length).toString,
+ name1.subName(idxM + 1, idxC).toString)
+ } else
+ (name, "", "")
+
+ def getterName(name: TermName): TermName = if (isLocalName(name)) localToGetter(name) else name
+ def getterToLocal(name: TermName): TermName = name append LOCAL_SUFFIX_STRING
+ def getterToSetter(name: TermName): TermName = name append SETTER_SUFFIX
+ def localToGetter(name: TermName): TermName = name dropRight LOCAL_SUFFIX_STRING.length
+
+ def dropLocalSuffix(name: Name): Name = if (name endsWith ' ') name dropRight 1 else name
+
+ def setterToGetter(name: TermName): TermName = {
+ val p = name.pos(TRAIT_SETTER_SEPARATOR_STRING)
+ if (p < name.length)
+ setterToGetter(name drop (p + TRAIT_SETTER_SEPARATOR_STRING.length))
+ else
+ name.subName(0, name.length - SETTER_SUFFIX.length)
+ }
+
+ // Nominally, name$default$N, encoded for <init>
+ def defaultGetterName(name: Name, pos: Int): TermName = {
+ val prefix = if (isConstructorName(name)) DEFAULT_GETTER_INIT_STRING else name
+ newTermName(prefix + DEFAULT_GETTER_STRING + pos)
+ }
+ // Nominally, name from name$default$N, CONSTRUCTOR for <init>
+ def defaultGetterToMethod(name: Name): TermName = {
+ val p = name.pos(DEFAULT_GETTER_STRING)
+ if (p < name.length) {
+ val q = name.toTermName.subName(0, p)
+ // i.e., if (q.decoded == CONSTRUCTOR.toString) CONSTRUCTOR else q
+ if (q.toString == DEFAULT_GETTER_INIT_STRING) CONSTRUCTOR else q
+ } else name.toTermName
+ }
+
+ // If the name ends with $nn where nn are
+ // all digits, strip the $ and the digits.
+ // Otherwise return the argument.
+ def stripAnonNumberSuffix(name: Name): Name = {
+ var pos = name.length
+ while (pos > 0 && name(pos - 1).isDigit)
+ pos -= 1
+
+ if (pos <= 0 || pos == name.length || name(pos - 1) != '$') name
+ else name.subName(0, pos - 1)
+ }
+
+ def stripModuleSuffix(name: Name): Name = (
+ if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name
+ )
+ def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">")
+ def superName(name: Name): TermName = newTermName(SUPER_PREFIX_STRING + name)
+
+ /** The name of an accessor for protected symbols. */
+ def protName(name: Name): TermName = newTermName(PROTECTED_PREFIX + name)
+
+ /** The name of a setter for protected symbols. Used for inherited Java fields. */
+ def protSetterName(name: Name): TermName = newTermName(PROTECTED_SET_PREFIX + name)
final val Nil: NameType = "Nil"
final val Predef: NameType = "Predef"
@@ -268,8 +511,6 @@ trait StdNames extends NameManglers { self: SymbolTable =>
case _ => newTermName("x$" + i)
}
- // [Eugene to Paul] see comments in StandardNames.scala to find out why's this here
- val QQQ = ???
val ??? = encode("???")
val wrapRefArray: NameType = "wrapRefArray"
@@ -290,11 +531,14 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val AnnotationInfo: NameType = "AnnotationInfo"
val Any: NameType = "Any"
val AnyVal: NameType = "AnyVal"
+ val AppliedTypeTree: NameType = "AppliedTypeTree"
val Apply: NameType = "Apply"
val ArrayAnnotArg: NameType = "ArrayAnnotArg"
+ val Constant: NameType = "Constant"
val ConstantType: NameType = "ConstantType"
val EmptyPackage: NameType = "EmptyPackage"
val EmptyPackageClass: NameType = "EmptyPackageClass"
+ val ExistentialTypeTree: NameType = "ExistentialTypeTree"
val Expr: NameType = "Expr"
val Ident: NameType = "Ident"
val Import: NameType = "Import"
@@ -325,9 +569,13 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val append: NameType = "append"
val apply: NameType = "apply"
val applyDynamic: NameType = "applyDynamic"
+ val applyDynamicNamed: NameType = "applyDynamicNamed"
val applyOrElse: NameType = "applyOrElse"
val args : NameType = "args"
val argv : NameType = "argv"
+ val arrayClass: NameType = "arrayClass"
+ val arrayElementClass: NameType = "arrayElementClass"
+ val arrayTagToClassManifest: NameType = "arrayTagToClassManifest"
val arrayValue: NameType = "arrayValue"
val array_apply : NameType = "array_apply"
val array_clone : NameType = "array_clone"
@@ -345,6 +593,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val checkInitialized: NameType = "checkInitialized"
val classOf: NameType = "classOf"
val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
+ val concreteTypeTagToManifest: NameType = "concreteTypeTagToManifest"
val conforms: NameType = "conforms"
val copy: NameType = "copy"
val definitions: NameType = "definitions"
@@ -363,6 +612,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val error: NameType = "error"
val eval: NameType = "eval"
val ex: NameType = "ex"
+ val experimental: NameType = "experimental"
val false_ : NameType = "false"
val filter: NameType = "filter"
val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
@@ -389,26 +639,33 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val lang: NameType = "lang"
val length: NameType = "length"
val lengthCompare: NameType = "lengthCompare"
+ val liftedTree: NameType = "liftedTree"
+ val `macro` : NameType = "macro"
val macroThis : NameType = "_this"
val macroContext : NameType = "c"
val main: NameType = "main"
val manifest: NameType = "manifest"
+ val manifestToConcreteTypeTag: NameType = "manifestToConcreteTypeTag"
val map: NameType = "map"
+ val materializeArrayTag: NameType = "materializeArrayTag"
val materializeClassTag: NameType = "materializeClassTag"
- val materializeTypeTag: NameType = "materializeTypeTag"
val materializeConcreteTypeTag: NameType = "materializeConcreteTypeTag"
+ val materializeErasureTag: NameType= "materializeErasureTag"
+ val materializeTypeTag: NameType = "materializeTypeTag"
val mirror : NameType = "mirror"
val moduleClass : NameType = "moduleClass"
val name: NameType = "name"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
+ val newFreeExistential: NameType = "newFreeExistential"
val newFreeTerm: NameType = "newFreeTerm"
val newFreeType: NameType = "newFreeType"
val newNestedSymbol: NameType = "newNestedSymbol"
val newScopeWith: NameType = "newScopeWith"
+ val next: NameType = "next"
val nmeNewTermName: NameType = "newTermName"
val nmeNewTypeName: NameType = "newTypeName"
- val next: NameType = "next"
+ val normalize: NameType = "normalize"
val notifyAll_ : NameType = "notifyAll"
val notify_ : NameType = "notify"
val null_ : NameType = "null"
@@ -426,6 +683,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val runtime: NameType = "runtime"
val sameElements: NameType = "sameElements"
val scala_ : NameType = "scala"
+ val selectDynamic: NameType = "selectDynamic"
val selectOverloadedMethod: NameType = "selectOverloadedMethod"
val selectTerm: NameType = "selectTerm"
val selectType: NameType = "selectType"
@@ -439,6 +697,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val staticModule : NameType = "staticModule"
val synchronized_ : NameType = "synchronized"
val tail: NameType = "tail"
+ val `then` : NameType = "then"
val thisModuleType: NameType = "thisModuleType"
val this_ : NameType = "this"
val throw_ : NameType = "throw"
@@ -455,12 +714,14 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val unapplySeq: NameType = "unapplySeq"
val unbox: NameType = "unbox"
val update: NameType = "update"
+ val updateDynamic: NameType = "updateDynamic"
val value: NameType = "value"
val valueOf : NameType = "valueOf"
val values : NameType = "values"
val view_ : NameType = "view"
val wait_ : NameType = "wait"
val withFilter: NameType = "withFilter"
+ val wrap: NameType = "wrap"
val zip: NameType = "zip"
val synthSwitch: NameType = "$synthSwitch"
@@ -498,22 +759,13 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val toInteger: NameType = "toInteger"
}
- object tpnme extends AbsTypeNames with TypeNames /*with LibraryTypeNames*/ with TypeNameMangling {
- type NameType = TypeName
- protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
-
- val REFINE_CLASS_NAME: NameType = "<refinement>"
- }
+ object tpnme extends TypeNames with AbsTypeNames { }
/** For fully qualified type names.
*/
object fulltpnme extends TypeNames {
- type NameType = TypeName
- protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
-
val RuntimeNothing: NameType = "scala.runtime.Nothing$"
val RuntimeNull: NameType = "scala.runtime.Null$"
- val JavaLangEnum: NameType = "java.lang.Enum"
}
/** Java binary names, like scala/runtime/Nothing$.
@@ -525,18 +777,9 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val RuntimeNull = toBinary(fulltpnme.RuntimeNull).toTypeName
}
- object fullnme extends TermNames {
- type NameType = TermName
- protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
-
- val MirrorPackage: NameType = "scala.reflect.mirror"
- }
-
val javanme = nme.javaKeywords
- object nme extends AbsTermNames with TermNames /*with LibraryTermNames*/ with TermNameMangling {
- type NameType = TermName
- protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
+ object nme extends TermNames with AbsTermNames {
/** Translate a String into a list of simple TypeNames and TermNames.
* In all segments before the last, type/term is determined by whether
@@ -576,17 +819,12 @@ trait StdNames extends NameManglers { self: SymbolTable =>
def newBitmapName(bitmapPrefix: Name, n: Int) = bitmapPrefix append ("" + n)
- val BITMAP_PREFIX: String = "bitmap$"
val BITMAP_NORMAL: NameType = BITMAP_PREFIX + "" // initialization bitmap for public/protected lazy vals
val BITMAP_TRANSIENT: NameType = BITMAP_PREFIX + "trans$" // initialization bitmap for transient lazy vals
- val BITMAP_PRIVATE: NameType = BITMAP_PREFIX + "priv$" // initialization bitmap for private lazy vals
val BITMAP_CHECKINIT: NameType = BITMAP_PREFIX + "init$" // initialization bitmap for checkinit values
val BITMAP_CHECKINIT_TRANSIENT: NameType = BITMAP_PREFIX + "inittrans$" // initialization bitmap for transient checkinit values
- /** The expanded name of `name` relative to this class `base` with given `separator`
- */
- def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName =
- newTermNameCached(base.fullName('$') + separator + name)
+ def newLazyValSlowComputeName(lzyValName: Name) = lzyValName append LAZY_SLOW_SUFFIX
def isModuleVarName(name: Name): Boolean =
stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX
@@ -594,27 +832,14 @@ trait StdNames extends NameManglers { self: SymbolTable =>
def moduleVarName(name: TermName): TermName =
newTermNameCached("" + name + MODULE_VAR_SUFFIX)
- val ROOTPKG: TermName = "_root_"
+ val ROOTPKG: TermName = "_root_"
+ val EQEQ_LOCAL_VAR: TermName = "eqEqTemp$"
- /** Base strings from which synthetic names are derived. */
- val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
- val DEFAULT_GETTER_STRING = "$default$"
- val DO_WHILE_PREFIX = "doWhile$"
- val EQEQ_LOCAL_VAR_STRING = "eqEqTemp$"
- val EVIDENCE_PARAM_PREFIX = "evidence$"
- val EXCEPTION_RESULT_PREFIX = "exceptionResult"
- val INTERPRETER_IMPORT_WRAPPER = "$iw"
- val INTERPRETER_LINE_PREFIX = "line"
- val INTERPRETER_VAR_PREFIX = "res"
- val INTERPRETER_WRAPPER_SUFFIX = "$object"
- val WHILE_PREFIX = "while$"
-
- val EQEQ_LOCAL_VAR: TermName = newTermName(EQEQ_LOCAL_VAR_STRING)
-
- def getCause = sn.GetCause
- def getClass_ = sn.GetClass
- def getMethod_ = sn.GetMethod
- def invoke_ = sn.Invoke
+ def getCause = sn.GetCause
+ def getClass_ = sn.GetClass
+ def getComponentType = sn.GetComponentType
+ def getMethod_ = sn.GetMethod
+ def invoke_ = sn.Invoke
val ADD = encode("+")
val AND = encode("&")
@@ -641,14 +866,9 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val ZOR = encode("||")
// unary operators
- // [Eugene to Paul] see comments in StandardNames.scala to find out why's this here
- val UNARY_TILDE = UNARY_~
val UNARY_~ = encode("unary_~")
- val UNARY_PLUS = UNARY_+
val UNARY_+ = encode("unary_+")
- val UNARY_MINUS = UNARY_-
val UNARY_- = encode("unary_-")
- val UNARY_NOT = UNARY_!
val UNARY_! = encode("unary_!")
// Grouped here so Cleanup knows what tests to perform.
@@ -778,6 +998,11 @@ trait StdNames extends NameManglers { self: SymbolTable =>
case 22 => nme._22
case _ => newTermName("_" + j)
}
+
+ @deprecated("Use a method in tpnme", "2.10.0") def dropSingletonName(name: Name): TypeName = tpnme.dropSingletonName(name)
+ @deprecated("Use a method in tpnme", "2.10.0") def singletonName(name: Name): TypeName = tpnme.singletonName(name)
+ @deprecated("Use a method in tpnme", "2.10.0") def implClassName(name: Name): TypeName = tpnme.implClassName(name)
+ @deprecated("Use a method in tpnme", "2.10.0") def interfaceName(implname: Name): TypeName = tpnme.interfaceName(implname)
}
abstract class SymbolNames {
@@ -803,6 +1028,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val ForName : TermName
val GetCause : TermName
val GetClass : TermName
+ val GetComponentType : TermName
val GetMethod : TermName
val Invoke : TermName
val JavaLang : TermName
@@ -811,12 +1037,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
}
class JavaKeywords {
- private var kws: Set[TermName] = Set()
- private def kw(s: String): TermName = {
- val result = newTermNameCached(s)
- kws = kws + result
- result
- }
+ private val kw = new KeywordSetBuilder
final val ABSTRACTkw: TermName = kw("abstract")
final val ASSERTkw: TermName = kw("assert")
@@ -869,11 +1090,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val VOLATILEkw: TermName = kw("volatile")
final val WHILEkw: TermName = kw("while")
- final val keywords = {
- val result = kws.toSet
- kws = null
- result
- }
+ final val keywords = kw.result
}
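The JavaKeywords change above replaces the inlined accumulate-then-freeze pattern with a shared KeywordSetBuilder. The removed lines show the idea: each kw(s) call interns a keyword and records it, and keywords freezes the accumulated set while nulling the buffer. A minimal standalone sketch of that pattern, using plain Strings where the compiler uses TermName:

    object KeywordSetSketch {
      // Simplified stand-in for KeywordSetBuilder: the real one interns TermNames.
      final class KeywordSetBuilder {
        private var kws: Set[String] = Set()
        def apply(s: String): String = { kws += s; s }                // record and return the keyword
        def result: Set[String]      = { val r = kws; kws = null; r } // freeze: later kw() calls would NPE
      }

      def main(args: Array[String]): Unit = {
        val kw         = new KeywordSetBuilder
        val ABSTRACTkw = kw("abstract")
        val WHILEkw    = kw("while")
        println(kw.result)                                            // Set(abstract, while)
      }
    }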
private abstract class JavaNames extends SymbolNames {
@@ -897,12 +1114,13 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val Throwable: TypeName = "java.lang.Throwable"
final val ValueType: TypeName = tpnme.NO_NAME
- final val ForName: TermName = newTermName("forName")
- final val GetCause: TermName = newTermName("getCause")
- final val GetClass: TermName = newTermName("getClass")
- final val GetMethod: TermName = newTermName("getMethod")
- final val Invoke: TermName = newTermName("invoke")
- final val JavaLang: TermName = newTermName("java.lang")
+ final val ForName: TermName = newTermName("forName")
+ final val GetCause: TermName = newTermName("getCause")
+ final val GetClass: TermName = newTermName("getClass")
+ final val GetComponentType: TermName = newTermName("getComponentType")
+ final val GetMethod: TermName = newTermName("getMethod")
+ final val Invoke: TermName = newTermName("invoke")
+ final val JavaLang: TermName = newTermName("java.lang")
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> BoxedBoolean,
@@ -934,12 +1152,13 @@ trait StdNames extends NameManglers { self: SymbolTable =>
final val Throwable: TypeName = "System.Exception"
final val ValueType: TypeName = "System.ValueType"
- final val ForName: TermName = newTermName("GetType")
- final val GetCause: TermName = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */
- final val GetClass: TermName = newTermName("GetType")
- final val GetMethod: TermName = newTermName("GetMethod")
- final val Invoke: TermName = newTermName("Invoke")
- final val JavaLang: TermName = newTermName("System")
+ final val ForName: TermName = newTermName("GetType")
+ final val GetCause: TermName = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */
+ final val GetClass: TermName = newTermName("GetType")
+ final val GetComponentType: TermName = newTermName("GetElementType")
+ final val GetMethod: TermName = newTermName("GetMethod")
+ final val Invoke: TermName = newTermName("Invoke")
+ final val JavaLang: TermName = newTermName("System")
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> "System.Boolean",
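The new GetComponentType entry follows the existing SymbolNames scheme: one logical operation, two platform spellings (getComponentType on the JVM, GetElementType on MSIL). A small JVM-only sketch of the member the JVM spelling refers to; the reflective lookup is purely illustrative, since the compiler itself only emits the name:

    object GetComponentTypeSketch {
      def main(args: Array[String]): Unit = {
        val arrayClass = classOf[Array[Int]]
        // Direct call to the JVM method behind sn.GetComponentType:
        println(arrayClass.getComponentType)                    // int
        // The same member, located by its string name:
        val m = classOf[Class[_]].getMethod("getComponentType")
        println(m.invoke(arrayClass))                            // int
      }
    }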
diff --git a/src/compiler/scala/reflect/internal/SymbolCreations.scala b/src/compiler/scala/reflect/internal/SymbolCreations.scala
deleted file mode 100644
index a1163b0f57..0000000000
--- a/src/compiler/scala/reflect/internal/SymbolCreations.scala
+++ /dev/null
@@ -1,113 +0,0 @@
- /* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.reflect
-package internal
-
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import util.Statistics._
-import Flags._
-import api.Modifier
-import scala.tools.util.StringOps.{ ojoin }
-
-trait SymbolCreations {
- self: SymbolTable =>
-
- import definitions._
-
- /** Symbol creation interface, possibly better moved somewhere else.
- * It'd be nice if we had virtual classes, but since we
- * don't: these methods insulate the direct instantiation of the symbols
- * (which may be overridden, e.g. in SynchronizedSymbols) from the
- * enforcement of preconditions and choice of symbol constructor based
- * on flags, which are (or should be) final so they can be reasoned about
- * without lots of surprises.
- */
- trait SymbolCreatorInterface {
- // Fallbacks; more precise creators should normally be called.
- protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol
- // This in fact does not exist anymore in the interests of better typed TypeSymbols.
- // protected def createTypeSymbol(name: TypeName, pos: Position, newFlags: Long): TypeSymbol
-
- // I believe all but rogue TypeSymbols are one of: ClassSymbol, AbstractTypeSymbol, AliasTypeSymbol, or TypeSkolem.
- protected def createAbstractTypeSymbol(name: TypeName, pos: Position, newFlags: Long): AbstractTypeSymbol
- protected def createAliasTypeSymbol(name: TypeName, pos: Position, newFlags: Long): AliasTypeSymbol
- protected def createTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position, newFlags: Long): TypeSkolem
- protected def createClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol
-
- // More specific ClassSymbols.
- // TODO - AnonymousClassSymbol.
- // TODO maybe - PackageObjects, but that one cost me a lot of time when I tried it before
- // because it broke reification some way I couldn't see.
- protected def createModuleClassSymbol(name: TypeName, pos: Position, newFlags: Long): ModuleClassSymbol
- protected def createPackageClassSymbol(name: TypeName, pos: Position, newFlags: Long): PackageClassSymbol
- protected def createRefinementClassSymbol(pos: Position, newFlags: Long): RefinementClassSymbol
- protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol
- protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol
-
- // Distinguished term categories include methods, modules, packages, package objects,
- // value parameters, and values (including vals, vars, and lazy vals.)
- protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol
- protected def createModuleSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol
- protected def createPackageSymbol(name: TermName, pos: Position, newFlags: Long): PackageSymbol
-
- // TODO
- // protected def createValueParameterSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol
- // protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol
- }
-
- trait SymbolCreator extends SymbolCreatorInterface {
- self: Symbol =>
-
- /*** Predictable symbol creation.
- *
- * newTermSymbol, newClassSymbol, and newNonClassSymbol all create symbols based
- * only on the flags (for reconstruction after reification.) It would be nice to
- * combine the last two into newTypeSymbol, but this requires some flag which allows us
- * to distinguish classes and type aliases, which as yet does not exist.
- *
- * The fundamental flags used to determine which Symbol subclass to instantiate are:
- * METHOD, PACKAGE, MODULE, PARAM, DEFERRED.
- */
- final def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol = {
- if ((newFlags & METHOD) != 0)
- createMethodSymbol(name, pos, newFlags)
- else if ((newFlags & PACKAGE) != 0)
- createPackageSymbol(name, pos, newFlags | PackageFlags)
- else if ((newFlags & MODULE) != 0)
- createModuleSymbol(name, pos, newFlags)
- else if ((newFlags & PARAM) != 0)
- createValueParameterSymbol(name, pos, newFlags)
- else
- createValueMemberSymbol(name, pos, newFlags)
- }
-
- final def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol = {
- if (name == tpnme.REFINE_CLASS_NAME)
- createRefinementClassSymbol(pos, newFlags)
- else if ((newFlags & PACKAGE) != 0)
- createPackageClassSymbol(name, pos, newFlags | PackageFlags)
- else if (name == tpnme.PACKAGE)
- createPackageObjectClassSymbol(pos, newFlags)
- else if ((newFlags & MODULE) != 0)
- createModuleClassSymbol(name, pos, newFlags)
- else if ((newFlags & IMPLCLASS) != 0)
- createImplClassSymbol(name, pos, newFlags)
- else
- createClassSymbol(name, pos, newFlags)
- }
-
- final def newNonClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol = {
- if ((newFlags & DEFERRED) != 0)
- createAbstractTypeSymbol(name, pos, newFlags)
- else
- createAliasTypeSymbol(name, pos, newFlags)
- }
-
- def newTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
- newNonClassSymbol(name, pos, newFlags)
- }
-}
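The flag-driven constructor dispatch deleted with SymbolCreations.scala is not lost; the same newTermSymbol/newClassSymbol/newNonClassSymbol bodies are added directly to Symbol in the Symbols.scala hunk further down. As the original comment explains, the concrete subclass is chosen from the creation flags alone so symbols can be reconstructed after reification. A self-contained sketch of that dispatch, with made-up flag bits and dummy result types standing in for the real symbol classes:

    object TermSymbolDispatchSketch {
      // Hypothetical flag bits, for illustration only.
      final val METHOD  = 1L << 0
      final val PACKAGE = 1L << 1
      final val MODULE  = 1L << 2
      final val PARAM   = 1L << 3

      sealed trait TermSym
      case object MethodSym  extends TermSym
      case object PackageSym extends TermSym
      case object ModuleSym  extends TermSym
      case object ParamSym   extends TermSym
      case object ValueSym   extends TermSym

      // Same shape as newTermSymbol: the first matching flag wins.
      def newTermSym(newFlags: Long): TermSym =
        if ((newFlags & METHOD) != 0) MethodSym
        else if ((newFlags & PACKAGE) != 0) PackageSym
        else if ((newFlags & MODULE) != 0) ModuleSym
        else if ((newFlags & PARAM) != 0) ParamSym
        else ValueSym

      def main(args: Array[String]): Unit = {
        println(newTermSym(METHOD))          // MethodSym
        println(newTermSym(MODULE | PARAM))  // ModuleSym (MODULE is checked before PARAM)
        println(newTermSym(0L))              // ValueSym
      }
    }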
diff --git a/src/compiler/scala/reflect/internal/SymbolFlags.scala b/src/compiler/scala/reflect/internal/SymbolFlags.scala
deleted file mode 100644
index febcec8c7c..0000000000
--- a/src/compiler/scala/reflect/internal/SymbolFlags.scala
+++ /dev/null
@@ -1,176 +0,0 @@
- /* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.reflect
-package internal
-
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import util.Statistics._
-import Flags._
-import api.Modifier
-import scala.tools.util.StringOps.{ ojoin }
-
-trait SymbolFlags {
- self: SymbolTable =>
-
- import definitions._
-
- /** Not mixed in under normal conditions; a powerful debugging aid.
- */
- trait FlagVerifier extends Symbol {
- private def assert0(cond: Boolean, message: => Any) {
- if (!cond) {
- Console.err.println("[flag verification failure]\n%s\n%s\n".format(atPhaseStackMessage, message))
- (new Throwable).getStackTrace.take(13).drop(3).foreach(println)
- println("")
- }
- }
-
- protected def verifyChange(isAdd: Boolean, mask: Long, before: Long) {
- val after = if (isAdd) before | mask else before & ~mask
- val added = after & ~before
- val removed = before & ~after
- val ignored = mask & ~added & ~removed
- val error = (
- (added & OverloadedFlagsMask) != 0 || (removed & OverloadedFlagsMask) != 0
- // || (ignored != 0)
- )
- val addString = if (added == 0) "" else "+(" + flagsToString(added) + ")"
- val removeString = if (removed == 0) "" else "-(" + flagsToString(removed) + ")"
- val changeString = if (added == 0 && removed == 0) "no change" else addString + " " + removeString
-
- if (error) {
- val templ = (
- """| symbol: %s %s in %s
- | call: %s(%s)
- | flags: %s
- | result: %s""".stripMargin
- )
-
- assert0(false, templ.format(
- shortSymbolClass,
- name.decode,
- owner,
- if (isAdd) "+" else "-",
- flagsToString(mask),
- flagsToString(before),
- changeString
- ))
- }
- }
-
- protected def verifyFlags(what: String) {
- assert0(this hasAllFlags alwaysHasFlags, symbolCreationString + "\n always=%s, what=%s\n".format(flagsToString(alwaysHasFlags), what))
- if (this hasFlag neverHasFlags) {
- val hasRaw = (rawflags & neverHasFlags) != 0
- assert0(!hasRaw, symbolCreationString + "\n never=%s, what=%s".format(flagsToString(neverHasFlags), what))
- }
- }
- abstract override def initFlags(mask: Long): this.type = {
- super.initFlags(mask)
- verifyFlags("initFlags(" + flagsToString(mask) + ")")
- this
- }
- abstract override def setFlag(mask: Long): this.type = {
- verifyChange(true, mask, rawflags)
- super.setFlag(mask)
- verifyFlags("setFlag(" + flagsToString(mask) + ")")
- this
- }
- abstract override def resetFlag(mask: Long): this.type = {
- verifyChange(false, mask, rawflags)
- super.resetFlag(mask)
- verifyFlags("resetFlag(" + flagsToString(mask) + ")")
- this
- }
- abstract override def flags_=(fs: Long) {
- if ((fs & ~rawflags) != 0)
- verifyChange(true, fs & ~rawflags, rawflags)
- if ((rawflags & ~fs) != 0)
- verifyChange(false, rawflags & ~fs, rawflags)
-
- super.flags_=(fs)
- verifyFlags("flags_=(" + flagsToString(fs) + ")")
- }
- }
-
- /** Flags which should always be present on a particular class of
- * Symbol, and never be present on any others.
- */
- def AllDistinguishingFlags: Long = METHOD | MODULE | IMPLCLASS
-
- /** A distinguishing flag is one which the mixing class must always
- * have, and which no other symbol class is allowed to have.
- */
- trait DistinguishingFlag extends SymbolFlagLogic {
- this: Symbol =>
-
- def distinguishingFlag: Long
- override protected def alwaysHasFlags = super.alwaysHasFlags | distinguishingFlag
- override protected def neverHasFlags = super.neverHasFlags & ~distinguishingFlag
- }
-
- trait SymbolFlagLogic {
- this: Symbol =>
-
- // Forced for performance reasons to define all the flag manipulation
- // methods alongside the field being manipulated.
- def getFlag(mask: Long): Long
- def hasFlag(mask: Long): Boolean
- def hasAllFlags(mask: Long): Boolean
- def setFlag(mask: Long): this.type
- def resetFlag(mask: Long): this.type
- def initFlags(mask: Long): this.type
- def resetFlags(): Unit
-
- protected def resolveOverloadedFlag(flag: Long): String
- protected def calculateFlagString(basis: Long): String
-
- protected def alwaysHasFlags: Long = 0L
- protected def neverHasFlags: Long = AllDistinguishingFlags
-
- def rawFlagString(mask: Long): String = calculateFlagString(rawflags & mask)
- def rawFlagString: String = rawFlagString(flagMask)
- def debugFlagString: String = flagString(AllFlags)
-
- /** String representation of symbol's variance */
- def varianceString: String =
- if (variance == 1) "+"
- else if (variance == -1) "-"
- else ""
-
- override def flagMask =
- if (settings.debug.value && !isAbstractType) AllFlags
- else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE
- else ExplicitFlags
-
- // make the error message more googlable
- def flagsExplanationString =
- if (isGADTSkolem) " (this is a GADT skolem)"
- else ""
-
- /** If the given flag is set on this symbol, also set the corresponding
- * notFLAG. For instance if flag is PRIVATE, the notPRIVATE flag will
- * be set if PRIVATE is currently set.
- */
- final def setNotFlag(flag: Int) = if (hasFlag(flag)) setFlag((flag: @annotation.switch) match {
- case PRIVATE => notPRIVATE
- case PROTECTED => notPROTECTED
- case OVERRIDE => notOVERRIDE
- case _ => abort("setNotFlag on invalid flag: " + flag)
- })
-
- protected def shortSymbolClass = getClass.getName.split('.').last.stripPrefix("Symbols$")
- def symbolCreationString: String = (
- "%s%25s | %-40s | %s".format(
- if (settings.uniqid.value) "%06d | ".format(id) else "",
- shortSymbolClass,
- name.decode + " in " + owner,
- rawFlagString
- )
- )
- }
-}
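For the record, the arithmetic at the heart of the deleted FlagVerifier.verifyChange: given the flags before a set/reset call and the mask being applied, it computes which bits would actually be added or removed, so suspicious transitions (such as touching overloaded flags) can be reported. A minimal sketch of just that computation:

    object FlagDeltaSketch {
      /** Bits that would be added and removed by setting (isAdd) or clearing mask. */
      def delta(before: Long, mask: Long, isAdd: Boolean): (Long, Long) = {
        val after   = if (isAdd) before | mask else before & ~mask
        val added   = after & ~before
        val removed = before & ~after
        (added, removed)
      }

      def main(args: Array[String]): Unit = {
        // before = 0b101, clearing mask 0b011: only bit 0 is actually removed.
        val (added, removed) = delta(before = 5L, mask = 3L, isAdd = false)
        println(s"added=$added removed=$removed")   // added=0 removed=1
      }
    }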
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index 5ed37c04ad..ddd6c43031 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -13,9 +13,7 @@ import scala.tools.nsc.util.WeakHashSet
abstract class SymbolTable extends api.Universe
with Collections
with Names
- with SymbolCreations
with Symbols
- with SymbolFlags
with FreeVars
with Types
with Kinds
@@ -36,8 +34,9 @@ abstract class SymbolTable extends api.Universe
with Importers
with Required
with TreeBuildUtil
- with Reporters
+ with FrontEnds
with CapturedVariables
+ with StdAttachments
{
def rootLoader: LazyType
def log(msg: => AnyRef): Unit
@@ -49,14 +48,18 @@ abstract class SymbolTable extends api.Universe
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg)
def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
+ def throwableAsString(t: Throwable): String = "" + t
+
+ /** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */
+ def debugStack(t: Throwable): Unit = debugwarn(throwableAsString(t))
/** Overridden when we know more about what was happening during a failure. */
def supplementErrorMessage(msg: String): String = msg
-
+
private[scala] def printCaller[T](msg: String)(result: T) = {
- Console.err.println(msg + ": " + result)
- Console.err.println("Called from:")
- (new Throwable).getStackTrace.drop(2).take(15).foreach(Console.err.println)
+ Console.err.println("%s: %s\nCalled from: %s".format(msg, result,
+ (new Throwable).getStackTrace.drop(2).take(15).mkString("\n")))
+
result
}
@@ -75,6 +78,19 @@ abstract class SymbolTable extends api.Universe
result
}
+ // For too long have we suffered in order to sort NAMES.
+ // I'm pretty sure there's a reasonable default for that.
+ // Note the challenge created by Ordering's invariance.
+ implicit def lowPriorityNameOrdering[T <: Names#Name]: Ordering[T] =
+ SimpleNameOrdering.asInstanceOf[Ordering[T]]
+
+ private object SimpleNameOrdering extends Ordering[Names#Name] {
+ def compare(n1: Names#Name, n2: Names#Name) = (
+ if (n1 eq n2) 0
+ else n1.toString compareTo n2.toString
+ )
+ }
+
/** Dump each symbol to stdout after shutdown.
*/
final val traceSymbolActivity = sys.props contains "scalac.debug.syms"
@@ -266,42 +282,20 @@ abstract class SymbolTable extends api.Universe
object perRunCaches {
import java.lang.ref.WeakReference
import scala.runtime.ScalaRunTime.stringOf
+ import scala.collection.generic.Clearable
- // We can allow ourselves a structural type, these methods
- // amount to a few calls per run at most. This does suggest
- // a "Clearable" trait may be useful.
- private type Clearable = {
- def size: Int
- def clear(): Unit
- }
// Weak references so the garbage collector will take care of
// letting us know when a cache is really out of commission.
private val caches = mutable.HashSet[WeakReference[Clearable]]()
- private def dumpCaches() {
- println(caches.size + " structures are in perRunCaches.")
- caches.zipWithIndex foreach { case (ref, index) =>
- val cache = ref.get()
- println("(" + index + ")" + (
- if (cache == null) " has been collected."
- else " has " + cache.size + " entries:\n" + stringOf(cache)
- ))
- }
- }
- // if (settings.debug.value) {
- // println(Signallable("dump compiler caches")(dumpCaches()))
- // }
-
def recordCache[T <: Clearable](cache: T): T = {
caches += new WeakReference(cache)
cache
}
def clearAll() = {
- if (settings.debug.value) {
- val size = caches flatMap (ref => Option(ref.get)) map (_.size) sum;
- log("Clearing " + caches.size + " caches totalling " + size + " entries.")
- }
+ debuglog("Clearing " + caches.size + " caches.")
+
caches foreach { ref =>
val cache = ref.get()
if (cache == null)
@@ -317,11 +311,6 @@ abstract class SymbolTable extends api.Universe
def newWeakSet[K <: AnyRef]() = recordCache(new WeakHashSet[K]())
}
- /** Break into repl debugger if assertion is true. */
- // def breakIf(assertion: => Boolean, args: Any*): Unit =
- // if (assertion)
- // ILoop.break(args.toList)
-
/** The set of all installed infotransformers. */
var infoTransformers = new InfoTransformer {
val pid = NoPhase.id
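The structural Clearable type in perRunCaches gives way to scala.collection.generic.Clearable, which the standard mutable collections pick up through Growable, so recordCache no longer needs reflective calls. A minimal sketch of the register-weakly-and-clear-per-run idea under that trait; the demo cache name is hypothetical:

    import java.lang.ref.WeakReference
    import scala.collection.generic.Clearable
    import scala.collection.mutable

    object PerRunCachesSketch {
      private val caches = mutable.HashSet[WeakReference[Clearable]]()

      def recordCache[T <: Clearable](cache: T): T = {
        caches += new WeakReference(cache)
        cache
      }

      def clearAll(): Unit = caches foreach { ref =>
        val cache = ref.get()
        if (cache != null) cache.clear()   // collected caches are simply skipped
      }

      def main(args: Array[String]): Unit = {
        val demoCache = recordCache(mutable.HashMap[String, Int]("a" -> 1))
        clearAll()
        println(demoCache.isEmpty)         // true
      }
    }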
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index fc94e96acd..b32b955631 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -47,13 +47,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Create a new free term. Its owner is NoSymbol.
*/
- def newFreeTerm(name: TermName, info: Type, value: => Any, origin: String, newFlags: Long = 0L): FreeTerm =
- new FreeTerm(name, value, origin) initFlags newFlags setInfo info
+ def newFreeTermSymbol(name: TermName, info: Type, value: => Any, flags: Long = 0L, origin: String): FreeTerm =
+ new FreeTerm(name, value, origin) initFlags flags setInfo info
/** Create a new free type. Its owner is NoSymbol.
*/
- def newFreeType(name: TypeName, info: Type, value: => Any, origin: String, newFlags: Long = 0L): FreeType =
- new FreeType(name, value, origin) initFlags newFlags setInfo info
+ def newFreeTypeSymbol(name: TypeName, info: Type, value: => Any, flags: Long = 0L, origin: String): FreeType =
+ new FreeType(name, value, origin) initFlags flags setInfo info
/** The original owner of a class. Used by the backend to generate
* EnclosingMethod attributes.
@@ -64,6 +64,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
this: Symbol =>
def kind: String = kindString
+ def isExistential: Boolean = this.isExistentiallyBound
def newNestedSymbol(name: Name, pos: Position, newFlags: Long, isClass: Boolean): Symbol = name match {
case n: TermName => newTermSymbol(n, pos, newFlags)
@@ -156,9 +157,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
abstract class Symbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: Name)
extends AbsSymbolImpl
with HasFlags
- with SymbolFlagLogic
- with SymbolCreator
- // with FlagVerifier // DEBUG
with Annotatable[Symbol] {
type AccessBoundaryType = Symbol
@@ -208,6 +206,36 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
+ def rawFlagString(mask: Long): String = calculateFlagString(rawflags & mask)
+ def rawFlagString: String = rawFlagString(flagMask)
+ def debugFlagString: String = flagString(AllFlags)
+
+ /** String representation of symbol's variance */
+ def varianceString: String =
+ if (variance == 1) "+"
+ else if (variance == -1) "-"
+ else ""
+
+ override def flagMask =
+ if (settings.debug.value && !isAbstractType) AllFlags
+ else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE
+ else ExplicitFlags
+
+ // make the error message more googlable
+ def flagsExplanationString =
+ if (isGADTSkolem) " (this is a GADT skolem)"
+ else ""
+
+ def shortSymbolClass = getClass.getName.split('.').last.stripPrefix("Symbols$")
+ def symbolCreationString: String = (
+ "%s%25s | %-40s | %s".format(
+ if (settings.uniqid.value) "%06d | ".format(id) else "",
+ shortSymbolClass,
+ name.decode + " in " + owner,
+ rawFlagString
+ )
+ )
+
/** !!! The logic after "hasFlag" is far too opaque to be unexplained.
* I'm guessing it's attempting to compensate for flag overloading,
* and embedding such logic in an undocumented island like this is a
@@ -231,7 +259,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
newTermSymbol(name, pos, PARAM | newFlags)
/** Create local dummy for template (owner of local blocks) */
- final def newLocalDummy(pos: Position) =
+ final def newLocalDummy(pos: Position): TermSymbol =
newTermSymbol(nme.localDummyName(this), pos) setInfo NoType
final def newMethod(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): MethodSymbol =
createMethodSymbol(name, pos, METHOD | newFlags)
@@ -239,15 +267,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
newMethod(name, pos, LABEL)
/** Propagates ConstrFlags (JAVA, specifically) from owner to constructor. */
- final def newConstructor(pos: Position, newFlags: Long = 0L) =
+ final def newConstructor(pos: Position, newFlags: Long = 0L): MethodSymbol =
newMethod(nme.CONSTRUCTOR, pos, getFlag(ConstrFlags) | newFlags)
/** Static constructor with info set. */
- def newStaticConstructor(pos: Position) =
+ def newStaticConstructor(pos: Position): MethodSymbol =
newConstructor(pos, STATIC) setInfo UnitClass.tpe
/** Instance constructor with info set. */
- def newClassConstructor(pos: Position) =
+ def newClassConstructor(pos: Position): MethodSymbol =
newConstructor(pos) setInfo MethodType(Nil, this.tpe)
def newLinkedModule(clazz: Symbol, newFlags: Long = 0L): ModuleSymbol = {
@@ -265,10 +293,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
newModule(name, pos, PackageFlags | newFlags)
}
- final def newThisSym(name: TermName = nme.this_, pos: Position = NoPosition) =
+ final def newThisSym(name: TermName = nme.this_, pos: Position = NoPosition): TermSymbol =
newTermSymbol(name, pos, SYNTHETIC)
- final def newImport(pos: Position) =
+ final def newImport(pos: Position): TermSymbol =
newTermSymbol(nme.IMPORT, pos)
final def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
@@ -298,45 +326,45 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*
* pre.memberType(m)
*/
- final def newOverloaded(pre: Type, alternatives: List[Symbol]): Symbol = (
+ final def newOverloaded(pre: Type, alternatives: List[Symbol]): TermSymbol = (
newTermSymbol(alternatives.head.name.toTermName, alternatives.head.pos, OVERLOADED)
setInfo OverloadedType(pre, alternatives)
)
- final def newErrorValue(name: TermName) =
+ final def newErrorValue(name: TermName): TermSymbol =
newTermSymbol(name, pos, SYNTHETIC | IS_ERROR) setInfo ErrorType
/** Symbol of a type definition type T = ...
*/
- final def newAliasType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
+ final def newAliasType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AliasTypeSymbol =
createAliasTypeSymbol(name, pos, newFlags)
/** Symbol of an abstract type type T >: ... <: ...
*/
- final def newAbstractType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
+ final def newAbstractType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AbstractTypeSymbol =
createAbstractTypeSymbol(name, pos, DEFERRED | newFlags)
/** Symbol of a type parameter
*/
- final def newTypeParameter(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L) =
+ final def newTypeParameter(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
newAbstractType(name, pos, PARAM | newFlags)
/** Symbol of an existential type T forSome { ... }
*/
- final def newExistential(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
+ final def newExistential(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
newAbstractType(name, pos, EXISTENTIAL | newFlags)
/** Synthetic value parameters when parameter symbols are not available
*/
- final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[Symbol]] = {
+ final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] = {
var cnt = 0
def freshName() = { cnt += 1; nme.syntheticParamName(cnt) }
mmap(argtypess)(tp => newValueParameter(freshName(), owner.pos.focus, SYNTHETIC) setInfo tp)
}
- def newSyntheticTypeParam(): Symbol = newSyntheticTypeParam("T0", 0L)
- def newSyntheticTypeParam(name: String, newFlags: Long): Symbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
- def newSyntheticTypeParams(num: Int): List[Symbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
+ def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L)
+ def newSyntheticTypeParam(name: String, newFlags: Long): TypeSymbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
+ def newSyntheticTypeParams(num: Int): List[TypeSymbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
/** Create a new existential type skolem with this symbol its owner,
* based on the given symbol and origin.
@@ -351,13 +379,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newGADTSkolem(name: TypeName, origin: Symbol, info: Type): TypeSkolem =
newTypeSkolemSymbol(name, origin, origin.pos, origin.flags & ~(EXISTENTIAL | PARAM) | CASEACCESSOR | SYNTHETIC) setInfo info
- final def freshExistential(suffix: String): Symbol =
+ final def freshExistential(suffix: String): TypeSymbol =
newExistential(freshExistentialName(suffix), pos)
/** Synthetic value parameters when parameter symbols are not available.
* Calling this method multiple times will re-use the same parameter names.
*/
- final def newSyntheticValueParams(argtypes: List[Type]): List[Symbol] =
+ final def newSyntheticValueParams(argtypes: List[Type]): List[TermSymbol] =
newSyntheticValueParamss(List(argtypes)).head
/** Synthetic value parameter when parameter symbol is not available.
@@ -371,27 +399,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* with name `T` in its typeParams list. While type checking the parameters, result type and
* body of the method, there's a local copy of `T` which is a TypeSkolem.
*/
- final def newTypeSkolem: Symbol =
+ final def newTypeSkolem: TypeSkolem =
owner.newTypeSkolemSymbol(name.toTypeName, this, pos, flags)
- final def newClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L) =
+ final def newClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
newClassSymbol(name, pos, newFlags)
/** A new class with its info set to a ClassInfoType with given scope and parents. */
- def newClassWithInfo(name: TypeName, parents: List[Type], scope: Scope, pos: Position = NoPosition, newFlags: Long = 0L) = {
+ def newClassWithInfo(name: TypeName, parents: List[Type], scope: Scope, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol = {
val clazz = newClass(name, pos, newFlags)
clazz setInfo ClassInfoType(parents, scope, clazz)
}
- final def newErrorClass(name: TypeName) =
+ final def newErrorClass(name: TypeName): ClassSymbol =
newClassWithInfo(name, Nil, new ErrorScope(this), pos, SYNTHETIC | IS_ERROR)
- final def newModuleClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L) =
+ final def newModuleClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
newModuleClassSymbol(name, pos, newFlags | MODULE)
- final def newAnonymousFunctionClass(pos: Position = NoPosition, newFlags: Long = 0L) =
+ final def newAnonymousFunctionClass(pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
newClassSymbol(tpnme.ANON_FUN_NAME, pos, FINAL | SYNTHETIC | newFlags)
- final def newAnonymousFunctionValue(pos: Position, newFlags: Long = 0L) =
+ final def newAnonymousFunctionValue(pos: Position, newFlags: Long = 0L): TermSymbol =
newTermSymbol(nme.ANON_FUN_NAME, pos, SYNTHETIC | newFlags) setInfo NoType
def newImplClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol = {
@@ -401,11 +429,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Refinement types P { val x: String; type T <: Number }
* also have symbols, they are refinementClasses
*/
- final def newRefinementClass(pos: Position) = createRefinementClassSymbol(pos, 0L)
+ final def newRefinementClass(pos: Position): RefinementClassSymbol =
+ createRefinementClassSymbol(pos, 0L)
/** Create a new getter for current symbol (which must be a field)
*/
- final def newGetter: Symbol = (
+ final def newGetter: MethodSymbol = (
owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags))
setPrivateWithin privateWithin
setInfo MethodType(Nil, tpe)
@@ -618,15 +647,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
&& owner.isPackageClass
&& nme.isReplWrapperName(name)
)
- @inline final override def getFlag(mask: Long): Long = flags & mask
+ @inline final def getFlag(mask: Long): Long = flags & mask
/** Does symbol have ANY flag in `mask` set? */
- @inline final override def hasFlag(mask: Long): Boolean = (flags & mask) != 0
+ @inline final def hasFlag(mask: Long): Boolean = (flags & mask) != 0
/** Does symbol have ALL the flags in `mask` set? */
- @inline final override def hasAllFlags(mask: Long): Boolean = (flags & mask) == mask
+ @inline final def hasAllFlags(mask: Long): Boolean = (flags & mask) == mask
- override def setFlag(mask: Long): this.type = { _rawflags |= mask ; this }
- override def resetFlag(mask: Long): this.type = { _rawflags &= ~mask ; this }
- override def resetFlags() { rawflags &= (TopLevelCreationFlags | alwaysHasFlags) }
+ def setFlag(mask: Long): this.type = { _rawflags |= mask ; this }
+ def resetFlag(mask: Long): this.type = { _rawflags &= ~mask ; this }
+ def resetFlags() { rawflags &= TopLevelCreationFlags }
/** Default implementation calls the generic string function, which
* will print overloaded flags as <flag1/flag2/flag3>. Subclasses
@@ -637,7 +666,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Set the symbol's flags to the given value, asserting
* that the previous value was 0.
*/
- override def initFlags(mask: Long): this.type = {
+ def initFlags(mask: Long): this.type = {
assert(rawflags == 0L, symbolCreationString)
_rawflags = mask
this
@@ -706,7 +735,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
|| isAnonOrRefinementClass // has uninteresting <anon> or <refinement> prefix
|| nme.isReplWrapperName(name) // has ugly $iw. prefix (doesn't call isInterpreterWrapper due to nesting)
)
- def isFBounded = info.baseTypeSeq exists (_ contains this)
+ def isFBounded = info match {
+ case TypeBounds(_, _) => info.baseTypeSeq exists (_ contains this)
+ case _ => false
+ }
/** Is symbol a monomorphic type?
* assumption: if a type starts out as monomorphic, it will not acquire
@@ -714,13 +746,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def isMonomorphicType =
isType && {
- var is = infos
- (is eq null) || {
- while (is.prev ne null) { is = is.prev }
- is.info.isComplete && !is.info.isHigherKinded // was: is.info.typeParams.isEmpty.
- // YourKit listed the call to PolyType.typeParams as a hot spot but it is likely an artefact.
- // The change to isHigherKinded did not reduce the total running time.
- }
+ val info = originalInfo
+ info.isComplete && !info.isHigherKinded
}
def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr)
@@ -850,6 +877,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isStructuralRefinement: Boolean =
(isClass || isType || isModule) && info.normalize/*.underlying*/.isStructuralRefinement
+ /** Is this a term symbol only defined in a refinement (so that it needs
+ * to be accessed by reflection)?
+ */
+ def isOnlyRefinementMember: Boolean =
+ isTerm && // type members are not affected
+ owner.isRefinementClass && // owner must be a refinement class
+ (owner.info decl name) == this && // symbol must be explicitly declared in the refinement (not synthesized from glb)
+ allOverriddenSymbols.isEmpty && // symbol must not override a symbol in a base class
+ !isConstant // symbol must not be a constant. Question: Can we exclude @inline methods as well?
+
final def isStructuralRefinementMember = owner.isStructuralRefinement && isPossibleInRefinement && isPublic
final def isPossibleInRefinement = !isConstructor && !isOverridingSymbol
@@ -887,8 +924,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (!owner.isLocatable) return false
if (owner.isTerm) return false
+ if (isLocalDummy) return false
if (isType && isNonClassType) return false
+ if (isRefinementClass) return false
return true
}
@@ -1073,6 +1112,44 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
new TermSymbol(this, pos, name) initFlags newFlags
+ final def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol = {
+ if ((newFlags & METHOD) != 0)
+ createMethodSymbol(name, pos, newFlags)
+ else if ((newFlags & PACKAGE) != 0)
+ createPackageSymbol(name, pos, newFlags | PackageFlags)
+ else if ((newFlags & MODULE) != 0)
+ createModuleSymbol(name, pos, newFlags)
+ else if ((newFlags & PARAM) != 0)
+ createValueParameterSymbol(name, pos, newFlags)
+ else
+ createValueMemberSymbol(name, pos, newFlags)
+ }
+
+ final def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol = {
+ if (name == tpnme.REFINE_CLASS_NAME)
+ createRefinementClassSymbol(pos, newFlags)
+ else if ((newFlags & PACKAGE) != 0)
+ createPackageClassSymbol(name, pos, newFlags | PackageFlags)
+ else if (name == tpnme.PACKAGE)
+ createPackageObjectClassSymbol(pos, newFlags)
+ else if ((newFlags & MODULE) != 0)
+ createModuleClassSymbol(name, pos, newFlags)
+ else if ((newFlags & IMPLCLASS) != 0)
+ createImplClassSymbol(name, pos, newFlags)
+ else
+ createClassSymbol(name, pos, newFlags)
+ }
+
+ final def newNonClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol = {
+ if ((newFlags & DEFERRED) != 0)
+ createAbstractTypeSymbol(name, pos, newFlags)
+ else
+ createAliasTypeSymbol(name, pos, newFlags)
+ }
+
+ def newTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
+ newNonClassSymbol(name, pos, newFlags)
+
/** The class or term up to which this symbol is accessible,
* or RootClass if it is public. As java protected statics are
* otherwise completely inaccessible in scala, they are treated
@@ -1110,6 +1187,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ info and type -------------------------------------------------------------------
private[Symbols] var infos: TypeHistory = null
+ def originalInfo = {
+ if (infos eq null) null
+ else {
+ var is = infos
+ while (is.prev ne null) { is = is.prev }
+ is.info
+ }
+ }
/** Get type. The type of a symbol is:
* for a type symbol, the type corresponding to the symbol itself,
@@ -1192,8 +1277,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Set new info valid from start of this phase. */
def updateInfo(info: Type): Symbol = {
- assert(phaseId(infos.validFrom) <= phase.id)
- if (phaseId(infos.validFrom) == phase.id) infos = infos.prev
+ val pid = phaseId(infos.validFrom)
+ assert(pid <= phase.id, (pid, phase.id))
+ if (pid == phase.id) infos = infos.prev
infos = TypeHistory(currentPeriod, info, infos)
_validTo = if (info.isComplete) currentPeriod else NoPeriod
this
@@ -1404,7 +1490,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Reset symbol to initial state
*/
- def reset(completer: Type) {
+ def reset(completer: Type): this.type = {
resetFlags()
infos = null
_validTo = NoPeriod
@@ -1634,7 +1720,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* symbol with the same owner, and the name of this symbol with $class
* appended to it.
*/
- final def implClass: Symbol = owner.info.decl(nme.implClassName(name))
+ final def implClass: Symbol = owner.info.decl(tpnme.implClassName(name))
/** The class that is logically an outer class of given `clazz`.
* This is the enclosing class, except for classes defined locally to constructors,
@@ -1751,7 +1837,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
} else owner.enclosingTopLevelClass
/** Is this symbol defined in the same scope and compilation unit as `that` symbol? */
- def isCoDefinedWith(that: Symbol) = (
+ def isCoDefinedWith(that: Symbol) = {
+ import language.reflectiveCalls
(this.rawInfo ne NoType) &&
(this.effectiveOwner == that.effectiveOwner) && {
!this.effectiveOwner.isPackageClass ||
@@ -1770,7 +1857,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
false
}
}
- )
+ }
/** The internal representation of classes and objects:
*
@@ -1874,7 +1961,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
site.nonPrivateMemberAdmitting(name, admit).filter(sym =>
!sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
- /** The symbol overridden by this symbol in given class `ofclazz`.
+ /** The symbol, in class `ofclazz`, that is overridden by this symbol.
*
* @param ofclazz is a base class of this symbol's owner.
*/
@@ -2014,13 +2101,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isExpandedModuleClass: Boolean = name(name.length - 1) == '$'
*/
- def sourceFile: AbstractFileType =
- if (isModule) moduleClass.sourceFile
- else enclosingTopLevelClass.sourceFile
- def sourceFile_=(f: AbstractFileType) {
- abort("sourceFile_= inapplicable for " + this)
- }
+ /** We want to re-use the field in ClassSymbol which stores the source
+ * file to also store the classfile, without changing the behavior of
+ * sourceFile (which, at least in the IDE, is expected to return actual
+ * source code only). So sourceFile filters out classfiles.
+ */
+ private def sourceFileOnly(file: AbstractFileType): AbstractFileType =
+ if ((file eq null) || (file.path endsWith ".class")) null else file
+
+ private def binaryFileOnly(file: AbstractFileType): AbstractFileType =
+ if ((file eq null) || !(file.path endsWith ".class")) null else file
+
+ final def binaryFile: AbstractFileType = binaryFileOnly(associatedFile)
+ final def sourceFile: AbstractFileType = sourceFileOnly(associatedFile)
+
+ /** Overridden in ModuleSymbols to delegate to the module class. */
+ def associatedFile: AbstractFileType = enclosingTopLevelClass.associatedFile
+ def associatedFile_=(f: AbstractFileType) { abort("associatedFile_= inapplicable for " + this) }
+
+ @deprecated("Use associatedFile_= instead", "2.10.0")
+ def sourceFile_=(f: AbstractFileType): Unit = associatedFile_=(f)
/** If this is a sealed class, its known direct subclasses.
* Otherwise, the empty set.
@@ -2031,7 +2132,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def sealedDescendants: Set[Symbol] = children.flatMap(_.sealedDescendants) + this
- @inline final def orElse[T](alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt
+ @inline final def orElse(alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt
+ @inline final def andAlso(f: Symbol => Unit): Symbol = { if (this ne NoSymbol) f(this) ; this }
// ------ toString -------------------------------------------------------------------
@@ -2051,7 +2153,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** String representation of symbol's definition key word */
final def keyString: String =
if (isJavaInterface) "interface"
- else if (isTrait) "trait"
+ else if (isTrait && !isImplClass) "trait"
else if (isClass) "class"
else if (isType && !isParameter) "type"
else if (isVariable) "var"
@@ -2079,6 +2181,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (isSetter) ("setter", if (isSourceMethod) "method" else "value", "SET")
else if (isTerm && isLazy) ("lazy value", "lazy value", "LAZ")
else if (isVariable) ("field", "variable", "VAR")
+ else if (isImplClass) ("implementation class", "class", "IMPL")
else if (isTrait) ("trait", "trait", "TRT")
else if (isClass) ("class", "class", "CLS")
else if (isType) ("type", "type", "TPE")
@@ -2195,7 +2298,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def infosString = infos.toString
- def debugLocationString = fullLocationString + " " + debugFlagString
+ def debugLocationString = fullLocationString + " (flags: " + debugFlagString + ")"
private def defStringCompose(infoString: String) = compose(
flagString,
@@ -2221,13 +2324,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** String representation of existentially bound variable */
def existentialToString =
if (isSingletonExistential && !settings.debug.value)
- "val " + nme.dropSingletonName(name) + ": " + dropSingletonType(info.bounds.hi)
+ "val " + tpnme.dropSingletonName(name) + ": " + dropSingletonType(info.bounds.hi)
else defString
}
/** A class for term symbols */
class TermSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
- extends Symbol(initOwner, initPos, initName) {
+ extends Symbol(initOwner, initPos, initName) with TermSymbolApi {
private[this] var _referenced: Symbol = NoSymbol
privateWithin = NoSymbol
@@ -2396,10 +2499,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** A class for module symbols */
class ModuleSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
- extends TermSymbol(initOwner, initPos, initName) with DistinguishingFlag {
- def distinguishingFlag = MODULE
+ extends TermSymbol(initOwner, initPos, initName) with ModuleSymbolApi {
private var flatname: TermName = null
+ override def associatedFile = moduleClass.associatedFile
+ override def associatedFile_=(f: AbstractFileType) { moduleClass.associatedFile = f }
+
override def isModule = true
override def moduleClass = referenced
override def companionClass =
@@ -2421,19 +2526,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
class PackageSymbol protected[Symbols] (owner0: Symbol, pos0: Position, name0: TermName)
- extends ModuleSymbol(owner0, pos0, name0) with DistinguishingFlag {
- override def distinguishingFlag = super.distinguishingFlag | PACKAGE
+ extends ModuleSymbol(owner0, pos0, name0) with PackageSymbolApi {
override def isPackage = true
}
/** A class for method symbols */
class MethodSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
- extends TermSymbol(initOwner, initPos, initName) with DistinguishingFlag {
- def distinguishingFlag = METHOD
- // MethodSymbols pick up MODULE when trait-owned object accessors are cloned
- // during mixin composition.
- override protected def neverHasFlags = super.neverHasFlags & ~MODULE
-
+ extends TermSymbol(initOwner, initPos, initName) with MethodSymbolApi {
private[this] var mtpePeriod = NoPeriod
private[this] var mtpePre: Type = _
private[this] var mtpeResult: Type = _
@@ -2490,7 +2589,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* of this class. Classes are instances of a subclass.
*/
abstract class TypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
- extends Symbol(initOwner, initPos, initName) {
+ extends Symbol(initOwner, initPos, initName) with TypeSymbolApi {
privateWithin = NoSymbol
private[this] var _rawname: TypeName = initName
@@ -2614,10 +2713,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
info.baseTypeIndex(that) >= 0
)
- override def reset(completer: Type) {
+ override def reset(completer: Type): this.type = {
super.reset(completer)
tpePeriod = NoPeriod
tyconRunId = NoRunId
+ this
}
/*** example:
@@ -2691,19 +2791,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** A class for class symbols */
class ClassSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
- extends TypeSymbol(initOwner, initPos, initName) {
+ extends TypeSymbol(initOwner, initPos, initName) with ClassSymbolApi {
type TypeOfClonedSymbol = ClassSymbol
- private[this] var flatname: TypeName = _
- private[this] var source: AbstractFileType = _
- private[this] var thissym: Symbol = this
+ private[this] var flatname: TypeName = _
+ private[this] var _associatedFile: AbstractFileType = _
+ private[this] var thissym: Symbol = this
private[this] var thisTypeCache: Type = _
private[this] var thisTypePeriod = NoPeriod
- override protected def alwaysHasFlags: Long = 0L
- override protected def neverHasFlags: Long = 0L
-
override def resolveOverloadedFlag(flag: Long) = flag match {
case INCONSTRUCTOR => "<inconstructor>" // INCONSTRUCTOR / CONTRAVARIANT / LABEL
case EXISTENTIAL => "<existential>" // EXISTENTIAL / MIXEDIN
@@ -2738,7 +2835,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def toInterface: Symbol = (
if (isImplClass) {
if (phase.next.erasedTypes) lastParent
- else owner.info.decl(nme.interfaceName(name))
+ else owner.info.decl(tpnme.interfaceName(name))
}
else super.toInterface
)
@@ -2795,14 +2892,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (c.isOverloaded) c.alternatives.head else c
}
- override def sourceFile =
- if (owner.isPackageClass) source
- else super.sourceFile
- override def sourceFile_=(f: AbstractFileType) { source = f }
+ override def associatedFile = if (owner.isPackageClass) _associatedFile else super.associatedFile
+ override def associatedFile_=(f: AbstractFileType) { _associatedFile = f }
- override def reset(completer: Type) {
+ override def reset(completer: Type): this.type = {
super.reset(completer)
thissym = this
+ this
}
/** the type this.type in this class */
@@ -2842,6 +2938,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
clone.typeOfThis = typeOfThis
clone.thisSym setName thisSym.name
}
+ if (_associatedFile ne null)
+ clone.associatedFile = _associatedFile
+
clone
}
@@ -2860,13 +2959,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* plain class symbols!
*/
class ModuleClassSymbol protected[Symbols] (owner: Symbol, pos: Position, name: TypeName)
- extends ClassSymbol(owner, pos, name) with DistinguishingFlag {
+ extends ClassSymbol(owner, pos, name) {
private[this] var module: Symbol = _
private[this] var typeOfThisCache: Type = _
private[this] var typeOfThisPeriod = NoPeriod
- def distinguishingFlag = MODULE
-
private var implicitMembersCacheValue: List[Symbol] = Nil
private var implicitMembersCacheKey1: Type = NoType
private var implicitMembersCacheKey2: ScopeEntry = null
@@ -2921,8 +3018,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
class PackageClassSymbol protected[Symbols] (owner0: Symbol, pos0: Position, name0: TypeName)
- extends ModuleClassSymbol(owner0, pos0, name0) with DistinguishingFlag {
- override def distinguishingFlag = super.distinguishingFlag | PACKAGE
+ extends ModuleClassSymbol(owner0, pos0, name0) {
override def sourceModule = companionModule
override def enclClassChain = Nil
override def isPackageClass = true
@@ -2949,13 +3045,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
)
}
- class FreeTerm(name0: TermName, value0: => Any, val origin: String) extends TermSymbol(NoSymbol, NoPosition, name0) {
+ trait FreeSymbol extends Symbol {
+ def origin: String
+ }
+ class FreeTerm(name0: TermName, value0: => Any, val origin: String) extends TermSymbol(NoSymbol, NoPosition, name0) with FreeSymbol {
def value = value0
override def isFreeTerm = true
}
- // [Eugene] the NoSymbol origin works for type parameters. what about existential free types?
- class FreeType(name0: TypeName, value0: => Any, val origin: String) extends TypeSkolem(NoSymbol, NoPosition, name0, NoSymbol) {
+ class FreeType(name0: TypeName, value0: => Any, val origin: String) extends TypeSkolem(NoSymbol, NoPosition, name0, NoSymbol) with FreeSymbol {
def value = value0
override def isFreeType = true
}
@@ -2994,11 +3092,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def enclosingTopLevelClass: Symbol = this
override def enclosingPackageClass: Symbol = this
override def enclMethod: Symbol = this
- override def sourceFile: AbstractFileType = null
+ override def associatedFile = null
override def ownerChain: List[Symbol] = List()
override def ownersIterator: Iterator[Symbol] = Iterator.empty
override def alternatives: List[Symbol] = List()
- override def reset(completer: Type) {}
+ override def reset(completer: Type): this.type = this
override def info: Type = NoType
override def existentialBound: Type = NoType
override def rawInfo: Type = NoType
@@ -3008,7 +3106,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def originalEnclosingMethod = this
override def owner: Symbol =
- abort("no-symbol does not have an owner (this is a bug: scala " + scala.util.Properties.versionString + ")")
+ abort("no-symbol does not have an owner")
override def typeConstructor: Type =
abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)")
}
@@ -3093,10 +3191,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (settings.debug.value) printStackTrace()
}
- case class InvalidCompanions(sym1: Symbol, sym2: Symbol) extends Throwable(
+ case class InvalidCompanions(sym1: Symbol, sym2: Symbol) extends Throwable({
+ import language.reflectiveCalls
"Companions '" + sym1 + "' and '" + sym2 + "' must be defined in same file:\n" +
" Found in " + sym1.sourceFile.canonicalPath + " and " + sym2.sourceFile.canonicalPath
- ) {
+ }) {
override def toString = getMessage
}
diff --git a/src/compiler/scala/reflect/internal/TreeBuildUtil.scala b/src/compiler/scala/reflect/internal/TreeBuildUtil.scala
index fbcd5043bc..d4d4652e91 100644
--- a/src/compiler/scala/reflect/internal/TreeBuildUtil.scala
+++ b/src/compiler/scala/reflect/internal/TreeBuildUtil.scala
@@ -1,6 +1,8 @@
package scala.reflect
package internal
+import Flags._
+
trait TreeBuildUtil extends api.TreeBuildUtil { self: SymbolTable =>
// ``staticClass'' and ``staticModule'' rely on ClassLoaders
@@ -51,9 +53,11 @@ trait TreeBuildUtil extends api.TreeBuildUtil { self: SymbolTable =>
try selectOverloadedMethod(owner, name, index)
catch { case _: MissingRequirementError => NoSymbol }
- def newFreeTerm(name: String, info: Type, value: => Any, origin: String) = newFreeTerm(newTermName(name), info, value, origin)
+ def newFreeTerm(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null) = newFreeTermSymbol(newTermName(name), info, value, flags, origin)
+
+ def newFreeType(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null) = newFreeTypeSymbol(newTypeName(name), info, value, (if (flags == 0L) PARAM else flags) | DEFERRED, origin)
- def newFreeType(name: String, info: Type, value: => Any, origin: String) = newFreeType(newTypeName(name), info, value, origin)
+ def newFreeExistential(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null) = newFreeTypeSymbol(newTypeName(name), info, value, (if (flags == 0L) EXISTENTIAL else flags) | DEFERRED, origin)
def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers =
Modifiers(flags, privateWithin, annotations)
diff --git a/src/compiler/scala/reflect/internal/TreeGen.scala b/src/compiler/scala/reflect/internal/TreeGen.scala
index 1a374b6e59..f2f9842595 100644
--- a/src/compiler/scala/reflect/internal/TreeGen.scala
+++ b/src/compiler/scala/reflect/internal/TreeGen.scala
@@ -7,13 +7,14 @@ abstract class TreeGen extends api.AbsTreeGen {
import global._
import definitions._
- def rootId(name: Name) = Select(Ident(nme.ROOTPKG), name)
- def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name)
- def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name)
- def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass
- def scalaUnitConstr = scalaDot(tpnme.Unit) setSymbol UnitClass
- def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass
- def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass
+ def rootId(name: Name) = Select(Ident(nme.ROOTPKG), name)
+ def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name)
+ def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name)
+ def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name)
+ def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass
+ def scalaUnitConstr = scalaDot(tpnme.Unit) setSymbol UnitClass
+ def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass
+ def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass
def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = {
val cls = if (abstractFun)
@@ -48,6 +49,9 @@ abstract class TreeGen extends api.AbsTreeGen {
def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree =
Apply(mkTypeApply(target, targs map TypeTree), args)
+ def mkNullaryCall(method: Symbol, targs: List[Type]): Tree =
+ mkTypeApply(mkAttributedRef(method), targs map TypeTree)
+
/** Builds a reference to value whose type is given stable prefix.
* The type must be suitable for this. For example, it
* must not be a TypeRef pointing to an abstract type variable.
diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala
index ed22cad730..1528061adb 100644
--- a/src/compiler/scala/reflect/internal/TreeInfo.scala
+++ b/src/compiler/scala/reflect/internal/TreeInfo.scala
@@ -221,14 +221,37 @@ abstract class TreeInfo {
case _ => false
}
+ /**
+ * Named arguments can transform a constructor call into a block, e.g.
+ * <init>(b = foo, a = bar)
+ * is transformed to
+ * { val x$1 = foo
+ * val x$2 = bar
+ * <init>(x$2, x$1)
+ * }
+ */
+ def stripNamedApplyBlock(tree: Tree) = tree match {
+ case Block(stats, expr) if stats.forall(_.isInstanceOf[ValDef]) =>
+ expr
+ case _ =>
+ tree
+ }
+
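For illustration, a self-contained sketch in ordinary user code (not part of this patch) of the desugaring described in the doc comment on stripNamedApplyBlock: out-of-order named arguments are evaluated in the order written, via exactly the kind of synthetic vals the new method peels off.

object NamedArgDesugaring extends App {
  def init(a: String, b: String) = "a=" + a + ", b=" + b

  def foo = { println("evaluating foo"); "foo" }
  def bar = { println("evaluating bar"); "bar" }

  // init(b = foo, a = bar) is compiled roughly as
  //   { val x$1 = foo; val x$2 = bar; init(x$2, x$1) }
  // so foo is evaluated before bar even though `a` is the first parameter.
  println(init(b = foo, a = bar)) // prints a=bar, b=foo
}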
/** Is tree a self or super constructor call? */
- def isSelfOrSuperConstrCall(tree: Tree) =
- isSelfConstrCall(tree) || isSuperConstrCall(tree)
+ def isSelfOrSuperConstrCall(tree: Tree) = {
+ // stripNamedApply for SI-3584: adaptToImplicitMethod in Typers creates a special context
+ // for implicit search in constructor calls, adaptToImplicitMethod(isSelfOrConstrCall)
+ val tree1 = stripNamedApplyBlock(tree)
+ isSelfConstrCall(tree1) || isSuperConstrCall(tree1)
+ }
/** Is tree a variable pattern? */
def isVarPattern(pat: Tree): Boolean = pat match {
- case _: BackQuotedIdent => false
- case x: Ident => isVariableName(x.name)
+ case x: Ident => !x.isBackquoted && isVariableName(x.name)
+ case _ => false
+ }
+ def isDeprecatedIdentifier(tree: Tree): Boolean = tree match {
+ case x: Ident => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name)
case _ => false
}
@@ -532,6 +555,21 @@ abstract class TreeInfo {
}
}
+ def isApplyDynamicName(name: Name) = (name == nme.updateDynamic) || (name == nme.selectDynamic) || (name == nme.applyDynamic) || (name == nme.applyDynamicNamed)
+
+ class DynamicApplicationExtractor(nameTest: Name => Boolean) {
+ def unapply(tree: Tree) = tree match {
+ case Apply(TypeApply(Select(qual, oper), _), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name))
+ case Apply(Select(qual, oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name))
+ case Apply(Ident(oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((EmptyTree, name))
+ case _ => None
+ }
+ }
+ object DynamicUpdate extends DynamicApplicationExtractor(_ == nme.updateDynamic)
+ object DynamicApplication extends DynamicApplicationExtractor(isApplyDynamicName)
+ object DynamicApplicationNamed extends DynamicApplicationExtractor(_ == nme.applyDynamicNamed)
+
+
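A hedged sketch of where the four names tested by isApplyDynamicName come from in user code (Scala 2.10 Dynamic desugaring; the Logger class and its method bodies are invented for illustration):

import scala.language.dynamics

class Logger extends Dynamic {
  def selectDynamic(name: String): String            = "select " + name
  def updateDynamic(name: String)(value: Any): Unit  = println("update " + name + " = " + value)
  def applyDynamic(name: String)(args: Any*): String = "apply " + name + args.mkString("(", ", ", ")")
  def applyDynamicNamed(name: String)(args: (String, Any)*): String =
    "applyNamed " + name + args.mkString("(", ", ", ")")
}

object DynamicDemo extends App {
  val log = new Logger
  println(log.level)            // rewritten to log.selectDynamic("level")
  log.level = "debug"           // rewritten to log.updateDynamic("level")("debug")
  println(log.warn("boom", 42)) // rewritten to log.applyDynamic("warn")("boom", 42)
  println(log.warn(msg = "hi")) // rewritten to log.applyDynamicNamed("warn")(("msg", "hi"))
}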
// domain-specific extractors for reification
import definitions._
@@ -575,7 +613,7 @@ abstract class TreeInfo {
}
object Reified {
- def unapply(tree: Tree): Option[(Tree, List[ValDef], Tree)] = tree match {
+ def unapply(tree: Tree): Option[(Tree, List[Tree], Tree)] = tree match {
case ReifiedTree(reifee, symbolTable, reified, _) =>
Some(reifee, symbolTable, reified)
case ReifiedType(reifee, symbolTable, reified) =>
@@ -586,16 +624,16 @@ abstract class TreeInfo {
}
object ReifiedTree {
- def unapply(tree: Tree): Option[(Tree, List[ValDef], Tree, Tree)] = tree match {
- case reifee @ Block((mrDef @ ValDef(_, _, _, _)) :: symbolTable, Apply(Apply(_, List(tree)), List(Apply(_, List(tpe))))) if mrDef.name == nme.MIRROR_SHORT =>
- Some(reifee, symbolTable map (_.asInstanceOf[ValDef]), tree, tpe)
+ def unapply(tree: Tree): Option[(Tree, List[Tree], Tree, Tree)] = tree match {
+ case reifee @ Block((mrDef @ ValDef(_, _, _, _)) :: symbolTable, Apply(Apply(_, List(tree)), List(Apply(_, tpe :: _)))) if mrDef.name == nme.MIRROR_SHORT =>
+ Some(reifee, symbolTable, tree, tpe)
case _ =>
None
}
}
object InlineableTreeSplice {
- def unapply(tree: Tree): Option[(Tree, List[ValDef], Tree, Tree, Symbol)] = tree match {
+ def unapply(tree: Tree): Option[(Tree, List[Tree], Tree, Tree, Symbol)] = tree match {
case select @ Select(ReifiedTree(splicee, symbolTable, tree, tpe), _) if select.symbol == ExprEval || select.symbol == ExprValue =>
Some(splicee, symbolTable, tree, tpe, select.symbol)
case _ =>
@@ -604,7 +642,7 @@ abstract class TreeInfo {
}
object InlinedTreeSplice {
- def unapply(tree: Tree): Option[(Tree, List[ValDef], Tree, Tree)] = tree match {
+ def unapply(tree: Tree): Option[(Tree, List[Tree], Tree, Tree)] = tree match {
case Select(ReifiedTree(splicee, symbolTable, tree, tpe), name) if name == ExprTree.name =>
Some(splicee, symbolTable, tree, tpe)
case _ =>
@@ -613,16 +651,16 @@ abstract class TreeInfo {
}
object ReifiedType {
- def unapply(tree: Tree): Option[(Tree, List[ValDef], Tree)] = tree match {
- case reifee @ Block((mrDef @ ValDef(_, _, _, _)) :: symbolTable, Apply(_, List(tpe))) if mrDef.name == nme.MIRROR_SHORT =>
- Some(reifee, symbolTable map (_.asInstanceOf[ValDef]), tpe)
+ def unapply(tree: Tree): Option[(Tree, List[Tree], Tree)] = tree match {
+ case reifee @ Block((mrDef @ ValDef(_, _, _, _)) :: symbolTable, Apply(_, tpe :: _)) if mrDef.name == nme.MIRROR_SHORT =>
+ Some(reifee, symbolTable, tpe)
case _ =>
None
}
}
object InlinedTypeSplice {
- def unapply(tree: Tree): Option[(Tree, List[ValDef], Tree)] = tree match {
+ def unapply(tree: Tree): Option[(Tree, List[Tree], Tree)] = tree match {
case Select(ReifiedType(splicee, symbolTable, tpe), name) if name == TypeTagTpe.name =>
Some(splicee, symbolTable, tpe)
case _ =>
@@ -631,11 +669,11 @@ abstract class TreeInfo {
}
object FreeDef {
- def unapply(tree: Tree): Option[(Tree, TermName, Tree, String)] = tree match {
- case FreeTermDef(mrRef, name, binding, origin) =>
- Some(mrRef, name, binding, origin)
- case FreeTypeDef(mrRef, name, binding, origin) =>
- Some(mrRef, name, binding, origin)
+ def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
+ case FreeTermDef(mrRef, name, binding, flags, origin) =>
+ Some(mrRef, name, binding, flags, origin)
+ case FreeTypeDef(mrRef, name, binding, flags, origin) =>
+ Some(mrRef, name, binding, flags, origin)
case _ =>
None
}
@@ -644,30 +682,31 @@ abstract class TreeInfo {
object FreeTermDef {
lazy val newFreeTermMethod = getMember(getRequiredClass("scala.reflect.api.TreeBuildUtil"), nme.newFreeTerm)
- def unapply(tree: Tree): Option[(Tree, TermName, Tree, String)] = tree match {
- case ValDef(_, name, _, Apply(Select(mrRef @ Ident(_), newFreeTerm), List(_, _, binding, Literal(Constant(origin: String)))))
+ def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
+ case ValDef(_, name, _, Apply(Select(mrRef @ Ident(_), newFreeTerm), List(_, _, binding, Literal(Constant(flags: Long)), Literal(Constant(origin: String)))))
if mrRef.name == nme.MIRROR_SHORT && newFreeTerm == newFreeTermMethod.name =>
- Some(mrRef, name, binding, origin)
+ Some(mrRef, name, binding, flags, origin)
case _ =>
None
}
}
object FreeTypeDef {
- lazy val newFreeTypeMethod = getMember(getRequiredClass("scala.reflect.api.TreeBuildUtil"), nme.newFreeType)
+ lazy val newFreeExistentialMethod = getMember(getRequiredClass("scala.reflect.api.TreeBuildUtil"), nme.newFreeType)
+ lazy val newFreeTypeMethod = getMember(getRequiredClass("scala.reflect.api.TreeBuildUtil"), nme.newFreeExistential)
- def unapply(tree: Tree): Option[(Tree, TermName, Tree, String)] = tree match {
- case ValDef(_, name, _, Apply(Select(mrRef1 @ Ident(_), newFreeType), List(_, _, value, Literal(Constant(origin: String)))))
- if mrRef1.name == nme.MIRROR_SHORT && newFreeType == newFreeTypeMethod.name =>
+ def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
+ case ValDef(_, name, _, Apply(Select(mrRef1 @ Ident(_), newFreeType), List(_, _, value, Literal(Constant(flags: Long)), Literal(Constant(origin: String)))))
+ if mrRef1.name == nme.MIRROR_SHORT && (newFreeType == newFreeTypeMethod.name || newFreeType == newFreeExistentialMethod.name) =>
value match {
- case Apply(TypeApply(Select(Select(mrRef2 @ Ident(_), typeTag), apply), List(binding)), List(Literal(Constant(null))))
+ case Apply(TypeApply(Select(Select(mrRef2 @ Ident(_), typeTag), apply), List(binding)), List(Literal(Constant(null)), _))
if mrRef2.name == nme.MIRROR_SHORT && typeTag == nme.TypeTag && apply == nme.apply =>
- Some(mrRef1, name, binding, origin)
- case Apply(TypeApply(Select(mrRef2 @ Ident(_), typeTag), List(binding)), List(Literal(Constant(null))))
+ Some(mrRef1, name, binding, flags, origin)
+ case Apply(TypeApply(Select(mrRef2 @ Ident(_), typeTag), List(binding)), List(Literal(Constant(null)), _))
if mrRef2.name == nme.MIRROR_SHORT && typeTag == nme.TypeTag =>
- Some(mrRef1, name, binding, origin)
+ Some(mrRef1, name, binding, flags, origin)
case _ =>
- throw new Error("unsupported free type def: " + showRaw(tree))
+ throw new Error("unsupported free type def: %s%n%s".format(value, showRaw(value)))
}
case _ =>
None
diff --git a/src/compiler/scala/reflect/internal/TreePrinters.scala b/src/compiler/scala/reflect/internal/TreePrinters.scala
index 9b4c18ce86..b3e4318fdc 100644
--- a/src/compiler/scala/reflect/internal/TreePrinters.scala
+++ b/src/compiler/scala/reflect/internal/TreePrinters.scala
@@ -103,6 +103,16 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
}
}
+ def printLabelParams(ps: List[Ident]) {
+ print("(")
+ printSeq(ps){printLabelParam}{print(", ")}
+ print(")")
+ }
+
+ def printLabelParam(p: Ident) {
+ print(symName(p, p.name)); printOpt(": ", TypeTree() setType p.tpe)
+ }
+
def printValueParams(ts: List[ValDef]) {
print("(")
if (!ts.isEmpty) printFlags(ts.head.mods.flags & IMPLICIT, "")
@@ -219,7 +229,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
}
case LabelDef(name, params, rhs) =>
- print(symName(tree, name)); printRow(params, "(", ",", ")"); printBlock(rhs)
+ print(symName(tree, name)); printLabelParams(params); printBlock(rhs)
case Import(expr, selectors) =>
// Is this selector remapping a name (i.e, {name1 => name2})
@@ -363,8 +373,9 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case Select(qualifier, name) =>
print(backquotedPath(qualifier), ".", symName(tree, name))
- case Ident(name) =>
- print(symName(tree, name))
+ case id @ Ident(name) =>
+ val str = symName(tree, name)
+ print( if (id.isBackquoted) "`" + str + "`" else str )
case Literal(x) =>
print(x.escapedStringValue)
diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala
index 0d7e68aee3..3e7f23800c 100644
--- a/src/compiler/scala/reflect/internal/Trees.scala
+++ b/src/compiler/scala/reflect/internal/Trees.scala
@@ -103,9 +103,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
// --- extension methods --------------------------------------------------------
- implicit def treeOps(tree: Tree): TreeOps = new TreeOps(tree)
-
- class TreeOps(tree: Tree) {
+ implicit class TreeOps(tree: Tree) {
def isErroneous = (tree.tpe ne null) && tree.tpe.isErroneous
def isTyped = (tree.tpe ne null) && !tree.tpe.isErroneous
@@ -146,14 +144,13 @@ trait Trees extends api.Trees { self: SymbolTable =>
* less than the whole tree.
*/
def summaryString: String = tree match {
- case Select(qual, name) => qual.summaryString + "." + name.decode
- case Ident(name) => name.longString
case Literal(const) => "Literal(" + const + ")"
- case t: DefTree => t.shortClass + " `" + t.name.decode + "`"
- case t: RefTree => t.shortClass + " `" + t.name.longString + "`"
+ case Ident(name) => "Ident(%s)".format(name.decode)
+ case Select(qual, name) => "Select(%s, %s)".format(qual.summaryString, name.decode)
+ case t: NameTree => t.name.longString
case t =>
t.shortClass + (
- if (t.symbol != null && t.symbol != NoSymbol) " " + t.symbol
+ if (t.symbol != null && t.symbol != NoSymbol) "(" + t.symbol + ")"
else ""
)
}
@@ -319,11 +316,19 @@ trait Trees extends api.Trees { self: SymbolTable =>
class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends Traverser {
def changeOwner(tree: Tree) = tree match {
case Return(expr) =>
- if (tree.symbol == oldowner)
- tree.symbol = newowner
+ if (tree.symbol == oldowner) {
+ // SI-5612
+ if (newowner hasTransOwner oldowner)
+ log("NOT changing owner of %s because %s is nested in %s".format(tree, newowner, oldowner))
+ else {
+ log("changing owner of %s: %s => %s".format(tree, oldowner, newowner))
+ tree.symbol = newowner
+ }
+ }
case _: DefTree | _: Function =>
- if (tree.symbol != NoSymbol && tree.symbol.owner == oldowner)
+ if (tree.symbol != NoSymbol && tree.symbol.owner == oldowner) {
tree.symbol.owner = newowner
+ }
case _ =>
}
override def traverse(tree: Tree) {
@@ -416,7 +421,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
tree match {
case Ident(name0) if tree.symbol != NoSymbol =>
treeCopy.Ident(tree, tree.symbol.name)
- case Select(qual, name0) =>
+ case Select(qual, name0) if tree.symbol != NoSymbol =>
treeCopy.Select(tree, transform(qual), tree.symbol.name)
case _ =>
super.transform(tree)
diff --git a/src/compiler/scala/reflect/internal/TypeDebugging.scala b/src/compiler/scala/reflect/internal/TypeDebugging.scala
index 54efef8142..85a1767067 100644
--- a/src/compiler/scala/reflect/internal/TypeDebugging.scala
+++ b/src/compiler/scala/reflect/internal/TypeDebugging.scala
@@ -50,33 +50,6 @@ trait TypeDebugging {
def refine(defs: Scope): String = defs.toList.mkString("{", " ;\n ", "}")
}
- def dump(tp: Type): Unit = {
- println("** " + tp + " / " + tp.getClass + " **")
- import tp._
-
- println("typeSymbol = " + typeSymbol)
- println("termSymbol = " + termSymbol)
- println("widen = " + widen)
- println("deconst = " + deconst)
- println("typeOfThis = " + typeOfThis)
- println("bounds = " + bounds)
- println("parents = " + parents)
- println("prefixChain = " + prefixChain)
- println("typeConstructor = " + typeConstructor)
- println(" .. typeConstructor.typeParams = " + typeConstructor.typeParams)
- println(" .. _.variance = " + (typeConstructor.typeParams map (_.variance)))
- println("typeArgs = " + typeArgs)
- println("resultType = " + resultType)
- println("finalResultType = " + finalResultType)
- println("paramss = " + paramss)
- println("paramTypes = " + paramTypes)
- println("typeParams = " + typeParams)
- println("boundSyms = " + boundSyms)
- println("baseTypeSeq = " + baseTypeSeq)
- println("baseClasses = " + baseClasses)
- println("toLongString = " + toLongString)
- }
-
private def debug(tp: Type): String = tp match {
case TypeRef(pre, sym, args) => debug(pre) + "." + sym.nameString + str.tparams(args)
case ThisType(sym) => sym.nameString + ".this"
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index 73a8f5c55c..a839e44182 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -6,7 +6,8 @@
package scala.reflect
package internal
-import scala.collection.{ mutable, immutable }
+import scala.collection.{ mutable, immutable, generic }
+import generic.Clearable
import scala.ref.WeakReference
import mutable.ListBuffer
import Flags._
@@ -97,7 +98,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
private final val propagateParameterBoundsToTypeVars = sys.props contains "scalac.debug.prop-constraints"
- protected val enableTypeVarExperimentals = settings.Xexperimental.value || settings.YvirtPatmat.value
+ protected val enableTypeVarExperimentals = settings.Xexperimental.value
/** Empty immutable maps to avoid allocations. */
private val emptySymMap = immutable.Map[Symbol, Symbol]()
@@ -115,7 +116,7 @@ trait Types extends api.Types { self: SymbolTable =>
protected def newUndoLog = new UndoLog
- class UndoLog {
+ class UndoLog extends Clearable {
private type UndoPairs = List[(TypeVar, TypeConstraint)]
private var log: UndoPairs = List()
@@ -139,7 +140,7 @@ trait Types extends api.Types { self: SymbolTable =>
log ::= ((tv, tv.constr.cloneInternal))
}
- private[scala] def clear() {
+ def clear() {
if (settings.debug.value)
self.log("Clearing " + log.size + " entries from the undoLog.")
@@ -264,8 +265,53 @@ trait Types extends api.Types { self: SymbolTable =>
def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name)
def declarations = decls
def typeArguments = typeArgs
- def erasure = transformedType(this)
+ def erasure = this match {
+ case ConstantType(value) => widen.erasure // [Eugene to Martin] constant types are unaffected by erasure. weird.
+ case _ =>
+ var result = transformedType(this)
+ result = result.normalize match { // necessary to deal with erasures of HK types, typeConstructor won't work
+ case PolyType(undets, underlying) => existentialAbstraction(undets, underlying) // we don't want undets in the result
+ case _ => result
+ }
+ // [Eugene] erasure screws up all ThisTypes for modules into PackageTypeRefs
+ // we need to unscrew them, or certain typechecks will fail mysteriously
+ // http://groups.google.com/group/scala-internals/browse_thread/thread/6d3277ae21b6d581
+ result = result.map(tpe => tpe match {
+ case tpe: PackageTypeRef => ThisType(tpe.sym)
+ case _ => tpe
+ })
+ result
+ }
def substituteTypes(from: List[Symbol], to: List[Type]): Type = subst(from, to)
+
+ // [Eugene] to be discussed and refactored
+ def isConcrete = {
+ def notConcreteSym(sym: Symbol) =
+ sym.isAbstractType && !sym.isExistential
+
+ def notConcreteTpe(tpe: Type): Boolean = tpe match {
+ case ThisType(_) => false
+ case SuperType(_, _) => false
+ case SingleType(pre, sym) => notConcreteSym(sym)
+ case ConstantType(_) => false
+ case TypeRef(_, sym, args) => notConcreteSym(sym) || (args exists (arg => notConcreteTpe(arg)))
+ case RefinedType(_, _) => false
+ case ExistentialType(_, _) => false
+ case AnnotatedType(_, tp, _) => notConcreteTpe(tp)
+ case _ => true
+ }
+
+ !notConcreteTpe(this)
+ }
+
+ // [Eugene] is this comprehensive?
+ // the only thingies that we want to splice are: 1) type parameters, 2) type members
+ // the thingies that we don't want to splice are: 1) concrete types (obviously), 2) existential skolems
+ // this check seems to cover them all, right?
+ // todo. after we discuss this, move the check to subclasses
+ def isSpliceable = {
+ this.isInstanceOf[TypeRef] && typeSymbol.isAbstractType && !typeSymbol.isExistential
+ }
}
/** The base class for all types */
@@ -703,16 +749,24 @@ trait Types extends api.Types { self: SymbolTable =>
def substThis(from: Symbol, to: Symbol): Type =
substThis(from, to.thisType)
- /** Performs both substThis and substSym in one traversal.
+ /** Performs both substThis and substSym, in that order.
+ *
+ * [JZ] Reverted `SubstThisAndSymMap` from 334872, which was not the same as
+ * `substThis(from, to).substSym(symsFrom, symsTo)`.
+ *
+ * `SubstThisAndSymMap` performs a breadth-first map over this type, which meant that
+ * symbol substitution occurred before `ThisType` substitution. Consequently, when substituting
+ * into a `SingleType(ThisType(from), sym)`, symbols were rebound to `from` rather than `to`.
*/
- def substThisAndSym(from: Symbol, to: Type, symsFrom: List[Symbol], symsTo: List[Symbol]): Type = {
+ def substThisAndSym(from: Symbol, to: Type, symsFrom: List[Symbol], symsTo: List[Symbol]): Type =
if (symsFrom eq symsTo) substThis(from, to)
- else new SubstThisAndSymMap(from, to, symsFrom, symsTo) apply this
- }
+ else substThis(from, to).substSym(symsFrom, symsTo)
/** Returns all parts of this type which satisfy predicate `p` */
def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this
- def withFilter(p: Type => Boolean) = new FilterTypeCollector(p) {
+ def withFilter(p: Type => Boolean) = new FilterMapForeach(p)
+
+ class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){
def foreach[U](f: Type => U): Unit = collect(Type.this) foreach f
def map[T](f: Type => T): List[T] = collect(Type.this) map f
}
@@ -915,14 +969,10 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def directObjectString = safeToString
- /** A test whether a type contains any unification type variables. */
+ /** A test whether a type contains any unification type variables.
+ * Overridden with custom logic except where trivially true.
+ */
def isGround: Boolean = this match {
- case TypeVar(_, constr) =>
- constr.instValid && constr.inst.isGround
- case TypeRef(pre, sym, args) =>
- sym.isPackageClass || pre.isGround && (args forall (_.isGround))
- case SingleType(pre, sym) =>
- sym.isPackageClass || pre.isGround
case ThisType(_) | NoPrefix | WildcardType | NoType | ErrorType | ConstantType(_) =>
true
case _ =>
@@ -1258,6 +1308,8 @@ trait Types extends api.Types { self: SymbolTable =>
*/
abstract case class SingleType(pre: Type, sym: Symbol) extends SingletonType {
override val isTrivial: Boolean = pre.isTrivial
+ override def isGround = sym.isPackageClass || pre.isGround
+
// override def isNullable = underlying.isNullable
override def isNotNull = underlying.isNotNull
private[reflect] var underlyingCache: Type = NoType
@@ -1313,7 +1365,8 @@ trait Types extends api.Types { self: SymbolTable =>
if (period != currentPeriod) {
tpe.underlyingPeriod = currentPeriod
if (!isValid(period)) {
- tpe.underlyingCache = tpe.pre.memberType(tpe.sym).resultType;
+ // [Eugene to Paul] needs review
+ tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(RootClass) else tpe.pre.memberType(tpe.sym).resultType;
assert(tpe.underlyingCache ne tpe, tpe)
}
}
@@ -1463,7 +1516,8 @@ trait Types extends api.Types { self: SymbolTable =>
case tv: TypeVar => tvs += tv
case _ =>
}
- val varToParamMap: Map[Type, Symbol] = tvs map (tv => tv -> tv.origin.typeSymbol.cloneSymbol) toMap
+ val varToParamMap: Map[Type, Symbol] =
+ mapFrom[TypeVar, Type, Symbol](tvs.toList)(_.origin.typeSymbol.cloneSymbol)
val paramToVarMap = varToParamMap map (_.swap)
val varToParam = new TypeMap {
def apply(tp: Type) = varToParamMap get tp match {
@@ -1562,12 +1616,26 @@ trait Types extends api.Types { self: SymbolTable =>
override def typeConstructor =
copyRefinedType(this, parents map (_.typeConstructor), decls)
- /* MO to AM: This is probably not correct
- * If they are several higher-kinded parents with different bounds we need
- * to take the intersection of their bounds
- */
- override def normalize = {
- if (isHigherKinded) {
+ final override def normalize: Type =
+ if (phase.erasedTypes) normalizeImpl
+ else {
+ if (normalized eq null) normalized = normalizeImpl
+ normalized
+ }
+
+ private var normalized: Type = _
+ private def normalizeImpl = {
+ // TODO see comments around def intersectionType and def merge
+ def flatten(tps: List[Type]): List[Type] = tps flatMap { case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) case tp => List(tp) }
+ val flattened = flatten(parents).distinct
+ if (decls.isEmpty && flattened.tail.isEmpty) {
+ flattened.head
+ } else if (flattened != parents) {
+ refinedType(flattened, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition)
+ } else if (isHigherKinded) {
+ // MO to AM: This is probably not correct
+ // If they are several higher-kinded parents with different bounds we need
+ // to take the intersection of their bounds
typeFun(
typeParams,
RefinedType(
@@ -1577,8 +1645,7 @@ trait Types extends api.Types { self: SymbolTable =>
},
decls,
typeSymbol))
- }
- else super.normalize
+ } else super.normalize
}
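A toy, self-contained mirror of the parent-flattening step introduced in normalizeImpl above (the Tp ADT below is invented for illustration and is not the compiler's Type hierarchy): a refinement with no declarations dissolves into its parents.

object FlattenParentsSketch extends App {
  sealed trait Tp
  case class Named(name: String) extends Tp
  case class Refined(parents: List[Tp], declsEmpty: Boolean) extends Tp

  // Same shape as the nested flatten in normalizeImpl: empty refinements contribute their parents directly.
  def flatten(tps: List[Tp]): List[Tp] = tps flatMap {
    case Refined(parents, true) => flatten(parents)
    case tp                     => List(tp)
  }

  val parents = List(Refined(List(Named("A"), Refined(List(Named("B")), true)), true), Named("C"))
  println(flatten(parents).distinct) // List(Named(A), Named(B), Named(C))
}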
/** A refined type P1 with ... with Pn { decls } is volatile if
@@ -1912,7 +1979,7 @@ trait Types extends api.Types { self: SymbolTable =>
require(sym.isPackageClass, sym)
override protected def finishPrefix(rest: String) = packagePrefix + rest
}
- class RefinementTypeRef(sym0: Symbol) extends NoArgsTypeRef(NoType, sym0) with ClassTypeRef {
+ class RefinementTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
require(sym.isRefinementClass, sym)
// I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
@@ -2053,7 +2120,7 @@ trait Types extends api.Types { self: SymbolTable =>
// TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
case _ => sym
}
-
+ override def kind = "AliasTypeRef"
}
trait AbstractTypeRef extends NonClassTypeRef {
@@ -2106,6 +2173,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def bounds = thisInfo.bounds
// def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies)
override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this
+ override def kind = "AbstractTypeRef"
}
/** A class for named types of the form
@@ -2143,6 +2211,11 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
+ override def isGround = (
+ sym.isPackageClass
+ || pre.isGround && args.forall(_.isGround)
+ )
+
def etaExpand: Type = {
// must initialise symbol, see test/files/pos/ticket0137.scala
val tpars = initializedTypeParams
@@ -2235,10 +2308,10 @@ trait Types extends api.Types { self: SymbolTable =>
parentsString(thisInfo.parents) + refinementString
else rest
)
- private def customToString = this match {
- case TypeRef(_, RepeatedParamClass, arg :: _) => arg + "*"
- case TypeRef(_, ByNameParamClass, arg :: _) => "=> " + arg
- case _ =>
+ private def customToString = sym match {
+ case RepeatedParamClass => args.head + "*"
+ case ByNameParamClass => "=> " + args.head
+ case _ =>
def targs = normalize.typeArgs
if (isFunctionType(this)) {
@@ -2275,7 +2348,7 @@ trait Types extends api.Types { self: SymbolTable =>
else if (sym.isPackageClass || sym.isPackageObjectOrClass)
sym.skipPackageObject.fullName + "."
else if (isStable && nme.isSingletonName(sym.name))
- nme.dropSingletonName(sym.name) + "."
+ tpnme.dropSingletonName(sym.name) + "."
else
super.prefixString
)
@@ -2292,7 +2365,7 @@ trait Types extends api.Types { self: SymbolTable =>
else {
if (sym.isAliasType) new NoArgsTypeRef(pre, sym) with AliasTypeRef
else if (sym.isAbstractType) new NoArgsTypeRef(pre, sym) with AbstractTypeRef
- else if (sym.isRefinementClass) new RefinementTypeRef(sym)
+ else if (sym.isRefinementClass) new RefinementTypeRef(pre, sym)
else if (sym.isPackageClass) new PackageTypeRef(pre, sym)
else if (sym.isModuleClass) new ModuleTypeRef(pre, sym)
else new NoArgsTypeRef(pre, sym) with ClassTypeRef
@@ -2495,8 +2568,8 @@ trait Types extends api.Types { self: SymbolTable =>
override def prefix = maybeRewrap(underlying.prefix)
override def typeArgs = underlying.typeArgs map maybeRewrap
override def params = underlying.params mapConserve { param =>
- val tpe1 = rewrap(param.tpe)
- if (tpe1 eq param.tpe) param else param.cloneSymbol.setInfo(tpe1)
+ val tpe1 = rewrap(param.tpeHK)
+ if (tpe1 eq param.tpeHK) param else param.cloneSymbol.setInfo(tpe1)
}
override def paramTypes = underlying.paramTypes map maybeRewrap
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = {
@@ -2515,39 +2588,46 @@ trait Types extends api.Types { self: SymbolTable =>
override def skolemizeExistential(owner: Symbol, origin: AnyRef) =
deriveType(quantified, tparam => (owner orElse tparam.owner).newExistentialSkolem(tparam, origin))(underlying)
- private def wildcardArgsString(available: Set[Symbol], args: List[Type]): List[String] = args match {
- case TypeRef(_, sym, _) :: args1 if (available contains sym) =>
- ("_"+sym.infoString(sym.info)) :: wildcardArgsString(available - sym, args1)
- case arg :: args1 if !(quantified exists (arg contains _)) =>
- arg.toString :: wildcardArgsString(available, args1)
- case _ =>
- List()
+ private def wildcardArgsString(qset: Set[Symbol], args: List[Type]): List[String] = args map {
+ case TypeRef(_, sym, _) if (qset contains sym) =>
+ "_"+sym.infoString(sym.info)
+ case arg =>
+ arg.toString
}
+
/** An existential can only be printed with wildcards if:
* - the underlying type is a typeref
- * - where there is a 1-to-1 correspondence between underlying's typeargs and quantified
- * - and none of the existential parameters is referenced from anywhere else in the type
- * - and none of the existential parameters are singleton types
+ * - every quantified variable appears at most once as a type argument and
+ * nowhere inside a type argument
+ * - no quantified type argument contains a quantified variable in its bound
+ * - the typeref's symbol is not itself quantified
+ * - the prefix is not quantified
*/
- private def isRepresentableWithWildcards = !settings.debug.value && {
+ def isRepresentableWithWildcards = {
val qset = quantified.toSet
- !qset.exists(_.isSingletonExistential) && (underlying match {
- case TypeRef(_, sym, args) =>
- sameLength(args, quantified) && {
- args forall { arg =>
- qset(arg.typeSymbol) && !qset.exists(arg.typeSymbol.info.bounds contains _)
- }
+ underlying match {
+ case TypeRef(pre, sym, args) =>
+ def isQuantified(tpe: Type): Boolean = {
+ (tpe exists (t => qset contains t.typeSymbol)) ||
+ tpe.typeSymbol.isRefinementClass && (tpe.parents exists isQuantified)
}
+ val (wildcardArgs, otherArgs) = args partition (arg => qset contains arg.typeSymbol)
+ wildcardArgs.distinct == wildcardArgs &&
+ !(otherArgs exists (arg => isQuantified(arg))) &&
+ !(wildcardArgs exists (arg => isQuantified(arg.typeSymbol.info.bounds))) &&
+ !(qset contains sym) &&
+ !isQuantified(pre)
case _ => false
- })
}
+ }
+
override def safeToString: String = {
def clauses = {
val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
if (settings.explaintypes.value) "(" + str + ")" else str
}
underlying match {
- case TypeRef(pre, sym, args) if isRepresentableWithWildcards =>
+ case TypeRef(pre, sym, args) if !settings.debug.value && isRepresentableWithWildcards =>
"" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]")
case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
"(" + underlying + ")" + clauses
@@ -2589,6 +2669,10 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "OverloadedType"
}
+ def overloadedType(pre: Type, alternatives: List[Symbol]): Type =
+ if (alternatives.tail.isEmpty) pre memberType alternatives.head
+ else OverloadedType(pre, alternatives)
+
/** A class remembering a type instantiation for a set of overloaded
* polymorphic symbols.
* Not used after phase `typer`.
@@ -2675,21 +2759,35 @@ trait Types extends api.Types { self: SymbolTable =>
else new TypeConstraint
}
def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
+ def untouchable(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = true)
+ def apply(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = false)
def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil)
- def apply(tparam: Symbol): TypeVar = apply(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams)
+ def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]): TypeVar =
+ createTypeVar(origin, constr, args, params, untouchable = false)
/** This is the only place TypeVars should be instantiated.
*/
- def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]): TypeVar = {
+ private def createTypeVar(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol], untouchable: Boolean): TypeVar = {
val tv = (
- if (args.isEmpty && params.isEmpty) new TypeVar(origin, constr)
- else if (args.size == params.size) new AppliedTypeVar(origin, constr, params zip args)
- else if (args.isEmpty) new HKTypeVar(origin, constr, params)
+ if (args.isEmpty && params.isEmpty) {
+ if (untouchable) new TypeVar(origin, constr) with UntouchableTypeVar
+ else new TypeVar(origin, constr)
+ }
+ else if (args.size == params.size) {
+ if (untouchable) new AppliedTypeVar(origin, constr, params zip args) with UntouchableTypeVar
+ else new AppliedTypeVar(origin, constr, params zip args)
+ }
+ else if (args.isEmpty) {
+ if (untouchable) new HKTypeVar(origin, constr, params) with UntouchableTypeVar
+ else new HKTypeVar(origin, constr, params)
+ }
else throw new Error("Invalid TypeVar construction: " + ((origin, constr, args, params)))
)
trace("create", "In " + tv.originLocation)(tv)
}
+ private def createTypeVar(tparam: Symbol, untouchable: Boolean): TypeVar =
+ createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable)
}
/** Repack existential types, otherwise they sometimes get unpacked in the
@@ -2738,6 +2836,23 @@ trait Types extends api.Types { self: SymbolTable =>
)
}
+ trait UntouchableTypeVar extends TypeVar {
+ override def untouchable = true
+ override def isGround = true
+ override def registerTypeEquality(tp: Type, typeVarLHS: Boolean) = tp match {
+ case t: TypeVar if !t.untouchable =>
+ t.registerTypeEquality(this, !typeVarLHS)
+ case _ =>
+ super.registerTypeEquality(tp, typeVarLHS)
+ }
+ override def registerBound(tp: Type, isLowerBound: Boolean, isNumericBound: Boolean = false): Boolean = tp match {
+ case t: TypeVar if !t.untouchable =>
+ t.registerBound(this, !isLowerBound, isNumericBound)
+ case _ =>
+ super.registerBound(tp, isLowerBound, isNumericBound)
+ }
+ }
+
/** A class representing a type variable: not used after phase `typer`.
*
* A higher-kinded TypeVar has params (Symbols) and typeArgs (Types).
@@ -2751,6 +2866,7 @@ trait Types extends api.Types { self: SymbolTable =>
val origin: Type,
val constr0: TypeConstraint
) extends Type {
+ def untouchable = false // by other typevars
override def params: List[Symbol] = Nil
override def typeArgs: List[Type] = Nil
override def isHigherKinded = false
@@ -2763,6 +2879,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
var constr = constr0
def instValid = constr.instValid
+ override def isGround = instValid && constr.inst.isGround
/** The variable's skolemization level */
val level = skolemizationLevel
@@ -2805,6 +2922,7 @@ trait Types extends api.Types { self: SymbolTable =>
// existential.
// were we compared to skolems at a higher skolemizationLevel?
// EXPERIMENTAL: value will not be considered unless enableTypeVarExperimentals is true
+ // see SI-5729 for why this is still experimental
private var encounteredHigherLevel = false
private def shouldRepackType = enableTypeVarExperimentals && encounteredHigherLevel
@@ -2931,14 +3049,13 @@ trait Types extends api.Types { self: SymbolTable =>
// would be pointless. In this case, each check we perform causes us to lose specificity: in
// the end the best we'll do is the least specific type we tested against, since the typevar
// does not see these checks as "probes" but as requirements to fulfill.
- // TODO: the `suspended` flag can be used to poke around with leaving a trace
+ // TODO: can the `suspended` flag be used to poke around without leaving a trace?
//
// So the strategy used here is to test first the type, then the direct parents, and finally
// to fall back on the individual base types. This warrants eventual re-examination.
// AM: I think we could use the `suspended` flag to avoid side-effecting during unification
-
- if (suspended) // constraint accumulation is disabled
+ if (suspended) // constraint accumulation is disabled
checkSubtype(tp, origin)
else if (constr.instValid) // type var is already set
checkSubtype(tp, constr.inst)
@@ -2957,7 +3074,8 @@ trait Types extends api.Types { self: SymbolTable =>
}
def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = {
- //println("regTypeEq: "+(safeToString, debugString(tp), typeVarLHS)) //@MDEBUG
+// println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (constr.instValid) "IV" else "")) //@MDEBUG
+// println("constr: "+ constr)
def checkIsSameType(tp: Type) =
if(typeVarLHS) constr.inst =:= tp
else tp =:= constr.inst
@@ -3014,10 +3132,7 @@ trait Types extends api.Types { self: SymbolTable =>
tparams map (_.defString) mkString("[", ",", "]")
case _ => ""
}
- def originName = {
- val name = origin.typeSymbolDirect.decodedName
- if (name contains "_$") origin.typeSymbolDirect.decodedName else name
- }
+ def originName = origin.typeSymbolDirect.decodedName
def originLocation = {
val sym = origin.typeSymbolDirect
val encl = sym.owner.logicallyEnclosingMember
@@ -3035,8 +3150,8 @@ trait Types extends api.Types { self: SymbolTable =>
protected def typeVarString = originName
override def safeToString = (
if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>"
- else if (constr.inst ne NoType) "" + constr.inst
- else "?" + levelString + originName
+ else if (constr.inst ne NoType) "=?" + constr.inst
+ else (if(untouchable) "!?" else "?") + levelString + originName
)
override def kind = "TypeVar"
@@ -3169,8 +3284,10 @@ trait Types extends api.Types { self: SymbolTable =>
final class UniqueErasedValueType(sym: Symbol) extends ErasedValueType(sym) with UniqueType
object ErasedValueType {
- def apply(sym: Symbol): Type =
+ def apply(sym: Symbol): Type = {
+ assert(sym ne NoSymbol, "ErasedValueType cannot be NoSymbol")
unique(new UniqueErasedValueType(sym))
+ }
}
/** A class representing an as-yet unevaluated type.
@@ -3228,7 +3345,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (phase.erasedTypes)
if (parents.isEmpty) ObjectClass.tpe else parents.head
else {
- val clazz = owner.newRefinementClass(NoPosition)
+ val clazz = owner.newRefinementClass(pos) // TODO: why were we passing in NoPosition instead of pos?
val result = RefinedType(parents, decls, clazz)
clazz.setInfo(result)
result
@@ -3382,7 +3499,7 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) =>
val eparams = typeParamsToExistentials(sym)
val bounds = args map (TypeBounds upper _)
- (eparams, bounds).zipped foreach (_ setInfo _)
+ foreach2(eparams, bounds)(_ setInfo _)
newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe)))
case _ =>
@@ -3564,7 +3681,7 @@ trait Types extends api.Types { self: SymbolTable =>
else owner.newValueParameter(name.toTermName)
paramStack = newParams :: paramStack
try {
- (newParams, ptypes).zipped foreach ((p, t) => p setInfo this(t))
+ foreach2(newParams, ptypes)((p, t) => p setInfo this(t))
val restpe1 = this(restpe)
if (isType) PolyType(newParams, restpe1)
else MethodType(newParams, restpe1)
@@ -4207,7 +4324,7 @@ trait Types extends api.Types { self: SymbolTable =>
def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
val symclazz = sym.owner
- if (symclazz == clazz && !pre.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
+ if (symclazz == clazz && !pre.widen.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
// have to deconst because it may be a Class[T].
pre.baseType(symclazz).deconst match {
case TypeRef(_, basesym, baseargs) =>
@@ -4407,13 +4524,6 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => mapOver(tp)
}
}
- class SubstThisAndSymMap(fromThis: Symbol, toThis: Type, fromSyms: List[Symbol], toSyms: List[Symbol])
- extends SubstSymMap(fromSyms, toSyms) {
- override def apply(tp: Type): Type = tp match {
- case ThisType(sym) if sym == fromThis => apply(toThis)
- case _ => super.apply(tp)
- }
- }
class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
def apply(tp: Type): Type = try {
@@ -4733,16 +4843,21 @@ trait Types extends api.Types { self: SymbolTable =>
val sym1 = adaptToNewRun(sym.owner.thisType, sym)
if (sym1 == sym) tp else ThisType(sym1)
} catch {
- case ex: MissingTypeControl =>
+ case ex: MissingTypeControl =>
tp
}
case SingleType(pre, sym) =>
if (sym.isPackage) tp
else {
val pre1 = this(pre)
- val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym)) tp
- else singleType(pre1, sym1)
+ try {
+ val sym1 = adaptToNewRun(pre1, sym)
+ if ((pre1 eq pre) && (sym1 eq sym)) tp
+ else singleType(pre1, sym1)
+ } catch {
+ case _: MissingTypeControl =>
+ tp
+ }
}
case TypeRef(pre, sym, args) =>
if (sym.isPackageClass) tp
@@ -4807,7 +4922,23 @@ trait Types extends api.Types { self: SymbolTable =>
override def hashCode = tp1.hashCode * 41 + tp2.hashCode
override def equals(other: Any) = other match {
case stp: SubTypePair =>
- (tp1 =:= stp.tp1) && (tp2 =:= stp.tp2)
+ // suspend TypeVars in types compared by =:=,
+ // since we don't want to mutate them simply to check whether a subtype test is pending
+ // in addition to making subtyping "more correct" for type vars,
+ // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion)
+ // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold)
+ @inline def suspend(tp: Type) =
+ if (tp.isGround) null else suspendTypeVarsInType(tp)
+ @inline def revive(suspension: List[TypeVar]) =
+ if (suspension ne null) suspension foreach (_.suspended = false)
+
+ val suspensions = Array(tp1, stp.tp1, tp2, stp.tp2) map suspend
+
+ val sameTypes = (tp1 =:= stp.tp1) && (tp2 =:= stp.tp2)
+
+ suspensions foreach revive
+
+ sameTypes
case _ =>
false
}
@@ -5605,14 +5736,15 @@ trait Types extends api.Types { self: SymbolTable =>
val info1 = tp1.memberInfo(sym1)
val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
//System.out.println("specializes "+tp1+"."+sym1+":"+info1+sym1.locationString+" AND "+tp2+"."+sym2+":"+info2)//DEBUG
- sym2.isTerm && (info1 <:< info2) /*&& (!sym2.isStable || sym1.isStable) */ ||
- sym2.isAbstractType && {
- val memberTp1 = tp1.memberType(sym1)
- // println("kinds conform? "+(memberTp1, tp1, sym2, kindsConform(List(sym2), List(memberTp1), tp2, sym2.owner)))
- info2.bounds.containsType(memberTp1) &&
- kindsConform(List(sym2), List(memberTp1), tp1, sym1.owner)
- } ||
- sym2.isAliasType && tp2.memberType(sym2).substThis(tp2.typeSymbol, tp1) =:= tp1.memberType(sym1) //@MAT ok
+ ( sym2.isTerm && (info1 <:< info2) && (!sym2.isStable || sym1.isStable)
+ || sym2.isAbstractType && {
+ val memberTp1 = tp1.memberType(sym1)
+ // println("kinds conform? "+(memberTp1, tp1, sym2, kindsConform(List(sym2), List(memberTp1), tp2, sym2.owner)))
+ info2.bounds.containsType(memberTp1) &&
+ kindsConform(List(sym2), List(memberTp1), tp1, sym1.owner)
+ }
+ || sym2.isAliasType && tp2.memberType(sym2).substThis(tp2.typeSymbol, tp1) =:= tp1.memberType(sym1) //@MAT ok
+ )
}
/** A function implementing `tp1` matches `tp2`. */
@@ -5787,8 +5919,8 @@ trait Types extends api.Types { self: SymbolTable =>
foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
val ok = (tparam2 != tparam) && (
(bound contains tparam2)
- || up && (tparam2.info.bounds.lo =:= tparam.tpe)
- || !up && (tparam2.info.bounds.hi =:= tparam.tpe)
+ || up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
+ || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
)
if (ok) {
if (tvar2.constr.inst eq null) cyclic = true
@@ -5802,7 +5934,7 @@ trait Types extends api.Types { self: SymbolTable =>
for (tparam2 <- tparams)
tparam2.info.bounds.lo.dealias match {
case TypeRef(_, `tparam`, _) =>
- tvar addHiBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
case _ =>
}
} else {
@@ -5812,7 +5944,7 @@ trait Types extends api.Types { self: SymbolTable =>
for (tparam2 <- tparams)
tparam2.info.bounds.hi.dealias match {
case TypeRef(_, `tparam`, _) =>
- tvar addLoBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
case _ =>
}
}
@@ -6044,8 +6176,9 @@ trait Types extends api.Types { self: SymbolTable =>
def stripType(tp: Type) = tp match {
case ExistentialType(_, res) =>
res
- case TypeVar(_, constr) =>
- if (constr.instValid) constr.inst
+ case tv@TypeVar(_, constr) =>
+ if (tv.instValid) constr.inst
+ else if (tv.untouchable) tv
else abort("trying to do lub/glb of typevar "+tp)
case t => t
}
@@ -6438,47 +6571,50 @@ trait Types extends api.Types { self: SymbolTable =>
else Some(typeRef(pre, sym, List(lub(args))))
}
}
- else {
- val args = map2(sym.typeParams, argss.transpose) { (tparam, as) =>
- if (depth == 0) {
- if (tparam.variance == variance) {
- // Take the intersection of the upper bounds of the type parameters
- // rather than falling all the way back to "Any", otherwise we end up not
- // conforming to bounds.
- val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
- if (bounds0.isEmpty) AnyClass.tpe
- else intersectionType(bounds0 map (b => b.asSeenFrom(tps.head, sym)))
+ else transposeSafe(argss) match {
+ case None =>
+ // transpose freaked out because of irregular argss
+ // catching just in case (shouldn't happen, but also doesn't cost us)
+ // [JZ] It happens: see SI-5683.
+ debuglog("transposed irregular matrix!?" +(tps, argss))
+ None
+ case Some(argsst) =>
+ val args = map2(sym.typeParams, argsst) { (tparam, as) =>
+ if (depth == 0) {
+ if (tparam.variance == variance) {
+ // Take the intersection of the upper bounds of the type parameters
+ // rather than falling all the way back to "Any", otherwise we end up not
+ // conforming to bounds.
+ val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
+ if (bounds0.isEmpty) AnyClass.tpe
+ else intersectionType(bounds0 map (b => b.asSeenFrom(tps.head, sym)))
+ }
+ else if (tparam.variance == -variance) NothingClass.tpe
+ else NoType
}
- else if (tparam.variance == -variance) NothingClass.tpe
- else NoType
- }
- else {
- if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == -variance) glb(as, decr(depth))
else {
- val l = lub(as, decr(depth))
- val g = glb(as, decr(depth))
- if (l <:< g) l
+ if (tparam.variance == variance) lub(as, decr(depth))
+ else if (tparam.variance == -variance) glb(as, decr(depth))
+ else {
+ val l = lub(as, decr(depth))
+ val g = glb(as, decr(depth))
+ if (l <:< g) l
else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
// just err on the conservative side, i.e. with a bound that is too high.
// if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251
- val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
- capturedParams += qvar
- qvar.tpe
+ val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
+ capturedParams += qvar
+ qvar.tpe
+ }
}
}
}
- }
- if (args contains NoType) None
- else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
+ if (args contains NoType) None
+ else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
}
} catch {
case ex: MalformedType => None
- case ex: IndexOutOfBoundsException => // transpose freaked out because of irregular argss
- // catching just in case (shouldn't happen, but also doesn't cost us)
- debuglog("transposed irregular matrix!?"+ (tps, argss))
- None
}
case SingleType(_, sym) :: rest =>
val pres = tps map (_.prefix)
@@ -6615,6 +6751,11 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeRef(_, sym, _) => !isPrimitiveValueClass(sym)
case _ => false
}
+ // Add serializable to a list of parents, unless one of them already is
+ def addSerializable(ps: Type*): List[Type] = (
+ if (ps exists (_ <:< SerializableClass.tpe)) ps.toList
+ else (ps :+ SerializableClass.tpe).toList
+ )
def objToAny(tp: Type): Type =
if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
diff --git a/src/compiler/scala/reflect/internal/pickling/UnPickler.scala b/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
index f89aa9bf5c..fd3fac1b37 100644
--- a/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
@@ -447,7 +447,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
private def readArrayAnnot() = {
readByte() // skip the `annotargarray` tag
val end = readNat() + readIndex
- until(end, () => readClassfileAnnotArg(readNat())).toArray(classfileAnnotArgManifest)
+ until(end, () => readClassfileAnnotArg(readNat())).toArray(classfileAnnotArgTag)
}
protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
case ANNOTINFO => NestedAnnotArg(at(i, readAnnotation))
@@ -816,16 +816,10 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
protected def errorMissingRequirement(name: Name, owner: Symbol): Symbol =
- missingHook(owner, name) orElse {
- val what = if (name.isTypeName) "type" else "value"
- MissingRequirementError.notFound(
- "while unpickling %s, reference %s %s of %s/%s/%s".format(
- filename,
- what, name.decode, owner.tpe.widen,
- owner.tpe.typeSymbol.ownerChain,
- owner.info.members.mkString("\n ", "\n ", ""))
- )
- }
+ missingHook(owner, name) orElse MissingRequirementError.notFound(
+ "bad reference while unpickling %s: %s not found in %s".format(
+ filename, name.longString, owner.tpe.widen)
+ )
def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type) {} // can't do it; need a compiler for that.
@@ -849,7 +843,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
atPhase(p) (sym setInfo tp)
if (currentRunId != definedAtRunId)
sym.setInfo(adaptToNewRunMap(tp))
- }
+ }
catch {
case e: MissingRequirementError => throw toTypeError(e)
}
diff --git a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala
index b556c33aba..8640a23aa7 100644
--- a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala
@@ -43,5 +43,6 @@ abstract class MutableSettings extends AbsSettings {
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
def Xexperimental: BooleanSetting
- def YvirtPatmat: BooleanSetting
+ def XoldPatmat: BooleanSetting
+ def XnoPatmatAnalysis: BooleanSetting
}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/internal/transform/Erasure.scala b/src/compiler/scala/reflect/internal/transform/Erasure.scala
index 1b323f839b..5beec70d62 100644
--- a/src/compiler/scala/reflect/internal/transform/Erasure.scala
+++ b/src/compiler/scala/reflect/internal/transform/Erasure.scala
@@ -17,7 +17,14 @@ trait Erasure {
* with primitive as well as class types)?.
*/
private def genericCore(tp: Type): Type = tp.normalize match {
- case TypeRef(_, sym, _) if sym.isAbstractType && !sym.owner.isJavaDefined =>
+ /* A Java Array<T> is erased to Array[Object] (T can only be a reference type), where as a Scala Array[T] is
+ * erased to Object. However, there is only symbol for the Array class. So to make the distinction between
+ * a Java and a Scala array, we check if the owner of T comes from a Java class.
+ * This however caused issue SI-5654. The additional test for EXSITENTIAL fixes it, see the ticket comments.
+ * In short, members of an existential type (e.g. `T` in `forSome { type T }`) can have pretty arbitrary
+ * owners (e.g. when computing lubs, <root> is used). All packageClass symbols have `isJavaDefined == true`.
+ */
+ case TypeRef(_, sym, _) if sym.isAbstractType && (!sym.owner.isJavaDefined || sym.hasFlag(Flags.EXISTENTIAL)) =>
tp
case ExistentialType(tparams, restp) =>
genericCore(restp)
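A small user-level check of the erasure difference described in the new comment (the method and object names are invented; this is a sketch, not compiler code): a Scala Array[T] parameter with unbounded T erases to Object, because T might be a primitive, while Java's T[] erases to Object[].

object ArrayErasureDemo extends App {
  def headOf[T](xs: Array[T]): T = xs(0)

  // Reflection sees the erased signature: the Array[T] parameter has become Object.
  val m = getClass.getMethods.find(_.getName == "headOf").get
  println(m.getParameterTypes.toList) // List(class java.lang.Object)
}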
diff --git a/src/compiler/scala/reflect/internal/util/Collections.scala b/src/compiler/scala/reflect/internal/util/Collections.scala
index 9dbf1adeef..1f8eb15c90 100644
--- a/src/compiler/scala/reflect/internal/util/Collections.scala
+++ b/src/compiler/scala/reflect/internal/util/Collections.scala
@@ -24,18 +24,21 @@ trait Collections {
)
/** All these mm methods are "deep map" style methods for
- * mapping etc. on a list of lists.
+ * mapping etc. on a list of lists while avoiding unnecessary
+ * intermediate structures like those created via flatten.
*/
final def mexists[A](xss: List[List[A]])(p: A => Boolean) =
xss exists (_ exists p)
+ final def mforall[A](xss: List[List[A]])(p: A => Boolean) =
+ xss forall (_ forall p)
final def mmap[A, B](xss: List[List[A]])(f: A => B) =
xss map (_ map f)
final def mforeach[A](xss: List[List[A]])(f: A => Unit) =
xss foreach (_ foreach f)
final def mfind[A](xss: List[List[A]])(p: A => Boolean): Option[A] = {
- for (xs <- xss; x <- xs)
- if (p(x)) return Some(x)
- None
+ var res: Option[A] = null
+ mforeach(xss)(x => if ((res eq null) && p(x)) res = Some(x))
+ if (res eq null) None else res
}
final def mfilter[A](xss: List[List[A]])(p: A => Boolean) =
for (xs <- xss; x <- xs; if p(x)) yield x
@@ -66,6 +69,31 @@ trait Collections {
}
lb.toList
}
+
+ final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Traversable[B]]): List[B] = {
+ val lb = new ListBuffer[B]
+ for (x <- elems ; if pf isDefinedAt x)
+ lb ++= pf(x)
+
+ lb.toList
+ }
+
+ final def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = {
+ val buf = new ListBuffer[A]
+ val seen = mutable.Set[B]()
+ xs foreach { x =>
+ val y = f(x)
+ if (!seen(y)) {
+ buf += x
+ seen += y
+ }
+ }
+ buf.toList
+ }
+
+ @tailrec final def flattensToEmpty(xss: Seq[Seq[_]]): Boolean = {
+ xss.isEmpty || xss.head.isEmpty && flattensToEmpty(xss.tail)
+ }
final def foreachWithIndex[A, B](xs: List[A])(f: (A, Int) => Unit) {
var index = 0
@@ -82,6 +110,10 @@ trait Collections {
xs find p getOrElse orElse
}
+ final def mapFrom[A, A1 >: A, B](xs: List[A])(f: A => B): Map[A1, B] = {
+ Map[A1, B](xs map (x => (x, f(x))): _*)
+ }
+
final def mapWithIndex[A, B](xs: List[A])(f: (A, Int) => B): List[B] = {
val lb = new ListBuffer[B]
var index = 0
@@ -169,6 +201,12 @@ trait Collections {
}
true
}
+
+ final def transposeSafe[A](ass: List[List[A]]): Option[List[List[A]]] = try {
+ Some(ass.transpose)
+ } catch {
+ case _: IllegalArgumentException => None
+ }
}
object Collections extends Collections { }
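[Editor's note — illustrative sketch, not part of the patch.] A usage sketch of the helpers added above, re-implemented standalone so it runs outside the compiler; the names and intended behaviour mirror Collections.scala, but the bodies here are the editor's simplifications.

    object CollectionsSketch {
      def mforall[A](xss: List[List[A]])(p: A => Boolean) = xss forall (_ forall p)
      def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = {
        val seen = scala.collection.mutable.Set[B]()
        xs filter (x => seen.add(f(x)))      // keep x only the first time f(x) is seen
      }
      def flattensToEmpty(xss: Seq[Seq[_]]): Boolean = xss forall (_.isEmpty)
      def transposeSafe[A](ass: List[List[A]]): Option[List[List[A]]] =
        try Some(ass.transpose) catch { case _: IllegalArgumentException => None }

      def main(args: Array[String]): Unit = {
        println(mforall(List(List(1, 2), List(3)))(_ > 0))         // true
        println(distinctBy(List("a", "bb", "cc", "d"))(_.length))  // List(a, bb)
        println(flattensToEmpty(Seq(Seq(), Seq())))                // true
        println(transposeSafe(List(List(1, 2), List(3))))          // None (ragged input)
      }
    }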
diff --git a/src/compiler/scala/reflect/internal/util/Origins.scala b/src/compiler/scala/reflect/internal/util/Origins.scala
index b9985c8f50..0bd5ad55ca 100644
--- a/src/compiler/scala/reflect/internal/util/Origins.scala
+++ b/src/compiler/scala/reflect/internal/util/Origins.scala
@@ -15,7 +15,7 @@ import Origins._
* You could do this:
*
* {{{
- * private lazy val origins = Origins[SymbolTable]("phase_=")
+ * private lazy val origins = Origins("arbitraryTag")
* // Commented out original enclosed for contrast
* // final def phase_=(p: Phase): Unit = {
* final def phase_=(p: Phase): Unit = origins {
@@ -23,7 +23,7 @@ import Origins._
*
* And that's it. When the JVM exits it would issue a report something like this:
{{{
- >> Origins scala.tools.nsc.symtab.SymbolTable.phase_= logged 145585 calls from 51 distinguished sources.
+ >> Origins tag 'arbitraryTag' logged 145585 calls from 51 distinguished sources.
71114 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:862)
16584 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:757)
@@ -37,29 +37,21 @@ import Origins._
*/
abstract class Origins {
type Rep
+ type StackSlice = Array[StackTraceElement]
+
+ def tag: String
+ def isCutoff(el: StackTraceElement): Boolean
def newRep(xs: StackSlice): Rep
def repString(rep: Rep): String
- def originClass: String
-
- private var _tag: String = null
- def tag: String = _tag
- def setTag(tag: String): this.type = {
- _tag = tag
- this
- }
private val origins = new mutable.HashMap[Rep, Int] withDefaultValue 0
private def add(xs: Rep) = origins(xs) += 1
private def total = origins.values.foldLeft(0L)(_ + _)
- // We find the right line by dropping any from around here and any
- // from the method's origin class.
- private def dropStackElement(cn: String) =
- (cn startsWith OriginsName) || (cn startsWith originClass)
-
// Create a stack and whittle it down to the interesting part.
- private def readStack(): Array[StackTraceElement] =
- (new Throwable).getStackTrace dropWhile (el => dropStackElement(el.getClassName))
+ def readStack(): Array[StackTraceElement] = (
+ Thread.currentThread.getStackTrace dropWhile (x => !isCutoff(x)) dropWhile isCutoff drop 1
+ )
def apply[T](body: => T): T = {
add(newRep(readStack()))
@@ -67,7 +59,7 @@ abstract class Origins {
}
def clear() = origins.clear()
def show() = {
- println("\n>> Origins %s.%s logged %s calls from %s distinguished sources.\n".format(originClass, tag, total, origins.keys.size))
+ println("\n>> Origins tag '%s' logged %s calls from %s distinguished sources.\n".format(tag, total, origins.keys.size))
origins.toList sortBy (-_._2) foreach {
case (k, v) => println("%7s %s".format(v, repString(k)))
}
@@ -79,29 +71,49 @@ abstract class Origins {
}
object Origins {
- private type StackSlice = Array[StackTraceElement]
- private val OriginsName = classOf[Origins].getName
- private val counters = new mutable.HashSet[Origins]
+ private val counters = mutable.HashMap[String, Origins]()
+ private val thisClass = this.getClass.getName
- {
- // Console.println("\nOrigins loaded: registering shutdown hook to display results.")
- sys.addShutdownHook(counters foreach (_.purge()))
+ locally {
+ sys.addShutdownHook(counters.values foreach (_.purge()))
}
- def apply[T: Manifest](tag: String): Origins = apply(tag, manifest[T].erasure)
- def apply(tag: String, clazz: Class[_]): Origins = apply(tag, new OneLine(clazz))
- def apply(tag: String, orElse: => Origins): Origins = {
- counters find (_.tag == tag) getOrElse {
- val res = orElse setTag tag
- counters += res
- res
- }
+ case class OriginId(className: String, methodName: String) {
+ def matches(el: StackTraceElement) = (
+ (methodName == el.getMethodName) && (className startsWith el.getClassName)
+ )
}
- class OneLine(clazz: Class[_]) extends Origins {
- type Rep = StackTraceElement
- val originClass = clazz.getName stripSuffix MODULE_SUFFIX_STRING
- def newRep(xs: StackSlice): Rep = xs(0)
- def repString(rep: Rep) = " " + rep
+ def lookup(tag: String, orElse: String => Origins): Origins =
+ counters.getOrElseUpdate(tag, orElse(tag))
+ def register(x: Origins): Origins = {
+ counters(x.tag) = x
+ x
+ }
+
+ private def preCutoff(el: StackTraceElement) = (
+ (el.getClassName == thisClass)
+ || (el.getClassName startsWith "java.lang.")
+ )
+ private def findCutoff() = {
+ val cutoff = Thread.currentThread.getStackTrace dropWhile preCutoff head;
+ OriginId(cutoff.getClassName, cutoff.getMethodName)
+ }
+
+ def apply(tag: String): Origins = counters.getOrElseUpdate(tag, new OneLine(tag, findCutoff()))
+ def apply(tag: String, frames: Int): Origins = counters.getOrElseUpdate(tag, new MultiLine(tag, findCutoff(), frames))
+
+ class OneLine(val tag: String, id: OriginId) extends Origins {
+ type Rep = StackTraceElement
+ def isCutoff(el: StackTraceElement) = id matches el
+ def newRep(xs: StackSlice): Rep = if ((xs eq null) || (xs.length == 0)) null else xs(0)
+ def repString(rep: Rep) = " " + rep
+ }
+ class MultiLine(val tag: String, id: OriginId, numLines: Int) extends Origins {
+ type Rep = List[StackTraceElement]
+ def isCutoff(el: StackTraceElement) = id matches el
+ def newRep(xs: StackSlice): Rep = (xs take numLines).toList
+ def repString(rep: Rep) = rep.map("\n " + _).mkString
+ override def readStack() = super.readStack() drop 1
}
}
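[Editor's note — illustrative sketch, not part of the patch.] The reworked readStack() drops frames until it reaches the cutoff (the instrumented method) and then drops the cutoff frames themselves, leaving the interesting caller on top. The same whittling idea in standalone form; callerOf is a hypothetical helper name.

    object StackSliceSketch {
      // Hypothetical helper: find the frame that called `methodName`.
      def callerOf(methodName: String): Option[StackTraceElement] = {
        val frames    = Thread.currentThread.getStackTrace.toList
        val remaining = frames dropWhile (_.getMethodName != methodName) dropWhile (_.getMethodName == methodName)
        remaining.headOption
      }
      def instrumented(): Option[StackTraceElement] = callerOf("instrumented")

      def main(args: Array[String]): Unit =
        println(instrumented())   // roughly: Some(StackSliceSketch$.main(...))
    }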
diff --git a/src/compiler/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/compiler/scala/reflect/internal/util/TraceSymbolActivity.scala
index eb384f9a85..1424226042 100644
--- a/src/compiler/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/compiler/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -7,7 +7,7 @@ trait TraceSymbolActivity {
val global: SymbolTable
import global._
- if (traceSymbolActivity)
+ if (traceSymbolActivity && !global.inReflexiveMirror)
scala.sys addShutdownHook showAllSymbols()
private type Set[T] = scala.collection.immutable.Set[T]
diff --git a/src/compiler/scala/reflect/makro/runtime/Errors.scala b/src/compiler/scala/reflect/makro/runtime/AbortMacroException.scala
index d78eae9237..d78eae9237 100644
--- a/src/compiler/scala/reflect/makro/runtime/Errors.scala
+++ b/src/compiler/scala/reflect/makro/runtime/AbortMacroException.scala
diff --git a/src/compiler/scala/reflect/makro/runtime/Context.scala b/src/compiler/scala/reflect/makro/runtime/Context.scala
index 184008658e..ca02822788 100644
--- a/src/compiler/scala/reflect/makro/runtime/Context.scala
+++ b/src/compiler/scala/reflect/makro/runtime/Context.scala
@@ -10,11 +10,12 @@ abstract class Context extends scala.reflect.makro.Context
with Enclosures
with Names
with Reifiers
- with Reporters
+ with FrontEnds
with Settings
with Symbols
with Typers
- with Util {
+ with Util
+ with Traces {
val mirror: Global
diff --git a/src/compiler/scala/reflect/makro/runtime/Enclosures.scala b/src/compiler/scala/reflect/makro/runtime/Enclosures.scala
index f9a6987e48..ab38fc024d 100644
--- a/src/compiler/scala/reflect/makro/runtime/Enclosures.scala
+++ b/src/compiler/scala/reflect/makro/runtime/Enclosures.scala
@@ -6,31 +6,18 @@ trait Enclosures {
import mirror._
+ private def site = callsiteTyper.context
+ private def enclTrees = site.enclosingContextChain map (_.tree)
+ private def enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
+
// vals are eager to simplify debugging
// after all we wouldn't save that much time by making them lazy
-
- val macroApplication: Tree = expandee
-
- val enclosingMacros: List[Context] = this :: mirror.analyzer.openMacros
-
- val enclosingImplicits: List[(Type, Tree)] = callsiteTyper.context.openImplicits
-
- val enclosingPosition: Position = enclosingMacros.find(c => c.macroApplication.pos != NoPosition).map(_.macroApplication.pos).getOrElse(NoPosition)
-
- val enclosingApplication: Tree = {
- def loop(context: analyzer.Context): Tree = context match {
- case analyzer.NoContext => EmptyTree
- case context if context.tree.isInstanceOf[Apply] => context.tree
- case context => loop(context.outer)
- }
-
- val context = callsiteTyper.context
- loop(context)
- }
-
- val enclosingMethod: Tree = callsiteTyper.context.enclMethod.tree
-
- val enclosingClass: Tree = callsiteTyper.context.enclClass.tree
-
- val enclosingUnit: CompilationUnit = currentRun.currentUnit
-} \ No newline at end of file
+ val macroApplication: Tree = expandee
+ val enclosingApplication: Tree = enclTrees collectFirst { case t: Apply => t } getOrElse EmptyTree
+ val enclosingClass: Tree = site.enclClass.tree
+ val enclosingImplicits: List[(Type, Tree)] = site.openImplicits
+ val enclosingMacros: List[Context] = this :: mirror.analyzer.openMacros // include self
+ val enclosingMethod: Tree = site.enclMethod.tree
+ val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
+ val enclosingUnit: CompilationUnit = currentRun.currentUnit
+}
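[Editor's note — illustrative sketch, not part of the patch.] enclosingApplication above amounts to "first Apply in the enclosing context chain, or EmptyTree". The collectFirst/getOrElse shape on a toy tree type (the case classes below are the editor's stand-ins, not the compiler's).

    object CollectFirstSketch {
      sealed trait Tree
      case class Apply(fn: String)    extends Tree
      case class Ident(name: String)  extends Tree
      case object EmptyTree           extends Tree

      def main(args: Array[String]): Unit = {
        val enclTrees: List[Tree] = List(Ident("x"), Apply("f"), Apply("g"))
        val enclosingApplication  = enclTrees collectFirst { case t: Apply => t } getOrElse EmptyTree
        println(enclosingApplication)   // Apply(f)
      }
    }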
diff --git a/src/compiler/scala/reflect/makro/runtime/Reporters.scala b/src/compiler/scala/reflect/makro/runtime/FrontEnds.scala
index 0fd037bdd2..7cfa8e80f3 100644
--- a/src/compiler/scala/reflect/makro/runtime/Reporters.scala
+++ b/src/compiler/scala/reflect/makro/runtime/FrontEnds.scala
@@ -1,21 +1,21 @@
package scala.reflect.makro
package runtime
-trait Reporters {
+trait FrontEnds {
self: Context =>
import mirror._
- def reporter: mirror.Reporter = wrapNscReporter(mirror.reporter)
+ def frontEnd: FrontEnd = wrapReporter(mirror.reporter)
- def setReporter(reporter: mirror.Reporter): this.type = {
- mirror.reporter = wrapApiReporter(reporter)
+ def setFrontEnd(frontEnd: FrontEnd): this.type = {
+ mirror.reporter = wrapFrontEnd(frontEnd)
this
}
- def withReporter[T](reporter: Reporter)(op: => T): T = {
+ def withFrontEnd[T](frontEnd: FrontEnd)(op: => T): T = {
val old = mirror.reporter
- setReporter(reporter)
+ setFrontEnd(frontEnd)
try op
finally mirror.reporter = old
}
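[Editor's note — illustrative sketch, not part of the patch.] withFrontEnd follows the usual swap-and-restore shape: remember the old value, install the new one, and restore in a finally block so the caller's front end survives exceptions. The same shape over a plain mutable slot (sink/withSink are the editor's names).

    object SwapAndRestoreSketch {
      var sink: String => Unit = msg => println("default: " + msg)

      def withSink[T](tmp: String => Unit)(op: => T): T = {
        val old = sink
        sink = tmp
        try op
        finally sink = old            // restored even if op throws
      }

      def main(args: Array[String]): Unit = {
        withSink(msg => println("captured: " + msg)) { sink("inside") }
        sink("outside")               // prints via the default sink again
      }
    }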
diff --git a/src/compiler/scala/reflect/makro/runtime/Reifiers.scala b/src/compiler/scala/reflect/makro/runtime/Reifiers.scala
index 2488b06d6c..1c5af4b752 100644
--- a/src/compiler/scala/reflect/makro/runtime/Reifiers.scala
+++ b/src/compiler/scala/reflect/makro/runtime/Reifiers.scala
@@ -10,60 +10,25 @@ trait Reifiers {
self: Context =>
import mirror._
+ import definitions._
- lazy val reflectMirrorPrefix: Tree = {
- // [Eugene] how do I typecheck this without undergoing this tiresome (and, in general, incorrect) procedure?
- val prefix: Tree = Select(Select(Ident(definitions.ScalaPackage), newTermName("reflect")), newTermName("mirror"))
- val prefixTpe = typeCheck(TypeApply(Select(prefix, newTermName("asInstanceOf")), List(SingletonTypeTree(prefix)))).tpe
- typeCheck(prefix) setType prefixTpe
- }
-
- def reifyTree(prefix: Tree, tree: Tree): Tree =
- reifyTopLevel(prefix, tree)
-
- def reifyType(prefix: Tree, tpe: Type, dontSpliceAtTopLevel: Boolean = false, requireConcreteTypeTag: Boolean = false): Tree =
- reifyTopLevel(prefix, tpe, dontSpliceAtTopLevel, requireConcreteTypeTag)
-
- def unreifyTree(tree: Tree): Tree =
- Select(tree, definitions.ExprEval)
+ lazy val reflectMirrorPrefix: Tree = ReflectMirrorPrefix
- def reifyTopLevel(prefix: Tree, reifee: Any, dontSpliceAtTopLevel: Boolean = false, requireConcreteTypeTag: Boolean = false): Tree = {
- // [Eugene] the plumbing is not very pretty, but anyways factoring out the reifier seems like a necessary step to me
- import scala.reflect.reify._
- val reifier = mkReifier(mirror)(callsiteTyper, prefix, reifee, dontSpliceAtTopLevel, requireConcreteTypeTag)
-
- try {
- val result = reifier.reified
- logFreeVars(expandee.pos, result)
- result
- } catch {
- case ex: reifier.ReificationError =>
-// // this is a "soft" exception - it will normally be caught by the macro
-// // consequently, we need to log the stack trace here, so that it doesn't get lost
-// if (settings.Yreifydebug.value) {
-// val message = new java.io.StringWriter()
-// ex.printStackTrace(new java.io.PrintWriter(message))
-// println(scala.compat.Platform.EOL + message)
-// }
- val xlated = new ReificationError(ex.pos, ex.msg)
- xlated.setStackTrace(ex.getStackTrace)
- throw xlated
- case ex: reifier.UnexpectedReificationError =>
- val xlated = new UnexpectedReificationError(ex.pos, ex.msg, ex.cause)
- xlated.setStackTrace(ex.getStackTrace)
- throw xlated
- }
+ def reifyTree(prefix: Tree, tree: Tree): Tree = {
+ val result = scala.reflect.reify.`package`.reifyTree(mirror)(callsiteTyper, prefix, tree)
+ logFreeVars(enclosingPosition, result)
+ result
}
- class ReificationError(var pos: Position, val msg: String) extends Throwable(msg)
-
- object ReificationError extends ReificationErrorExtractor {
- def unapply(error: ReificationError): Option[(Position, String)] = Some((error.pos, error.msg))
+ def reifyType(prefix: Tree, tpe: Type, dontSpliceAtTopLevel: Boolean = false, concrete: Boolean = false): Tree = {
+ val result = scala.reflect.reify.`package`.reifyType(mirror)(callsiteTyper, prefix, tpe, dontSpliceAtTopLevel, concrete)
+ logFreeVars(enclosingPosition, result)
+ result
}
- class UnexpectedReificationError(val pos: Position, val msg: String, val cause: Throwable = null) extends Throwable(msg, cause)
+ def reifyErasure(tpe: Type, concrete: Boolean = true): Tree =
+ scala.reflect.reify.`package`.reifyErasure(mirror)(callsiteTyper, tpe, concrete)
- object UnexpectedReificationError extends UnexpectedReificationErrorExtractor {
- def unapply(error: UnexpectedReificationError): Option[(Position, String, Throwable)] = Some((error.pos, error.msg, error.cause))
- }
+ def unreifyTree(tree: Tree): Tree =
+ Select(tree, definitions.ExprEval)
}
diff --git a/src/compiler/scala/reflect/makro/runtime/Symbols.scala b/src/compiler/scala/reflect/makro/runtime/Symbols.scala
index 552ad2a303..6341523486 100644
--- a/src/compiler/scala/reflect/makro/runtime/Symbols.scala
+++ b/src/compiler/scala/reflect/makro/runtime/Symbols.scala
@@ -5,4 +5,6 @@ trait Symbols {
self: Context =>
def isLocatable(sym: Symbol) = sym.isLocatable
+
+ def isStatic(sym: Symbol) = sym.isStatic
} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/makro/runtime/Traces.scala b/src/compiler/scala/reflect/makro/runtime/Traces.scala
new file mode 100644
index 0000000000..6b61842316
--- /dev/null
+++ b/src/compiler/scala/reflect/makro/runtime/Traces.scala
@@ -0,0 +1,8 @@
+package scala.reflect.makro
+package runtime
+
+trait Traces extends util.Traces {
+ self: Context =>
+
+ def globalSettings = mirror.settings
+}
diff --git a/src/compiler/scala/reflect/makro/runtime/Typers.scala b/src/compiler/scala/reflect/makro/runtime/Typers.scala
index 38e819746d..704d3d7ac2 100644
--- a/src/compiler/scala/reflect/makro/runtime/Typers.scala
+++ b/src/compiler/scala/reflect/makro/runtime/Typers.scala
@@ -4,13 +4,12 @@ package runtime
trait Typers {
self: Context =>
- val openMacros: List[Context] = this :: mirror.analyzer.openMacros
+ def openMacros: List[Context] = this :: mirror.analyzer.openMacros
- val openImplicits: List[(Type, Tree)] = callsiteTyper.context.openImplicits
+ def openImplicits: List[(Type, Tree)] = callsiteTyper.context.openImplicits
def typeCheck(tree: Tree, pt: Type = mirror.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
- def trace(msg: Any) = if (mirror.settings.Ymacrodebug.value) println(msg)
- trace("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
+ macroLogVerbose("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
val wrapper1 = if (!withImplicitViewsDisabled) (callsiteTyper.context.withImplicitsEnabled[Tree] _) else (callsiteTyper.context.withImplicitsDisabled[Tree] _)
val wrapper2 = if (!withMacrosDisabled) (callsiteTyper.context.withMacrosEnabled[Tree] _) else (callsiteTyper.context.withMacrosDisabled[Tree] _)
def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
@@ -21,25 +20,24 @@ trait Typers {
// (also see reflect.runtime.ToolBoxes.typeCheckExpr for a workaround that might work for you)
wrapper(callsiteTyper.silent(_.typed(tree, mirror.analyzer.EXPRmode, pt)) match {
case mirror.analyzer.SilentResultValue(result) =>
- trace(result)
+ macroLogVerbose(result)
result
case error @ mirror.analyzer.SilentTypeError(_) =>
- trace(error.err.errMsg)
+ macroLogVerbose(error.err.errMsg)
if (!silent) throw new mirror.TypeError(error.err.errPos, error.err.errMsg)
mirror.EmptyTree
})
}
def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
- def trace(msg: Any) = if (mirror.settings.Ymacrodebug.value) println(msg)
- trace("inferring implicit value of type %s, macros = %s".format(pt, !withMacrosDisabled))
+ macroLogVerbose("inferring implicit value of type %s, macros = %s".format(pt, !withMacrosDisabled))
import mirror.analyzer.SearchResult
- val wrapper1 = if (!withMacrosDisabled) (callsiteTyper.context.withMacrosEnabled[SearchResult] _) else (callsiteTyper.context.withMacrosDisabled[SearchResult] _)
+ val context = callsiteTyper.context
+ val wrapper1 = if (!withMacrosDisabled) (context.withMacrosEnabled[SearchResult] _) else (context.withMacrosDisabled[SearchResult] _)
def wrapper (inference: => SearchResult) = wrapper1(inference)
- val context = callsiteTyper.context.makeImplicit(true)
wrapper(mirror.analyzer.inferImplicit(mirror.EmptyTree, pt, true, false, context, !silent, pos)) match {
case failure if failure.tree.isEmpty =>
- trace("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
+ macroLogVerbose("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
if (context.hasErrors) throw new mirror.TypeError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
mirror.EmptyTree
case success =>
@@ -48,17 +46,16 @@ trait Typers {
}
def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, reportAmbiguous: Boolean = true, pos: Position = enclosingPosition): Tree = {
- def trace(msg: Any) = if (mirror.settings.Ymacrodebug.value) println(msg)
- trace("inferring implicit view from %s to %s for %s, macros = %s, reportAmbiguous = %s".format(from, to, tree, !withMacrosDisabled, reportAmbiguous))
+ macroLogVerbose("inferring implicit view from %s to %s for %s, macros = %s, reportAmbiguous = %s".format(from, to, tree, !withMacrosDisabled, reportAmbiguous))
import mirror.analyzer.SearchResult
- val wrapper1 = if (!withMacrosDisabled) (callsiteTyper.context.withMacrosEnabled[SearchResult] _) else (callsiteTyper.context.withMacrosDisabled[SearchResult] _)
+ val context = callsiteTyper.context
+ val wrapper1 = if (!withMacrosDisabled) (context.withMacrosEnabled[SearchResult] _) else (context.withMacrosDisabled[SearchResult] _)
def wrapper (inference: => SearchResult) = wrapper1(inference)
val fun1 = mirror.definitions.FunctionClass(1)
val viewTpe = mirror.TypeRef(fun1.typeConstructor.prefix, fun1, List(from, to))
- val context = callsiteTyper.context.makeImplicit(reportAmbiguous)
- wrapper(mirror.analyzer.inferImplicit(mirror.EmptyTree, viewTpe, reportAmbiguous, true, context, !silent, pos)) match {
+ wrapper(mirror.analyzer.inferImplicit(tree, viewTpe, reportAmbiguous, true, context, !silent, pos)) match {
case failure if failure.tree.isEmpty =>
- trace("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
+ macroLogVerbose("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
if (context.hasErrors) throw new mirror.TypeError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
mirror.EmptyTree
case success =>
@@ -72,7 +69,7 @@ trait Typers {
def unapply(error: TypeError): Option[(Position, String)] = Some((error.pos, error.msg))
}
- def resetAllAttrs[T <: Tree](tree: T): T = mirror.resetAllAttrs(tree)
+ def resetAllAttrs(tree: Tree): Tree = mirror.resetAllAttrs(tree)
- def resetLocalAttrs[T <: Tree](tree: T): T = mirror.resetLocalAttrs(tree)
+ def resetLocalAttrs(tree: Tree): Tree = mirror.resetLocalAttrs(tree)
} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/makro/util/Traces.scala b/src/compiler/scala/reflect/makro/util/Traces.scala
new file mode 100644
index 0000000000..2363cc4bac
--- /dev/null
+++ b/src/compiler/scala/reflect/makro/util/Traces.scala
@@ -0,0 +1,18 @@
+package scala.reflect.makro
+package util
+
+trait Traces {
+ def globalSettings: tools.nsc.Settings
+
+ // [Eugene] lots of ways to log:
+ // 1) trace(...)
+ // 2) log(...)
+ // 3) if (foo) { doStuff(); includingSomeLogs(); }
+ // what is the conventional way of unifying this?
+ val macroDebugLite = globalSettings.YmacrodebugLite.value
+ val macroDebugVerbose = globalSettings.YmacrodebugVerbose.value
+ val macroTraceLite = scala.tools.nsc.util.trace when (macroDebugLite || macroDebugVerbose)
+ val macroTraceVerbose = scala.tools.nsc.util.trace when macroDebugVerbose
+ @inline final def macroLogLite(msg: => Any) { if (macroDebugLite || macroDebugVerbose) println(msg) }
+ @inline final def macroLogVerbose(msg: => Any) { if (macroDebugVerbose) println(msg) }
+} \ No newline at end of file
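[Editor's note — illustrative sketch, not part of the patch.] macroLogLite/macroLogVerbose take their message by name (msg: => Any), so an expensive message is never built unless the corresponding flag is on. A standalone demonstration of that property:

    object ByNameLoggingSketch {
      var verbose = false
      @inline final def logVerbose(msg: => Any) { if (verbose) println(msg) }

      def expensive(): String = { println("building message..."); "details" }

      def main(args: Array[String]): Unit = {
        logVerbose(expensive())   // flag off: expensive() is never evaluated
        verbose = true
        logVerbose(expensive())   // flag on: prints "building message..." then "details"
      }
    }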
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 30c6c06c7b..4466f281b8 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -1,7 +1,8 @@
package scala.reflect
package reify
-import scala.tools.nsc.Global
+import scala.reflect.makro.ReificationError
+import scala.reflect.makro.UnexpectedReificationError
trait Errors {
self: Reifier =>
@@ -9,11 +10,7 @@ trait Errors {
import mirror._
import definitions._
- class ReificationError(var pos: Position, val msg: String) extends Throwable(msg)
- class UnexpectedReificationError(val pos: Position, val msg: String, val cause: Throwable = null) extends Throwable(msg)
-
- lazy val defaultErrorPosition: Position =
- mirror.analyzer.openMacros.find(c => c.macroApplication.pos != NoPosition).map(_.macroApplication.pos).getOrElse(NoPosition)
+ def defaultErrorPosition = analyzer.enclosingMacroPosition
// expected errors: these can happen if the user casually writes whatever.reify(...)
// hence we don't crash here, but nicely report a typechecking error and bail out asap
@@ -60,4 +57,4 @@ trait Errors {
val msg = "internal error: erroneous reifees are not supported, make sure that your reifee has typechecked successfully before passing it to the reifier"
throw new UnexpectedReificationError(defaultErrorPosition, msg)
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/Reifiers.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 16c26734b2..fea825358e 100644
--- a/src/compiler/scala/reflect/reify/Reifiers.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -2,6 +2,8 @@ package scala.reflect
package reify
import scala.tools.nsc.Global
+import scala.reflect.makro.ReificationError
+import scala.reflect.makro.UnexpectedReificationError
/** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type.
* See more info in the comments to ``reify'' in scala.reflect.api.Universe.
@@ -21,7 +23,7 @@ abstract class Reifier extends Phases
val prefix: Tree
val reifee: Any
val dontSpliceAtTopLevel: Boolean
- val requireConcreteTypeTag: Boolean
+ val concrete: Boolean
/**
* For ``reifee'' and other reification parameters, generate a tree of the form
@@ -72,22 +74,23 @@ abstract class Reifier extends Phases
if (tree.tpe exists (sub => sub.typeSymbol.isLocalToReifee))
CannotReifyReifeeThatHasTypeLocalToReifee(tree)
- val manifestedType = typer.packedType(tree, NoSymbol)
- val manifestedRtype = reifyType(manifestedType)
- val tagModule = if (definitelyConcrete) ConcreteTypeTagModule else TypeTagModule
- var typeTagCtor = TypeApply(Select(Ident(nme.MIRROR_SHORT), tagModule.name), List(TypeTree(manifestedType)))
- var exprCtor = TypeApply(Select(Ident(nme.MIRROR_SHORT), ExprModule.name), List(TypeTree(manifestedType)))
- Apply(Apply(exprCtor, List(rtree)), List(Apply(typeTagCtor, List(manifestedRtype))))
+ val taggedType = typer.packedType(tree, NoSymbol)
+ val tagModule = if (reificationIsConcrete) ConcreteTypeTagModule else TypeTagModule
+ val tagCtor = TypeApply(Select(Ident(nme.MIRROR_SHORT), tagModule.name), List(TypeTree(taggedType)))
+ val exprCtor = TypeApply(Select(Ident(nme.MIRROR_SHORT), ExprModule.name), List(TypeTree(taggedType)))
+ val tagArgs = List(reify(taggedType), reifyErasure(mirror)(typer, taggedType, concrete = false))
+ Apply(Apply(exprCtor, List(rtree)), List(Apply(tagCtor, tagArgs)))
case tpe: Type =>
reifyTrace("reifying = ")(tpe.toString)
reifyTrace("prefix = ")(prefix)
val rtree = reify(tpe)
- val manifestedType = tpe
- var tagModule = if (definitelyConcrete) ConcreteTypeTagModule else TypeTagModule
- var ctor = TypeApply(Select(Ident(nme.MIRROR_SHORT), tagModule.name), List(TypeTree(manifestedType)))
- Apply(ctor, List(rtree))
+ val taggedType = tpe
+ val tagModule = if (reificationIsConcrete) ConcreteTypeTagModule else TypeTagModule
+ val ctor = TypeApply(Select(Ident(nme.MIRROR_SHORT), tagModule.name), List(TypeTree(taggedType)))
+ val args = List(rtree, reifyErasure(mirror)(typer, taggedType, concrete = false))
+ Apply(ctor, args)
case _ =>
throw new Error("reifee %s of type %s is not supported".format(reifee, if (reifee == null) "null" else reifee.getClass.toString))
@@ -126,10 +129,17 @@ abstract class Reifier extends Phases
// 3) local freeterm inlining in Metalevels
// 4) trivial tree splice inlining in Reify (Trees.scala)
// 5) trivial type splice inlining in Reify (Types.scala)
- val freevarBindings = symbolTable collect { case freedef @ FreeDef(_, _, binding, _) => binding.symbol } toSet
+ val freevarBindings = symbolTable collect { case entry @ FreeDef(_, _, binding, _, _) => binding.symbol } toSet
+ // [Eugene] yeah, ugly and extremely brittle, but we do need to do resetAttrs. will be fixed later
+ var importantSymbols = Set[Symbol](PredefModule, ScalaRunTimeModule)
+ importantSymbols ++= importantSymbols map (_.companionSymbol)
+ importantSymbols ++= importantSymbols map (_.moduleClass)
+ importantSymbols ++= importantSymbols map (_.linkedClassOfClass)
+ def importantSymbol(sym: Symbol): Boolean = sym != null && sym != NoSymbol && importantSymbols(sym)
val untyped = resetAllAttrs(wrapped, leaveAlone = {
case ValDef(_, mr, _, _) if mr == nme.MIRROR_SHORT => true
case tree if freevarBindings contains tree.symbol => true
+ case tree if importantSymbol(tree.symbol) => true
case _ => false
})
diff --git a/src/compiler/scala/reflect/reify/codegen/AnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/AnnotationInfos.scala
new file mode 100644
index 0000000000..1d218317dc
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/codegen/AnnotationInfos.scala
@@ -0,0 +1,56 @@
+package scala.reflect.reify
+package codegen
+
+trait AnnotationInfos {
+ self: Reifier =>
+
+ import mirror._
+ import definitions._
+ import treeInfo._
+
+ // usually annotations are reified as their originals from Modifiers
+ // however, when reifying free and tough types, we're forced to reify annotation infos as is
+ // why is that bad? take a look inside
+ def reifyAnnotationInfo(ann: AnnotationInfo): Tree = {
+ val reifiedArgs = ann.args map { arg =>
+ val saved1 = reifyTreeSymbols
+ val saved2 = reifyTreeTypes
+
+ try {
+ // one more quirk of reifying annotations
+ //
+ // when reifying AnnotatedTypes we need to reify all the types and symbols of inner ASTs
+ // that's because a lot of logic expects post-typer trees to have non-null tpes
+ //
+ // Q: reified trees are pre-typer, so there shouldn't be a problem.
+ // the reflective typechecker will fill in missing symbols and types, right?
+ // A: actually, no. annotation ASTs live inside AnnotatedTypes,
+ // and the insides of types are a place the typechecker doesn't look.
+ reifyTreeSymbols = true
+ reifyTreeTypes = true
+
+ // todo. every AnnotationInfo is an island, entire of itself
+ // no regular Traverser or Transformer can reach it
+ // hence we need to run its contents through the entire reification pipeline
+ // e.g. to apply reshaping or to check metalevels
+ reify(arg)
+ } finally {
+ reifyTreeSymbols = saved1
+ reifyTreeTypes = saved2
+ }
+ }
+
+ def reifyClassfileAnnotArg(arg: ClassfileAnnotArg): Tree = arg match {
+ case LiteralAnnotArg(const) =>
+ mirrorFactoryCall(nme.LiteralAnnotArg, reifyProduct(const))
+ case ArrayAnnotArg(args) =>
+ mirrorFactoryCall(nme.ArrayAnnotArg, scalaFactoryCall(nme.Array, args map reifyClassfileAnnotArg: _*))
+ case NestedAnnotArg(ann) =>
+ mirrorFactoryCall(nme.NestedAnnotArg, reifyAnnotationInfo(ann))
+ }
+
+ // if you reify the originals of anns, you get a StackOverflowError when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
+ val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
+ mirrorFactoryCall(nme.AnnotationInfo, reify(ann.atp), mkList(reifiedArgs), mkList(reifiedAssocs))
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/codegen/Symbols.scala b/src/compiler/scala/reflect/reify/codegen/Symbols.scala
index 3328f5e402..21a08b7efb 100644
--- a/src/compiler/scala/reflect/reify/codegen/Symbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/Symbols.scala
@@ -46,13 +46,8 @@ trait Symbols {
}
} else {
// todo. make sure that free methods and free local defs work correctly
- if (sym.isTerm) {
- if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym)
- reifyFreeTerm(sym, Ident(sym))
- } else {
- if (reifyDebug) println("Free type: " + sym)
- reifyFreeType(sym, Ident(sym))
- }
+ if (sym.isTerm) reifyFreeTerm(sym, Ident(sym))
+ else reifyFreeType(sym, Ident(sym))
}
}
@@ -61,13 +56,16 @@ trait Symbols {
case Some(reified) =>
reified
case None =>
+ if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym + "(" + sym.accurateKindString + ")")
+ var name = newTermName(nme.MIRROR_FREE_PREFIX + sym.name)
+ if (sym.isType) name = name.append(nme.MIRROR_FREE_THIS_SUFFIX)
if (sym.isCapturedVariable) {
assert(value.isInstanceOf[Ident], showRaw(value))
val capturedTpe = capturedVariableType(sym)
val capturedValue = referenceCapturedVariable(sym)
- locallyReify(sym, mirrorCall(nme.newFreeTerm, reify(sym.name.toString), reify(capturedTpe), capturedValue, reify(origin(sym))))
+ locallyReify(sym, name, mirrorCall(nme.newFreeTerm, reify(sym.name.toString), reify(capturedTpe), capturedValue, reify(sym.flags), reify(origin(sym))))
} else {
- locallyReify(sym, mirrorCall(nme.newFreeTerm, reify(sym.name.toString), reify(sym.tpe), value, reify(origin(sym))))
+ locallyReify(sym, name, mirrorCall(nme.newFreeTerm, reify(sym.name.toString), reify(sym.tpe), value, reify(sym.flags), reify(origin(sym))))
}
}
@@ -76,36 +74,111 @@ trait Symbols {
case Some(reified) =>
reified
case None =>
- val phantomTypeTag = Apply(TypeApply(Select(Ident(nme.MIRROR_SHORT), nme.TypeTag), List(value)), List(Literal(Constant(null))))
- // todo. implement info reification for free types: type bounds, HK-arity, whatever else that can be useful
- locallyReify(sym, mirrorCall(nme.newFreeType, reify(sym.name.toString), reify(sym.info), phantomTypeTag, reify(origin(sym))))
+ if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString))
+ var name = newTermName(nme.MIRROR_FREE_PREFIX + sym.name)
+ val phantomTypeTag = Apply(TypeApply(Select(Ident(nme.MIRROR_SHORT), nme.TypeTag), List(value)), List(Literal(Constant(null)), Literal(Constant(null))))
+ val flavor = if (sym.isExistential) nme.newFreeExistential else nme.newFreeType
+ locallyReify(sym, name, mirrorCall(flavor, reify(sym.name.toString), reify(sym.info), phantomTypeTag, reify(sym.flags), reify(origin(sym))))
+ }
+
+ def reifySymDef(sym: Symbol): Tree =
+ locallyReified get sym match {
+ case Some(reified) =>
+ reified
+ case None =>
+ if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString))
+ assert(!sym.isLocatable, sym) // if this assertion fires, then tough type reification needs to be rethought
+ sym.owner.ownersIterator find (!_.isLocatable) foreach reifySymDef
+ var name = newTermName(nme.MIRROR_SYMDEF_PREFIX + sym.name)
+ locallyReify(sym, name, Apply(Select(reify(sym.owner), nme.newNestedSymbol), List(reify(sym.name), reify(sym.pos), reify(sym.flags), reify(sym.isClass))))
}
+ // todo. very brittle abstraction, needs encapsulation
import scala.collection.mutable._
- private val localReifications = ArrayBuffer[ValDef]()
+ private val localReifications = ArrayBuffer[Tree]()
private val locallyReified = Map[Symbol, Tree]()
- def symbolTable: List[ValDef] = localReifications.toList
- def symbolTable_=(newSymbolTable: List[ValDef]): Unit = {
+ private var filledIn = false
+ def symbolTable: List[Tree] = { fillInSymbolTable(); localReifications.toList }
+ def symbolTable_=(newSymbolTable: List[Tree]): Unit = {
localReifications.clear()
locallyReified.clear()
+ filledIn = false
newSymbolTable foreach {
- case freedef @ FreeDef(_, name, binding, _) =>
- if (!(locallyReified contains binding.symbol)) {
- localReifications += freedef
- locallyReified(binding.symbol) = Ident(name)
+ case entry =>
+ val att = entry.attachmentOpt[ReifyAttachment]
+ att match {
+ case Some(ReifyAttachment(sym)) =>
+ // don't duplicate reified symbols when merging inlined reifee
+ if (!(locallyReified contains sym)) {
+ val ValDef(_, name, _, _) = entry
+ localReifications += entry
+ locallyReified(sym) = Ident(name)
+ }
+ case other =>
+ // do nothing => symbol table fill-ins will be repopulated later
}
}
}
- private def locallyReify(sym: Symbol, reificode: => Tree): Tree = {
+ private def localName(name0: TermName): TermName = {
+ var name = name0.toString
+ name = name.replace(".type", "$type")
+ name = name.replace(" ", "$")
+ val fresh = typer.context.unit.fresh
+ newTermName(fresh.newName(name))
+ }
+
+ private def locallyReify(sym: Symbol, name0: TermName, reificode: => Tree): Tree = {
val reified = reificode
- val Apply(Select(_, flavor), _) = reified
- // [Eugene] name clashes are impossible, right?
- var name = newTermName(nme.MIRROR_FREE_PREFIX + sym.name)
- if (flavor == nme.newFreeTerm && sym.isType) name = name.append(nme.MIRROR_FREE_THIS_SUFFIX);
- // todo. also reify annotations for free vars
- localReifications += ValDef(NoMods, name, TypeTree(), reified)
+ val name = localName(name0)
+ // todo. tried to declare a private class here to carry an attachment, but it's path-dependent,
+ // so we ran into trouble exchanging free variables between nested and enclosing quasiquotes
+ // attaching just Symbol isn't good either, so we need to think of a principled solution
+ val local = ValDef(NoMods, name, TypeTree(), reified) withAttachment ReifyAttachment(sym)
+ localReifications += local
+ filledIn = false
locallyReified(sym) = Ident(name)
locallyReified(sym)
}
+
+ /** Sets type signatures and annotations for locally reified symbols */
+ private def fillInSymbolTable() = {
+ if (!filledIn) {
+ val fillIns = new ArrayBuffer[Tree]
+ var i = 0
+ while (i < localReifications.length) {
+ // fillInSymbol might create new locallyReified symbols; that's why this is done iteratively
+ val reified = localReifications(i)
+ val att = reified.attachmentOpt[ReifyAttachment]
+ att match {
+ case Some(ReifyAttachment(sym)) => fillIns += fillInSymbol(sym)
+ case other => // do nothing
+ }
+ i += 1
+ }
+
+ filledIn = true
+ localReifications ++= fillIns.toList
+ }
+ }
+
+ /** Generate code to add type and annotation info to a reified symbol */
+ private def fillInSymbol(sym: Symbol): Tree = {
+ if (reifyDebug) println("Filling in: %s (%s)".format(sym, sym.accurateKindString))
+ val isFree = locallyReified(sym) match { case Ident(name) => name startsWith nme.MIRROR_FREE_PREFIX }
+ if (isFree) {
+ if (sym.annotations.isEmpty) EmptyTree
+ else Apply(Select(locallyReified(sym), nme.setAnnotations), List(reify(sym.annotations)))
+ } else {
+ import scala.reflect.internal.Flags._
+ if (sym hasFlag LOCKED) {
+ // [Eugene] better to have a symbol without a type signature than to crash with a CyclicReference
+ EmptyTree
+ } else {
+ val rset = Apply(Select(locallyReified(sym), nme.setTypeSignature), List(reify(sym.info)))
+ if (sym.annotations.isEmpty) rset
+ else Apply(Select(rset, nme.setAnnotations), List(reify(sym.annotations)))
+ }
+ }
+ }
} \ No newline at end of file
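[Editor's note — illustrative sketch, not part of the patch.] fillInSymbolTable above iterates by index rather than with foreach because filling in one symbol may reify further symbols and grow the buffer under the loop. The pattern in isolation:

    object FillInSketch {
      import scala.collection.mutable.ArrayBuffer

      def main(args: Array[String]): Unit = {
        val work = ArrayBuffer("a", "b")
        var i = 0
        while (i < work.length) {            // length is re-read, so appended items get processed too
          val item = work(i)
          if (item == "a") work += "a.dep"   // processing may discover more work
          println("processed " + item)
          i += 1
        }
        // processed a / processed b / processed a.dep
      }
    }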
diff --git a/src/compiler/scala/reflect/reify/codegen/Trees.scala b/src/compiler/scala/reflect/reify/codegen/Trees.scala
index 22f42aea49..c9f5fc5b8d 100644
--- a/src/compiler/scala/reflect/reify/codegen/Trees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/Trees.scala
@@ -7,6 +7,12 @@ trait Trees {
import mirror._
import definitions._
import treeInfo._
+ import scala.reflect.api.Modifier
+
+ // unfortunately, these are necessary to reify AnnotatedTypes
+ // I'd gladly get rid of them, but I don't fancy making a metaprogramming API that doesn't work with annotated types
+ var reifyTreeSymbols = false
+ var reifyTreeTypes = false
/**
* Reify a tree.
@@ -41,7 +47,7 @@ trait Trees {
reifyMirrorObject(EmptyTree)
case mirror.emptyValDef =>
mirrorSelect(nme.emptyValDef)
- case FreeDef(_, _, _, _) =>
+ case FreeDef(_, _, _, _, _) =>
reifyNestedFreeDef(tree)
case FreeRef(_, _) =>
reifyNestedFreeRef(tree)
@@ -52,6 +58,28 @@ trait Trees {
case NestedExpr(_, _, _) =>
reifyNestedExpr(tree)
case Literal(const @ Constant(_)) =>
+ // [Eugene] was necessary when we reified erasures as normalized tycons
+ // now, when we do existentialAbstraction on normalizations, everything works great
+ // todo. find an explanation
+// if (const.tag == ClazzTag) {
+//// def preprocess(tpe: Type): Type = tpe.typeSymbol match {
+//// case ArrayClass => appliedType(ArrayClass, preprocess(tpe.typeArgs.head))
+//// case _ => tpe.typeConstructor
+//// }
+//// val tpe = preprocess(const.typeValue)
+// val tpe = const.typeValue
+// var reified = reify(tpe)
+// reified = mirrorCall(nme.Literal, mirrorCall(nme.Constant, reified))
+//// val skolems = ClassClass.typeParams map (_ => newTypeName(typer.context.unit.fresh.newName("_$")))
+//// var existential = mirrorCall(nme.AppliedTypeTree, mirrorCall(nme.TypeTree, reify(ClassClass.typeConstructor)), mkList(skolems map (skolem => mirrorCall(nme.Ident, reify(skolem)))))
+//// existential = mirrorCall(nme.ExistentialTypeTree, existential, reify(skolems map (skolem => TypeDef(Modifiers(Set(Modifier.deferred: Modifier)), skolem, Nil, TypeBoundsTree(Ident(NothingClass) setType NothingClass.tpe, Ident(AnyClass) setType AnyClass.tpe)))))
+//// reified = mirrorCall(nme.TypeApply, mirrorCall(nme.Select, reified, reify(nme.asInstanceOf_)), mkList(List(existential)))
+// // why is this required??
+//// reified = mirrorCall(nme.TypeApply, mirrorCall(nme.Select, reified, reify(nme.asInstanceOf_)), mkList(List(mirrorCall(nme.TypeTree, reify(appliedType(ClassClass.tpe, List(AnyClass.tpe)))))))
+// reified
+// } else {
+// mirrorCall(nme.Literal, reifyProduct(const))
+// }
mirrorCall(nme.Literal, reifyProduct(const))
case Import(expr, selectors) =>
mirrorCall(nme.Import, reify(expr), mkList(selectors map reifyProduct))
@@ -59,6 +87,17 @@ trait Trees {
reifyProduct(tree)
}
+ // usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
+ // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why.
+ if (reifyTreeSymbols && tree.hasSymbol) {
+ if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree))
+ rtree = Apply(Select(rtree, nme.setSymbol), List(reify(tree.symbol)))
+ }
+ if (reifyTreeTypes && tree.tpe != null) {
+ if (reifyDebug) println("reifying type %s for tree %s".format(tree.tpe, tree))
+ rtree = Apply(Select(rtree, nme.setType), List(reify(tree.tpe)))
+ }
+
rtree
}
@@ -82,7 +121,7 @@ trait Trees {
case InlinedTreeSplice(_, inlinedSymbolTable, tree, _) =>
if (reifyDebug) println("inlining the splicee")
// all free vars local to the enclosing reifee should've already been inlined by ``Metalevels''
- inlinedSymbolTable foreach { case freedef @ FreeDef(_, _, binding, _) => assert(!binding.symbol.isLocalToReifee, freedef) }
+ inlinedSymbolTable collect { case freedef @ FreeDef(_, _, binding, _, _) if binding.symbol.isLocalToReifee => assert(false, freedef) }
symbolTable ++= inlinedSymbolTable
tree
case tree =>
@@ -152,11 +191,11 @@ trait Trees {
val tpe = tpe0.dealias
if (reifyDebug) println("reifying bound type %s (underlying type is %s, dealiased is %s)".format(sym0, tpe0, tpe))
- if (eligibleForSplicing(tpe)) {
+ if (tpe.isSpliceable) {
val spliced = spliceType(tpe)
if (spliced == EmptyTree) {
if (reifyDebug) println("splicing failed: reify as is")
- mirrorCall(nme.TypeTree, reifyType(tpe))
+ mirrorCall(nme.TypeTree, reify(tpe))
} else {
spliced match {
case TypeRefToFreeType(freeType) =>
@@ -173,7 +212,7 @@ trait Trees {
mirrorCall(nme.Ident, reify(sym))
} else {
if (reifyDebug) println("tpe is an alias, but not a locatable: reify as TypeTree(%s)".format(tpe))
- mirrorCall(nme.TypeTree, reifyType(tpe))
+ mirrorCall(nme.TypeTree, reify(tpe))
}
}
}
diff --git a/src/compiler/scala/reflect/reify/codegen/Types.scala b/src/compiler/scala/reflect/reify/codegen/Types.scala
index 9bc113e8a4..a2b074c6b2 100644
--- a/src/compiler/scala/reflect/reify/codegen/Types.scala
+++ b/src/compiler/scala/reflect/reify/codegen/Types.scala
@@ -55,11 +55,9 @@ trait Types {
case tpe @ NullaryMethodType(restpe) =>
reifyProduct(tpe)
case tpe @ AnnotatedType(anns, underlying, selfsym) =>
-// reifyAnnotatedType(tpe)
- CannotReifyType(tpe)
+ reifyAnnotatedType(tpe)
case _ =>
-// reifyToughType(tpe)
- CannotReifyType(tpe)
+ reifyToughType(tpe)
}
}
@@ -67,14 +65,15 @@ trait Types {
private var spliceTypesEnabled = !dontSpliceAtTopLevel
/** Keeps track of whether this reification contains abstract type parameters */
- var maybeConcrete = true
- var definitelyConcrete = true
-
- def eligibleForSplicing(tpe: Type): Boolean = {
- // [Eugene] is this comprehensive?
- // the only thingies that we want to splice are: 1) type parameters, 2) type members
- // this check seems to cover them all, right?
- tpe.isInstanceOf[TypeRef] && tpe.typeSymbol.isAbstractType
+ private var _reificationIsConcrete = true
+ def reificationIsConcrete = _reificationIsConcrete
+ def reificationIsConcrete_=(value: Boolean) {
+ _reificationIsConcrete = value
+ if (!value && concrete) {
+ assert(current.isInstanceOf[Type], current)
+ val offender = current.asInstanceOf[Type]
+ CannotReifyConcreteTypeTagHavingUnresolvedTypeParameters(offender)
+ }
}
private type SpliceCacheKey = (Symbol, Symbol)
@@ -84,11 +83,13 @@ trait Types {
}
def spliceType(tpe: Type): Tree = {
- if (eligibleForSplicing(tpe)) {
+ // [Eugene] it seems that depending on the context the very same symbol can be either a spliceable tparam or a quantified existential. very weird!
+ val quantified = currentQuantified
+ if (tpe.isSpliceable && !(quantified contains tpe.typeSymbol)) {
if (reifyDebug) println("splicing " + tpe)
if (spliceTypesEnabled) {
- var tagClass = if (requireConcreteTypeTag) ConcreteTypeTagClass else TypeTagClass
+ var tagClass = if (concrete) ConcreteTypeTagClass else TypeTagClass
val tagTpe = singleType(prefix.tpe, prefix.tpe member tagClass.name)
// [Eugene] this should be enough for an abstract type, right?
@@ -98,22 +99,18 @@ trait Types {
// if this fails, it might produce the dreaded "erroneous or inaccessible type" error
// to find out the whereabouts of the error run scalac with -Ydebug
if (reifyDebug) println("launching implicit search for %s.%s[%s]".format(prefix, tagClass.name, tpe))
- val positionBearer = mirror.analyzer.openMacros.find(c => c.macroApplication.pos != NoPosition).map(_.macroApplication).getOrElse(EmptyTree).asInstanceOf[Tree]
- typer.resolveTypeTag(positionBearer, prefix.tpe, tpe, requireConcreteTypeTag) match {
+ typer.resolveTypeTag(prefix.tpe, tpe, defaultErrorPosition, concrete) match {
case failure if failure.isEmpty =>
if (reifyDebug) println("implicit search was fruitless")
- definitelyConcrete &= false
- maybeConcrete &= false
EmptyTree
case success =>
if (reifyDebug) println("implicit search has produced a result: " + success)
- definitelyConcrete |= requireConcreteTypeTag
- maybeConcrete |= true
+ reificationIsConcrete &= concrete
var splice = Select(success, nme.tpe)
splice match {
case InlinedTypeSplice(_, inlinedSymbolTable, tpe) =>
// all free vars local to the enclosing reifee should've already been inlined by ``Metalevels''
- inlinedSymbolTable foreach { case freedef @ FreeDef(_, _, binding, _) => assert(!binding.symbol.isLocalToReifee, freedef) }
+ inlinedSymbolTable collect { case freedef @ FreeDef(_, _, binding, _, _) if binding.symbol.isLocalToReifee => assert(false, freedef) }
symbolTable ++= inlinedSymbolTable
reifyTrace("inlined the splicee: ")(tpe)
case tpe =>
@@ -126,101 +123,46 @@ trait Types {
if (reifyDebug) println("splicing has been cancelled: spliceTypesEnabled = false")
}
- if (requireConcreteTypeTag)
- CannotReifyConcreteTypeTagHavingUnresolvedTypeParameters(tpe)
+ reificationIsConcrete = false
}
spliceTypesEnabled = true
EmptyTree
}
- // yet another thingie disabled for simplicity
- // in principle, we could retain and reify AnnotatedTypes
- // but that'd require reifying every type and symbol inside ann.args
- // however, since we've given up on tough types for the moment, the former would be problematic
-// private def reifyAnnotatedType(tpe: AnnotatedType): Tree = {
-// // ``Reshaper'' transforms annotation infos from symbols back into Modifier.annotations, which are trees
-// // so the only place on Earth that can lead to reification of AnnotationInfos is the Ay Tee Land
-// // therefore this function is as local as possible, don't move it out of this scope
-// def reifyAnnotationInfo(ann: AnnotationInfo): Tree = {
-// val reifiedArgs = ann.args map { arg =>
-// val saved1 = reifyTreeSymbols
-// val saved2 = reifyTreeTypes
-//
-// try {
-// // one more quirk of reifying annotations
-// //
-// // when reifying AnnotatedTypes we need to reify all the types and symbols of inner ASTs
-// // that's because a lot of logic expects post-typer trees to have non-null tpes
-// //
-// // Q: reified trees are pre-typer, so there's shouldn't be a problem.
-// // reflective typechecker will fill in missing symbols and types, right?
-// // A: actually, no. annotation ASTs live inside AnnotatedTypes,
-// // and insides of the types is the place where typechecker doesn't look.
-// reifyTreeSymbols = true
-// reifyTreeTypes = true
-//
-// // todo. every AnnotationInfo is an island, entire of itself
-// // no regular Traverser or Transformer can reach it
-// // hence we need to run its contents through the entire reification pipeline
-// // e.g. to apply reshaping or to check metalevels
-// reify(arg)
-// } finally {
-// reifyTreeSymbols = saved1
-// reifyTreeTypes = saved2
-// }
-// }
-//
-// def reifyClassfileAnnotArg(arg: ClassfileAnnotArg): Tree = arg match {
-// case LiteralAnnotArg(const) =>
-// mirrorFactoryCall(nme.LiteralAnnotArg, reifyProduct(const))
-// case ArrayAnnotArg(args) =>
-// mirrorFactoryCall(nme.ArrayAnnotArg, scalaFactoryCall(nme.Array, args map reifyClassfileAnnotArg: _*))
-// case NestedAnnotArg(ann) =>
-// mirrorFactoryCall(nme.NestedAnnotArg, reifyAnnotationInfo(ann))
-// }
-//
-// // if you reify originals of anns, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
-// val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
-// mirrorFactoryCall(nme.AnnotationInfo, reify(ann.atp), mkList(reifiedArgs), mkList(reifiedAssocs))
-// }
-//
-// val AnnotatedType(anns, underlying, selfsym) = tpe
-// mirrorFactoryCall(nme.AnnotatedType, mkList(anns map reifyAnnotationInfo), reify(underlying), reify(selfsym))
-// }
-
- // previous solution to reifying tough types involved creating dummy symbols (see ``registerReifiableSymbol'' calls below)
- // however such symbols lost all the connections with their origins and became almost useless, except for typechecking
- // hence this approach was replaced by less powerful, but more principled one based on ``reifyFreeType''
- // it's possible that later on we will revise and revive ``reifyToughType'', but for now it's disabled under an implementation restriction
-// /** Reify a tough type, i.e. the one that leads to creation of auxiliary symbols */
-// // This is the uncharted territory in the reifier
-// private def reifyToughType(tpe: Type): Tree = {
-// if (reifyDebug) println("tough type: %s (%s)".format(tpe, tpe.kind))
-//
-// def reifyScope(scope: Scope): Tree = {
-// scope foreach registerReifiableSymbol
-// mirrorCall(nme.newScopeWith, scope.toList map reify: _*)
-// }
-//
-// tpe match {
-// case tpe @ RefinedType(parents, decls) =>
-// registerReifiableSymbol(tpe.typeSymbol)
-// mirrorFactoryCall(tpe, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
-// case tpe @ ExistentialType(tparams, underlying) =>
-// tparams foreach registerReifiableSymbol
-// mirrorFactoryCall(tpe, reify(tparams), reify(underlying))
-// case tpe @ ClassInfoType(parents, decls, clazz) =>
-// registerReifiableSymbol(clazz)
-// mirrorFactoryCall(tpe, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
-// case tpe @ MethodType(params, restpe) =>
-// params foreach registerReifiableSymbol
-// mirrorFactoryCall(tpe, reify(params), reify(restpe))
-// case tpe @ PolyType(tparams, underlying) =>
-// tparams foreach registerReifiableSymbol
-// mirrorFactoryCall(tpe, reify(tparams), reify(underlying))
-// case _ =>
-// throw new Error("internal error: %s (%s) is not supported".format(tpe, tpe.kind))
-// }
-// }
+ /** Reify an annotated type, i.e. the one that makes us deal with AnnotationInfos */
+ private def reifyAnnotatedType(tpe: AnnotatedType): Tree = {
+ val AnnotatedType(anns, underlying, selfsym) = tpe
+ mirrorFactoryCall(nme.AnnotatedType, mkList(anns map reifyAnnotationInfo), reify(underlying), reify(selfsym))
+ }
+
+ /** Reify a tough type, i.e. the one that leads to creation of auxiliary symbols */
+ private def reifyToughType(tpe: Type): Tree = {
+ if (reifyDebug) println("tough type: %s (%s)".format(tpe, tpe.kind))
+
+ def reifyScope(scope: Scope): Tree = {
+ scope foreach reifySymDef
+ mirrorCall(nme.newScopeWith, scope.toList map reify: _*)
+ }
+
+ tpe match {
+ case tpe @ RefinedType(parents, decls) =>
+ reifySymDef(tpe.typeSymbol)
+ mirrorFactoryCall(tpe, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
+ case tpe @ ExistentialType(tparams, underlying) =>
+ tparams foreach reifySymDef
+ mirrorFactoryCall(tpe, reify(tparams), reify(underlying))
+ case tpe @ ClassInfoType(parents, decls, clazz) =>
+ reifySymDef(clazz)
+ mirrorFactoryCall(tpe, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
+ case tpe @ MethodType(params, restpe) =>
+ params foreach reifySymDef
+ mirrorFactoryCall(tpe, reify(params), reify(restpe))
+ case tpe @ PolyType(tparams, underlying) =>
+ tparams foreach reifySymDef
+ mirrorFactoryCall(tpe, reify(tparams), reify(underlying))
+ case _ =>
+ throw new Error("internal error: %s (%s) is not supported".format(tpe, tpe.kind))
+ }
+ }
} \ No newline at end of file
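[Editor's note — illustrative sketch, not part of the patch.] reificationIsConcrete_= above is a setter that fails fast: the moment concreteness is lost while a concrete tag was demanded, an error is raised instead of being discovered at the end of reification. A reduced version (names are the editor's):

    object ConcretenessSketch {
      val concreteDemanded = true           // stands in for the reifier's `concrete` flag
      private var _isConcrete = true
      def isConcrete = _isConcrete
      def isConcrete_=(value: Boolean) {
        _isConcrete = value
        if (!value && concreteDemanded)
          sys.error("cannot prove that the reification is concrete")
      }

      def main(args: Array[String]): Unit = {
        try isConcrete = false
        catch { case e: RuntimeException => println("caught: " + e.getMessage) }
      }
    }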
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index 85cf92fe2f..a096e2e93b 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -1,14 +1,16 @@
package scala.reflect
import scala.tools.nsc.Global
+import scala.reflect.makro.ReificationError
+import scala.reflect.makro.UnexpectedReificationError
package object reify {
- def mkReifier(global: Global)(typer: global.analyzer.Typer, prefix: global.Tree, reifee: Any, dontSpliceAtTopLevel: Boolean = false, requireConcreteTypeTag: Boolean = false): Reifier { val mirror: global.type } = {
+ private def mkReifier(global: Global)(typer: global.analyzer.Typer, prefix: global.Tree, reifee: Any, dontSpliceAtTopLevel: Boolean = false, concrete: Boolean = false): Reifier { val mirror: global.type } = {
val typer1: typer.type = typer
val prefix1: prefix.type = prefix
val reifee1 = reifee
val dontSpliceAtTopLevel1 = dontSpliceAtTopLevel
- val requireConcreteTypeTag1 = requireConcreteTypeTag
+ val concrete1 = concrete
new {
val mirror: global.type = global
@@ -16,7 +18,42 @@ package object reify {
val prefix = prefix1
val reifee = reifee1
val dontSpliceAtTopLevel = dontSpliceAtTopLevel1
- val requireConcreteTypeTag = requireConcreteTypeTag1
+ val concrete = concrete1
} with Reifier
}
+
+ def reifyTree(global: Global)(typer: global.analyzer.Typer, prefix: global.Tree, tree: global.Tree): global.Tree =
+ mkReifier(global)(typer, prefix, tree, false, false).reified.asInstanceOf[global.Tree]
+
+ def reifyType(global: Global)(typer: global.analyzer.Typer, prefix: global.Tree, tpe: global.Type, dontSpliceAtTopLevel: Boolean = false, concrete: Boolean = false): global.Tree =
+ mkReifier(global)(typer, prefix, tpe, dontSpliceAtTopLevel, concrete).reified.asInstanceOf[global.Tree]
+
+ def reifyErasure(global: Global)(typer0: global.analyzer.Typer, tpe: global.Type, concrete: Boolean = true): global.Tree = {
+ import global._
+ import definitions._
+ import analyzer.enclosingMacroPosition
+
+ def erasureTagInScope = typer0.context.withMacrosDisabled(typer0.resolveErasureTag(tpe, enclosingMacroPosition, concrete = concrete))
+ def arrayTagInScope = typer0.context.withMacrosDisabled(typer0.resolveArrayTag(tpe, enclosingMacroPosition))
+ val inScope = (erasureTagInScope, arrayTagInScope)
+
+ inScope match {
+ case (success, _) if !success.isEmpty =>
+ Select(success, nme.erasure)
+ case (_, success) if !success.isEmpty =>
+ gen.mkMethodCall(arrayElementClassMethod, List(success))
+ case _ =>
+ tpe.normalize match {
+ case TypeRef(_, ArrayClass, componentTpe :: Nil) =>
+ val componentErasure = reifyErasure(global)(typer0, componentTpe, concrete)
+ gen.mkMethodCall(arrayClassMethod, List(componentErasure))
+ case _ =>
+ if (tpe.isSpliceable && concrete)
+ throw new ReificationError(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe))
+ var erasure = tpe.erasure
+ if (tpe.typeSymbol.isDerivedValueClass && global.phase.id < global.currentRun.erasurePhase.id) erasure = tpe
+ gen.mkNullaryCall(Predef_classOf, List(erasure))
+ }
+ }
+ }
}
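[Editor's note — illustrative sketch, not part of the patch.] reifyErasure above recurses on Array types: it reifies the component's erasure and wraps it back into an array class. The same recursion performed at runtime over java.lang.Class values; arrayClass here is the editor's stand-in, analogous in spirit to the array-class helpers the patch calls.

    object ArrayClassSketch {
      // Build the Class of an array with the given component class.
      def arrayClass(component: Class[_]): Class[_] =
        java.lang.reflect.Array.newInstance(component, 0).getClass

      def main(args: Array[String]): Unit = {
        println(arrayClass(classOf[Int]))               // class [I
        println(arrayClass(arrayClass(classOf[Int])))   // class [[I
      }
    }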
diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala
index 59a36f0ba4..93ef46472e 100644
--- a/src/compiler/scala/reflect/reify/phases/Calculate.scala
+++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala
@@ -8,14 +8,12 @@ trait Calculate {
import definitions._
import treeInfo._
- implicit def sym2richSym(sym: Symbol): RichSymbol = new RichSymbol(sym)
- class RichSymbol(sym: Symbol) {
+ implicit class RichSymbol(sym: Symbol) {
def metalevel: Int = { assert(sym != NoSymbol); localSymbols.getOrElse(sym, 0) }
def isLocalToReifee = (localSymbols contains sym) // [Eugene] how do I account for local skolems?
}
- implicit def tpe2richTpe(tpe: Type): RichType = new RichType(tpe)
- class RichType(tpe: Type) {
+ implicit class RichType(tpe: Type) {
def isLocalToReifee = tpe != null && (tpe exists (tp => (localSymbols contains tp.typeSymbol) || (localSymbols contains tp.termSymbol)))
}
@@ -48,6 +46,7 @@ trait Calculate {
bindRelatedSymbol(tree.symbol.companionClass, "companionClass")
bindRelatedSymbol(tree.symbol.companionModule, "companionModule")
Some(tree.symbol) collect { case termSymbol: TermSymbol => bindRelatedSymbol(termSymbol.referenced, "referenced") }
+ Some(tree) collect { case labelDef: LabelDef => labelDef.params foreach (param => bindRelatedSymbol(param.symbol, "labelParam")) }
def bindRelatedSymbol(related: Symbol, name: String): Unit =
if (related != null && related != NoSymbol) {
if (reifyDebug) println("boundSym (" + name + "): " + related)
@@ -58,4 +57,4 @@ trait Calculate {
super.traverse(tree)
}
}
-} \ No newline at end of file
+}
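
The Calculate change above replaces each implicit def plus wrapper-class pair with a single implicit class, which desugars to the same thing: a class and an implicit conversion into it. A self-contained example of the pattern (RichCount is a made-up name, not part of the compiler):

    object ImplicitClassSketch {
      // `implicit class RichCount(...)` is shorthand for
      // `class RichCount(...)` plus `implicit def RichCount(n: Int) = new RichCount(n)`,
      // which is exactly the sym2richSym/tpe2richTpe shape being folded away above.
      implicit class RichCount(val n: Int) {
        def clampedToZero: Int = if (n > 0) n else 0
      }

      def main(args: Array[String]): Unit = {
        println(3.clampedToZero)    // prints: 3
        println((-1).clampedToZero) // prints: 0
      }
    }
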
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index a329a1043d..206f3b1118 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -115,7 +115,7 @@ trait Metalevels {
if (reifyDebug) println("entering inlineable splice: " + splicee)
val Block(mrDef :: symbolTable, expr) = splicee
// [Eugene] how to express the fact that a scrutinee is both of some type and matches an extractor?
- val freedefsToInline = symbolTable collect { case freedef @ FreeTermDef(_, _, binding, _) if binding.symbol.isLocalToReifee => freedef.asInstanceOf[ValDef] }
+ val freedefsToInline = symbolTable collect { case freedef @ FreeTermDef(_, _, binding, _, _) if binding.symbol.isLocalToReifee => freedef.asInstanceOf[ValDef] }
freedefsToInline foreach (vdef => this.freedefsToInline(vdef.name) = vdef)
val symbolTable1 = symbolTable diff freedefsToInline
val tree1 = Select(Block(mrDef :: symbolTable1, expr), flavor)
@@ -134,9 +134,11 @@ trait Metalevels {
// FreeRef(_, _) check won't work, because metalevels of symbol table and body are different, hence, freerefs in symbol table look different from freerefs in body
// todo. also perform garbage collection on local symbols
// so that local symbols used only in type signatures of free vars get removed
+ // todo. same goes for auxiliary symbol defs reified to support tough types
+ // some of them need to be rebuilt, some of them need to be removed, because they're no longer necessary
case FreeRef(mr, name) if freedefsToInline contains name =>
if (reifyDebug) println("inlineable free ref: %s in %s".format(name, showRaw(tree)))
- val freedef @ FreeDef(_, _, binding, _) = freedefsToInline(name)
+ val freedef @ FreeDef(_, _, binding, _, _) = freedefsToInline(name)
if (reifyDebug) println("related definition: %s".format(showRaw(freedef)))
val inlined = reify(binding)
if (reifyDebug) println("verdict: inlined as %s".format(showRaw(inlined)))
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala
index f6d6423605..e03ff5832c 100644
--- a/src/compiler/scala/reflect/reify/phases/Reify.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reify.scala
@@ -9,6 +9,7 @@ trait Reify extends Symbols
with Types
with Names
with Trees
+ with AnnotationInfos
with Positions
with Util {
@@ -18,11 +19,30 @@ trait Reify extends Symbols
import definitions._
import treeInfo._
+ // `reify` looked so nice, I wanted to push the last bit of orthogonal
+ // logic out of it so you can see the improvement. There is no cost to
+ // wrapper methods of this form because the inliner will eliminate them,
+ // but they are very good at separating concerns like pushing/popping
+ // a stack, and they are great for composition and reuse.
+ //
+ // Also, please avoid public vars whenever possible.
+ private object reifyStack {
+ var currents: List[Any] = reifee :: Nil
+
+ @inline final def push[T](reifee: Any)(body: => T): T = {
+ currents ::= reifee
+ try body
+ finally currents = currents.tail
+ }
+ }
+ def currentQuantified = flatCollect(reifyStack.currents)({ case ExistentialType(quantified, _) => quantified })
+ def current = reifyStack.currents.head
+
/**
* Reifies any supported value.
* For internal use only, use ``reified'' instead.
*/
- def reify(reifee: Any): Tree = reifee match {
+ def reify(reifee: Any): Tree = reifyStack.push(reifee)(reifee match {
// before adding some case here, in global scope, please, consider
// whether it can be localized like reifyAnnotationInfo or reifyScope
// this will help reification stay as sane as possible
@@ -30,6 +50,9 @@ trait Reify extends Symbols
case tpe: Type => reifyType(tpe)
case name: Name => reifyName(name)
case tree: Tree => reifyTree(tree)
+ // disabled because this is a very special case that I plan to remove later
+ // why do I dislike annotations? see comments to `reifyAnnotationInfo`
+// case ann: AnnotationInfo => reifyAnnotationInfo(ann)
case pos: Position => reifyPosition(pos)
case mods: mirror.Modifiers => reifyModifiers(mods)
case xs: List[_] => reifyList(xs)
@@ -38,5 +61,5 @@ trait Reify extends Symbols
case null => Literal(Constant(null))
case _ =>
throw new Error("reifee %s of type %s is not supported".format(reifee, reifee.getClass))
- }
+ })
} \ No newline at end of file
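
reifyStack.push above is the usual push/try/finally-pop idiom: the current reifee is pushed for the duration of body and is guaranteed to be popped even if body throws, so `current` and `currentQuantified` never see stale state. A self-contained sketch of the same idiom (names are illustrative, not the compiler's):

    object ReifeeStackSketch {
      private var currents: List[Any] = Nil

      // Push the reifee for the duration of `body`; the finally block guarantees
      // the stack is popped on every exit path, including exceptions.
      def push[T](reifee: Any)(body: => T): T = {
        currents ::= reifee
        try body
        finally currents = currents.tail
      }

      def current: Option[Any] = currents.headOption

      def main(args: Array[String]): Unit = {
        val result = push("outer") { push(42) { s"inside: current = $current" } }
        println(result)   // prints: inside: current = Some(42)
        println(current)  // prints: None -- everything was popped on the way out
      }
    }
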
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index e700604612..4ab306a13f 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -78,12 +78,6 @@ trait Reshape {
if (reifyDebug) println("unapplying unapply: " + tree)
val fun1 = extractExtractor(fun)
Apply(fun1, args).copyAttrs(unapply)
- case Literal(const @ Constant(tpe: Type)) =>
- // todo. implement this
- ???
- case Literal(const @ Constant(sym: Symbol)) =>
- // todo. implement this
- ???
case _ =>
tree
}
diff --git a/src/compiler/scala/reflect/runtime/AbstractFile.scala b/src/compiler/scala/reflect/runtime/AbstractFile.scala
index bf3b47298b..414bba020b 100644
--- a/src/compiler/scala/reflect/runtime/AbstractFile.scala
+++ b/src/compiler/scala/reflect/runtime/AbstractFile.scala
@@ -1,6 +1,7 @@
-package scala.reflect.runtime
+package scala.reflect
+package runtime
-class AbstractFile(val jfile: java.io.File) {
- def path: String = jfile.getPath()
- def canonicalPath: String = jfile.getCanonicalPath()
-} \ No newline at end of file
+class AbstractFile(val jfile: java.io.File) extends api.RequiredFile {
+ def path: String = jfile.getPath()
+ def canonicalPath: String = jfile.getCanonicalPath()
+}
diff --git a/src/compiler/scala/reflect/runtime/JavaToScala.scala b/src/compiler/scala/reflect/runtime/JavaToScala.scala
index 6688d77985..e11f6140c9 100644
--- a/src/compiler/scala/reflect/runtime/JavaToScala.scala
+++ b/src/compiler/scala/reflect/runtime/JavaToScala.scala
@@ -314,6 +314,27 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
private def followStatic(clazz: Symbol, mods: Int) =
if (jModifier.isStatic(mods)) clazz.companionModule.moduleClass else clazz
+ /** Methods which need to be wrapped because they either are getSimpleName
+ * or call getSimpleName:
+ *
+ * public String getSimpleName()
+ * public boolean isAnonymousClass()
+ * public boolean isLocalClass()
+ * public boolean isMemberClass()
+ * public String getCanonicalName()
+ *
+ * TODO - find all such calls and wrap them.
+ * TODO - create mechanism to avoid the recurrence of unwrapped calls.
+ */
+ private def wrapClassCheck[T](alt: T)(body: => T): T =
+ try body catch { case x: InternalError if x.getMessage == "Malformed class name" => alt }
+
+ private def wrapIsLocalClass(clazz: jClass[_]): Boolean =
+ wrapClassCheck(false)(clazz.isLocalClass)
+
+ private def wrapGetSimpleName(clazz: jClass[_]): String =
+ wrapClassCheck("")(clazz.getSimpleName)
+
/**
* The Scala owner of the Scala class corresponding to the Java class `jclazz`
*/
@@ -322,7 +343,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
val jEnclosingClass = jclazz.getEnclosingClass
val sEnclosingClass = classToScala(jEnclosingClass)
followStatic(sEnclosingClass, jclazz.getModifiers)
- } else if (jclazz.isLocalClass) {
+ } else if (wrapIsLocalClass(jclazz)) {
val jEnclosingMethod = jclazz.getEnclosingMethod
if (jEnclosingMethod != null) {
methodToScala(jEnclosingMethod)
@@ -494,7 +515,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
val sym = {
if (jclazz.isMemberClass && !nme.isImplClassName(jname)) {
lookup
- } else if (jclazz.isLocalClass || invalidClassName(jname)) {
+ } else if (wrapIsLocalClass(jclazz) || invalidClassName(jname)) {
// local classes and implementation classes not preserved by unpickling - treat as Java
jclassAsScala(jclazz)
} else if (jclazz.isArray) {
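
wrapClassCheck above guards against a JVM quirk: Class.getSimpleName, and the methods built on it, can throw java.lang.InternalError("Malformed class name") for some synthetic class names, so the wrappers substitute a neutral default instead of crashing symbol completion. A stand-alone sketch of the same guard (the sample class is arbitrary):

    object ClassNameGuardSketch {
      // Fall back to `alt` only for the specific InternalError that getSimpleName
      // is known to throw on some malformed/synthetic class names; anything else propagates.
      def wrapClassCheck[T](alt: T)(body: => T): T =
        try body
        catch { case x: InternalError if x.getMessage == "Malformed class name" => alt }

      def safeSimpleName(clazz: Class[_]): String = wrapClassCheck("")(clazz.getSimpleName)
      def safeIsLocal(clazz: Class[_]): Boolean   = wrapClassCheck(false)(clazz.isLocalClass)

      def main(args: Array[String]): Unit = {
        println(safeSimpleName(classOf[String])) // prints: String
        println(safeIsLocal(classOf[String]))    // prints: false
      }
    }
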
diff --git a/src/compiler/scala/reflect/runtime/Mirror.scala b/src/compiler/scala/reflect/runtime/Mirror.scala
index 20024ed058..bf4bc83bea 100644
--- a/src/compiler/scala/reflect/runtime/Mirror.scala
+++ b/src/compiler/scala/reflect/runtime/Mirror.scala
@@ -25,7 +25,7 @@ class Mirror(var classLoader: ClassLoader) extends Universe with api.Mirror {
def symbolOfInstance(obj: Any): Symbol = classToScala(obj.getClass)
def typeOfInstance(obj: Any): Type = typeToScala(obj.getClass)
// to do add getClass/getType for instances of primitive types, probably like this:
- // def getClass[T <: AnyVal : Manifest](x: T): Symbol = manifest[T].getClass
+ // def getClass[T <: AnyVal : ClassTag](x: T): Symbol = classTag[T].sym
def getValueOfField(receiver: AnyRef, field: Symbol): Any = {
fieldToJava(field).get(receiver)
diff --git a/src/compiler/scala/reflect/runtime/Settings.scala b/src/compiler/scala/reflect/runtime/Settings.scala
index 27e90c94bd..b247797c6c 100644
--- a/src/compiler/scala/reflect/runtime/Settings.scala
+++ b/src/compiler/scala/reflect/runtime/Settings.scala
@@ -34,5 +34,6 @@ class Settings extends internal.settings.MutableSettings {
val maxClassfileName = new IntSetting(255)
val Xexperimental = new BooleanSetting(false)
val deepCloning = new BooleanSetting (false)
- val YvirtPatmat = new BooleanSetting(false)
+ val XoldPatmat = new BooleanSetting(false)
+ val XnoPatmatAnalysis = new BooleanSetting(false)
}
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
index dd806beb2a..907c0dd369 100644
--- a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
@@ -39,7 +39,7 @@ trait SynchronizedOps extends internal.SymbolTable
trait SynchronizedScope extends Scope {
override def isEmpty: Boolean = synchronized { super.isEmpty }
override def size: Int = synchronized { super.size }
- override def enter(sym: Symbol) = synchronized { super.enter(sym) }
+ override def enter[T <: Symbol](sym: T): T = synchronized { super.enter(sym) }
override def rehash(sym: Symbol, newname: Name) = synchronized { super.rehash(sym, newname) }
override def unlink(e: ScopeEntry) = synchronized { super.unlink(e) }
override def unlink(sym: Symbol) = synchronized { super.unlink(sym) }
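
SynchronizedScope above follows the stackable-trait pattern: every operation delegates to super inside a synchronized block, layering thread safety onto an otherwise unsynchronized Scope. A minimal illustration with a made-up Counter (not compiler code):

    object SynchronizedTraitSketch {
      class Counter {
        private var n = 0
        def increment(): Unit = n += 1
        def value: Int = n
      }

      // Mixing this in serializes access without touching the base class,
      // the same way SynchronizedScope wraps Scope's enter/rehash/unlink.
      trait SynchronizedCounter extends Counter {
        override def increment(): Unit = synchronized { super.increment() }
        override def value: Int = synchronized { super.value }
      }

      def main(args: Array[String]): Unit = {
        val counter = new Counter with SynchronizedCounter
        val threads = (1 to 4).map(_ => new Thread(new Runnable {
          def run(): Unit = (1 to 1000) foreach (_ => counter.increment())
        }))
        threads foreach (_.start())
        threads foreach (_.join())
        println(counter.value) // prints: 4000 -- no lost updates
      }
    }
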
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
index 6fc5f7ed8a..2322911220 100644
--- a/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -14,11 +14,11 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
synchronized { super.connectModuleToClass(m, moduleClass) }
- override def newFreeTerm(name: TermName, info: Type, value: => Any, origin: String = null, newFlags: Long = 0L): FreeTerm =
- new FreeTerm(name, value, origin) with SynchronizedTermSymbol initFlags newFlags setInfo info
+ override def newFreeTermSymbol(name: TermName, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTerm =
+ new FreeTerm(name, value, origin) with SynchronizedTermSymbol initFlags flags setInfo info
- override def newFreeType(name: TypeName, info: Type, value: => Any, origin: String = null, newFlags: Long = 0L): FreeType =
- new FreeType(name, value, origin) with SynchronizedTypeSymbol initFlags newFlags setInfo info
+ override def newFreeTypeSymbol(name: TypeName, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeType =
+ new FreeType(name, value, origin) with SynchronizedTypeSymbol initFlags flags setInfo info
override protected def makeNoSymbol: NoSymbol = new NoSymbol with SynchronizedSymbol
@@ -46,7 +46,7 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override def typeParams: List[Symbol] = synchronized { super.typeParams }
- override def reset(completer: Type) = synchronized { super.reset(completer) }
+ override def reset(completer: Type): this.type = synchronized { super.reset(completer) }
override def infosString: String = synchronized { super.infosString }
@@ -121,8 +121,8 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
}
trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
- override def sourceFile = synchronized { super.sourceFile }
- override def sourceFile_=(f: AbstractFileType) = synchronized { super.sourceFile_=(f) }
+ override def associatedFile = synchronized { super.associatedFile }
+ override def associatedFile_=(f: AbstractFileType) = synchronized { super.associatedFile_=(f) }
override def thisSym: Symbol = synchronized { super.thisSym }
override def thisType: Type = synchronized { super.thisType }
override def typeOfThis: Type = synchronized { super.typeOfThis }
diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
index 6d832a590f..895c645c83 100644
--- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala
+++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
@@ -16,14 +16,11 @@ import scala.compat.Platform.EOL
trait ToolBoxes extends { self: Universe =>
- import self.{Reporter => ApiReporter}
- import scala.tools.nsc.reporters.{Reporter => NscReporter}
+ def mkToolBox(frontEnd: FrontEnd = mkSilentFrontEnd(), options: String = "") = new ToolBox(frontEnd, options)
- def mkToolBox(reporter: ApiReporter = mkSilentReporter(), options: String = "") = new ToolBox(reporter, options)
+ class ToolBox(val frontEnd: FrontEnd, val options: String) extends AbsToolBox {
- class ToolBox(val reporter: ApiReporter, val options: String) extends AbsToolBox {
-
- class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: NscReporter)
+ class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: Reporter)
extends ReflectGlobal(settings, reporter, ToolBox.this.classLoader) {
import definitions._
@@ -116,7 +113,7 @@ trait ToolBoxes extends { self: Universe =>
override def transform(tree: Tree): Tree =
tree match {
case Ident(name) if reversedFreeTermNames contains name =>
- Ident(reversedFreeTermNames(name))
+ Ident(reversedFreeTermNames(name)) setType tree.tpe
case _ =>
super.transform(tree)
}
@@ -200,11 +197,7 @@ trait ToolBoxes extends { self: Universe =>
val run = new Run
reporter.reset()
run.compileUnits(List(unit), run.namerPhase)
- if (reporter.hasErrors) {
- var msg = "reflective compilation has failed: " + EOL + EOL
- msg += ToolBox.this.reporter.infos map (_.msg) mkString EOL
- throw new ToolBoxError(ToolBox.this, msg)
- }
+ throwIfErrors()
val className = mdef.symbol.fullName
if (settings.debug.value) println("generated: "+className)
@@ -250,6 +243,15 @@ trait ToolBoxes extends { self: Universe =>
settings.Yshowsymkinds.value = saved3
}
}
+
+ // reporter doesn't accumulate errors, but the front-end does
+ def throwIfErrors() = {
+ if (frontEnd.hasErrors) {
+ var msg = "reflective compilation has failed: " + EOL + EOL
+ msg += frontEnd.infos map (_.msg) mkString EOL
+ throw new ToolBoxError(ToolBox.this, msg)
+ }
+ }
}
// todo. is not going to work with quoted arguments with embedded whitespaces
@@ -263,19 +265,13 @@ trait ToolBoxes extends { self: Universe =>
lazy val compiler: ToolBoxGlobal = {
try {
- val errorFn: String => Unit = msg => reporter.log(NoPosition, msg, reporter.ERROR)
- // [Eugene] settings shouldn't be passed via reporters, this is crazy
-// val command = reporter match {
-// case reporter: AbstractReporter => new CompilerCommand(arguments.toList, reporter.settings, errorFn)
-// case _ => new CompilerCommand(arguments.toList, errorFn)
-// }
+ val errorFn: String => Unit = msg => frontEnd.log(NoPosition, msg, frontEnd.ERROR)
val command = new CompilerCommand(arguments.toList, errorFn)
command.settings.outputDirs setSingleOutput virtualDirectory
- val nscReporter = new ApiToNscReporterProxy(reporter) { val settings = command.settings }
- val instance = new ToolBoxGlobal(command.settings, nscReporter)
- if (nscReporter.hasErrors) {
+ val instance = new ToolBoxGlobal(command.settings, new FrontEndToReporterProxy(frontEnd) { val settings = command.settings })
+ if (frontEnd.hasErrors) {
var msg = "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL
- msg += reporter.infos map (_.msg) mkString EOL
+ msg += frontEnd.infos map (_.msg) mkString EOL
throw new ToolBoxError(this, msg)
}
instance.phase = (new instance.Run).typerPhase // need to manually set a phase, because otherwise TypeHistory will crash
@@ -319,18 +315,16 @@ trait ToolBoxes extends { self: Universe =>
// todo. implement this
???
- def resetAllAttrs[T <: Tree](tree: T): T = {
+ def resetAllAttrs(tree: Tree): Tree = {
val ctree: compiler.Tree = importer.importTree(tree)
val ttree: compiler.Tree = compiler.resetAllAttrs(ctree)
- val rmttree = exporter.importTree(ttree)
- rmttree.asInstanceOf[T]
+ exporter.importTree(ttree)
}
- def resetLocalAttrs[T <: Tree](tree: T): T = {
+ def resetLocalAttrs(tree: Tree): Tree = {
val ctree: compiler.Tree = importer.importTree(tree)
val ttree: compiler.Tree = compiler.resetLocalAttrs(ctree)
- val rmttree = exporter.importTree(ttree)
- rmttree.asInstanceOf[T]
+ exporter.importTree(ttree)
}
def showAttributed(tree: Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
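
The ToolBox changes above depend on the front end accumulating every reported message (frontEnd.infos), so throwIfErrors can turn all of them into a single failure instead of consulting a reporter that only counts. A hedged, stand-alone sketch of that accumulate-then-throw shape (Info, the severity strings and the exception type are simplified stand-ins, not the reflection API):

    object CollectingFrontEndSketch {
      case class Info(severity: String, msg: String)
      class ToolBoxFailure(msg: String) extends Exception(msg)

      class CollectingFrontEnd {
        private var infos: List[Info] = Nil
        def log(severity: String, msg: String): Unit = infos ::= Info(severity, msg)
        def hasErrors: Boolean = infos exists (_.severity == "ERROR")

        // Mirrors throwIfErrors: if anything was reported as an error,
        // join all accumulated messages into one exception.
        def throwIfErrors(): Unit =
          if (hasErrors)
            throw new ToolBoxFailure("reflective compilation has failed:\n\n" +
              (infos.reverse map (_.msg) mkString "\n"))
      }

      def main(args: Array[String]): Unit = {
        val frontEnd = new CollectingFrontEnd
        frontEnd.log("WARNING", "something looks off")
        frontEnd.log("ERROR", "something is definitely wrong")
        try frontEnd.throwIfErrors()
        catch { case e: ToolBoxFailure => println(e.getMessage) }
      }
    }
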
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index c92474b33e..daa08ef8a7 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -75,6 +75,11 @@ class Scaladoc extends ScalaMatchingTask {
*/
object Flag extends PermissibleValue {
val values = List("yes", "no", "on", "off")
+ def getBooleanValue(value: String, flagName: String): Boolean =
+ if (Flag.isPermissible(value))
+ return ("yes".equals(value) || "on".equals(value))
+ else
+ buildError("Unknown " + flagName + " flag '" + value + "'")
}
/** The directories that contain source files to compile. */
@@ -127,6 +132,25 @@ class Scaladoc extends ScalaMatchingTask {
/** Instruct the ant task not to fail in the event of errors */
private var nofail: Boolean = false
+ /** Instruct the scaladoc tool to document implicit conversions */
+ private var docImplicits: Boolean = false
+
+ /** Instruct the scaladoc tool to document all (including impossible) implicit conversions */
+ private var docImplicitsShowAll: Boolean = false
+
+ /** Instruct the scaladoc tool to output implicits debugging information */
+ private var docImplicitsDebug: Boolean = false
+
+ /** Instruct the scaladoc tool to create diagrams */
+ private var docDiagrams: Boolean = false
+
+ /** Instruct the scaladoc tool to output diagram creation debugging information */
+ private var docDiagramsDebug: Boolean = false
+
+ /** Instruct the scaladoc tool to use the binary given to create diagrams */
+ private var docDiagramsDotPath: Option[String] = None
+
+
/*============================================================================*\
** Properties setters **
\*============================================================================*/
@@ -361,12 +385,39 @@ class Scaladoc extends ScalaMatchingTask {
*
* @param input One of the flags `yes/no` or `on/off`. Default if no/off.
*/
- def setNoFail(input: String) {
- if (Flag.isPermissible(input))
- nofail = "yes".equals(input) || "on".equals(input)
- else
- buildError("Unknown nofail flag '" + input + "'")
- }
+ def setNoFail(input: String) =
+ nofail = Flag.getBooleanValue(input, "nofail")
+
+ /** Set the `implicits` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setImplicits(input: String) =
+ docImplicits = Flag.getBooleanValue(input, "implicits")
+
+ /** Set the `implicitsShowAll` info attribute to enable scaladoc to show all implicits, including those impossible to
+ * convert to from the default scope
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setImplicitsShowAll(input: String) =
+ docImplicitsShowAll = Flag.getBooleanValue(input, "implicitsShowAll")
+
+ /** Set the `implicitsDebug` info attribute so scaladoc outputs implicit conversion debug information
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setImplicitsDebug(input: String) =
+ docImplicitsDebug = Flag.getBooleanValue(input, "implicitsDebug")
+
+ /** Set the `diagrams` bit so Scaladoc adds diagrams to the documentation
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setDiagrams(input: String) =
+ docDiagrams = Flag.getBooleanValue(input, "diagrams")
+
+ /** Set the `diagramsDebug` bit so Scaladoc outputs diagram building debug information
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setDiagramsDebug(input: String) =
+ docDiagramsDebug = Flag.getBooleanValue(input, "diagramsDebug")
+
+ /** Set the `diagramsDotPath` attribute to the path where graphviz dot can be found (including the binary file name,
+ * eg: /usr/bin/dot) */
+ def setDiagramsDotPath(input: String) =
+ docDiagramsDotPath = Some(input)
/*============================================================================*\
** Properties getters **
@@ -560,6 +611,13 @@ class Scaladoc extends ScalaMatchingTask {
docSettings.deprecation.value = deprecation
docSettings.unchecked.value = unchecked
+ docSettings.docImplicits.value = docImplicits
+ docSettings.docImplicitsDebug.value = docImplicitsDebug
+ docSettings.docImplicitsShowAll.value = docImplicitsShowAll
+ docSettings.docDiagrams.value = docDiagrams
+ docSettings.docDiagramsDebug.value = docDiagramsDebug
+ if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get
+
if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get
if (!docrootcontent.isEmpty) docSettings.docRootContent.value = docrootcontent.get.getAbsolutePath()
log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG)
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index 599936f6f8..256b5088e9 100644
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -9,10 +9,14 @@
##############################################################################
findScalaHome () {
- # see #2092
- local SOURCE="${BASH_SOURCE[0]}"
- while [ -h "$SOURCE" ] ; do SOURCE="$(readlink "$SOURCE")"; done
- ( cd -P "$( dirname "$SOURCE" )"/.. && pwd )
+ # see SI-2092 and SI-5792
+ local source="${BASH_SOURCE[0]}"
+ while [ -h "$source" ] ; do
+ local linked="$(readlink "$source")"
+ local dir="$( cd -P $(dirname "$source") && cd -P $(dirname "$linked") && pwd )"
+ source="$dir/$(basename "$linked")"
+ done
+ ( cd -P "$(dirname "$source")/.." && pwd )
}
execCommand () {
[[ -n $SCALA_RUNNER_DEBUG ]] && echo "" && for arg in "$@@"; do echo "$arg"; done && echo "";
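
The reworked findScalaHome walks the launcher's symlink chain one hop at a time, resolving each relative link target against the directory that contains the link, and only then takes the grandparent of the final script location as the Scala home. The same algorithm expressed as a Scala sketch, purely for illustration (the launcher itself stays a shell script; the sample path is hypothetical):

    import java.nio.file.{Files, Path, Paths}

    object FindHomeSketch {
      // Follow symlinks hop by hop, resolving relative targets against the link's
      // own directory, then take <dir-of-script>/.. as the home directory.
      def findHome(launcher: Path): Path = {
        var source = launcher
        while (Files.isSymbolicLink(source)) {
          val linked = Files.readSymbolicLink(source)            // may be relative, e.g. ../lib/scala
          source = source.getParent.resolve(linked).normalize()
        }
        source.getParent.getParent.toAbsolutePath.normalize()
      }

      def main(args: Array[String]): Unit =
        println(findHome(Paths.get("/usr/local/bin/scala")))     // hypothetical launcher path
    }
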
diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala
index 9592e7a716..91356b3c19 100644
--- a/src/compiler/scala/tools/cmd/FromString.scala
+++ b/src/compiler/scala/tools/cmd/FromString.scala
@@ -7,19 +7,19 @@ package scala.tools
package cmd
import nsc.io.{ Path, File, Directory }
-import scala.reflect.Manifest
+import scala.reflect.TypeTag
/** A general mechanism for defining how a command line argument
* (always a String) is transformed into an arbitrary type. A few
* example instances are in the companion object, but in general
* either IntFromString will suffice or you'll want custom transformers.
*/
-abstract class FromString[+T](implicit m: Manifest[T]) extends PartialFunction[String, T] {
+abstract class FromString[+T](implicit t: TypeTag[T]) extends PartialFunction[String, T] {
def apply(s: String): T
def isDefinedAt(s: String): Boolean = true
def zero: T = apply("")
- def targetString: String = m.toString
+ def targetString: String = t.toString
}
object FromString {
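
FromString above now takes its type evidence as a TypeTag, which it only uses to render targetString for error messages. A self-contained sketch of an instance in the same shape (this uses scala.reflect.runtime.universe.TypeTag, the location I would expect in current releases, which may differ from the scala.reflect.TypeTag import in this snapshot; IntFromString is illustrative):

    object FromStringSketch {
      import scala.reflect.runtime.universe.TypeTag

      // Stripped-down stand-in for cmd.FromString: the tag only feeds targetString.
      abstract class FromString[+T](implicit t: TypeTag[T]) extends PartialFunction[String, T] {
        def apply(s: String): T
        def isDefinedAt(s: String): Boolean = true
        def targetString: String = t.toString
      }

      object IntFromString extends FromString[Int] {
        override def isDefinedAt(s: String) = s.nonEmpty && s.forall(_.isDigit)
        def apply(s: String): Int = s.toInt
      }

      def main(args: Array[String]): Unit = {
        println(IntFromString.isDefinedAt("42")) // prints: true
        println(IntFromString("42") + 1)         // prints: 43
        println(IntFromString.targetString)      // prints something like: TypeTag[Int]
      }
    }
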
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 0869350dd3..83cd9c2578 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -14,20 +14,43 @@ trait AnyValReps {
sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
case class Op(val op : String, val doc : String)
+
+ private def companionCoercions(tos: AnyValRep*) = {
+ tos.toList map (to =>
+ """implicit def @javaequiv@2%s(x: @name@): %s = x.to%s""".format(to.javaEquiv, to.name, to.name)
+ )
+ }
+ def coercionCommentExtra = ""
+ def coercionComment = """
+ /** Language mandated coercions from @name@ to "wider" types.%s
+ */""".format(coercionCommentExtra)
+
+ def implicitCoercions: List[String] = {
+ val coercions = this match {
+ case B => companionCoercions(S, I, L, F, D)
+ case S | C => companionCoercions(I, L, F, D)
+ case I => companionCoercions(L, F, D)
+ case L => companionCoercions(F, D)
+ case F => companionCoercions(D)
+ case _ => Nil
+ }
+ if (coercions.isEmpty) Nil
+ else coercionComment :: coercions
+ }
def isCardinal: Boolean = isIntegerType(this)
def unaryOps = {
val ops = List(
Op("+", "/**\n" +
- " * @return this value, unmodified\n" +
+ " * Returns this value, unmodified.\n" +
" */"),
Op("-", "/**\n" +
- " * @return the negation of this value\n" +
+ " * Returns the negation of this value.\n" +
" */"))
if(isCardinal)
Op("~", "/**\n" +
- " * @return the bitwise negation of this value\n" +
+ " * Returns the bitwise negation of this value.\n" +
" * @example {{{\n" +
" * ~5 == -6\n" +
" * // in binary: ~00000101 ==\n" +
@@ -41,7 +64,7 @@ trait AnyValReps {
if (isCardinal)
List(
Op("|", "/**\n" +
- " * @return the bitwise OR of this value and x\n" +
+ " * Returns the bitwise OR of this value and `x`.\n" +
" * @example {{{\n" +
" * (0xf0 | 0xaa) == 0xfa\n" +
" * // in binary: 11110000\n" +
@@ -51,7 +74,7 @@ trait AnyValReps {
" * }}}\n" +
" */"),
Op("&", "/**\n" +
- " * @return the bitwise AND of this value and x\n" +
+ " * Returns the bitwise AND of this value and `x`.\n" +
" * @example {{{\n" +
" * (0xf0 & 0xaa) == 0xa0\n" +
" * // in binary: 11110000\n" +
@@ -61,7 +84,7 @@ trait AnyValReps {
" * }}}\n" +
" */"),
Op("^", "/**\n" +
- " * @return the bitwise XOR of this value and x\n" +
+ " * Returns the bitwise XOR of this value and `x`.\n" +
" * @example {{{\n" +
" * (0xf0 ^ 0xaa) == 0x5a\n" +
" * // in binary: 11110000\n" +
@@ -76,13 +99,13 @@ trait AnyValReps {
if (isCardinal)
List(
Op("<<", "/**\n" +
- " * @return this value bit-shifted left by the specified number of bits,\n" +
+ " * Returns this value bit-shifted left by the specified number of bits,\n" +
" * filling in the new right bits with zeroes.\n" +
" * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}\n" +
" */"),
Op(">>>", "/**\n" +
- " * @return this value bit-shifted right by the specified number of bits,\n" +
+ " * Returns this value bit-shifted right by the specified number of bits,\n" +
" * filling the new left bits with zeroes.\n" +
" * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}\n" +
" * @example {{{\n" +
@@ -93,7 +116,7 @@ trait AnyValReps {
" */"),
Op(">>", "/**\n" +
- " * @return this value bit-shifted left by the specified number of bits,\n" +
+ " * Returns this value bit-shifted left by the specified number of bits,\n" +
" * filling in the right bits with the same value as the left-most bit of this.\n" +
" * The effect of this is to retain the sign of the value.\n" +
" * @example {{{\n" +
@@ -105,19 +128,19 @@ trait AnyValReps {
else Nil
def comparisonOps = List(
- Op("==", "/**\n * @return `true` if this value is equal x, `false` otherwise\n */"),
- Op("!=", "/**\n * @return `true` if this value is not equal to x, `false` otherwise\n */"),
- Op("<", "/**\n * @return `true` if this value is less than x, `false` otherwise\n */"),
- Op("<=", "/**\n * @return `true` if this value is less than or equal to x, `false` otherwise\n */"),
- Op(">", "/**\n * @return `true` if this value is greater than x, `false` otherwise\n */"),
- Op(">=", "/**\n * @return `true` if this value is greater than or equal to x, `false` otherwise\n */"))
+ Op("==", "/**\n * Returns `true` if this value is equal to x, `false` otherwise.\n */"),
+ Op("!=", "/**\n * Returns `true` if this value is not equal to x, `false` otherwise.\n */"),
+ Op("<", "/**\n * Returns `true` if this value is less than x, `false` otherwise.\n */"),
+ Op("<=", "/**\n * Returns `true` if this value is less than or equal to x, `false` otherwise.\n */"),
+ Op(">", "/**\n * Returns `true` if this value is greater than x, `false` otherwise.\n */"),
+ Op(">=", "/**\n * Returns `true` if this value is greater than or equal to x, `false` otherwise.\n */"))
def otherOps = List(
- Op("+", "/**\n * @return the sum of this value and x\n */"),
- Op("-", "/**\n * @return the difference of this value and x\n */"),
- Op("*", "/**\n * @return the product of this value and x\n */"),
- Op("/", "/**\n * @return the quotient of this value and x\n */"),
- Op("%", "/**\n * @return the remainder of the division of this value by x\n */"))
+ Op("+", "/**\n * Returns the sum of this value and `x`.\n */"),
+ Op("-", "/**\n * Returns the difference of this value and `x`.\n */"),
+ Op("*", "/**\n * Returns the product of this value and `x`.\n */"),
+ Op("/", "/**\n * Returns the quotient of this value and `x`.\n */"),
+ Op("%", "/**\n * Returns the remainder of the division of this value by `x`.\n */"))
// Given two numeric value types S and T , the operation type of S and T is defined as follows:
// If both S and T are subrange types then the operation type of S and T is Int.
@@ -160,7 +183,7 @@ trait AnyValReps {
}
def objectLines = {
val comp = if (isCardinal) cardinalCompanion else floatingCompanion
- (comp + allCompanions).trim.lines map interpolate toList
+ (comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ implicitCoercions map interpolate
}
/** Makes a set of binary operations based on the given set of ops, args, and resultFn.
@@ -224,8 +247,9 @@ trait AnyValReps {
def classDoc = interpolate(classDocTemplate)
def objectDoc = ""
def mkImports = ""
- def mkClass = assemble("final class", "AnyVal", classLines) + "\n"
- def mkObject = assemble("object", "AnyValCompanion", objectLines) + "\n"
+
+ def mkClass = assemble("final class " + name + " private extends AnyVal", classLines)
+ def mkObject = assemble("object " + name + " extends AnyValCompanion", objectLines)
def make() = List[String](
headerTemplate,
mkImports,
@@ -235,11 +259,10 @@ trait AnyValReps {
mkObject
) mkString ""
- def assemble(what: String, parent: String, lines: List[String]): String = {
- val decl = "%s %s extends %s ".format(what, name, parent)
- val body = if (lines.isEmpty) "{ }\n\n" else lines map indent mkString ("{\n", "\n", "\n}\n")
+ def assemble(decl: String, lines: List[String]): String = {
+ val body = if (lines.isEmpty) " { }\n\n" else lines map indent mkString (" {\n", "\n", "\n}\n")
- decl + body
+ decl + body + "\n"
}
override def toString = name
}
@@ -296,6 +319,8 @@ def unbox(x: java.lang.Object): @name@ = @unboxImpl@
override def toString = "object scala.@name@"
"""
+ def nonUnitCompanions = "" // todo
+
def cardinalCompanion = """
/** The smallest value representable as a @name@.
*/
@@ -432,7 +457,7 @@ def ^(x: Boolean): Boolean = sys.error("stub")
override def getClass(): Class[Boolean] = sys.error("stub")
""".trim.lines.toList
- def objectLines = interpolate(allCompanions).lines.toList
+ def objectLines = interpolate(allCompanions + "\n" + nonUnitCompanions).lines.toList
}
object U extends AnyValRep("Unit", None, "void") {
override def classDoc = """
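
For a concrete picture of what companionCoercions generates: after the @name@/@javaequiv@ placeholders are interpolated, the `I => companionCoercions(L, F, D)` case produces the language-mandated widening conversions in the Int companion object. Roughly, the emitted members look like the sketch below (wrapped in an object here only so it compiles on its own):

    import scala.language.implicitConversions

    object IntCoercionsPreview {
      // What the template yields for Int once @name@ -> Int and @javaequiv@ -> int.
      implicit def int2long(x: Int): Long     = x.toLong
      implicit def int2float(x: Int): Float   = x.toFloat
      implicit def int2double(x: Int): Double = x.toDouble

      def main(args: Array[String]): Unit = {
        val widened: Long = int2long(42)
        println(widened) // prints: 42
      }
    }
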
diff --git a/src/compiler/scala/tools/cmd/program/Scmp.scala b/src/compiler/scala/tools/cmd/program/Scmp.scala
deleted file mode 100644
index 6f39c29a77..0000000000
--- a/src/compiler/scala/tools/cmd/program/Scmp.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package cmd
-package program
-
-import nsc.io._
-import scala.sys.process._
-
-object Scmp {
- private val scmpUsage = """
- |Usage: scmp [options] <cmd line>
- |Example: scmp --p1 '-no-specialization -Ydebug' scalac src/library/scala/Function1.scala
- |
- |Note: the command line must start with a path to scalac.
- |""".stripMargin
- private val scmpOptions = List(
- "p1" -> "options for the first run only",
- "p2" -> "options for the second run only"
- )
- private val scmpInfo = Simple.scalaProgramInfo("scmp", scmpUsage)
- lazy val ScmpSpec = Simple(scmpInfo, Nil, scmpOptions, x => returning(x)(_.onlyKnownOptions = false))
-
- def main(args0: Array[String]): Unit = {
- if (args0.isEmpty)
- return println(scmpUsage)
-
- val runner = ScmpSpec instance args0
- import runner._
-
- val p1args = parsed.getOrElse("--p1", "")
- val p2args = parsed.getOrElse("--p2", "")
-
- if (p1args.isEmpty && p2args.isEmpty)
- return println("At least one of --p1 and --p2 must be given.")
- if (residualArgs.isEmpty)
- return println("There is no command to run.")
-
- def createCmd(extras: String) =
- fromArgs(residualArgs.patch(1, toArgs(extras), 0))
-
- def runCmd(cmd: String) = {
- val tmpfile = File.makeTemp()
- (cmd #> tmpfile.jfile !)
- tmpfile
- }
-
- val cmds = List(p1args, p2args) map createCmd
- println(cmds.mkString("Running command lines:\n ", "\n ", ""))
-
- val files = cmds map runCmd map (_.path)
- val diff = "diff %s %s".format(files: _*).!!
-
- if (diff.isEmpty) println("No differences.")
- else println(diff)
- }
-}
diff --git a/src/compiler/scala/tools/cmd/program/Simple.scala b/src/compiler/scala/tools/cmd/program/Simple.scala
deleted file mode 100644
index f2095d64b8..0000000000
--- a/src/compiler/scala/tools/cmd/program/Simple.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package cmd
-package program
-
-import Spec.Info
-
-/** A boilerplate reducer for commands with simple requirements. For examples,
- * see Scmp and Tokens in this package.
- */
-object Simple {
- type CommandLineTransform = SimpleCommandLine => SimpleCommandLine
-
- abstract class SimpleSpec(val programInfo: Info) extends Spec with Meta.StdOpts with Interpolation
-
- trait SimpleInstance extends SimpleSpec with Instance {
- val parsed: CommandLine
- }
-
- class SimpleReference(
- programInfo: Info,
- unary: List[(String, String)] = Nil,
- binary: List[(String, String)] = Nil,
- postCreation: CommandLineTransform = null
- ) extends SimpleSpec(programInfo) with Reference {
-
- spec =>
-
- if (programInfo.usage != "") help(programInfo.usage)
- unary foreach { case (option, help) => option / help --? }
- binary foreach { case (option, help) => option / help --| }
-
- type ThisCommandLine = SimpleCommandLine
-
- def creator(args: List[String]) = new SimpleCommandLine(spec, args)
- def instance(args: Array[String]): SimpleInstance = instance(args.toList)
- def instance(args: List[String]): SimpleInstance =
- new {
- val parsed = spec(args: _*)
- } with SimpleSpec(programInfo) with SimpleInstance {
- lazy val referenceSpec = spec
- }
-
- lazy val referenceSpec = spec
- }
-
- def apply(info: Info, unary: List[(String, String)], binary: List[(String, String)], postCreation: CommandLineTransform): SimpleReference = {
- new SimpleReference(info, unary, binary, postCreation) {
- override def creator(args: List[String]) = {
- val obj = super.creator(args)
- if (postCreation == null) obj
- else postCreation(obj)
- }
- }
- }
-
- def scalaProgramInfo(name: String, help: String) =
- Spec.Info(name, help, "scala.tools.cmd.program." + name.capitalize)
-
- /** You can't override a def with a var unless a setter exists. We cleverly
- * sidestep this by mixing in a trait with dummy setters which will be
- * inaccessible due to the overriding var.
- */
- trait Ticket2338WontFixWorkaround {
- def enforceArity_=(x: Boolean): Unit = sys.error("unreachable")
- def onlyKnownOptions_=(x: Boolean): Unit = sys.error("unreachable")
- }
-
- /** Configurability simplicity achieved by turning defs into vars and letting
- * the spec creator apply a transformation. This way there's no need to create
- * custom subclasses of CommandLine.
- */
- class SimpleCommandLine(spec: Reference, args: List[String]) extends CommandLine(spec, args) with Ticket2338WontFixWorkaround {
- override var enforceArity: Boolean = true
- override var onlyKnownOptions: Boolean = true
- }
-}
diff --git a/src/compiler/scala/tools/cmd/program/Tokens.scala b/src/compiler/scala/tools/cmd/program/Tokens.scala
deleted file mode 100644
index be494bba3a..0000000000
--- a/src/compiler/scala/tools/cmd/program/Tokens.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package cmd
-package program
-
-import nsc._
-import scala.reflect.internal.Chars.char2uescape
-import io._
-import ast.parser.Tokens._
-
-/** Given paths on the command line, tokenizes any scala files found
- * and prints one token per line.
- */
-object Tokens {
- private val tokensUsage = "Usage: tokens [options] <path1 path2 ...>\n\nOptions:"
- private val tokensUnary = List(
- "verbose" -> "be more verbose",
- "freq" -> "combine token lists and sort by frequency",
- "stats" -> "output some stats"
- )
- private val tokensBinary = List(
- "sliding" -> "print tokens in groups of given size"
- )
- private val tokensInfo = Simple.scalaProgramInfo("tokens", tokensUsage)
- private lazy val TokensSpec = Simple(tokensInfo, tokensUnary, tokensBinary, null)
-
- def sanitize(x: Any): String = sanitize(x.toString)
- def sanitize(str: String): String = str flatMap (x => if (x.isControl) char2uescape(x) else x.toString)
-
- def main(args0: Array[String]): Unit = {
- if (args0.isEmpty)
- return println(TokensSpec.helpMsg)
-
- val runner = TokensSpec instance args0
- import runner._
-
- val files = (residualArgs flatMap walk).distinct
- if (parsed isSet "--verbose")
- println("Tokenizing: " + (files map (_.name) mkString " "))
-
- if (parsed isSet "--stats")
- println("Stats not yet implemented.")
-
- def raw = files flatMap fromScalaSource
- def tokens: List[Any] =
- if (parsed isSet "--sliding") raw sliding parsed("--sliding").toInt map (_ map sanitize mkString " ") toList
- else raw
-
- def output =
- if (parsed isSet "--freq")
- (tokens groupBy (x => x) mapValues (_.length)).toList sortBy (-_._2) map (x => x._2 + " " + x._1)
- else
- tokens
-
- output foreach println
- }
-
- def fromPaths(paths: String*): List[Any] =
- (paths.toList flatMap walk).distinct flatMap fromScalaSource
-
- /** Given a path, returns all .scala files underneath it.
- */
- private def walk(arg: String): List[File] = {
- def traverse = Path(arg) ifDirectory (_.deepList()) getOrElse Iterator(File(arg))
-
- Path onlyFiles traverse filter (_ hasExtension "scala") toList
- }
-
- def fromScalaString(code: String): List[Any] = {
- val f = File.makeTemp("tokens")
- f writeAll code
- fromScalaSource(f)
- }
-
- /** Tokenizes a single scala file.
- */
- def fromScalaSource(file: Path): List[Any] = fromScalaSource(file.path)
- def fromScalaSource(file: String): List[Any] = {
- val global = new Global(new Settings())
- import global._
- import syntaxAnalyzer.{ UnitScanner, token2string }
-
- val in = new UnitScanner(new CompilationUnit(getSourceFile(file)))
- in.init()
-
- Iterator continually {
- val token = in.token match {
- case IDENTIFIER | BACKQUOTED_IDENT => in.name
- case CHARLIT | INTLIT | LONGLIT => in.intVal
- case DOUBLELIT | FLOATLIT => in.floatVal
- case STRINGLIT => "\"" + in.strVal + "\""
- case SEMI | NEWLINE => ";"
- case NEWLINES => ";;"
- case COMMA => ","
- case EOF => null
- case x => token2string(x)
- }
- in.nextToken()
- token
- } takeWhile (_ != null) toList
- }
-}
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index d6f57801e7..2f1e15168a 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
-import util.{ FreshNameCreator, Position, NoPosition, SourceFile, NoSourceFile }
+import util.{ FreshNameCreator, Position, NoPosition, BatchSourceFile, SourceFile, NoSourceFile }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
@@ -61,6 +61,9 @@ trait CompilationUnits { self: Global =>
/** things to check at end of compilation unit */
val toCheck = new ListBuffer[() => Unit]
+ /** The features that were already checked for this unit */
+ var checkedFeatures = Set[Symbol]()
+
def position(pos: Int) = source.position(pos)
/** The position of a targeted type check
@@ -85,12 +88,13 @@ trait CompilationUnits { self: Global =>
reporter.warning(pos, msg)
def deprecationWarning(pos: Position, msg: String) =
- if (opt.deprecation) warning(pos, msg)
- else currentRun.deprecationWarnings ::= ((pos, msg))
+ currentRun.deprecationWarnings0.warn(pos, msg)
def uncheckedWarning(pos: Position, msg: String) =
- if (opt.unchecked) warning(pos, msg)
- else currentRun.uncheckedWarnings ::= ((pos, msg))
+ currentRun.uncheckedWarnings0.warn(pos, msg)
+
+ def inlinerWarning(pos: Position, msg: String) =
+ currentRun.inlinerWarnings.warn(pos, msg)
def incompleteInputError(pos: Position, msg:String) =
reporter.incompleteInputError(pos, msg)
@@ -104,10 +108,14 @@ trait CompilationUnits { self: Global =>
override def toString() = source.toString()
def clear() {
- fresh = null
- body = null
+ fresh = new FreshNameCreator.Default
+ body = EmptyTree
depends.clear()
defined.clear()
+ synthetics.clear()
+ toCheck.clear()
+ checkedFeatures = Set()
+ icode.clear()
}
}
}
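
The deprecationWarning/uncheckedWarning changes above route every message through a per-run accumulator on currentRun (deprecationWarnings0, uncheckedWarnings0, inlinerWarnings) instead of checking the corresponding option at each call site. A hedged sketch of such a conditional-warning collector (the summary wording and the Pos type are illustrative, not the compiler's exact behaviour):

    object ConditionalWarningSketch {
      case class Pos(line: Int)

      // Emit immediately when the enabling flag is on; otherwise collect the warnings
      // and print a one-line summary suggesting the flag at the end of the run.
      class ConditionalWarning(what: String, enabled: Boolean, flagName: String) {
        private var warnings: List[(Pos, String)] = Nil
        def warn(pos: Pos, msg: String): Unit =
          if (enabled) println(s"warning: line ${pos.line}: $msg")
          else warnings ::= (pos -> msg)
        def summarize(): Unit =
          if (!enabled && warnings.nonEmpty)
            println(s"warning: there were ${warnings.size} $what warning(s); re-run with $flagName for details")
      }

      def main(args: Array[String]): Unit = {
        val deprecations = new ConditionalWarning("deprecation", enabled = false, "-deprecation")
        deprecations.warn(Pos(10), "method foo is deprecated")
        deprecations.warn(Pos(42), "class Bar is deprecated")
        deprecations.summarize()
      }
    }
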
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index a0c39f71fb..a4a8e1fd11 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -93,7 +93,7 @@ class CompileSocket extends CompileOutputCommon {
/** Start a new server. */
private def startNewServer(vmArgs: String) = {
- val cmd = serverCommand(vmArgs split " " toSeq)
+ val cmd = serverCommand((vmArgs split " ").toSeq)
info("[Executing command: %s]" format cmd.mkString(" "))
// Hiding inadequate daemonized implementation from public API for now
@@ -206,7 +206,7 @@ class CompileSocket extends CompileOutputCommon {
Thread sleep 100
ff.length
}
- if (Iterator continually check take 50 find (_ > 0) isEmpty) {
+ if ((Iterator continually check take 50 find (_ > 0)).isEmpty) {
ff.delete()
fatal("Unable to establish connection to server.")
}
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index 54bc218912..4c8a27083a 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -47,7 +47,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
/** Creates a help message for a subset of options based on cond */
def createUsageMsg(cond: Setting => Boolean): String = {
val baseList = (settings.visibleSettings filter cond).toList sortBy (_.name)
- val width = baseList map (_.helpSyntax.length) max
+ val width = (baseList map (_.helpSyntax.length)).max
def format(s: String) = ("%-" + width + "s") format s
def helpStr(s: Setting) = {
val str = format(s.helpSyntax) + " " + s.helpDescription
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index b9e9a14adf..f91cb854c6 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -8,7 +8,7 @@ package scala.tools.nsc
import scala.tools.util.PathResolver
class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
- def classpathURLs = new PathResolver(this) asURLs
+ def classpathURLs = new PathResolver(this).asURLs
val howtorun =
ChoiceSetting(
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index b7d7f5d16f..271dca3157 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -8,15 +8,13 @@ package scala.tools.nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
import compat.Platform.currentTime
-
-import scala.tools.util.{ Profiling, PathResolver }
+import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
-import reporters.{ Reporter => NscReporter, ConsoleReporter }
-import util.{ NoPosition, Exceptional, ClassPath, SourceFile, NoSourceFile, Statistics, StatisticsInfo, BatchSourceFile, ScriptSourceFile, ShowPickled, ScalaClassLoader, returning }
+import reporters.{ Reporter, ConsoleReporter }
+import util.{ NoPosition, Exceptional, ClassPath, MergedClassPath, SourceFile, NoSourceFile, Statistics, StatisticsInfo, BatchSourceFile, ScriptSourceFile, ScalaClassLoader, returning }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import settings.{ AestheticSettings }
-
import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
import symtab.classfile.Pickler
import dependencies.DependencyAnalysis
@@ -25,24 +23,25 @@ import ast._
import ast.parser._
import typechecker._
import transform._
-
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
-import backend.jvm.GenJVM
+import backend.jvm.{GenJVM, GenASM}
import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
-
-class Global(var currentSettings: Settings, var reporter: NscReporter) extends SymbolTable
- with ClassLoaders
- with ToolBoxes
- with CompilationUnits
- with Plugins
- with PhaseAssembly
- with Trees
- with FreeVars
- with TreePrinters
- with DocComments
- with Positions {
+import language.postfixOps
+import reflect.internal.StdAttachments
+
+class Global(var currentSettings: Settings, var reporter: Reporter) extends SymbolTable
+ with ClassLoaders
+ with ToolBoxes
+ with CompilationUnits
+ with Plugins
+ with PhaseAssembly
+ with Trees
+ with FreeVars
+ with TreePrinters
+ with DocComments
+ with Positions {
override def settings = currentSettings
@@ -50,7 +49,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
// alternate constructors ------------------------------------------
- def this(reporter: NscReporter) =
+ def this(reporter: Reporter) =
this(new Settings(err => reporter.error(null, err)), reporter)
def this(settings: Settings) =
@@ -73,7 +72,11 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
if (forMSIL) new { val global: Global.this.type = Global.this } with MSILPlatform
else new { val global: Global.this.type = Global.this } with JavaPlatform
- def classPath: ClassPath[platform.BinaryRepr] = platform.classPath
+ type PlatformClassPath = ClassPath[platform.BinaryRepr]
+ type OptClassPath = Option[PlatformClassPath]
+
+ def classPath: PlatformClassPath = platform.classPath
+
def rootLoader: LazyType = platform.rootLoader
// sub-components --------------------------------------------------
@@ -131,7 +134,38 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
object nodePrinters extends {
val global: Global.this.type = Global.this
} with NodePrinters {
+ var lastPrintedPhase: Phase = NoPhase
+ var lastPrintedSource: String = ""
infolevel = InfoLevel.Verbose
+
+ def showUnit(unit: CompilationUnit) {
+ print(" // " + unit.source)
+ if (unit.body == null) println(": tree is null")
+ else {
+ val source = util.stringFromWriter(w => newTreePrinter(w) print unit.body)
+
+ // treePrinter show unit.body
+ if (lastPrintedSource == source)
+ println(": tree is unchanged since " + lastPrintedPhase)
+ else {
+ lastPrintedPhase = phase.prev // since we're running inside "afterPhase"
+ lastPrintedSource = source
+ println("")
+ println(source)
+ println("")
+ }
+ }
+ }
+ }
+
+ def withInfoLevel[T](infolevel: nodePrinters.InfoLevel.Value)(op: => T) = {
+ val saved = nodePrinters.infolevel
+ try {
+ nodePrinters.infolevel = infolevel
+ op
+ } finally {
+ nodePrinters.infolevel = saved
+ }
}
/** Representing ASTs as graphs */
@@ -153,7 +187,9 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
/** Register new context; called for every created context
*/
- def registerContext(c: analyzer.Context) {}
+ def registerContext(c: analyzer.Context) {
+ lastSeenContext = c
+ }
/** Register top level class (called on entering the class)
*/
@@ -234,9 +270,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
}
def logThrowable(t: Throwable): Unit = globalError(throwableAsString(t))
- def throwableAsString(t: Throwable): String =
- if (opt.richExes) Exceptional(t).force().context()
- else util.stackTraceString(t)
+ override def throwableAsString(t: Throwable) = util.stackTraceString(t)
// ------------ File interface -----------------------------------------
@@ -309,8 +343,8 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
def checkPhase = wasActive(settings.check)
def logPhase = isActive(settings.log)
- // Write *.icode files the setting was given.
- def writeICode = settings.writeICode.isSetByUser && isActive(settings.writeICode)
+ // Write *.icode files right after GenICode when -Xprint-icode was given.
+ def writeICodeAtICode = settings.writeICode.isSetByUser && isActive(settings.writeICode)
// showing/printing things
def browsePhase = isActive(settings.browse)
@@ -324,19 +358,6 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
def showTrees = settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value
val showClass = optSetting[String](settings.Xshowcls) map (x => splitClassAndPhase(x, false))
val showObject = optSetting[String](settings.Xshowobj) map (x => splitClassAndPhase(x, true))
-
- // profiling
- def profCPUPhase = isActive(settings.Yprofile) && !profileAll
- def profileAll = settings.Yprofile.doAllPhases
- def profileAny = !settings.Yprofile.isDefault || !settings.YprofileMem.isDefault
- def profileClass = settings.YprofileClass.value
- def profileMem = settings.YprofileMem.value
-
- // shortish-term property based options
- def timings = (sys.props contains "scala.timings")
- def inferDebug = (sys.props contains "scalac.debug.infer") || settings.Yinferdebug.value
- def typerDebug = (sys.props contains "scalac.debug.typer") || settings.Ytyperdebug.value
- def lubDebug = (sys.props contains "scalac.debug.lub")
}
// The current division between scala.reflect.* and scala.tools.nsc.* is pretty
@@ -345,9 +366,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
// where I need it, and then an override in Global with the setting.
override protected val etaExpandKeepsStar = settings.etaExpandKeepsStar.value
// Here comes another one...
- override protected val enableTypeVarExperimentals = (
- settings.Xexperimental.value || settings.YvirtPatmat.value
- )
+ override protected val enableTypeVarExperimentals = settings.Xexperimental.value
// True if -Xscript has been set, indicating a script run.
def isScriptRun = opt.script.isDefined
@@ -380,14 +399,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
def run() {
echoPhaseSummary(this)
- currentRun.units foreach { unit =>
- if (opt.timings) {
- val start = System.nanoTime
- try applyPhase(unit)
- finally unitTimings(unit) += (System.nanoTime - start)
- }
- else applyPhase(unit)
- }
+ currentRun.units foreach applyPhase
}
def apply(unit: CompilationUnit): Unit
@@ -431,8 +443,8 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
}
/** Switch to turn on detailed type logs */
- var printTypings = opt.typerDebug
- var printInfers = opt.inferDebug
+ var printTypings = settings.Ytyperdebug.value
+ var printInfers = settings.Yinferdebug.value
// phaseName = "parser"
object syntaxAnalyzer extends {
@@ -453,10 +465,19 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
val global: Global.this.type = Global.this
} with Analyzer
+ // phaseName = "patmat"
+ object patmat extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("typer")
+ // patmat doesn't need to be right after typer, as long as we run before superaccessors
+ // (sbt does need to run right after typer, so don't conflict)
+ val runsRightAfter = None
+ } with PatternMatching
+
// phaseName = "superaccessors"
object superAccessors extends {
val global: Global.this.type = Global.this
- val runsAfter = List("typer")
+ val runsAfter = List("patmat")
val runsRightAfter = None
} with SuperAccessors
@@ -525,7 +546,6 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
// phaseName = "lazyvals"
object lazyVals extends {
- final val FLAGS_PER_WORD = 32
val global: Global.this.type = Global.this
val runsAfter = List("erasure")
val runsRightAfter = None
@@ -601,13 +621,20 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
val runsRightAfter = None
} with DeadCodeElimination
- // phaseName = "jvm"
+ // phaseName = "jvm", FJBG-based version
object genJVM extends {
val global: Global.this.type = Global.this
val runsAfter = List("dce")
val runsRightAfter = None
} with GenJVM
+ // phaseName = "jvm", ASM-based version
+ object genASM extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("dce")
+ val runsRightAfter = None
+ } with GenASM
+
// This phase is optional: only added if settings.make option is given.
// phaseName = "dependencyAnalysis"
object dependencyAnalysis extends {
@@ -673,6 +700,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
analyzer.namerFactory -> "resolve names, attach symbols to named trees",
analyzer.packageObjects -> "load package objects",
analyzer.typerFactory -> "the meat and potatoes: type the trees",
+ patmat -> "translate match expressions",
superAccessors -> "add super accessors in traits and nested classes",
extensionMethods -> "add extension methods for inline classes",
pickler -> "serialize symbol tables",
@@ -724,20 +752,6 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
protected lazy val phasesSet = new mutable.HashSet[SubComponent]
protected lazy val phasesDescMap = new mutable.HashMap[SubComponent, String] withDefaultValue ""
private lazy val phaseTimings = new Phases.TimingModel // tracking phase stats
- private lazy val unitTimings = mutable.HashMap[CompilationUnit, Long]() withDefaultValue 0L // tracking time spent per unit
- private def unitTimingsFormatted(): String = {
- def toMillis(nanos: Long) = "%.3f" format nanos / 1000000d
-
- val formatter = new util.TableDef[(String, String)] {
- >> ("ms" -> (_._1)) >+ " "
- << ("path" -> (_._2))
- }
- "" + (
- new formatter.Table(unitTimings.toList sortBy (-_._2) map {
- case (unit, nanos) => (toMillis(nanos), unit.source.path)
- })
- )
- }
protected def addToPhasesSet(sub: SubComponent, descr: String) {
phasesSet += sub
@@ -830,6 +844,164 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
def printAfterEachPhase[T](op: => T): Unit =
describeAfterEachPhase(op) foreach (m => println(" " + m))
+ // ------------ Invalidations ---------------------------------
+
+ /** Is given package class a system package class that cannot be invalidated?
+ */
+ private def isSystemPackageClass(pkg: Symbol) =
+ pkg == definitions.RootClass ||
+ pkg == definitions.ScalaPackageClass || {
+ val pkgname = pkg.fullName
+ (pkgname startsWith "scala.") && !(pkgname startsWith "scala.tools")
+ }
+
+ /** Invalidates packages that contain classes defined in a classpath entry, and
+ * rescans that entry.
+   *  @param paths Fully qualified names that refer to directories or jar files that are
+   *         entries on the classpath.
+   *  First, causes each classpath entry referred to by `paths` to be rescanned, so that
+   *  any new files, deleted files, or changes in subpackages are picked up.
+   *  Second, invalidates any packages for which one of the following conditions is met:
+   *
+   *  - during the last compilation run, the classpath entry contained classfiles
+   *    that represent a member in the package
+ * - the classpath entry now contains classfiles
+ * that represent a member in the package
+ * - the set of subpackages has changed.
+ *
+ * The invalidated packages are reset in their entirety; all member classes and member packages
+ * are re-accessed using the new classpath.
+   *  Not invalidated are system packages that the compiler needs to access as part
+   *  of the standard definitions. The criterion for what counts as a system package is currently:
+ * any package rooted in "scala", with the exception of packages rooted in "scala.tools".
+ * This can be refined later.
+ * @return A pair consisting of
+ * - a list of invalidated packages
+   *          - a list of packages that should have been invalidated but were not because
+ * they are system packages.
+ */
+ def invalidateClassPathEntries(paths: String*): (List[ClassSymbol], List[ClassSymbol]) = {
+ val invalidated, failed = new mutable.ListBuffer[ClassSymbol]
+ classPath match {
+ case cp: MergedClassPath[_] =>
+ def assoc(path: String): List[(PlatformClassPath, PlatformClassPath)] = {
+ val dir = AbstractFile getDirectory path
+ val canonical = dir.canonicalPath
+ def matchesCanonical(e: ClassPath[_]) = e.origin match {
+ case Some(opath) =>
+ (AbstractFile getDirectory opath).canonicalPath == canonical
+ case None =>
+ false
+ }
+ cp.entries find matchesCanonical match {
+ case Some(oldEntry) =>
+ List(oldEntry -> cp.context.newClassPath(dir))
+ case None =>
+ println(s"canonical = $canonical, origins = ${cp.entries map (_.origin)}")
+ error(s"cannot invalidate: no entry named $path in classpath $classPath")
+ List()
+ }
+ }
+ val subst = Map(paths flatMap assoc: _*)
+ if (subst.nonEmpty) {
+ platform updateClassPath subst
+ informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
+ def mkClassPath(elems: Iterable[PlatformClassPath]): PlatformClassPath =
+ if (elems.size == 1) elems.head
+ else new MergedClassPath(elems, classPath.context)
+ val oldEntries = mkClassPath(subst.keys)
+ val newEntries = mkClassPath(subst.values)
+ reSync(definitions.RootClass, Some(classPath), Some(oldEntries), Some(newEntries), invalidated, failed)
+ }
+ }
+ def show(msg: String, syms: collection.Traversable[Symbol]) =
+ if (syms.nonEmpty)
+ informProgress(s"$msg: ${syms map (_.fullName) mkString ","}")
+ show("invalidated packages", invalidated)
+ show("could not invalidate system packages", failed)
+ (invalidated.toList, failed.toList)
+ }
+
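
A minimal usage sketch of the new entry point, assuming a resident compiler driving it by hand; the Settings/ConsoleReporter wiring and the "lib/ext" path are placeholders for whatever entry is already on the classpath:

    import scala.tools.nsc.{ Global, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter

    val settings = new Settings()
    val compiler = new Global(settings, new ConsoleReporter(settings))

    // After classfiles under lib/ext have been added, removed, or changed on disk,
    // rescan just that entry instead of throwing the whole symbol table away.
    val (invalidated, keptSystemPkgs) = compiler.invalidateClassPathEntries("lib/ext")
    invalidated    foreach (pkg => println("rescanned package: " + pkg.fullName))
    keptSystemPkgs foreach (pkg => println("system package left alone: " + pkg.fullName))
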
+ /** Re-syncs symbol table with classpath
+ * @param root The root symbol to be resynced (a package class)
+ * @param allEntries Optionally, the corresponding package in the complete current classPath
+ * @param oldEntries Optionally, the corresponding package in the old classPath entries
+ * @param newEntries Optionally, the corresponding package in the new classPath entries
+   *  @param invalidated A ListBuffer collecting the invalidated package classes
+   *  @param failed A ListBuffer collecting system package classes which could not be invalidated
+ * The resyncing strategy is determined by the absence or presence of classes and packages.
+ * If either oldEntries or newEntries contains classes, root is invalidated, provided a corresponding package
+ * exists in allEntries, or otherwise is removed.
+ * Otherwise, the action is determined by the following matrix, with columns:
+ *
+ * old new all sym action
+ * + + + + recurse into all child packages of old ++ new
+ * + - + + invalidate root
+ * + - - + remove root from its scope
+ * - + + + invalidate root
+ * - + + - create and enter root
+ * - - * * no action
+ *
+   *  Here, old, new, all mean classpaths and sym means the symbol table. + is presence of an
+ * entry in its column, - is absence, * is don't care.
+ *
+ * Note that new <= all and old <= sym, so the matrix above covers all possibilities.
+ */
+ private def reSync(root: ClassSymbol,
+ allEntries: OptClassPath, oldEntries: OptClassPath, newEntries: OptClassPath,
+ invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
+ ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
+
+ val getName: ClassPath[platform.BinaryRepr] => String = (_.name)
+ def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
+ def invalidateOrRemove(root: ClassSymbol) = {
+ allEntries match {
+ case Some(cp) => root setInfo new loaders.PackageLoader(cp)
+ case None => root.owner.info.decls unlink root.sourceModule
+ }
+ invalidated += root
+ }
+ def packageNames(cp: PlatformClassPath): Set[String] = cp.packages.toSet map getName
+ def subPackage(cp: PlatformClassPath, name: String): OptClassPath =
+ cp.packages find (cp1 => getName(cp1) == name)
+
+ val classesFound = hasClasses(oldEntries) || hasClasses(newEntries)
+ if (classesFound && !isSystemPackageClass(root)) {
+ invalidateOrRemove(root)
+ } else {
+ if (classesFound) {
+ if (root.isRoot) invalidateOrRemove(definitions.EmptyPackageClass)
+ else failed += root
+ }
+ (oldEntries, newEntries) match {
+ case (Some(oldcp) , Some(newcp)) =>
+ for (pstr <- packageNames(oldcp) ++ packageNames(newcp)) {
+ val pname = newTermName(pstr)
+ val pkg = (root.info decl pname) orElse {
+ // package was created by external agent, create symbol to track it
+ assert(!subPackage(oldcp, pstr).isDefined)
+ loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
+ }
+ reSync(
+ pkg.moduleClass.asInstanceOf[ClassSymbol],
+ subPackage(allEntries.get, pstr), subPackage(oldcp, pstr), subPackage(newcp, pstr),
+ invalidated, failed)
+ }
+ case (Some(oldcp), None) =>
+ invalidateOrRemove(root)
+ case (None, Some(newcp)) =>
+ invalidateOrRemove(root)
+ case (None, None) =>
+ }
+ }
+ }
+
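
Read as a pure decision function, the matrix in the comment above amounts to the following standalone restatement (illustration only; `Action` and `decide` are hypothetical names, not part of Global):

    sealed trait Action
    case object Recurse    extends Action  // recurse into all child packages of old ++ new
    case object Invalidate extends Action  // give root a fresh PackageLoader over allEntries
    case object Remove     extends Action  // unlink root from its owner's scope
    case object Create     extends Action  // create and enter root, then load from allEntries
    case object NoAction   extends Action

    def decide(old: Boolean, nu: Boolean, all: Boolean, sym: Boolean): Action =
      (old, nu, all, sym) match {
        case (true,  true,  true,  true ) => Recurse
        case (true,  false, true,  true ) => Invalidate
        case (true,  false, false, true ) => Remove
        case (false, true,  true,  true ) => Invalidate
        case (false, true,  true,  false) => Create
        case (false, false, _,     _    ) => NoAction
        case _                            => NoAction  // cannot occur in practice: new <= all, old <= sym
      }
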
+ /** Invalidate contents of setting -Yinvalidate */
+ def doInvalidation() = settings.Yinvalidate.value match {
+ case "" =>
+ case entry => invalidateClassPathEntries(entry)
+ }
+
// ----------- Runs ---------------------------------------
private var curRun: Run = null
@@ -849,6 +1021,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
*
* @param sym A class symbol, object symbol, package, or package class.
*/
+ @deprecated("use invalidateClassPathEntries instead")
def clearOnNextRun(sym: Symbol) = false
/* To try out clearOnNext run on the scala.tools.nsc project itself
* replace `false` above with the following code
@@ -860,7 +1033,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
}
}}
- * Then, fsc -Xexperimental clears the nsc porject between successive runs of `fsc`.
+ * Then, fsc -Xexperimental clears the nsc project between successive runs of `fsc`.
*/
/** Remove the current run when not needed anymore. Used by the build
@@ -871,6 +1044,10 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
curRun = null
}
+ object typeDeconstruct extends {
+ val global: Global.this.type = Global.this
+ } with interpreter.StructuredTypeStrings
+
/** There are common error conditions where when the exception hits
* here, currentRun.currentUnit is null. This robs us of the knowledge
* of what file was being compiled when it broke. Since I really
@@ -878,6 +1055,11 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
*/
private var lastSeenSourceFile: SourceFile = NoSourceFile
+ /** Let's share a lot more about why we crash all over the place.
+ * People will be very grateful.
+ */
+ private var lastSeenContext: analyzer.Context = null
+
/** The currently active run
*/
def currentRun: Run = curRun
@@ -906,25 +1088,64 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
@inline final def beforeTyper[T](op: => T): T = beforePhase(currentRun.typerPhase)(op)
@inline final def beforeUncurry[T](op: => T): T = beforePhase(currentRun.uncurryPhase)(op)
+ def explainContext(c: analyzer.Context): String = (
+ if (c == null) "" else (
+ """| context owners: %s
+ |
+ |Enclosing block or template:
+ |%s""".format(
+ c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "),
+ nodePrinters.nodeToString(c.enclClassOrMethod.tree)
+ )
+ )
+ )
+ // Owners up to and including the first package class.
+ private def ownerChainString(sym: Symbol): String = (
+ if (sym == null) ""
+ else sym.ownerChain.span(!_.isPackageClass) match {
+ case (xs, pkg :: _) => (xs :+ pkg) mkString " -> "
+ case _ => sym.ownerChain mkString " -> " // unlikely
+ }
+ )
+ private def formatExplain(pairs: (String, Any)*): String = (
+ pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n"
+ )
+
+ def explainTree(t: Tree): String = formatExplain(
+ )
+
/** Don't want to introduce new errors trying to report errors,
* so swallow exceptions.
*/
override def supplementErrorMessage(errorMessage: String): String = try {
- """|
- | while compiling: %s
- | current phase: %s
- | library version: %s
- | compiler version: %s
- | reconstructed args: %s
- |
- |%s""".stripMargin.format(
- currentSource.path,
- phase,
- scala.util.Properties.versionString,
- Properties.versionString,
- settings.recreateArgs.mkString(" "),
- if (opt.debug) "Current unit body:\n" + currentUnit.body + "\n" + errorMessage else errorMessage
+ val tree = analyzer.lastTreeToTyper
+ val sym = tree.symbol
+ val tpe = tree.tpe
+ val enclosing = lastSeenContext.enclClassOrMethod.tree
+
+ val info1 = formatExplain(
+ "while compiling" -> currentSource.path,
+ "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, atPhase=%s".format(globalPhase, phase) ),
+ "library version" -> scala.util.Properties.versionString,
+ "compiler version" -> Properties.versionString,
+ "reconstructed args" -> settings.recreateArgs.mkString(" ")
+ )
+ val info2 = formatExplain(
+ "last tree to typer" -> tree.summaryString,
+ "symbol" -> Option(sym).fold("null")(_.debugLocationString),
+ "symbol definition" -> Option(sym).fold("null")(_.defString),
+ "tpe" -> tpe,
+ "symbol owners" -> ownerChainString(sym),
+ "context owners" -> ownerChainString(lastSeenContext.owner)
+ )
+ val info3: List[String] = (
+ ( List("== Enclosing template or block ==", nodePrinters.nodeToString(enclosing).trim) )
+ ++ ( if (tpe eq null) Nil else List("== Expanded type of tree ==", typeDeconstruct.show(tpe)) )
+ ++ ( if (!opt.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) )
+ ++ ( List(errorMessage) )
)
+
+ ("\n" + info1) :: info2 :: info3 mkString "\n\n"
}
catch { case x: Exception => errorMessage }
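
For reference, formatExplain right-aligns each key in a 20-character field and silently drops pairs whose value is null, so a hypothetical call such as

    formatExplain(
      "while compiling" -> "Foo.scala",
      "during phase"    -> "typer",
      "tpe"             -> null          // dropped: null values are filtered out
    )

renders as two aligned lines:

         while compiling: Foo.scala
            during phase: typer
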
@@ -938,6 +1159,22 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]")
}
+ /** Collects for certain classes of warnings during this run. */
+ class ConditionalWarning(what: String, option: Settings#BooleanSetting) {
+ val warnings = new mutable.ListBuffer[(Position, String)]
+ def warn(pos: Position, msg: String) =
+ if (option.value) reporter.warning(pos, msg)
+ else warnings += ((pos, msg))
+ def summarize() =
+ if (option.isDefault && warnings.nonEmpty)
+ reporter.warning(NoPosition, "there were %d %s warnings; re-run with %s for details".format(warnings.size, what, option.name))
+ }
+
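
A small sketch of the intended life cycle, assuming it runs inside Global (the position and message below are placeholders):

    val depWarnings = new ConditionalWarning("deprecation", settings.deprecation)

    // During the run: report immediately if -deprecation is on, otherwise buffer.
    depWarnings.warn(NoPosition, "method f in class C is deprecated")

    // At the end of the run: if the option was left at its default and anything
    // was buffered, emit a single summary along the lines of
    //   there were 1 deprecation warnings; re-run with -deprecation for details
    depWarnings.summarize()
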
+ def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code))
+ def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code))
+ def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
+ def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
+
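
These helpers make it easy to parse an ad-hoc snippet without a file on disk; for example (a sketch, run against a configured Global instance):

    val code = "class C { def answer = 42 }"
    val unit = newCompilationUnit(code)      // wraps the string in a "<console>" BatchSourceFile
    val tree = newUnitParser(code).parse()   // full parse of the snippet into a Tree
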
/** A Run is a single execution of the compiler on a sets of units
*/
class Run {
@@ -949,9 +1186,19 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
/** The currently compiled unit; set from GlobalPhase */
var currentUnit: CompilationUnit = NoCompilationUnit
- /** Counts for certain classes of warnings during this run. */
- var deprecationWarnings: List[(Position, String)] = Nil
- var uncheckedWarnings: List[(Position, String)] = Nil
+ // This change broke sbt; I gave it the thrilling name of uncheckedWarnings0 so
+ // as to recover uncheckedWarnings for its ever-fragile compiler interface.
+ val deprecationWarnings0 = new ConditionalWarning("deprecation", settings.deprecation)
+ val uncheckedWarnings0 = new ConditionalWarning("unchecked", settings.unchecked)
+ val featureWarnings = new ConditionalWarning("feature", settings.feature)
+ val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings)
+ val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings)
+
+ // for sbt's benefit
+ def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList
+ def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList
+
+ var reportedFeature = Set[Symbol]()
/** A flag whether macro expansions failed */
var macroExpansionFailed = false
@@ -1031,6 +1278,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
/** Reset all classes contained in current project, as determined by
* the clearOnNextRun hook
*/
+ @deprecated("use invalidateClassPathEntries instead")
def resetProjectClasses(root: Symbol): Unit = try {
def unlink(sym: Symbol) =
if (sym != NoSymbol) root.info.decls.unlink(sym)
@@ -1136,14 +1384,22 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
val mixinPhase = phaseNamed("mixin")
val cleanupPhase = phaseNamed("cleanup")
val icodePhase = phaseNamed("icode")
- // val inlinerPhase = phaseNamed("inliner")
- // val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers")
- // val closelimPhase = phaseNamed("closelim")
- // val dcePhase = phaseNamed("dce")
+ val inlinerPhase = phaseNamed("inliner")
+ val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers")
+ val closelimPhase = phaseNamed("closelim")
+ val dcePhase = phaseNamed("dce")
val jvmPhase = phaseNamed("jvm")
+ // val msilPhase = phaseNamed("msil")
def runIsAt(ph: Phase) = globalPhase.id == ph.id
def runIsPast(ph: Phase) = globalPhase.id > ph.id
+ // def runIsAtBytecodeGen = (runIsAt(jvmPhase) || runIsAt(msilPhase))
+ def runIsAtOptimiz = {
+ runIsAt(inlinerPhase) || // listing phases in full for robustness when -Ystop-after has been given.
+ runIsAt(inlineExceptionHandlersPhase) ||
+ runIsAt(closelimPhase) ||
+ runIsAt(dcePhase)
+ }
isDefined = true
@@ -1172,14 +1428,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
*/
def units: Iterator[CompilationUnit] = unitbuf.iterator
- def registerPickle(sym: Symbol): Unit = {
- // Convert all names to the type name: objects don't store pickled data
- if (opt.showPhase && (opt.showNames exists (x => findNamedMember(x.toTypeName, sym) != NoSymbol))) {
- symData get sym foreach { pickle =>
- ShowPickled.show("\n<<-- " + sym.fullName + " -->>\n", pickle, false)
- }
- }
- }
+ def registerPickle(sym: Symbol): Unit = ()
/** does this run compile given class, module, or case factory? */
def compiles(sym: Symbol): Boolean =
@@ -1214,9 +1463,6 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
private def showMembers() =
opt.showNames foreach (x => showDef(x, opt.declsOnly, globalPhase))
- // If -Yprofile isn't given this will never be triggered.
- lazy val profiler = Class.forName(opt.profileClass).newInstance().asInstanceOf[Profiling]
-
// Similarly, this will only be created under -Yshow-syms.
object trackerFactory extends SymbolTrackers {
val global: Global.this.type = Global.this
@@ -1241,12 +1487,8 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
}
}
else {
- def warn(count: Int, what: String, option: Settings#BooleanSetting) = (
- if (option.isDefault && count > 0)
- warning("there were %d %s warnings; re-run with %s for details".format(count, what, option.name))
- )
- warn(deprecationWarnings.size, "deprecation", settings.deprecation)
- warn(uncheckedWarnings.size, "unchecked", settings.unchecked)
+ allConditionalWarnings foreach (_.summarize)
+
if (macroExpansionFailed)
warning("some macros could not be expanded and code fell back to overridden methods;"+
"\nrecompiling with generated classfiles on the classpath might help.")
@@ -1264,7 +1506,7 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
// nothing to compile, but we should still report use of deprecated options
if (sources.isEmpty) {
- checkDeprecatedSettings(new CompilationUnit(new BatchSourceFile("<no file>", "")))
+ checkDeprecatedSettings(newCompilationUnit(""))
reportCompileErrors()
return
}
@@ -1277,17 +1519,16 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
try compileUnitsInternal(units, fromPhase)
catch { case ex =>
+ // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc
globalError(supplementErrorMessage("uncaught exception during compilation: " + ex.getClass.getName))
throw ex
}
}
private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
+ doInvalidation()
+
units foreach addUnit
- if (opt.profileAll) {
- inform("starting CPU profiling on compilation run")
- profiler.startProfiling()
- }
val startTime = currentTime
reporter.reset()
@@ -1297,30 +1538,21 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
while (globalPhase.hasNext && !reporter.hasErrors) {
val startTime = currentTime
phase = globalPhase
-
- if (opt.profCPUPhase) {
- inform("starting CPU profiling on phase " + globalPhase.name)
- profiler profile globalPhase.run
- }
- else globalPhase.run
-
- // Create a profiling generation for each phase's allocations
- if (opt.profileAny)
- profiler.advanceGeneration(globalPhase.name)
+ globalPhase.run
// progress update
informTime(globalPhase.description, startTime)
phaseTimings(globalPhase) = currentTime - startTime
- // write icode to *.icode files
- if (opt.writeICode)
+ if (opt.writeICodeAtICode || (opt.printPhase && runIsAtOptimiz)) {
+ // Write *.icode files when -Xprint-icode or -Xprint:<some-optimiz-phase> was given.
writeICode()
-
- // print trees
- if (opt.printPhase || opt.printLate && runIsAt(cleanupPhase)) {
+ } else if (opt.printPhase || opt.printLate && runIsAt(cleanupPhase)) {
+ // print trees
if (opt.showTrees) nodePrinters.printAll()
else printAllUnits()
}
+
// print the symbols presently attached to AST nodes
if (opt.showSymbols)
trackerFactory.snapshot()
@@ -1346,13 +1578,6 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
advancePhase
}
- if (opt.profileAll)
- profiler.stopProfiling()
-
- if (opt.timings) {
- inform(phaseTimings.formatted)
- inform(unitTimingsFormatted)
- }
if (traceSymbolActivity)
units map (_.body) foreach (traceSymbols recordSymbolsInTree _)
@@ -1365,15 +1590,6 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
symSource.keys foreach (x => resetPackageClass(x.owner))
informTime("total", startTime)
- // save heap snapshot if requested
- if (opt.profileMem) {
- inform("Saving heap snapshot, this could take a while...")
- System.gc()
- profiler.captureSnapshot()
- inform("...done saving heap snapshot.")
- specializeTypes.printSpecStats()
- }
-
// record dependency data
if (!dependencyAnalysis.off)
dependencyAnalysis.saveDependencyAnalysis()
@@ -1427,20 +1643,6 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
refreshProgress
}
- /**
- * Attempt to locate a source file providing the given name as a top-level
- * definition in the given context, and add it to the run via compileLate
- * if found.
- */
- def compileSourceFor(context : analyzer.Context, name : Name) = false
-
- /**
- * Attempt to locate a source file providing the given name as a top-level
- * definition with the given prefix, and add it to the run via compileLate
- * if found.
- */
- def compileSourceFor(qual : Tree, name : Name) = false
-
/** Reset package class to state at typer (not sure what this
* is needed for?)
*/
@@ -1493,14 +1695,16 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
} // class Run
def printAllUnits() {
- print("[[syntax trees at end of " + phase + "]]")
- afterPhase(phase) { currentRun.units foreach (treePrinter.print(_)) }
+ print("[[syntax trees at end of %25s]]".format(phase))
+ afterPhase(phase)(currentRun.units foreach { unit =>
+ nodePrinters showUnit unit
+ })
}
/** We resolve the class/object ambiguity by passing a type/term name.
*/
def showDef(fullName: Name, declsOnly: Boolean, ph: Phase) = {
- val boringOwners = Set(definitions.AnyClass, definitions.AnyRefClass, definitions.ObjectClass)
+ val boringOwners = Set[Symbol](definitions.AnyClass, definitions.AnyRefClass, definitions.ObjectClass)
def phased[T](body: => T): T = afterPhase(ph)(body)
def boringMember(sym: Symbol) = boringOwners(sym.owner)
def symString(sym: Symbol) = if (sym.isTerm) sym.defString else sym.toString
@@ -1579,35 +1783,5 @@ class Global(var currentSettings: Settings, var reporter: NscReporter) extends S
}
object Global {
- /** If possible, instantiate the global specified via -Yglobal-class.
- * This allows the use of a custom Global subclass with the software which
- * wraps Globals, such as scalac, fsc, and the repl.
- */
- def fromSettings(settings: Settings, reporter: NscReporter): Global = {
- // !!! The classpath isn't known until the Global is created, which is too
- // late, so we have to duplicate it here. Classpath is too tightly coupled,
- // it is a construct external to the compiler and should be treated as such.
- val parentLoader = settings.explicitParentLoader getOrElse getClass.getClassLoader
- val loader = ScalaClassLoader.fromURLs(new PathResolver(settings).result.asURLs, parentLoader)
- val name = settings.globalClass.value
- val clazz = Class.forName(name, true, loader)
- val cons = clazz.getConstructor(classOf[Settings], classOf[NscReporter])
-
- cons.newInstance(settings, reporter).asInstanceOf[Global]
- }
-
- /** A global instantiated this way honors -Yglobal-class setting, and
- * falls back on calling the Global constructor directly.
- */
- def apply(settings: Settings, reporter: NscReporter): Global = {
- val g = (
- if (settings.globalClass.isDefault) null
- else try fromSettings(settings, reporter) catch { case x =>
- reporter.warning(NoPosition, "Failed to instantiate " + settings.globalClass.value + ": " + x)
- null
- }
- )
- if (g != null) g
- else new Global(settings, reporter)
- }
+ def apply(settings: Settings, reporter: Reporter): Global = new Global(settings, reporter)
}
diff --git a/src/compiler/scala/tools/nsc/InterpreterCommand.scala b/src/compiler/scala/tools/nsc/InterpreterCommand.scala
deleted file mode 100644
index e25a83e08b..0000000000
--- a/src/compiler/scala/tools/nsc/InterpreterCommand.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package scala.tools.nsc
-
-import interpreter._
-
-/** A compatibility stub.
- */
-@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
-class InterpreterCommand(arguments: List[String], error: String => Unit) extends CommandLine(arguments, error) { } \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/MainInterpreter.scala b/src/compiler/scala/tools/nsc/MainInterpreter.scala
deleted file mode 100644
index 5d190bbe14..0000000000
--- a/src/compiler/scala/tools/nsc/MainInterpreter.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Lex Spoon
- */
-
-package scala.tools.nsc
-
-import interpreter._
-
-@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
-object MainInterpreter {
- def main(args: Array[String]): Unit = new ILoop main args
-}
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index a627b982b6..a9e029e485 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -8,6 +8,7 @@ package scala.tools.nsc
import java.io.{ BufferedWriter, FileWriter }
import scala.collection.mutable
+import language.postfixOps
/**
* PhaseAssembly
diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala
index d18495458c..aa0ea1bdd8 100644
--- a/src/compiler/scala/tools/nsc/Phases.scala
+++ b/src/compiler/scala/tools/nsc/Phases.scala
@@ -7,13 +7,14 @@ package scala.tools.nsc
import symtab.Flags
import util.TableDef
+import language.postfixOps
object Phases {
val MaxPhases = 64
/** A class for tracking something about each phase.
*/
- class Model[T: Manifest] {
+ class Model[T] {
case class Cell(ph: Phase, value: T) {
def name = ph.name
def id = ph.id
diff --git a/src/compiler/scala/tools/nsc/ToolBoxes.scala b/src/compiler/scala/tools/nsc/ToolBoxes.scala
index eb298833b8..f5eefa4e62 100644
--- a/src/compiler/scala/tools/nsc/ToolBoxes.scala
+++ b/src/compiler/scala/tools/nsc/ToolBoxes.scala
@@ -4,11 +4,9 @@ import util.ScalaClassLoader
trait ToolBoxes { self: Global =>
- import self.{Reporter => ApiReporter}
+ def mkToolBox(frontEnd: FrontEnd = mkSilentFrontEnd(), options: String = "") = new ToolBox(frontEnd, options)
- def mkToolBox(reporter: ApiReporter = mkSilentReporter(), options: String = "") = new ToolBox(reporter, options)
-
- class ToolBox(val reporter: ApiReporter, val options: String) extends AbsToolBox {
+ class ToolBox(val frontEnd: FrontEnd, val options: String) extends AbsToolBox {
def typeCheck(tree0: Tree, pt: Type = WildcardType, freeTypes: Map[FreeType, Type] = Map[FreeType, Type](), silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
val tree = substituteFreeTypes(tree0, freeTypes)
val currentTyper = typer
@@ -32,18 +30,19 @@ trait ToolBoxes { self: Global =>
// todo. implement this
???
- def resetAllAttrs[T <: Tree](tree: T): T =
+ def resetAllAttrs(tree: Tree): Tree =
self.resetAllAttrs(tree)
- def resetLocalAttrs[T <: Tree](tree: T): T =
+ def resetLocalAttrs(tree: Tree): Tree =
self.resetLocalAttrs(tree)
def runExpr(tree0: Tree, freeTypes: Map[FreeType, Type] = Map[FreeType, Type]()): Any = {
var tree = substituteFreeTypes(tree0, freeTypes)
// need to reset the tree, otherwise toolbox will refuse to work with it
- tree = resetAllAttrs(tree0.duplicate)
+    // upd. this has to be done by the user himself, otherwise we run into trouble. see SI-5713
+// tree = resetAllAttrs(tree0.duplicate)
val imported = importer.importTree(tree)
- val toolBox = libraryClasspathMirror.mkToolBox(reporter.asInstanceOf[libraryClasspathMirror.Reporter], options)
+ val toolBox = libraryClasspathMirror.mkToolBox(frontEnd.asInstanceOf[libraryClasspathMirror.FrontEnd], options)
try toolBox.runExpr(imported)
catch {
case ex: toolBox.ToolBoxError =>
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index ff4e2f3fb5..028c5741c9 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package ast
import symtab._
-import reporters.{Reporter => NscReporter}
+import reporters._
import util.{Position, NoPosition}
import util.DocStrings._
import scala.reflect.internal.Chars._
@@ -21,8 +21,6 @@ trait DocComments { self: Global =>
var cookedDocComments = Map[Symbol, String]()
- def reporter: NscReporter
-
/** The raw doc comment map */
val docComments = mutable.HashMap[Symbol, DocComment]()
@@ -252,7 +250,7 @@ trait DocComments { self: Global =>
def replaceInheritdoc(childSection: String, parentSection: => String) =
if (childSection.indexOf("@inheritdoc") == -1)
childSection
- else
+ else
childSection.replaceAllLiterally("@inheritdoc", parentSection)
def getParentSection(section: (Int, Int)): String = {
@@ -275,9 +273,9 @@ trait DocComments { self: Global =>
}
child.substring(section._1, section._1 + 7) match {
- case param@("@param "|"@tparam"|"@throws") =>
+ case param@("@param "|"@tparam"|"@throws") =>
sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
- case _ =>
+ case _ =>
sectionString(extractSectionTag(child, section), parentTagMap)
}
}
@@ -367,7 +365,7 @@ trait DocComments { self: Global =>
case vname =>
lookupVariable(vname, site) match {
case Some(replacement) => replaceWith(replacement)
- case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym)
+ case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym + " in " + site)
}
}
}
@@ -385,7 +383,7 @@ trait DocComments { self: Global =>
}
// !!! todo: inherit from Comment?
- case class DocComment(raw: String, pos: Position = NoPosition) {
+ case class DocComment(raw: String, pos: Position = NoPosition, codePos: Position = NoPosition) {
/** Returns:
* template: the doc comment minus all @define and @usecase sections
@@ -414,7 +412,7 @@ trait DocComments { self: Global =>
val comment = "/** " + raw.substring(commentStart, end) + "*/"
val commentPos = subPos(commentStart, end)
- UseCase(DocComment(comment, commentPos), code, codePos)
+ UseCase(DocComment(comment, commentPos, codePos), code, codePos)
}
private def subPos(start: Int, end: Int) =
@@ -463,7 +461,18 @@ trait DocComments { self: Global =>
findIn(classes ::: List(pkgs.head, definitions.RootClass))
}
- def getType(str: String): Type = {
+ def getType(_str: String, variable: String): Type = {
+ /*
+ * work around the backticks issue suggested by Simon in
+ * https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/z7s1CCRCz74
+ * ideally, we'd have a removeWikiSyntax method in the CommentFactory to completely eliminate the wiki markup
+ */
+ val str =
+ if (_str.length >= 2 && _str.startsWith("`") && _str.endsWith("`"))
+        _str.substring(1, _str.length - 1)
+ else
+ _str
+
def getParts(start: Int): List[String] = {
val end = skipIdent(str, start)
if (end == start) List()
@@ -473,7 +482,11 @@ trait DocComments { self: Global =>
}
}
val parts = getParts(0)
- assert(parts.nonEmpty, "parts is empty '" + str + "' in site " + site)
+ if (parts.isEmpty) {
+ reporter.error(comment.codePos, "Incorrect variable expansion for " + variable + " in use case. Does the " +
+ "variable expand to wiki syntax when documenting " + site + "?")
+ return ErrorType
+ }
val partnames = (parts.init map newTermName) :+ newTypeName(parts.last)
val (start, rest) = parts match {
case "this" :: _ => (site.thisType, partnames.tail)
@@ -492,7 +505,7 @@ trait DocComments { self: Global =>
for (alias <- aliases) yield
lookupVariable(alias.name.toString.substring(1), site) match {
case Some(repl) =>
- val tpe = getType(repl.trim)
+ val tpe = getType(repl.trim, alias.name.toString)
if (tpe != NoType) tpe
else {
val alias1 = alias.cloneSymbol(definitions.RootClass, alias.rawflags, newTypeName(repl))
diff --git a/src/compiler/scala/tools/nsc/ast/FreeVars.scala b/src/compiler/scala/tools/nsc/ast/FreeVars.scala
index 1bf36e8bf2..a1983d1834 100644
--- a/src/compiler/scala/tools/nsc/ast/FreeVars.scala
+++ b/src/compiler/scala/tools/nsc/ast/FreeVars.scala
@@ -13,9 +13,9 @@ trait FreeVars extends reflect.internal.FreeVars { self: Global =>
case Reified(_, symbolTable, _) =>
// logging free vars only when they are untyped prevents avalanches of duplicate messages
symbolTable foreach {
- case FreeTermDef(_, _, binding, origin) if settings.logFreeTerms.value && binding.tpe == null =>
+ case FreeTermDef(_, _, binding, _, origin) if settings.logFreeTerms.value && binding.tpe == null =>
reporter.echo(position, "free term: %s %s".format(showRaw(binding), origin))
- case FreeTypeDef(_, _, binding, origin) if settings.logFreeTypes.value && binding.tpe == null =>
+ case FreeTypeDef(_, _, binding, _, origin) if settings.logFreeTypes.value && binding.tpe == null =>
reporter.echo(position, "free type: %s %s".format(showRaw(binding), origin))
case _ =>
// do nothing
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index c79ca1206e..ce3106ab29 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -8,6 +8,7 @@ package ast
import compat.Platform.EOL
import symtab.Flags._
+import language.postfixOps
/** The object `nodePrinter` converts the internal tree
* representation to a string.
@@ -24,48 +25,21 @@ abstract class NodePrinters {
}
var infolevel = InfoLevel.Quiet
- def nodeToString: Tree => String =
- if (sys.props contains "scala.colors") nodeToColorizedString
- else nodeToRegularString
+ def nodeToString: Tree => String = nodeToRegularString
object nodeToRegularString extends DefaultPrintAST with (Tree => String) {
def apply(tree: Tree) = stringify(tree)
}
- object nodeToColorizedString extends ColorPrintAST with (Tree => String) {
- def apply(tree: Tree) = stringify(tree)
- }
-
- trait ColorPrintAST extends DefaultPrintAST {
- import scala.tools.util.color._
-
- def keywordColor = Cyan
- def typeColor = Yellow
- def termColor = Blue
- def flagColor = Red
- def literalColor = Green
-
- override def showFlags(tree: MemberDef) =
- super.showFlags(tree) in flagColor.bright
-
- override def showDefTreeName(tree: DefTree) =
- if (tree.name.isTermName) tree.name.decode in termColor.bright
- else tree.name.decode in typeColor.bright
-
- override def showName(name: Name) =
- if (name == nme.EMPTY || name == tpnme.EMPTY) "<empty>" in keywordColor
- else if (name.isTermName) name.decode in termColor
- else name.decode in typeColor
-
- override def showLiteral(lit: Literal) =
- super.showLiteral(lit) in literalColor.bright
- }
-
trait DefaultPrintAST extends PrintAST {
+ val printPos = settings.Xprintpos.value || settings.Yposdebug.value
+
+ def showNameAndPos(tree: NameTree) = showPosition(tree) + showName(tree.name)
def showDefTreeName(tree: DefTree) = showName(tree.name)
+ def showPosition(tree: Tree) = if (printPos) tree.pos.show else ""
def showFlags(tree: MemberDef) = flagsToString(tree.symbol.flags | tree.mods.flags)
- def showLiteral(lit: Literal) = lit.value.escapedStringValue
- def showTypeTree(tt: TypeTree) = "<tpt>" + emptyOrComment(showType(tt))
+ def showLiteral(lit: Literal) = showPosition(lit) + lit.value.escapedStringValue
+ def showTypeTree(tt: TypeTree) = showPosition(tt) + "<tpt>" + emptyOrComment(showType(tt))
def showName(name: Name) = name match {
case nme.EMPTY | tpnme.EMPTY => "<empty>"
case name => "\"" + name + "\""
@@ -96,17 +70,21 @@ abstract class NodePrinters {
private var level = 0
def showName(name: Name): String
+ def showPosition(tree: Tree): String
+ def showNameAndPos(tree: NameTree): String
def showDefTreeName(defTree: DefTree): String
def showFlags(tree: MemberDef): String
def showLiteral(lit: Literal): String
def showTypeTree(tt: TypeTree): String
def showAttributes(tree: Tree): String // symbol and type
- def showRefTreeName(tree: Tree): String = tree match {
- case SelectFromTypeTree(qual, name) => showRefTreeName(qual) + "#" + showName(name)
- case Select(qual, name) => showRefTreeName(qual) + "." + showName(name)
- case Ident(name) => showName(name)
- case _ => "" + tree
+ def showRefTreeName(tree: Tree): String = {
+ tree match {
+ case SelectFromTypeTree(qual, name) => showRefTreeName(qual) + "#" + showName(name)
+ case Select(qual, name) => showRefTreeName(qual) + "." + showName(name)
+ case id @ Ident(name) => showNameAndPos(id)
+ case _ => "" + tree
+ }
}
def showRefTree(tree: RefTree): String = {
def prefix0 = showRefTreeName(tree.qualifier)
@@ -115,7 +93,7 @@ abstract class NodePrinters {
case Select(_, _) => prefix0 + "."
case _ => ""
})
- prefix + showName(tree.name) + emptyOrComment(showAttributes(tree))
+ prefix + showNameAndPos(tree) + emptyOrComment(showAttributes(tree))
}
def emptyOrComment(s: String) = if (s == "") "" else " // " + s
@@ -124,7 +102,6 @@ abstract class NodePrinters {
buf.clear()
if (settings.XshowtreesStringified.value) buf.append(tree.toString + EOL)
if (settings.XshowtreesCompact.value) {
- // todo. colors for compact representation
buf.append(showRaw(tree))
} else {
level = 0
@@ -190,8 +167,9 @@ abstract class NodePrinters {
}
}
+ def treePrefix(tree: Tree) = showPosition(tree) + tree.printingPrefix
def printMultiline(tree: Tree)(body: => Unit) {
- printMultiline(tree.printingPrefix, showAttributes(tree))(body)
+ printMultiline(treePrefix(tree), showAttributes(tree))(body)
}
def printMultiline(prefix: String, comment: String)(body: => Unit) {
printLine(prefix + "(", comment)
@@ -217,10 +195,12 @@ abstract class NodePrinters {
}
def printSingle(tree: Tree, name: Name) {
- println(tree.printingPrefix + "(" + showName(name) + ")" + showAttributes(tree))
+ println(treePrefix(tree) + "(" + showName(name) + ")" + showAttributes(tree))
}
def traverse(tree: Tree) {
+ showPosition(tree)
+
tree match {
case AppliedTypeTree(tpt, args) => applyCommon(tree, tpt, args)
case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
@@ -229,6 +209,19 @@ abstract class NodePrinters {
case Throw(Ident(name)) =>
printSingle(tree, name)
+ case b @ Bind(name, body) =>
+ printMultiline(tree) {
+ println(showDefTreeName(b))
+ traverse(body)
+ }
+
+ case ld @ LabelDef(name, params, rhs) =>
+ printMultiline(tree) {
+ showNameAndPos(ld)
+ traverseList("()", "params")(params)
+ traverse(rhs)
+ }
+
case Function(vparams, body) =>
printMultiline(tree) {
traverseList("()", "parameter")(vparams)
@@ -308,7 +301,7 @@ abstract class NodePrinters {
val ps0 = parents map { p =>
if (p.tpe eq null) p match {
case x: RefTree => showRefTree(x)
- case x => "" + x
+ case x => showPosition(x) + x
}
else showName(newTypeName(p.tpe.typeSymbol.fullName))
}
@@ -352,7 +345,7 @@ abstract class NodePrinters {
case _ =>
tree match {
case t: RefTree => println(showRefTree(t))
- case t if t.productArity == 0 => println(tree.printingPrefix)
+ case t if t.productArity == 0 => println(treePrefix(t))
case t => printMultiline(tree)(tree.productIterator foreach traverseAny)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 3302c11127..b4beb231ab 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -18,6 +18,7 @@ import scala.concurrent.Lock
import scala.text._
import symtab.Flags._
import symtab.SymbolTable
+import language.implicitConversions
/**
* Tree browsers can show the AST in a graphical and interactive
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 2b75925d9a..91935854f4 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -9,6 +9,7 @@ package ast
import PartialFunction._
import symtab.Flags
+import language.implicitConversions
/** A DSL for generating scala code. The goal is that the
* code generating code should look a lot like the code it
@@ -94,6 +95,12 @@ trait TreeDSL {
def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other)
+
+ // generic operations on ByteClass, IntClass, LongClass
+ def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
+ def GEN_& (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.AND), other)
+ def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other)
+ def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other)
def BOOL_&& (other: Tree) = fn(target, Boolean_and, other)
def BOOL_|| (other: Tree) = fn(target, Boolean_or, other)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 19d1e0a51a..3a527676b4 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -9,6 +9,7 @@ package ast
import scala.collection.mutable.ListBuffer
import symtab.Flags._
import symtab.SymbolTable
+import language.postfixOps
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
* TreeDSL at the moment expects a Global. Can we get by with SymbolTable?
@@ -71,14 +72,6 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
Annotated(Ident(nme.synthSwitch), expr)
}
- // must be kept in synch with the codegen in PatMatVirtualiser
- object VirtualCaseDef {
- def unapply(b: Block): Option[(Assign, Tree, Tree)] = b match {
- case Block(List(assign@Assign(keepGoingLhs, falseLit), matchRes), zero) => Some((assign, matchRes, zero)) // TODO: check tree annotation
- case _ => None
- }
- }
-
def hasSynthCaseSymbol(t: Tree) = (t.symbol ne null) && (t.symbol hasFlag (CASE | SYNTHETIC))
// TODO: would be so much nicer if we would know during match-translation (i.e., type checking)
@@ -86,9 +79,11 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
class MatchMatcher {
def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig)
def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig)
- def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
+ def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
- def apply(matchExpr: Tree): Tree = (matchExpr: @unchecked) match {
+ def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef)
+
+ def apply(matchExpr: Tree): Tree = matchExpr match {
// old-style match or virtpatmat switch
case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr )
caseMatch(matchExpr, selector, cases, identity)
@@ -99,11 +94,15 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr )
caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher)
// optimized version of virtpatmat
- case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, epilogue) if opt.virtPatmat => // TODO: check tree annotation // println("virtopt match: "+ (zero, x, matchRes, keepGoing, stats) + "for:\n"+ matchExpr )
- caseVirtualizedMatchOpt(matchExpr, zero, x, matchRes, keepGoing, stats, epilogue, identity)
+ case Block(stats, matchEndDef) if opt.virtPatmat && (stats forall hasSynthCaseSymbol) =>
+      // the assumption is that once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity)
// optimized version of virtpatmat
- case Block(outerStats, orig@Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, epilogue)) if opt.virtPatmat => // TODO: check tree annotation // println("virt opt block match: "+ (zero, x, matchRes, keepGoing, stats, outerStats) + "for:\n"+ matchExpr )
- caseVirtualizedMatchOpt(matchExpr, zero, x, matchRes, keepGoing, stats, epilogue, m => copyBlock(matchExpr, outerStats, m))
+ case Block(outerStats, orig@Block(stats, matchEndDef)) if opt.virtPatmat && (stats forall hasSynthCaseSymbol) =>
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m))
case other =>
unknownTree(other)
}
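
The block shape recognized by the new cases, written out as source-level pseudo-trees (a sketch; x1, case1 and matchEnd1 stand for the synthetic, CASE|SYNTHETIC-flagged symbols the virtualizing pattern matcher emits):

    // {
    //   val x1 = <scrutinee>                               // prologue: non-LabelDef statements (may be empty)
    //   case1(){ if (cond1) matchEnd1(r1) else case2() }   // cases: a run of LabelDefs ...
    //   case2(){ matchEnd1(throw new MatchError(x1)) }
    //   matchEnd1(x){ x }                                   // matchEndDef: the Block's result expression
    // }
    //
    // `stats span (s => !s.isInstanceOf[LabelDef])` therefore splits the statements
    // into (prologue, cases) at the first LabelDef.
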
@@ -119,35 +118,6 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
}
}
- def withDefaultCase(matchExpr: Tree, defaultAction: Tree/*scrutinee*/ => Tree): Tree = {
- object withDefaultTransformer extends MatchMatcher {
- override def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = {
- val casesNoSynthCatchAll = dropSyntheticCatchAll(cases)
- if (casesNoSynthCatchAll exists treeInfo.isDefaultCase) orig
- else {
- val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
- wrap(Match(selector, casesNoSynthCatchAll :+ defaultCase))
- }
- }
- override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = { import CODE._
- ((matcher APPLY (scrut)) DOT nme.getOrElse) APPLY (defaultAction(scrut.duplicate)) // TODO: pass targs
- }
- override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree): Tree = { import CODE._
- wrap(Block(
- zero ::
- x ::
- matchRes ::
- keepGoing ::
- stats,
- // replace `if (keepGoing) throw new MatchError(...) else matchRes` by `if (keepGoing) ${defaultAction(`x`)} else matchRes`
- (IF (REF(keepGoing.symbol)) THEN defaultAction(x.rhs.duplicate) ELSE REF(matchRes.symbol))
- ))
- }
- }
- withDefaultTransformer(matchExpr)
- }
-
-
def mkCached(cvar: Symbol, expr: Tree): Tree = {
val cvarRef = mkUnattributedRef(cvar)
Block(
@@ -207,6 +177,22 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
def mkSysErrorCall(message: String): Tree =
mkMethodCall(Sys_error, List(Literal(Constant(message))))
+ /** A creator for a call to a scala.reflect.Manifest or ClassManifest factory method.
+ *
+ * @param full full or partial manifest (target will be Manifest or ClassManifest)
+ * @param constructor name of the factory method (e.g. "classType")
+ * @param tparg the type argument
+ * @param args value arguments
+ * @return the tree
+ */
+ def mkManifestFactoryCall(full: Boolean, constructor: String, tparg: Type, args: List[Tree]): Tree =
+ mkMethodCall(
+ if (full) FullManifestModule else PartialManifestModule,
+ newTermName(constructor),
+ List(tparg),
+ args
+ )
+
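
A sketch of how the helper might be invoked; `someElementType` and `someArgTrees` are placeholders for a Type and the value-argument trees the caller has already built:

    // Builds a tree roughly equivalent to:  scala.reflect.Manifest.classType[T](<args>)
    val manifestCall =
      gen.mkManifestFactoryCall(full = true, "classType", tparg = someElementType, args = someArgTrees)
    // With full = false the receiver becomes scala.reflect.ClassManifest instead.
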
/** Make a synchronized block on 'monitor'. */
def mkSynchronized(monitor: Tree, body: Tree): Tree =
Apply(Select(monitor, Object_synchronized), List(body))
@@ -222,6 +208,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
else AppliedTypeTree(Ident(clazz), targs map TypeTree)
))
}
+ def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
@@ -250,7 +237,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
mkMethodCall(
PredefModule,
wrapArrayMethodName(elemtp),
- if (isScalaValueType(elemtp)) Nil else List(elemtp),
+ if (isPrimitiveValueType(elemtp)) Nil else List(elemtp),
List(tree)
)
}
@@ -275,7 +262,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
* elem type elemtp to expected type pt.
*/
def mkCastArray(tree: Tree, elemtp: Type, pt: Type) =
- if (elemtp.typeSymbol == AnyClass && isScalaValueType(tree.tpe.typeArgs.head))
+ if (elemtp.typeSymbol == AnyClass && isPrimitiveValueType(tree.tpe.typeArgs.head))
mkCast(mkRuntimeCall(nme.toObjectArray, List(tree)), pt)
else
mkCast(tree, pt)
@@ -366,23 +353,19 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
else Block(prefix, containing) setPos (prefix.head.pos union containing.pos)
}
- /** Return a double-checked locking idiom around the syncBody tree. It guards with `cond` and
+ /** Return the synchronized part of the double-checked locking idiom around the syncBody tree. It guards with `cond` and
   *  synchronizes on `clazz.this`. Additional statements can be included after initialization
   *  (outside the synchronized block).
*
* The idiom works only if the condition is using a volatile field.
* @see http://www.cs.umd.edu/~pugh/java/memoryModel/DoubleCheckedLocking.html
*/
- def mkDoubleCheckedLocking(clazz: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
- mkDoubleCheckedLocking(mkAttributedThis(clazz), cond, syncBody, stats)
-
- def mkDoubleCheckedLocking(attrThis: Tree, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree = {
- If(cond,
- Block(
- mkSynchronized(
- attrThis,
- If(cond, Block(syncBody: _*), EmptyTree)) ::
- stats: _*),
- EmptyTree)
- }
+ def mkSynchronizedCheck(clazz: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
+ mkSynchronizedCheck(mkAttributedThis(clazz), cond, syncBody, stats)
+
+ def mkSynchronizedCheck(attrThis: Tree, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
+ Block(mkSynchronized(
+ attrThis,
+ If(cond, Block(syncBody: _*), EmptyTree)) ::
+ stats: _*)
}
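
Written as ordinary source, the complete idiom the compiler now assembles corresponds to the hand-written sketch below, where the outer unsynchronized test is the caller's `cond` and only the synchronized re-check comes from mkSynchronizedCheck (class and field names are illustrative):

    class LazyCell {
      @volatile private[this] var initialized = false
      private[this] var value: String = _

      def force: String = {
        if (!initialized) {              // outer check: now emitted by the caller
          this.synchronized {            // mkSynchronizedCheck: synchronize on `clazz.this` ...
            if (!initialized) {          // ... and re-check `cond` under the monitor
              value = "computed"         // syncBody: perform the initialization
              initialized = true
            }
          }
          // `stats`, if any, are placed here, after the synchronized block
        }
        value
      }
    }
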
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 66704680ae..6f1a8f488f 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -58,6 +58,8 @@ trait Trees extends reflect.internal.Trees { self: Global =>
case class InjectDerivedValue(arg: Tree)
extends SymTree
+ class PostfixSelect(qual: Tree, name: Name) extends Select(qual, name)
+
/** emitted by typer, eliminated by refchecks */
case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends TypTree
@@ -84,13 +86,12 @@ trait Trees extends reflect.internal.Trees { self: Global =>
/* Add constructor to template */
// create parameters for <init> as synthetic trees.
- var vparamss1 =
- vparamss map (vps => vps.map { vd =>
- atPos(vd.pos.focus) {
- ValDef(
- Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR) withAnnotations vd.mods.annotations,
- vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
- }})
+ var vparamss1 = mmap(vparamss) { vd =>
+ atPos(vd.pos.focus) {
+ val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR)
+ ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
+ }
+ }
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
val gvdefs = evdefs map {
@@ -114,9 +115,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
// convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
vparamss1 = List() :: vparamss1;
- val superRef: Tree = atPos(superPos) {
- Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
- }
+ val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
val superCall = (superRef /: argss) (Apply)
List(
atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
@@ -141,11 +140,18 @@ trait Trees extends reflect.internal.Trees { self: Global =>
* @param body the template statements without primary constructor
* and value parameter fields.
*/
- def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef =
+ def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef = {
+ // "if they have symbols they should be owned by `sym`"
+ assert(
+ mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
+ ((mmap(vparamss)(_.symbol), sym))
+ )
+
ClassDef(sym,
Template(sym.info.parents map TypeTree,
if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
constrMods, vparamss, argss, body, superPos))
+ }
// --- subcomponents --------------------------------------------------
@@ -187,7 +193,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type) =
new SelectFromArray(qualifier, selector, erasure).copyAttrs(tree)
def InjectDerivedValue(tree: Tree, arg: Tree) =
- new InjectDerivedValue(arg)
+ new InjectDerivedValue(arg).copyAttrs(tree)
def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
case dc@TypeTreeWithDeferredRefCheck() => new TypeTreeWithDeferredRefCheck()(dc.check).copyAttrs(tree)
}
@@ -226,6 +232,11 @@ trait Trees extends reflect.internal.Trees { self: Global =>
}
}
+ // used when a phase is disabled
+ object noopTransformer extends Transformer {
+ override def transformUnit(unit: CompilationUnit): Unit = {}
+ }
+
override protected def xtransform(transformer: super.Transformer, tree: Tree): Tree = tree match {
case DocDef(comment, definition) =>
transformer.treeCopy.DocDef(tree, comment, transformer.transform(definition))
@@ -251,8 +262,9 @@ trait Trees extends reflect.internal.Trees { self: Global =>
// def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
// def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
- def resetAllAttrs[A <: Tree](x: A, leaveAlone: Tree => Boolean = null): A = new ResetAttrs(false, leaveAlone).transform(x)
- def resetLocalAttrs[A <: Tree](x: A, leaveAlone: Tree => Boolean = null): A = new ResetAttrs(true, leaveAlone).transform(x)
+ def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(false, leaveAlone).transform(x)
+ def resetLocalAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone).transform(x)
+ def resetLocalAttrsKeepLabels(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone, true).transform(x)
/** A transformer which resets symbol and tpe fields of all nodes in a given tree,
* with special treatment of:
@@ -263,7 +275,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
*
* (bq:) This transformer has mutable state and should be discarded after use
*/
- private class ResetAttrs(localOnly: Boolean, leaveAlone: Tree => Boolean = null) {
+ private class ResetAttrs(localOnly: Boolean, leaveAlone: Tree => Boolean = null, keepLabels: Boolean = false) {
val debug = settings.debug.value
val trace = scala.tools.nsc.util.trace when debug
@@ -312,13 +324,14 @@ trait Trees extends reflect.internal.Trees { self: Global =>
super.transform {
tree match {
case tpt: TypeTree =>
- if (tpt.original != null) {
+ if (tpt.original != null)
transform(tpt.original)
- } else {
- if (tpt.tpe != null && (tpt.wasEmpty || (tpt.tpe exists (tp => locals contains tp.typeSymbol))))
- tpt.tpe = null
- tree
+ else if (tpt.tpe != null && (tpt.wasEmpty || (tpt.tpe exists (tp => locals contains tp.typeSymbol)))) {
+ val dupl = tpt.duplicate
+ dupl.tpe = null
+ dupl
}
+ else tree
case TypeApply(fn, args) if args map transform exists (_.isEmpty) =>
transform(fn)
case This(_) if tree.symbol != null && tree.symbol.isPackageClass =>
@@ -326,18 +339,19 @@ trait Trees extends reflect.internal.Trees { self: Global =>
case EmptyTree =>
tree
case _ =>
- if (tree.hasSymbol && (!localOnly || (locals contains tree.symbol)))
- tree.symbol = NoSymbol
- tree.tpe = null
- tree
+ val dupl = tree.duplicate
+ if (tree.hasSymbol && (!localOnly || (locals contains tree.symbol)) && !(keepLabels && tree.symbol.isLabel))
+ dupl.symbol = NoSymbol
+ dupl.tpe = null
+ dupl
}
}
- }
+ }
}
- def transform[T <: Tree](x: T): T = {
+ def transform(x: Tree): Tree = {
if (localOnly)
- new MarkLocals().traverse(x)
+ new MarkLocals().traverse(x)
if (localOnly && debug) {
assert(locals.size == orderedLocals.size)
@@ -345,9 +359,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
trace("locals (%d total): %n".format(orderedLocals.size))(msg)
}
- val x1 = new Transformer().transform(x)
- assert(x.getClass isInstance x1, x1.getClass)
- x1.asInstanceOf[T]
+ new Transformer().transform(x)
}
}
@@ -361,4 +373,4 @@ trait Trees extends reflect.internal.Trees { self: Global =>
*/
- } \ No newline at end of file
+ }
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 93fa9a60f6..f702f44338 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -397,7 +397,7 @@ trait MarkupParsers {
/** xScalaPatterns ::= patterns
*/
- def xScalaPatterns: List[Tree] = escapeToScala(parser.seqPatterns(), "pattern")
+ def xScalaPatterns: List[Tree] = escapeToScala(parser.xmlSeqPatterns(), "pattern")
def reportSyntaxError(pos: Int, str: String) = parser.syntaxError(pos, str)
def reportSyntaxError(str: String) {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index daabfae6b3..43560f9d8d 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -70,6 +70,9 @@ trait ParsersCommon extends ScannersCommon {
@inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil)
@inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(Constant()))
+ @inline final def dropAnyBraces[T](body: => T): T =
+ if (in.token == LBRACE) inBraces(body)
+ else body
@inline final def inBrackets[T](body: => T): T = {
accept(LBRACKET)
@@ -159,9 +162,9 @@ self =>
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
/** the markup parser */
- lazy val xmlp = new MarkupParser(this, true)
+ lazy val xmlp = new MarkupParser(this, preserveWS = true)
- object symbXMLBuilder extends SymbolicXMLBuilder(this, true) { // DEBUG choices
+ object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
val global: self.global.type = self.global
def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
}
@@ -314,7 +317,7 @@ self =>
* by compilationUnit().
*/
def scriptBody(): Tree = {
- val stmts = templateStatSeq(false)._2
+ val stmts = templateStats()
accept(EOF)
def mainModuleName = newTermName(settings.script.value)
@@ -382,7 +385,7 @@ self =>
Nil,
List(Nil),
TypeTree(),
- Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
+ Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(())))
)
// def main
@@ -732,9 +735,9 @@ self =>
}
ts.toList
}
- @inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, false, part)
- @inline final def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, true, part)
- @inline final def readAnnots[T](part: => T): List[T] = tokenSeparated(AT, true, part)
+ @inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, sepFirst = false, part)
+ @inline final def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, sepFirst = true, part)
+ @inline final def readAnnots[T](part: => T): List[T] = tokenSeparated(AT, sepFirst = true, part)
/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
@@ -773,8 +776,7 @@ self =>
syntaxError(
offset, "left- and right-associative operators with same precedence may not be mixed", false)
- def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree,
- prec: Int, leftAssoc: Boolean): Tree = {
+ def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree, prec: Int, leftAssoc: Boolean): Tree = {
var top = top0
if (opstack != base && precedence(opstack.head.operator) == prec)
checkAssoc(opstack.head.offset, opstack.head.operator, leftAssoc)
@@ -822,7 +824,7 @@ self =>
atPos(start, in.skipToken()) { makeFunctionTypeTree(ts, typ()) }
else {
ts foreach checkNotByNameOrVarargs
- val tuple = atPos(start) { makeTupleType(ts, true) }
+ val tuple = atPos(start) { makeTupleType(ts, flattenUnary = true) }
infixTypeRest(
compoundTypeRest(
annotTypeRest(
@@ -884,10 +886,10 @@ self =>
def simpleType(): Tree = {
val start = in.offset
simpleTypeRest(in.token match {
- case LPAREN => atPos(start)(makeTupleType(inParens(types()), true))
+ case LPAREN => atPos(start)(makeTupleType(inParens(types()), flattenUnary = true))
case USCORE => wildcardType(in.skipToken())
case _ =>
- path(false, true) match {
+ path(thisOK = false, typeOK = true) match {
case r @ SingletonTypeTree(_) => r
case r => convertToTypeId(r)
}
@@ -897,7 +899,7 @@ self =>
private def typeProjection(t: Tree): Tree = {
val hashOffset = in.skipToken()
val nameOffset = in.offset
- val name = identForType(false)
+ val name = identForType(skipIt = false)
val point = if (name == tpnme.ERROR) hashOffset else nameOffset
atPos(t.pos.startOrPoint, point)(SelectFromTypeTree(t, name))
}
@@ -946,7 +948,7 @@ self =>
if (isIdent && in.name != nme.STAR) {
val opOffset = in.offset
val leftAssoc = treeInfo.isLeftAssoc(in.name)
- if (mode != InfixMode.FirstOp) checkAssoc(opOffset, in.name, mode == InfixMode.LeftOp)
+ if (mode != InfixMode.FirstOp) checkAssoc(opOffset, in.name, leftAssoc = mode == InfixMode.LeftOp)
val op = identForType()
val tycon = atPos(opOffset) { Ident(op) }
newLineOptWhenFollowing(isTypeIntroToken)
@@ -983,7 +985,7 @@ self =>
syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
nme.ERROR
}
- def ident(): Name = ident(true)
+ def ident(): Name = ident(skipIt = true)
def rawIdent(): Name = try in.name finally in.nextToken()
/** For when it's known already to be a type name. */
@@ -994,7 +996,7 @@ self =>
val point = in.offset
//assert(t.pos.isDefined, t)
if (t != EmptyTree)
- Select(t, ident(false)) setPos r2p(t.pos.startOrPoint, point, in.lastOffset)
+ Select(t, ident(skipIt = false)) setPos r2p(t.pos.startOrPoint, point, in.lastOffset)
else
errorTermTree // has already been reported
}
@@ -1024,7 +1026,7 @@ self =>
val tok = in.token
val name = ident()
t = atPos(start) {
- if (tok == BACKQUOTED_IDENT) new BackQuotedIdent(name)
+ if (tok == BACKQUOTED_IDENT) Ident(name) withAttachment BackquotedIdentifier
else Ident(name)
}
if (in.token == DOT) {
@@ -1052,7 +1054,8 @@ self =>
if (typeOK && in.token == TYPE) {
in.nextToken()
atPos(t.pos.startOrPoint, dotOffset) { SingletonTypeTree(t) }
- } else {
+ }
+ else {
val t1 = selector(t)
if (in.token == DOT) { selectors(t1, typeOK, in.skipToken()) }
else t1
@@ -1073,7 +1076,7 @@ self =>
* }}}
*/
def stableId(): Tree =
- path(false, false)
+ path(thisOK = false, typeOK = false)
/** {{{
* QualId ::= Id {`.' Id}
@@ -1082,7 +1085,7 @@ self =>
def qualId(): Tree = {
val start = in.offset
val id = atPos(start) { Ident(ident()) }
- if (in.token == DOT) { selectors(id, false, in.skipToken()) }
+ if (in.token == DOT) { selectors(id, typeOK = false, in.skipToken()) }
else id
}
/** Calls `qualId()` and manages some package state. */
@@ -1106,7 +1109,7 @@ self =>
* }}}
* @note The returned tree does not yet have a position
*/
- def literal(isNegated: Boolean = false): Tree = {
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false): Tree = {
def finish(value: Any): Tree = {
val t = Literal(Constant(value))
in.nextToken()
@@ -1115,7 +1118,7 @@ self =>
if (in.token == SYMBOLLIT)
Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
else if (in.token == INTERPOLATIONID)
- interpolatedString()
+ interpolatedString(inPattern = inPattern)
else finish(in.token match {
case CHARLIT => in.charVal
case INTLIT => in.intVal(isNegated).toInt
@@ -1141,7 +1144,7 @@ self =>
}
}
- private def interpolatedString(): Tree = atPos(in.offset) {
+ private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
val start = in.offset
val interpolator = in.name
@@ -1151,8 +1154,11 @@ self =>
while (in.token == STRINGPART) {
partsBuf += literal()
exprBuf += {
- if (in.token == IDENTIFIER) atPos(in.offset)(Ident(ident()))
- else expr()
+ if (inPattern) dropAnyBraces(pattern())
+ else {
+ if (in.token == IDENTIFIER) atPos(in.offset)(Ident(ident()))
+ else expr()
+ }
}
}
if (in.token == STRINGLIT) partsBuf += literal()
@@ -1200,7 +1206,7 @@ self =>
else startInfixType()
def annotTypeRest(t: Tree): Tree =
- (t /: annotations(false)) (makeAnnotated)
+ (t /: annotations(skipNewLines = false)) (makeAnnotated)
/** {{{
* WildcardType ::= `_' TypeBounds
@@ -1379,7 +1385,7 @@ self =>
syntaxErrorOrIncomplete("`*' expected", true)
}
} else if (in.token == AT) {
- t = (t /: annotations(false)) (makeAnnotated)
+ t = (t /: annotations(skipNewLines = false)) (makeAnnotated)
} else {
t = atPos(t.pos.startOrPoint, colonPos) {
val tpt = typeOrInfixType(location)
@@ -1445,7 +1451,7 @@ self =>
var top = prefixExpr()
while (isIdent) {
- top = reduceStack(true, base, top, precedence(in.name), treeInfo.isLeftAssoc(in.name))
+ top = reduceStack(isExpr = true, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
val op = in.name
opstack = OpInfo(top, op, in.offset) :: opstack
ident()
@@ -1453,18 +1459,19 @@ self =>
if (isExprIntro) {
val next = prefixExpr()
if (next == EmptyTree)
- return reduceStack(true, base, top, 0, true)
+ return reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
top = next
} else {
+ // postfix expression
val topinfo = opstack.head
opstack = opstack.tail
- val od = stripParens(reduceStack(true, base, topinfo.operand, 0, true))
+ val od = stripParens(reduceStack(isExpr = true, base, topinfo.operand, 0, leftAssoc = true))
return atPos(od.pos.startOrPoint, topinfo.offset) {
- Select(od, topinfo.operator.encode)
+ new PostfixSelect(od, topinfo.operator.encode)
}
}
}
- reduceStack(true, base, top, 0, true)
+ reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
}
/** {{{
@@ -1475,9 +1482,10 @@ self =>
if (isUnaryOp) {
atPos(in.offset) {
val name = nme.toUnaryName(rawIdent())
- // val name = nme.toUnaryName(ident()) // val name: Name = "unary_" + ident()
- if (name == nme.UNARY_- && isNumericLit) simpleExprRest(atPos(in.offset)(literal(isNegated = true)), true)
- else Select(stripParens(simpleExpr()), name)
+ if (name == nme.UNARY_- && isNumericLit)
+ simpleExprRest(atPos(in.offset)(literal(isNegated = true)), canApply = true)
+ else
+ Select(stripParens(simpleExpr()), name)
}
}
else simpleExpr()
@@ -1505,7 +1513,7 @@ self =>
case XMLSTART =>
xmlLiteral()
case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
- path(true, false)
+ path(thisOK = true, typeOK = false)
case USCORE =>
val start = in.offset
val pname = freshName("x$")
@@ -1524,14 +1532,14 @@ self =>
val nstart = in.skipToken()
val npos = r2p(nstart, nstart, in.lastOffset)
val tstart = in.offset
- val (parents, argss, self, stats) = template(false)
+ val (parents, argss, self, stats) = template(isTrait = false)
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
makeNew(parents, self, stats, argss, npos, cpos)
case _ =>
syntaxErrorOrIncomplete("illegal start of simple expression", true)
errorTermTree
}
- simpleExprRest(t, canApply)
+ simpleExprRest(t, canApply = canApply)
}
def simpleExprRest(t: Tree, canApply: Boolean): Tree = {
@@ -1539,7 +1547,7 @@ self =>
in.token match {
case DOT =>
in.nextToken()
- simpleExprRest(selector(stripParens(t)), true)
+ simpleExprRest(selector(stripParens(t)), canApply = true)
case LBRACKET =>
val t1 = stripParens(t)
t1 match {
@@ -1548,7 +1556,7 @@ self =>
while (in.token == LBRACKET)
app = atPos(app.pos.startOrPoint, in.offset)(TypeApply(app, exprTypeArgs()))
- simpleExprRest(app, true)
+ simpleExprRest(app, canApply = true)
case _ =>
t1
}
@@ -1564,7 +1572,7 @@ self =>
}
Apply(sel, argumentExprs())
}
- simpleExprRest(app, true)
+ simpleExprRest(app, canApply = true)
case USCORE =>
atPos(t.pos.startOrPoint, in.skipToken()) {
Typed(stripParens(t), Function(Nil, EmptyTree))
@@ -1652,11 +1660,11 @@ self =>
*/
def enumerators(): List[Enumerator] = {
val enums = new ListBuffer[Enumerator]
- generator(enums, false)
+ generator(enums, eqOK = false)
while (isStatSep) {
in.nextToken()
if (in.token == IF) enums += makeFilter(in.offset, guard())
- else generator(enums, true)
+ else generator(enums, eqOK = true)
}
enums.toList
}
@@ -1698,11 +1706,11 @@ self =>
* was threaded through methods as boolean seqOK.
*/
trait SeqContextSensitive extends PatternContextSensitive {
- /** Returns Some(tree) if it finds a star and prematurely ends parsing.
- * This is an artifact of old implementation which has proven difficult
- * to cleanly extract.
- */
- def interceptStarPattern(top: Tree): Option[Tree]
+ // is a sequence pattern _* allowed?
+ def isSequenceOK: Boolean
+
+ // are we in an XML pattern?
+ def isXML: Boolean = false
def functionArgType(): Tree = argType()
def argType(): Tree = {
@@ -1772,21 +1780,7 @@ self =>
*/
def pattern2(): Tree = {
val nameOffset = in.offset
- def warnIfMacro(tree: Tree): Unit = {
- def check(name: Name): Unit = if (name.toString == nme.MACROkw.toString)
- warning(nameOffset, "in future versions of Scala \"macro\" will be a keyword. consider using a different name.")
- tree match {
- case _: BackQuotedIdent =>
- ;
- case Ident(name) =>
- check(name)
- case _ =>
- ;
- }
- }
-
val p = pattern3()
- warnIfMacro(p)
if (in.token != AT) p
else p match {
@@ -1803,24 +1797,74 @@ self =>
/** {{{
* Pattern3 ::= SimplePattern
* | SimplePattern {Id [nl] SimplePattern}
- * SeqPattern3 ::= SeqSimplePattern [ `*' | `?' | `+' ]
- * | SeqSimplePattern {Id [nl] SeqSimplePattern}
* }}}
*/
def pattern3(): Tree = {
+ var top = simplePattern(badPattern3)
+ // after peekahead
+ def acceptWildStar() = atPos(top.pos.startOrPoint, in.prev.offset)(Star(stripParens(top)))
+ def peekahead() = {
+ in.prev copyFrom in
+ in.nextToken()
+ }
+ def pushback() = {
+ in.next copyFrom in
+ in copyFrom in.prev
+ }
+ // See SI-3189, SI-4832 for motivation. Cf SI-3480 for counter-motivation.
+ // TODO: dredge out the remnants of regexp patterns.
+ // /{/ peek for _*) or _*} (for xml escape)
+ if (isSequenceOK) {
+ top match {
+ case Ident(nme.WILDCARD) if (isRawStar) =>
+ peekahead()
+ in.token match {
+ case RBRACE if (isXML) => return acceptWildStar()
+ case RPAREN if (!isXML) => return acceptWildStar()
+ case _ => pushback()
+ }
+ case _ =>
+ }
+ }
val base = opstack
- var top = simplePattern()
- interceptStarPattern(top) foreach { x => return x }
-
while (isIdent && in.name != raw.BAR) {
- top = reduceStack(
- false, base, top, precedence(in.name), treeInfo.isLeftAssoc(in.name))
+ top = reduceStack(isExpr = false, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
val op = in.name
opstack = OpInfo(top, op, in.offset) :: opstack
ident()
- top = simplePattern()
- }
- stripParens(reduceStack(false, base, top, 0, true))
+ top = simplePattern(badPattern3)
+ }
+ stripParens(reduceStack(isExpr = false, base, top, 0, leftAssoc = true))
+ }
+ def badPattern3(): Tree = {
+ def isComma = in.token == COMMA
+ def isAnyBrace = in.token == RPAREN || in.token == RBRACE
+ val badStart = "illegal start of simple pattern"
+ // better recovery if don't skip delims of patterns
+ var skip = !(isComma || isAnyBrace)
+ val msg = if (!opstack.isEmpty && opstack.head.operator == nme.STAR) {
+ opstack.head.operand match {
+ case Ident(nme.WILDCARD) =>
+ if (isSequenceOK && isComma)
+ "bad use of _* (a sequence pattern must be the last pattern)"
+ else if (isSequenceOK && isAnyBrace) {
+ skip = true // do skip bad paren; scanner may skip bad brace already
+ "bad brace or paren after _*"
+ } else if (!isSequenceOK && isAnyBrace)
+ "bad use of _* (sequence pattern not allowed)"
+ else badStart
+ case _ =>
+ if (isSequenceOK && isAnyBrace)
+ "use _* to match a sequence"
+ else if (isComma || isAnyBrace)
+ "trailing * is not a valid pattern"
+ else badStart
+ }
+ } else {
+ badStart
+ }
+ syntaxErrorOrIncomplete(msg, skip)
+ errorPatternTree
}
/** {{{
@@ -1828,20 +1872,23 @@ self =>
* | `_'
* | literal
* | XmlPattern
- * | StableId [TypeArgs] [`(' [SeqPatterns] `)']
+ * | StableId /[TypeArgs]/ [`(' [Patterns] `)']
+ * | StableId [`(' [Patterns] `)']
+ * | StableId [`(' [Patterns] `,' [varid `@'] `_' `*' `)']
* | `(' [Patterns] `)'
- * SimpleSeqPattern ::= varid
- * | `_'
- * | literal
- * | XmlPattern
- * | `<' xLiteralPattern
- * | StableId [TypeArgs] [`(' [SeqPatterns] `)']
- * | `(' [SeqPatterns] `)'
* }}}
*
* XXX: Hook for IDE
*/
def simplePattern(): Tree = {
+ // simple diagnostics for this entry point
+ def badStart(): Tree = {
+ syntaxErrorOrIncomplete("illegal start of simple pattern", true)
+ errorPatternTree
+ }
+ simplePattern(badStart)
+ }
+ def simplePattern(onError: () => Tree): Tree = {
val start = in.offset
in.token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
@@ -1850,7 +1897,7 @@ self =>
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
case Ident(nme.MINUS) =>
- return atPos(start) { literal(isNegated = true) }
+ return atPos(start) { literal(isNegated = true, inPattern = true) }
case _ =>
}
case _ =>
@@ -1868,14 +1915,13 @@ self =>
atPos(start, start) { Ident(nme.WILDCARD) }
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL =>
- atPos(start) { literal() }
+ atPos(start) { literal(inPattern = true) }
case LPAREN =>
atPos(start)(makeParens(noSeq.patterns()))
case XMLSTART =>
xmlLiteralPattern()
case _ =>
- syntaxErrorOrIncomplete("illegal start of simple pattern", true)
- errorPatternTree
+ onError()
}
}
}
@@ -1886,16 +1932,16 @@ self =>
}
/** The implementation for parsing inside of patterns at points where sequences are allowed. */
object seqOK extends SeqContextSensitive {
- // See ticket #3189 for the motivation for the null check.
- // TODO: dredge out the remnants of regexp patterns.
- // ... and now this is back the way it was because it caused #3480.
- def interceptStarPattern(top: Tree): Option[Tree] =
- if (isRawStar) Some(atPos(top.pos.startOrPoint, in.skipToken())(Star(stripParens(top))))
- else None
+ val isSequenceOK = true
}
/** The implementation for parsing inside of patterns at points where sequences are disallowed. */
object noSeq extends SeqContextSensitive {
- def interceptStarPattern(top: Tree) = None
+ val isSequenceOK = false
+ }
+ /** For use from xml pattern, where sequence is allowed and encouraged. */
+ object xmlSeqOK extends SeqContextSensitive {
+ val isSequenceOK = true
+ override val isXML = true
}
/** These are default entry points into the pattern context sensitive methods:
* they are all initiated from non-pattern context.
@@ -1909,7 +1955,8 @@ self =>
/** Default entry points into some pattern contexts. */
def pattern(): Tree = noSeq.pattern()
def patterns(): List[Tree] = noSeq.patterns()
- def seqPatterns(): List[Tree] = seqOK.patterns() // Also called from xml parser
+ def seqPatterns(): List[Tree] = seqOK.patterns()
+ def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser
def argumentPatterns(): List[Tree] = inParens {
if (in.token == RPAREN) Nil
else seqPatterns()
@@ -2050,7 +2097,7 @@ self =>
var caseParam = ofCaseClass
def param(): ValDef = {
val start = in.offset
- val annots = annotations(false)
+ val annots = annotations(skipNewLines = false)
var mods = Modifiers(Flags.PARAM)
if (owner.isTypeName) {
mods = modifiers() | Flags.PARAMACCESSOR
@@ -2198,7 +2245,7 @@ self =>
param
}
newLineOptWhenFollowedBy(LBRACKET)
- if (in.token == LBRACKET) inBrackets(commaSeparated(typeParam(NoMods withAnnotations annotations(true))))
+ if (in.token == LBRACKET) inBrackets(commaSeparated(typeParam(NoMods withAnnotations annotations(skipNewLines = true))))
else Nil
}
@@ -2355,7 +2402,7 @@ self =>
private def caseAwareTokenOffset = if (in.token == CASECLASS || in.token == CASEOBJECT) in.prev.offset else in.offset
def nonLocalDefOrDcl : List[Tree] = {
- val annots = annotations(true)
+ val annots = annotations(skipNewLines = true)
defOrDcl(caseAwareTokenOffset, modifiers() withAnnotations annots)
}
@@ -2449,7 +2496,7 @@ self =>
in.nextToken
if (in.token == THIS) {
atPos(start, in.skipToken()) {
- val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), false)
+ val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false)
newLineOptWhenFollowedBy(LBRACE)
val rhs = in.token match {
case LBRACE => atPos(in.offset) { constrBlock(vparamss) }
@@ -2460,10 +2507,7 @@ self =>
}
else {
val nameOffset = in.offset
- val isBackquoted = in.token == BACKQUOTED_IDENT
val name = ident()
- if (name.toString == nme.MACROkw.toString && !isBackquoted)
- warning(nameOffset, "in future versions of Scala \"macro\" will be a keyword. consider using a different name.")
funDefRest(start, nameOffset, mods, name)
}
}
@@ -2476,10 +2520,10 @@ self =>
// i.e. (B[T] or T => B)
val contextBoundBuf = new ListBuffer[Tree]
val tparams = typeParamClauseOpt(name, contextBoundBuf)
- val vparamss = paramClauses(name, contextBoundBuf.toList, false)
+ val vparamss = paramClauses(name, contextBoundBuf.toList, ofCaseClass = false)
newLineOptWhenFollowedBy(LBRACE)
var restype = fromWithinReturnType(typedOpt())
- val rhs =
+ val rhs =
if (isStatSep || in.token == RBRACE) {
if (restype.isEmpty) restype = scalaUnitConstr
newmods |= Flags.DEFERRED
@@ -2488,10 +2532,15 @@ self =>
restype = scalaUnitConstr
blockExpr()
} else {
- accept(EQUALS)
- if (settings.Xmacros.value && in.token == MACRO) {
- in.nextToken()
- newmods |= Flags.MACRO
+ if (in.token == EQUALS) {
+ in.nextTokenAllow(nme.MACROkw)
+ if (settings.Xmacros.value && in.token == MACRO || // [Martin] Xmacros can be retired now
+ in.token == IDENTIFIER && in.name == nme.MACROkw) {
+ in.nextToken()
+ newmods |= Flags.MACRO
+ }
+ } else {
+ accept(EQUALS)
}
expr()
}
@@ -2552,10 +2601,7 @@ self =>
newLinesOpt()
atPos(start, in.offset) {
val nameOffset = in.offset
- val isBackquoted = in.token == BACKQUOTED_IDENT
val name = identForType()
- if (name.toString == nme.MACROkw.toString && !isBackquoted)
- warning(nameOffset, "in future versions of Scala \"macro\" will be a keyword. consider using a different name.")
// @M! a type alias as well as an abstract type may declare type parameters
val tparams = typeParamClauseOpt(name, null)
in.token match {
@@ -2573,7 +2619,7 @@ self =>
/** Hook for IDE, for top-level classes/objects. */
def topLevelTmplDef: Tree = {
- val annots = annotations(true)
+ val annots = annotations(skipNewLines = true)
val pos = caseAwareTokenOffset
val mods = modifiers() withAnnotations annots
tmplDef(pos, mods)
@@ -2613,17 +2659,13 @@ self =>
def classDef(start: Int, mods: Modifiers): ClassDef = {
in.nextToken
val nameOffset = in.offset
- val isBackquoted = in.token == BACKQUOTED_IDENT
val name = identForType()
- if (name.toString == nme.MACROkw.toString && !isBackquoted)
- warning(nameOffset, "in future versions of Scala \"macro\" will be a keyword. consider using a different name.")
-
atPos(start, if (name == tpnme.ERROR) start else nameOffset) {
savingClassContextBounds {
val contextBoundBuf = new ListBuffer[Tree]
val tparams = typeParamClauseOpt(name, contextBoundBuf)
classContextBounds = contextBoundBuf.toList
- val tstart = in.offset :: classContextBounds.map(_.pos.startOrPoint) min;
+ val tstart = (in.offset :: classContextBounds.map(_.pos.startOrPoint)).min
if (!classContextBounds.isEmpty && mods.isTrait) {
syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
classContextBounds = List()
@@ -2631,7 +2673,7 @@ self =>
val constrAnnots = constructorAnnotations()
val (constrMods, vparamss) =
if (mods.isTrait) (Modifiers(Flags.TRAIT), List())
- else (accessModifierOpt(), paramClauses(name, classContextBounds, mods.isCase))
+ else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase))
var mods1 = mods
if (mods.isTrait) {
if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
@@ -2657,10 +2699,7 @@ self =>
def objectDef(start: Int, mods: Modifiers): ModuleDef = {
in.nextToken
val nameOffset = in.offset
- val isBackquoted = in.token == BACKQUOTED_IDENT
val name = ident()
- if (name.toString == nme.MACROkw.toString && !isBackquoted)
- warning(nameOffset, "in future versions of Scala \"macro\" will be a keyword. consider using a different name.")
val tstart = in.offset
atPos(start, if (name == nme.ERROR) start else nameOffset) {
val mods1 = if (in.token == SUBTYPE) mods | Flags.DEFERRED else mods
@@ -2701,7 +2740,7 @@ self =>
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
// @S: pre template body cannot stub like post body can!
- val (self, body) = templateBody(true)
+ val (self, body) = templateBody(isPre = true)
if (in.token == WITH && self.isEmpty) {
val earlyDefs: List[Tree] = body flatMap {
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
@@ -2714,15 +2753,15 @@ self =>
case _ => List()
}
in.nextToken()
- val (parents, argss) = templateParents(isTrait)
- val (self1, body1) = templateBodyOpt(isTrait)
+ val (parents, argss) = templateParents(isTrait = isTrait)
+ val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
(parents, argss, self1, earlyDefs ::: body1)
} else {
(List(), List(List()), self, body)
}
} else {
- val (parents, argss) = templateParents(isTrait)
- val (self, body) = templateBodyOpt(isTrait)
+ val (parents, argss) = templateParents(isTrait = isTrait)
+ val (self, body) = templateBodyOpt(traitParentSeen = isTrait)
(parents, argss, self, body)
}
}
@@ -2740,11 +2779,11 @@ self =>
val (parents0, argss, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
- template(mods.isTrait)
+ template(isTrait = mods.isTrait)
}
else {
newLineOptWhenFollowedBy(LBRACE)
- val (self, body) = templateBodyOpt(false)
+ val (self, body) = templateBodyOpt(traitParentSeen = false)
(List(), List(List()), self, body)
}
)
@@ -2778,14 +2817,14 @@ self =>
* }}}
* @param isPre specifies whether in early initializer (true) or not (false)
*/
- def templateBody(isPre: Boolean) = inBraces(templateStatSeq(isPre)) match {
+ def templateBody(isPre: Boolean) = inBraces(templateStatSeq(isPre = isPre)) match {
case (self, Nil) => (self, List(EmptyTree))
case result => result
}
def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
- templateBody(false)
+ templateBody(isPre = false)
} else {
if (in.token == LPAREN)
syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
@@ -2839,24 +2878,7 @@ self =>
*/
def packaging(start: Int): Tree = {
val nameOffset = in.offset
- def warnIfMacro(tree: Tree): Unit = {
- def check(name: Name): Unit = if (name.toString == nme.MACROkw.toString)
- warning(nameOffset, "in future versions of Scala \"macro\" will be a keyword. consider using a different name.")
- tree match {
- case _: BackQuotedIdent =>
- ;
- case Ident(name) =>
- check(name)
- case Select(qual, name) =>
- warnIfMacro(qual)
- check(name)
- case _ =>
- ;
- }
- }
-
val pkg = pkgQualId()
- warnIfMacro(pkg)
val stats = inBracesOrNil(topStatSeq())
makePackaging(start, pkg, stats)
}
@@ -2897,6 +2919,13 @@ self =>
stats.toList
}
+ /** Informal - for the repl and other direct parser accessors.
+ */
+ def templateStats(): List[Tree] = templateStatSeq(isPre = false)._2 match {
+ case Nil => List(EmptyTree)
+ case stats => stats
+ }
+
/** {{{
* TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
* TemplateStat ::= Import
@@ -2976,7 +3005,7 @@ self =>
def localDef : List[Tree] = {
atEndPos {
atStartPos(in.offset) {
- val annots = annotations(true)
+ val annots = annotations(skipNewLines = true)
val mods = localModifiers() withAnnotations annots
if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(mods)
else List(tmplDef(mods))
@@ -2986,7 +3015,7 @@ self =>
*/
def localDef(implicitMod: Int): List[Tree] = {
- val annots = annotations(true)
+ val annots = annotations(skipNewLines = true)
val pos = in.offset
val mods = (localModifiers() | implicitMod) withAnnotations annots
val defs =
@@ -3059,27 +3088,8 @@ self =>
}
} else {
val nameOffset = in.offset
- def warnIfMacro(tree: Tree): Unit = {
- def check(name: Name): Unit = if (name.toString == nme.MACROkw.toString)
- warning(nameOffset, "in future versions of Scala \"macro\" will be a keyword. consider using a different name.")
- tree match {
- // [Eugene] pkgQualId never returns BackQuotedIdents
- // this means that we'll get spurious warnings even if we wrap macro package name in backquotes
- case _: BackQuotedIdent =>
- ;
- case Ident(name) =>
- check(name)
- case Select(qual, name) =>
- warnIfMacro(qual)
- check(name)
- case _ =>
- ;
- }
- }
-
in.flushDoc
val pkg = pkgQualId()
- warnIfMacro(pkg)
if (in.token == EOF) {
ts += makePackaging(start, pkg, List())
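At the source level, the pattern-parsing changes above mainly affect how a trailing `_*` is recognized (only where a sequence pattern is legal, with tailored diagnostics from badPattern3 otherwise) and allow interpolated strings to appear in pattern position. A small illustrative snippet of the accepted syntax, not part of the patch:

object SeqPatternSketch extends App {
  List(1, 2, 3) match {
    // `_*` is only legal as the last pattern of the argument list; using it
    // elsewhere now triggers messages such as
    // "bad use of _* (a sequence pattern must be the last pattern)".
    case List(1, rest @ _*) => println("tail: " + rest)
    case _                  => println("no match")
  }
}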
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 81d81a4fb7..87072f3172 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -113,10 +113,18 @@ trait Scanners extends ScannersCommon {
}
/** Clear buffer and set name and token */
- private def finishNamed() {
+ private def finishNamed(idtoken: Int = IDENTIFIER) {
name = newTermName(cbuf.toString)
- token = name2token(name)
cbuf.clear()
+ token = idtoken
+ if (idtoken == IDENTIFIER) {
+ val idx = name.start - kwOffset
+ if (idx >= 0 && idx < kwArray.length) {
+ token = kwArray(idx)
+ if (token == IDENTIFIER && allowIdent != name)
+ deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated")
+ }
+ }
}
/** Clear buffer and set string */
@@ -190,6 +198,20 @@ trait Scanners extends ScannersCommon {
off
}
+ /** Allow an otherwise deprecated ident here */
+ private var allowIdent: Name = nme.EMPTY
+
+ /** Get next token, and allow the otherwise deprecated ident `name` */
+ def nextTokenAllow(name: Name) = {
+ val prev = allowIdent
+ allowIdent = name
+ try {
+ nextToken()
+ } finally {
+ allowIdent = prev
+ }
+ }
+
/** Produce next token, filling TokenData fields of Scanner.
*/
def nextToken() {
@@ -231,6 +253,12 @@ trait Scanners extends ScannersCommon {
lastOffset -= 1
}
if (inStringInterpolation) fetchStringPart() else fetchToken()
+ if(token == ERROR) {
+ if (inMultiLineInterpolation)
+ sepRegions = sepRegions.tail.tail
+ else if (inStringInterpolation)
+ sepRegions = sepRegions.tail
+ }
} else {
this copyFrom next
next.token = EMPTY
@@ -328,7 +356,7 @@ trait Scanners extends ScannersCommon {
putChar(ch)
nextChar()
getIdentRest()
- if (ch == '"' && token == IDENTIFIER && settings.Xexperimental.value)
+ if (ch == '"' && token == IDENTIFIER)
token = INTERPOLATIONID
case '<' => // is XMLSTART?
val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
@@ -562,9 +590,8 @@ trait Scanners extends ScannersCommon {
getLitChars('`')
if (ch == '`') {
nextChar()
- finishNamed()
+ finishNamed(BACKQUOTED_IDENT)
if (name.length == 0) syntaxError("empty quoted identifier")
- token = BACKQUOTED_IDENT
}
else syntaxError("unclosed quoted identifier")
}
@@ -697,7 +724,7 @@ trait Scanners extends ScannersCommon {
do {
putChar(ch)
nextRawChar()
- } while (Character.isUnicodeIdentifierPart(ch))
+ } while (ch != SU && Character.isUnicodeIdentifierPart(ch))
next.token = IDENTIFIER
next.name = newTermName(cbuf.toString)
cbuf.clear()
@@ -1124,9 +1151,9 @@ trait Scanners extends ScannersCommon {
nme.VIEWBOUNDkw -> VIEWBOUND,
nme.SUPERTYPEkw -> SUPERTYPE,
nme.HASHkw -> HASH,
- nme.ATkw -> AT
- ) ++
- (if (settings.Xmacros.value) List(nme.MACROkw -> MACRO) else List())
+ nme.ATkw -> AT,
+ nme.MACROkw -> IDENTIFIER,
+ nme.THENkw -> IDENTIFIER)
private var kwOffset: Int = -1
private val kwArray: Array[Int] = {
@@ -1135,14 +1162,7 @@ trait Scanners extends ScannersCommon {
arr
}
- final val token2name = allKeywords map (_.swap) toMap
-
- /** Convert name to token */
- final def name2token(name: Name) = {
- val idx = name.start - kwOffset
- if (idx >= 0 && idx < kwArray.length) kwArray(idx)
- else IDENTIFIER
- }
+ final val token2name = (allKeywords map (_.swap)).toMap
// Token representation ----------------------------------------------------
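The scanner now keeps `macro` (and `then`) in the keyword table mapped to IDENTIFIER and emits a deprecation warning from finishNamed unless the caller has whitelisted the name via nextTokenAllow, which saves and restores the allowance around a single nextToken call. A sketch of that save/restore idiom with hypothetical names (LexerSketch, allowed, read), not the scanner's actual API:

class LexerSketch {
  private var allowed: Set[String] = Set.empty

  // Temporarily permit one extra word for the duration of a single read,
  // restoring the previous setting even if the body throws.
  def readAllowing[A](word: String)(read: => A): A = {
    val saved = allowed
    allowed = allowed + word
    try read
    finally allowed = saved
  }
}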
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 849437e4ff..b0204c5971 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -11,6 +11,7 @@ import xml.{ EntityRef, Text }
import xml.XML.{ xmlns }
import symtab.Flags.MUTABLE
import scala.tools.util.StringOps.splitWhere
+import language.implicitConversions
/** This class builds instance of `Tree` that represent XML.
*
@@ -29,7 +30,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
private[parser] var isPattern: Boolean = _
- private trait XMLTypeNames extends TypeNames {
+ private object xmltypes extends TypeNames {
val _Comment: NameType = "Comment"
val _Elem: NameType = "Elem"
val _EntityRef: NameType = "EntityRef"
@@ -44,7 +45,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val _UnprefixedAttribute: NameType = "UnprefixedAttribute"
}
- private trait XMLTermNames extends TermNames {
+ private object xmlterms extends TermNames {
val _Null: NameType = "Null"
val __Elem: NameType = "Elem"
val __Text: NameType = "Text"
@@ -56,15 +57,6 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val _xml: NameType = "xml"
}
- private object xmltypes extends XMLTypeNames {
- type NameType = TypeName
- implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
- }
- private object xmlterms extends XMLTermNames {
- type NameType = TermName
- implicit def createNameType(name: String): TermName = newTermNameCached(name)
- }
-
import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
_PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index e17bbf5e46..a4a062609b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -110,7 +110,8 @@ object Tokens extends Tokens {
final val MATCH = 58
final val FORSOME = 59
final val LAZY = 61
- final val MACRO = 62
+ final val MACRO = 62 // not yet used in 2.10
+ final val THEN = 63 // not yet used in 2.10
def isKeyword(code: Int) =
code >= IF && code <= LAZY
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 3a6e26d3b5..de7e6f9c7a 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -521,11 +521,29 @@ abstract class TreeBuilder {
// val/var x_1 = t$._1
// ...
// val/var x_N = t$._N
- val pat1 = patvarTransformer.transform(pat)
+
+ val rhsUnchecked = gen.mkUnchecked(rhs)
+
+ // TODO: clean this up -- there is too much information packed into makePatDef's `pat` argument
+ // when it's a simple identifier (case Some((name, tpt)) -- above),
+ // pat should have the type ascription that was specified by the user
+ // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
+ // i.e., this must hold: pat1 match { case Typed(expr, tp) => assert(expr.isInstanceOf[Ident]) case _ => }
+ // if we encounter such an erroneous pattern, we strip off the type ascription from pat and propagate the type information to rhs
+ val (pat1, rhs1) = patvarTransformer.transform(pat) match {
+ // move the Typed ascription to the rhs
+ case Typed(expr, tpt) if !expr.isInstanceOf[Ident] =>
+ val rhsTypedUnchecked =
+ if (tpt.isEmpty) rhsUnchecked
+ else Typed(rhsUnchecked, tpt) setPos (rhs.pos union tpt.pos)
+ (expr, rhsTypedUnchecked)
+ case ok =>
+ (ok, rhsUnchecked)
+ }
val vars = getVariables(pat1)
val matchExpr = atPos((pat1.pos union rhs.pos).makeTransparent) {
Match(
- gen.mkUnchecked(rhs),
+ rhs1,
List(
atPos(pat1.pos) {
CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident, true))
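The comment in makePatDef above describes definitions whose pattern is not a bare identifier but carries a type ascription; since a pattern such as `(a, b): (Int, String)` would be an illegal pattern tree, the ascription is moved onto the right-hand side instead. A small sketch of the resulting shape (hypothetical helper mkPair, not the builder's output):

object PatDefSketch extends App {
  def mkPair(): (Int, String) = (1, "one")
  // Conceptually the user wrote  val (a, b): (Int, String) = mkPair();
  // the ascription ends up on the rhs rather than inside the pattern:
  val (a, b) = mkPair(): (Int, String)
  println(a + " / " + b)
}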
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 314a3b45a0..62885cc73d 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package backend
import io.AbstractFile
-import util.{ClassPath,JavaClassPath}
+import util.{ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath}
import util.ClassPath.{ JavaContext, DefaultJavaContext }
import scala.tools.util.PathResolver
@@ -17,7 +17,17 @@ trait JavaPlatform extends Platform {
type BinaryRepr = AbstractFile
- lazy val classPath = new PathResolver(settings).result
+ private var currentClassPath: Option[MergedClassPath[BinaryRepr]] = None
+
+ def classPath: ClassPath[BinaryRepr] = {
+ if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
+ currentClassPath.get
+ }
+
+ /** Update classpath with a substituted subentry */
+ def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
+ currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
+
def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
// [Martin] Why do we need a cast here?
// The problem is that we cannot specify at this point that global.platform should be of type JavaPlatform.
@@ -33,9 +43,13 @@ trait JavaPlatform extends Platform {
if (settings.make.isDefault) Nil
else List(dependencyAnalysis)
+ private def classEmitPhase =
+ if (settings.target.value == "jvm-1.5") genJVM
+ else genASM
+
def platformPhases = List(
- flatten, // get rid of inner classes
- genJVM // generate .class files
+ flatten, // get rid of inner classes
+ classEmitPhase // generate .class files
) ++ depAnalysisPhase
lazy val externalEquals = getDecl(BoxesRunTimeClass, nme.equals_)
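The classpath change above replaces an eagerly built `lazy val` with a cached value that can later be partially swapped out: classPath is built on first use, and updateClassPath wraps the current result in a DeltaClassPath that substitutes selected sub-entries. A minimal sketch of that idea, with hypothetical Entry/Leaf/Merged types standing in for the real ClassPath hierarchy:

object ClassPathSketch {
  trait Entry { def name: String }
  final case class Leaf(name: String) extends Entry
  final case class Merged(entries: List[Entry]) extends Entry {
    def name: String = entries.map(_.name).mkString(":")
  }

  class Platform(compute: () => Merged) {
    private var current: Option[Merged] = None

    def classPath: Merged = {
      if (current.isEmpty) current = Some(compute())   // built on first use
      current.get
    }

    // Substitute some sub-entries, keeping the rest of the classpath intact.
    def updateClassPath(subst: Map[Entry, Entry]): Unit =
      current = Some(Merged(classPath.entries.map(e => subst.getOrElse(e, e))))
  }
}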
diff --git a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
index 65b1fbc229..f4176c46b8 100644
--- a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
@@ -31,6 +31,10 @@ trait MSILPlatform extends Platform {
def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
// See discussion in JavaPlatForm for why we need a cast here.
+ /** Update classpath with a substituted subentry */
+ def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
+ throw new UnsupportedOperationException("classpath invalidations not supported on MSIL")
+
def platformPhases = List(
genMSIL // generate .msil files
)
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index 23592eeb61..f770713093 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -24,6 +24,9 @@ trait Platform {
/** The root symbol loader. */
def rootLoader: LazyType
+ /** Update classpath with a substitution that maps entries to entries */
+ def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]])
+
/** Any platform-specific phases. */
def platformPhases: List[SubComponent]
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 4f3b0bf951..b8ecaf1b43 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -11,6 +11,7 @@ import scala.collection.{ mutable, immutable }
import mutable.{ ListBuffer, ArrayBuffer }
import util.{ Position, NoPosition }
import backend.icode.analysis.ProgramPoint
+import language.postfixOps
trait BasicBlocks {
self: ICodes =>
@@ -218,8 +219,8 @@ trait BasicBlocks {
///////////////////// Substitutions ///////////////////////
/**
- * Replace the instruction at the given position. Used by labels when
- * they are anchored. It retains the position of the previous instruction.
+ * Replace the instruction at the given position. Used by labels when they are anchored.
+ * The replacing instruction is given the nsc.util.Position of the instruction it replaces.
*/
def replaceInstruction(pos: Int, instr: Instruction): Boolean = {
assert(closed, "Instructions can be replaced only after the basic block is closed")
@@ -232,7 +233,7 @@ trait BasicBlocks {
/**
* Replace the given instruction with the new one.
* Returns `true` if it actually changed something.
- * It retains the position of the previous instruction.
+ * The replacing instruction is given the nsc.util.Position of the instruction it replaces.
*/
def replaceInstruction(oldInstr: Instruction, newInstr: Instruction): Boolean = {
assert(closed, "Instructions can be replaced only after the basic block is closed")
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index ec303d76ee..f7541a4739 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -13,6 +13,7 @@ import scala.collection.mutable.{ ListBuffer, Buffer }
import scala.tools.nsc.symtab._
import scala.annotation.switch
import PartialFunction._
+import language.postfixOps
/** This class ...
*
@@ -820,7 +821,7 @@ abstract class GenICode extends SubComponent {
ctx2
case _ =>
- abort("Cannot instantiate " + tpt + "of kind: " + generatedType)
+ abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
}
case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
@@ -1207,7 +1208,7 @@ abstract class GenICode extends SubComponent {
if (!tree.symbol.isPackageClass) tree.symbol
else tree.symbol.info.member(nme.PACKAGE) match {
case NoSymbol => assert(false, "Cannot use package as value: " + tree) ; NoSymbol
- case s => Console.err.println("Bug: found package class where package object expected. Converting.") ; s.moduleClass
+ case s => debugwarn("Bug: found package class where package object expected. Converting.") ; s.moduleClass
}
)
debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym))
@@ -2224,7 +2225,7 @@ abstract class GenICode extends SubComponent {
* jumps to the given basic block.
*/
def patch(code: Code) {
- val map = toPatch map (i => (i -> patch(i))) toMap;
+ val map = mapFrom(toPatch)(patch)
code.blocks foreach (_ subst map)
}
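The `patch` rewrite above relies on a small compiler utility, mapFrom, whose behaviour is exactly what the removed expression spelled out: build a Map by pairing each element with the result of applying a function to it. A stand-in with the same semantics (the real helper lives in the compiler's internal collection utilities):

object MapFromSketch {
  def mapFrom[A, B](keys: List[A])(f: A => B): Map[A, B] =
    keys.map(k => k -> f(k)).toMap
}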
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 36651541b2..efb4e7a199 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -44,7 +44,7 @@ trait Members {
def blocksList: List[BasicBlock] = blocks.toList
def instructions = blocksList flatMap (_.iterator)
def blockCount = blocks.size
- def instructionCount = blocks map (_.length) sum
+ def instructionCount = (blocks map (_.length)).sum
def touched = _touched
def touched_=(b: Boolean): Unit = {
@@ -156,7 +156,7 @@ trait Members {
def newBlock() = code.newBlock
def startBlock = code.startBlock
- def lastBlock = blocks.last
+ def lastBlock = { assert(blocks.nonEmpty, symbol); blocks.last }
def blocks = code.blocksList
def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this
@@ -257,11 +257,23 @@ trait Members {
var succ = bb
do {
succ = nextBlock(succ);
- bb.removeLastInstruction
- succ.toList foreach { i => bb.emit(i, i.pos) }
- code.removeBlock(succ)
+ val lastInstr = bb.lastInstruction
+ /* Ticket SI-5672
+ * Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it pushes.
+ * Examples:
+ * `SWITCH` consisting of just the default case, or
+ * `CJUMP(targetBlock, targetBlock, _, _)` ie where success and failure targets coincide (this one consumes two stack values).
+ */
+ val oldTKs = lastInstr.consumedTypes
+ assert(lastInstr.consumed == oldTKs.size, "Someone forgot to override consumedTypes() in " + lastInstr)
+
+ bb.removeLastInstruction
+ for(tk <- oldTKs.reverse) { bb.emit(DROP(tk), lastInstr.pos) }
+ succ.toList foreach { i => bb.emit(i, i.pos) }
+ code.removeBlock(succ)
+ exh foreach { e => e.covered = e.covered - succ }
+
nextBlock -= bb
- exh foreach { e => e.covered = e.covered - succ }
} while (nextBlock.isDefinedAt(succ))
bb.close
} else
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index ec6c631bd1..3179fc5c56 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -435,6 +435,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override val consumedTypes = List(INT)
+
+ def flatTagsCount: Int = { var acc = 0; var rest = tags; while(rest.nonEmpty) { acc += rest.head.length; rest = rest.tail }; acc } // a one-liner
}
/** This class represents a JUMP instruction
@@ -468,6 +472,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 2
override def produced = 0
+
+ override val consumedTypes = List(kind, kind)
}
/** This class represents a CZJUMP instruction
@@ -487,6 +493,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override val consumedTypes = List(kind)
}
@@ -497,6 +505,8 @@ trait Opcodes { self: ICodes =>
case class RETURN(kind: TypeKind) extends Instruction {
override def consumed = if (kind == UNIT) 0 else 1
override def produced = 0
+
+ // TODO override val consumedTypes = List(kind)
}
/** This class represents a THROW instruction
@@ -593,48 +603,49 @@ trait Opcodes { self: ICodes =>
/** This class represents a method invocation style. */
sealed abstract class InvokeStyle {
/** Is this a dynamic method call? */
- def isDynamic: Boolean = this match {
- case Dynamic => true
- case _ => false
- }
+ def isDynamic: Boolean = false
/** Is this a static method call? */
- def isStatic: Boolean = this match {
- case Static(_) => true
- case _ => false
- }
+ def isStatic: Boolean = false
- def isSuper: Boolean = this match {
- case SuperCall(_) => true
- case _ => false
- }
+ def isSuper: Boolean = false
/** Is this an instance method call? */
- def hasInstance: Boolean = this match {
- case Static(false) => false
- case _ => true
- }
+ def hasInstance: Boolean = true
/** Returns a string representation of this style. */
- override def toString(): String = this match {
- case Dynamic => "dynamic"
- case Static(false) => "static-class"
- case Static(true) => "static-instance"
- case SuperCall(mix) => "super(" + mix + ")"
- }
+ override def toString(): String
}
- /** Virtual calls */
- case object Dynamic extends InvokeStyle
+ /** Virtual calls.
+ * On JVM, translated to either `invokeinterface` or `invokevirtual`.
+ */
+ case object Dynamic extends InvokeStyle {
+ override def isDynamic = true
+ override def toString(): String = "dynamic"
+ }
/**
- * Special invoke. Static(true) is used for calls to private
- * members.
+ * Special invoke:
+ * Static(true) is used for calls to private members, ie `invokespecial` on JVM.
+ * Static(false) is used for calls to class-level instance-less static methods, ie `invokestatic` on JVM.
*/
- case class Static(onInstance: Boolean) extends InvokeStyle
+ case class Static(onInstance: Boolean) extends InvokeStyle {
+ override def isStatic = true
+ override def hasInstance = onInstance
+ override def toString(): String = {
+ if(onInstance) "static-instance"
+ else "static-class"
+ }
+ }
- /** Call through super[mix]. */
- case class SuperCall(mix: Name) extends InvokeStyle
+ /** Call through super[mix].
+ * On JVM, translated to `invokespecial`.
+ */
+ case class SuperCall(mix: Name) extends InvokeStyle {
+ override def isSuper = true
+ override def toString(): String = { "super(" + mix + ")" }
+ }
// CLR backend
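The InvokeStyle rework above trades a base class that pattern-matches on `this` for per-case overrides with sensible defaults, so each style carries its own answers and its own toString. A condensed sketch of that refactoring shape (hypothetical names, not the ICode types):

sealed abstract class StyleSketch {
  def isDynamic: Boolean = false
  def hasInstance: Boolean = true
}
case object DynamicSketch extends StyleSketch {
  override def isDynamic = true
  override def toString = "dynamic"
}
final case class StaticSketch(onInstance: Boolean) extends StyleSketch {
  override def hasInstance = onInstance
  override def toString = if (onInstance) "static-instance" else "static-class"
}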
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index f99ac28e9d..72220184d2 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -127,42 +127,61 @@ trait Primitives { self: ICodes =>
/** Returns a string representation of this operation. */
override def toString(): String
+
+ /** used only from GenASM */
+ def opcodeIF(): Int
+
+ /** used only from GenASM */
+ def opcodeIFICMP(): Int
+
}
/** An equality test */
case object EQ extends TestOp {
def negate() = NE
override def toString() = "EQ"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFEQ
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPEQ
}
/** A non-equality test */
case object NE extends TestOp {
def negate() = EQ
override def toString() = "NE"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFNE
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPNE
}
/** A less-than test */
case object LT extends TestOp {
def negate() = GE
override def toString() = "LT"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFLT
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLT
}
/** A greater-than-or-equal test */
case object GE extends TestOp {
def negate() = LT
override def toString() = "GE"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFGE
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGE
}
/** A less-than-or-equal test */
case object LE extends TestOp {
def negate() = GT
override def toString() = "LE"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFLE
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLE
}
/** A greater-than test */
case object GT extends TestOp {
def negate() = LE
override def toString() = "GT"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFGT
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGT
}
/** This class represents an arithmetic operation. */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 7ad7cadd92..1ec2cf017a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -44,7 +44,7 @@ trait TypeKinds { self: ICodes =>
}
/** Reverse map for toType */
private lazy val reversePrimitiveMap: Map[TypeKind, Symbol] =
- primitiveTypeMap map (_.swap) toMap
+ (primitiveTypeMap map (_.swap)).toMap
/** This class represents a type kind. Type kinds
* represent the types that the VM know (or the ICode
@@ -420,7 +420,7 @@ trait TypeKinds { self: ICodes =>
// between "object PackratParsers$class" and "trait PackratParsers"
if (sym.isImplClass) {
// pos/spec-List.scala is the sole failure if we don't check for NoSymbol
- val traitSym = sym.owner.info.decl(nme.interfaceName(sym.name))
+ val traitSym = sym.owner.info.decl(tpnme.interfaceName(sym.name))
if (traitSym != NoSymbol)
return REFERENCE(traitSym)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index f5be82a776..4427da92c8 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -566,8 +566,8 @@ abstract class CopyPropagation {
method.blocks map { b =>
"\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
"\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
- } mkString
- )
+ }
+ ).mkString
} /* class CopyAnalysis */
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index 49f5b51d51..5f261ba05e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -97,6 +97,6 @@ abstract class Liveness {
}
}
override def toString() =
- method.blocks map (b => "\nlive-in(%s)=%s\nlive-out(%s)=%s".format(b, in(b), b, out(b))) mkString
+ (method.blocks map (b => "\nlive-in(%s)=%s\nlive-out(%s)=%s".format(b, in(b), b, out(b)))).mkString
} /* Liveness analysis */
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index c217869a48..ff68aba845 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -6,13 +6,13 @@
package scala.tools.nsc
package backend.jvm
-import ch.epfl.lamp.fjbg._
import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile }
import scala.tools.nsc.io._
import scala.tools.nsc.util.ScalaClassLoader
import scala.tools.util.JavapClass
import java.util.jar.{ JarEntry, JarOutputStream, Attributes }
import Attributes.Name
+import language.postfixOps
/** For the last mile: turning generated bytecode in memory into
* something you can use. Has implementations for writing to class
@@ -25,19 +25,19 @@ trait BytecodeWriters {
private def outputDirectory(sym: Symbol): AbstractFile = (
settings.outputDirs.outputDirFor(beforeFlatten(sym.sourceFile))
)
- private def getFile(base: AbstractFile, cls: JClass, suffix: String): AbstractFile = {
+ private def getFile(base: AbstractFile, /*cls.getName()*/ clsName: String, suffix: String): AbstractFile = {
var dir = base
- val pathParts = cls.getName().split("[./]").toList
+ val pathParts = clsName.split("[./]").toList
for (part <- pathParts.init) {
dir = dir.subdirectoryNamed(part)
}
dir.fileNamed(pathParts.last + suffix)
}
- private def getFile(sym: Symbol, cls: JClass, suffix: String): AbstractFile =
- getFile(outputDirectory(sym), cls, suffix)
+ private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
+ getFile(outputDirectory(sym), clsName, suffix)
trait BytecodeWriter {
- def writeClass(label: String, jclass: JClass, sym: Symbol): Unit
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol): Unit
def close(): Unit = ()
}
@@ -48,11 +48,11 @@ trait BytecodeWriters {
)
val writer = new Jar(jfile).jarWriter(jarMainAttrs: _*)
- def writeClass(label: String, jclass: JClass, sym: Symbol) {
- val path = jclass.getName + ".class"
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ val path = jclassName + ".class"
val out = writer.newOutputStream(path)
- try jclass writeTo out
+ try out.write(jclassBytes, 0, jclassBytes.length)
finally out.flush()
informProgress("added " + label + path + " to jar")
@@ -72,11 +72,11 @@ trait BytecodeWriters {
try javap(Seq("-verbose", "dummy")) foreach (_.show())
finally pw.close()
}
- abstract override def writeClass(label: String, jclass: JClass, sym: Symbol) {
- super.writeClass(label, jclass, sym)
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ super.writeClass(label, jclassName, jclassBytes, sym)
- val bytes = getFile(sym, jclass, ".class").toByteArray
- val segments = jclass.getName().split("[./]")
+ val bytes = getFile(sym, jclassName, ".class").toByteArray
+ val segments = jclassName.split("[./]")
val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile;
javapFile.parent.createDirectory()
@@ -85,11 +85,11 @@ trait BytecodeWriters {
}
trait ClassBytecodeWriter extends BytecodeWriter {
- def writeClass(label: String, jclass: JClass, sym: Symbol) {
- val outfile = getFile(sym, jclass, ".class")
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ val outfile = getFile(sym, jclassName, ".class")
val outstream = new DataOutputStream(outfile.bufferedOutput)
- try jclass writeTo outstream
+ try outstream.write(jclassBytes, 0, jclassBytes.length)
finally outstream.close()
informProgress("wrote '" + label + "' to " + outfile)
}
@@ -98,15 +98,15 @@ trait BytecodeWriters {
trait DumpBytecodeWriter extends BytecodeWriter {
val baseDir = Directory(settings.Ydumpclasses.value).createDirectory()
- abstract override def writeClass(label: String, jclass: JClass, sym: Symbol) {
- super.writeClass(label, jclass, sym)
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ super.writeClass(label, jclassName, jclassBytes, sym)
- val pathName = jclass.getName()
+ val pathName = jclassName
var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
dumpFile.parent.createDirectory()
val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path))
- try jclass writeTo outstream
+ try outstream.write(jclassBytes, 0, jclassBytes.length)
finally outstream.close()
}
}
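After the change above, the writers receive the class name and raw bytes rather than an FJBG JClass, so emitting a class file is just streaming the array. A self-contained equivalent of the write step (hypothetical path handling; the real code resolves an AbstractFile from the symbol):

import java.io.{ DataOutputStream, FileOutputStream }

object WriteBytesSketch {
  def writeClassFile(path: String, bytes: Array[Byte]): Unit = {
    val out = new DataOutputStream(new FileOutputStream(path))
    try out.write(bytes, 0, bytes.length)
    finally out.close()
  }
}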
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
new file mode 100644
index 0000000000..b8cf4eca9f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -0,0 +1,3291 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+
+import java.nio.ByteBuffer
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
+import scala.tools.nsc.symtab._
+import scala.tools.nsc.io.AbstractFile
+
+import scala.tools.asm
+import asm.Label
+
+/**
+ * @author Iulian Dragos (version 1.0, FJBG-based implementation)
+ * @author Miguel Garcia (version 2.0, ASM-based implementation)
+ *
+ * Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2012Q2/GenASM.pdf
+ */
+abstract class GenASM extends SubComponent with BytecodeWriters {
+ import global._
+ import icodes._
+ import icodes.opcodes._
+ import definitions._
+
+ val phaseName = "jvm"
+
+ /** Create a new phase */
+ override def newPhase(p: Phase): Phase = new AsmPhase(p)
+
+ private def outputDirectory(sym: Symbol): AbstractFile =
+ settings.outputDirs outputDirFor beforeFlatten(sym.sourceFile)
+
+ private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
+ var dir = base
+ val pathParts = clsName.split("[./]").toList
+ for (part <- pathParts.init) {
+ dir = dir.subdirectoryNamed(part)
+ }
+ dir.fileNamed(pathParts.last + suffix)
+ }
+ private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
+ getFile(outputDirectory(sym), clsName, suffix)
+
+ /** JVM code generation phase
+ */
+ class AsmPhase(prev: Phase) extends ICodePhase(prev) {
+ def name = phaseName
+ override def erasedTypes = true
+ def apply(cls: IClass) = sys.error("no implementation")
+
+ val BeanInfoAttr = definitions.getRequiredClass("scala.beans.BeanInfo")
+
+ def isJavaEntryPoint(icls: IClass) = {
+ val sym = icls.symbol
+ def fail(msg: String, pos: Position = sym.pos) = {
+ icls.cunit.warning(sym.pos,
+ sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" +
+ " Reason: " + msg
+ // TODO: make this next claim true, if possible
+ // by generating valid main methods as static in module classes
+ // not sure what the jvm allows here
+ // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead."
+ )
+ false
+ }
+ def failNoForwarder(msg: String) = {
+ fail(msg + ", which means no static forwarder can be generated.\n")
+ }
+ val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
+ val hasApproximate = possibles exists { m =>
+ m.info match {
+ case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass
+ case _ => false
+ }
+ }
+ // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
+ hasApproximate && {
+ // Before erasure so we can identify generic mains.
+ beforeErasure {
+ val companion = sym.linkedClassOfClass
+ val companionMain = companion.tpe.member(nme.main)
+
+ if (hasJavaMainMethod(companion))
+ failNoForwarder("companion contains its own main method")
+ else if (companion.tpe.member(nme.main) != NoSymbol)
+ // this is only because forwarders aren't smart enough yet
+ failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
+ else if (companion.isTrait)
+ failNoForwarder("companion is a trait")
+ // Now either succeed, or issue some additional warnings for things which look like
+ // attempts to be java main methods.
+ else possibles exists { m =>
+ m.info match {
+ case PolyType(_, _) =>
+ fail("main methods cannot be generic.")
+ case MethodType(params, res) =>
+ if (res.typeSymbol :: params exists (_.isAbstractType))
+ fail("main methods cannot refer to type parameters or abstract types.", m.pos)
+ else
+ isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
+ case tp =>
+ fail("don't know what this is: " + tp, m.pos)
+ }
+ }
+ }
+ }
+ }
+
+ private def initBytecodeWriter(entryPoints: List[IClass]): BytecodeWriter = {
+ settings.outputDirs.getSingleOutput match {
+ case Some(f) if f hasExtension "jar" =>
+ // If no main class was specified, see if there's only one
+ // entry point among the classes going into the jar.
+ if (settings.mainClass.isDefault) {
+ entryPoints map (_.symbol fullName '.') match {
+ case Nil =>
+ log("No Main-Class designated or discovered.")
+ case name :: Nil =>
+ log("Unique entry point: setting Main-Class to " + name)
+ settings.mainClass.value = name
+ case names =>
+ log("No Main-Class due to multiple entry points:\n " + names.mkString("\n "))
+ }
+ }
+ else log("Main-Class was specified: " + settings.mainClass.value)
+
+ new DirectToJarfileWriter(f.file)
+
+ case _ =>
+ if (settings.Ygenjavap.isDefault) {
+ if(settings.Ydumpclasses.isDefault)
+ new ClassBytecodeWriter { }
+ else
+ new ClassBytecodeWriter with DumpBytecodeWriter { }
+ }
+ else new ClassBytecodeWriter with JavapBytecodeWriter { }
+
+ // TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point.
+ // Three areas where javap output is less than ideal (e.g. when comparing versions of the same classfile) are:
+ // (a) unreadable pickle;
+ // (b) two constant pools, while having identical contents, are displayed differently due to physical layout.
+ // (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap, their expansion makes more sense instead.
+ }
+ }
+
+ override def run() {
+
+ if (settings.debug.value)
+ inform("[running phase " + name + " on icode]")
+
+ if (settings.Xdce.value)
+ for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
+ icodes.classes -= sym
+
+ // For predictably ordered error messages.
+ var sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
+
+ debuglog("Created new bytecode generator for " + classes.size + " classes.")
+ val bytecodeWriter = initBytecodeWriter(sortedClasses filter isJavaEntryPoint)
+ val plainCodeGen = new JPlainBuilder(bytecodeWriter)
+ val mirrorCodeGen = new JMirrorBuilder(bytecodeWriter)
+ val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter)
+
+ while(!sortedClasses.isEmpty) {
+ val c = sortedClasses.head
+
+ if (isStaticModule(c.symbol) && isTopLevelModule(c.symbol)) {
+ if (c.symbol.companionClass == NoSymbol) {
+ mirrorCodeGen.genMirrorClass(c.symbol, c.cunit)
+ } else {
+ log("No mirror class for module with linked class: " + c.symbol.fullName)
+ }
+ }
+
+ plainCodeGen.genClass(c)
+
+ if (c.symbol hasAnnotation BeanInfoAttr) {
+ beanInfoCodeGen.genBeanInfoClass(c)
+ }
+
+ sortedClasses = sortedClasses.tail
+ classes -= c.symbol // GC opportunity
+ }
+
+ bytecodeWriter.close()
+ classes.clear()
+ reverseJavaName.clear()
+
+ /* don't javaNameCache.clear() because that causes the following tests to fail:
+ * test/files/run/macro-repl-dontexpand.scala
+ * test/files/jvm/interpreter.scala
+ * TODO but why? what use could javaNameCache possibly see once GenJVM is over?
+ */
+
+ /* TODO After emitting all class files (e.g., in a separate compiler phase) ASM can perform bytecode verification:
+ *
+ * (1) call the asm.util.CheckClassAdapter.verify() overload:
+ * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw)
+ *
+ * (2) passing a custom ClassLoader to verify inter-dependent classes.
+ *
+ * Alternatively, an offline bytecode verifier could be used (e.g. Maxine brings one as a separate tool).
+ */
+
+ } // end of AsmPhase.run()
+
+ } // end of class AsmPhase
+
+ var pickledBytes = 0 // statistics
+
+ // Don't put this in per run caches. Contains entries for classes as well as members.
+ val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
+ NothingClass -> binarynme.RuntimeNothing,
+ RuntimeNothingClass -> binarynme.RuntimeNothing,
+ NullClass -> binarynme.RuntimeNull,
+ RuntimeNullClass -> binarynme.RuntimeNull
+ )
+
+ // unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
+ val reverseJavaName = mutable.Map.empty[String, Symbol] ++= List(
+ binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
+ binarynme.RuntimeNull.toString() -> RuntimeNullClass
+ )
+
+ private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
+
+ @inline final private def hasPublicBitSet(flags: Int) = ((flags & asm.Opcodes.ACC_PUBLIC) != 0)
+
+ @inline final private def isRemote(s: Symbol) = (s hasAnnotation RemoteAttr)
+
+ /**
+ * Return the Java modifiers for the given symbol.
+ * Java modifiers for classes:
+ * - public, abstract, final, strictfp (not used)
+ * for interfaces:
+ * - the same as for classes, without 'final'
+ * for fields:
+ * - public, private (*)
+ * - static, final
+ * for methods:
+ * - the same as for fields, plus:
+ * - abstract, synchronized (not used), strictfp (not used), native (not used)
+ *
+ * (*) protected cannot be used, since inner classes 'see' protected members,
+ * and they would fail verification after being lifted.
+ */
+ def javaFlags(sym: Symbol): Int = {
+ // constructors of module classes should be private
+ // PP: why are they only being marked private at this stage and not earlier?
+ val privateFlag =
+ sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
+
+ // Final: the only fields which can receive ACC_FINAL are eager vals.
+ // Neither vars nor lazy vals can, because:
+ //
+ // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
+ // "Another problem is that the specification allows aggressive
+ // optimization of final fields. Within a thread, it is permissible to
+ // reorder reads of a final field with those modifications of a final
+ // field that do not take place in the constructor."
+ //
+ // A var or lazy val which is marked final still has meaning to the
+ // scala compiler. The word final is heavily overloaded unfortunately;
+ // for us it means "not overridable". At present you can't override
+ // vars regardless; this may change.
+ //
+ // The logic does not check .isFinal (which checks flags for the FINAL flag
+ // and includes symbols marked lateFINAL); instead it inspects rawflags so
+ // that lateFINAL can be excluded. Such symbols are eligible for inlining, but to
+ // avoid breaking proxy software which depends on subclassing, we do not
+ // emit ACC_FINAL.
+ // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
+
+ val finalFlag = (
+ (sym.hasFlag(Flags.FINAL) || isTopLevelModule(sym))
+ && !sym.enclClass.isInterface
+ && !sym.isClassConstructor
+ && !sym.isMutable // lazy vals and vars both
+ )
+
+ import asm.Opcodes._
+ mkFlags(
+ if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
+ if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
+ if (sym.isInterface) ACC_INTERFACE else 0,
+ if (finalFlag) ACC_FINAL else 0,
+ if (sym.isStaticMember) ACC_STATIC else 0,
+ if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
+ if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+ if (sym.isVarargsMethod) ACC_VARARGS else 0,
+ if (sym.hasFlag(Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
+ )
+ }
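+ // Illustrative example: the class underlying a top-level object (a static module class)
+ // typically receives ACC_PUBLIC | ACC_FINAL | ACC_SUPER here, since isTopLevelModule
+ // forces the final flag and every plain class gets ACC_SUPER.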
+
+ def javaFieldFlags(sym: Symbol) = {
+ javaFlags(sym) | mkFlags(
+ if (sym hasAnnotation TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0,
+ if (sym hasAnnotation VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0,
+ if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL
+ )
+ }
+
+ def isTopLevelModule(sym: Symbol): Boolean =
+ afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
+
+ def isStaticModule(sym: Symbol): Boolean = {
+ sym.isModuleClass && !sym.isImplClass && !sym.isLifted
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM)
+ // Background:
+ // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
+ // http://comments.gmane.org/gmane.comp.java.vm.languages/2293
+ // https://issues.scala-lang.org/browse/SI-3872
+ // -----------------------------------------------------------------------------------------
+
+ /**
+ * Given an internal name (eg "java/lang/Integer") returns the class symbol for it.
+ *
+ * Better not to need this method (an example where control flow arrives here is welcome).
+ * This method is invoked only when both (1) and (2) below happen:
+ * (1) providing an asm.ClassWriter with an internal name by other means than javaName()
+ * (2) forgetting to track the corresponding class-symbol in reverseJavaName.
+ *
+ * (The first item is already unlikely because we rely on javaName()
+ * to do the bookkeeping for entries that should go in innerClassBuffer.)
+ *
+ * (We could do completely without this method at the expense of computing stack-map-frames ourselves and
+ * invoking visitFrame(), but that would require another pass over all instructions.)
+ *
+ * Right now I can't think of any invocation of visitSomething() on MethodVisitor
+ * where we hand an internal name not backed by a reverseJavaName.
+ * However, I'm leaving this note just in case any such oversight is discovered.
+ */
+ def inameToSymbol(iname: String): Symbol = {
+ val name = global.newTypeName(iname)
+ val res0 =
+ if (nme.isModuleName(name)) definitions.getModule(nme.stripModuleSuffix(name))
+ else definitions.getClassByName(name.replace('/', '.')) // TODO fails for inner classes (but this hasn't been tested).
+ assert(res0 != NoSymbol)
+ val res = jsymbol(res0)
+ res
+ }
+
+ def jsymbol(sym: Symbol): Symbol = {
+ if(sym.isJavaDefined && sym.isModuleClass) sym.linkedClassOfClass
+ else if(sym.isModule) sym.moduleClass
+ else sym // we track only module-classes and plain-classes
+ }
+
+ private def superClasses(s: Symbol): List[Symbol] = {
+ assert(!s.isInterface)
+ s.superClass match {
+ case NoSymbol => List(s)
+ case sc => s :: superClasses(sc)
+ }
+ }
+
+ private def firstCommonSuffix(as: List[Symbol], bs: List[Symbol]): Symbol = {
+ assert(!(as contains NoSymbol))
+ assert(!(bs contains NoSymbol))
+ var chainA = as
+ var chainB = bs
+ var fcs: Symbol = NoSymbol
+ do {
+ if (chainB contains chainA.head) fcs = chainA.head
+ else if (chainA contains chainB.head) fcs = chainB.head
+ else {
+ chainA = chainA.tail
+ chainB = chainB.tail
+ }
+ } while(fcs == NoSymbol)
+ fcs
+ }
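+ // Example: for the chains [Integer, Number, Object] and [Long, Number, Object], neither
+ // head occurs in the other chain, so both heads are dropped and the loop settles on Number.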
+
+ @inline final private def jvmWiseLUB(a: Symbol, b: Symbol): Symbol = {
+
+ assert(a.isClass)
+ assert(b.isClass)
+
+ val res = Pair(a.isInterface, b.isInterface) match {
+ case (true, true) =>
+ global.lub(List(a.tpe, b.tpe)).typeSymbol // TODO assert == firstCommonSuffix of resp. parents
+ case (true, false) =>
+ if(b isSubClass a) a else ObjectClass
+ case (false, true) =>
+ if(a isSubClass b) b else ObjectClass
+ case _ =>
+ firstCommonSuffix(superClasses(a), superClasses(b))
+ }
+ assert(res != NoSymbol)
+ res
+ }
+
+ /* The internal name of the least common ancestor of the types given by inameA and inameB.
+ It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow */
+ def getCommonSuperClass(inameA: String, inameB: String): String = {
+ val a = reverseJavaName.getOrElseUpdate(inameA, inameToSymbol(inameA))
+ val b = reverseJavaName.getOrElseUpdate(inameB, inameToSymbol(inameB))
+
+ // global.lub(List(a.tpe, b.tpe)).typeSymbol.javaBinaryName.toString()
+ // icodes.lub(icodes.toTypeKind(a.tpe), icodes.toTypeKind(b.tpe)).toType
+ val lcaSym = jvmWiseLUB(a, b)
+ val lcaName = lcaSym.javaBinaryName.toString // don't call javaName because that side-effects innerClassBuffer.
+ val oldsym = reverseJavaName.put(lcaName, lcaSym)
+ assert(oldsym.isEmpty || (oldsym.get == lcaSym), "somehow we're not managing to compute common-super-class for ASM consumption")
+ assert(lcaName != "scala/Any")
+
+ lcaName // TODO ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Do some caching.
+ }
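+ // Illustrative example: getCommonSuperClass("java/lang/Integer", "java/lang/Long") yields
+ // "java/lang/Number", the JVM-level LUB that ASM records in the computed stack map frames.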
+
+ class CClassWriter(flags: Int) extends asm.ClassWriter(flags) {
+ override def getCommonSuperClass(iname1: String, iname2: String): String = {
+ GenASM.this.getCommonSuperClass(iname1, iname2)
+ }
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // constants
+ // -----------------------------------------------------------------------------------------
+
+ private val classfileVersion: Int = settings.target.value match {
+ case "jvm-1.5" => asm.Opcodes.V1_5
+ case "jvm-1.5-asm" => asm.Opcodes.V1_5
+ case "jvm-1.6" => asm.Opcodes.V1_6
+ case "jvm-1.7" => asm.Opcodes.V1_7
+ }
+
+ private val majorVersion: Int = (classfileVersion & 0xFF)
+ private val emitStackMapFrame = (majorVersion >= 50)
+
+ private val extraProc: Int = mkFlags(
+ asm.ClassWriter.COMPUTE_MAXS,
+ if(emitStackMapFrame) asm.ClassWriter.COMPUTE_FRAMES else 0
+ )
+
+ val JAVA_LANG_OBJECT = asm.Type.getObjectType("java/lang/Object")
+ val JAVA_LANG_STRING = asm.Type.getObjectType("java/lang/String")
+
+ /** basic functionality for class file building */
+ abstract class JBuilder(bytecodeWriter: BytecodeWriter) {
+
+ val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type]
+ val EMPTY_STRING_ARRAY = Array.empty[String]
+
+ val mdesc_arglessvoid = "()V"
+
+ val CLASS_CONSTRUCTOR_NAME = "<clinit>"
+ val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+
+ // -----------------------------------------------------------------------------------------
+ // factory methods
+ // -----------------------------------------------------------------------------------------
+
+ /**
+ * Returns a new ClassWriter for the class given by arguments.
+ *
+ * @param access the class's access flags. This parameter also indicates if the class is deprecated.
+ *
+ * @param name the internal name of the class.
+ *
+ * @param signature the signature of this class. May be <tt>null</tt> if
+ * the class is not a generic one, and does not extend or implement
+ * generic classes or interfaces.
+ *
+ * @param superName the internal name of the super class. For interfaces,
+ * the super class is {@link Object}. May be <tt>null</tt>, but
+ * only for the {@link Object} class.
+ *
+ * @param interfaces the internal names of the class's interfaces (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
+ */
+ def createJClass(access: Int, name: String, signature: String, superName: String, interfaces: Array[String]): asm.ClassWriter = {
+ val cw = new CClassWriter(extraProc)
+ cw.visit(classfileVersion,
+ access, name, signature,
+ superName, interfaces)
+
+ cw
+ }
+
+ def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
+ val dest = new Array[Byte](len);
+ System.arraycopy(b, offset, dest, 0, len);
+ new asm.CustomAttr(name, dest)
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // utilities useful when emitting plain, mirror, and beaninfo classes.
+ // -----------------------------------------------------------------------------------------
+
+ def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) {
+ try {
+ val arr = jclass.toByteArray()
+ bytecodeWriter.writeClass(label, jclassName, arr, sym)
+ } catch {
+ case e: java.lang.RuntimeException if(e.getMessage() == "Class file too large!") =>
+ // TODO check where ASM throws the equivalent of CodeSizeTooBigException
+ log("Skipped class "+jclassName+" because it exceeds JVM limits (it's too big or has methods that are too long).")
+ }
+ }
+
+ /** Specialized array conversion to prevent calling
+ * java.lang.reflect.Array.newInstance via TraversableOnce.toArray
+ */
+ def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a }
+ def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
+
+ // -----------------------------------------------------------------------------------------
+ // Getters for (JVMS 4.2) internal and unqualified names (represented as JType instances).
+ // These getters track behind the scenes the inner classes referred to in the class being emitted,
+ // so as to build the InnerClasses attribute (JVMS 4.7.6) via `addInnerClasses()`
+ // (which also adds as member classes those inner classes that have been declared,
+ // thus also covering the case of inner classes declared but otherwise not referred).
+ // -----------------------------------------------------------------------------------------
+
+ val innerClassBuffer = mutable.LinkedHashSet[Symbol]()
+
+ /** For a given symbol, return the symbol corresponding to the class that should be declared as an inner class.
+ *
+ * For example:
+ * class A {
+ * class B
+ * object C
+ * }
+ *
+ * then this method will return:
+ * NoSymbol for A,
+ * the same symbol for A.B (corresponding to A$B class), and
+ * A$C$ symbol for A.C.
+ */
+ def innerClassSymbolFor(s: Symbol): Symbol =
+ if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
+
+ /** Return a name for this symbol that can be used on the Java platform. It removes spaces from names.
+ *
+ * Special handling:
+ * scala.Nothing erases to scala.runtime.Nothing$
+ * scala.Null erases to scala.runtime.Null$
+ *
+ * This is needed because they are not real classes, and they mean
+ * 'abrupt termination upon evaluation of that expression' or null respectively.
+ * This handling is already done in GenICode, but here we need to remove
+ * references to these types from method signatures, because such classes
+ * cannot exist in the classpath: the type checker will be very confused.
+ */
+ def javaName(sym: Symbol): String = {
+
+ /**
+ * Checks if the given symbol corresponds to an inner class/object and adds it to innerClassBuffer.
+ *
+ * Note: This method is called recursively, thus making sure that we add the complete chain
+ * of inner classes, all the way up to the root class.
+ */
+ def collectInnerClass(s: Symbol): Unit = {
+ // TODO: some beforeFlatten { ... } which accounts for
+ // being nested in parameterized classes (if we're going to selectively flatten.)
+ val x = innerClassSymbolFor(s)
+ if(x ne NoSymbol) {
+ assert(x.isClass, "not an inner-class symbol")
+ val isInner = !x.rawowner.isPackageClass
+ if (isInner) {
+ innerClassBuffer += x
+ collectInnerClass(x.rawowner)
+ }
+ }
+ }
+
+ collectInnerClass(sym)
+
+ var hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod))
+ val cachedJN = javaNameCache.getOrElseUpdate(sym, {
+ if (hasInternalName) { sym.javaBinaryName }
+ else { sym.javaSimpleName }
+ })
+
+ if(emitStackMapFrame && hasInternalName) {
+ val internalName = cachedJN.toString()
+ val trackedSym = jsymbol(sym)
+ reverseJavaName.get(internalName) match {
+ case None =>
+ reverseJavaName.put(internalName, trackedSym)
+ case Some(oldsym) =>
+ assert((oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass), // In contrast, neither NothingClass nor NullClass show up bytecode-level.
+ "how can getCommonSuperclass() do its job if different class symbols get the same bytecode-level internal name.")
+ }
+ }
+
+ cachedJN.toString
+ }
+
+ def descriptor(t: Type): String = { javaType(t).getDescriptor }
+ def descriptor(k: TypeKind): String = { javaType(k).getDescriptor }
+ def descriptor(s: Symbol): String = { javaType(s).getDescriptor }
+
+ def javaType(tk: TypeKind): asm.Type = {
+ if(tk.isValueType) {
+ if(tk.isIntSizedType) {
+ (tk: @unchecked) match {
+ case BOOL => asm.Type.BOOLEAN_TYPE
+ case BYTE => asm.Type.BYTE_TYPE
+ case SHORT => asm.Type.SHORT_TYPE
+ case CHAR => asm.Type.CHAR_TYPE
+ case INT => asm.Type.INT_TYPE
+ }
+ } else {
+ (tk: @unchecked) match {
+ case UNIT => asm.Type.VOID_TYPE
+ case LONG => asm.Type.LONG_TYPE
+ case FLOAT => asm.Type.FLOAT_TYPE
+ case DOUBLE => asm.Type.DOUBLE_TYPE
+ }
+ }
+ } else {
+ assert(!tk.isBoxedType, tk) // documentation (BOXED matches none below anyway)
+ (tk: @unchecked) match {
+ case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
+ case ARRAY(elem) => javaArrayType(javaType(elem))
+ }
+ }
+ }
+
+ def javaType(t: Type): asm.Type = javaType(toTypeKind(t))
+
+ def javaType(s: Symbol): asm.Type = {
+ if (s.isMethod) {
+ val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType);
+ asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _* )
+ } else { javaType(s.tpe) }
+ }
+
+ def javaArrayType(elem: asm.Type): asm.Type = { asm.Type.getObjectType("[" + elem.getDescriptor) }
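+ // For example, javaType(INT) is asm.Type.INT_TYPE, javaArrayType(asm.Type.INT_TYPE) denotes
+ // int[] (descriptor "[I"), and javaType(REFERENCE(StringClass)) is the object type "java/lang/String".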
+
+ def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+
+ } // end of class JBuilder
+
+
+ /** functionality for building plain and mirror classes */
+ abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+
+ // -----------------------------------------------------------------------------------------
+ // more constants
+ // -----------------------------------------------------------------------------------------
+
+ val INNER_CLASSES_FLAGS =
+ (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+
+ val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
+ val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
+
+ val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString
+
+ // -----------------------------------------------------------------------------------------
+ // Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only
+ // i.e., the pickle is contained in a custom annotation, see:
+ // (1) `addAnnotations()`,
+ // (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10
+ // (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5
+ // That annotation in turn is not related to the "java-generic-signature" (JVMS 4.7.9)
+ // other than both ending up encoded as attributes (JVMS 4.7)
+ // (with the caveat that the "ScalaSig" attribute is associated to some classes,
+ // while the "Signature" attribute can be associated to classes, methods, and fields.)
+ // -----------------------------------------------------------------------------------------
+
+ val versionPickle = {
+ val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
+ assert(vp.writeIndex == 0, vp)
+ vp writeNat PickleFormat.MajorVersion
+ vp writeNat PickleFormat.MinorVersion
+ vp writeNat 0
+ vp
+ }
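+ // That is, the marker pickle carries only the pickle format version and a zero entry count;
+ // the signature bytes themselves travel in the ScalaSignature annotation (see above).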
+
+ def pickleMarkerLocal = {
+ createJAttribute(tpnme.ScalaSignatureATTR.toString, versionPickle.bytes, 0, versionPickle.writeIndex)
+ }
+
+ def pickleMarkerForeign = {
+ createJAttribute(tpnme.ScalaATTR.toString, new Array[Byte](0), 0, 0)
+ }
+
+ /** Returns a ScalaSignature annotation if it must be added to this class, none otherwise.
+ * This annotation must be added to the class' annotations list when generating them.
+ *
+ * Depending on whether the returned option is defined, it adds to `jclass` one of:
+ * (a) the ScalaSig marker attribute
+ * (indicating that a scala-signature-annotation aka pickle is present in this class); or
+ * (b) the Scala marker attribute
+ * (indicating that a scala-signature-annotation aka pickle is to be found in another file).
+ *
+ *
+ * @param jclassName The class file that is being readied.
+ * @param sym The symbol for which the signature has been entered in the symData map.
+ * This is different from the symbol
+ * that is being generated in the case of a mirror class.
+ * @return An option that is:
+ * - defined and contains an AnnotationInfo of the ScalaSignature type,
+ * instantiated with the pickle signature for sym.
+ * - empty if the jclass/sym pair must not contain a pickle.
+ *
+ */
+ def getAnnotPickle(jclassName: String, sym: Symbol): Option[AnnotationInfo] = {
+ currentRun.symData get sym match {
+ case Some(pickle) if !nme.isModuleName(newTermName(jclassName)) =>
+ val scalaAnnot = {
+ val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
+ AnnotationInfo(sigBytes.sigAnnot, Nil, List((nme.bytes, sigBytes)))
+ }
+ pickledBytes += pickle.writeIndex
+ currentRun.symData -= sym
+ currentRun.symData -= sym.companionSymbol
+ Some(scalaAnnot)
+ case _ =>
+ None
+ }
+ }
+
+ /**
+ * Quoting from JVMS 4.7.5 The Exceptions Attribute
+ * "The Exceptions attribute indicates which checked exceptions a method may throw.
+ * There may be at most one Exceptions attribute in each method_info structure."
+ *
+ * The contents of that attribute are determined by the `String[] exceptions` argument to ASM's ClassVisitor.visitMethod()
+ * This method returns such list of internal names.
+ *
+ */
+ def getExceptions(excs: List[AnnotationInfo]): List[String] = {
+ for (AnnotationInfo(tp, List(exc), _) <- excs.distinct if tp.typeSymbol == ThrowsClass)
+ yield {
+ val Literal(const) = exc
+ javaName(const.typeValue.typeSymbol)
+ }
+ }
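+ // For example, a method annotated with @throws(classOf[java.io.IOException]) contributes
+ // "java/io/IOException" to the Exceptions attribute passed to visitMethod().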
+
+ /** Whether an annotation should be emitted as a Java annotation
+ * .initialize: if 'annot' is read from pickle, atp might be un-initialized
+ */
+ private def shouldEmitAnnotation(annot: AnnotationInfo) =
+ annot.symbol.initialize.isJavaDefined &&
+ annot.matches(ClassfileAnnotationClass) &&
+ annot.args.isEmpty &&
+ !annot.matches(DeprecatedAttr)
+
+ // @M don't generate java generics sigs for (members of) implementation
+ // classes, as they are monomorphic (TODO: ok?)
+ private def needsGenericSignature(sym: Symbol) = !(
+ // PP: This condition used to include sym.hasExpandedName, but this leads
+ // to the total loss of generic information if a private member is
+ // accessed from a closure: both the field and the accessor were generated
+ // without it. This is particularly bad because the availability of
+ // generic information could disappear as a consequence of a seemingly
+ // unrelated change.
+ sym.isSynthetic
+ || sym.isLiftedMethod
+ || sym.isBridge
+ || (sym.ownerChain exists (_.isImplClass))
+ )
+
+ def getCurrentCUnit(): CompilationUnit
+
+ /** @return
+ * - `null` if no Java signature is to be added (`null` is what ASM expects in these cases).
+ * - otherwise the signature in question
+ */
+ def getGenericSignature(sym: Symbol, owner: Symbol): String = {
+
+ if (!needsGenericSignature(sym)) { return null }
+
+ val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
+
+ val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
+ if (jsOpt.isEmpty) { return null }
+
+ val sig = jsOpt.get
+ log(sig) // This seems useful enough in the general case.
+
+ def wrap(op: => Unit) = {
+ try { op; true }
+ catch { case _ => false }
+ }
+
+ if (settings.Xverify.value) {
+ // Run the signature parser to catch bogus signatures.
+ val isValidSignature = wrap {
+ // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
+ import scala.tools.asm.util.SignatureChecker
+ if (sym.isMethod) { SignatureChecker checkMethodSignature sig } // requires asm-util.jar
+ else if (sym.isTerm) { SignatureChecker checkFieldSignature sig }
+ else { SignatureChecker checkClassSignature sig }
+ }
+
+ if(!isValidSignature) {
+ getCurrentCUnit().warning(sym.pos,
+ """|compiler bug: created invalid generic signature for %s in %s
+ |signature: %s
+ |if this is reproducible, please report bug at https://issues.scala-lang.org/
+ """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
+ return null
+ }
+ }
+
+ if ((settings.check containsName phaseName)) {
+ val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
+ val bytecodeTpe = owner.thisType.memberInfo(sym)
+ if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
+ getCurrentCUnit().warning(sym.pos,
+ """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
+ |signature: %s
+ |original type: %s
+ |normalized type: %s
+ |erasure type: %s
+ |if this is reproducible, please report bug at http://issues.scala-lang.org/
+ """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
+ return null
+ }
+ }
+
+ sig
+ }
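+ // Illustrative example: for `def id[T](x: T): T` the Java generic signature returned here
+ // is "<T:Ljava/lang/Object;>(TT;)TT;".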
+
+ def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
+ val ca = new Array[Char](bytes.size)
+ var idx = 0
+ while(idx < bytes.size) {
+ val b: Byte = bytes(idx)
+ assert((b & ~0x7f) == 0)
+ ca(idx) = b.asInstanceOf[Char]
+ idx += 1
+ }
+
+ ca
+ }
+
+ // TODO this method isn't exercised during bootstrapping. Open question: is it bug free?
+ private def arrEncode(sb: ScalaSigBytes): Array[String] = {
+ var strs: List[String] = Nil
+ val bSeven: Array[Byte] = sb.sevenBitsMayBeZero
+ // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure)
+ var prevOffset = 0
+ var offset = 0
+ var encLength = 0
+ while(offset < bSeven.size) {
+ val newEncLength = encLength.toLong + (if(bSeven(offset) == 0) 2 else 1)
+ if(newEncLength > 65535) {
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ encLength = 0
+ prevOffset = offset
+ } else {
+ encLength += 1
+ offset += 1
+ }
+ }
+ if(prevOffset < offset) {
+ assert(offset == bSeven.length)
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ }
+ assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict?
+ strs.reverse.toArray
+ }
+
+ private def strEncode(sb: ScalaSigBytes): String = {
+ val ca = ubytesToCharArray(sb.sevenBitsMayBeZero)
+ new java.lang.String(ca)
+ // debug val bvA = new asm.ByteVector; bvA.putUTF8(s)
+ // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes)
+ // debug assert(enc(idx) == bvA.getByte(idx + 2))
+ // debug assert(bvA.getLength == enc.size + 2)
+ }
+
+ def emitArgument(av: asm.AnnotationVisitor,
+ name: String,
+ arg: ClassfileAnnotArg) {
+ arg match {
+
+ case LiteralAnnotArg(const) =>
+ if(const.isNonUnitAnyVal) { av.visit(name, const.value) }
+ else {
+ const.tag match {
+ case StringTag =>
+ assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+ av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag
+ case ClazzTag => av.visit(name, javaType(const.typeValue))
+ case EnumTag =>
+ val edesc = descriptor(const.tpe) // the class descriptor of the enumeration class.
+ val evalue = const.symbolValue.name.toString // the actual enumeration value.
+ av.visitEnum(name, edesc, evalue)
+ }
+ }
+
+ case sb@ScalaSigBytes(bytes) =>
+ // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files)
+ // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure.
+ val assocValue = (if(sb.fitsInOneString) strEncode(sb) else arrEncode(sb))
+ av.visit(name, assocValue)
+ // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape.
+
+ case ArrayAnnotArg(args) =>
+ val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+ for(arg <- args) { emitArgument(arrAnnotV, null, arg) }
+ arrAnnotV.visitEnd()
+
+ case NestedAnnotArg(annInfo) =>
+ val AnnotationInfo(typ, args, assocs) = annInfo
+ assert(args.isEmpty, args)
+ val desc = descriptor(typ) // the class descriptor of the nested annotation class
+ val nestedVisitor = av.visitAnnotation(name, desc)
+ emitAssocs(nestedVisitor, assocs)
+ }
+ }
+
+ def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, ClassfileAnnotArg)]) {
+ for ((name, value) <- assocs) {
+ emitArgument(av, name.toString(), value)
+ }
+ av.visitEnd()
+ }
+
+ def emitAnnotations(cw: asm.ClassVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = cw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ def emitAnnotations(mw: asm.MethodVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = mw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ def emitAnnotations(fw: asm.FieldVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = fw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]) {
+ val annotationss = pannotss map (_ filter shouldEmitAnnotation)
+ if (annotationss forall (_.isEmpty)) return
+ for (Pair(annots, idx) <- annotationss.zipWithIndex;
+ annot <- annots) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true)
+ emitAssocs(pannVisitor, assocs)
+ }
+ }
+
+ def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
+ /** The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ */
+ def outerName(innerSym: Symbol): String = {
+ if (innerSym.originalEnclosingMethod != NoSymbol)
+ null
+ else {
+ val outerName = javaName(innerSym.rawowner)
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
+ else outerName
+ }
+ }
+
+ def innerName(innerSym: Symbol): String =
+ if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+ null
+ else
+ innerSym.rawname + innerSym.moduleSuffix
+
+ // add inner classes which might not have been referenced yet
+ afterErasure {
+ for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
+ innerClassBuffer += m
+ }
+
+ val allInners: List[Symbol] = innerClassBuffer.toList
+ if (allInners.nonEmpty) {
+ debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
+
+ // entries ready to be serialized into the classfile, used to detect duplicates.
+ val entries = mutable.Map.empty[String, String]
+
+ // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
+ for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
+ val flags = mkFlags(
+ if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ javaFlags(innerSym),
+ if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+ ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val jname = javaName(innerSym) // never null
+ val oname = outerName(innerSym) // null when method-enclosed
+ val iname = innerName(innerSym) // null for anonymous inner class
+
+ // Mimicking javap inner class output
+ debuglog(
+ if (oname == null || iname == null) "//class " + jname
+ else "//%s=class %s of class %s".format(iname, jname, oname)
+ )
+
+ assert(jname != null, "javaName is broken.") // documentation
+ val doAdd = entries.get(jname) match {
+ // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+ case Some(prevOName) =>
+ // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+ // i.e. for them it must be the case that oname == java/lang/Thread
+ assert(prevOName == oname, "duplicate")
+ false
+ case None => true
+ }
+
+ if(doAdd) {
+ entries += (jname -> oname)
+ jclass.visitInnerClass(jname, oname, iname, flags)
+ }
+
+ /*
+ * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
+ * If a class file has a version number that is greater than or equal to 51.0, and
+ * has an InnerClasses attribute in its attributes table, then for all entries in the
+ * classes array of the InnerClasses attribute, the value of the
+ * outer_class_info_index item must be zero if the value of the
+ * inner_name_index item is zero.
+ */
+
+ }
+ }
+ }
+
+ /** Adds a @throws(classOf[java.rmi.RemoteException]) annotation to `meth` when needed.
+ *
+ * Invoked from genMethod() and addForwarder().
+ */
+ def addRemoteExceptionAnnot(isRemoteClass: Boolean, isJMethodPublic: Boolean, meth: Symbol) {
+ val needsAnnotation = (
+ ( isRemoteClass ||
+ isRemote(meth) && isJMethodPublic
+ ) && !(meth.throwsAnnotations contains RemoteExceptionClass)
+ )
+ if (needsAnnotation) {
+ val c = Constant(RemoteExceptionClass.tpe)
+ val arg = Literal(c) setType c.tpe
+ meth.addAnnotation(ThrowsClass, arg)
+ }
+ }
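+ // Net effect (illustrative): such methods end up with a `throws java.rmi.RemoteException`
+ // clause in the classfile, because the added annotation flows through getExceptions() above.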
+
+ // -----------------------------------------------------------------------------------------
+ // Static forwarders (related to mirror classes but also present in
+ // a plain class lacking companion module, for details see `isCandidateForForwarders`).
+ // -----------------------------------------------------------------------------------------
+
+ val ExcludedForwarderFlags = {
+ import Flags._
+ // Should include DEFERRED but this breaks findMember.
+ ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags )
+ }
+
+ /** Add a forwarder for method m. Used only from addForwarders(). */
+ private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, module: Symbol, m: Symbol) {
+ val moduleName = javaName(module)
+ val methodInfo = module.thisType.memberInfo(m)
+ val paramJavaTypes: List[asm.Type] = methodInfo.paramTypes map javaType
+ // val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
+
+ /** Forwarders must not be marked final,
+ * as the JVM will not allow redefinition of a final static method,
+ * and we don't know what classes might be subclassing the companion class. See SI-4827.
+ */
+ // TODO: evaluate the other flags we might be dropping on the floor here.
+ // TODO: ACC_SYNTHETIC ?
+ val flags = PublicStatic | (
+ if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0
+ )
+
+ // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize }
+ val jgensig = if (m.isDeferred) null else getGenericSignature(m, module); // only add generic signature if method concrete; bug #1745
+ addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m)
+ val (throws, others) = m.annotations partition (_.symbol == ThrowsClass)
+ val thrownExceptions: List[String] = getExceptions(throws)
+
+ val jReturnType = javaType(methodInfo.resultType)
+ val mdesc = asm.Type.getMethodDescriptor(jReturnType, paramJavaTypes: _*)
+ val mirrorMethodName = javaName(m)
+ val mirrorMethod: asm.MethodVisitor = jclass.visitMethod(
+ flags,
+ mirrorMethodName,
+ mdesc,
+ jgensig,
+ mkArray(thrownExceptions)
+ )
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
+
+ emitAnnotations(mirrorMethod, others)
+ emitParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
+
+ // typestate: entering mode with valid call sequences:
+ // visitCode ( visitFrame | visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber )* visitMaxs ] visitEnd
+
+ mirrorMethod.visitCode()
+
+ mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, descriptor(module))
+
+ var index = 0
+ for(jparamType <- paramJavaTypes) {
+ mirrorMethod.visitVarInsn(jparamType.getOpcode(asm.Opcodes.ILOAD), index)
+ assert(jparamType.getSort() != asm.Type.METHOD, jparamType)
+ index += jparamType.getSize()
+ }
+
+ mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, javaType(m).getDescriptor)
+ mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN))
+
+ mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ mirrorMethod.visitEnd()
+
+ }
+
+ /** Add forwarders for all methods defined in `module` that don't conflict
+ * with methods in the companion class of `module`. A conflict arises when
+ * a method with the same name is defined both in a class and its companion object:
+ * method signature is not taken into account.
+ */
+ def addForwarders(isRemoteClass: Boolean, jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol) {
+ assert(moduleClass.isModuleClass, moduleClass)
+ debuglog("Dumping mirror class for object: " + moduleClass)
+
+ val linkedClass = moduleClass.companionClass
+ val linkedModule = linkedClass.companionSymbol
+ lazy val conflictingNames: Set[Name] = {
+ linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet
+ }
+ debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
+
+ for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
+ if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
+ debuglog("No forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+ else if (conflictingNames(m.name))
+ log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
+ else {
+ log("Adding static forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+ addForwarder(isRemoteClass, jclass, moduleClass, m)
+ }
+ }
+ }
+
+ } // end of class JCommonBuilder
+
+
+ trait JAndroidBuilder {
+ self: JPlainBuilder =>
+
+ /** From the reference documentation of the Android SDK:
+ * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
+ * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
+ * which is an object implementing the `Parcelable.Creator` interface.
+ */
+ private val androidFieldName = newTermName("CREATOR")
+
+ private lazy val AndroidParcelableInterface = definitions.getClassIfDefined("android.os.Parcelable")
+ private lazy val AndroidCreatorClass = definitions.getClassIfDefined("android.os.Parcelable$Creator")
+
+ def isAndroidParcelableClass(sym: Symbol) =
+ (AndroidParcelableInterface != NoSymbol) &&
+ (sym.parentSymbols contains AndroidParcelableInterface)
+
+ /* Typestate: should be called before emitting fields (because it adds an IField to the current IClass). */
+ def addCreatorCode(block: BasicBlock) {
+ val fieldSymbol = (
+ clasz.symbol.newValue(newTermName(androidFieldName), NoPosition, Flags.STATIC | Flags.FINAL)
+ setInfo AndroidCreatorClass.tpe
+ )
+ val methodSymbol = definitions.getMember(clasz.symbol.companionModule, androidFieldName)
+ clasz addField new IField(fieldSymbol)
+ block emit CALL_METHOD(methodSymbol, Static(false))
+ block emit STORE_FIELD(fieldSymbol, true)
+ }
+
+ def legacyAddCreatorCode(clinit: asm.MethodVisitor) {
+ val creatorType: asm.Type = javaType(AndroidCreatorClass)
+ val tdesc_creator = creatorType.getDescriptor
+
+ jclass.visitField(
+ PublicStaticFinal,
+ androidFieldName,
+ tdesc_creator,
+ null, // no java-generic-signature
+ null // no initial value
+ ).visitEnd()
+
+ val moduleName = javaName(clasz.symbol)+"$"
+
+ // GETSTATIC `moduleName`.MODULE$ : `moduleName`;
+ clinit.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ moduleName,
+ strMODULE_INSTANCE_FIELD,
+ asm.Type.getObjectType(moduleName).getDescriptor
+ )
+
+ // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator;
+ clinit.visitMethodInsn(
+ asm.Opcodes.INVOKEVIRTUAL,
+ moduleName,
+ androidFieldName,
+ asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*)
+ )
+
+ // PUTSTATIC `thisName`.CREATOR;
+ clinit.visitFieldInsn(
+ asm.Opcodes.PUTSTATIC,
+ thisName,
+ androidFieldName,
+ tdesc_creator
+ )
+ }
+
+ } // end of trait JAndroidBuilder
+
+ /** Map from primitive type kinds to the corresponding boxed Java reference types.
+ * It is used to push class literals onto the operand stack.
+ * @see Predef.classOf
+ * @see genConstant()
+ */
+ private val classLiteral = immutable.Map[TypeKind, asm.Type](
+ UNIT -> asm.Type.getObjectType("java/lang/Void"),
+ BOOL -> asm.Type.getObjectType("java/lang/Boolean"),
+ BYTE -> asm.Type.getObjectType("java/lang/Byte"),
+ SHORT -> asm.Type.getObjectType("java/lang/Short"),
+ CHAR -> asm.Type.getObjectType("java/lang/Character"),
+ INT -> asm.Type.getObjectType("java/lang/Integer"),
+ LONG -> asm.Type.getObjectType("java/lang/Long"),
+ FLOAT -> asm.Type.getObjectType("java/lang/Float"),
+ DOUBLE -> asm.Type.getObjectType("java/lang/Double")
+ )
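+ // Presumably a class literal for a primitive, e.g. classOf[Int], is then pushed via
+ // GETSTATIC on the boxed companion (java/lang/Integer.TYPE); see genConstant().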
+
+ def isNonUnitValueTK(tk: TypeKind): Boolean = { tk.isValueType && tk != UNIT }
+
+ case class MethodNameAndType(mname: String, mdesc: String)
+
+ private val jBoxTo: Map[TypeKind, MethodNameAndType] = {
+ Map(
+ BOOL -> MethodNameAndType("boxToBoolean", "(Z)Ljava/lang/Boolean;" ) ,
+ BYTE -> MethodNameAndType("boxToByte", "(B)Ljava/lang/Byte;" ) ,
+ CHAR -> MethodNameAndType("boxToCharacter", "(C)Ljava/lang/Character;") ,
+ SHORT -> MethodNameAndType("boxToShort", "(S)Ljava/lang/Short;" ) ,
+ INT -> MethodNameAndType("boxToInteger", "(I)Ljava/lang/Integer;" ) ,
+ LONG -> MethodNameAndType("boxToLong", "(J)Ljava/lang/Long;" ) ,
+ FLOAT -> MethodNameAndType("boxToFloat", "(F)Ljava/lang/Float;" ) ,
+ DOUBLE -> MethodNameAndType("boxToDouble", "(D)Ljava/lang/Double;" )
+ )
+ }
+
+ private val jUnboxTo: Map[TypeKind, MethodNameAndType] = {
+ Map(
+ BOOL -> MethodNameAndType("unboxToBoolean", "(Ljava/lang/Object;)Z") ,
+ BYTE -> MethodNameAndType("unboxToByte", "(Ljava/lang/Object;)B") ,
+ CHAR -> MethodNameAndType("unboxToChar", "(Ljava/lang/Object;)C") ,
+ SHORT -> MethodNameAndType("unboxToShort", "(Ljava/lang/Object;)S") ,
+ INT -> MethodNameAndType("unboxToInt", "(Ljava/lang/Object;)I") ,
+ LONG -> MethodNameAndType("unboxToLong", "(Ljava/lang/Object;)J") ,
+ FLOAT -> MethodNameAndType("unboxToFloat", "(Ljava/lang/Object;)F") ,
+ DOUBLE -> MethodNameAndType("unboxToDouble", "(Ljava/lang/Object;)D")
+ )
+ }
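+ // For example, boxing an INT amounts to a static call to boxToInteger with descriptor
+ // (I)Ljava/lang/Integer; on scala/runtime/BoxesRunTime, and unboxToInt is the reverse.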
+
+ case class BlockInteval(start: BasicBlock, end: BasicBlock)
+
+ /** builder of plain classes */
+ class JPlainBuilder(bytecodeWriter: BytecodeWriter)
+ extends JCommonBuilder(bytecodeWriter)
+ with JAndroidBuilder {
+
+ val MIN_SWITCH_DENSITY = 0.7
+
+ val StringBuilderClassName = javaName(definitions.StringBuilderClass)
+ val BoxesRunTime = "scala/runtime/BoxesRunTime"
+
+ val StringBuilderType = asm.Type.getObjectType(StringBuilderClassName)
+ val mdesc_toString = "()Ljava/lang/String;"
+ val mdesc_arrayClone = "()Ljava/lang/Object;"
+
+ val tdesc_long = asm.Type.LONG_TYPE.getDescriptor // i.e. "J"
+
+ def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
+
+ def serialVUID: Option[Long] = clasz.symbol getAnnotation SerialVersionUIDAttr collect {
+ case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
+ }
+
+ private def getSuperInterfaces(c: IClass): Array[String] = {
+
+ // Additional interface parents based on annotations and other cues
+ def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
+ case SerializableAttr => Some(SerializableClass)
+ case CloneableAttr => Some(JavaCloneableClass)
+ case RemoteAttr => Some(RemoteInterfaceClass)
+ case _ => None
+ }
+
+ /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
+ * This is important on Android because there is otherwise an interface explosion.
+ */
+ def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
+ var rest = lstIfaces
+ var leaves = List.empty[Symbol]
+ while(!rest.isEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
+ if(!nonLeaf) {
+ leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
+ }
+ rest = rest.tail
+ }
+
+ leaves
+ }
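+ // Example: for immediate parents (java.lang.Iterable, java.util.Collection), only
+ // Collection is kept, since Collection already extends Iterable.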
+
+ val ps = c.symbol.info.parents
+ val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
+ val superInterfaces = superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol)) distinct
+
+ if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
+ else mkArray(minimizeInterfaces(superInterfaces) map javaName)
+ }
+
+ var clasz: IClass = _ // this var must be assigned only by genClass()
+ var jclass: asm.ClassWriter = _ // the classfile being emitted
+ var thisName: String = _ // the internal name of jclass
+
+ def thisDescr: String = {
+ assert(thisName != null, "thisDescr invoked too soon.")
+ asm.Type.getObjectType(thisName).getDescriptor
+ }
+
+ def getCurrentCUnit(): CompilationUnit = { clasz.cunit }
+
+ def genClass(c: IClass) {
+ clasz = c
+ innerClassBuffer.clear()
+
+ thisName = javaName(c.symbol) // the internal name of the class being emitted
+
+ val ps = c.symbol.info.parents
+ val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol);
+
+ val ifaces = getSuperInterfaces(c)
+
+ val thisSignature = getGenericSignature(c.symbol, c.symbol.owner)
+ val flags = mkFlags(
+ javaFlags(c.symbol),
+ if(isDeprecated(c.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+ jclass = createJClass(flags,
+ thisName, thisSignature,
+ superClass, ifaces)
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
+
+ jclass.visitSource(c.cunit.source.toString,
+ null /* SourceDebugExtension */)
+
+ val enclM = getEnclosingMethodAttribute()
+ if(enclM != null) {
+ val EnclMethodEntry(className, methodName, methodType) = enclM
+ jclass.visitOuterClass(className, methodName, methodType.getDescriptor)
+ }
+
+ // typestate: entering mode with valid call sequences:
+ // ( visitAnnotation | visitAttribute )*
+
+ val ssa = getAnnotPickle(thisName, c.symbol)
+ jclass.visitAttribute(if(ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+ emitAnnotations(jclass, c.symbol.annotations ++ ssa)
+
+ // typestate: entering mode with valid call sequences:
+ // ( visitInnerClass | visitField | visitMethod )* visitEnd
+
+ if (isStaticModule(c.symbol) || serialVUID != None || isParcelableClass) {
+
+ if (isStaticModule(c.symbol)) { addModuleInstanceField() }
+ addStaticInit(c.lookupStaticCtor)
+
+ } else {
+
+ for (constructor <- c.lookupStaticCtor) {
+ addStaticInit(Some(constructor))
+ }
+ val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders.value)
+ if (!skipStaticForwarders) {
+ val lmoc = c.symbol.companionModule
+ // add static forwarders if there are no name conflicts; see bugs #363 and #1735
+ if (lmoc != NoSymbol) {
+ // it must be a top level class (name contains no $s)
+ val isCandidateForForwarders = {
+ afterPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+ }
+ if (isCandidateForForwarders) {
+ log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
+ addForwarders(isRemote(clasz.symbol), jclass, thisName, lmoc.moduleClass)
+ }
+ }
+ }
+
+ }
+
+ clasz.fields foreach genField
+ clasz.methods foreach { im => genMethod(im, c.symbol.isInterface) }
+
+ addInnerClasses(clasz.symbol, jclass)
+ jclass.visitEnd()
+ writeIfNotTooBig("" + c.symbol.name, thisName, jclass, c.symbol)
+
+ }
+
+ /**
+ * @param owner the internal name of the enclosing class.
+ *
+ * @param name the name of the method that contains the class.
+ *
+ * @param methodType the type of the method that contains the class.
+ */
+ case class EnclMethodEntry(owner: String, name: String, methodType: asm.Type)
+
+ /**
+ * @return null if the current class is not internal to a method
+ *
+ * Quoting from JVMS 4.7.7 The EnclosingMethod Attribute
+ * A class must have an EnclosingMethod attribute if and only if it is a local class or an anonymous class.
+ * A class may have no more than one EnclosingMethod attribute.
+ *
+ */
+ private def getEnclosingMethodAttribute(): EnclMethodEntry = { // JVMS 4.7.7
+ var res: EnclMethodEntry = null
+ val clazz = clasz.symbol
+ val sym = clazz.originalEnclosingMethod
+ if (sym.isMethod) {
+ debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
+ res = EnclMethodEntry(javaName(sym.enclClass), javaName(sym), javaType(sym))
+ } else if (clazz.isAnonymousClass) {
+ val enclClass = clazz.rawowner
+ assert(enclClass.isClass, enclClass)
+ val sym = enclClass.primaryConstructor
+ if (sym == NoSymbol) {
+ log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass, clazz))
+ } else {
+ debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
+ res = EnclMethodEntry(javaName(enclClass), javaName(sym), javaType(sym))
+ }
+ }
+
+ res
+ }
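+
+    // A tiny illustrative example (hypothetical code, not consulted by the generator) of the two
+    // class shapes that JVMS 4.7.7 requires to carry an EnclosingMethod attribute: a local class
+    // and an anonymous class, both defined inside a method. Inspecting their classfiles with
+    // `javap -v` would be expected to show EnclosingMethod entries pointing at `enclosing()`.
+    private object EnclosingMethodSketch {
+      def enclosing(flag: Boolean): Runnable = {
+        class Local extends Runnable { def run() { } }      // local class
+        val anon = new Runnable { def run() { } }           // anonymous class
+        if (flag) anon else new Local
+      }
+    }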
+
+ def genField(f: IField) {
+ debuglog("Adding field: " + f.symbol.fullName)
+
+ val javagensig = getGenericSignature(f.symbol, clasz.symbol)
+
+ val flags = mkFlags(
+ javaFieldFlags(f.symbol),
+ if(isDeprecated(f.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val jfield: asm.FieldVisitor = jclass.visitField(
+ flags,
+ javaName(f.symbol),
+ javaType(f.symbol.tpe).getDescriptor(),
+ javagensig,
+ null // no initial value
+ )
+
+ emitAnnotations(jfield, f.symbol.annotations)
+ jfield.visitEnd()
+ }
+
+ def debugLevel = settings.debuginfo.indexOfChoice
+
+ // val emitSource = debugLevel >= 1
+ val emitLines = debugLevel >= 2
+ val emitVars = debugLevel >= 3
+
+ var method: IMethod = _
+ var jmethod: asm.MethodVisitor = _
+ var jMethodName: String = _
+
+ @inline final def emit(opc: Int) { jmethod.visitInsn(opc) }
+
+ def genMethod(m: IMethod, isJInterface: Boolean) {
+
+ def isClosureApply(sym: Symbol): Boolean = {
+ (sym.name == nme.apply) &&
+ sym.owner.isSynthetic &&
+ sym.owner.tpe.parents.exists { t =>
+ val TypeRef(_, sym, _) = t
+ FunctionClass contains sym
+ }
+ }
+
+ if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
+
+ debuglog("Generating method " + m.symbol.fullName)
+ method = m
+ computeLocalVarsIndex(m)
+
+ var resTpe: asm.Type = javaType(m.symbol.tpe.resultType)
+ if (m.symbol.isClassConstructor)
+ resTpe = asm.Type.VOID_TYPE
+
+ val flags = mkFlags(
+ javaFlags(m.symbol),
+ if (isJInterface) asm.Opcodes.ACC_ABSTRACT else 0,
+ if (m.symbol.isStrictFP) asm.Opcodes.ACC_STRICT else 0,
+ if (method.native) asm.Opcodes.ACC_NATIVE else 0, // native methods of objects are generated in mirror classes
+ if(isDeprecated(m.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ // TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize }
+ val jgensig = getGenericSignature(m.symbol, clasz.symbol)
+ addRemoteExceptionAnnot(isRemote(clasz.symbol), hasPublicBitSet(flags), m.symbol)
+ val (excs, others) = m.symbol.annotations partition (_.symbol == ThrowsClass)
+ val thrownExceptions: List[String] = getExceptions(excs)
+
+ jMethodName = javaName(m.symbol)
+ val mdesc = asm.Type.getMethodDescriptor(resTpe, (m.params map (p => javaType(p.kind))): _*)
+ jmethod = jclass.visitMethod(
+ flags,
+ jMethodName,
+ mdesc,
+ jgensig,
+ mkArray(thrownExceptions)
+ )
+
+ // TODO param names: (m.params map (p => javaName(p.sym)))
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
+
+ emitAnnotations(jmethod, others)
+ emitParamAnnotations(jmethod, m.params.map(_.sym.annotations))
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitCode ( visitFrame | visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber )* visitMaxs ] visitEnd
+ // In addition, the visitXInsn and visitLabel methods must be called in the sequential order of the bytecode instructions of the visited code,
+ // visitTryCatchBlock must be called before the labels passed as arguments have been visited, and
+ // the visitLocalVariable and visitLineNumber methods must be called after the labels passed as arguments have been visited.
+
+ val hasAbstractBitSet = ((flags & asm.Opcodes.ACC_ABSTRACT) != 0)
+ val hasCodeAttribute = (!hasAbstractBitSet && !method.native)
+ if (hasCodeAttribute) {
+
+ jmethod.visitCode()
+
+ if (emitVars && isClosureApply(method.symbol)) {
+ // add a fake local for debugging purposes
+ val outerField = clasz.symbol.info.decl(nme.OUTER_LOCAL)
+ if (outerField != NoSymbol) {
+ log("Adding fake local to represent outer 'this' for closure " + clasz)
+ val _this =
+ new Local(method.symbol.newVariable(nme.FAKE_LOCAL_THIS),
+ toTypeKind(outerField.tpe),
+ false)
+ m.locals = m.locals ::: List(_this)
+ computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes
+ jmethod.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ jmethod.visitFieldInsn(asm.Opcodes.GETFIELD,
+ javaName(clasz.symbol), // field owner
+ javaName(outerField), // field name
+ descriptor(outerField) // field descriptor
+ )
+ assert(_this.kind.isReferenceType, _this.kind)
+ jmethod.visitVarInsn(asm.Opcodes.ASTORE, indexOf(_this))
+ }
+ }
+
+ assert( m.locals forall { local => (m.params contains local) == local.arg }, m.locals )
+
+ val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0)
+ genCode(m, emitVars, hasStaticBitSet)
+
+ jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ }
+
+ jmethod.visitEnd()
+
+ }
+
+ def addModuleInstanceField() {
+ val fv =
+ jclass.visitField(PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ strMODULE_INSTANCE_FIELD,
+ thisDescr,
+ null, // no java-generic-signature
+ null // no initial value
+ )
+
+ // typestate: entering mode with valid call sequences:
+ // ( visitAnnotation | visitAttribute )* visitEnd.
+
+ fv.visitEnd()
+ }
+
+
+ /* Typestate: should be called before being done with emitting fields (because it invokes addCreatorCode() which adds an IField to the current IClass). */
+ def addStaticInit(mopt: Option[IMethod]) {
+
+ val clinitMethod: asm.MethodVisitor = jclass.visitMethod(
+ PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ CLASS_CONSTRUCTOR_NAME,
+ mdesc_arglessvoid,
+ null, // no java-generic-signature
+ null // no throwable exceptions
+ )
+
+ mopt match {
+
+ case Some(m) =>
+
+ val oldLastBlock = m.lastBlock
+ val lastBlock = m.newBlock()
+ oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
+
+ if (isStaticModule(clasz.symbol)) {
+ // call object's private ctor from static ctor
+ lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
+ lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
+ }
+
+ // add serialVUID code
+ serialVUID foreach { value =>
+ val fieldName = "serialVersionUID"
+ val fieldSymbol = clasz.symbol.newValue(newTermName(fieldName), NoPosition, Flags.STATIC | Flags.FINAL) setInfo LongClass.tpe
+ clasz addField new IField(fieldSymbol)
+ lastBlock emit CONSTANT(Constant(value))
+ lastBlock emit STORE_FIELD(fieldSymbol, true)
+ }
+
+ if (isParcelableClass) { addCreatorCode(lastBlock) }
+
+ lastBlock emit RETURN(UNIT)
+ lastBlock.close
+
+ method = m
+ jmethod = clinitMethod
+ jMethodName = CLASS_CONSTRUCTOR_NAME
+ jmethod.visitCode()
+ genCode(m, false, true)
+ jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ jmethod.visitEnd()
+
+ case None =>
+ clinitMethod.visitCode()
+ legacyStaticInitializer(clinitMethod)
+ clinitMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ clinitMethod.visitEnd()
+
+ }
+ }
+
+ /* used only from addStaticInit() */
+ private def legacyStaticInitializer(clinit: asm.MethodVisitor) {
+ if (isStaticModule(clasz.symbol)) {
+ clinit.visitTypeInsn(asm.Opcodes.NEW, thisName)
+ clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL,
+ thisName, INSTANCE_CONSTRUCTOR_NAME, mdesc_arglessvoid)
+ }
+
+ serialVUID foreach { value =>
+ val fieldName = "serialVersionUID"
+ jclass.visitField(
+ PublicStaticFinal,
+ fieldName,
+ tdesc_long,
+ null, // no java-generic-signature
+ value // TODO confirm whether initial value here is behaviorally equiv to fjbg's emitPUSH emitPUTSTATIC
+ ).visitEnd()
+ }
+
+ if (isParcelableClass) { legacyAddCreatorCode(clinit) }
+
+ clinit.visitInsn(asm.Opcodes.RETURN)
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // Emitting bytecode instructions.
+ // -----------------------------------------------------------------------------------------
+
+ private def genConstant(mv: asm.MethodVisitor, const: Constant) {
+ const.tag match {
+
+ case BooleanTag => jcode.boolconst(const.booleanValue)
+
+ case ByteTag => jcode.iconst(const.byteValue)
+ case ShortTag => jcode.iconst(const.shortValue)
+ case CharTag => jcode.iconst(const.charValue)
+ case IntTag => jcode.iconst(const.intValue)
+
+ case LongTag => jcode.lconst(const.longValue)
+ case FloatTag => jcode.fconst(const.floatValue)
+ case DoubleTag => jcode.dconst(const.doubleValue)
+
+ case UnitTag => ()
+
+ case StringTag =>
+ assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+ mv.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag
+
+ case NullTag => mv.visitInsn(asm.Opcodes.ACONST_NULL)
+
+ case ClazzTag =>
+ val kind = toTypeKind(const.typeValue)
+ val toPush: asm.Type =
+ if (kind.isValueType) classLiteral(kind)
+ else javaType(kind);
+ mv.visitLdcInsn(toPush)
+
+ case EnumTag =>
+ val sym = const.symbolValue
+ mv.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ javaName(sym.owner),
+ javaName(sym),
+ javaType(sym.tpe.underlying).getDescriptor()
+ )
+
+ case _ => abort("Unknown constant value: " + const)
+ }
+ }
+
+ /** Just a namespace for utilities that encapsulate MethodVisitor idioms.
+ * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role,
+ * but the methods here allow choosing when to transition from ICode to ASM types
+ * (including not at all, e.g. for performance).
+ */
+ object jcode {
+
+ import asm.Opcodes;
+
+ def aconst(cst: AnyRef) {
+ if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) }
+ else { jmethod.visitLdcInsn(cst) }
+ }
+
+ @inline final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
+
+ def iconst(cst: Int) {
+ if (cst >= -1 && cst <= 5) {
+ jmethod.visitInsn(Opcodes.ICONST_0 + cst)
+ } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) {
+ jmethod.visitIntInsn(Opcodes.BIPUSH, cst)
+ } else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) {
+ jmethod.visitIntInsn(Opcodes.SIPUSH, cst)
+ } else {
+ jmethod.visitLdcInsn(new Integer(cst))
+ }
+ }
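+
+      // A standalone sketch of the selection rule in iconst() above: which instruction family a
+      // given Int constant lands in (`mnemonicFor` is hypothetical and only documents the rule).
+      // e.g. mnemonicFor(3) == "ICONST_3", mnemonicFor(100) == "BIPUSH", mnemonicFor(70000) == "LDC".
+      private def mnemonicFor(cst: Int): String = {
+        if (cst == -1) "ICONST_M1"                                                              // dedicated opcode
+        else if (cst >= 0 && cst <= 5) "ICONST_" + cst                                          // dedicated opcodes
+        else if (cst >= java.lang.Byte.MIN_VALUE  && cst <= java.lang.Byte.MAX_VALUE)  "BIPUSH" // 1-byte operand
+        else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) "SIPUSH" // 2-byte operand
+        else "LDC"                                                                              // constant-pool load
+      }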
+
+ def lconst(cst: Long) {
+ if (cst == 0L || cst == 1L) {
+ jmethod.visitInsn(Opcodes.LCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Long(cst))
+ }
+ }
+
+ def fconst(cst: Float) {
+ val bits: Int = java.lang.Float.floatToIntBits(cst)
+ if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2
+ jmethod.visitInsn(Opcodes.FCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Float(cst))
+ }
+ }
+
+ def dconst(cst: Double) {
+ val bits: Long = java.lang.Double.doubleToLongBits(cst)
+ if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d
+ jmethod.visitInsn(Opcodes.DCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Double(cst))
+ }
+ }
+
+ def newarray(elem: TypeKind) {
+ if(elem.isRefOrArrayType) {
+ jmethod.visitTypeInsn(Opcodes.ANEWARRAY, javaType(elem).getInternalName)
+ } else {
+ val rand = {
+ if(elem.isIntSizedType) {
+ (elem: @unchecked) match {
+ case BOOL => Opcodes.T_BOOLEAN
+ case BYTE => Opcodes.T_BYTE
+ case SHORT => Opcodes.T_SHORT
+ case CHAR => Opcodes.T_CHAR
+ case INT => Opcodes.T_INT
+ }
+ } else {
+ (elem: @unchecked) match {
+ case LONG => Opcodes.T_LONG
+ case FLOAT => Opcodes.T_FLOAT
+ case DOUBLE => Opcodes.T_DOUBLE
+ }
+ }
+ }
+ jmethod.visitIntInsn(Opcodes.NEWARRAY, rand)
+ }
+ }
+
+
+ @inline def load( idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ILOAD, idx, tk) }
+ @inline def store(idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ISTORE, idx, tk) }
+
+ @inline def aload( tk: TypeKind) { emitTypeBased(aloadOpcodes, tk) }
+ @inline def astore(tk: TypeKind) { emitTypeBased(astoreOpcodes, tk) }
+
+ @inline def neg(tk: TypeKind) { emitPrimitive(negOpcodes, tk) }
+ @inline def add(tk: TypeKind) { emitPrimitive(addOpcodes, tk) }
+ @inline def sub(tk: TypeKind) { emitPrimitive(subOpcodes, tk) }
+ @inline def mul(tk: TypeKind) { emitPrimitive(mulOpcodes, tk) }
+ @inline def div(tk: TypeKind) { emitPrimitive(divOpcodes, tk) }
+ @inline def rem(tk: TypeKind) { emitPrimitive(remOpcodes, tk) }
+
+ @inline def invokespecial(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc)
+ }
+ @inline def invokestatic(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc)
+ }
+ @inline def invokeinterface(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc)
+ }
+ @inline def invokevirtual(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc)
+ }
+
+ @inline def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
+ @inline def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
+ @inline def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+ @inline def emitIF_ACMP(cond: TestOp, label: asm.Label) {
+ assert((cond == EQ) || (cond == NE), cond)
+ val opc = (if(cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
+ jmethod.visitJumpInsn(opc, label)
+ }
+ @inline def emitIFNONNULL(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) }
+ @inline def emitIFNULL (label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNULL, label) }
+
+ @inline def emitRETURN(tk: TypeKind) {
+ if(tk == UNIT) { jmethod.visitInsn(Opcodes.RETURN) }
+ else { emitTypeBased(returnOpcodes, tk) }
+ }
+
+      /** Emits one of tableswitch or lookupswitch. */
+ def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double) {
+ assert(keys.length == branches.length)
+
+        // For empty keys, it makes sense to emit a LOOKUPSWITCH with the defaultBranch only.
+ // Similar to what javac emits for a switch statement consisting only of a default case.
+ if (keys.length == 0) {
+ jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+ return
+ }
+
+ // sort `keys` by increasing key, keeping `branches` in sync. TODO FIXME use quicksort
+ var i = 1
+ while (i < keys.length) {
+ var j = 1
+ while (j <= keys.length - i) {
+ if (keys(j) < keys(j - 1)) {
+ val tmp = keys(j)
+ keys(j) = keys(j - 1)
+ keys(j - 1) = tmp
+ val tmpL = branches(j)
+ branches(j) = branches(j - 1)
+ branches(j - 1) = tmpL
+ }
+ j += 1
+ }
+ i += 1
+ }
+
+ val keyMin = keys(0)
+ val keyMax = keys(keys.length - 1)
+
+ val isDenseEnough: Boolean = {
+ /** Calculate in long to guard against overflow. TODO what overflow??? */
+ val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
+ val klenD: Double = keys.length
+ val kdensity: Double = (klenD / keyRangeD)
+
+ kdensity >= minDensity
+ }
+
+ if (isDenseEnough) {
+ // use a table in which holes are filled with defaultBranch.
+ val keyRange = (keyMax - keyMin + 1)
+ val newBranches = new Array[asm.Label](keyRange)
+ var oldPos = 0;
+ var i = 0
+ while(i < keyRange) {
+ val key = keyMin + i;
+ if (keys(oldPos) == key) {
+ newBranches(i) = branches(oldPos)
+ oldPos += 1
+ } else {
+ newBranches(i) = defaultBranch
+ }
+ i += 1
+ }
+ assert(oldPos == keys.length, "emitSWITCH")
+ jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*)
+ } else {
+ jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+ }
+ }
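+
+      // A worked sketch of the density rule used by emitSWITCH() above (`switchKindFor` is
+      // hypothetical; it only reproduces the arithmetic). For sorted keys (1, 2, 3, 10) the range
+      // is 10, so density = 4/10 = 0.4 and a minDensity of 0.5 picks LOOKUPSWITCH; for keys
+      // (1, 2, 3, 4) the density is 1.0 and TABLESWITCH wins.
+      private def switchKindFor(sortedKeys: Array[Int], minDensity: Double): String = {
+        if (sortedKeys.isEmpty) "LOOKUPSWITCH"
+        else {
+          val keyRange = sortedKeys.last.toLong - sortedKeys.head + 1   // in Long, as above
+          val density  = sortedKeys.length.toDouble / keyRange
+          if (density >= minDensity) "TABLESWITCH" else "LOOKUPSWITCH"
+        }
+      }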
+
+ // internal helpers -- not part of the public API of `jcode`
+ // don't make private otherwise inlining will suffer
+
+ def emitVarInsn(opc: Int, idx: Int, tk: TypeKind) {
+ assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc)
+ jmethod.visitVarInsn(javaType(tk).getOpcode(opc), idx)
+ }
+
+ // ---------------- array load and store ----------------
+
+ val aloadOpcodes = { import Opcodes._; Array(AALOAD, BALOAD, SALOAD, CALOAD, IALOAD, LALOAD, FALOAD, DALOAD) }
+ val astoreOpcodes = { import Opcodes._; Array(AASTORE, BASTORE, SASTORE, CASTORE, IASTORE, LASTORE, FASTORE, DASTORE) }
+
+ val returnOpcodes = { import Opcodes._; Array(ARETURN, IRETURN, IRETURN, IRETURN, IRETURN, LRETURN, FRETURN, DRETURN) }
+
+ def emitTypeBased(opcs: Array[Int], tk: TypeKind) {
+ assert(tk != UNIT, tk)
+ val opc = {
+ if(tk.isRefOrArrayType) { opcs(0) }
+ else if(tk.isIntSizedType) {
+ (tk: @unchecked) match {
+ case BOOL | BYTE => opcs(1)
+ case SHORT => opcs(2)
+ case CHAR => opcs(3)
+ case INT => opcs(4)
+ }
+ } else {
+ (tk: @unchecked) match {
+ case LONG => opcs(5)
+ case FLOAT => opcs(6)
+ case DOUBLE => opcs(7)
+ }
+ }
+ }
+ jmethod.visitInsn(opc)
+ }
+
+ // ---------------- primitive operations ----------------
+
+ val negOpcodes: Array[Int] = { import Opcodes._; Array(INEG, LNEG, FNEG, DNEG) }
+ val addOpcodes: Array[Int] = { import Opcodes._; Array(IADD, LADD, FADD, DADD) }
+ val subOpcodes: Array[Int] = { import Opcodes._; Array(ISUB, LSUB, FSUB, DSUB) }
+ val mulOpcodes: Array[Int] = { import Opcodes._; Array(IMUL, LMUL, FMUL, DMUL) }
+ val divOpcodes: Array[Int] = { import Opcodes._; Array(IDIV, LDIV, FDIV, DDIV) }
+ val remOpcodes: Array[Int] = { import Opcodes._; Array(IREM, LREM, FREM, DREM) }
+
+ def emitPrimitive(opcs: Array[Int], tk: TypeKind) {
+ val opc = {
+ if(tk.isIntSizedType) { opcs(0) }
+ else {
+ (tk: @unchecked) match {
+ case LONG => opcs(1)
+ case FLOAT => opcs(2)
+ case DOUBLE => opcs(3)
+ }
+ }
+ }
+ jmethod.visitInsn(opc)
+ }
+
+ }
+
+ /** Invoked from genMethod() and addStaticInit() */
+ def genCode(m: IMethod,
+ emitVars: Boolean, // this param name hides the instance-level var
+ isStatic: Boolean) {
+
+
+ newNormal.normalize(m)
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 1 of genCode(): setting up one-to-one correspondence between ASM Labels and BasicBlocks `linearization`
+ // ------------------------------------------------------------------------------------------------------------
+
+ val linearization: List[BasicBlock] = linearizer.linearize(m)
+ if(linearization.isEmpty) { return }
+
+ var isModuleInitialized = false
+
+ val labels: collection.Map[BasicBlock, asm.Label] = mutable.HashMap(linearization map (_ -> new asm.Label()) : _*)
+
+ val onePastLast = new asm.Label // token for the mythical instruction past the last instruction in the method being emitted
+
+ // maps a BasicBlock b to the Label that corresponds to b's successor in the linearization. The last BasicBlock is mapped to the onePastLast label.
+ val linNext: collection.Map[BasicBlock, asm.Label] = {
+ val result = mutable.HashMap.empty[BasicBlock, asm.Label]
+ var rest = linearization
+ var prev = rest.head
+ rest = rest.tail
+ while(!rest.isEmpty) {
+ result += (prev -> labels(rest.head))
+ prev = rest.head
+ rest = rest.tail
+ }
+ assert(!result.contains(prev))
+ result += (prev -> onePastLast)
+
+ result
+ }
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 2 of genCode(): demarcating exception handler boundaries (visitTryCatchBlock() must be invoked before visitLabel() in genBlock())
+ // ------------------------------------------------------------------------------------------------------------
+
+ /**Generate exception handlers for the current method.
+ *
+ * Quoting from the JVMS 4.7.3 The Code Attribute
+ * The items of the Code_attribute structure are as follows:
+ * . . .
+ * exception_table[]
+ * Each entry in the exception_table array describes one
+ * exception handler in the code array. The order of the handlers in
+ * the exception_table array is significant.
+ * Each exception_table entry contains the following four items:
+ * start_pc, end_pc:
+ * ... The value of end_pc either must be a valid index into
+ * the code array of the opcode of an instruction or must be equal to code_length,
+ * the length of the code array.
+ * handler_pc:
+ * The value of the handler_pc item indicates the start of the exception handler
+ * catch_type:
+ * ... If the value of the catch_type item is zero,
+ * this exception handler is called for all exceptions.
+ * This is used to implement finally
+ */
+ def genExceptionHandlers() {
+
+        /** Return a list of intervals (start/end pairs of blocks) during which the handler is active.
+ * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end].
+ * Preconditions:
+ * - e.covered non-empty
+ * Postconditions for the result:
+ * - always non-empty
+ * - intervals are sorted as per `linearization`
+ * - the argument's `covered` blocks have been grouped into maximally contiguous intervals,
+ * ie. between any two intervals in the result there is a non-empty gap.
+ * - each of the `covered` blocks in the argument is contained in some interval in the result
+ */
+ def intervals(e: ExceptionHandler): List[BlockInteval] = {
+ assert(e.covered.nonEmpty, e)
+ var result: List[BlockInteval] = Nil
+ var rest = linearization
+
+ // find intervals
+ while(!rest.isEmpty) {
+ // find interval start
+ var start: BasicBlock = null
+ while(!rest.isEmpty && (start eq null)) {
+ if(e.covered(rest.head)) { start = rest.head }
+ rest = rest.tail
+ }
+ if(start ne null) {
+ // find interval end
+ var end = start // for the time being
+ while(!rest.isEmpty && (e.covered(rest.head))) {
+ end = rest.head
+ rest = rest.tail
+ }
+ result = BlockInteval(start, end) :: result
+ }
+ }
+
+ assert(result.nonEmpty, e)
+
+ result
+ }
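+
+        // A worked example of intervals() above, with plain strings standing in for BasicBlocks
+        // (`runsOf` is a hypothetical helper, not used anywhere else). For the linearization
+        // List("b1","b2","b3","b4","b5") and covered = Set("b2","b3","b5") it yields
+        // List(("b2","b3"), ("b5","b5")): two maximally contiguous runs with a gap at "b4".
+        def runsOf(linearized: List[String], covered: Set[String]): List[(String, String)] = {
+          var result: List[(String, String)] = Nil
+          var rest = linearized
+          while (rest.nonEmpty) {
+            rest = rest.dropWhile(b => !covered(b))       // skip the gap before the next run
+            if (rest.nonEmpty) {
+              val (run, remainder) = rest.span(covered)   // take the maximal covered run
+              result = (run.head, run.last) :: result
+              rest = remainder
+            }
+          }
+          result.reverse
+        }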
+
+ /* TODO test/files/run/exceptions-2.scala displays an ExceptionHandler.covered that contains
+ * blocks not in the linearization (dead-code?). Is that well-formed or not?
+ * For now, we ignore those blocks (after all, that's what `genBlocks(linearization)` in effect does).
+ */
+ for (e <- this.method.exh) {
+ val ignore: Set[BasicBlock] = (e.covered filterNot { b => linearization contains b } )
+ // TODO someday assert(ignore.isEmpty, "an ExceptionHandler.covered contains blocks not in the linearization (dead-code?)")
+ if(ignore.nonEmpty) {
+ e.covered = e.covered filterNot ignore
+ }
+ }
+
+ // an ExceptionHandler lacking covered blocks doesn't get an entry in the Exceptions table.
+ // TODO in that case, ExceptionHandler.cls doesn't go through javaName(). What if cls is an inner class?
+ for (e <- this.method.exh ; if e.covered.nonEmpty ; p <- intervals(e)) {
+        debuglog("Adding exception handler " + e + " at block: " + e.startBlock + " for " + method +
+ " from: " + p.start + " to: " + p.end + " catching: " + e.cls);
+ val cls: String = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
+ else javaName(e.cls)
+ jmethod.visitTryCatchBlock(labels(p.start), linNext(p.end), labels(e.startBlock), cls)
+ }
+ } // end of genCode()'s genExceptionHandlers()
+
+ if (m.exh.nonEmpty) { genExceptionHandlers() }
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 3 of genCode(): "Infrastructure" to later emit debug info for local variables and method params (LocalVariablesTable bytecode attribute).
+ // ------------------------------------------------------------------------------------------------------------
+
+ case class LocVarEntry(local: Local, start: asm.Label, end: asm.Label) // start is inclusive while end exclusive.
+
+ case class Interval(lstart: asm.Label, lend: asm.Label) {
+ @inline final def start = lstart.getOffset
+ @inline final def end = lend.getOffset
+
+ def precedes(that: Interval): Boolean = { this.end < that.start }
+
+ def overlaps(that: Interval): Boolean = { !(this.precedes(that) || that.precedes(this)) }
+
+ def mergeWith(that: Interval): Interval = {
+ val newStart = if(this.start <= that.start) this.lstart else that.lstart;
+ val newEnd = if(this.end <= that.end) that.lend else this.lend;
+ Interval(newStart, newEnd)
+ }
+
+ def repOK: Boolean = { start <= end }
+
+ }
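+
+      // A minimal sketch of the Interval algebra above over raw Int offsets (OffsetInterval is a
+      // hypothetical stand-in; the real Interval reads offsets from the asm.Labels it wraps):
+      // OffsetInterval(2, 5) precedes OffsetInterval(7, 9), overlaps OffsetInterval(4, 9), and
+      // merging the latter pair yields OffsetInterval(2, 9).
+      case class OffsetInterval(start: Int, end: Int) {
+        def precedes(that: OffsetInterval): Boolean = this.end < that.start
+        def overlaps(that: OffsetInterval): Boolean = !(this.precedes(that) || that.precedes(this))
+        def mergeWith(that: OffsetInterval): OffsetInterval =
+          OffsetInterval(math.min(this.start, that.start), math.max(this.end, that.end))
+      }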
+
+ /** Track those instruction ranges where certain locals are in scope. Used to later emit the LocalVariableTable attribute (JVMS 4.7.13) */
+ object scoping {
+
+ private val pending = mutable.Map.empty[Local, mutable.Stack[Label]]
+ private var seen: List[LocVarEntry] = Nil
+
+ private def fuse(ranges: List[Interval], added: Interval): List[Interval] = {
+ assert(added.repOK, added)
+ if(ranges.isEmpty) { return List(added) }
+ // precond: ranges is sorted by increasing start
+ var fused: List[Interval] = Nil
+ var done = false
+ var rest = ranges
+ while(!done && rest.nonEmpty) {
+ val current = rest.head
+ assert(current.repOK, current)
+ rest = rest.tail
+ if(added precedes current) {
+ fused = fused ::: ( added :: current :: rest )
+ done = true
+ } else if(current overlaps added) {
+ fused = fused ::: ( added.mergeWith(current) :: rest )
+ done = true
+ }
+ }
+ if(!done) { fused = fused ::: List(added) }
+ assert(repOK(fused), fused)
+
+ fused
+ }
+
+ def pushScope(lv: Local, start: Label) {
+ val st = pending.getOrElseUpdate(lv, mutable.Stack.empty[Label])
+ st.push(start)
+ }
+ def popScope(lv: Local, end: Label) {
+ val start = pending(lv).pop()
+ seen ::= LocVarEntry(lv, start, end)
+ }
+
+ def getMerged(): collection.Map[Local, List[Interval]] = {
+        // TODO `shouldBeEmpty` ought to be empty, but unbalanced start(s) of scope(s) can leave entries behind
+ val shouldBeEmpty = pending filter { p => val Pair(k, st) = p; st.nonEmpty };
+
+ val merged = mutable.Map.empty[Local, List[Interval]]
+
+ def addToMerged(lv: Local, start: Label, end: Label) {
+ val ranges = merged.getOrElseUpdate(lv, Nil)
+ val coalesced = fuse(ranges, Interval(start, end))
+ merged.update(lv, coalesced)
+ }
+
+ for(LocVarEntry(lv, start, end) <- seen) { addToMerged(lv, start, end) }
+
+ /* for each var with unbalanced start(s) of scope(s):
+ (a) take the earliest start (among unbalanced and balanced starts)
+ (b) take the latest end (onePastLast if none available)
+ (c) merge the thus made-up interval
+ */
+ for(Pair(k, st) <- shouldBeEmpty) {
+ var start = st.toList.sortBy(_.getOffset).head
+ if(merged.isDefinedAt(k)) {
+ val balancedStart = merged(k).head.lstart
+ if(balancedStart.getOffset < start.getOffset) {
+ start = balancedStart;
+ }
+ }
+ val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend;
+ val end = endOpt.getOrElse(onePastLast)
+ addToMerged(k, start, end)
+ }
+
+ merged
+ }
+
+ private def repOK(fused: List[Interval]): Boolean = {
+ fused match {
+ case Nil => true
+ case h :: Nil => h.repOK
+ case h :: n :: rest =>
+ h.repOK && h.precedes(n) && !h.overlaps(n) && repOK(n :: rest)
+ }
+ }
+
+ }
+
+ def genLocalVariableTable() {
+ // adding `this` and method params.
+ if (!isStatic) {
+ jmethod.visitLocalVariable("this", thisDescr, null, labels(m.startBlock), onePastLast, 0)
+ }
+ for(lv <- m.params) {
+ jmethod.visitLocalVariable(javaName(lv.sym), descriptor(lv.kind), null, labels(m.startBlock), onePastLast, indexOf(lv))
+ }
+ // adding non-param locals
+ var anonCounter = 0
+ var fltnd: List[Triple[String, Local, Interval]] = Nil
+ for(Pair(local, ranges) <- scoping.getMerged()) {
+ var name = javaName(local.sym)
+ if (name == null) {
+ anonCounter += 1;
+ name = "<anon" + anonCounter + ">"
+ }
+ for(intrvl <- ranges) {
+ fltnd ::= Triple(name, local, intrvl)
+ }
+ }
+ // quest for deterministic output that Map.toList doesn't provide (so that ant test.stability doesn't complain).
+ val srtd = fltnd.sortBy { kr =>
+ val Triple(name: String, local: Local, intrvl: Interval) = kr
+
+ Triple(intrvl.start, intrvl.end - intrvl.start, name) // ie sort by (start, length, name)
+ }
+
+ for(Triple(name, local, Interval(start, end)) <- srtd) {
+ jmethod.visitLocalVariable(name, descriptor(local.kind), null, start, end, indexOf(local))
+ }
+ // "There may be no more than one LocalVariableTable attribute per local variable in the Code attribute"
+ }
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 4 of genCode(): Bookkeeping (to later emit debug info) of association between line-number and instruction position.
+ // ------------------------------------------------------------------------------------------------------------
+
+ case class LineNumberEntry(line: Int, start: asm.Label)
+ var lastLineNr: Int = -1
+ var lnEntries: List[LineNumberEntry] = Nil
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 5 of genCode(): "Utilities" to emit code proper (most prominently: genBlock()).
+ // ------------------------------------------------------------------------------------------------------------
+
+ var nextBlock: BasicBlock = linearization.head
+
+ def genBlocks(l: List[BasicBlock]): Unit = l match {
+ case Nil => ()
+ case x :: Nil => nextBlock = null; genBlock(x)
+ case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
+ }
+
+ def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
+ target.isPublic || target.isProtected && {
+ (site.enclClass isSubClass target.enclClass) ||
+ (site.enclosingPackage == target.privateWithin)
+ }
+ } // end of genCode()'s isAccessibleFrom()
+
+ def genCallMethod(call: CALL_METHOD) {
+ val CALL_METHOD(method, style) = call
+ val siteSymbol = clasz.symbol
+ val hostSymbol = call.hostClass
+ val methodOwner = method.owner
+ // info calls so that types are up to date; erasure may add lateINTERFACE to traits
+ hostSymbol.info ; methodOwner.info
+
+ def isInterfaceCall(sym: Symbol) = (
+ sym.isInterface && methodOwner != ObjectClass
+ || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
+ )
+ // whether to reference the type of the receiver or
+ // the type of the method owner (if not an interface!)
+ val useMethodOwner = (
+ style != Dynamic
+ || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol)
+ || hostSymbol.isBottomClass
+ )
+ val receiver = if (useMethodOwner) methodOwner else hostSymbol
+ val jowner = javaName(receiver)
+ val jname = javaName(method)
+ val jtype = javaType(method).getDescriptor()
+
+ def dbg(invoke: String) {
+ debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
+ }
+
+ def initModule() {
+ // we initialize the MODULE$ field immediately after the super ctor
+ if (isStaticModule(siteSymbol) && !isModuleInitialized &&
+ jMethodName == INSTANCE_CONSTRUCTOR_NAME &&
+ jname == INSTANCE_CONSTRUCTOR_NAME) {
+ isModuleInitialized = true
+ jmethod.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ jmethod.visitFieldInsn(asm.Opcodes.PUTSTATIC, thisName, strMODULE_INSTANCE_FIELD, thisDescr)
+ }
+ }
+
+ style match {
+ case Static(true) => dbg("invokespecial"); jcode.invokespecial (jowner, jname, jtype)
+ case Static(false) => dbg("invokestatic"); jcode.invokestatic (jowner, jname, jtype)
+          case Dynamic if isInterfaceCall(receiver) => dbg("invokeinterface"); jcode.invokeinterface(jowner, jname, jtype)
+ case Dynamic => dbg("invokevirtual"); jcode.invokevirtual (jowner, jname, jtype)
+ case SuperCall(_) =>
+ dbg("invokespecial")
+ jcode.invokespecial(jowner, jname, jtype)
+ initModule()
+ }
+ } // end of genCode()'s genCallMethod()
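+
+      // A rough sketch of the opcode selection performed just above (hypothetical helper; the
+      // real code pattern-matches on the ICode call `style` and on isInterfaceCall): statically
+      // bound calls on an instance (constructors, private members, super calls) use INVOKESPECIAL,
+      // calls to static members use INVOKESTATIC, dynamically bound calls through an interface
+      // use INVOKEINTERFACE, and the remaining dynamic calls use INVOKEVIRTUAL.
+      def invokeInsnFor(boundStaticallyOnInstance: Boolean, isStaticMember: Boolean, throughInterface: Boolean): String =
+        if (boundStaticallyOnInstance) "INVOKESPECIAL"
+        else if (isStaticMember) "INVOKESTATIC"
+        else if (throughInterface) "INVOKEINTERFACE"
+        else "INVOKEVIRTUAL"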
+
+ def genBlock(b: BasicBlock) {
+ jmethod.visitLabel(labels(b))
+
+ import asm.Opcodes;
+
+ debuglog("Generating code for block: " + b)
+
+ // val lastInstr = b.lastInstruction
+
+ for (instr <- b) {
+
+ if(instr.pos.isDefined) {
+ val iPos = instr.pos
+ val currentLineNr = iPos.line
+ val skip = (currentLineNr == lastLineNr) // if(iPos.isRange) iPos.sameRange(lastPos) else
+ if(!skip) {
+ lastLineNr = currentLineNr
+ val lineLab = new asm.Label
+ jmethod.visitLabel(lineLab)
+ lnEntries ::= LineNumberEntry(currentLineNr, lineLab)
+ }
+ }
+
+ instr match {
+ case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+
+ case CONSTANT(const) => genConstant(jmethod, const)
+
+ case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
+
+ case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
+
+ case lf @ LOAD_FIELD(field, isStatic) =>
+ var owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+
+ case LOAD_MODULE(module) =>
+ // assert(module.isModule, "Expected module: " + module)
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
+ if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
+ jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+ } else {
+ jmethod.visitFieldInsn(
+ Opcodes.GETSTATIC,
+ javaName(module) /* + "$" */ ,
+ strMODULE_INSTANCE_FIELD,
+ descriptor(module)
+ )
+ }
+
+ case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
+
+ case STORE_LOCAL(local) => jcode.store(indexOf(local), local.kind)
+
+ case STORE_THIS(_) =>
+ // this only works for impl classes because the self parameter comes first
+ // in the method signature. If that changes, this code has to be revisited.
+ jmethod.visitVarInsn(Opcodes.ASTORE, 0)
+
+ case STORE_FIELD(field, isStatic) =>
+ val owner = javaName(field.owner)
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.PUTSTATIC else Opcodes.PUTFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+
+ case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos)
+
+ /** Special handling to access native Array.clone() */
+ case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
+ val target: String = javaType(call.targetTypeKind).getInternalName
+ jcode.invokevirtual(target, "clone", mdesc_arrayClone)
+
+ case call @ CALL_METHOD(method, style) => genCallMethod(call)
+
+ case BOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
+
+ case UNBOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jUnboxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
+
+ case NEW(REFERENCE(cls)) =>
+ val className = javaName(cls)
+ jmethod.visitTypeInsn(Opcodes.NEW, className)
+
+ case CREATE_ARRAY(elem, 1) => jcode newarray elem
+
+ case CREATE_ARRAY(elem, dims) =>
+ jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
+
+ case IS_INSTANCE(tpe) =>
+ val jtyp: asm.Type =
+ tpe match {
+ case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
+ case ARRAY(elem) => javaArrayType(javaType(elem))
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
+ jmethod.visitTypeInsn(Opcodes.INSTANCEOF, jtyp.getInternalName)
+
+ case CHECK_CAST(tpe) =>
+ tpe match {
+
+ case REFERENCE(cls) =>
+ if (cls != ObjectClass) { // No need to checkcast for Objects
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, javaName(cls))
+ }
+
+ case ARRAY(elem) =>
+ val iname = javaArrayType(javaType(elem)).getInternalName
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, iname)
+
+              case _ => abort("Unknown reference type in CHECK_CAST: " + tpe)
+ }
+
+ case sw @ SWITCH(tagss, branches) =>
+ assert(branches.length == tagss.length + 1, sw)
+ val flatSize = sw.flatTagsCount
+ val flatKeys = new Array[Int](flatSize)
+ val flatBranches = new Array[asm.Label](flatSize)
+
+ var restTagss = tagss
+ var restBranches = branches
+ var k = 0 // ranges over flatKeys and flatBranches
+ while(restTagss.nonEmpty) {
+ val currLabel = labels(restBranches.head)
+ for(cTag <- restTagss.head) {
+ flatKeys(k) = cTag;
+ flatBranches(k) = currLabel
+ k += 1
+ }
+ restTagss = restTagss.tail
+ restBranches = restBranches.tail
+ }
+ val defaultLabel = labels(restBranches.head)
+ assert(restBranches.tail.isEmpty)
+ debuglog("Emitting SWITCH:\ntags: " + tagss + "\nbranches: " + branches)
+ jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
+
+ case JUMP(whereto) =>
+ if (nextBlock != whereto) {
+ jcode goTo labels(whereto)
+ }
+
+ case CJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF_ICMP(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF_ICMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ if (nextBlock == success) {
+ jcode.emitIF_ACMP(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF_ACMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else {
+ (kind: @unchecked) match {
+ case LONG => emit(Opcodes.LCMP)
+ case FLOAT =>
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
+
+ case CZJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ val Success = success
+ val Failure = failure
+ // @unchecked because references aren't compared with GT, GE, LT, LE.
+ ((cond, nextBlock) : @unchecked) match {
+ case (EQ, Success) => jcode emitIFNONNULL labels(failure)
+ case (NE, Failure) => jcode emitIFNONNULL labels(success)
+ case (EQ, Failure) => jcode emitIFNULL labels(success)
+ case (NE, Success) => jcode emitIFNULL labels(failure)
+ case (EQ, _) =>
+ jcode emitIFNULL labels(success)
+ jcode goTo labels(failure)
+ case (NE, _) =>
+ jcode emitIFNONNULL labels(success)
+ jcode goTo labels(failure)
+ }
+ } else {
+ (kind: @unchecked) match {
+ case LONG =>
+ emit(Opcodes.LCONST_0)
+ emit(Opcodes.LCMP)
+ case FLOAT =>
+ emit(Opcodes.FCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ emit(Opcodes.DCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
+
+ case RETURN(kind) => jcode emitRETURN kind
+
+ case THROW(_) => emit(Opcodes.ATHROW)
+
+ case DROP(kind) =>
+ emit(if(kind.isWideType) Opcodes.POP2 else Opcodes.POP)
+
+ case DUP(kind) =>
+ emit(if(kind.isWideType) Opcodes.DUP2 else Opcodes.DUP)
+
+ case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
+
+ case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
+
+ case SCOPE_ENTER(lv) =>
+ // locals removed by closelim (via CopyPropagation) may have left behind SCOPE_ENTER, SCOPE_EXIT that are to be ignored
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if(relevant) { // TODO check: does GenICode emit SCOPE_ENTER, SCOPE_EXIT for synthetic vars?
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val start = new asm.Label
+ jmethod.visitLabel(start)
+ scoping.pushScope(lv, start)
+ }
+
+ case SCOPE_EXIT(lv) =>
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if(relevant) {
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val end = new asm.Label
+ jmethod.visitLabel(end)
+ scoping.popScope(lv, end)
+ }
+
+ case LOAD_EXCEPTION(_) =>
+ ()
+ }
+
+ }
+
+ } // end of genCode()'s genBlock()
+
+ /**
+ * Emits one or more conversion instructions based on the types given as arguments.
+ *
+ * @param from The type of the value to be converted into another type.
+ * @param to The type the value will be converted into.
+ */
+ def emitT2T(from: TypeKind, to: TypeKind) {
+ assert(isNonUnitValueTK(from), from)
+ assert(isNonUnitValueTK(to), to)
+
+ def pickOne(opcs: Array[Int]) {
+ val chosen = (to: @unchecked) match {
+ case BYTE => opcs(0)
+ case SHORT => opcs(1)
+ case CHAR => opcs(2)
+ case INT => opcs(3)
+ case LONG => opcs(4)
+ case FLOAT => opcs(5)
+ case DOUBLE => opcs(6)
+ }
+ if(chosen != -1) { emit(chosen) }
+ }
+
+ if(from == to) { return }
+ if((from == BOOL) || (to == BOOL)) {
+ // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
+ throw new Error("inconvertible types : " + from.toString() + " -> " + to.toString())
+ }
+
+ if(from.isIntSizedType) { // BYTE, CHAR, SHORT, and INT. (we're done with BOOL already)
+
+ val fromByte = { import asm.Opcodes._; Array( -1, -1, I2C, -1, I2L, I2F, I2D) } // do nothing for (BYTE -> SHORT) and for (BYTE -> INT)
+ val fromChar = { import asm.Opcodes._; Array(I2B, I2S, -1, -1, I2L, I2F, I2D) } // for (CHAR -> INT) do nothing
+ val fromShort = { import asm.Opcodes._; Array(I2B, -1, I2C, -1, I2L, I2F, I2D) } // for (SHORT -> INT) do nothing
+ val fromInt = { import asm.Opcodes._; Array(I2B, I2S, I2C, -1, I2L, I2F, I2D) }
+
+ (from: @unchecked) match {
+ case BYTE => pickOne(fromByte)
+ case SHORT => pickOne(fromShort)
+ case CHAR => pickOne(fromChar)
+ case INT => pickOne(fromInt)
+ }
+
+ } else { // FLOAT, LONG, DOUBLE
+
+ (from: @unchecked) match {
+ case FLOAT =>
+ import asm.Opcodes.{ F2L, F2D, F2I }
+ (to: @unchecked) match {
+ case LONG => emit(F2L)
+ case DOUBLE => emit(F2D)
+ case _ => emit(F2I); emitT2T(INT, to)
+ }
+
+ case LONG =>
+ import asm.Opcodes.{ L2F, L2D, L2I }
+ (to: @unchecked) match {
+ case FLOAT => emit(L2F)
+ case DOUBLE => emit(L2D)
+ case _ => emit(L2I); emitT2T(INT, to)
+ }
+
+ case DOUBLE =>
+ import asm.Opcodes.{ D2L, D2F, D2I }
+ (to: @unchecked) match {
+ case FLOAT => emit(D2F)
+ case LONG => emit(D2L)
+ case _ => emit(D2I); emitT2T(INT, to)
+ }
+ }
+ }
+ } // end of genCode()'s emitT2T()
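+
+      // A standalone sketch of two rows of the conversion table above (mnemonics only;
+      // `conversionFor` is hypothetical and covers just the cases listed here): a widening
+      // INT -> LONG is a single I2L, while the narrowing LONG -> BYTE has no dedicated JVM
+      // instruction, so emitT2T() goes through INT and emits L2I followed by I2B.
+      def conversionFor(from: String, to: String): List[String] = (from, to) match {
+        case (f, t) if f == t => Nil
+        case ("INT",  "LONG") => List("I2L")          // single widening instruction
+        case ("LONG", "BYTE") => List("L2I", "I2B")   // narrow via INT, as emitT2T() does
+        case ("CHAR", "INT")  => Nil                  // already int-sized on the operand stack
+        case _                => sys.error("case not covered by this sketch")
+      }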
+
+ def genPrimitive(primitive: Primitive, pos: Position) {
+
+ import asm.Opcodes;
+
+ primitive match {
+
+ case Negation(kind) => jcode.neg(kind)
+
+ case Arithmetic(op, kind) =>
+ op match {
+
+ case ADD => jcode.add(kind)
+ case SUB => jcode.sub(kind)
+ case MUL => jcode.mul(kind)
+ case DIV => jcode.div(kind)
+ case REM => jcode.rem(kind)
+
+ case NOT =>
+ if(kind.isIntSizedType) {
+ emit(Opcodes.ICONST_M1)
+ emit(Opcodes.IXOR)
+ } else if(kind == LONG) {
+ jmethod.visitLdcInsn(new java.lang.Long(-1))
+ jmethod.visitInsn(Opcodes.LXOR)
+ } else {
+ abort("Impossible to negate an " + kind)
+ }
+
+ case _ =>
+ abort("Unknown arithmetic primitive " + primitive)
+ }
+
+          // TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, as its comments appear to convey)
+ // TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead.
+ // TODO How about adding some asserts to Logical and similar ones to capture the remaining constraint (UNIT not allowed).
+ case Logical(op, kind) => ((op, kind): @unchecked) match {
+ case (AND, LONG) => emit(Opcodes.LAND)
+ case (AND, INT) => emit(Opcodes.IAND)
+ case (AND, _) =>
+ emit(Opcodes.IAND)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+
+ case (OR, LONG) => emit(Opcodes.LOR)
+ case (OR, INT) => emit(Opcodes.IOR)
+ case (OR, _) =>
+ emit(Opcodes.IOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+
+ case (XOR, LONG) => emit(Opcodes.LXOR)
+ case (XOR, INT) => emit(Opcodes.IXOR)
+ case (XOR, _) =>
+ emit(Opcodes.IXOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+ }
+
+ case Shift(op, kind) => ((op, kind): @unchecked) match {
+ case (LSL, LONG) => emit(Opcodes.LSHL)
+ case (LSL, INT) => emit(Opcodes.ISHL)
+ case (LSL, _) =>
+ emit(Opcodes.ISHL)
+ emitT2T(INT, kind)
+
+ case (ASR, LONG) => emit(Opcodes.LSHR)
+ case (ASR, INT) => emit(Opcodes.ISHR)
+ case (ASR, _) =>
+ emit(Opcodes.ISHR)
+ emitT2T(INT, kind)
+
+ case (LSR, LONG) => emit(Opcodes.LUSHR)
+ case (LSR, INT) => emit(Opcodes.IUSHR)
+ case (LSR, _) =>
+ emit(Opcodes.IUSHR)
+ emitT2T(INT, kind)
+ }
+
+ case Comparison(op, kind) => ((op, kind): @unchecked) match {
+ case (CMP, LONG) => emit(Opcodes.LCMP)
+ case (CMPL, FLOAT) => emit(Opcodes.FCMPL)
+ case (CMPG, FLOAT) => emit(Opcodes.FCMPG)
+ case (CMPL, DOUBLE) => emit(Opcodes.DCMPL)
+ case (CMPG, DOUBLE) => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se5.0/html/Instructions2.doc3.html
+ }
+
+ case Conversion(src, dst) =>
+ debuglog("Converting from: " + src + " to: " + dst)
+ if (dst == BOOL) { println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line) }
+ else { emitT2T(src, dst) }
+
+ case ArrayLength(_) => emit(Opcodes.ARRAYLENGTH)
+
+ case StartConcat =>
+ jmethod.visitTypeInsn(Opcodes.NEW, StringBuilderClassName)
+ jmethod.visitInsn(Opcodes.DUP)
+ jcode.invokespecial(
+ StringBuilderClassName,
+ INSTANCE_CONSTRUCTOR_NAME,
+ mdesc_arglessvoid
+ )
+
+ case StringConcat(el) =>
+ val jtype = el match {
+ case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
+ case _ => javaType(el)
+ }
+ jcode.invokevirtual(
+ StringBuilderClassName,
+ "append",
+ asm.Type.getMethodDescriptor(StringBuilderType, Array(jtype): _*)
+ )
+
+ case EndConcat =>
+ jcode.invokevirtual(StringBuilderClassName, "toString", mdesc_toString)
+
+ case _ => abort("Unimplemented primitive " + primitive)
+ }
+ } // end of genCode()'s genPrimitive()
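+
+      // The StartConcat / StringConcat / EndConcat cases above spell out the usual StringBuilder
+      // idiom at the bytecode level; in source form the emitted code corresponds to something
+      // like this sketch (hypothetical, with arbitrary argument types):
+      def concatSketch(a: AnyRef, n: Int): String =
+        new java.lang.StringBuilder().append(a).append(n).toString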
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 6 of genCode(): the executable part of genCode() starts here.
+ // ------------------------------------------------------------------------------------------------------------
+
+ genBlocks(linearization)
+
+ jmethod.visitLabel(onePastLast)
+
+ if(emitLines) {
+ for(LineNumberEntry(line, start) <- lnEntries.sortBy(_.start.getOffset)) { jmethod.visitLineNumber(line, start) }
+ }
+ if(emitVars) { genLocalVariableTable() }
+
+ } // end of BytecodeGenerator.genCode()
+
+
+ ////////////////////// local vars ///////////////////////
+
+ // def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
+
+ def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
+
+ // def indexOf(m: IMethod, sym: Symbol): Int = {
+ // val Some(local) = m lookupLocal sym
+ // indexOf(local)
+ // }
+
+ @inline final def indexOf(local: Local): Int = {
+ assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
+ local.index
+ }
+
+ /**
+ * Compute the indexes of each local variable of the given method.
+ * *Does not assume the parameters come first!*
+ */
+ def computeLocalVarsIndex(m: IMethod) {
+ var idx = if (m.symbol.isStaticMember) 0 else 1;
+
+ for (l <- m.params) {
+ debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
+ l.index = idx
+ idx += sizeOf(l.kind)
+ }
+
+ for (l <- m.locals if !l.arg) {
+ debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
+ l.index = idx
+ idx += sizeOf(l.kind)
+ }
+ }
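+
+    // A worked example of the slot layout computed above: for an instance method
+    // `def f(a: Int, b: Long, c: String)`, slot 0 is `this`, `a` gets slot 1, `b` gets slot 2
+    // (and also occupies slot 3, LONG being a wide type), and `c` gets slot 4. The same
+    // arithmetic over (name, isWide) pairs, as a hypothetical standalone helper:
+    def slotIndexes(params: List[(String, Boolean)], isStaticMember: Boolean): Map[String, Int] = {
+      var idx = if (isStaticMember) 0 else 1
+      params.map { case (name, isWide) =>
+        val here = idx
+        idx += (if (isWide) 2 else 1)
+        name -> here
+      }.toMap
+    }
+    // slotIndexes(List("a" -> false, "b" -> true, "c" -> false), isStaticMember = false)
+    //   == Map("a" -> 1, "b" -> 2, "c" -> 4)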
+
+ } // end of class JPlainBuilder
+
+
+ /** builder of mirror classes */
+ class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) {
+
+ private var cunit: CompilationUnit = _
+ def getCurrentCUnit(): CompilationUnit = cunit;
+
+ /** Generate a mirror class for a top-level module. A mirror class is a class
+ * containing only static methods that forward to the corresponding method
+ * on the MODULE instance of the given Scala object. It will only be
+ * generated if there is no companion class: if there is, an attempt will
+ * instead be made to add the forwarder methods to the companion class.
+ */
+ def genMirrorClass(modsym: Symbol, cunit: CompilationUnit) {
+ assert(modsym.companionClass == NoSymbol, modsym)
+ innerClassBuffer.clear()
+ this.cunit = cunit
+ val moduleName = javaName(modsym) // + "$"
+ val mirrorName = moduleName.substring(0, moduleName.length() - 1)
+
+ val flags = (asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL)
+ val mirrorClass = createJClass(flags,
+ mirrorName,
+ null /* no java-generic-signature */,
+ JAVA_LANG_OBJECT.getInternalName,
+ EMPTY_STRING_ARRAY)
+
+ log("Dumping mirror class for '%s'".format(mirrorName))
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
+
+ mirrorClass.visitSource("" + cunit.source,
+ null /* SourceDebugExtension */)
+
+ val ssa = getAnnotPickle(mirrorName, modsym.companionSymbol)
+ mirrorClass.visitAttribute(if(ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+ emitAnnotations(mirrorClass, modsym.annotations ++ ssa)
+
+ // typestate: entering mode with valid call sequences:
+ // ( visitInnerClass | visitField | visitMethod )* visitEnd
+
+ addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym)
+
+ addInnerClasses(modsym, mirrorClass)
+ mirrorClass.visitEnd()
+ writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym)
+ }
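+
+    // A sketch of what the mirror class amounts to, in source form (hypothetical names; nothing
+    // here is consulted by the generator): for a top-level `object Echo` with a method `greet`
+    // and no companion class, the mirror class named `Echo` gets a static `greet` that simply
+    // forwards to the singleton via `Echo$.MODULE$`, i.e. it behaves like `staticGreet` below.
+    private object MirrorSketch {
+      object Echo { def greet(name: String): String = "hi " + name }
+      def staticGreet(name: String): String = Echo.greet(name)   // what the static forwarder does
+    }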
+
+
+ } // end of class JMirrorBuilder
+
+
+ /** builder of bean info classes */
+ class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+
+ /**
+ * Generate a bean info class that describes the given class.
+ *
+ * @author Ross Judson (ross.judson@soletta.com)
+ */
+ def genBeanInfoClass(clasz: IClass) {
+
+ // val BeanInfoSkipAttr = definitions.getRequiredClass("scala.beans.BeanInfoSkip")
+ // val BeanDisplayNameAttr = definitions.getRequiredClass("scala.beans.BeanDisplayName")
+ // val BeanDescriptionAttr = definitions.getRequiredClass("scala.beans.BeanDescription")
+ // val description = c.symbol getAnnotation BeanDescriptionAttr
+ // informProgress(description.toString)
+ innerClassBuffer.clear()
+
+ val flags = mkFlags(
+ javaFlags(clasz.symbol),
+ if(isDeprecated(clasz.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val beanInfoName = (javaName(clasz.symbol) + "BeanInfo")
+ val beanInfoClass = createJClass(
+ flags,
+ beanInfoName,
+ null, // no java-generic-signature
+ "scala/beans/ScalaBeanInfo",
+ EMPTY_STRING_ARRAY
+ )
+
+ // beanInfoClass typestate: entering mode with valid call sequences:
+ // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
+
+ beanInfoClass.visitSource(
+ clasz.cunit.source.toString,
+ null /* SourceDebugExtension */
+ )
+
+ var fieldList = List[String]()
+
+ for (f <- clasz.fields if f.symbol.hasGetter;
+ g = f.symbol.getter(clasz.symbol);
+ s = f.symbol.setter(clasz.symbol);
+ if g.isPublic && !(f.symbol.name startsWith "$")
+ ) {
+ // inserting $outer breaks the bean
+ fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
+ }
+
+ val methodList: List[String] =
+ for (m <- clasz.methods
+ if !m.symbol.isConstructor &&
+ m.symbol.isPublic &&
+ !(m.symbol.name startsWith "$") &&
+ !m.symbol.isGetter &&
+ !m.symbol.isSetter)
+ yield javaName(m.symbol)
+
+ // beanInfoClass typestate: entering mode with valid call sequences:
+ // ( visitInnerClass | visitField | visitMethod )* visitEnd
+
+ val constructor = beanInfoClass.visitMethod(
+ asm.Opcodes.ACC_PUBLIC,
+ INSTANCE_CONSTRUCTOR_NAME,
+ mdesc_arglessvoid,
+ null, // no java-generic-signature
+ EMPTY_STRING_ARRAY // no throwable exceptions
+ )
+
+ // constructor typestate: entering mode with valid call sequences:
+ // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
+
+ val stringArrayJType: asm.Type = javaArrayType(JAVA_LANG_STRING)
+ val conJType: asm.Type =
+ asm.Type.getMethodType(
+ asm.Type.VOID_TYPE,
+ Array(javaType(ClassClass), stringArrayJType, stringArrayJType): _*
+ )
+
+ def push(lst: List[String]) {
+ var fi = 0
+ for (f <- lst) {
+ constructor.visitInsn(asm.Opcodes.DUP)
+ constructor.visitLdcInsn(new java.lang.Integer(fi))
+ if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) }
+ else { constructor.visitLdcInsn(f) }
+ constructor.visitInsn(JAVA_LANG_STRING.getOpcode(asm.Opcodes.IASTORE))
+ fi += 1
+ }
+ }
+
+ // constructor typestate: entering mode with valid call sequences:
+ // [ visitCode ( visitFrame | visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber )* visitMaxs ] visitEnd
+
+ constructor.visitCode()
+
+ constructor.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ // push the class
+ constructor.visitLdcInsn(javaType(clasz.symbol))
+
+ // push the string array of field information
+ constructor.visitLdcInsn(new java.lang.Integer(fieldList.length))
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ push(fieldList)
+
+ // push the string array of method information
+ constructor.visitLdcInsn(new java.lang.Integer(methodList.length))
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ push(methodList)
+
+ // invoke the superclass constructor, which will do the
+ // necessary java reflection and create Method objects.
+ constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor)
+ constructor.visitInsn(asm.Opcodes.RETURN)
+
+ constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ constructor.visitEnd()
+
+ // TODO no inner classes attribute is written. Confirm intent.
+ assert(innerClassBuffer.isEmpty, innerClassBuffer)
+
+ beanInfoClass.visitEnd()
+
+ writeIfNotTooBig("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
+ }
+
+ } // end of class JBeanInfoBuilder
+
+  /** A namespace for utilities to normalize the code of an IMethod, above and beyond what IMethod.normalize() strives for.
+   * In particular, IMethod.normalize() doesn't collapseJumpOnlyBlocks().
+ *
+ * TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them).
+ */
+ object newNormal {
+
+ def startsWithJump(b: BasicBlock): Boolean = { assert(b.nonEmpty, "empty block"); b.firstInstruction.isInstanceOf[JUMP] }
+
+ /** Prune from an exception handler those covered blocks which are jump-only. */
+ private def coverWhatCountsOnly(m: IMethod): Boolean = {
+ assert(m.hasCode, "code-less method")
+
+ var wasReduced = false
+ for(h <- m.exh) {
+ val shouldntCover = (h.covered filter startsWithJump)
+ if(shouldntCover.nonEmpty) {
+ wasReduced = true
+ h.covered --= shouldntCover // not removing any block on purpose.
+ }
+ }
+
+ wasReduced
+ }
+
+ /** An exception handler is pruned provided any of the following holds:
+ * (1) it covers nothing (for example, this may result after removing unreachable blocks)
+ * (2) each block it covers is of the form: JUMP(_)
+ * Return true iff one or more ExceptionHandlers were removed.
+ *
+ * A caveat: removing an exception handler, for whatever reason, means that its handler code (even if unreachable)
+ * won't be able to cause a class-loading-exception. As a result, behavior can be different.
+ */
+ private def elimNonCoveringExh(m: IMethod): Boolean = {
+ assert(m.hasCode, "code-less method")
+
+ def isRedundant(eh: ExceptionHandler): Boolean = {
+ (eh.cls != NoSymbol) && ( // TODO `eh.isFinallyBlock` more readable than `eh.cls != NoSymbol`
+ eh.covered.isEmpty
+ || (eh.covered forall startsWithJump)
+ )
+ }
+
+ var wasReduced = false
+ val toPrune = (m.exh.toSet filter isRedundant)
+ if(toPrune.nonEmpty) {
+ wasReduced = true
+ for(h <- toPrune; r <- h.blocks) { m.code.removeBlock(r) } // TODO m.code.removeExh(h)
+ m.exh = (m.exh filterNot toPrune)
+ }
+
+ wasReduced
+ }
+
+ private def isJumpOnly(b: BasicBlock): Option[BasicBlock] = {
+ b.toList match {
+ case JUMP(whereto) :: rest =>
+ assert(rest.isEmpty, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
+ Some(whereto)
+ case _ => None
+ }
+ }
+
+ private def directSuccStar(b: BasicBlock): List[BasicBlock] = { directSuccStar(List(b)) }
+
+ /** Transitive closure of successors potentially reachable due to normal (non-exceptional) control flow.
+ Those BBs in the argument are also included in the result */
+ private def directSuccStar(starters: Traversable[BasicBlock]): List[BasicBlock] = {
+ val result = new mutable.ListBuffer[BasicBlock]
+ var toVisit: List[BasicBlock] = starters.toList.distinct
+ while(toVisit.nonEmpty) {
+ val h = toVisit.head
+ toVisit = toVisit.tail
+ result += h
+ for(p <- h.directSuccessors; if !result.contains(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
+ }
+ result.toList
+ }
+
+ /** Returns:
+ * for single-block self-loops, the pair (start, Nil)
+ * for other cycles, the pair (backedge-target, basic-blocks-in-the-cycle-except-backedge-target)
+ * otherwise a pair consisting of:
+ * (a) the endpoint of a (single or multi-hop) chain of JUMPs
+ * (such endpoint does not start with a JUMP and therefore is not part of the chain); and
+ * (b) the chain (ie blocks to be removed when collapsing the chain of jumps).
+ * Precondition: the BasicBlock given as argument starts with an unconditional JUMP.
+ */
+ private def finalDestination(start: BasicBlock): (BasicBlock, List[BasicBlock]) = {
+ assert(startsWithJump(start), "not the start of a (single or multi-hop) chain of JUMPs.")
+ var hops: List[BasicBlock] = Nil
+ var prev = start
+ var done = false
+ do {
+ done = isJumpOnly(prev) match {
+ case Some(dest) =>
+ if (dest == start) { return (start, hops) } // leave infinite-loops in place
+ hops ::= prev
+ if (hops.contains(dest)) {
+ // leave infinite-loops in place
+ return (dest, hops filterNot (dest eq))
+ }
+ prev = dest;
+ false
+ case None => true
+ }
+ } while(!done)
+
+ (prev, hops)
+ }
+
+ /**
+ * Collapse a chain of "jump-only" blocks such as:
+ *
+ * JUMP b1;
+ * b1: JUMP b2;
+ * b2: JUMP ... etc.
+ *
+ * by re-wiring predecessors to target directly the "final destination".
+ * Even if covered by an exception handler, a "non-self-loop jump-only block" can always be removed.
+     *
+ * Returns true if any replacement was made, false otherwise.
+ *
+ * In more detail:
+ * Starting at each of the entry points (m.startBlock, the start block of each exception handler)
+ * rephrase those control-flow instructions targeting a jump-only block (which jumps to a final destination D) to target D.
+ * The blocks thus skipped are also removed from IMethod.blocks.
+ *
+ * Rationale for this normalization:
+ * test/files/run/private-inline.scala after -optimize is chock full of
+ * BasicBlocks containing just JUMP(whereTo), where no exception handler straddles them.
+ * They should be collapsed by IMethod.normalize() but aren't.
+ * That was fine in FJBG times when by the time the exception table was emitted,
+     *   it already contained "anchored" labels (i.e. instruction offsets were known)
+     *   and thus ranges with identical (start, end) (i.e. identical after GenJVM omitted the JUMPs in question)
+ * could be weeded out to avoid "java.lang.ClassFormatError: Illegal exception table range"
+ * Now that visitTryCatchBlock() must be called before Labels are resolved,
+ * this method gets rid of the BasicBlocks described above (to recap, consisting of just a JUMP).
+ */
+ private def collapseJumpOnlyBlocks(m: IMethod): Boolean = {
+ assert(m.hasCode, "code-less method")
+
+ /* "start" is relative in a cycle, but we call this helper with the "first" entry-point we found. */
+ def realTarget(jumpStart: BasicBlock): Map[BasicBlock, BasicBlock] = {
+ assert(startsWithJump(jumpStart), "not part of a jump-chain")
+ val Pair(dest, redundants) = finalDestination(jumpStart)
+ (for(skipOver <- redundants) yield Pair(skipOver, dest)).toMap
+ }
+
+ def rephraseGotos(detour: Map[BasicBlock, BasicBlock]) {
+ for(Pair(oldTarget, newTarget) <- detour.iterator) {
+ if(m.startBlock == oldTarget) {
+ m.code.startBlock = newTarget
+ }
+ for(eh <- m.exh; if eh.startBlock == oldTarget) {
+ eh.setStartBlock(newTarget)
+ }
+ for(b <- m.blocks; if !detour.isDefinedAt(b)) {
+ val idxLast = (b.size - 1)
+ b.lastInstruction match {
+ case JUMP(whereto) =>
+ if (whereto == oldTarget) {
+ b.replaceInstruction(idxLast, JUMP(newTarget))
+ }
+ case CJUMP(succ, fail, cond, kind) =>
+ if ((succ == oldTarget) || (fail == oldTarget)) {
+ b.replaceInstruction(idxLast, CJUMP(detour.getOrElse(succ, succ),
+ detour.getOrElse(fail, fail),
+ cond, kind))
+ }
+ case CZJUMP(succ, fail, cond, kind) =>
+ if ((succ == oldTarget) || (fail == oldTarget)) {
+ b.replaceInstruction(idxLast, CZJUMP(detour.getOrElse(succ, succ),
+ detour.getOrElse(fail, fail),
+ cond, kind))
+ }
+ case SWITCH(tags, labels) =>
+ if(labels exists (detour.isDefinedAt(_))) {
+ val newLabels = (labels map { lab => detour.getOrElse(lab, lab) })
+ b.replaceInstruction(idxLast, SWITCH(tags, newLabels))
+ }
+ case _ => ()
+ }
+ }
+ }
+ }
+
+      /* remove the block from all containers that may contain a reference to it */
+ def elide(redu: BasicBlock) {
+ assert(m.startBlock != redu, "startBlock should have been re-wired by now")
+ m.code.removeBlock(redu);
+ }
+
+ var wasReduced = false
+ val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock));
+
+ var elided = mutable.Set.empty[BasicBlock] // debug
+ var newTargets = mutable.Set.empty[BasicBlock] // debug
+
+ for (ep <- entryPoints) {
+ var reachable = directSuccStar(ep) // this list may contain blocks belonging to jump-chains that we'll skip over
+ while(reachable.nonEmpty) {
+ val h = reachable.head
+ reachable = reachable.tail
+ if(startsWithJump(h)) {
+ val detour = realTarget(h)
+ if(detour.nonEmpty) {
+ wasReduced = true
+ reachable = (reachable filterNot (detour.keySet.contains(_)))
+ rephraseGotos(detour)
+ detour.keySet foreach elide
+ elided ++= detour.keySet
+ newTargets ++= detour.values
+ }
+ }
+ }
+ }
+      assert(newTargets.intersect(elided).isEmpty, "contradiction: we just elided the final destination of a jump-chain")
+
+ wasReduced
+ }
+
+ def normalize(m: IMethod) {
+ if(!m.hasCode) { return }
+ collapseJumpOnlyBlocks(m)
+ var wasReduced = false;
+ do {
+ wasReduced = false
+ // Prune from an exception handler those covered blocks which are jump-only.
+ wasReduced |= coverWhatCountsOnly(m); icodes.checkValid(m) // TODO should be unnecessary now that collapseJumpOnlyBlocks(m) is in place
+ // Prune exception handlers covering nothing.
+ wasReduced |= elimNonCoveringExh(m); icodes.checkValid(m)
+
+ // TODO see note in genExceptionHandlers about an ExceptionHandler.covered containing dead blocks (newNormal should remove them, but, where do those blocks come from?)
+ } while (wasReduced)
+
+ // TODO this would be a good time to remove synthetic local vars seeing no use, don't forget to call computeLocalVarsIndex() afterwards.
+ }
+
+ }
+
+}
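
The jump-chain collapse implemented by `finalDestination` and `collapseJumpOnlyBlocks` above is easiest to see on a toy model. The sketch below is not compiler code: basic blocks are plain Ints, `jumpOnly` stands in for `isJumpOnly` (mapping a jump-only block to its sole successor), and the object name and `main` driver are invented for illustration. It follows a chain of JUMPs until it reaches a block that does real work, leaving self-loops and other cycles in place, as the code above does.

object JumpChainSketch {
  /** Follow a chain of jump-only blocks starting at `start`.
   *  Returns (final destination, blocks that can be elided); cycles are left in place. */
  def finalDestination(start: Int, jumpOnly: Map[Int, Int]): (Int, List[Int]) = {
    @scala.annotation.tailrec
    def loop(prev: Int, hops: List[Int]): (Int, List[Int]) =
      jumpOnly.get(prev) match {
        case None                        => (prev, hops)                   // a block that does real work
        case Some(dest) if dest == start => (start, hops)                  // loops back to the start: leave it alone
        case Some(dest) =>
          val newHops = prev :: hops
          if (newHops contains dest) (dest, newHops filterNot (_ == dest)) // some other cycle: leave it alone
          else loop(dest, newHops)
      }
    loop(start, Nil)
  }

  def main(args: Array[String]): Unit = {
    // 1 -> 2 -> 3 -> 4, where 4 is the first block that is not jump-only
    println(finalDestination(1, Map(1 -> 2, 2 -> 3, 3 -> 4)))  // (4, List(3, 2, 1))
    // a single-block self-loop is returned unchanged, with nothing to elide
    println(finalDestination(1, Map(1 -> 1)))                  // (1, List())
  }
}

Re-wiring every predecessor of the elided blocks to the returned destination is then what `rephraseGotos` does on the real BasicBlocks.
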
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index c04be1721e..c29630d04b 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -6,11 +6,10 @@
package scala.tools.nsc
package backend.jvm
-import java.io.{ DataOutputStream, OutputStream }
+import java.io.{ByteArrayOutputStream, DataOutputStream, OutputStream }
import java.nio.ByteBuffer
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
-import scala.tools.reflect.SigParser
import scala.tools.nsc.symtab._
import scala.tools.nsc.util.{ SourceFile, NoSourceFile }
import scala.reflect.internal.ClassfileConstants._
@@ -19,6 +18,7 @@ import JAccessFlags._
import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT }
import java.util.jar.{ JarEntry, JarOutputStream }
import scala.tools.nsc.io.AbstractFile
+import language.postfixOps
/** This class ...
*
@@ -121,9 +121,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (settings.debug.value)
inform("[running phase " + name + " on icode]")
- if (settings.Xverify.value && !SigParser.isParserAvailable)
- global.warning("signature verification requested by signature parser unavailable: signatures not checked")
-
if (settings.Xdce.value)
for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
icodes.classes -= sym
@@ -190,7 +187,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val MIN_SWITCH_DENSITY = 0.7
val INNER_CLASSES_FLAGS =
- (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_FINAL | ACC_INTERFACE | ACC_ABSTRACT)
+ (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_INTERFACE | ACC_ABSTRACT)
val PublicStatic = ACC_PUBLIC | ACC_STATIC
val PublicStaticFinal = ACC_PUBLIC | ACC_STATIC | ACC_FINAL
@@ -344,7 +341,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
*/
def emitClass(jclass: JClass, sym: Symbol) {
addInnerClasses(jclass)
- writeClass("" + sym.name, jclass, sym)
+ writeClass("" + sym.name, jclass.getName(), toByteArray(jclass), sym)
}
/** Returns the ScalaSignature annotation if it must be added to this class,
@@ -516,6 +513,14 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
}
+ private def toByteArray(jc: JClass): Array[Byte] = {
+ val bos = new java.io.ByteArrayOutputStream()
+ val dos = new java.io.DataOutputStream(bos)
+ jc.writeTo(dos)
+ dos.close()
+ bos.toByteArray
+ }
+
/**
* Generate a bean info class that describes the given class.
*
@@ -585,7 +590,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
jcode.emitRETURN()
// write the bean information class file.
- writeClass("BeanInfo ", beanInfoClass, c.symbol)
+ writeClass("BeanInfo ", beanInfoClass.getName(), toByteArray(beanInfoClass), c.symbol)
}
/** Add the given 'throws' attributes to jmethod */
@@ -713,14 +718,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
nannots
}
- /** Run the signature parser to catch bogus signatures.
- */
- def isValidSignature(sym: Symbol, sig: String) = (
- if (sym.isMethod) SigParser verifyMethod sig
- else if (sym.isTerm) SigParser verifyType sig
- else SigParser verifyClass sig
- )
-
// @M don't generate java generics sigs for (members of) implementation
// classes, as they are monomorphic (TODO: ok?)
private def needsGenericSignature(sym: Symbol) = !(
@@ -742,19 +739,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
erasure.javaSig(sym, memberTpe) foreach { sig =>
// This seems useful enough in the general case.
log(sig)
- /** Since we're using a sun internal class for signature validation,
- * we have to allow for it not existing or otherwise malfunctioning:
- * in which case we treat every signature as valid. Medium term we
- * should certainly write independent signature validation.
- */
- if (settings.Xverify.value && SigParser.isParserAvailable && !isValidSignature(sym, sig)) {
- clasz.cunit.warning(sym.pos,
- """|compiler bug: created invalid generic signature for %s in %s
- |signature: %s
- |if this is reproducible, please report bug at https://issues.scala-lang.org/
- """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
- return
- }
if (checkSignatures) {
val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
val bytecodeTpe = owner.thisType.memberInfo(sym)
@@ -1150,6 +1134,27 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
*/
def generateMirrorClass(clasz: Symbol, sourceFile: SourceFile) {
import JAccessFlags._
+    /* We need to save the inner-classes buffer and create a fresh one to make sure
+     * that we do not confuse the inner classes referenced by the mirror class with
+     * those referenced by the class being mirrored. These two sets can differ,
+     * as seen in this case:
+ *
+ * class A {
+ * class B
+ * def b: B = new B
+ * }
+ * object C extends A
+ *
+     * Here the mirror class of C has a static forwarder for the (inherited) method `b`,
+     * therefore it refers to class `B` and needs an InnerClasses entry. However,
+     * the real class for `C` (named `C$`) is empty, does not refer to `B`,
+     * and thus does not need an InnerClasses entry.
+ *
+ * NOTE: This logic has been refactored in GenASM and everything is
+ * implemented in a much cleaner way by having two separate buffers.
+ */
+ val savedInnerClasses = innerClassBuffer
+ innerClassBuffer = mutable.LinkedHashSet[Symbol]()
val moduleName = javaName(clasz) // + "$"
val mirrorName = moduleName.substring(0, moduleName.length() - 1)
val mirrorClass = fjbgContext.JClass(ACC_SUPER | ACC_PUBLIC | ACC_FINAL,
@@ -1163,6 +1168,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val ssa = scalaSignatureAddingMarker(mirrorClass, clasz.companionSymbol)
addAnnotations(mirrorClass, clasz.annotations ++ ssa)
emitClass(mirrorClass, clasz)
+ innerClassBuffer = savedInnerClasses
}
var linearization: List[BasicBlock] = Nil
@@ -1323,7 +1329,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val lastInstr = b.lastInstruction
for (instr <- b) {
-
instr match {
case THIS(clasz) => jcode.emitALOAD_0()
@@ -1527,6 +1532,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
case (NE, _) =>
jcode emitIFNONNULL labels(success)
jcode.emitGOTO_maybe_W(labels(failure), false)
+ case _ =>
}
} else {
(kind: @unchecked) match {
@@ -1946,8 +1952,10 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// we can exclude lateFINAL. Such symbols are eligible for inlining, but to
// avoid breaking proxy software which depends on subclassing, we do not
// emit ACC_FINAL.
+ // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
+
val finalFlag = (
- ((sym.rawflags & (Flags.FINAL | Flags.MODULE)) != 0)
+ (sym.hasFlag(Flags.FINAL) || isTopLevelModule(sym))
&& !sym.enclClass.isInterface
&& !sym.isClassConstructor
&& !sym.isMutable // lazy vals and vars both
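
The comment added to `generateMirrorClass` above describes saving `innerClassBuffer` and emitting the mirror class against a fresh buffer. The sketch below packages that save/restore idea as a small helper; it is illustrative only (the buffer holds Strings rather than Symbols, and `withFreshInnerClassBuffer` is an invented name), whereas the patch itself simply saves the buffer before building the mirror class and restores it right after `emitClass`.

import scala.collection.mutable

object InnerClassBufferSketch {
  var innerClassBuffer = mutable.LinkedHashSet[String]()

  /** Run `body` against a fresh buffer, then restore the caller's buffer. */
  def withFreshInnerClassBuffer[A](body: => A): A = {
    val saved = innerClassBuffer
    innerClassBuffer = mutable.LinkedHashSet[String]()
    try body
    finally innerClassBuffer = saved
  }

  def main(args: Array[String]): Unit = {
    innerClassBuffer += "InnerOfRealClass"     // entries collected so far for the class being emitted
    withFreshInnerClassBuffer {
      innerClassBuffer += "InnerOfMirrorClass" // entries collected while emitting the mirror class
      println(innerClassBuffer)                // LinkedHashSet(InnerOfMirrorClass)
    }
    println(innerClassBuffer)                  // LinkedHashSet(InnerOfRealClass) -- unaffected
  }
}
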
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index 98c1fc2f63..d23571b517 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -15,6 +15,7 @@ import scala.tools.nsc.symtab._
import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
import ch.epfl.lamp.compiler.msil.emit._
import ch.epfl.lamp.compiler.msil.util.PECustomMod
+import language.postfixOps
abstract class GenMSIL extends SubComponent {
import global._
@@ -1552,7 +1553,7 @@ abstract class GenMSIL extends SubComponent {
}
def emitBrBool(cond: TestOp, dest: Label) {
- cond match {
+ (cond: @unchecked) match {
// EQ -> Brfalse, NE -> Brtrue; this is because we come from
// a CZJUMP. If the value on the stack is 0 (e.g. a boolean
// method returned false), and we are in the case EQ, then
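
The `(cond: @unchecked) match` change above is worth a note: annotating the scrutinee with `@unchecked` tells the compiler not to warn that the match is non-exhaustive, which fits here because `emitBrBool` is presumably only reached from a CZJUMP with an EQ or NE test. A small, self-contained illustration with invented types:

object UncheckedMatchSketch {
  sealed trait TestOp
  case object EQ extends TestOp
  case object NE extends TestOp
  case object LT extends TestOp   // exists in the hierarchy but never reaches brFor in this sketch

  def brFor(cond: TestOp): String = (cond: @unchecked) match {
    case EQ => "brfalse"   // a false value on the stack means the EQ test succeeded
    case NE => "brtrue"
    // LT is deliberately not handled; without @unchecked the compiler would warn
    // that the match on the sealed TestOp hierarchy is not exhaustive
  }

  def main(args: Array[String]): Unit =
    println(brFor(NE))   // prints "brtrue"; passing LT would throw a MatchError at runtime
}
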
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 95c371fa8b..d4ee9b6b48 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -82,7 +82,7 @@ abstract class DeadCodeElimination extends SubComponent {
mark
sweep(m)
accessedLocals = accessedLocals.distinct
- if (m.locals diff accessedLocals nonEmpty) {
+ if ((m.locals diff accessedLocals).nonEmpty) {
log("Removed dead locals: " + (m.locals diff accessedLocals))
m.locals = accessedLocals.reverse
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index ec137203bf..0d47352215 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -360,7 +360,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
val caughtException = toTypeKind(caughtClass.tpe)
// copy the exception handler code once again, dropping the LOAD_EXCEPTION
val copy = handler.code.newBlock
- copy.emitOnly(handler.iterator drop dropCount toSeq: _*)
+ copy.emitOnly((handler.iterator drop dropCount).toSeq: _*)
// extend the handlers of the handler to the copy
for (parentHandler <- handler.method.exh ; if parentHandler covers handler) {
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index dfe9081ee5..08e059419a 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -9,7 +9,7 @@ package backend.opt
import scala.collection.mutable
import scala.tools.nsc.symtab._
-import scala.tools.nsc.util.{ NoSourceFile }
+import scala.tools.nsc.util.NoSourceFile
/**
* @author Iulian Dragos
@@ -135,7 +135,7 @@ abstract class Inliners extends SubComponent {
/** The current iclass */
private var currentIClazz: IClass = _
- private def warn(pos: Position, msg: String) = currentIClazz.cunit.warning(pos, msg)
+ private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
val recentTFAs = mutable.Map.empty[Symbol, Tuple2[Boolean, analysis.MethodTFA]]
private def getRecentTFA(incm: IMethod): (Boolean, analysis.MethodTFA) = {
@@ -196,33 +196,35 @@ abstract class Inliners extends SubComponent {
val staleIn = mutable.Set.empty[BasicBlock]
/**
- * A transformation local to the body of the argument.
+ * A transformation local to the body of the IMethod received as argument.
      * An inlining decision consists of replacing a callsite with the body of the callee.
* Please notice that, because `analyzeMethod()` itself may modify a method body,
* the particular callee bodies that end up being inlined depend on the particular order in which methods are visited
- * (no topological ordering over the call-graph is attempted).
+ * (no topological sorting over the call-graph is attempted).
*
* Making an inlining decision requires type-flow information for both caller and callee.
* Regarding the caller, such information is needed only for basic blocks containing inlining candidates
* (and their transitive predecessors). This observation leads to using a custom type-flow analysis (MTFAGrowable)
- * that can be re-inited, i.e. that reuses lattice elements (type-flow information) computed in a previous iteration
+ * that can be re-inited, i.e. that reuses lattice elements (type-flow information computed in a previous iteration)
* as starting point for faster convergence in a new iteration.
*
* The mechanics of inlining are iterative for a given invocation of `analyzeMethod(m)`,
- * thus considering the basic blocks that successful inlining added in a previous iteration:
+ * and are affected by inlinings from previous iterations
+     * (i.e., "heuristic" rules are based on statistics tracked for that purpose):
*
* (1) before the iterations proper start, so-called preinlining is performed.
* Those callsites whose (receiver, concreteMethod) are both known statically
* can be analyzed for inlining before computing a type-flow. Details in `preInline()`
*
* (2) the first iteration computes type-flow information for basic blocks containing inlining candidates
- * (and their transitive predecessors), so called `relevantBBs`.
+ * (and their transitive predecessors), so called `relevantBBs` basic blocks.
* The ensuing analysis of each candidate (performed by `analyzeInc()`)
- * may result in a CFG isomorphic to that of the callee being inserted where the callsite was
- * (i.e. a CALL_METHOD instruction is replaced with a single-entry single-exit CFG, which we call "successful inlining").
+ * may result in a CFG isomorphic to that of the callee being inserted in place of the callsite
+ * (i.e. a CALL_METHOD instruction is replaced with a single-entry single-exit CFG,
+ * a situation we call "successful inlining").
*
- * (3) following iterations have their relevant basic blocks updated to focus
- * on the inlined basic blocks and their successors only. Details in `MTFAGrowable.reinit()`
+ * (3) following iterations have `relevantBBs` updated to focus on the inlined basic blocks and their successors only.
+ * Details in `MTFAGrowable.reinit()`
* */
def analyzeMethod(m: IMethod): Unit = {
// m.normalize
@@ -372,7 +374,7 @@ abstract class Inliners extends SubComponent {
* That's why preInline() is invoked twice: any inlinings downplayed by the heuristics during the first round get an opportunity to rank higher during the second.
*
* As a whole, both `preInline()` invocations amount to priming the inlining process,
- * so that the first TFA run afterwards is able to gain more information as compared to a cold-start.
+ * so that the first TFA that is run afterwards is able to gain more information as compared to a cold-start.
*/
val totalPreInlines = {
val firstRound = preInline(true)
@@ -388,9 +390,10 @@ abstract class Inliners extends SubComponent {
/* it's important not to inline in unreachable basic blocks. linearizedBlocks() returns only reachable ones. */
tfa.callerLin = caller.m.linearizedBlocks()
- /* TODO Do we want to perform inlining in non-finally exception handlers?
+ /* TODO Do we really want to inline inside exception handlers?
* Seems counterproductive (the larger the method the less likely it will be JITed).
- * The alternative above would be `linearizer.linearizeAt(caller.m, caller.m.startBlock)`.
+ * The alternative would be `linearizer.linearizeAt(caller.m, caller.m.startBlock)`.
+ * And, we would cut down on TFA iterations, too.
* See also comment on the same topic in TypeFlowAnalysis. */
tfa.reinit(m, staleOut.toList, splicedBlocks, staleIn)
@@ -759,12 +762,16 @@ abstract class Inliners extends SubComponent {
private def helperIsSafeToInline(stackLength: Int): Boolean = {
def makePublic(f: Symbol): Boolean =
- (inc.m.sourceFile ne NoSourceFile) && (f.isSynthetic || f.isParamAccessor) && {
- debuglog("Making not-private symbol out of synthetic: " + f)
+ /*
+       * Completely disabling the practice of making members public for inlining. This shouldn't have been done in the first place. :|
+ */
+ false
+ // (inc.m.sourceFile ne NoSourceFile) && (f.isSynthetic || f.isParamAccessor) && {
+ // debuglog("Making not-private symbol out of synthetic: " + f)
- f setNotFlag Flags.PRIVATE
- true
- }
+ // f setNotFlag Flags.PRIVATE
+ // true
+ // }
if (!inc.m.hasCode || inc.isRecursive) { return false }
if (inc.m.symbol.hasFlag(Flags.SYNCHRONIZED)) { return false }
@@ -854,7 +861,7 @@ abstract class Inliners extends SubComponent {
def lookupIMethod(meth: Symbol, receiver: Symbol): Option[IMethod] = {
def tryParent(sym: Symbol) = icodes icode sym flatMap (_ lookupMethod meth)
- receiver.info.baseClasses.iterator map tryParent find (_.isDefined) flatten
+ (receiver.info.baseClasses.iterator map tryParent find (_.isDefined)).flatten
}
} /* class Inliner */
} /* class Inliners */
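
Several hunks above replace postfix operator syntax with an explicit, parenthesized call: `(m.locals diff accessedLocals).nonEmpty`, `(handler.iterator drop dropCount).toSeq`, and the parenthesized `.flatten` here. The reason is that since Scala 2.10 ending an expression with a bare method name draws a feature warning unless `scala.language.postfixOps` is imported, and because the parser may read the next line as an operand of a trailing identifier, the postfix form is fragile under semicolon inference. The snippet below (invented values) shows both spellings computing the same result.

import scala.language.postfixOps

object PostfixSketch {
  def main(args: Array[String]): Unit = {
    val locals   = List("a", "b", "c")
    val accessed = List("a", "c")

    // postfix style: needs the feature import and is easy to mis-parse when code follows on the next line
    val dead1 = locals diff accessed nonEmpty

    // explicit style, as used in the patch: no feature import needed, no parsing surprises
    val dead2 = (locals diff accessed).nonEmpty

    println(dead1 == dead2)   // true: both mean "the list of unaccessed locals is non-empty"
  }
}
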
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index f32564f097..76a8b87ba7 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -58,7 +58,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
case Right(sourceCode) =>
new compiler.Run() compileSources List(new BatchSourceFile("newSource", sourceCode))
}
-
+
if (reporter.hasErrors)
return None
@@ -80,6 +80,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val modelFactory = (
new { override val global: compiler.type = compiler }
with model.ModelFactory(compiler, settings)
+ with model.ModelFactoryImplicitSupport
with model.comment.CommentFactory
with model.TreeFactory {
override def templateShouldDocument(sym: compiler.Symbol) =
@@ -89,7 +90,8 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
modelFactory.makeModel match {
case Some(madeModel) =>
- println("model contains " + modelFactory.templatesCount + " documentable templates")
+ if (settings.reportModel)
+ println("model contains " + modelFactory.templatesCount + " documentable templates")
Some(madeModel)
case None =>
println("no documentable class found in compilation units")
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 45a2ad78b4..d3a1d47de8 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -8,6 +8,7 @@ package doc
import java.io.File
import java.lang.System
+import language.postfixOps
/** An extended version of compiler settings, with additional Scaladoc-specific options.
* @param error A function that prints a string to the appropriate error stream. */
@@ -87,6 +88,38 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
""
)
+ val docImplicits = BooleanSetting (
+ "-implicits",
+ "Document members inherited by implicit conversions."
+ )
+
+ val docImplicitsDebug = BooleanSetting (
+ "-implicits-debug",
+ "Show debugging information for members inherited by implicit conversions."
+ )
+
+ val docImplicitsShowAll = BooleanSetting (
+ "-implicits-show-all",
+ "Show members inherited by implicit conversions that are impossible in the default scope. " +
+ "(for example conversions that require Numeric[String] to be in scope)"
+ )
+
+ val docDiagrams = BooleanSetting (
+ "-diagrams",
+ "Create inheritance diagrams for classes, traits and packages."
+ )
+
+ val docDiagramsDebug = BooleanSetting (
+ "-diagrams-debug",
+ "Show debugging information for the diagram creation process."
+ )
+
+ val docDiagramsDotPath = PathSetting (
+ "-diagrams-dot-path",
+ "The path to the dot executable used to generate the inheritance diagrams. Ex: /usr/bin/dot",
+ "dot" // by default, just pick up the system-wide dot
+ )
+
// Somewhere slightly before r18708 scaladoc stopped building unless the
// self-type check was suppressed. I hijacked the slotted-for-removal-anyway
// suppress-vt-warnings option and renamed it for this purpose.
@@ -94,9 +127,105 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
// For improved help output.
def scaladocSpecific = Set[Settings#Setting](
- docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator
+ docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
+ docDiagrams, docDiagramsDebug, docDiagramsDotPath,
+ docImplicits, docImplicitsDebug, docImplicitsShowAll
)
val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
override def isScaladoc = true
+
+ // unset by the testsuite, we don't need to count the entities in the model
+ var reportModel = true
+
+ /**
+ * This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty,
+ * but ultimately scaladoc has to be useful. :)
+ */
+ object hardcoded {
+
+    /** The common context bounds and some human-readable explanations. Feel free to add more explanations:
+      *  `<root>.scala.package.Numeric` is the type class
+      *  `tparam` is the name of the type parameter it gets (this only describes type classes with 1 type param)
+      *  the function result should be a human-readable description of the type class
+ */
+ val knownTypeClasses: Map[String, String => String] = Map() +
+ ("<root>.scala.package.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
+ ("<root>.scala.package.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
+ ("<root>.scala.package.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
+ ("<root>.scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
+ ("<root>.scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
+ ("<root>.scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
+ ("<root>.scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
+ ("<root>.scala.reflect.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure")) +
+ ("<root>.scala.reflect.ConcreteTypeTag" -> ((tparam: String) => tparam + " is accompanied by an ConcreteTypeTag, which is a runtime representation of a concrete type that survives erasure"))
+
+ /**
+ * Set of classes to exclude from index and diagrams
+ * TODO: Should be configurable
+ */
+ def isExcluded(qname: String) = {
+ ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
+ qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
+ ) && !(
+ qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
+ qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
+ qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
+ qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
+ qname == "scala.runtime.AbstractFunction2"
+ )
+ )
+ }
+
+ /** Common conversion targets that affect any class in Scala */
+ val commonConversionTargets = List(
+ "scala.Predef.any2stringfmt",
+ "scala.Predef.any2stringadd",
+ "scala.Predef.any2ArrowAssoc",
+ "scala.Predef.any2Ensuring")
+
+    /** There's a reason all these are specialized by hand, but documenting each of them is beside the point */
+ val arraySkipConversions = List(
+ "scala.Predef.refArrayOps",
+ "scala.Predef.intArrayOps",
+ "scala.Predef.doubleArrayOps",
+ "scala.Predef.longArrayOps",
+ "scala.Predef.floatArrayOps",
+ "scala.Predef.charArrayOps",
+ "scala.Predef.byteArrayOps",
+ "scala.Predef.shortArrayOps",
+ "scala.Predef.booleanArrayOps",
+ "scala.Predef.unitArrayOps",
+ "scala.LowPriorityImplicits.wrapRefArray",
+ "scala.LowPriorityImplicits.wrapIntArray",
+ "scala.LowPriorityImplicits.wrapDoubleArray",
+ "scala.LowPriorityImplicits.wrapLongArray",
+ "scala.LowPriorityImplicits.wrapFloatArray",
+ "scala.LowPriorityImplicits.wrapCharArray",
+ "scala.LowPriorityImplicits.wrapByteArray",
+ "scala.LowPriorityImplicits.wrapShortArray",
+ "scala.LowPriorityImplicits.wrapBooleanArray",
+ "scala.LowPriorityImplicits.wrapUnitArray",
+ "scala.LowPriorityImplicits.genericWrapArray")
+
+    // included as names, since here we don't have access to a Global with Definitions :(
+ def valueClassList = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double")
+ def valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef")
+
+    /** Dirty, dirty, dirty hack: the value class conversions can all kick in -- and they are disambiguated by priority
+ * but showing priority in scaladoc would make no sense -- so we have to manually remove the conversions that we
+ * know will never get a chance to kick in. Anyway, DIRTY DIRTY DIRTY! */
+ def valueClassFilter(value: String, conversionName: String): Boolean = {
+ val valueName = value.toLowerCase
+ val otherValues = valueClassList.filterNot(_ == valueName)
+
+ for (prefix <- valueClassFilterPrefixes)
+ if (conversionName.startsWith(prefix))
+ for (otherValue <- otherValues)
+ if (conversionName.startsWith(prefix + "." + otherValue))
+ return false
+
+ true
+ }
+ }
}
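
The `valueClassFilter` hack documented above is easier to trust with a concrete check. The standalone sketch below copies the two lists from `hardcoded` and re-expresses the nested loops with `exists`, which is behaviourally equivalent; the object name and `main` driver are invented. It shows that a per-primitive conversion such as `Predef.doubleArrayOps` survives only on Double and is filtered out on every other primitive, while conversions not keyed to a primitive pass through untouched.

object ValueClassFilterSketch {
  val valueClassList           = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double")
  val valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef")

  /** Same decision as Settings.hardcoded.valueClassFilter, written with exists instead of nested loops. */
  def valueClassFilter(value: String, conversionName: String): Boolean = {
    val valueName   = value.toLowerCase
    val otherValues = valueClassList.filterNot(_ == valueName)
    !valueClassFilterPrefixes.exists(prefix =>
      conversionName.startsWith(prefix) &&
      otherValues.exists(other => conversionName.startsWith(prefix + "." + other)))
  }

  def main(args: Array[String]): Unit = {
    println(valueClassFilter("Double", "scala.Predef.doubleArrayOps"))  // true:  kept on Double
    println(valueClassFilter("Int",    "scala.Predef.doubleArrayOps"))  // false: hidden on Int
    println(valueClassFilter("Int",    "scala.Predef.any2stringadd"))   // true:  not a per-primitive conversion
  }
}
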
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
index 9b29ebd745..8f426a443d 100644
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
@@ -5,6 +5,8 @@
package scala.tools.nsc
package doc
+import language.implicitConversions
+import language.postfixOps
/** Some glue between DocParser (which reads source files which can't be compiled)
* and the scaladoc model.
@@ -14,7 +16,7 @@ trait Uncompilable {
val settings: Settings
import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment, NoSymbol }
- import global.definitions.RootClass
+ import global.definitions.{ RootClass, AnyRefClass }
private implicit def translateName(name: Global#Name) =
if (name.isTypeName) newTypeName("" + name) else newTermName("" + name)
@@ -32,7 +34,7 @@ trait Uncompilable {
}
def files = settings.uncompilableFiles
def symbols = pairs map (_._1)
- def templates = symbols filter (x => x.isClass || x.isTrait) toSet
+ def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet
def comments = {
if (settings.debug.value || settings.verbose.value)
inform("Found %d uncompilable files: %s".format(files.size, files mkString ", "))
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index 0116e02e0e..914824d523 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -71,6 +71,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"signaturebg.gif",
"signaturebg2.gif",
"typebg.gif",
+ "conversionbg.gif",
"valuemembersbg.gif",
"navigation-li-a.png",
@@ -80,6 +81,8 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"selected.png",
"selected2-right.png",
"selected2.png",
+ "selected-right-implicits.png",
+ "selected-implicits.png",
"unselected.png"
)
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index 1544dafc69..e3da8bddea 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -23,7 +23,7 @@ abstract class HtmlPage extends Page { thisPage =>
protected def title: String
/** The page description */
- protected def description: String =
+ protected def description: String =
// unless overwritten, will display the title in a spaced format, keeping - and .
title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ")
@@ -164,15 +164,15 @@ abstract class HtmlPage extends Page { thisPage =>
}
/** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
- def templateToHtml(tpl: TemplateEntity) = tpl match {
+ def templateToHtml(tpl: TemplateEntity, name: String = null) = tpl match {
case dTpl: DocTemplateEntity =>
if (hasPage(dTpl)) {
- <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ dTpl.name }</a>
+ <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ if (name eq null) dTpl.name else name }</a>
} else {
- xml.Text(dTpl.name)
+ xml.Text(if (name eq null) dTpl.name else name)
}
case ndTpl: NoDocTemplate =>
- xml.Text(ndTpl.name)
+ xml.Text(if (name eq null) ndTpl.name else name)
}
/** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. */
@@ -192,6 +192,6 @@ abstract class HtmlPage extends Page { thisPage =>
else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png"
else if (ety.isObject) "object_big.png"
else if (ety.isPackage) "package_big.png"
- else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
+ else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
index f67abc58da..3ff973ec66 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -41,13 +41,13 @@ private[html] object SyntaxHigh {
/** Standard library classes/objects, sorted alphabetically */
val standards = Array (
"Any", "AnyRef", "AnyVal", "App", "Application", "Array",
- "Boolean", "Byte", "Char", "Class", "Console", "Double",
- "Enumeration", "Float", "Function", "Int",
+ "Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest", "ConcreteTypeTag",
+ "Console", "Double", "Enumeration", "Float", "Function", "Int",
"List", "Long", "Manifest", "Map",
- "None", "Nothing", "Null", "Object", "Option",
+ "NoManifest", "None", "Nothing", "Null", "Object", "Option", "OptManifest",
"Pair", "Predef",
"Seq", "Set", "Short", "Some", "String", "Symbol",
- "Triple", "Unit")
+ "Triple", "TypeTag", "Unit")
def apply(data: String): NodeSeq = {
val buf = data.getBytes
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 346780147e..8ed13e0da2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -44,7 +44,7 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
</div>
{ browser }
<div id="content" class="ui-layout-center">
- <iframe name="template" src={ relativeLinkTo{List("package.html")} }/>
+ <iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
</div>
</body>
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index f059b5c0cb..66189a6854 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -10,6 +10,7 @@ package page
import model._
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
+import language.postfixOps
class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage {
@@ -69,8 +70,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, xml.Text(".")) }</p>
}
- <body class={ if (tpl.isTrait || tpl.isClass || tpl.qualifiedName == "scala.AnyRef") "type" else "value" }
- onload={ "sh_highlightDocument('../lib/', '.min.js');" }>
+ <body class={ if (tpl.isTrait || tpl.isClass || tpl.qualifiedName == "scala.AnyRef") "type" else "value" }>
<div id="definition">
{
tpl.companion match {
@@ -87,22 +87,43 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
{ memberToCommentHtml(tpl, true) }
<div id="mbrsel">
- <div id='textfilter'><span class='pre'/><span class='input'><input type='text' accesskey='/'/></span><span class='post'/></div>
- { if (tpl.linearizationTemplates.isEmpty) NodeSeq.Empty else
+ <div id='textfilter'><span class='pre'/><span class='input'><input id='mbrsel-input' type='text' accesskey='/'/></span><span class='post'/></div>
+ { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
<div id="order">
<span class="filtertype">Ordering</span>
<ol><li class="alpha in"><span>Alphabetic</span></li><li class="inherit out"><span>By inheritance</span></li></ol>
</div>
}
- { if (tpl.linearizationTemplates.isEmpty) NodeSeq.Empty else
- <div id="ancestors">
- <span class="filtertype">Inherited</span>
- <ol><li class="hideall out"><span>Hide All</span></li>
- <li class="showall in"><span>Show all</span></li></ol>
- <ol id="linearization">{
- (tpl :: tpl.linearizationTemplates) map { wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li> }
- }</ol>
- </div>
+ { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
+ {
+ if (!tpl.linearization.isEmpty)
+ <div id="ancestors">
+ <span class="filtertype">Inherited<br/>
+ </span>
+ <ol id="linearization">
+ { (tpl :: tpl.linearizationTemplates).map(wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li>) }
+ </ol>
+ </div>
+ else NodeSeq.Empty
+ } ++ {
+ if (!tpl.conversions.isEmpty)
+ <div id="ancestors">
+ <span class="filtertype">Implicitly<br/>
+ </span>
+ <ol id="implicits">
+ { tpl.conversions.map(conv => <li class="in" name={ conv.conversionQualifiedName }><span>{ "by " + conv.conversionShortName }</span></li>) }
+ </ol>
+ </div>
+ else NodeSeq.Empty
+ } ++
+ <div id="ancestors">
+ <span class="filtertype"></span>
+ <ol>
+ <li class="hideall out"><span>Hide All</span></li>
+ <li class="showall in"><span>Show all</span></li>
+ </ol>
+              <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
+ </div>
}
{
<div id="visbl">
@@ -152,23 +173,25 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<div id="inheritedMembers">
{
+ // linearization
NodeSeq fromSeq (for ((superTpl, superType) <- (tpl.linearizationTemplates zip tpl.linearizationTypes)) yield
<div class="parent" name={ superTpl.qualifiedName }>
<h3>Inherited from {
- if (tpl.universe.settings.useStupidTypes.value)
- superTpl match {
- case dtpl: DocTemplateEntity =>
- val sig = signature(dtpl, false, true) \ "_"
- sig
- case tpl: TemplateEntity =>
- tpl.name
- }
- else
- typeToHtml(superType, true)
+ typeToHtmlWithStupidTypes(tpl, superTpl, superType)
}</h3>
</div>
)
}
+ {
+ // implicitly inherited
+ NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield
+ <div class="conversion" name={ conversion.conversionQualifiedName }>
+ <h3>Inherited by implicit conversion { conversion.conversionShortName } from
+ { typeToHtml(tpl.resultType, true) } to { typeToHtml(conversion.targetType, true) }
+ </h3>
+ </div>
+ )
+ }
</div>
</div>
@@ -219,11 +242,12 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
case d:MemberEntity with Def => defParamsToString(d)
case _ => ""
}
+ val memberComment = memberToCommentHtml(mbr, false)
<li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
- data-isabs={ mbr.isAbstract.toString }>
+ data-isabs={ mbr.isAbstract.toString } fullComment={ if(memberComment.isEmpty) "no" else "yes" }>
<a id={ mbr.name +defParamsString +":"+ mbr.resultType.name}/>
{ signature(mbr, false) }
- { memberToCommentHtml(mbr, false) }
+ { memberComment }
</li>
}
@@ -275,6 +299,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<p class="comment cmt">{ inlineToHtml(mbr.comment.get.short) }</p>
def memberToCommentBodyHtml(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+
val memberComment =
if (mbr.comment.isEmpty) NodeSeq.Empty
else <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
@@ -291,7 +316,6 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
def mbrCmt = mbr.comment.get
def paramCommentToHtml(prs: List[ParameterEntity]): NodeSeq = prs match {
- case Nil => NodeSeq.Empty
case (tp: TypeParam) :: rest =>
val paramEntry: NodeSeq = {
@@ -304,6 +328,9 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<dt class="param">{ vp.name }</dt><dd class="cmt">{ bodyToHtml(mbrCmt.valueParams(vp.name)) }</dd>
}
paramEntry ++ paramCommentToHtml(rest)
+
+ case _ =>
+ NodeSeq.Empty
}
if (mbr.comment.isEmpty) NodeSeq.Empty
@@ -326,6 +353,45 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
}
+ val implicitInformation = mbr.byConversion match {
+ case Some(conv) =>
+ <dt class="implicit">Implicit information</dt> ++
+ {
+ val targetType = typeToHtml(conv.targetType, true)
+ val conversionMethod = conv.convertorMethod match {
+ case Left(member) => Text(member.name)
+ case Right(name) => Text(name)
+ }
+
+ // strip off the package object endings, they make things harder to follow
+          val conversionOwnerQualifiedName = conv.convertorOwner.qualifiedName.stripSuffix(".package")
+          val conversionOwner = templateToHtml(conv.convertorOwner, conversionOwnerQualifiedName)
+
+ val constraintText = conv.constraints match {
+ case Nil =>
+ NodeSeq.Empty
+ case List(constraint) =>
+ xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ xml.Text(".")
+ case List(constraint1, constraint2) =>
+ xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
+ xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ xml.Text(".")
+ case constraints =>
+ <br/> ++ "This conversion will take place only if all of the following constraints are met:" ++ <br/> ++ {
+ var index = 0
+ constraints map { constraint => xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
+ }
+ }
+
+ <dd>
+ This member is added by an implicit conversion from { typeToHtml(mbr.inTemplate.resultType, true) } to
+ { targetType } performed by method { conversionMethod } in { conversionOwner }.
+ { constraintText }
+ </dd>
+ }
+ case _ =>
+ NodeSeq.Empty
+ }
+
// --- start attributes block vals
val attributes: Seq[scala.xml.Node] = {
val fvs: List[comment.Paragraph] = visibility(mbr).toList
@@ -354,7 +420,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
</div>
case _ => NodeSeq.Empty
}
- }
+ }
val selfType: Seq[scala.xml.Node] = mbr match {
case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty && !isReduced) =>
@@ -447,7 +513,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<dt>See also</dt>
<dd>{
val seeXml:List[scala.xml.NodeSeq]=(for(see <- comment.see ) yield <span class="cmt">{bodyToHtml(see)}</span> )
- seeXml.reduceLeft(_ ++ Text(", ") ++ _)
+ seeXml.reduceLeft(_ ++ _)
}</dd>
} else NodeSeq.Empty
@@ -477,7 +543,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
// end attributes block vals ---
- val attributesInfo = attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
+ val attributesInfo = implicitInformation ++ attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
val attributesBlock =
if (attributesInfo.isEmpty)
NodeSeq.Empty
@@ -561,12 +627,13 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
</span>
<span class="symbol">
{
+ val nameClass = if (mbr.byConversion.isDefined) "implicit" else "name"
val nameHtml = {
val value = if (mbr.isConstructor) tpl.name else mbr.name
val span = if (mbr.deprecation.isDefined)
- <span class={"name deprecated"} title={"Deprecated: "+bodyToStr(mbr.deprecation.get)}>{ value }</span>
+ <span class={ nameClass + " deprecated"} title={"Deprecated: "+bodyToStr(mbr.deprecation.get)}>{ value }</span>
else
- <span class={"name"}>{ value }</span>
+ <span class={ nameClass }>{ value }</span>
val encoded = scala.reflect.NameTransformer.encode(value)
if (encoded != value) {
span % new UnprefixedAttribute("title",
@@ -765,4 +832,43 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
case _ => inl.toString
}
+ private def typeToHtmlWithStupidTypes(tpl: TemplateEntity, superTpl: TemplateEntity, superType: TypeEntity): NodeSeq =
+ if (tpl.universe.settings.useStupidTypes.value)
+ superTpl match {
+ case dtpl: DocTemplateEntity =>
+ val sig = signature(dtpl, false, true) \ "_"
+ sig
+ case tpl: TemplateEntity =>
+ Text(tpl.name)
+ }
+ else
+ typeToHtml(superType, true)
+
+ private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
+ case ktcc: KnownTypeClassConstraint =>
+ xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
+ templateToHtml(ktcc.typeClassEntity) ++ xml.Text(")")
+ case tcc: TypeClassConstraint =>
+ xml.Text(tcc.typeParamName + " is ") ++
+ <a href="http://stackoverflow.com/questions/2982276/what-is-a-context-bound-in-scala" target="_blank">
+ context-bounded</a> ++ xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
+ templateToHtml(tcc.typeClassEntity) ++ xml.Text(")")
+ case impl: ImplicitInScopeConstraint =>
+ xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ xml.Text(" is in scope")
+ case eq: EqualTypeParamConstraint =>
+ xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
+ typeToHtml(eq.rhs, true) ++ xml.Text(")")
+ case bt: BoundedTypeParamConstraint =>
+ xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
+ bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
+ typeToHtml(bt.lowerBound, true) ++ xml.Text(" <: ") ++
+ typeToHtml(bt.upperBound, true) ++ xml.Text(")")
+ case lb: LowerBoundedTypeParamConstraint =>
+ xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
+ typeToHtml(lb.lowerBound, true) ++ xml.Text(")")
+ case ub: UpperBoundedTypeParamConstraint =>
+ xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
+ typeToHtml(ub.upperBound, true) ++ xml.Text(")")
+ }
+
}
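
When a conversion carries three or more constraints, the block above renders them as a numbered list by threading a mutable `index` through a `map`. The sketch below isolates that numbering on plain Strings; the constraint texts are hand-written approximations of what `constraintToHtml` produces, and the object name is invented.

object ConstraintListSketch {
  /** Prefix "1. ", "2. ", ... to each constraint, mirroring the var-based counter used above. */
  def numbered(constraints: List[String]): List[String] = {
    var index = 0
    constraints map { c => { index += 1; index } + ". " + c }
  }

  def main(args: Array[String]): Unit =
    numbered(List(
      "T is context-bounded by scala.math.Numeric (T: Numeric)",
      "an implicit value of type Ordering[T] is in scope",
      "T is a subclass of AnyVal (T <: AnyVal)"
    )) foreach println
  // prints:
  // 1. T is context-bounded by scala.math.Numeric (T: Numeric)
  // 2. an implicit value of type Ordering[T] is in scope
  // 3. T is a subclass of AnyVal (T <: AnyVal)
}
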
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
new file mode 100644
index 0000000000..4be145d0af
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
index c6136c508e..2a8f9b570a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -1,23 +1,32 @@
* {
- color: inherit;
- font-size: 10pt;
- text-decoration: none;
+ color: inherit;
+ font-size: 10pt;
+ text-decoration: none;
font-family: Arial, sans-serif;
- border-width: 0px;
- padding: 0px;
- margin: 0px;
+ border-width: 0px;
+ padding: 0px;
+ margin: 0px;
}
a {
- cursor: pointer;
+ cursor: pointer;
}
a:hover {
- text-decoration: underline;
+ text-decoration: underline;
}
h1 {
- display: none;
+ display: none;
+}
+
+.selected {
+ -moz-box-shadow: inset 0px 5px 10px rgba(58, 88, 97, .36);
+ -webkit-box-shadow: inset 0px 5px 10px rgba(58, 88, 97, .36);
+ border-top: solid 1px rgba(119, 138, 153, 0.8);
+ border-bottom: solid 1px rgba(151, 173, 191, 0.4);
+ background-color: #ced2d9;
+ margin: -1px 0px;
}
/*.letters {
@@ -33,81 +42,81 @@ h1 {
}
#browser {
- top: 0px;
- left: 0px;
- bottom: 0px;
- width: 100%;
- display: block;
- position: fixed;
+ top: 0px;
+ left: 0px;
+ bottom: 0px;
+ width: 100%;
+ display: block;
+ position: fixed;
}
#filter {
- position: absolute;
- display: block;
-/* padding: 5px;*/
- right: 0;
- left: 0;
- top: 0;
- background-image:url('filterbg.gif');
- background-repeat:repeat-x;
- background-color: #ededee; /* light gray */
- /*background-color: #DADADA;*/
- border:1px solid #bbbbbb;
- border-top:0;
- border-left:0;
- border-right:0;
+ position: absolute;
+ display: block;
+/* padding: 5px;*/
+ right: 0;
+ left: 0;
+ top: 0;
+ background-image:url('filterbg.gif');
+ background-repeat:repeat-x;
+ background-color: #ededee; /* light gray */
+ /*background-color: #DADADA;*/
+ border:1px solid #bbbbbb;
+ border-top:0;
+ border-left:0;
+ border-right:0;
}
#textfilter {
- position: relative;
- display: block;
- height: 20px;
- margin-top: 5px;
- margin-bottom: 5px;
+ position: relative;
+ display: block;
+ height: 20px;
+ margin-top: 5px;
+ margin-bottom: 5px;
}
#textfilter > .pre {
- display: block;
- position: absolute;
- top: 0;
- left: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_left.png");
+ display: block;
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 23px;
+ width: 21px;
+ background: url("filter_box_left.png");
}
#textfilter > .input {
- display: block;
- position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 20px;
+ left: 20px;
}
#textfilter > .input > input {
- height: 20px;
- padding: 1px;
- font-weight: bold;
- color: #000000;
- background: #ffffff url("filterboxbarbg.png") repeat-x bottom left;
- width: 100%;
+ height: 20px;
+ padding: 1px;
+ font-weight: bold;
+ color: #000000;
+ background: #ffffff url("filterboxbarbg.png") repeat-x bottom left;
+ width: 100%;
}
#textfilter > .post {
- display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_right.png");
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 0;
+ height: 23px;
+ width: 21px;
+ background: url("filter_box_right.png");
}
/*#textfilter {
- position: relative;
- display: block;
+ position: relative;
+ display: block;
height: 20px;
- margin-bottom: 5px;
+ margin-bottom: 5px;
}
#textfilter > .pre {
@@ -121,7 +130,7 @@ h1 {
}
#textfilter > .input {
- display: block;
+ display: block;
position: absolute;
top: 0;
right: 20px;
@@ -129,11 +138,11 @@ h1 {
}
#textfilter > .input > input {
- height: 16px;
- padding: 2px;
- font-weight: bold;
- color: darkblue;
- background-color: white;
+ height: 16px;
+ padding: 2px;
+ font-weight: bold;
+ color: darkblue;
+ background-color: white;
width: 100%;
}
@@ -148,22 +157,22 @@ h1 {
}*/
#focusfilter {
- position: relative;
- text-align: center;
- display: block;
- padding: 5px;
- background-color: #fffebd; /* light yellow*/
- text-shadow: #ffffff 0 1px 0;
+ position: relative;
+ text-align: center;
+ display: block;
+ padding: 5px;
+ background-color: #fffebd; /* light yellow*/
+ text-shadow: #ffffff 0 1px 0;
}
#focusfilter .focuscoll {
- font-weight: bold;
- text-shadow: #ffffff 0 1px 0;
+ font-weight: bold;
+ text-shadow: #ffffff 0 1px 0;
}
#focusfilter img {
- bottom: -2px;
- position: relative;
+ bottom: -2px;
+ position: relative;
}
#kindfilter {
@@ -182,10 +191,9 @@ h1 {
}
#kindfilter > a:hover {
- color: #4C4C4C;
- text-decoration: none;
- text-shadow: #ffffff 0 1px 0;
-
+ color: #4C4C4C;
+ text-decoration: none;
+ text-shadow: #ffffff 0 1px 0;
}
#letters {
@@ -208,117 +216,117 @@ h1 {
}
#tpl {
- display: block;
- position: fixed;
- overflow: auto;
- right: 0;
- left: 0;
- bottom: 0;
- top: 5px;
- position: absolute;
- display: block;
+ display: block;
+ position: fixed;
+ overflow: auto;
+ right: 0;
+ left: 0;
+ bottom: 0;
+ top: 5px;
+ position: absolute;
+ display: block;
}
#tpl .packhide {
- display: block;
- float: right;
- font-weight: normal;
- color: white;
+ display: block;
+ float: right;
+ font-weight: normal;
+ color: white;
}
#tpl .packfocus {
- display: block;
- float: right;
- font-weight: normal;
- color: white;
+ display: block;
+ float: right;
+ font-weight: normal;
+ color: white;
}
#tpl .packages > ol {
- background-color: #dadfe6;
- /*margin-bottom: 5px;*/
+ background-color: #dadfe6;
+ /*margin-bottom: 5px;*/
}
/*#tpl .packages > ol > li {
- margin-bottom: 1px;
+ margin-bottom: 1px;
}*/
#tpl .packages > li > a {
- padding: 0px 5px;
+ padding: 0px 5px;
}
#tpl .packages > li > a.tplshow {
- display: block;
- color: white;
- font-weight: bold;
- display: block;
- text-shadow: #000000 0 1px 0;
+ display: block;
+ color: white;
+ font-weight: bold;
+ display: block;
+ text-shadow: #000000 0 1px 0;
}
#tpl ol > li.pack {
- padding: 3px 5px;
- background: url("packagesbg.gif");
- background-repeat:repeat-x;
- min-height: 14px;
- background-color: #6e808e;
+ padding: 3px 5px;
+ background: url("packagesbg.gif");
+ background-repeat:repeat-x;
+ min-height: 14px;
+ background-color: #6e808e;
}
#tpl ol > li {
- display: block;
+ display: block;
}
#tpl .templates > li {
- padding-left: 5px;
- min-height: 18px;
+ padding-left: 5px;
+ min-height: 18px;
}
#tpl ol > li .icon {
- padding-right: 5px;
- bottom: -2px;
- position: relative;
+ padding-right: 5px;
+ bottom: -2px;
+ position: relative;
}
#tpl .templates div.placeholder {
- padding-right: 5px;
- width: 13px;
- display: inline-block;
+ padding-right: 5px;
+ width: 13px;
+ display: inline-block;
}
#tpl .templates span.tplLink {
- padding-left: 5px;
+ padding-left: 5px;
}
#content {
- border-left-width: 1px;
- border-left-color: black;
- border-left-style: white;
- right: 0px;
- left: 0px;
- bottom: 0px;
- top: 0px;
- position: fixed;
- margin-left: 300px;
- display: block;
+ border-left-width: 1px;
+ border-left-color: black;
+ border-left-style: white;
+ right: 0px;
+ left: 0px;
+ bottom: 0px;
+ top: 0px;
+ position: fixed;
+ margin-left: 300px;
+ display: block;
}
#content > iframe {
- display: block;
- height: 100%;
- width: 100%;
+ display: block;
+ height: 100%;
+ width: 100%;
}
.ui-layout-pane {
- background: #FFF;
- overflow: auto;
+ background: #FFF;
+ overflow: auto;
}
.ui-layout-resizer {
- background-image:url('filterbg.gif');
- background-repeat:repeat-x;
- background-color: #ededee; /* light gray */
- border:1px solid #bbbbbb;
- border-top:0;
- border-bottom:0;
- border-left: 0;
+ background-image:url('filterbg.gif');
+ background-repeat:repeat-x;
+ background-color: #ededee; /* light gray */
+ border:1px solid #bbbbbb;
+ border-top:0;
+ border-bottom:0;
+ border-left: 0;
}
.ui-layout-toggler {
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
index e9ed7181e4..eb7f752440 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -15,10 +15,10 @@ var lastHash = "";
$(document).ready(function() {
$('body').layout({ west__size: '20%' });
- $('#browser').layout({
- center__paneSelector: ".ui-west-center"
+ $('#browser').layout({
+ center__paneSelector: ".ui-west-center"
//,center__initClosed:true
- ,north__paneSelector: ".ui-west-north"
+ ,north__paneSelector: ".ui-west-north"
});
$('iframe').bind("load", function(){
var subtitle = $(this).contents().find('title').text();
@@ -260,18 +260,95 @@ function prepareEntityList() {
.prepend("<a class='packfocus'>focus</a>");
}
+/* Handles all key presses while scrolling around with keyboard shortcuts in left panel */
+function keyboardScrolldownLeftPane() {
+ scheduler.add("init", function() {
+ $("#textfilter input").blur();
+ var $items = $("#tpl li");
+ $items.first().addClass('selected');
+
+ $(window).bind("keydown", function(e) {
+ var $old = $items.filter('.selected'),
+ $new;
+
+ switch ( e.keyCode ) {
+
+ case 9: // tab
+ $old.removeClass('selected');
+ break;
+
+ case 13: // enter
+ $old.removeClass('selected');
+ var $url = $old.children().filter('a:last').attr('href');
+ $("#template").attr("src",$url);
+ break;
+
+ case 27: // escape
+ $old.removeClass('selected');
+ $(window).unbind(e);
+ $("#textfilter input").focus();
+
+ break;
+
+ case 38: // up
+ $new = $old.prev();
+
+ if (!$new.length) {
+ $new = $old.parent().prev();
+ }
+
+ if ($new.is('ol') && $new.children(':last').is('ol')) {
+ $new = $new.children().children(':last');
+ } else if ($new.is('ol')) {
+ $new = $new.children(':last');
+ }
+
+ break;
+
+ case 40: // down
+ $new = $old.next();
+ if (!$new.length) {
+ $new = $old.parent().parent().next();
+ }
+ if ($new.is('ol')) {
+ $new = $new.children(':first');
+ }
+ break;
+ }
+
+ if ($new.is('li')) {
+ $old.removeClass('selected');
+ $new.addClass('selected');
+ } else if (e.keyCode == 38) {
+ $(window).unbind(e);
+ $("#textfilter input").focus();
+ }
+ });
+ });
+}
+
/* Configures the text filter */
function configureTextFilter() {
scheduler.add("init", function() {
- $("#filter").append("<div id='textfilter'><span class='pre'/><span class='input'><input type='text' accesskey='/'/></span><span class='post'/></div>");
+ $("#filter").append("<div id='textfilter'><span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/></div>");
printAlphabet();
var input = $("#textfilter input");
resizeFilterBlock();
- input.bind("keyup", function(event) {
+ input.bind("keydown", function(event) {
if (event.keyCode == 27) { // escape
input.attr("value", "");
}
- textFilter();
+ if (event.keyCode == 9) { // tab
+ $("#template").contents().find("#mbrsel-input").focus();
+ input.attr("value", "");
+ return false;
+ }
+ if (event.keyCode == 40) { // down arrow
+ $(window).unbind("keydown");
+ keyboardScrolldownLeftPane();
+ return false;
+ }
+ textFilter();
});
input.focus(function(event) { input.select(); });
});
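
Note on the hunk above: the new keyboardScrolldownLeftPane handler and the switch from keyup to keydown in configureTextFilter both dispatch on raw keyCode values (9 tab, 13 enter, 27 escape, 38 up, 40 down). The following standalone sketch, written against plain DOM APIs rather than jQuery, only illustrates that dispatch pattern; the selector names are taken from the patch, and the handler bodies are left as descriptive comments rather than the actual scaladoc logic.

// Hedged sketch of the keyCode dispatch used in the left-pane navigation.
// Assumes a browser environment; "#tpl li" and "#textfilter input" are the
// selectors from the patch, everything else is illustrative.
var KEY = { TAB: 9, ENTER: 13, ESCAPE: 27, UP: 38, DOWN: 40 };

window.addEventListener('keydown', function (e) {
  switch (e.keyCode) {
    case KEY.DOWN:   /* move the '.selected' class to the next "#tpl li" entry */ break;
    case KEY.UP:     /* move it to the previous entry, or fall back to "#textfilter input" at the top */ break;
    case KEY.ENTER:  /* load the selected entity's page into the template iframe */ break;
    case KEY.ESCAPE: /* drop the selection and refocus "#textfilter input" */ break;
    case KEY.TAB:    /* clear the selection and let focus move on */ break;
  }
});
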
@@ -454,7 +531,7 @@ function resizeFilterBlock() {
function printAlphabet() {
var html = '<a target="template" href="index/index-_.html">#</a>';
var c;
- for (c = 'a'; c < 'z'; c = String.fromCharCode(c.charCodeAt(0) + 1)) {
+ for (c = 'a'; c <= 'z'; c = String.fromCharCode(c.charCodeAt(0) + 1)) {
html += [
'<a target="template" href="index/index-',
c,
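
Note on the hunk above: the one-character change in printAlphabet fixes an off-by-one. With the old bound (c < 'z') the loop stops after emitting the link for 'y', so the index never gets a 'z' page link; c <= 'z' includes it. A minimal standalone sketch (not the scaladoc code itself) showing the two loop bounds:

// Runnable illustration of the loop-bound fix; 'inclusive' selects
// between the old (<) and new (<=) termination condition.
function letters(inclusive) {
  var out = [];
  for (var c = 'a'; inclusive ? c <= 'z' : c < 'z'; c = String.fromCharCode(c.charCodeAt(0) + 1)) {
    out.push(c);
  }
  return out;
}

console.log(letters(false).join('')); // "abcdefghijklmnopqrstuvwxy" -- 'z' missing
console.log(letters(true).join(''));  // "abcdefghijklmnopqrstuvwxyz"
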
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
index 7c24308023..16ad06c5ac 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
@@ -1,154 +1,4 @@
-/*!
- * jQuery JavaScript Library v1.4.2
- * http://jquery.com/
- *
- * Copyright 2010, John Resig
- * Dual licensed under the MIT or GPL Version 2 licenses.
- * http://jquery.org/license
- *
- * Includes Sizzle.js
- * http://sizzlejs.com/
- * Copyright 2010, The Dojo Foundation
- * Released under the MIT, BSD, and GPL Licenses.
- *
- * Date: Sat Feb 13 22:33:48 2010 -0500
- */
-(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j);return a}return i?
-e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget);n=0;for(r=
-j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g,"`").replace(/ /g,
-"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua.test(a[0]))){e=
-true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/,
-Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&&
-(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this,
-a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b===
-"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this,
-function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.isPlainObject(i)||
-c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMContentLoaded",
-L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"constructor")&&!aa.call(a.constructor.prototype,
-"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Function("return "+
-a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d)if(i)for(f in a){if(b.apply(a[f],
-d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++)if(b[d]===
-a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d[b];b=w}else if(b&&
-!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if(c.browser.webkit)c.browser.safari=
-true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
-var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected,
-parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent=
-false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n=
-s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true,
-applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando];
-else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this,
-a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===
-w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i,
-cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.className+" ",
-i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+b[i]+" ",
-" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=
-this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j<d;j++){var i=
-e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected=
-c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
-a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g,
-function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split(".");
-k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a),
-C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove.call(a,u)}if(f!=
-null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf("!")>=0){a.type=
-e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&&
-f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;
-if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
-fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
-d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.each(c.data(this,
-"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=
-a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented:Y,isPropagationStopped:Y,
-isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submitBubbles)c.event.special.submit=
-{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};
-if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",
-e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,
-"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a,
-d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&
-!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return a.result}},
-toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j||this.selector,
-u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),
-function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];
-if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
-e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift();
-t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D||
-g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q;if(!g)return[];
-for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.splice(1,1);if(D.substr(D.length-
-1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
-CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},
-relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m++)if(p=g[m])g[m]=
-l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){var l=[];
-h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},
-CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m,
-g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},
-text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},
-setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h=
-h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0;for(m=p.firstChild;m;m=
-m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m===
-"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g,
-h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocumentPosition||
-!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createRange(),m=
-h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&&
-q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML="<a href='#'></a>";
-if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}();
-(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}:
-function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Until$/,fb=/^(?:parents|prevUntil|prevAll)/,
-gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f++){d=b.length;
-c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j=
-{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a===
-"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",
-d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?
-a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType===
-1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b,d){return hb.test(d)?
-a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=
-c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},
-wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},
-prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,
-this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);
-return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja,
-""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&
-this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagName("tbody")[0]||
-u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childNodes.length===
-1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]);
-return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["",
-""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e=
-c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]?
-c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja=
-function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter=
-Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a,
-"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f=
-a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=
-a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!==
-"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this},
-serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),
-function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,
-global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&&
-e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)?
-"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache===
-false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B=
-false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since",
-c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E||
-d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x);
-g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===
-1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b===
-"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional;
-if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");
-this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!=="none"&&c.data(this[a],
-"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d)},
-animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){(j.specialEasing=
-j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);
-this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration===
-"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||
-c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;
-this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=
-this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem,
-e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||
-c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClientRect"in s.documentElement?
-function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:function(a){var b=
-this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.currentStyle;
-k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o.offsetLeft}if(c.offset.supportsFixedPosition&&
-f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
-a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b);
-c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a,
-d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top-
-f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset":
-"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in
-e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);
+/*! jQuery v1.7.2 jquery.com | jquery.org/license */
+(function(a,b){function cy(a){return f.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:!1}function cu(a){if(!cj[a]){var b=c.body,d=f("<"+a+">").appendTo(b),e=d.css("display");d.remove();if(e==="none"||e===""){ck||(ck=c.createElement("iframe"),ck.frameBorder=ck.width=ck.height=0),b.appendChild(ck);if(!cl||!ck.createElement)cl=(ck.contentWindow||ck.contentDocument).document,cl.write((f.support.boxModel?"<!doctype html>":"")+"<html><body>"),cl.close();d=cl.createElement(a),cl.body.appendChild(d),e=f.css(d,"display"),b.removeChild(ck)}cj[a]=e}return cj[a]}function ct(a,b){var c={};f.each(cp.concat.apply([],cp.slice(0,b)),function(){c[this]=a});return c}function cs(){cq=b}function cr(){setTimeout(cs,0);return cq=f.now()}function ci(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}function ch(){try{return new a.XMLHttpRequest}catch(b){}}function cb(a,c){a.dataFilter&&(c=a.dataFilter(c,a.dataType));var d=a.dataTypes,e={},g,h,i=d.length,j,k=d[0],l,m,n,o,p;for(g=1;g<i;g++){if(g===1)for(h in a.converters)typeof h=="string"&&(e[h.toLowerCase()]=a.converters[h]);l=k,k=d[g];if(k==="*")k=l;else if(l!=="*"&&l!==k){m=l+" "+k,n=e[m]||e["* "+k];if(!n){p=b;for(o in e){j=o.split(" ");if(j[0]===l||j[0]==="*"){p=e[j[1]+" "+k];if(p){o=e[o],o===!0?n=p:p===!0&&(n=o);break}}}}!n&&!p&&f.error("No conversion from "+m.replace(" "," to ")),n!==!0&&(c=n?n(c):p(o(c)))}}return c}function ca(a,c,d){var e=a.contents,f=a.dataTypes,g=a.responseFields,h,i,j,k;for(i in g)i in d&&(c[g[i]]=d[i]);while(f[0]==="*")f.shift(),h===b&&(h=a.mimeType||c.getResponseHeader("content-type"));if(h)for(i in e)if(e[i]&&e[i].test(h)){f.unshift(i);break}if(f[0]in d)j=f[0];else{for(i in d){if(!f[0]||a.converters[i+" "+f[0]]){j=i;break}k||(k=i)}j=j||k}if(j){j!==f[0]&&f.unshift(j);return d[j]}}function b_(a,b,c,d){if(f.isArray(b))f.each(b,function(b,e){c||bD.test(a)?d(a,e):b_(a+"["+(typeof e=="object"?b:"")+"]",e,c,d)});else if(!c&&f.type(b)==="object")for(var e in b)b_(a+"["+e+"]",b[e],c,d);else d(a,b)}function b$(a,c){var d,e,g=f.ajaxSettings.flatOptions||{};for(d in c)c[d]!==b&&((g[d]?a:e||(e={}))[d]=c[d]);e&&f.extend(!0,a,e)}function bZ(a,c,d,e,f,g){f=f||c.dataTypes[0],g=g||{},g[f]=!0;var h=a[f],i=0,j=h?h.length:0,k=a===bS,l;for(;i<j&&(k||!l);i++)l=h[i](c,d,e),typeof l=="string"&&(!k||g[l]?l=b:(c.dataTypes.unshift(l),l=bZ(a,c,d,e,l,g)));(k||!l)&&!g["*"]&&(l=bZ(a,c,d,e,"*",g));return l}function bY(a){return function(b,c){typeof b!="string"&&(c=b,b="*");if(f.isFunction(c)){var d=b.toLowerCase().split(bO),e=0,g=d.length,h,i,j;for(;e<g;e++)h=d[e],j=/^\+/.test(h),j&&(h=h.substr(1)||"*"),i=a[h]=a[h]||[],i[j?"unshift":"push"](c)}}}function bB(a,b,c){var d=b==="width"?a.offsetWidth:a.offsetHeight,e=b==="width"?1:0,g=4;if(d>0){if(c!=="border")for(;e<g;e+=2)c||(d-=parseFloat(f.css(a,"padding"+bx[e]))||0),c==="margin"?d+=parseFloat(f.css(a,c+bx[e]))||0:d-=parseFloat(f.css(a,"border"+bx[e]+"Width"))||0;return d+"px"}d=by(a,b);if(d<0||d==null)d=a.style[b];if(bt.test(d))return d;d=parseFloat(d)||0;if(c)for(;e<g;e+=2)d+=parseFloat(f.css(a,"padding"+bx[e]))||0,c!=="padding"&&(d+=parseFloat(f.css(a,"border"+bx[e]+"Width"))||0),c==="margin"&&(d+=parseFloat(f.css(a,c+bx[e]))||0);return d+"px"}function bo(a){var b=c.createElement("div");bh.appendChild(b),b.innerHTML=a.outerHTML;return b.firstChild}function bn(a){var b=(a.nodeName||"").toLowerCase();b==="input"?bm(a):b!=="script"&&typeof a.getElementsByTagName!="undefined"&&f.grep(a.getElementsByTagName("input"),bm)}function 
bm(a){if(a.type==="checkbox"||a.type==="radio")a.defaultChecked=a.checked}function bl(a){return typeof a.getElementsByTagName!="undefined"?a.getElementsByTagName("*"):typeof a.querySelectorAll!="undefined"?a.querySelectorAll("*"):[]}function bk(a,b){var c;b.nodeType===1&&(b.clearAttributes&&b.clearAttributes(),b.mergeAttributes&&b.mergeAttributes(a),c=b.nodeName.toLowerCase(),c==="object"?b.outerHTML=a.outerHTML:c!=="input"||a.type!=="checkbox"&&a.type!=="radio"?c==="option"?b.selected=a.defaultSelected:c==="input"||c==="textarea"?b.defaultValue=a.defaultValue:c==="script"&&b.text!==a.text&&(b.text=a.text):(a.checked&&(b.defaultChecked=b.checked=a.checked),b.value!==a.value&&(b.value=a.value)),b.removeAttribute(f.expando),b.removeAttribute("_submit_attached"),b.removeAttribute("_change_attached"))}function bj(a,b){if(b.nodeType===1&&!!f.hasData(a)){var c,d,e,g=f._data(a),h=f._data(b,g),i=g.events;if(i){delete h.handle,h.events={};for(c in i)for(d=0,e=i[c].length;d<e;d++)f.event.add(b,c,i[c][d])}h.data&&(h.data=f.extend({},h.data))}}function bi(a,b){return f.nodeName(a,"table")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function U(a){var b=V.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return c}function T(a,b,c){b=b||0;if(f.isFunction(b))return f.grep(a,function(a,d){var e=!!b.call(a,d,a);return e===c});if(b.nodeType)return f.grep(a,function(a,d){return a===b===c});if(typeof b=="string"){var d=f.grep(a,function(a){return a.nodeType===1});if(O.test(b))return f.filter(b,d,!c);b=f.filter(b,d)}return f.grep(a,function(a,d){return f.inArray(a,b)>=0===c})}function S(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function K(){return!0}function J(){return!1}function n(a,b,c){var d=b+"defer",e=b+"queue",g=b+"mark",h=f._data(a,d);h&&(c==="queue"||!f._data(a,e))&&(c==="mark"||!f._data(a,g))&&setTimeout(function(){!f._data(a,e)&&!f._data(a,g)&&(f.removeData(a,d,!0),h.fire())},0)}function m(a){for(var b in a){if(b==="data"&&f.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function l(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(k,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:f.isNumeric(d)?+d:j.test(d)?f.parseJSON(d):d}catch(g){}f.data(a,c,d)}else d=b}return d}function h(a){var b=g[a]={},c,d;a=a.split(/\s+/);for(c=0,d=a.length;c<d;c++)b[a[c]]=!0;return b}var c=a.document,d=a.navigator,e=a.location,f=function(){function J(){if(!e.isReady){try{c.documentElement.doScroll("left")}catch(a){setTimeout(J,1);return}e.ready()}}var e=function(a,b){return new e.fn.init(a,b,h)},f=a.jQuery,g=a.$,h,i=/^(?:[^#<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/,j=/\S/,k=/^\s+/,l=/\s+$/,m=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,n=/^[\],:{}\s]*$/,o=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,p=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,q=/(?:^|:|,)(?:\s*\[)+/g,r=/(webkit)[ \/]([\w.]+)/,s=/(opera)(?:.*version)?[ \/]([\w.]+)/,t=/(msie) ([\w.]+)/,u=/(mozilla)(?:.*? 
rv:([\w.]+))?/,v=/-([a-z]|[0-9])/ig,w=/^-ms-/,x=function(a,b){return(b+"").toUpperCase()},y=d.userAgent,z,A,B,C=Object.prototype.toString,D=Object.prototype.hasOwnProperty,E=Array.prototype.push,F=Array.prototype.slice,G=String.prototype.trim,H=Array.prototype.indexOf,I={};e.fn=e.prototype={constructor:e,init:function(a,d,f){var g,h,j,k;if(!a)return this;if(a.nodeType){this.context=this[0]=a,this.length=1;return this}if(a==="body"&&!d&&c.body){this.context=c,this[0]=c.body,this.selector=a,this.length=1;return this}if(typeof a=="string"){a.charAt(0)!=="<"||a.charAt(a.length-1)!==">"||a.length<3?g=i.exec(a):g=[null,a,null];if(g&&(g[1]||!d)){if(g[1]){d=d instanceof e?d[0]:d,k=d?d.ownerDocument||d:c,j=m.exec(a),j?e.isPlainObject(d)?(a=[c.createElement(j[1])],e.fn.attr.call(a,d,!0)):a=[k.createElement(j[1])]:(j=e.buildFragment([g[1]],[k]),a=(j.cacheable?e.clone(j.fragment):j.fragment).childNodes);return e.merge(this,a)}h=c.getElementById(g[2]);if(h&&h.parentNode){if(h.id!==g[2])return f.find(a);this.length=1,this[0]=h}this.context=c,this.selector=a;return this}return!d||d.jquery?(d||f).find(a):this.constructor(d).find(a)}if(e.isFunction(a))return f.ready(a);a.selector!==b&&(this.selector=a.selector,this.context=a.context);return e.makeArray(a,this)},selector:"",jquery:"1.7.2",length:0,size:function(){return this.length},toArray:function(){return F.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var d=this.constructor();e.isArray(a)?E.apply(d,a):e.merge(d,a),d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")");return d},each:function(a,b){return e.each(this,a,b)},ready:function(a){e.bindReady(),A.add(a);return this},eq:function(a){a=+a;return a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(F.apply(this,arguments),"slice",F.call(arguments).join(","))},map:function(a){return this.pushStack(e.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:E,sort:[].sort,splice:[].splice},e.fn.init.prototype=e.fn,e.extend=e.fn.extend=function(){var a,c,d,f,g,h,i=arguments[0]||{},j=1,k=arguments.length,l=!1;typeof i=="boolean"&&(l=i,i=arguments[1]||{},j=2),typeof i!="object"&&!e.isFunction(i)&&(i={}),k===j&&(i=this,--j);for(;j<k;j++)if((a=arguments[j])!=null)for(c in a){d=i[c],f=a[c];if(i===f)continue;l&&f&&(e.isPlainObject(f)||(g=e.isArray(f)))?(g?(g=!1,h=d&&e.isArray(d)?d:[]):h=d&&e.isPlainObject(d)?d:{},i[c]=e.extend(l,h,f)):f!==b&&(i[c]=f)}return i},e.extend({noConflict:function(b){a.$===e&&(a.$=g),b&&a.jQuery===e&&(a.jQuery=f);return e},isReady:!1,readyWait:1,holdReady:function(a){a?e.readyWait++:e.ready(!0)},ready:function(a){if(a===!0&&!--e.readyWait||a!==!0&&!e.isReady){if(!c.body)return setTimeout(e.ready,1);e.isReady=!0;if(a!==!0&&--e.readyWait>0)return;A.fireWith(c,[e]),e.fn.trigger&&e(c).trigger("ready").off("ready")}},bindReady:function(){if(!A){A=e.Callbacks("once memory");if(c.readyState==="complete")return setTimeout(e.ready,1);if(c.addEventListener)c.addEventListener("DOMContentLoaded",B,!1),a.addEventListener("load",e.ready,!1);else if(c.attachEvent){c.attachEvent("onreadystatechange",B),a.attachEvent("onload",e.ready);var b=!1;try{b=a.frameElement==null}catch(d){}c.documentElement.doScroll&&b&&J()}}},isFunction:function(a){return 
e.type(a)==="function"},isArray:Array.isArray||function(a){return e.type(a)==="array"},isWindow:function(a){return a!=null&&a==a.window},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):I[C.call(a)]||"object"},isPlainObject:function(a){if(!a||e.type(a)!=="object"||a.nodeType||e.isWindow(a))return!1;try{if(a.constructor&&!D.call(a,"constructor")&&!D.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return d===b||D.call(a,d)},isEmptyObject:function(a){for(var b in a)return!1;return!0},error:function(a){throw new Error(a)},parseJSON:function(b){if(typeof b!="string"||!b)return null;b=e.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(n.test(b.replace(o,"@").replace(p,"]").replace(q,"")))return(new Function("return "+b))();e.error("Invalid JSON: "+b)},parseXML:function(c){if(typeof c!="string"||!c)return null;var d,f;try{a.DOMParser?(f=new DOMParser,d=f.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(g){d=b}(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&e.error("Invalid XML: "+c);return d},noop:function(){},globalEval:function(b){b&&j.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(w,"ms-").replace(v,x)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,c,d){var f,g=0,h=a.length,i=h===b||e.isFunction(a);if(d){if(i){for(f in a)if(c.apply(a[f],d)===!1)break}else for(;g<h;)if(c.apply(a[g++],d)===!1)break}else if(i){for(f in a)if(c.call(a[f],f,a[f])===!1)break}else for(;g<h;)if(c.call(a[g],g,a[g++])===!1)break;return a},trim:G?function(a){return a==null?"":G.call(a)}:function(a){return a==null?"":(a+"").replace(k,"").replace(l,"")},makeArray:function(a,b){var c=b||[];if(a!=null){var d=e.type(a);a.length==null||d==="string"||d==="function"||d==="regexp"||e.isWindow(a)?E.call(c,a):e.merge(c,a)}return c},inArray:function(a,b,c){var d;if(b){if(H)return H.call(b,a,c);d=b.length,c=c?c<0?Math.max(0,d+c):c:0;for(;c<d;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,c){var d=a.length,e=0;if(typeof c.length=="number")for(var f=c.length;e<f;e++)a[d++]=c[e];else while(c[e]!==b)a[d++]=c[e++];a.length=d;return a},grep:function(a,b,c){var d=[],e;c=!!c;for(var f=0,g=a.length;f<g;f++)e=!!b(a[f],f),c!==e&&d.push(a[f]);return d},map:function(a,c,d){var f,g,h=[],i=0,j=a.length,k=a instanceof e||j!==b&&typeof j=="number"&&(j>0&&a[0]&&a[j-1]||j===0||e.isArray(a));if(k)for(;i<j;i++)f=c(a[i],i,d),f!=null&&(h[h.length]=f);else for(g in a)f=c(a[g],g,d),f!=null&&(h[h.length]=f);return h.concat.apply([],h)},guid:1,proxy:function(a,c){if(typeof c=="string"){var d=a[c];c=a,a=d}if(!e.isFunction(a))return b;var f=F.call(arguments,2),g=function(){return a.apply(c,f.concat(F.call(arguments)))};g.guid=a.guid=a.guid||g.guid||e.guid++;return g},access:function(a,c,d,f,g,h,i){var j,k=d==null,l=0,m=a.length;if(d&&typeof d=="object"){for(l in d)e.access(a,c,l,d[l],1,h,f);g=1}else if(f!==b){j=i===b&&e.isFunction(f),k&&(j?(j=c,c=function(a,b,c){return j.call(e(a),c)}):(c.call(a,f),c=null));if(c)for(;l<m;l++)c(a[l],d,j?f.call(a[l],l,c(a[l],d)):f,i);g=1}return g?a:k?c.call(a):m?c(a[0],d):h},now:function(){return(new Date).getTime()},uaMatch:function(a){a=a.toLowerCase();var b=r.exec(a)||s.exec(a)||t.exec(a)||a.indexOf("compatible")<0&&u.exec(a)||[];return{browser:b[1]||"",version:b[2]||"0"}},sub:function(){function a(b,c){return new 
a.fn.init(b,c)}e.extend(!0,a,this),a.superclass=this,a.fn=a.prototype=this(),a.fn.constructor=a,a.sub=this.sub,a.fn.init=function(d,f){f&&f instanceof e&&!(f instanceof a)&&(f=a(f));return e.fn.init.call(this,d,f,b)},a.fn.init.prototype=a.fn;var b=a(c);return a},browser:{}}),e.each("Boolean Number String Function Array Date RegExp Object".split(" "),function(a,b){I["[object "+b+"]"]=b.toLowerCase()}),z=e.uaMatch(y),z.browser&&(e.browser[z.browser]=!0,e.browser.version=z.version),e.browser.webkit&&(e.browser.safari=!0),j.test(" ")&&(k=/^[\s\xA0]+/,l=/[\s\xA0]+$/),h=e(c),c.addEventListener?B=function(){c.removeEventListener("DOMContentLoaded",B,!1),e.ready()}:c.attachEvent&&(B=function(){c.readyState==="complete"&&(c.detachEvent("onreadystatechange",B),e.ready())});return e}(),g={};f.Callbacks=function(a){a=a?g[a]||h(a):{};var c=[],d=[],e,i,j,k,l,m,n=function(b){var d,e,g,h,i;for(d=0,e=b.length;d<e;d++)g=b[d],h=f.type(g),h==="array"?n(g):h==="function"&&(!a.unique||!p.has(g))&&c.push(g)},o=function(b,f){f=f||[],e=!a.memory||[b,f],i=!0,j=!0,m=k||0,k=0,l=c.length;for(;c&&m<l;m++)if(c[m].apply(b,f)===!1&&a.stopOnFalse){e=!0;break}j=!1,c&&(a.once?e===!0?p.disable():c=[]:d&&d.length&&(e=d.shift(),p.fireWith(e[0],e[1])))},p={add:function(){if(c){var a=c.length;n(arguments),j?l=c.length:e&&e!==!0&&(k=a,o(e[0],e[1]))}return this},remove:function(){if(c){var b=arguments,d=0,e=b.length;for(;d<e;d++)for(var f=0;f<c.length;f++)if(b[d]===c[f]){j&&f<=l&&(l--,f<=m&&m--),c.splice(f--,1);if(a.unique)break}}return this},has:function(a){if(c){var b=0,d=c.length;for(;b<d;b++)if(a===c[b])return!0}return!1},empty:function(){c=[];return this},disable:function(){c=d=e=b;return this},disabled:function(){return!c},lock:function(){d=b,(!e||e===!0)&&p.disable();return this},locked:function(){return!d},fireWith:function(b,c){d&&(j?a.once||d.push([b,c]):(!a.once||!e)&&o(b,c));return this},fire:function(){p.fireWith(this,arguments);return this},fired:function(){return!!i}};return p};var i=[].slice;f.extend({Deferred:function(a){var b=f.Callbacks("once memory"),c=f.Callbacks("once memory"),d=f.Callbacks("memory"),e="pending",g={resolve:b,reject:c,notify:d},h={done:b.add,fail:c.add,progress:d.add,state:function(){return e},isResolved:b.fired,isRejected:c.fired,then:function(a,b,c){i.done(a).fail(b).progress(c);return this},always:function(){i.done.apply(i,arguments).fail.apply(i,arguments);return this},pipe:function(a,b,c){return f.Deferred(function(d){f.each({done:[a,"resolve"],fail:[b,"reject"],progress:[c,"notify"]},function(a,b){var c=b[0],e=b[1],g;f.isFunction(c)?i[a](function(){g=c.apply(this,arguments),g&&f.isFunction(g.promise)?g.promise().then(d.resolve,d.reject,d.notify):d[e+"With"](this===i?d:this,[g])}):i[a](d[e])})}).promise()},promise:function(a){if(a==null)a=h;else for(var b in h)a[b]=h[b];return a}},i=h.promise({}),j;for(j in g)i[j]=g[j].fire,i[j+"With"]=g[j].fireWith;i.done(function(){e="resolved"},c.disable,d.lock).fail(function(){e="rejected"},b.disable,d.lock),a&&a.call(i,i);return i},when:function(a){function m(a){return function(b){e[a]=arguments.length>1?i.call(arguments,0):b,j.notifyWith(k,e)}}function l(a){return function(c){b[a]=arguments.length>1?i.call(arguments,0):c,--g||j.resolveWith(j,b)}}var b=i.call(arguments,0),c=0,d=b.length,e=Array(d),g=d,h=d,j=d<=1&&a&&f.isFunction(a.promise)?a:f.Deferred(),k=j.promise();if(d>1){for(;c<d;c++)b[c]&&b[c].promise&&f.isFunction(b[c].promise)?b[c].promise().then(l(c),j.reject,m(c)):--g;g||j.resolveWith(j,b)}else j!==a&&j.resolveWith(j,d?[a]:[]);return 
k}}),f.support=function(){var b,d,e,g,h,i,j,k,l,m,n,o,p=c.createElement("div"),q=c.documentElement;p.setAttribute("className","t"),p.innerHTML=" <link/><table></table><a href='/a' style='top:1px;float:left;opacity:.55;'>a</a><input type='checkbox'/>",d=p.getElementsByTagName("*"),e=p.getElementsByTagName("a")[0];if(!d||!d.length||!e)return{};g=c.createElement("select"),h=g.appendChild(c.createElement("option")),i=p.getElementsByTagName("input")[0],b={leadingWhitespace:p.firstChild.nodeType===3,tbody:!p.getElementsByTagName("tbody").length,htmlSerialize:!!p.getElementsByTagName("link").length,style:/top/.test(e.getAttribute("style")),hrefNormalized:e.getAttribute("href")==="/a",opacity:/^0.55/.test(e.style.opacity),cssFloat:!!e.style.cssFloat,checkOn:i.value==="on",optSelected:h.selected,getSetAttribute:p.className!=="t",enctype:!!c.createElement("form").enctype,html5Clone:c.createElement("nav").cloneNode(!0).outerHTML!=="<:nav></:nav>",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,pixelMargin:!0},f.boxModel=b.boxModel=c.compatMode==="CSS1Compat",i.checked=!0,b.noCloneChecked=i.cloneNode(!0).checked,g.disabled=!0,b.optDisabled=!h.disabled;try{delete p.test}catch(r){b.deleteExpando=!1}!p.addEventListener&&p.attachEvent&&p.fireEvent&&(p.attachEvent("onclick",function(){b.noCloneEvent=!1}),p.cloneNode(!0).fireEvent("onclick")),i=c.createElement("input"),i.value="t",i.setAttribute("type","radio"),b.radioValue=i.value==="t",i.setAttribute("checked","checked"),i.setAttribute("name","t"),p.appendChild(i),j=c.createDocumentFragment(),j.appendChild(p.lastChild),b.checkClone=j.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=i.checked,j.removeChild(i),j.appendChild(p);if(p.attachEvent)for(n in{submit:1,change:1,focusin:1})m="on"+n,o=m in p,o||(p.setAttribute(m,"return;"),o=typeof p[m]=="function"),b[n+"Bubbles"]=o;j.removeChild(p),j=g=h=p=i=null,f(function(){var d,e,g,h,i,j,l,m,n,q,r,s,t,u=c.getElementsByTagName("body")[0];!u||(m=1,t="padding:0;margin:0;border:",r="position:absolute;top:0;left:0;width:1px;height:1px;",s=t+"0;visibility:hidden;",n="style='"+r+t+"5px solid #000;",q="<div "+n+"display:block;'><div style='"+t+"0;display:block;overflow:hidden;'></div></div>"+"<table "+n+"' cellpadding='0' cellspacing='0'>"+"<tr><td></td></tr></table>",d=c.createElement("div"),d.style.cssText=s+"width:0;height:0;position:static;top:0;margin-top:"+m+"px",u.insertBefore(d,u.firstChild),p=c.createElement("div"),d.appendChild(p),p.innerHTML="<table><tr><td style='"+t+"0;display:none'></td><td>t</td></tr></table>",k=p.getElementsByTagName("td"),o=k[0].offsetHeight===0,k[0].style.display="",k[1].style.display="none",b.reliableHiddenOffsets=o&&k[0].offsetHeight===0,a.getComputedStyle&&(p.innerHTML="",l=c.createElement("div"),l.style.width="0",l.style.marginRight="0",p.style.width="2px",p.appendChild(l),b.reliableMarginRight=(parseInt((a.getComputedStyle(l,null)||{marginRight:0}).marginRight,10)||0)===0),typeof p.style.zoom!="undefined"&&(p.innerHTML="",p.style.width=p.style.padding="1px",p.style.border=0,p.style.overflow="hidden",p.style.display="inline",p.style.zoom=1,b.inlineBlockNeedsLayout=p.offsetWidth===3,p.style.display="block",p.style.overflow="visible",p.innerHTML="<div 
style='width:5px;'></div>",b.shrinkWrapBlocks=p.offsetWidth!==3),p.style.cssText=r+s,p.innerHTML=q,e=p.firstChild,g=e.firstChild,i=e.nextSibling.firstChild.firstChild,j={doesNotAddBorder:g.offsetTop!==5,doesAddBorderForTableAndCells:i.offsetTop===5},g.style.position="fixed",g.style.top="20px",j.fixedPosition=g.offsetTop===20||g.offsetTop===15,g.style.position=g.style.top="",e.style.overflow="hidden",e.style.position="relative",j.subtractsBorderForOverflowNotVisible=g.offsetTop===-5,j.doesNotIncludeMarginInBodyOffset=u.offsetTop!==m,a.getComputedStyle&&(p.style.marginTop="1%",b.pixelMargin=(a.getComputedStyle(p,null)||{marginTop:0}).marginTop!=="1%"),typeof d.style.zoom!="undefined"&&(d.style.zoom=1),u.removeChild(d),l=p=d=null,f.extend(b,j))});return b}();var j=/^(?:\{.*\}|\[.*\])$/,k=/([A-Z])/g;f.extend({cache:{},uuid:0,expando:"jQuery"+(f.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){a=a.nodeType?f.cache[a[f.expando]]:a[f.expando];return!!a&&!m(a)},data:function(a,c,d,e){if(!!f.acceptData(a)){var g,h,i,j=f.expando,k=typeof c=="string",l=a.nodeType,m=l?f.cache:a,n=l?a[j]:a[j]&&j,o=c==="events";if((!n||!m[n]||!o&&!e&&!m[n].data)&&k&&d===b)return;n||(l?a[j]=n=++f.uuid:n=j),m[n]||(m[n]={},l||(m[n].toJSON=f.noop));if(typeof c=="object"||typeof c=="function")e?m[n]=f.extend(m[n],c):m[n].data=f.extend(m[n].data,c);g=h=m[n],e||(h.data||(h.data={}),h=h.data),d!==b&&(h[f.camelCase(c)]=d);if(o&&!h[c])return g.events;k?(i=h[c],i==null&&(i=h[f.camelCase(c)])):i=h;return i}},removeData:function(a,b,c){if(!!f.acceptData(a)){var d,e,g,h=f.expando,i=a.nodeType,j=i?f.cache:a,k=i?a[h]:h;if(!j[k])return;if(b){d=c?j[k]:j[k].data;if(d){f.isArray(b)||(b in d?b=[b]:(b=f.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,g=b.length;e<g;e++)delete d[b[e]];if(!(c?m:f.isEmptyObject)(d))return}}if(!c){delete j[k].data;if(!m(j[k]))return}f.support.deleteExpando||!j.setInterval?delete j[k]:j[k]=null,i&&(f.support.deleteExpando?delete a[h]:a.removeAttribute?a.removeAttribute(h):a[h]=null)}},_data:function(a,b,c){return f.data(a,b,c,!0)},acceptData:function(a){if(a.nodeName){var b=f.noData[a.nodeName.toLowerCase()];if(b)return b!==!0&&a.getAttribute("classid")===b}return!0}}),f.fn.extend({data:function(a,c){var d,e,g,h,i,j=this[0],k=0,m=null;if(a===b){if(this.length){m=f.data(j);if(j.nodeType===1&&!f._data(j,"parsedAttrs")){g=j.attributes;for(i=g.length;k<i;k++)h=g[k].name,h.indexOf("data-")===0&&(h=f.camelCase(h.substring(5)),l(j,h,m[h]));f._data(j,"parsedAttrs",!0)}}return m}if(typeof a=="object")return this.each(function(){f.data(this,a)});d=a.split(".",2),d[1]=d[1]?"."+d[1]:"",e=d[1]+"!";return f.access(this,function(c){if(c===b){m=this.triggerHandler("getData"+e,[d[0]]),m===b&&j&&(m=f.data(j,a),m=l(j,a,m));return m===b&&d[1]?this.data(d[0]):m}d[1]=c,this.each(function(){var b=f(this);b.triggerHandler("setData"+e,d),f.data(this,a,c),b.triggerHandler("changeData"+e,d)})},null,c,arguments.length>1,null,!1)},removeData:function(a){return this.each(function(){f.removeData(this,a)})}}),f.extend({_mark:function(a,b){a&&(b=(b||"fx")+"mark",f._data(a,b,(f._data(a,b)||0)+1))},_unmark:function(a,b,c){a!==!0&&(c=b,b=a,a=!1);if(b){c=c||"fx";var d=c+"mark",e=a?0:(f._data(b,d)||1)-1;e?f._data(b,d,e):(f.removeData(b,d,!0),n(b,c,"mark"))}},queue:function(a,b,c){var d;if(a){b=(b||"fx")+"queue",d=f._data(a,b),c&&(!d||f.isArray(c)?d=f._data(a,b,f.makeArray(c)):d.push(c));return d||[]}},dequeue:function(a,b){b=b||"fx";var 
c=f.queue(a,b),d=c.shift(),e={};d==="inprogress"&&(d=c.shift()),d&&(b==="fx"&&c.unshift("inprogress"),f._data(a,b+".run",e),d.call(a,function(){f.dequeue(a,b)},e)),c.length||(f.removeData(a,b+"queue "+b+".run",!0),n(a,b,"queue"))}}),f.fn.extend({queue:function(a,c){var d=2;typeof a!="string"&&(c=a,a="fx",d--);if(arguments.length<d)return f.queue(this[0],a);return c===b?this:this.each(function(){var b=f.queue(this,a,c);a==="fx"&&b[0]!=="inprogress"&&f.dequeue(this,a)})},dequeue:function(a){return this.each(function(){f.dequeue(this,a)})},delay:function(a,b){a=f.fx?f.fx.speeds[a]||a:a,b=b||"fx";return this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,c){function m(){--h||d.resolveWith(e,[e])}typeof a!="string"&&(c=a,a=b),a=a||"fx";var d=f.Deferred(),e=this,g=e.length,h=1,i=a+"defer",j=a+"queue",k=a+"mark",l;while(g--)if(l=f.data(e[g],i,b,!0)||(f.data(e[g],j,b,!0)||f.data(e[g],k,b,!0))&&f.data(e[g],i,f.Callbacks("once memory"),!0))h++,l.add(m);m();return d.promise(c)}});var o=/[\n\t\r]/g,p=/\s+/,q=/\r/g,r=/^(?:button|input)$/i,s=/^(?:button|input|object|select|textarea)$/i,t=/^a(?:rea)?$/i,u=/^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i,v=f.support.getSetAttribute,w,x,y;f.fn.extend({attr:function(a,b){return f.access(this,f.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){f.removeAttr(this,a)})},prop:function(a,b){return f.access(this,f.prop,a,b,arguments.length>1)},removeProp:function(a){a=f.propFix[a]||a;return this.each(function(){try{this[a]=b,delete this[a]}catch(c){}})},addClass:function(a){var b,c,d,e,g,h,i;if(f.isFunction(a))return this.each(function(b){f(this).addClass(a.call(this,b,this.className))});if(a&&typeof a=="string"){b=a.split(p);for(c=0,d=this.length;c<d;c++){e=this[c];if(e.nodeType===1)if(!e.className&&b.length===1)e.className=a;else{g=" "+e.className+" ";for(h=0,i=b.length;h<i;h++)~g.indexOf(" "+b[h]+" ")||(g+=b[h]+" ");e.className=f.trim(g)}}}return this},removeClass:function(a){var c,d,e,g,h,i,j;if(f.isFunction(a))return this.each(function(b){f(this).removeClass(a.call(this,b,this.className))});if(a&&typeof a=="string"||a===b){c=(a||"").split(p);for(d=0,e=this.length;d<e;d++){g=this[d];if(g.nodeType===1&&g.className)if(a){h=(" "+g.className+" ").replace(o," ");for(i=0,j=c.length;i<j;i++)h=h.replace(" "+c[i]+" "," ");g.className=f.trim(h)}else g.className=""}}return this},toggleClass:function(a,b){var c=typeof a,d=typeof b=="boolean";if(f.isFunction(a))return this.each(function(c){f(this).toggleClass(a.call(this,c,this.className,b),b)});return this.each(function(){if(c==="string"){var e,g=0,h=f(this),i=b,j=a.split(p);while(e=j[g++])i=d?i:!h.hasClass(e),h[i?"addClass":"removeClass"](e)}else if(c==="undefined"||c==="boolean")this.className&&f._data(this,"__className__",this.className),this.className=this.className||a===!1?"":f._data(this,"__className__")||""})},hasClass:function(a){var b=" "+a+" ",c=0,d=this.length;for(;c<d;c++)if(this[c].nodeType===1&&(" "+this[c].className+" ").replace(o," ").indexOf(b)>-1)return!0;return!1},val:function(a){var c,d,e,g=this[0];{if(!!arguments.length){e=f.isFunction(a);return this.each(function(d){var g=f(this),h;if(this.nodeType===1){e?h=a.call(this,d,g.val()):h=a,h==null?h="":typeof h=="number"?h+="":f.isArray(h)&&(h=f.map(h,function(a){return 
a==null?"":a+""})),c=f.valHooks[this.type]||f.valHooks[this.nodeName.toLowerCase()];if(!c||!("set"in c)||c.set(this,h,"value")===b)this.value=h}})}if(g){c=f.valHooks[g.type]||f.valHooks[g.nodeName.toLowerCase()];if(c&&"get"in c&&(d=c.get(g,"value"))!==b)return d;d=g.value;return typeof d=="string"?d.replace(q,""):d==null?"":d}}}}),f.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,g=a.selectedIndex,h=[],i=a.options,j=a.type==="select-one";if(g<0)return null;c=j?g:0,d=j?g+1:i.length;for(;c<d;c++){e=i[c];if(e.selected&&(f.support.optDisabled?!e.disabled:e.getAttribute("disabled")===null)&&(!e.parentNode.disabled||!f.nodeName(e.parentNode,"optgroup"))){b=f(e).val();if(j)return b;h.push(b)}}if(j&&!h.length&&i.length)return f(i[g]).val();return h},set:function(a,b){var c=f.makeArray(b);f(a).find("option").each(function(){this.selected=f.inArray(f(this).val(),c)>=0}),c.length||(a.selectedIndex=-1);return c}}},attrFn:{val:!0,css:!0,html:!0,text:!0,data:!0,width:!0,height:!0,offset:!0},attr:function(a,c,d,e){var g,h,i,j=a.nodeType;if(!!a&&j!==3&&j!==8&&j!==2){if(e&&c in f.attrFn)return f(a)[c](d);if(typeof a.getAttribute=="undefined")return f.prop(a,c,d);i=j!==1||!f.isXMLDoc(a),i&&(c=c.toLowerCase(),h=f.attrHooks[c]||(u.test(c)?x:w));if(d!==b){if(d===null){f.removeAttr(a,c);return}if(h&&"set"in h&&i&&(g=h.set(a,d,c))!==b)return g;a.setAttribute(c,""+d);return d}if(h&&"get"in h&&i&&(g=h.get(a,c))!==null)return g;g=a.getAttribute(c);return g===null?b:g}},removeAttr:function(a,b){var c,d,e,g,h,i=0;if(b&&a.nodeType===1){d=b.toLowerCase().split(p),g=d.length;for(;i<g;i++)e=d[i],e&&(c=f.propFix[e]||e,h=u.test(e),h||f.attr(a,e,""),a.removeAttribute(v?e:c),h&&c in a&&(a[c]=!1))}},attrHooks:{type:{set:function(a,b){if(r.test(a.nodeName)&&a.parentNode)f.error("type property can't be changed");else if(!f.support.radioValue&&b==="radio"&&f.nodeName(a,"input")){var c=a.value;a.setAttribute("type",b),c&&(a.value=c);return b}}},value:{get:function(a,b){if(w&&f.nodeName(a,"button"))return w.get(a,b);return b in a?a.value:null},set:function(a,b,c){if(w&&f.nodeName(a,"button"))return w.set(a,b,c);a.value=b}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(a,c,d){var e,g,h,i=a.nodeType;if(!!a&&i!==3&&i!==8&&i!==2){h=i!==1||!f.isXMLDoc(a),h&&(c=f.propFix[c]||c,g=f.propHooks[c]);return d!==b?g&&"set"in g&&(e=g.set(a,d,c))!==b?e:a[c]=d:g&&"get"in g&&(e=g.get(a,c))!==null?e:a[c]}},propHooks:{tabIndex:{get:function(a){var c=a.getAttributeNode("tabindex");return c&&c.specified?parseInt(c.value,10):s.test(a.nodeName)||t.test(a.nodeName)&&a.href?0:b}}}}),f.attrHooks.tabindex=f.propHooks.tabIndex,x={get:function(a,c){var d,e=f.prop(a,c);return e===!0||typeof e!="boolean"&&(d=a.getAttributeNode(c))&&d.nodeValue!==!1?c.toLowerCase():b},set:function(a,b,c){var d;b===!1?f.removeAttr(a,c):(d=f.propFix[c]||c,d in a&&(a[d]=!0),a.setAttribute(c,c.toLowerCase()));return c}},v||(y={name:!0,id:!0,coords:!0},w=f.valHooks.button={get:function(a,c){var d;d=a.getAttributeNode(c);return d&&(y[c]?d.nodeValue!=="":d.specified)?d.nodeValue:b},set:function(a,b,d){var e=a.getAttributeNode(d);e||(e=c.createAttribute(d),a.setAttributeNode(e));return 
e.nodeValue=b+""}},f.attrHooks.tabindex.set=w.set,f.each(["width","height"],function(a,b){f.attrHooks[b]=f.extend(f.attrHooks[b],{set:function(a,c){if(c===""){a.setAttribute(b,"auto");return c}}})}),f.attrHooks.contenteditable={get:w.get,set:function(a,b,c){b===""&&(b="false"),w.set(a,b,c)}}),f.support.hrefNormalized||f.each(["href","src","width","height"],function(a,c){f.attrHooks[c]=f.extend(f.attrHooks[c],{get:function(a){var d=a.getAttribute(c,2);return d===null?b:d}})}),f.support.style||(f.attrHooks.style={get:function(a){return a.style.cssText.toLowerCase()||b},set:function(a,b){return a.style.cssText=""+b}}),f.support.optSelected||(f.propHooks.selected=f.extend(f.propHooks.selected,{get:function(a){var b=a.parentNode;b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex);return null}})),f.support.enctype||(f.propFix.enctype="encoding"),f.support.checkOn||f.each(["radio","checkbox"],function(){f.valHooks[this]={get:function(a){return a.getAttribute("value")===null?"on":a.value}}}),f.each(["radio","checkbox"],function(){f.valHooks[this]=f.extend(f.valHooks[this],{set:function(a,b){if(f.isArray(b))return a.checked=f.inArray(f(a).val(),b)>=0}})});var z=/^(?:textarea|input|select)$/i,A=/^([^\.]*)?(?:\.(.+))?$/,B=/(?:^|\s)hover(\.\S+)?\b/,C=/^key/,D=/^(?:mouse|contextmenu)|click/,E=/^(?:focusinfocus|focusoutblur)$/,F=/^(\w*)(?:#([\w\-]+))?(?:\.([\w\-]+))?$/,G=function(
+a){var b=F.exec(a);b&&(b[1]=(b[1]||"").toLowerCase(),b[3]=b[3]&&new RegExp("(?:^|\\s)"+b[3]+"(?:\\s|$)"));return b},H=function(a,b){var c=a.attributes||{};return(!b[1]||a.nodeName.toLowerCase()===b[1])&&(!b[2]||(c.id||{}).value===b[2])&&(!b[3]||b[3].test((c["class"]||{}).value))},I=function(a){return f.event.special.hover?a:a.replace(B,"mouseenter$1 mouseleave$1")};f.event={add:function(a,c,d,e,g){var h,i,j,k,l,m,n,o,p,q,r,s;if(!(a.nodeType===3||a.nodeType===8||!c||!d||!(h=f._data(a)))){d.handler&&(p=d,d=p.handler,g=p.selector),d.guid||(d.guid=f.guid++),j=h.events,j||(h.events=j={}),i=h.handle,i||(h.handle=i=function(a){return typeof f!="undefined"&&(!a||f.event.triggered!==a.type)?f.event.dispatch.apply(i.elem,arguments):b},i.elem=a),c=f.trim(I(c)).split(" ");for(k=0;k<c.length;k++){l=A.exec(c[k])||[],m=l[1],n=(l[2]||"").split(".").sort(),s=f.event.special[m]||{},m=(g?s.delegateType:s.bindType)||m,s=f.event.special[m]||{},o=f.extend({type:m,origType:l[1],data:e,handler:d,guid:d.guid,selector:g,quick:g&&G(g),namespace:n.join(".")},p),r=j[m];if(!r){r=j[m]=[],r.delegateCount=0;if(!s.setup||s.setup.call(a,e,n,i)===!1)a.addEventListener?a.addEventListener(m,i,!1):a.attachEvent&&a.attachEvent("on"+m,i)}s.add&&(s.add.call(a,o),o.handler.guid||(o.handler.guid=d.guid)),g?r.splice(r.delegateCount++,0,o):r.push(o),f.event.global[m]=!0}a=null}},global:{},remove:function(a,b,c,d,e){var g=f.hasData(a)&&f._data(a),h,i,j,k,l,m,n,o,p,q,r,s;if(!!g&&!!(o=g.events)){b=f.trim(I(b||"")).split(" ");for(h=0;h<b.length;h++){i=A.exec(b[h])||[],j=k=i[1],l=i[2];if(!j){for(j in o)f.event.remove(a,j+b[h],c,d,!0);continue}p=f.event.special[j]||{},j=(d?p.delegateType:p.bindType)||j,r=o[j]||[],m=r.length,l=l?new RegExp("(^|\\.)"+l.split(".").sort().join("\\.(?:.*\\.)?")+"(\\.|$)"):null;for(n=0;n<r.length;n++)s=r[n],(e||k===s.origType)&&(!c||c.guid===s.guid)&&(!l||l.test(s.namespace))&&(!d||d===s.selector||d==="**"&&s.selector)&&(r.splice(n--,1),s.selector&&r.delegateCount--,p.remove&&p.remove.call(a,s));r.length===0&&m!==r.length&&((!p.teardown||p.teardown.call(a,l)===!1)&&f.removeEvent(a,j,g.handle),delete o[j])}f.isEmptyObject(o)&&(q=g.handle,q&&(q.elem=null),f.removeData(a,["events","handle"],!0))}},customEvent:{getData:!0,setData:!0,changeData:!0},trigger:function(c,d,e,g){if(!e||e.nodeType!==3&&e.nodeType!==8){var h=c.type||c,i=[],j,k,l,m,n,o,p,q,r,s;if(E.test(h+f.event.triggered))return;h.indexOf("!")>=0&&(h=h.slice(0,-1),k=!0),h.indexOf(".")>=0&&(i=h.split("."),h=i.shift(),i.sort());if((!e||f.event.customEvent[h])&&!f.event.global[h])return;c=typeof c=="object"?c[f.expando]?c:new f.Event(h,c):new f.Event(h),c.type=h,c.isTrigger=!0,c.exclusive=k,c.namespace=i.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+i.join("\\.(?:.*\\.)?")+"(\\.|$)"):null,o=h.indexOf(":")<0?"on"+h:"";if(!e){j=f.cache;for(l in 
j)j[l].events&&j[l].events[h]&&f.event.trigger(c,d,j[l].handle.elem,!0);return}c.result=b,c.target||(c.target=e),d=d!=null?f.makeArray(d):[],d.unshift(c),p=f.event.special[h]||{};if(p.trigger&&p.trigger.apply(e,d)===!1)return;r=[[e,p.bindType||h]];if(!g&&!p.noBubble&&!f.isWindow(e)){s=p.delegateType||h,m=E.test(s+h)?e:e.parentNode,n=null;for(;m;m=m.parentNode)r.push([m,s]),n=m;n&&n===e.ownerDocument&&r.push([n.defaultView||n.parentWindow||a,s])}for(l=0;l<r.length&&!c.isPropagationStopped();l++)m=r[l][0],c.type=r[l][1],q=(f._data(m,"events")||{})[c.type]&&f._data(m,"handle"),q&&q.apply(m,d),q=o&&m[o],q&&f.acceptData(m)&&q.apply(m,d)===!1&&c.preventDefault();c.type=h,!g&&!c.isDefaultPrevented()&&(!p._default||p._default.apply(e.ownerDocument,d)===!1)&&(h!=="click"||!f.nodeName(e,"a"))&&f.acceptData(e)&&o&&e[h]&&(h!=="focus"&&h!=="blur"||c.target.offsetWidth!==0)&&!f.isWindow(e)&&(n=e[o],n&&(e[o]=null),f.event.triggered=h,e[h](),f.event.triggered=b,n&&(e[o]=n));return c.result}},dispatch:function(c){c=f.event.fix(c||a.event);var d=(f._data(this,"events")||{})[c.type]||[],e=d.delegateCount,g=[].slice.call(arguments,0),h=!c.exclusive&&!c.namespace,i=f.event.special[c.type]||{},j=[],k,l,m,n,o,p,q,r,s,t,u;g[0]=c,c.delegateTarget=this;if(!i.preDispatch||i.preDispatch.call(this,c)!==!1){if(e&&(!c.button||c.type!=="click")){n=f(this),n.context=this.ownerDocument||this;for(m=c.target;m!=this;m=m.parentNode||this)if(m.disabled!==!0){p={},r=[],n[0]=m;for(k=0;k<e;k++)s=d[k],t=s.selector,p[t]===b&&(p[t]=s.quick?H(m,s.quick):n.is(t)),p[t]&&r.push(s);r.length&&j.push({elem:m,matches:r})}}d.length>e&&j.push({elem:this,matches:d.slice(e)});for(k=0;k<j.length&&!c.isPropagationStopped();k++){q=j[k],c.currentTarget=q.elem;for(l=0;l<q.matches.length&&!c.isImmediatePropagationStopped();l++){s=q.matches[l];if(h||!c.namespace&&!s.namespace||c.namespace_re&&c.namespace_re.test(s.namespace))c.data=s.data,c.handleObj=s,o=((f.event.special[s.origType]||{}).handle||s.handler).apply(q.elem,g),o!==b&&(c.result=o,o===!1&&(c.preventDefault(),c.stopPropagation()))}}i.postDispatch&&i.postDispatch.call(this,c);return c.result}},props:"attrChange attrName relatedNode srcElement altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){a.which==null&&(a.which=b.charCode!=null?b.charCode:b.keyCode);return a}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,d){var e,f,g,h=d.button,i=d.fromElement;a.pageX==null&&d.clientX!=null&&(e=a.target.ownerDocument||c,f=e.documentElement,g=e.body,a.pageX=d.clientX+(f&&f.scrollLeft||g&&g.scrollLeft||0)-(f&&f.clientLeft||g&&g.clientLeft||0),a.pageY=d.clientY+(f&&f.scrollTop||g&&g.scrollTop||0)-(f&&f.clientTop||g&&g.clientTop||0)),!a.relatedTarget&&i&&(a.relatedTarget=i===a.target?d.toElement:i),!a.which&&h!==b&&(a.which=h&1?1:h&2?3:h&4?2:0);return a}},fix:function(a){if(a[f.expando])return a;var d,e,g=a,h=f.event.fixHooks[a.type]||{},i=h.props?this.props.concat(h.props):this.props;a=f.Event(g);for(d=i.length;d;)e=i[--d],a[e]=g[e];a.target||(a.target=g.srcElement||c),a.target.nodeType===3&&(a.target=a.target.parentNode),a.metaKey===b&&(a.metaKey=a.ctrlKey);return 
h.filter?h.filter(a,g):a},special:{ready:{setup:f.bindReady},load:{noBubble:!0},focus:{delegateType:"focusin"},blur:{delegateType:"focusout"},beforeunload:{setup:function(a,b,c){f.isWindow(this)&&(this.onbeforeunload=c)},teardown:function(a,b){this.onbeforeunload===b&&(this.onbeforeunload=null)}}},simulate:function(a,b,c,d){var e=f.extend(new f.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?f.event.trigger(e,null,b):f.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},f.event.handle=f.event.dispatch,f.removeEvent=c.removeEventListener?function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)}:function(a,b,c){a.detachEvent&&a.detachEvent("on"+b,c)},f.Event=function(a,b){if(!(this instanceof f.Event))return new f.Event(a,b);a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||a.returnValue===!1||a.getPreventDefault&&a.getPreventDefault()?K:J):this.type=a,b&&f.extend(this,b),this.timeStamp=a&&a.timeStamp||f.now(),this[f.expando]=!0},f.Event.prototype={preventDefault:function(){this.isDefaultPrevented=K;var a=this.originalEvent;!a||(a.preventDefault?a.preventDefault():a.returnValue=!1)},stopPropagation:function(){this.isPropagationStopped=K;var a=this.originalEvent;!a||(a.stopPropagation&&a.stopPropagation(),a.cancelBubble=!0)},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=K,this.stopPropagation()},isDefaultPrevented:J,isPropagationStopped:J,isImmediatePropagationStopped:J},f.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){f.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c=this,d=a.relatedTarget,e=a.handleObj,g=e.selector,h;if(!d||d!==c&&!f.contains(c,d))a.type=e.origType,h=e.handler.apply(this,arguments),a.type=b;return h}}}),f.support.submitBubbles||(f.event.special.submit={setup:function(){if(f.nodeName(this,"form"))return!1;f.event.add(this,"click._submit keypress._submit",function(a){var c=a.target,d=f.nodeName(c,"input")||f.nodeName(c,"button")?c.form:b;d&&!d._submit_attached&&(f.event.add(d,"submit._submit",function(a){a._submit_bubble=!0}),d._submit_attached=!0)})},postDispatch:function(a){a._submit_bubble&&(delete a._submit_bubble,this.parentNode&&!a.isTrigger&&f.event.simulate("submit",this.parentNode,a,!0))},teardown:function(){if(f.nodeName(this,"form"))return!1;f.event.remove(this,"._submit")}}),f.support.changeBubbles||(f.event.special.change={setup:function(){if(z.test(this.nodeName)){if(this.type==="checkbox"||this.type==="radio")f.event.add(this,"propertychange._change",function(a){a.originalEvent.propertyName==="checked"&&(this._just_changed=!0)}),f.event.add(this,"click._change",function(a){this._just_changed&&!a.isTrigger&&(this._just_changed=!1,f.event.simulate("change",this,a,!0))});return!1}f.event.add(this,"beforeactivate._change",function(a){var b=a.target;z.test(b.nodeName)&&!b._change_attached&&(f.event.add(b,"change._change",function(a){this.parentNode&&!a.isSimulated&&!a.isTrigger&&f.event.simulate("change",this.parentNode,a,!0)}),b._change_attached=!0)})},handle:function(a){var b=a.target;if(this!==b||a.isSimulated||a.isTrigger||b.type!=="radio"&&b.type!=="checkbox")return a.handleObj.handler.apply(this,arguments)},teardown:function(){f.event.remove(this,"._change");return z.test(this.nodeName)}}),f.support.focusinBubbles||f.each({focus:"focusin",blur:"focusout"},function(a,b){var 
d=0,e=function(a){f.event.simulate(b,a.target,f.event.fix(a),!0)};f.event.special[b]={setup:function(){d++===0&&c.addEventListener(a,e,!0)},teardown:function(){--d===0&&c.removeEventListener(a,e,!0)}}}),f.fn.extend({on:function(a,c,d,e,g){var h,i;if(typeof a=="object"){typeof c!="string"&&(d=d||c,c=b);for(i in a)this.on(i,c,d,a[i],g);return this}d==null&&e==null?(e=c,d=c=b):e==null&&(typeof c=="string"?(e=d,d=b):(e=d,d=c,c=b));if(e===!1)e=J;else if(!e)return this;g===1&&(h=e,e=function(a){f().off(a);return h.apply(this,arguments)},e.guid=h.guid||(h.guid=f.guid++));return this.each(function(){f.event.add(this,a,e,d,c)})},one:function(a,b,c,d){return this.on(a,b,c,d,1)},off:function(a,c,d){if(a&&a.preventDefault&&a.handleObj){var e=a.handleObj;f(a.delegateTarget).off(e.namespace?e.origType+"."+e.namespace:e.origType,e.selector,e.handler);return this}if(typeof a=="object"){for(var g in a)this.off(g,c,a[g]);return this}if(c===!1||typeof c=="function")d=c,c=b;d===!1&&(d=J);return this.each(function(){f.event.remove(this,a,d,c)})},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},live:function(a,b,c){f(this.context).on(a,this.selector,b,c);return this},die:function(a,b){f(this.context).off(a,this.selector||"**",b);return this},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return arguments.length==1?this.off(a,"**"):this.off(b,a,c)},trigger:function(a,b){return this.each(function(){f.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0])return f.event.trigger(a,b,this[0],!0)},toggle:function(a){var b=arguments,c=a.guid||f.guid++,d=0,e=function(c){var e=(f._data(this,"lastToggle"+a.guid)||0)%d;f._data(this,"lastToggle"+a.guid,e+1),c.preventDefault();return b[e].apply(this,arguments)||!1};e.guid=c;while(d<b.length)b[d++].guid=c;return this.click(e)},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}}),f.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){f.fn[b]=function(a,c){c==null&&(c=a,a=null);return arguments.length>0?this.on(b,null,a,c):this.trigger(b)},f.attrFn&&(f.attrFn[b]=!0),C.test(b)&&(f.event.fixHooks[b]=f.event.keyHooks),D.test(b)&&(f.event.fixHooks[b]=f.event.mouseHooks)}),function(){function x(a,b,c,e,f,g){for(var h=0,i=e.length;h<i;h++){var j=e[h];if(j){var k=!1;j=j[a];while(j){if(j[d]===c){k=e[j.sizset];break}if(j.nodeType===1){g||(j[d]=c,j.sizset=h);if(typeof b!="string"){if(j===b){k=!0;break}}else if(m.filter(b,[j]).length>0){k=j;break}}j=j[a]}e[h]=k}}}function w(a,b,c,e,f,g){for(var h=0,i=e.length;h<i;h++){var j=e[h];if(j){var k=!1;j=j[a];while(j){if(j[d]===c){k=e[j.sizset];break}j.nodeType===1&&!g&&(j[d]=c,j.sizset=h);if(j.nodeName.toLowerCase()===b){k=j;break}j=j[a]}e[h]=k}}}var a=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,d="sizcache"+(Math.random()+"").replace(".",""),e=0,g=Object.prototype.toString,h=!1,i=!0,j=/\\/g,k=/\r\n/g,l=/\W/;[0,0].sort(function(){i=!1;return 0});var m=function(b,d,e,f){e=e||[],d=d||c;var h=d;if(d.nodeType!==1&&d.nodeType!==9)return[];if(!b||typeof b!="string")return e;var 
i,j,k,l,n,q,r,t,u=!0,v=m.isXML(d),w=[],x=b;do{a.exec(""),i=a.exec(x);if(i){x=i[3],w.push(i[1]);if(i[2]){l=i[3];break}}}while(i);if(w.length>1&&p.exec(b))if(w.length===2&&o.relative[w[0]])j=y(w[0]+w[1],d,f);else{j=o.relative[w[0]]?[d]:m(w.shift(),d);while(w.length)b=w.shift(),o.relative[b]&&(b+=w.shift()),j=y(b,j,f)}else{!f&&w.length>1&&d.nodeType===9&&!v&&o.match.ID.test(w[0])&&!o.match.ID.test(w[w.length-1])&&(n=m.find(w.shift(),d,v),d=n.expr?m.filter(n.expr,n.set)[0]:n.set[0]);if(d){n=f?{expr:w.pop(),set:s(f)}:m.find(w.pop(),w.length===1&&(w[0]==="~"||w[0]==="+")&&d.parentNode?d.parentNode:d,v),j=n.expr?m.filter(n.expr,n.set):n.set,w.length>0?k=s(j):u=!1;while(w.length)q=w.pop(),r=q,o.relative[q]?r=w.pop():q="",r==null&&(r=d),o.relative[q](k,r,v)}else k=w=[]}k||(k=j),k||m.error(q||b);if(g.call(k)==="[object Array]")if(!u)e.push.apply(e,k);else if(d&&d.nodeType===1)for(t=0;k[t]!=null;t++)k[t]&&(k[t]===!0||k[t].nodeType===1&&m.contains(d,k[t]))&&e.push(j[t]);else for(t=0;k[t]!=null;t++)k[t]&&k[t].nodeType===1&&e.push(j[t]);else s(k,e);l&&(m(l,h,e,f),m.uniqueSort(e));return e};m.uniqueSort=function(a){if(u){h=i,a.sort(u);if(h)for(var b=1;b<a.length;b++)a[b]===a[b-1]&&a.splice(b--,1)}return a},m.matches=function(a,b){return m(a,null,null,b)},m.matchesSelector=function(a,b){return m(b,null,null,[a]).length>0},m.find=function(a,b,c){var d,e,f,g,h,i;if(!a)return[];for(e=0,f=o.order.length;e<f;e++){h=o.order[e];if(g=o.leftMatch[h].exec(a)){i=g[1],g.splice(1,1);if(i.substr(i.length-1)!=="\\"){g[1]=(g[1]||"").replace(j,""),d=o.find[h](g,b,c);if(d!=null){a=a.replace(o.match[h],"");break}}}}d||(d=typeof b.getElementsByTagName!="undefined"?b.getElementsByTagName("*"):[]);return{set:d,expr:a}},m.filter=function(a,c,d,e){var f,g,h,i,j,k,l,n,p,q=a,r=[],s=c,t=c&&c[0]&&m.isXML(c[0]);while(a&&c.length){for(h in o.filter)if((f=o.leftMatch[h].exec(a))!=null&&f[2]){k=o.filter[h],l=f[1],g=!1,f.splice(1,1);if(l.substr(l.length-1)==="\\")continue;s===r&&(r=[]);if(o.preFilter[h]){f=o.preFilter[h](f,s,d,r,e,t);if(!f)g=i=!0;else if(f===!0)continue}if(f)for(n=0;(j=s[n])!=null;n++)j&&(i=k(j,f,n,s),p=e^i,d&&i!=null?p?g=!0:s[n]=!1:p&&(r.push(j),g=!0));if(i!==b){d||(s=r),a=a.replace(o.match[h],"");if(!g)return[];break}}if(a===q)if(g==null)m.error(a);else break;q=a}return s},m.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)};var n=m.getText=function(a){var b,c,d=a.nodeType,e="";if(d){if(d===1||d===9||d===11){if(typeof a.textContent=="string")return a.textContent;if(typeof a.innerText=="string")return a.innerText.replace(k,"");for(a=a.firstChild;a;a=a.nextSibling)e+=n(a)}else if(d===3||d===4)return a.nodeValue}else for(b=0;c=a[b];b++)c.nodeType!==8&&(e+=n(c));return e},o=m.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF\-]|\\.)+)/,CLASS:/\.((?:[\w\u00c0-\uFFFF\-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF\-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF\-]|\\.)+)\s*(?:(\S?=)\s*(?:(['"])(.*?)\3|(#?(?:[\w\u00c0-\uFFFF\-]|\\.)*)|)|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*\-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\(\s*(even|odd|(?:[+\-]?\d+|(?:[+\-]?\d*)?n\s*(?:[+\-]\s*\d+)?))\s*\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^\-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF\-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(a){return a.getAttribute("href")},type:function(a){return a.getAttribute("type")}},relative:{"+":function(a,b){var c=typeof 
b=="string",d=c&&!l.test(b),e=c&&!d;d&&(b=b.toLowerCase());for(var f=0,g=a.length,h;f<g;f++)if(h=a[f]){while((h=h.previousSibling)&&h.nodeType!==1);a[f]=e||h&&h.nodeName.toLowerCase()===b?h||!1:h===b}e&&m.filter(b,a,!0)},">":function(a,b){var c,d=typeof b=="string",e=0,f=a.length;if(d&&!l.test(b)){b=b.toLowerCase();for(;e<f;e++){c=a[e];if(c){var g=c.parentNode;a[e]=g.nodeName.toLowerCase()===b?g:!1}}}else{for(;e<f;e++)c=a[e],c&&(a[e]=d?c.parentNode:c.parentNode===b);d&&m.filter(b,a,!0)}},"":function(a,b,c){var d,f=e++,g=x;typeof b=="string"&&!l.test(b)&&(b=b.toLowerCase(),d=b,g=w),g("parentNode",b,f,a,d,c)},"~":function(a,b,c){var d,f=e++,g=x;typeof b=="string"&&!l.test(b)&&(b=b.toLowerCase(),d=b,g=w),g("previousSibling",b,f,a,d,c)}},find:{ID:function(a,b,c){if(typeof b.getElementById!="undefined"&&!c){var d=b.getElementById(a[1]);return d&&d.parentNode?[d]:[]}},NAME:function(a,b){if(typeof b.getElementsByName!="undefined"){var c=[],d=b.getElementsByName(a[1]);for(var e=0,f=d.length;e<f;e++)d[e].getAttribute("name")===a[1]&&c.push(d[e]);return c.length===0?null:c}},TAG:function(a,b){if(typeof b.getElementsByTagName!="undefined")return b.getElementsByTagName(a[1])}},preFilter:{CLASS:function(a,b,c,d,e,f){a=" "+a[1].replace(j,"")+" ";if(f)return a;for(var g=0,h;(h=b[g])!=null;g++)h&&(e^(h.className&&(" "+h.className+" ").replace(/[\t\n\r]/g," ").indexOf(a)>=0)?c||d.push(h):c&&(b[g]=!1));return!1},ID:function(a){return a[1].replace(j,"")},TAG:function(a,b){return a[1].replace(j,"").toLowerCase()},CHILD:function(a){if(a[1]==="nth"){a[2]||m.error(a[0]),a[2]=a[2].replace(/^\+|\s*/g,"");var b=/(-?)(\d*)(?:n([+\-]?\d*))?/.exec(a[2]==="even"&&"2n"||a[2]==="odd"&&"2n+1"||!/\D/.test(a[2])&&"0n+"+a[2]||a[2]);a[2]=b[1]+(b[2]||1)-0,a[3]=b[3]-0}else a[2]&&m.error(a[0]);a[0]=e++;return a},ATTR:function(a,b,c,d,e,f){var g=a[1]=a[1].replace(j,"");!f&&o.attrMap[g]&&(a[1]=o.attrMap[g]),a[4]=(a[4]||a[5]||"").replace(j,""),a[2]==="~="&&(a[4]=" "+a[4]+" ");return a},PSEUDO:function(b,c,d,e,f){if(b[1]==="not")if((a.exec(b[3])||"").length>1||/^\w/.test(b[3]))b[3]=m(b[3],null,null,c);else{var g=m.filter(b[3],c,d,!0^f);d||e.push.apply(e,g);return!1}else if(o.match.POS.test(b[0])||o.match.CHILD.test(b[0]))return!0;return b},POS:function(a){a.unshift(!0);return a}},filters:{enabled:function(a){return a.disabled===!1&&a.type!=="hidden"},disabled:function(a){return a.disabled===!0},checked:function(a){return a.checked===!0},selected:function(a){a.parentNode&&a.parentNode.selectedIndex;return a.selected===!0},parent:function(a){return!!a.firstChild},empty:function(a){return!a.firstChild},has:function(a,b,c){return!!m(c[3],a).length},header:function(a){return/h\d/i.test(a.nodeName)},text:function(a){var b=a.getAttribute("type"),c=a.type;return a.nodeName.toLowerCase()==="input"&&"text"===c&&(b===c||b===null)},radio:function(a){return a.nodeName.toLowerCase()==="input"&&"radio"===a.type},checkbox:function(a){return a.nodeName.toLowerCase()==="input"&&"checkbox"===a.type},file:function(a){return a.nodeName.toLowerCase()==="input"&&"file"===a.type},password:function(a){return a.nodeName.toLowerCase()==="input"&&"password"===a.type},submit:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"submit"===a.type},image:function(a){return a.nodeName.toLowerCase()==="input"&&"image"===a.type},reset:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"reset"===a.type},button:function(a){var b=a.nodeName.toLowerCase();return 
b==="input"&&"button"===a.type||b==="button"},input:function(a){return/input|select|textarea|button/i.test(a.nodeName)},focus:function(a){return a===a.ownerDocument.activeElement}},setFilters:{first:function(a,b){return b===0},last:function(a,b,c,d){return b===d.length-1},even:function(a,b){return b%2===0},odd:function(a,b){return b%2===1},lt:function(a,b,c){return b<c[3]-0},gt:function(a,b,c){return b>c[3]-0},nth:function(a,b,c){return c[3]-0===b},eq:function(a,b,c){return c[3]-0===b}},filter:{PSEUDO:function(a,b,c,d){var e=b[1],f=o.filters[e];if(f)return f(a,c,b,d);if(e==="contains")return(a.textContent||a.innerText||n([a])||"").indexOf(b[3])>=0;if(e==="not"){var g=b[3];for(var h=0,i=g.length;h<i;h++)if(g[h]===a)return!1;return!0}m.error(e)},CHILD:function(a,b){var c,e,f,g,h,i,j,k=b[1],l=a;switch(k){case"only":case"first":while(l=l.previousSibling)if(l.nodeType===1)return!1;if(k==="first")return!0;l=a;case"last":while(l=l.nextSibling)if(l.nodeType===1)return!1;return!0;case"nth":c=b[2],e=b[3];if(c===1&&e===0)return!0;f=b[0],g=a.parentNode;if(g&&(g[d]!==f||!a.nodeIndex)){i=0;for(l=g.firstChild;l;l=l.nextSibling)l.nodeType===1&&(l.nodeIndex=++i);g[d]=f}j=a.nodeIndex-e;return c===0?j===0:j%c===0&&j/c>=0}},ID:function(a,b){return a.nodeType===1&&a.getAttribute("id")===b},TAG:function(a,b){return b==="*"&&a.nodeType===1||!!a.nodeName&&a.nodeName.toLowerCase()===b},CLASS:function(a,b){return(" "+(a.className||a.getAttribute("class"))+" ").indexOf(b)>-1},ATTR:function(a,b){var c=b[1],d=m.attr?m.attr(a,c):o.attrHandle[c]?o.attrHandle[c](a):a[c]!=null?a[c]:a.getAttribute(c),e=d+"",f=b[2],g=b[4];return d==null?f==="!=":!f&&m.attr?d!=null:f==="="?e===g:f==="*="?e.indexOf(g)>=0:f==="~="?(" "+e+" ").indexOf(g)>=0:g?f==="!="?e!==g:f==="^="?e.indexOf(g)===0:f==="$="?e.substr(e.length-g.length)===g:f==="|="?e===g||e.substr(0,g.length+1)===g+"-":!1:e&&d!==!1},POS:function(a,b,c,d){var e=b[2],f=o.setFilters[e];if(f)return f(a,c,b,d)}}},p=o.match.POS,q=function(a,b){return"\\"+(b-0+1)};for(var r in o.match)o.match[r]=new RegExp(o.match[r].source+/(?![^\[]*\])(?![^\(]*\))/.source),o.leftMatch[r]=new RegExp(/(^(?:.|\r|\n)*?)/.source+o.match[r].source.replace(/\\(\d+)/g,q));o.match.globalPOS=p;var s=function(a,b){a=Array.prototype.slice.call(a,0);if(b){b.push.apply(b,a);return b}return a};try{Array.prototype.slice.call(c.documentElement.childNodes,0)[0].nodeType}catch(t){s=function(a,b){var c=0,d=b||[];if(g.call(a)==="[object Array]")Array.prototype.push.apply(d,a);else if(typeof a.length=="number")for(var e=a.length;c<e;c++)d.push(a[c]);else for(;a[c];c++)d.push(a[c]);return d}}var u,v;c.documentElement.compareDocumentPosition?u=function(a,b){if(a===b){h=!0;return 0}if(!a.compareDocumentPosition||!b.compareDocumentPosition)return a.compareDocumentPosition?-1:1;return a.compareDocumentPosition(b)&4?-1:1}:(u=function(a,b){if(a===b){h=!0;return 0}if(a.sourceIndex&&b.sourceIndex)return a.sourceIndex-b.sourceIndex;var c,d,e=[],f=[],g=a.parentNode,i=b.parentNode,j=g;if(g===i)return v(a,b);if(!g)return-1;if(!i)return 1;while(j)e.unshift(j),j=j.parentNode;j=i;while(j)f.unshift(j),j=j.parentNode;c=e.length,d=f.length;for(var k=0;k<c&&k<d;k++)if(e[k]!==f[k])return v(e[k],f[k]);return k===c?v(a,f[k],-1):v(e[k],b,1)},v=function(a,b,c){if(a===b)return c;var d=a.nextSibling;while(d){if(d===b)return-1;d=d.nextSibling}return 1}),function(){var a=c.createElement("div"),d="script"+(new Date).getTime(),e=c.documentElement;a.innerHTML="<a 
name='"+d+"'/>",e.insertBefore(a,e.firstChild),c.getElementById(d)&&(o.find.ID=function(a,c,d){if(typeof c.getElementById!="undefined"&&!d){var e=c.getElementById(a[1]);return e?e.id===a[1]||typeof e.getAttributeNode!="undefined"&&e.getAttributeNode("id").nodeValue===a[1]?[e]:b:[]}},o.filter.ID=function(a,b){var c=typeof a.getAttributeNode!="undefined"&&a.getAttributeNode("id");return a.nodeType===1&&c&&c.nodeValue===b}),e.removeChild(a),e=a=null}(),function(){var a=c.createElement("div");a.appendChild(c.createComment("")),a.getElementsByTagName("*").length>0&&(o.find.TAG=function(a,b){var c=b.getElementsByTagName(a[1]);if(a[1]==="*"){var d=[];for(var e=0;c[e];e++)c[e].nodeType===1&&d.push(c[e]);c=d}return c}),a.innerHTML="<a href='#'></a>",a.firstChild&&typeof a.firstChild.getAttribute!="undefined"&&a.firstChild.getAttribute("href")!=="#"&&(o.attrHandle.href=function(a){return a.getAttribute("href",2)}),a=null}(),c.querySelectorAll&&function(){var a=m,b=c.createElement("div"),d="__sizzle__";b.innerHTML="<p class='TEST'></p>";if(!b.querySelectorAll||b.querySelectorAll(".TEST").length!==0){m=function(b,e,f,g){e=e||c;if(!g&&!m.isXML(e)){var h=/^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec(b);if(h&&(e.nodeType===1||e.nodeType===9)){if(h[1])return s(e.getElementsByTagName(b),f);if(h[2]&&o.find.CLASS&&e.getElementsByClassName)return s(e.getElementsByClassName(h[2]),f)}if(e.nodeType===9){if(b==="body"&&e.body)return s([e.body],f);if(h&&h[3]){var i=e.getElementById(h[3]);if(!i||!i.parentNode)return s([],f);if(i.id===h[3])return s([i],f)}try{return s(e.querySelectorAll(b),f)}catch(j){}}else if(e.nodeType===1&&e.nodeName.toLowerCase()!=="object"){var k=e,l=e.getAttribute("id"),n=l||d,p=e.parentNode,q=/^\s*[+~]/.test(b);l?n=n.replace(/'/g,"\\$&"):e.setAttribute("id",n),q&&p&&(e=e.parentNode);try{if(!q||p)return s(e.querySelectorAll("[id='"+n+"'] "+b),f)}catch(r){}finally{l||k.removeAttribute("id")}}}return a(b,e,f,g)};for(var e in a)m[e]=a[e];b=null}}(),function(){var a=c.documentElement,b=a.matchesSelector||a.mozMatchesSelector||a.webkitMatchesSelector||a.msMatchesSelector;if(b){var d=!b.call(c.createElement("div"),"div"),e=!1;try{b.call(c.documentElement,"[test!='']:sizzle")}catch(f){e=!0}m.matchesSelector=function(a,c){c=c.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!m.isXML(a))try{if(e||!o.match.PSEUDO.test(c)&&!/!=/.test(c)){var f=b.call(a,c);if(f||!d||a.document&&a.document.nodeType!==11)return f}}catch(g){}return m(c,null,null,[a]).length>0}}}(),function(){var a=c.createElement("div");a.innerHTML="<div class='test e'></div><div class='test'></div>";if(!!a.getElementsByClassName&&a.getElementsByClassName("e").length!==0){a.lastChild.className="e";if(a.getElementsByClassName("e").length===1)return;o.order.splice(1,0,"CLASS"),o.find.CLASS=function(a,b,c){if(typeof b.getElementsByClassName!="undefined"&&!c)return b.getElementsByClassName(a[1])},a=null}}(),c.documentElement.contains?m.contains=function(a,b){return a!==b&&(a.contains?a.contains(b):!0)}:c.documentElement.compareDocumentPosition?m.contains=function(a,b){return!!(a.compareDocumentPosition(b)&16)}:m.contains=function(){return!1},m.isXML=function(a){var b=(a?a.ownerDocument||a:0).documentElement;return b?b.nodeName!=="HTML":!1};var y=function(a,b,c){var d,e=[],f="",g=b.nodeType?[b]:b;while(d=o.match.PSEUDO.exec(a))f+=d[0],a=a.replace(o.match.PSEUDO,"");a=o.relative[a]?a+"*":a;for(var h=0,i=g.length;h<i;h++)m(a,g[h],e,c);return 
m.filter(f,e)};m.attr=f.attr,m.selectors.attrMap={},f.find=m,f.expr=m.selectors,f.expr[":"]=f.expr.filters,f.unique=m.uniqueSort,f.text=m.getText,f.isXMLDoc=m.isXML,f.contains=m.contains}();var L=/Until$/,M=/^(?:parents|prevUntil|prevAll)/,N=/,/,O=/^.[^:#\[\.,]*$/,P=Array.prototype.slice,Q=f.expr.match.globalPOS,R={children:!0,contents:!0,next:!0,prev:!0};f.fn.extend({find:function(a){var b=this,c,d;if(typeof a!="string")return f(a).filter(function(){for(c=0,d=b.length;c<d;c++)if(f.contains(b[c],this))return!0});var e=this.pushStack("","find",a),g,h,i;for(c=0,d=this.length;c<d;c++){g=e.length,f.find(a,this[c],e);if(c>0)for(h=g;h<e.length;h++)for(i=0;i<g;i++)if(e[i]===e[h]){e.splice(h--,1);break}}return e},has:function(a){var b=f(a);return this.filter(function(){for(var a=0,c=b.length;a<c;a++)if(f.contains(this,b[a]))return!0})},not:function(a){return this.pushStack(T(this,a,!1),"not",a)},filter:function(a){return this.pushStack(T(this,a,!0),"filter",a)},is:function(a){return!!a&&(typeof a=="string"?Q.test(a)?f(a,this.context).index(this[0])>=0:f.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c=[],d,e,g=this[0];if(f.isArray(a)){var h=1;while(g&&g.ownerDocument&&g!==b){for(d=0;d<a.length;d++)f(g).is(a[d])&&c.push({selector:a[d],elem:g,level:h});g=g.parentNode,h++}return c}var i=Q.test(a)||typeof a!="string"?f(a,b||this.context):0;for(d=0,e=this.length;d<e;d++){g=this[d];while(g){if(i?i.index(g)>-1:f.find.matchesSelector(g,a)){c.push(g);break}g=g.parentNode;if(!g||!g.ownerDocument||g===b||g.nodeType===11)break}}c=c.length>1?f.unique(c):c;return this.pushStack(c,"closest",a)},index:function(a){if(!a)return this[0]&&this[0].parentNode?this.prevAll().length:-1;if(typeof a=="string")return f.inArray(this[0],f(a));return f.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var c=typeof a=="string"?f(a,b):f.makeArray(a&&a.nodeType?[a]:a),d=f.merge(this.get(),c);return this.pushStack(S(c[0])||S(d[0])?d:f.unique(d))},andSelf:function(){return this.add(this.prevObject)}}),f.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return f.dir(a,"parentNode")},parentsUntil:function(a,b,c){return f.dir(a,"parentNode",c)},next:function(a){return f.nth(a,2,"nextSibling")},prev:function(a){return f.nth(a,2,"previousSibling")},nextAll:function(a){return f.dir(a,"nextSibling")},prevAll:function(a){return f.dir(a,"previousSibling")},nextUntil:function(a,b,c){return f.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return f.dir(a,"previousSibling",c)},siblings:function(a){return f.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return f.sibling(a.firstChild)},contents:function(a){return f.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:f.makeArray(a.childNodes)}},function(a,b){f.fn[a]=function(c,d){var e=f.map(this,b,c);L.test(a)||(d=c),d&&typeof d=="string"&&(e=f.filter(d,e)),e=this.length>1&&!R[a]?f.unique(e):e,(this.length>1||N.test(d))&&M.test(a)&&(e=e.reverse());return this.pushStack(e,a,P.call(arguments).join(","))}}),f.extend({filter:function(a,b,c){c&&(a=":not("+a+")");return b.length===1?f.find.matchesSelector(b[0],a)?[b[0]]:[]:f.find.matches(a,b)},dir:function(a,c,d){var e=[],g=a[c];while(g&&g.nodeType!==9&&(d===b||g.nodeType!==1||!f(g).is(d)))g.nodeType===1&&e.push(g),g=g[c];return e},nth:function(a,b,c,d){b=b||1;var e=0;for(;a;a=a[c])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var 
V="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",W=/ jQuery\d+="(?:\d+|null)"/g,X=/^\s+/,Y=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,Z=/<([\w:]+)/,$=/<tbody/i,_=/<|&#?\w+;/,ba=/<(?:script|style)/i,bb=/<(?:script|object|embed|option|style)/i,bc=new RegExp("<(?:"+V+")[\\s/>]","i"),bd=/checked\s*(?:[^=]|=\s*.checked.)/i,be=/\/(java|ecma)script/i,bf=/^\s*<!(?:\[CDATA\[|\-\-)/,bg={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]},bh=U(c);bg.optgroup=bg.option,bg.tbody=bg.tfoot=bg.colgroup=bg.caption=bg.thead,bg.th=bg.td,f.support.htmlSerialize||(bg._default=[1,"div<div>","</div>"]),f.fn.extend({text:function(a){return f.access(this,function(a){return a===b?f.text(this):this.empty().append((this[0]&&this[0].ownerDocument||c).createTextNode(a))},null,a,arguments.length)},wrapAll:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapAll(a.call(this,b))});if(this[0]){var b=f(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapInner(a.call(this,b))});return this.each(function(){var b=f(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=f.isFunction(a);return this.each(function(c){f(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){f.nodeName(this,"body")||f(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=f
+.clean(arguments);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,f.clean(arguments));return a}},remove:function(a,b){for(var c=0,d;(d=this[c])!=null;c++)if(!a||f.filter(a,[d]).length)!b&&d.nodeType===1&&(f.cleanData(d.getElementsByTagName("*")),f.cleanData([d])),d.parentNode&&d.parentNode.removeChild(d);return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++){b.nodeType===1&&f.cleanData(b.getElementsByTagName("*"));while(b.firstChild)b.removeChild(b.firstChild)}return this},clone:function(a,b){a=a==null?!1:a,b=b==null?a:b;return this.map(function(){return f.clone(this,a,b)})},html:function(a){return f.access(this,function(a){var c=this[0]||{},d=0,e=this.length;if(a===b)return c.nodeType===1?c.innerHTML.replace(W,""):null;if(typeof a=="string"&&!ba.test(a)&&(f.support.leadingWhitespace||!X.test(a))&&!bg[(Z.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Y,"<$1></$2>");try{for(;d<e;d++)c=this[d]||{},c.nodeType===1&&(f.cleanData(c.getElementsByTagName("*")),c.innerHTML=a);c=0}catch(g){}}c&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(a){if(this[0]&&this[0].parentNode){if(f.isFunction(a))return this.each(function(b){var c=f(this),d=c.html();c.replaceWith(a.call(this,b,d))});typeof a!="string"&&(a=f(a).detach());return this.each(function(){var b=this.nextSibling,c=this.parentNode;f(this).remove(),b?f(b).before(a):f(c).append(a)})}return this.length?this.pushStack(f(f.isFunction(a)?a():a),"replaceWith",a):this},detach:function(a){return this.remove(a,!0)},domManip:function(a,c,d){var e,g,h,i,j=a[0],k=[];if(!f.support.checkClone&&arguments.length===3&&typeof j=="string"&&bd.test(j))return this.each(function(){f(this).domManip(a,c,d,!0)});if(f.isFunction(j))return this.each(function(e){var g=f(this);a[0]=j.call(this,e,c?g.html():b),g.domManip(a,c,d)});if(this[0]){i=j&&j.parentNode,f.support.parentNode&&i&&i.nodeType===11&&i.childNodes.length===this.length?e={fragment:i}:e=f.buildFragment(a,this,k),h=e.fragment,h.childNodes.length===1?g=h=h.firstChild:g=h.firstChild;if(g){c=c&&f.nodeName(g,"tr");for(var l=0,m=this.length,n=m-1;l<m;l++)d.call(c?bi(this[l],g):this[l],e.cacheable||m>1&&l<n?f.clone(h,!0,!0):h)}k.length&&f.each(k,function(a,b){b.src?f.ajax({type:"GET",global:!1,url:b.src,async:!1,dataType:"script"}):f.globalEval((b.text||b.textContent||b.innerHTML||"").replace(bf,"/*$0*/")),b.parentNode&&b.parentNode.removeChild(b)})}return this}}),f.buildFragment=function(a,b,d){var e,g,h,i,j=a[0];b&&b[0]&&(i=b[0].ownerDocument||b[0]),i.createDocumentFragment||(i=c),a.length===1&&typeof j=="string"&&j.length<512&&i===c&&j.charAt(0)==="<"&&!bb.test(j)&&(f.support.checkClone||!bd.test(j))&&(f.support.html5Clone||!bc.test(j))&&(g=!0,h=f.fragments[j],h&&h!==1&&(e=h)),e||(e=i.createDocumentFragment(),f.clean(a,i,e,d)),g&&(f.fragments[j]=h?e:1);return{fragment:e,cacheable:g}},f.fragments={},f.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){f.fn[a]=function(c){var d=[],e=f(c),g=this.length===1&&this[0].parentNode;if(g&&g.nodeType===11&&g.childNodes.length===1&&e.length===1){e[b](this[0]);return this}for(var h=0,i=e.length;h<i;h++){var j=(h>0?this.clone(!0):this).get();f(e[h])[b](j),d=d.concat(j)}return 
this.pushStack(d,a,e.selector)}}),f.extend({clone:function(a,b,c){var d,e,g,h=f.support.html5Clone||f.isXMLDoc(a)||!bc.test("<"+a.nodeName+">")?a.cloneNode(!0):bo(a);if((!f.support.noCloneEvent||!f.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!f.isXMLDoc(a)){bk(a,h),d=bl(a),e=bl(h);for(g=0;d[g];++g)e[g]&&bk(d[g],e[g])}if(b){bj(a,h);if(c){d=bl(a),e=bl(h);for(g=0;d[g];++g)bj(d[g],e[g])}}d=e=null;return h},clean:function(a,b,d,e){var g,h,i,j=[];b=b||c,typeof b.createElement=="undefined"&&(b=b.ownerDocument||b[0]&&b[0].ownerDocument||c);for(var k=0,l;(l=a[k])!=null;k++){typeof l=="number"&&(l+="");if(!l)continue;if(typeof l=="string")if(!_.test(l))l=b.createTextNode(l);else{l=l.replace(Y,"<$1></$2>");var m=(Z.exec(l)||["",""])[1].toLowerCase(),n=bg[m]||bg._default,o=n[0],p=b.createElement("div"),q=bh.childNodes,r;b===c?bh.appendChild(p):U(b).appendChild(p),p.innerHTML=n[1]+l+n[2];while(o--)p=p.lastChild;if(!f.support.tbody){var s=$.test(l),t=m==="table"&&!s?p.firstChild&&p.firstChild.childNodes:n[1]==="<table>"&&!s?p.childNodes:[];for(i=t.length-1;i>=0;--i)f.nodeName(t[i],"tbody")&&!t[i].childNodes.length&&t[i].parentNode.removeChild(t[i])}!f.support.leadingWhitespace&&X.test(l)&&p.insertBefore(b.createTextNode(X.exec(l)[0]),p.firstChild),l=p.childNodes,p&&(p.parentNode.removeChild(p),q.length>0&&(r=q[q.length-1],r&&r.parentNode&&r.parentNode.removeChild(r)))}var u;if(!f.support.appendChecked)if(l[0]&&typeof (u=l.length)=="number")for(i=0;i<u;i++)bn(l[i]);else bn(l);l.nodeType?j.push(l):j=f.merge(j,l)}if(d){g=function(a){return!a.type||be.test(a.type)};for(k=0;j[k];k++){h=j[k];if(e&&f.nodeName(h,"script")&&(!h.type||be.test(h.type)))e.push(h.parentNode?h.parentNode.removeChild(h):h);else{if(h.nodeType===1){var v=f.grep(h.getElementsByTagName("script"),g);j.splice.apply(j,[k+1,0].concat(v))}d.appendChild(h)}}}return j},cleanData:function(a){var b,c,d=f.cache,e=f.event.special,g=f.support.deleteExpando;for(var h=0,i;(i=a[h])!=null;h++){if(i.nodeName&&f.noData[i.nodeName.toLowerCase()])continue;c=i[f.expando];if(c){b=d[c];if(b&&b.events){for(var j in b.events)e[j]?f.event.remove(i,j):f.removeEvent(i,j,b.handle);b.handle&&(b.handle.elem=null)}g?delete i[f.expando]:i.removeAttribute&&i.removeAttribute(f.expando),delete d[c]}}}});var bp=/alpha\([^)]*\)/i,bq=/opacity=([^)]*)/,br=/([A-Z]|^ms)/g,bs=/^[\-+]?(?:\d*\.)?\d+$/i,bt=/^-?(?:\d*\.)?\d+(?!px)[^\d\s]+$/i,bu=/^([\-+])=([\-+.\de]+)/,bv=/^margin/,bw={position:"absolute",visibility:"hidden",display:"block"},bx=["Top","Right","Bottom","Left"],by,bz,bA;f.fn.css=function(a,c){return f.access(this,function(a,c,d){return d!==b?f.style(a,c,d):f.css(a,c)},a,c,arguments.length>1)},f.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=by(a,"opacity");return c===""?"1":c}return a.style.opacity}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":f.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,c,d,e){if(!!a&&a.nodeType!==3&&a.nodeType!==8&&!!a.style){var g,h,i=f.camelCase(c),j=a.style,k=f.cssHooks[i];c=f.cssProps[i]||i;if(d===b){if(k&&"get"in k&&(g=k.get(a,!1,e))!==b)return g;return j[c]}h=typeof d,h==="string"&&(g=bu.exec(d))&&(d=+(g[1]+1)*+g[2]+parseFloat(f.css(a,c)),h="number");if(d==null||h==="number"&&isNaN(d))return;h==="number"&&!f.cssNumber[i]&&(d+="px");if(!k||!("set"in k)||(d=k.set(a,d))!==b)try{j[c]=d}catch(l){}}},css:function(a,c,d){var e,g;c=f.camelCase(c),g=f.cssHooks[c],c=f.cssProps[c]||c,c==="cssFloat"&&(c="float");if(g&&"get"in 
g&&(e=g.get(a,!0,d))!==b)return e;if(by)return by(a,c)},swap:function(a,b,c){var d={},e,f;for(f in b)d[f]=a.style[f],a.style[f]=b[f];e=c.call(a);for(f in b)a.style[f]=d[f];return e}}),f.curCSS=f.css,c.defaultView&&c.defaultView.getComputedStyle&&(bz=function(a,b){var c,d,e,g,h=a.style;b=b.replace(br,"-$1").toLowerCase(),(d=a.ownerDocument.defaultView)&&(e=d.getComputedStyle(a,null))&&(c=e.getPropertyValue(b),c===""&&!f.contains(a.ownerDocument.documentElement,a)&&(c=f.style(a,b))),!f.support.pixelMargin&&e&&bv.test(b)&&bt.test(c)&&(g=h.width,h.width=c,c=e.width,h.width=g);return c}),c.documentElement.currentStyle&&(bA=function(a,b){var c,d,e,f=a.currentStyle&&a.currentStyle[b],g=a.style;f==null&&g&&(e=g[b])&&(f=e),bt.test(f)&&(c=g.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),g.left=b==="fontSize"?"1em":f,f=g.pixelLeft+"px",g.left=c,d&&(a.runtimeStyle.left=d));return f===""?"auto":f}),by=bz||bA,f.each(["height","width"],function(a,b){f.cssHooks[b]={get:function(a,c,d){if(c)return a.offsetWidth!==0?bB(a,b,d):f.swap(a,bw,function(){return bB(a,b,d)})},set:function(a,b){return bs.test(b)?b+"px":b}}}),f.support.opacity||(f.cssHooks.opacity={get:function(a,b){return bq.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=f.isNumeric(b)?"alpha(opacity="+b*100+")":"",g=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&f.trim(g.replace(bp,""))===""){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bp.test(g)?g.replace(bp,e):g+" "+e}}),f(function(){f.support.reliableMarginRight||(f.cssHooks.marginRight={get:function(a,b){return f.swap(a,{display:"inline-block"},function(){return b?by(a,"margin-right"):a.style.marginRight})}})}),f.expr&&f.expr.filters&&(f.expr.filters.hidden=function(a){var b=a.offsetWidth,c=a.offsetHeight;return b===0&&c===0||!f.support.reliableHiddenOffsets&&(a.style&&a.style.display||f.css(a,"display"))==="none"},f.expr.filters.visible=function(a){return!f.expr.filters.hidden(a)}),f.each({margin:"",padding:"",border:"Width"},function(a,b){f.cssHooks[a+b]={expand:function(c){var d,e=typeof c=="string"?c.split(" "):[c],f={};for(d=0;d<4;d++)f[a+bx[d]+b]=e[d]||e[d-2]||e[0];return f}}});var bC=/%20/g,bD=/\[\]$/,bE=/\r?\n/g,bF=/#.*$/,bG=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,bH=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,bI=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,bJ=/^(?:GET|HEAD)$/,bK=/^\/\//,bL=/\?/,bM=/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,bN=/^(?:select|textarea)/i,bO=/\s+/,bP=/([?&])_=[^&]*/,bQ=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/,bR=f.fn.load,bS={},bT={},bU,bV,bW=["*/"]+["*"];try{bU=e.href}catch(bX){bU=c.createElement("a"),bU.href="",bU=bU.href}bV=bQ.exec(bU.toLowerCase())||[],f.fn.extend({load:function(a,c,d){if(typeof a!="string"&&bR)return bR.apply(this,arguments);if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var g=a.slice(e,a.length);a=a.slice(0,e)}var h="GET";c&&(f.isFunction(c)?(d=c,c=b):typeof c=="object"&&(c=f.param(c,f.ajaxSettings.traditional),h="POST"));var i=this;f.ajax({url:a,type:h,dataType:"html",data:c,complete:function(a,b,c){c=a.responseText,a.isResolved()&&(a.done(function(a){c=a}),i.html(g?f("<div>").append(c.replace(bM,"")).find(g):c)),d&&i.each(d,[c,b,a])}});return this},serialize:function(){return f.param(this.serializeArray())},serializeArray:function(){return 
this.map(function(){return this.elements?f.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||bN.test(this.nodeName)||bH.test(this.type))}).map(function(a,b){var c=f(this).val();return c==null?null:f.isArray(c)?f.map(c,function(a,c){return{name:b.name,value:a.replace(bE,"\r\n")}}):{name:b.name,value:c.replace(bE,"\r\n")}}).get()}}),f.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){f.fn[b]=function(a){return this.on(b,a)}}),f.each(["get","post"],function(a,c){f[c]=function(a,d,e,g){f.isFunction(d)&&(g=g||e,e=d,d=b);return f.ajax({type:c,url:a,data:d,success:e,dataType:g})}}),f.extend({getScript:function(a,c){return f.get(a,b,c,"script")},getJSON:function(a,b,c){return f.get(a,b,c,"json")},ajaxSetup:function(a,b){b?b$(a,f.ajaxSettings):(b=a,a=f.ajaxSettings),b$(a,b);return a},ajaxSettings:{url:bU,isLocal:bI.test(bV[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":bW},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":f.parseJSON,"text xml":f.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:bY(bS),ajaxTransport:bY(bT),ajax:function(a,c){function w(a,c,l,m){if(s!==2){s=2,q&&clearTimeout(q),p=b,n=m||"",v.readyState=a>0?4:0;var o,r,u,w=c,x=l?ca(d,v,l):b,y,z;if(a>=200&&a<300||a===304){if(d.ifModified){if(y=v.getResponseHeader("Last-Modified"))f.lastModified[k]=y;if(z=v.getResponseHeader("Etag"))f.etag[k]=z}if(a===304)w="notmodified",o=!0;else try{r=cb(d,x),w="success",o=!0}catch(A){w="parsererror",u=A}}else{u=w;if(!w||a)w="error",a<0&&(a=0)}v.status=a,v.statusText=""+(c||w),o?h.resolveWith(e,[r,w,v]):h.rejectWith(e,[v,w,u]),v.statusCode(j),j=b,t&&g.trigger("ajax"+(o?"Success":"Error"),[v,d,o?r:u]),i.fireWith(e,[v,w]),t&&(g.trigger("ajaxComplete",[v,d]),--f.active||f.event.trigger("ajaxStop"))}}typeof a=="object"&&(c=a,a=b),c=c||{};var d=f.ajaxSetup({},c),e=d.context||d,g=e!==d&&(e.nodeType||e instanceof f)?f(e):f.event,h=f.Deferred(),i=f.Callbacks("once memory"),j=d.statusCode||{},k,l={},m={},n,o,p,q,r,s=0,t,u,v={readyState:0,setRequestHeader:function(a,b){if(!s){var c=a.toLowerCase();a=m[c]=m[c]||a,l[a]=b}return this},getAllResponseHeaders:function(){return s===2?n:null},getResponseHeader:function(a){var c;if(s===2){if(!o){o={};while(c=bG.exec(n))o[c[1].toLowerCase()]=c[2]}c=o[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){s||(d.mimeType=a);return this},abort:function(a){a=a||"abort",p&&p.abort(a),w(0,a);return this}};h.promise(v),v.success=v.done,v.error=v.fail,v.complete=i.add,v.statusCode=function(a){if(a){var b;if(s<2)for(b in a)j[b]=[j[b],a[b]];else b=a[v.status],v.then(b,b)}return this},d.url=((a||d.url)+"").replace(bF,"").replace(bK,bV[1]+"//"),d.dataTypes=f.trim(d.dataType||"*").toLowerCase().split(bO),d.crossDomain==null&&(r=bQ.exec(d.url.toLowerCase()),d.crossDomain=!(!r||r[1]==bV[1]&&r[2]==bV[2]&&(r[3]||(r[1]==="http:"?80:443))==(bV[3]||(bV[1]==="http:"?80:443)))),d.data&&d.processData&&typeof d.data!="string"&&(d.data=f.param(d.data,d.traditional)),bZ(bS,d,c,v);if(s===2)return!1;t=d.global,d.type=d.type.toUpperCase(),d.hasContent=!bJ.test(d.type),t&&f.active++===0&&f.event.trigger("ajaxStart");if(!d.hasContent){d.data&&(d.url+=(bL.test(d.url)?"&":"?")+d.data,delete 
d.data),k=d.url;if(d.cache===!1){var x=f.now(),y=d.url.replace(bP,"$1_="+x);d.url=y+(y===d.url?(bL.test(d.url)?"&":"?")+"_="+x:"")}}(d.data&&d.hasContent&&d.contentType!==!1||c.contentType)&&v.setRequestHeader("Content-Type",d.contentType),d.ifModified&&(k=k||d.url,f.lastModified[k]&&v.setRequestHeader("If-Modified-Since",f.lastModified[k]),f.etag[k]&&v.setRequestHeader("If-None-Match",f.etag[k])),v.setRequestHeader("Accept",d.dataTypes[0]&&d.accepts[d.dataTypes[0]]?d.accepts[d.dataTypes[0]]+(d.dataTypes[0]!=="*"?", "+bW+"; q=0.01":""):d.accepts["*"]);for(u in d.headers)v.setRequestHeader(u,d.headers[u]);if(d.beforeSend&&(d.beforeSend.call(e,v,d)===!1||s===2)){v.abort();return!1}for(u in{success:1,error:1,complete:1})v[u](d[u]);p=bZ(bT,d,c,v);if(!p)w(-1,"No Transport");else{v.readyState=1,t&&g.trigger("ajaxSend",[v,d]),d.async&&d.timeout>0&&(q=setTimeout(function(){v.abort("timeout")},d.timeout));try{s=1,p.send(l,w)}catch(z){if(s<2)w(-1,z);else throw z}}return v},param:function(a,c){var d=[],e=function(a,b){b=f.isFunction(b)?b():b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=f.ajaxSettings.traditional);if(f.isArray(a)||a.jquery&&!f.isPlainObject(a))f.each(a,function(){e(this.name,this.value)});else for(var g in a)b_(g,a[g],c,e);return d.join("&").replace(bC,"+")}}),f.extend({active:0,lastModified:{},etag:{}});var cc=f.now(),cd=/(\=)\?(&|$)|\?\?/i;f.ajaxSetup({jsonp:"callback",jsonpCallback:function(){return f.expando+"_"+cc++}}),f.ajaxPrefilter("json jsonp",function(b,c,d){var e=typeof b.data=="string"&&/^application\/x\-www\-form\-urlencoded/.test(b.contentType);if(b.dataTypes[0]==="jsonp"||b.jsonp!==!1&&(cd.test(b.url)||e&&cd.test(b.data))){var g,h=b.jsonpCallback=f.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,i=a[h],j=b.url,k=b.data,l="$1"+h+"$2";b.jsonp!==!1&&(j=j.replace(cd,l),b.url===j&&(e&&(k=k.replace(cd,l)),b.data===k&&(j+=(/\?/.test(j)?"&":"?")+b.jsonp+"="+h))),b.url=j,b.data=k,a[h]=function(a){g=[a]},d.always(function(){a[h]=i,g&&f.isFunction(i)&&a[h](g[0])}),b.converters["script json"]=function(){g||f.error(h+" was not called");return g[0]},b.dataTypes[0]="json";return"script"}}),f.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){f.globalEval(a);return a}}}),f.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),f.ajaxTransport("script",function(a){if(a.crossDomain){var d,e=c.head||c.getElementsByTagName("head")[0]||c.documentElement;return{send:function(f,g){d=c.createElement("script"),d.async="async",a.scriptCharset&&(d.charset=a.scriptCharset),d.src=a.url,d.onload=d.onreadystatechange=function(a,c){if(c||!d.readyState||/loaded|complete/.test(d.readyState))d.onload=d.onreadystatechange=null,e&&d.parentNode&&e.removeChild(d),d=b,c||g(200,"success")},e.insertBefore(d,e.firstChild)},abort:function(){d&&d.onload(0,1)}}}});var ce=a.ActiveXObject?function(){for(var a in cg)cg[a](0,1)}:!1,cf=0,cg;f.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&ch()||ci()}:ch,function(a){f.extend(f.support,{ajax:!!a,cors:!!a&&"withCredentials"in a})}(f.ajaxSettings.xhr()),f.support.ajax&&f.ajaxTransport(function(c){if(!c.crossDomain||f.support.cors){var d;return{send:function(e,g){var h=c.xhr(),i,j;c.username?h.open(c.type,c.url,c.async,c.username,c.password):h.open(c.type,c.url,c.async);if(c.xhrFields)for(j in 
c.xhrFields)h[j]=c.xhrFields[j];c.mimeType&&h.overrideMimeType&&h.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(j in e)h.setRequestHeader(j,e[j])}catch(k){}h.send(c.hasContent&&c.data||null),d=function(a,e){var j,k,l,m,n;try{if(d&&(e||h.readyState===4)){d=b,i&&(h.onreadystatechange=f.noop,ce&&delete cg[i]);if(e)h.readyState!==4&&h.abort();else{j=h.status,l=h.getAllResponseHeaders(),m={},n=h.responseXML,n&&n.documentElement&&(m.xml=n);try{m.text=h.responseText}catch(a){}try{k=h.statusText}catch(o){k=""}!j&&c.isLocal&&!c.crossDomain?j=m.text?200:404:j===1223&&(j=204)}}}catch(p){e||g(-1,p)}m&&g(j,k,m,l)},!c.async||h.readyState===4?d():(i=++cf,ce&&(cg||(cg={},f(a).unload(ce)),cg[i]=d),h.onreadystatechange=d)},abort:function(){d&&d(0,1)}}}});var cj={},ck,cl,cm=/^(?:toggle|show|hide)$/,cn=/^([+\-]=)?([\d+.\-]+)([a-z%]*)$/i,co,cp=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]],cq;f.fn.extend({show:function(a,b,c){var d,e;if(a||a===0)return this.animate(ct("show",3),a,b,c);for(var g=0,h=this.length;g<h;g++)d=this[g],d.style&&(e=d.style.display,!f._data(d,"olddisplay")&&e==="none"&&(e=d.style.display=""),(e===""&&f.css(d,"display")==="none"||!f.contains(d.ownerDocument.documentElement,d))&&f._data(d,"olddisplay",cu(d.nodeName)));for(g=0;g<h;g++){d=this[g];if(d.style){e=d.style.display;if(e===""||e==="none")d.style.display=f._data(d,"olddisplay")||""}}return this},hide:function(a,b,c){if(a||a===0)return this.animate(ct("hide",3),a,b,c);var d,e,g=0,h=this.length;for(;g<h;g++)d=this[g],d.style&&(e=f.css(d,"display"),e!=="none"&&!f._data(d,"olddisplay")&&f._data(d,"olddisplay",e));for(g=0;g<h;g++)this[g].style&&(this[g].style.display="none");return this},_toggle:f.fn.toggle,toggle:function(a,b,c){var d=typeof a=="boolean";f.isFunction(a)&&f.isFunction(b)?this._toggle.apply(this,arguments):a==null||d?this.each(function(){var b=d?a:f(this).is(":hidden");f(this)[b?"show":"hide"]()}):this.animate(ct("toggle",3),a,b,c);return this},fadeTo:function(a,b,c,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){function g(){e.queue===!1&&f._mark(this);var b=f.extend({},e),c=this.nodeType===1,d=c&&f(this).is(":hidden"),g,h,i,j,k,l,m,n,o,p,q;b.animatedProperties={};for(i in a){g=f.camelCase(i),i!==g&&(a[g]=a[i],delete a[i]);if((k=f.cssHooks[g])&&"expand"in k){l=k.expand(a[g]),delete a[g];for(i in l)i in a||(a[i]=l[i])}}for(g in a){h=a[g],f.isArray(h)?(b.animatedProperties[g]=h[1],h=a[g]=h[0]):b.animatedProperties[g]=b.specialEasing&&b.specialEasing[g]||b.easing||"swing";if(h==="hide"&&d||h==="show"&&!d)return b.complete.call(this);c&&(g==="height"||g==="width")&&(b.overflow=[this.style.overflow,this.style.overflowX,this.style.overflowY],f.css(this,"display")==="inline"&&f.css(this,"float")==="none"&&(!f.support.inlineBlockNeedsLayout||cu(this.nodeName)==="inline"?this.style.display="inline-block":this.style.zoom=1))}b.overflow!=null&&(this.style.overflow="hidden");for(i in a)j=new f.fx(this,b,i),h=a[i],cm.test(h)?(q=f._data(this,"toggle"+i)||(h==="toggle"?d?"show":"hide":0),q?(f._data(this,"toggle"+i,q==="show"?"hide":"show"),j[q]()):j[h]()):(m=cn.exec(h),n=j.cur(),m?(o=parseFloat(m[2]),p=m[3]||(f.cssNumber[i]?"":"px"),p!=="px"&&(f.style(this,i,(o||1)+p),n=(o||1)/j.cur()*n,f.style(this,i,n+p)),m[1]&&(o=(m[1]==="-="?-1:1)*o+n),j.custom(n,o,p)):j.custom(n,h,""));return!0}var 
e=f.speed(b,c,d);if(f.isEmptyObject(a))return this.each(e.complete,[!1]);a=f.extend({},a);return e.queue===!1?this.each(g):this.queue(e.queue,g)},stop:function(a,c,d){typeof a!="string"&&(d=c,c=a,a=b),c&&a!==!1&&this.queue(a||"fx",[]);return this.each(function(){function h(a,b,c){var e=b[c];f.removeData(a,c,!0),e.stop(d)}var b,c=!1,e=f.timers,g=f._data(this);d||f._unmark(!0,this);if(a==null)for(b in g)g[b]&&g[b].stop&&b.indexOf(".run")===b.length-4&&h(this,g,b);else g[b=a+".run"]&&g[b].stop&&h(this,g,b);for(b=e.length;b--;)e[b].elem===this&&(a==null||e[b].queue===a)&&(d?e[b](!0):e[b].saveState(),c=!0,e.splice(b,1));(!d||!c)&&f.dequeue(this,a)})}}),f.each({slideDown:ct("show",1),slideUp:ct("hide",1),slideToggle:ct("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){f.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),f.extend({speed:function(a,b,c){var d=a&&typeof a=="object"?f.extend({},a):{complete:c||!c&&b||f.isFunction(a)&&a,duration:a,easing:c&&b||b&&!f.isFunction(b)&&b};d.duration=f.fx.off?0:typeof d.duration=="number"?d.duration:d.duration in f.fx.speeds?f.fx.speeds[d.duration]:f.fx.speeds._default;if(d.queue==null||d.queue===!0)d.queue="fx";d.old=d.complete,d.complete=function(a){f.isFunction(d.old)&&d.old.call(this),d.queue?f.dequeue(this,d.queue):a!==!1&&f._unmark(this)};return d},easing:{linear:function(a){return a},swing:function(a){return-Math.cos(a*Math.PI)/2+.5}},timers:[],fx:function(a,b,c){this.options=b,this.elem=a,this.prop=c,b.orig=b.orig||{}}}),f.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this),(f.fx.step[this.prop]||f.fx.step._default)(this)},cur:function(){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];var a,b=f.css(this.elem,this.prop);return isNaN(a=parseFloat(b))?!b||b==="auto"?0:b:a},custom:function(a,c,d){function h(a){return e.step(a)}var e=this,g=f.fx;this.startTime=cq||cr(),this.end=c,this.now=this.start=a,this.pos=this.state=0,this.unit=d||this.unit||(f.cssNumber[this.prop]?"":"px"),h.queue=this.options.queue,h.elem=this.elem,h.saveState=function(){f._data(e.elem,"fxshow"+e.prop)===b&&(e.options.hide?f._data(e.elem,"fxshow"+e.prop,e.start):e.options.show&&f._data(e.elem,"fxshow"+e.prop,e.end))},h()&&f.timers.push(h)&&!co&&(co=setInterval(g.tick,g.interval))},show:function(){var a=f._data(this.elem,"fxshow"+this.prop);this.options.orig[this.prop]=a||f.style(this.elem,this.prop),this.options.show=!0,a!==b?this.custom(this.cur(),a):this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur()),f(this.elem).show()},hide:function(){this.options.orig[this.prop]=f._data(this.elem,"fxshow"+this.prop)||f.style(this.elem,this.prop),this.options.hide=!0,this.custom(this.cur(),0)},step:function(a){var b,c,d,e=cq||cr(),g=!0,h=this.elem,i=this.options;if(a||e>=i.duration+this.startTime){this.now=this.end,this.pos=this.state=1,this.update(),i.animatedProperties[this.prop]=!0;for(b in i.animatedProperties)i.animatedProperties[b]!==!0&&(g=!1);if(g){i.overflow!=null&&!f.support.shrinkWrapBlocks&&f.each(["","X","Y"],function(a,b){h.style["overflow"+b]=i.overflow[a]}),i.hide&&f(h).hide();if(i.hide||i.show)for(b in 
i.animatedProperties)f.style(h,b,i.orig[b]),f.removeData(h,"fxshow"+b,!0),f.removeData(h,"toggle"+b,!0);d=i.complete,d&&(i.complete=!1,d.call(h))}return!1}i.duration==Infinity?this.now=e:(c=e-this.startTime,this.state=c/i.duration,this.pos=f.easing[i.animatedProperties[this.prop]](this.state,c,0,1,i.duration),this.now=this.start+(this.end-this.start)*this.pos),this.update();return!0}},f.extend(f.fx,{tick:function(){var a,b=f.timers,c=0;for(;c<b.length;c++)a=b[c],!a()&&b[c]===a&&b.splice(c--,1);b.length||f.fx.stop()},interval:13,stop:function(){clearInterval(co),co=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){f.style(a.elem,"opacity",a.now)},_default:function(a){a.elem.style&&a.elem.style[a.prop]!=null?a.elem.style[a.prop]=a.now+a.unit:a.elem[a.prop]=a.now}}}),f.each(cp.concat.apply([],cp),function(a,b){b.indexOf("margin")&&(f.fx.step[b]=function(a){f.style(a.elem,b,Math.max(0,a.now)+a.unit)})}),f.expr&&f.expr.filters&&(f.expr.filters.animated=function(a){return f.grep(f.timers,function(b){return a===b.elem}).length});var cv,cw=/^t(?:able|d|h)$/i,cx=/^(?:body|html)$/i;"getBoundingClientRect"in c.documentElement?cv=function(a,b,c,d){try{d=a.getBoundingClientRect()}catch(e){}if(!d||!f.contains(c,a))return d?{top:d.top,left:d.left}:{top:0,left:0};var g=b.body,h=cy(b),i=c.clientTop||g.clientTop||0,j=c.clientLeft||g.clientLeft||0,k=h.pageYOffset||f.support.boxModel&&c.scrollTop||g.scrollTop,l=h.pageXOffset||f.support.boxModel&&c.scrollLeft||g.scrollLeft,m=d.top+k-i,n=d.left+l-j;return{top:m,left:n}}:cv=function(a,b,c){var d,e=a.offsetParent,g=a,h=b.body,i=b.defaultView,j=i?i.getComputedStyle(a,null):a.currentStyle,k=a.offsetTop,l=a.offsetLeft;while((a=a.parentNode)&&a!==h&&a!==c){if(f.support.fixedPosition&&j.position==="fixed")break;d=i?i.getComputedStyle(a,null):a.currentStyle,k-=a.scrollTop,l-=a.scrollLeft,a===e&&(k+=a.offsetTop,l+=a.offsetLeft,f.support.doesNotAddBorder&&(!f.support.doesAddBorderForTableAndCells||!cw.test(a.nodeName))&&(k+=parseFloat(d.borderTopWidth)||0,l+=parseFloat(d.borderLeftWidth)||0),g=e,e=a.offsetParent),f.support.subtractsBorderForOverflowNotVisible&&d.overflow!=="visible"&&(k+=parseFloat(d.borderTopWidth)||0,l+=parseFloat(d.borderLeftWidth)||0),j=d}if(j.position==="relative"||j.position==="static")k+=h.offsetTop,l+=h.offsetLeft;f.support.fixedPosition&&j.position==="fixed"&&(k+=Math.max(c.scrollTop,h.scrollTop),l+=Math.max(c.scrollLeft,h.scrollLeft));return{top:k,left:l}},f.fn.offset=function(a){if(arguments.length)return a===b?this:this.each(function(b){f.offset.setOffset(this,a,b)});var c=this[0],d=c&&c.ownerDocument;if(!d)return null;if(c===d.body)return f.offset.bodyOffset(c);return cv(c,d,d.documentElement)},f.offset={bodyOffset:function(a){var b=a.offsetTop,c=a.offsetLeft;f.support.doesNotIncludeMarginInBodyOffset&&(b+=parseFloat(f.css(a,"marginTop"))||0,c+=parseFloat(f.css(a,"marginLeft"))||0);return{top:b,left:c}},setOffset:function(a,b,c){var d=f.css(a,"position");d==="static"&&(a.style.position="relative");var e=f(a),g=e.offset(),h=f.css(a,"top"),i=f.css(a,"left"),j=(d==="absolute"||d==="fixed")&&f.inArray("auto",[h,i])>-1,k={},l={},m,n;j?(l=e.position(),m=l.top,n=l.left):(m=parseFloat(h)||0,n=parseFloat(i)||0),f.isFunction(b)&&(b=b.call(a,c,g)),b.top!=null&&(k.top=b.top-g.top+m),b.left!=null&&(k.left=b.left-g.left+n),"using"in b?b.using.call(a,k):e.css(k)}},f.fn.extend({position:function(){if(!this[0])return null;var 
a=this[0],b=this.offsetParent(),c=this.offset(),d=cx.test(b[0].nodeName)?{top:0,left:0}:b.offset();c.top-=parseFloat(f.css(a,"marginTop"))||0,c.left-=parseFloat(f.css(a,"marginLeft"))||0,d.top+=parseFloat(f.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(f.css(b[0],"borderLeftWidth"))||0;return{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||c.body;while(a&&!cx.test(a.nodeName)&&f.css(a,"position")==="static")a=a.offsetParent;return a})}}),f.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,c){var d=/Y/.test(c);f.fn[a]=function(e){return f.access(this,function(a,e,g){var h=cy(a);if(g===b)return h?c in h?h[c]:f.support.boxModel&&h.document.documentElement[e]||h.document.body[e]:a[e];h?h.scrollTo(d?f(h).scrollLeft():g,d?g:f(h).scrollTop()):a[e]=g},a,e,arguments.length,null)}}),f.each({Height:"height",Width:"width"},function(a,c){var d="client"+a,e="scroll"+a,g="offset"+a;f.fn["inner"+a]=function(){var a=this[0];return a?a.style?parseFloat(f.css(a,c,"padding")):this[c]():null},f.fn["outer"+a]=function(a){var b=this[0];return b?b.style?parseFloat(f.css(b,c,a?"margin":"border")):this[c]():null},f.fn[c]=function(a){return f.access(this,function(a,c,h){var i,j,k,l;if(f.isWindow(a)){i=a.document,j=i.documentElement[d];return f.support.boxModel&&j||i.body&&i.body[d]||j}if(a.nodeType===9){i=a.documentElement;if(i[d]>=i[e])return i[d];return Math.max(a.body[e],i[e],a.body[g],i[g])}if(h===b){k=f.css(a,c),l=parseFloat(k);return f.isNumeric(l)?l:k}f(a).css(c,h)},c,a,arguments.length,null)}}),a.jQuery=a.$=f,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return f})})(window); \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
index 4ab99764ce..4e0c9d75c7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
@@ -1,18 +1,5448 @@
-/*
- * jquery.layout 1.3.0 - Release Candidate 29.3
+/**
+ * @preserve jquery.layout 1.3.0 - Release Candidate 30.5
+ * $Date: 2012-04-14 08:00:00 (Sat, 14 Apr 2012) $
+ * $Rev: 303005 $
*
- * Copyright (c) 2010
+ * Copyright (c) 2012
* Fabrizio Balliano (http://www.fabrizioballiano.net)
* Kevin Dalman (http://allpro.net)
*
* Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
* and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
*
+ * Changelog: http://layout.jquery-dev.net/changelog.cfm#1.3.0.rc30.5
+ *
* Docs: http://layout.jquery-dev.net/documentation.html
* Tips: http://layout.jquery-dev.net/tips.html
* Help: http://groups.google.com/group/jquery-ui-layout
+ */
+
+/* JavaDoc Info: http://code.google.com/closure/compiler/docs/js-for-compiler.html
+ * {!Object} non-nullable type (never NULL)
+ * {?string} nullable type (sometimes NULL) - default for {Object}
+ * {number=} optional parameter
+ * {*} ALL types
+ */
+
+// NOTE: For best readability, view with a fixed-width font and tabs equal to 4-chars
+
+;(function ($) {
+
+// alias Math methods - used a lot!
+var min = Math.min
+, max = Math.max
+, round = Math.floor
+;
+function isStr (v) { return $.type(v) === "string"; }
+
+function runPluginCallbacks (Instance, a_fn) {
+ if ($.isArray(a_fn))
+ for (var i=0, c=a_fn.length; i<c; i++) {
+ var fn = a_fn[i];
+ try {
+ if (isStr(fn)) // 'name' of a function
+ fn = eval(fn);
+ if ($.isFunction(fn))
+ fn( Instance );
+ } catch (ex) {}
+ }
+};
+
+
+
+/*
+ * GENERIC $.layout METHODS - used by all layouts
+ */
+$.layout = {
+
+ version: "1.3.rc30.5"
+, revision: 0.033005 // 1.3.0 final = 1.0300 - major(n+).minor(nn)+patch(nn+)
+
+ // LANGUAGE CUSTOMIZATION
+, language: {
+ // Tips and messages for resizers, togglers, custom buttons, etc.
+ Open: "Open" // eg: "Open Pane"
+ , Close: "Close"
+ , Resize: "Resize"
+ , Slide: "Slide Open"
+ , Pin: "Pin"
+ , Unpin: "Un-Pin"
+ , noRoomToOpenTip: "Not enough room to show this pane."
+ , minSizeWarning: "Panel has reached its minimum size"
+ , maxSizeWarning: "Panel has reached its maximum size"
+ // Developer error messages
+ , pane: "pane" // description of "layout pane element"
+ , selector: "selector" // description of "jQuery-selector"
+ , errButton: "Error Adding Button \n\nInvalid "
+ , errContainerMissing: "UI Layout Initialization Error\n\nThe specified layout-container does not exist."
+ , errCenterPaneMissing: "UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element."
+ , errContainerHeight: "UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!"
+ }
+
+ // can update code here if $.browser is phased out
+, browser: {
+ mozilla: !!$.browser.mozilla
+ , webkit: !!$.browser.webkit || !!$.browser.safari // webkit = jQ 1.4
+ , msie: !!$.browser.msie
+ , isIE6: !!$.browser.msie && $.browser.version == 6
+ , version: $.browser.version // not used in Layout core, but may be used by plugins
+ }
+
+ // *PREDEFINED* EFFECTS & DEFAULTS
+ // MUST list effect here - OR MUST set an fxSettings option (can be an empty hash: {})
+, effects: {
+
+ // Pane Open/Close Animations
+ slide: {
+ all: { duration: "fast" } // eg: duration: 1000, easing: "easeOutBounce"
+ , north: { direction: "up" }
+ , south: { direction: "down" }
+ , east: { direction: "right"}
+ , west: { direction: "left" }
+ }
+ , drop: {
+ all: { duration: "slow" }
+ , north: { direction: "up" }
+ , south: { direction: "down" }
+ , east: { direction: "right"}
+ , west: { direction: "left" }
+ }
+ , scale: {
+ all: { duration: "fast" }
+ }
+ // these are not recommended, but can be used
+ , blind: {}
+ , clip: {}
+ , explode: {}
+ , fade: {}
+ , fold: {}
+ , puff: {}
+
+ // Pane Resize Animations
+ , size: {
+ all: { easing: "swing" }
+ }
+ }
+
+ // INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
+, config: {
+ optionRootKeys: "effects,panes,north,south,west,east,center".split(",")
+ , allPanes: "north,south,west,east,center".split(",")
+ , borderPanes: "north,south,west,east".split(",")
+ , oppositeEdge: {
+ north: "south"
+ , south: "north"
+ , east: "west"
+ , west: "east"
+ }
+ // offscreen data
+ , offscreenCSS: { left: "-99999px", right: "auto" } // used by hide/close if useOffscreenClose=true
+ , offscreenReset: "offscreenReset" // key used for data
+ // CSS used in multiple places
+ , hidden: { visibility: "hidden" }
+ , visible: { visibility: "visible" }
+ // layout element settings
+ , resizers: {
+ cssReq: {
+ position: "absolute"
+ , padding: 0
+ , margin: 0
+ , fontSize: "1px"
+ , textAlign: "left" // to counter-act "center" alignment!
+ , overflow: "hidden" // prevent toggler-button from overflowing
+ // SEE $.layout.defaults.zIndexes.resizer_normal
+ }
+ , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
+ background: "#DDD"
+ , border: "none"
+ }
+ }
+ , togglers: {
+ cssReq: {
+ position: "absolute"
+ , display: "block"
+ , padding: 0
+ , margin: 0
+ , overflow: "hidden"
+ , textAlign: "center"
+ , fontSize: "1px"
+ , cursor: "pointer"
+ , zIndex: 1
+ }
+ , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
+ background: "#AAA"
+ }
+ }
+ , content: {
+ cssReq: {
+ position: "relative" /* contain floated or positioned elements */
+ }
+ , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
+ overflow: "auto"
+ , padding: "10px"
+ }
+ , cssDemoPane: { // DEMO CSS - REMOVE scrolling from 'pane' when it has a content-div
+ overflow: "hidden"
+ , padding: 0
+ }
+ }
+ , panes: { // defaults for ALL panes - overridden by 'per-pane settings' below
+ cssReq: {
+ position: "absolute"
+ , margin: 0
+ // $.layout.defaults.zIndexes.pane_normal
+ }
+ , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
+ padding: "10px"
+ , background: "#FFF"
+ , border: "1px solid #BBB"
+ , overflow: "auto"
+ }
+ }
+ , north: {
+ side: "Top"
+ , sizeType: "Height"
+ , dir: "horz"
+ , cssReq: {
+ top: 0
+ , bottom: "auto"
+ , left: 0
+ , right: 0
+ , width: "auto"
+ // height: DYNAMIC
+ }
+ }
+ , south: {
+ side: "Bottom"
+ , sizeType: "Height"
+ , dir: "horz"
+ , cssReq: {
+ top: "auto"
+ , bottom: 0
+ , left: 0
+ , right: 0
+ , width: "auto"
+ // height: DYNAMIC
+ }
+ }
+ , east: {
+ side: "Right"
+ , sizeType: "Width"
+ , dir: "vert"
+ , cssReq: {
+ left: "auto"
+ , right: 0
+ , top: "auto" // DYNAMIC
+ , bottom: "auto" // DYNAMIC
+ , height: "auto"
+ // width: DYNAMIC
+ }
+ }
+ , west: {
+ side: "Left"
+ , sizeType: "Width"
+ , dir: "vert"
+ , cssReq: {
+ left: 0
+ , right: "auto"
+ , top: "auto" // DYNAMIC
+ , bottom: "auto" // DYNAMIC
+ , height: "auto"
+ // width: DYNAMIC
+ }
+ }
+ , center: {
+ dir: "center"
+ , cssReq: {
+ left: "auto" // DYNAMIC
+ , right: "auto" // DYNAMIC
+ , top: "auto" // DYNAMIC
+ , bottom: "auto" // DYNAMIC
+ , height: "auto"
+ , width: "auto"
+ }
+ }
+ }
+
+ // CALLBACK FUNCTION NAMESPACE - used to store reusable callback functions
+, callbacks: {}
+
+, getParentPaneElem: function (el) {
+ // must pass either a container or pane element
+ var $el = $(el)
+ , layout = $el.data("layout") || $el.data("parentLayout");
+ if (layout) {
+ var $cont = layout.container;
+ // see if this container is directly-nested inside an outer-pane
+ if ($cont.data("layoutPane")) return $cont;
+ var $pane = $cont.closest("."+ $.layout.defaults.panes.paneClass);
+ // if a pane was found, return it
+ if ($pane.data("layoutPane")) return $pane;
+ }
+ return null;
+ }
+
+, getParentPaneInstance: function (el) {
+ // must pass either a container or pane element
+ var $pane = $.layout.getParentPaneElem(el);
+ return $pane ? $pane.data("layoutPane") : null;
+ }
+
+, getParentLayoutInstance: function (el) {
+ // must pass either a container or pane element
+ var $pane = $.layout.getParentPaneElem(el);
+ return $pane ? $pane.data("parentLayout") : null;
+ }
+
+, getEventObject: function (evt) {
+ return typeof evt === "object" && evt.stopPropagation ? evt : null;
+ }
+, parsePaneName: function (evt_or_pane) {
+ // if getEventObject() detects an event, we call .stopPropagation() on it below - WHICH MUST BE DONE!
+ var evt = $.layout.getEventObject( evt_or_pane );
+ if (evt) {
+ // ALWAYS stop propagation of events triggered in Layout!
+ evt.stopPropagation();
+ return $(this).data("layoutEdge");
+ }
+ else
+ return evt_or_pane;
+ }
+
+
+ // LAYOUT-PLUGIN REGISTRATION
+ // more plugins can be added beyond this default list
+, plugins: {
+ draggable: !!$.fn.draggable // resizing
+ , effects: {
+ core: !!$.effects // animations (specific effects tested by initOptions)
+ , slide: $.effects && $.effects.slide // default effect
+ }
+ }
+
+// arrays of plugin or other methods to be triggered for events in *each layout* - will be passed 'Instance'
+, onCreate: [] // runs when layout is just starting to be created - right after options are set
+, onLoad: [] // runs after layout container and global events init, but before initPanes is called
+, onReady: [] // runs after initialization *completes* - ie, after initPanes completes successfully
+, onDestroy: [] // runs after layout is destroyed
+, onUnload: [] // runs after layout is destroyed OR when page unloads
+, afterOpen: [] // runs after setAsOpen() completes
+, afterClose: [] // runs after setAsClosed() completes
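+
+ /* Example (an illustrative sketch, not part of the plugin): a plugin registers for these
+ * hooks by pushing a function - or the name of a global function - onto one of the arrays
+ * above; runPluginCallbacks() then invokes each entry with the layout Instance, eg:
+ *
+ *   $.layout.onReady.push(function (inst) {
+ *       // runs once for every layout that finishes initialization
+ *   });
+ */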
+
+ /*
+ * GENERIC UTILITY METHODS
+ */
+
+ // calculate and return the scrollbar width, as an integer
+, scrollbarWidth: function () { return window.scrollbarWidth || $.layout.getScrollbarSize('width'); }
+, scrollbarHeight: function () { return window.scrollbarHeight || $.layout.getScrollbarSize('height'); }
+, getScrollbarSize: function (dim) {
+ var $c = $('<div style="position: absolute; top: -10000px; left: -10000px; width: 100px; height: 100px; overflow: scroll;"></div>').appendTo("body");
+ var d = { width: $c.width() - $c[0].clientWidth, height: $c.height() - $c[0].clientHeight };
+ $c.remove();
+ window.scrollbarWidth = d.width;
+ window.scrollbarHeight = d.height;
+ return dim.match(/^(width|height)$/) ? d[dim] : d;
+ }
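+
+ /* Illustrative usage of the helpers above (measured once, then cached on window):
+ *   var sbW = $.layout.scrollbarWidth();  // integer px - browser/OS dependent
+ *   var sbH = $.layout.scrollbarHeight();
+ */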
+
+
+ /**
+ * Returns a hash containing 'display' and 'visibility'
+ *
+ * @see $.swap() - swaps CSS, runs callback, resets CSS
+ */
+, showInvisibly: function ($E, force) {
+ if (!$E) return {};
+ if (!$E.jquery) $E = $($E);
+ var CSS = {
+ display: $E.css('display')
+ , visibility: $E.css('visibility')
+ };
+ if (force || CSS.display === "none") { // only if not *already hidden*
+ $E.css({ display: "block", visibility: "hidden" }); // show element 'invisibly' so can be measured
+ return CSS;
+ }
+ else return {};
+ }
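+
+ /* Illustrative usage (mirrors how cssNum() below uses this helper):
+ *   var reset = $.layout.showInvisibly($E); // {} if the element was already visible
+ *   var w     = $E.outerWidth();            // measure while shown 'invisibly'
+ *   $E.css( reset );                        // restore original display/visibility
+ */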
+
+ /**
+ * Returns data for setting size of an element (container or a pane).
+ *
+ * @see _create(), onWindowResize() for container, plus others for pane
+ * @return JSON Returns a hash of all dimensions: top, bottom, left, right, outerWidth, innerHeight, etc
+ */
+, getElementDimensions: function ($E) {
+ var
+ d = {} // dimensions hash
+ , x = d.css = {} // CSS hash
+ , i = {} // TEMP insets
+ , b, p // TEMP border, padding
+ , N = $.layout.cssNum
+ , off = $E.offset()
+ ;
+ d.offsetLeft = off.left;
+ d.offsetTop = off.top;
+
+ $.each("Left,Right,Top,Bottom".split(","), function (idx, e) { // e = edge
+ b = x["border" + e] = $.layout.borderWidth($E, e);
+ p = x["padding"+ e] = $.layout.cssNum($E, "padding"+e);
+ i[e] = b + p; // total offset of content from outer side
+ d["inset"+ e] = p;
+ });
+
+ d.offsetWidth = $E.innerWidth(); // offsetWidth is used in calc when doing manual resize
+ d.offsetHeight = $E.innerHeight(); // ditto
+ d.outerWidth = $E.outerWidth();
+ d.outerHeight = $E.outerHeight();
+ d.innerWidth = max(0, d.outerWidth - i.Left - i.Right);
+ d.innerHeight = max(0, d.outerHeight - i.Top - i.Bottom);
+
+ x.width = $E.width();
+ x.height = $E.height();
+ x.top = N($E,"top",true);
+ x.bottom = N($E,"bottom",true);
+ x.left = N($E,"left",true);
+ x.right = N($E,"right",true);
+
+ //d.visible = $E.is(":visible");// && x.width > 0 && x.height > 0;
+
+ return d;
+ }
+
+, getElementCSS: function ($E, list) {
+ var
+ CSS = {}
+ , style = $E[0].style
+ , props = list.split(",")
+ , sides = "Top,Bottom,Left,Right".split(",")
+ , attrs = "Color,Style,Width".split(",")
+ , p, s, a, i, j, k
+ ;
+ for (i=0; i < props.length; i++) {
+ p = props[i];
+ if (p.match(/(border|padding|margin)$/))
+ for (j=0; j < 4; j++) {
+ s = sides[j];
+ if (p === "border")
+ for (k=0; k < 3; k++) {
+ a = attrs[k];
+ CSS[p+s+a] = style[p+s+a];
+ }
+ else
+ CSS[p+s] = style[p+s];
+ }
+ else
+ CSS[p] = style[p];
+ };
+ return CSS
+ }
+
+ /**
+ * Return the innerWidth for the current browser/doctype
+ *
+ * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
+ * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
+ * @param {number=} outerWidth (optional) Can pass a width, allowing calculations BEFORE element is resized
+ * @return {number} Returns the innerWidth of the elem by subtracting padding and borders
+ */
+, cssWidth: function ($E, outerWidth) {
+ var
+ b = $.layout.borderWidth
+ , n = $.layout.cssNum
+ ;
+ // a 'calculated' outerWidth can be passed so borders and/or padding are removed if needed
+ if (outerWidth <= 0) return 0;
+
+ if (!$.support.boxModel) return outerWidth;
+
+ // strip border and padding from outerWidth to get CSS Width
+ var W = outerWidth
+ - b($E, "Left")
+ - b($E, "Right")
+ - n($E, "paddingLeft")
+ - n($E, "paddingRight")
+ ;
+
+ return max(0,W);
+ }
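+
+ /* Worked example (illustrative numbers, standards/boxModel mode): for an element with
+ * outerWidth 200, 1px left/right borders and 10px left/right padding:
+ *   cssWidth($E, 200)  =>  200 - 1 - 1 - 10 - 10  =  178
+ * cssHeight() below applies the same arithmetic to the Top/Bottom edges.
+ */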
+
+ /**
+ * Return the innerHeight for the current browser/doctype
+ *
+ * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
+ * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
+ * @param {number=} outerHeight (optional) Can pass a height, allowing calculations BEFORE element is resized
+ * @return {number} Returns the innerHeight of the elem by subtracting padding and borders
+ */
+, cssHeight: function ($E, outerHeight) {
+ var
+ b = $.layout.borderWidth
+ , n = $.layout.cssNum
+ ;
+ // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed
+ if (outerHeight <= 0) return 0;
+
+ if (!$.support.boxModel) return outerHeight;
+
+ // strip border and padding from outerHeight to get CSS Height
+ var H = outerHeight
+ - b($E, "Top")
+ - b($E, "Bottom")
+ - n($E, "paddingTop")
+ - n($E, "paddingBottom")
+ ;
+
+ return max(0,H);
+ }
+
+ /**
+ * Returns the 'current CSS numeric value' for a CSS property - 0 if property does not exist
+ *
+ * @see Called by many methods
+ * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
+ * @param {string} prop The name of the CSS property, eg: top, width, etc.
+ * @param {boolean=} [allowAuto=false] true = return 'auto' if that is the value; false = return 0
+ * @return {(string|number)} Usually used to get an integer value for position (top, left) or size (height, width)
+ */
+, cssNum: function ($E, prop, allowAuto) {
+ if (!$E.jquery) $E = $($E);
+ var CSS = $.layout.showInvisibly($E)
+ , p = $.curCSS($E[0], prop, true)
+ , v = allowAuto && p=="auto" ? p : (parseInt(p, 10) || 0);
+ $E.css( CSS ); // RESET
+ return v;
+ }
+
+, borderWidth: function (el, side) {
+ if (el.jquery) el = el[0];
+ var b = "border"+ side.substr(0,1).toUpperCase() + side.substr(1); // left => Left
+ return $.curCSS(el, b+"Style", true) === "none" ? 0 : (parseInt($.curCSS(el, b+"Width", true), 10) || 0);
+ }
+
+ /**
+ * Mouse-tracking utility - FUTURE REFERENCE
+ *
+ * init: if (!window.mouse) {
+ * window.mouse = { x: 0, y: 0 };
+ * $(document).mousemove( $.layout.trackMouse );
+ * }
+ *
+ * @param {Object} evt
+ *
+, trackMouse: function (evt) {
+ window.mouse = { x: evt.clientX, y: evt.clientY };
+ }
+ */
+
+ /**
+ * SUBROUTINE for preventPrematureSlideClose option
+ *
+ * @param {Object} evt
+ * @param {Object=} el
+ */
+, isMouseOverElem: function (evt, el) {
+ var
+ $E = $(el || this)
+ , d = $E.offset()
+ , T = d.top
+ , L = d.left
+ , R = L + $E.outerWidth()
+ , B = T + $E.outerHeight()
+ , x = evt.pageX // evt.clientX ?
+ , y = evt.pageY // evt.clientY ?
+ ;
+ // if X & Y are < 0, it probably means the mouse is over an open SELECT
+ return ($.layout.browser.msie && x < 0 && y < 0) || ((x >= L && x <= R) && (y >= T && y <= B));
+ }
+
+ /**
+ * Message/Logging Utility
+ *
+ * @example $.layout.msg("My message"); // log text
+ * @example $.layout.msg("My message", true); // alert text
+ * @example $.layout.msg({ foo: "bar" }, "Title"); // log hash-data, with custom title
+ * @example $.layout.msg({ foo: "bar" }, true, "Title", { sort: false }); -OR-
+ * @example $.layout.msg({ foo: "bar" }, "Title", { sort: false, display: true }); // alert hash-data
+ *
+ * @param {(Object|string)} info String message OR Hash/Array
+ * @param {(Boolean|string|Object)=} [popup=false] True means alert-box - can be skipped
+ * @param {(Object|string)=} [debugTitle=""] Title for Hash data - can be skipped
+ * @param {Object=} [debugOpts={}] Extra options for debug output
+ */
+, msg: function (info, popup, debugTitle, debugOpts) {
+ if ($.isPlainObject(info) && window.debugData) {
+ if (typeof popup === "string") {
+ debugOpts = debugTitle;
+ debugTitle = popup;
+ }
+ else if (typeof debugTitle === "object") {
+ debugOpts = debugTitle;
+ debugTitle = null;
+ }
+ var t = debugTitle || "log( <object> )"
+ , o = $.extend({ sort: false, returnHTML: false, display: false }, debugOpts);
+ if (popup === true || o.display)
+ debugData( info, t, o );
+ else if (window.console)
+ console.log(debugData( info, t, o ));
+ }
+ else if (popup)
+ alert(info);
+ else if (window.console)
+ console.log(info);
+ else {
+ var id = "#layoutLogger"
+ , $l = $(id);
+ if (!$l.length)
+ $l = createLog();
+ $l.children("ul").append('<li style="padding: 4px 10px; margin: 0; border-top: 1px solid #CCC;">'+ info.replace(/\</g,"&lt;").replace(/\>/g,"&gt;") +'</li>');
+ }
+
+ function createLog () {
+ var pos = $.support.fixedPosition ? 'fixed' : 'absolute'
+ , $e = $('<div id="layoutLogger" style="position: '+ pos +'; top: 5px; z-index: 999999; max-width: 25%; overflow: hidden; border: 1px solid #000; border-radius: 5px; background: #FBFBFB; box-shadow: 0 2px 10px rgba(0,0,0,0.3);">'
+ + '<div style="font-size: 13px; font-weight: bold; padding: 5px 10px; background: #F6F6F6; border-radius: 5px 5px 0 0; cursor: move;">'
+ + '<span style="float: right; padding-left: 7px; cursor: pointer;" title="Remove Console" onclick="$(this).closest(\'#layoutLogger\').remove()">X</span>Layout console.log</div>'
+ + '<ul style="font-size: 13px; font-weight: none; list-style: none; margin: 0; padding: 0 0 2px;"></ul>'
+ + '</div>'
+ ).appendTo("body");
+ $e.css('left', $(window).width() - $e.outerWidth() - 5)
+ if ($.ui.draggable) $e.draggable({ handle: ':first-child' });
+ return $e;
+ };
+ }
+
+};
+
+var lang = $.layout.language; // alias used in defaults...
+
+// DEFAULT OPTIONS - CHANGE IF DESIRED
+$.layout.defaults = {
+/*
+ * LAYOUT & LAYOUT-CONTAINER OPTIONS
+ * - none of these options are applicable to individual panes
+ */
+ name: "" // Not required, but useful for buttons and used for the state-cookie
+, containerSelector: "" // ONLY used when specifying a childOptions - to find container-element that is NOT directly-nested
+, containerClass: "ui-layout-container" // layout-container element
+, scrollToBookmarkOnLoad: true // after creating a layout, scroll to bookmark in URL (.../page.htm#myBookmark)
+, resizeWithWindow: true // bind thisLayout.resizeAll() to the window.resize event
+, resizeWithWindowDelay: 200 // delay calling resizeAll because resizing on every event makes window resizing very jerky
+, resizeWithWindowMaxDelay: 0 // 0 = none - force resize every XX ms while window is being resized
+, onresizeall_start: null // CALLBACK when resizeAll() STARTS - NOT pane-specific
+, onresizeall_end: null // CALLBACK when resizeAll() ENDS - NOT pane-specific
+, onload_start: null // CALLBACK when Layout inits - after options initialized, but before elements
+, onload_end: null // CALLBACK when Layout inits - after EVERYTHING has been initialized
+, onunload_start: null // CALLBACK when Layout is destroyed OR onWindowUnload
+, onunload_end: null // CALLBACK when Layout is destroyed OR onWindowUnload
+, autoBindCustomButtons: false // search for buttons with ui-layout-button class and auto-bind them
+, initPanes: true // false = DO NOT initialize the panes onLoad - will init later
+, showErrorMessages: true // enables fatal error messages to warn developers of common errors
+, showDebugMessages: false // display console-and-alert debug msgs - IF this Layout version _has_ debugging code!
+// Changing this zIndex value will cause other zIndex values to automatically change
+, zIndex: null // the PANE zIndex - resizers and masks will be +1
+// DO NOT CHANGE the zIndex values below unless you clearly understand their relationships
+, zIndexes: { // set _default_ z-index values here...
+ pane_normal: 0 // normal z-index for panes
+ , content_mask: 1 // applied to overlays used to mask content INSIDE panes during resizing
+ , resizer_normal: 2 // normal z-index for resizer-bars
+ , pane_sliding: 100 // applied to *BOTH* the pane and its resizer when a pane is 'slid open'
+ , pane_animate: 1000 // applied to the pane when being animated - not applied to the resizer
+ , resizer_drag: 10000 // applied to the CLONED resizer-bar when being 'dragged'
+ }
+/*
+ * PANE DEFAULT SETTINGS
+ * - settings under the 'panes' key become the default settings for *all panes*
+ * - ALL pane-options can also be set specifically for each pane, which will override these 'default values'
+ */
+, panes: { // default options for 'all panes' - will be overridden by 'per-pane settings'
+ applyDemoStyles: false // NOTE: renamed from applyDefaultStyles for clarity
+ , closable: true // pane can open & close
+ , resizable: true // when open, pane can be resized
+ , slidable: true // when closed, pane can 'slide open' over other panes - closes on mouse-out
+ , initClosed: false // true = init pane as 'closed'
+ , initHidden: false // true = init pane as 'hidden' - no resizer-bar/spacing
+ // SELECTORS
+ //, paneSelector: "" // MUST be pane-specific - jQuery selector for pane
+ , contentSelector: ".ui-layout-content" // INNER div/element to auto-size so only it scrolls, not the entire pane!
+ , contentIgnoreSelector: ".ui-layout-ignore" // element(s) to 'ignore' when measuring 'content'
+ , findNestedContent: false // true = $P.find(contentSelector), false = $P.children(contentSelector)
+ // GENERIC ROOT-CLASSES - for auto-generated classNames
+ , paneClass: "ui-layout-pane" // Layout Pane
+ , resizerClass: "ui-layout-resizer" // Resizer Bar
+ , togglerClass: "ui-layout-toggler" // Toggler Button
+ , buttonClass: "ui-layout-button" // CUSTOM Buttons - eg: '[ui-layout-button]-toggle/-open/-close/-pin'
+ // ELEMENT SIZE & SPACING
+ //, size: 100 // MUST be pane-specific -initial size of pane
+ , minSize: 0 // when manually resizing a pane
+ , maxSize: 0 // ditto, 0 = no limit
+ , spacing_open: 6 // space between pane and adjacent panes - when pane is 'open'
+ , spacing_closed: 6 // ditto - when pane is 'closed'
+ , togglerLength_open: 50 // Length = WIDTH of toggler button on north/south sides - HEIGHT on east/west sides
+ , togglerLength_closed: 50 // 100% OR -1 means 'full height/width of resizer bar' - 0 means 'hidden'
+ , togglerAlign_open: "center" // top/left, bottom/right, center, OR...
+ , togglerAlign_closed: "center" // 1 => nn = offset from top/left, -1 => -nn == offset from bottom/right
+ , togglerTip_open: lang.Close // Toggler tool-tip (title)
+ , togglerTip_closed: lang.Open // ditto
+ , togglerContent_open: "" // text or HTML to put INSIDE the toggler
+ , togglerContent_closed: "" // ditto
+ // RESIZING OPTIONS
+ , resizerDblClickToggle: true //
+ , autoResize: true // IF size is 'auto' or a percentage, then recalc 'pixel size' whenever the layout resizes
+ , autoReopen: true // IF a pane was auto-closed due to noRoom, reopen it when there is room? False = leave it closed
+ , resizerDragOpacity: 1 // option for ui.draggable
+ //, resizerCursor: "" // MUST be pane-specific - cursor when over resizer-bar
+ , maskContents: false // true = add DIV-mask over-or-inside this pane so can 'drag' over IFRAMES
+ , maskObjects: false // true = add IFRAME-mask over-or-inside this pane to cover objects/applets - content-mask will overlay this mask
+ , maskZindex: null // will override zIndexes.content_mask if specified - not applicable to iframe-panes
+ , resizingGrid: false // grid size that the resizers will snap-to during resizing, eg: [20,20]
+ , livePaneResizing: false // true = LIVE Resizing as resizer is dragged
+ , liveContentResizing: false // true = re-measure header/footer heights as resizer is dragged
+ , liveResizingTolerance: 1 // how many px change before pane resizes, to control performance
+ // TIPS & MESSAGES - also see lang object
+ , noRoomToOpenTip: lang.noRoomToOpenTip
+ , resizerTip: lang.Resize // Resizer tool-tip (title)
+ , sliderTip: lang.Slide // resizer-bar triggers 'sliding' when pane is closed
+ , sliderCursor: "pointer" // cursor when resizer-bar will trigger 'sliding'
+ , slideTrigger_open: "click" // click, dblclick, mouseenter
+ , slideTrigger_close: "mouseleave"// click, mouseleave
+ , slideDelay_open: 300 // applies only for mouseenter event - 0 = instant open
+ , slideDelay_close: 300 // applies only for mouseleave event (300ms is the minimum!)
+ , hideTogglerOnSlide: false // when pane is slid-open, should the toggler show?
+ , preventQuickSlideClose: $.layout.browser.webkit // Chrome triggers slideClosed as it is opening
+ , preventPrematureSlideClose: false // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
+ // HOT-KEYS & MISC
+ , showOverflowOnHover: false // will bind allowOverflow() utility to pane.onMouseOver
+ , enableCursorHotkey: true // enable 'cursor' hotkeys
+ //, customHotkey: "" // MUST be pane-specific - EITHER a charCode OR a character
+ , customHotkeyModifier: "SHIFT" // either 'SHIFT', 'CTRL' or 'CTRL+SHIFT' - NOT 'ALT'
+ // PANE ANIMATION
+ // NOTE: fxSss_open, fxSss_close & fxSss_size options (eg: fxName_open) are auto-generated if not passed
+ , fxName: "slide" // ('none' or blank), slide, drop, scale -- only relevant to 'open' & 'close', NOT 'size'
+ , fxSpeed: null // slow, normal, fast, 200, nnn - if passed, will OVERRIDE fxSettings.duration
+ , fxSettings: {} // can be passed, eg: { easing: "easeOutBounce", duration: 1500 }
+ , fxOpacityFix: true // tries to fix opacity in IE to restore anti-aliasing after animation
+ , animatePaneSizing: false // true = animate resizing after dragging resizer-bar OR sizePane() is called
+ /* NOTE: Action-specific FX options are auto-generated from the options above if not specifically set:
+ fxName_open: "slide" // 'Open' pane animation
+ fxName_close: "slide" // 'Close' pane animation
+ fxName_size: "slide" // 'Size' pane animation - when animatePaneSizing = true
+ fxSpeed_open: null
+ fxSpeed_close: null
+ fxSpeed_size: null
+ fxSettings_open: {}
+ fxSettings_close: {}
+ fxSettings_size: {}
+ */
+ // CHILD/NESTED LAYOUTS
+ , childOptions: null // Layout-options for nested/child layout - even {} is valid as options
+ , initChildLayout: true // true = child layout will be created as soon as _this_ layout completes initialization
+ , destroyChildLayout: true // true = destroy child-layout if this pane is destroyed
+ , resizeChildLayout: true // true = trigger child-layout.resizeAll() when this pane is resized
+ // PANE CALLBACKS
+ , triggerEventsOnLoad: false // true = trigger onopen OR onclose callbacks when layout initializes
+ , triggerEventsDuringLiveResize: true // true = trigger onresize callback REPEATEDLY if livePaneResizing==true
+ , onshow_start: null // CALLBACK when pane STARTS to Show - BEFORE onopen/onhide_start
+ , onshow_end: null // CALLBACK when pane ENDS being Shown - AFTER onopen/onhide_end
+ , onhide_start: null // CALLBACK when pane STARTS to Close - BEFORE onclose_start
+ , onhide_end: null // CALLBACK when pane ENDS being Closed - AFTER onclose_end
+ , onopen_start: null // CALLBACK when pane STARTS to Open
+ , onopen_end: null // CALLBACK when pane ENDS being Opened
+ , onclose_start: null // CALLBACK when pane STARTS to Close
+ , onclose_end: null // CALLBACK when pane ENDS being Closed
+ , onresize_start: null // CALLBACK when pane STARTS being Resized ***FOR ANY REASON***
+ , onresize_end: null // CALLBACK when pane ENDS being Resized ***FOR ANY REASON***
+ , onsizecontent_start: null // CALLBACK when sizing of content-element STARTS
+ , onsizecontent_end: null // CALLBACK when sizing of content-element ENDS
+ , onswap_start: null // CALLBACK when pane STARTS to Swap
+ , onswap_end: null // CALLBACK when pane ENDS being Swapped
+ , ondrag_start: null // CALLBACK when pane STARTS being ***MANUALLY*** Resized
+ , ondrag_end: null // CALLBACK when pane ENDS being ***MANUALLY*** Resized
+ }
+/*
+ * PANE-SPECIFIC SETTINGS
+ * - options listed below MUST be specified per-pane - they CANNOT be set under 'panes'
+ * - all options under the 'panes' key can also be set specifically for any pane
+ * - most options under the 'panes' key apply only to 'border-panes' - NOT the center-pane
+ */
+, north: {
+ paneSelector: ".ui-layout-north"
+ , size: "auto" // eg: "auto", "30%", .30, 200
+ , resizerCursor: "n-resize" // custom = url(myCursor.cur)
+ , customHotkey: "" // EITHER a charCode (43) OR a character ("o")
+ }
+, south: {
+ paneSelector: ".ui-layout-south"
+ , size: "auto"
+ , resizerCursor: "s-resize"
+ , customHotkey: ""
+ }
+, east: {
+ paneSelector: ".ui-layout-east"
+ , size: 200
+ , resizerCursor: "e-resize"
+ , customHotkey: ""
+ }
+, west: {
+ paneSelector: ".ui-layout-west"
+ , size: 200
+ , resizerCursor: "w-resize"
+ , customHotkey: ""
+ }
+, center: {
+ paneSelector: ".ui-layout-center"
+ , minWidth: 0
+ , minHeight: 0
+ }
+};
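+
+/* Illustrative usage (a sketch, not part of the plugin): any of the defaults above can be
+ * overridden per-layout when the widget is created, eg:
+ *
+ *   $("body").layout({
+ *       north: { size: 50, resizable: false }
+ *   ,   west:  { size: 250 }
+ *   });
+ *
+ * Flat keys like west__size are converted to this nested form by $.layout.transformData() below.
+ */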
+
+$.layout.optionsMap = {
+ // layout/global options - NOT pane-options
+ layout: ("stateManagement,effects,zIndexes,"
+ + "name,zIndex,scrollToBookmarkOnLoad,showErrorMessages,"
+ + "resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay,"
+ + "onresizeall,onresizeall_start,onresizeall_end,onload,onunload,autoBindCustomButtons").split(",")
+// borderPanes: [ ALL options that are NOT specified as 'layout' ]
+ // default.panes options that apply to the center-pane (most options apply _only_ to border-panes)
+, center: ("paneClass,contentSelector,contentIgnoreSelector,findNestedContent,applyDemoStyles,triggerEventsOnLoad,"
+ + "showOverflowOnHover,maskContents,maskObjects,liveContentResizing,"
+ + "childOptions,initChildLayout,resizeChildLayout,destroyChildLayout,"
+ + "onresize,onresize_start,onresize_end,onsizecontent,onsizecontent_start,onsizecontent_end").split(",")
+ // options that MUST be specifically set 'per-pane' - CANNOT set in the panes (defaults) key
+, noDefault: ("paneSelector,resizerCursor,customHotkey").split(",")
+};
+
+/**
+ * Processes the options passed in, converting flat-format data into subkey (JSON) format
+ * In flat-format, subkeys are _currently_ separated with 2 underscores, like north__optName
+ * Plugins may also call this method so they can transform their own data
+ *
+ * @param {!Object} hash Data/options passed by user - may be a single level or nested levels
+ * @return {Object} Returns the options hash converted to nested (subkey) format
+ */
+$.layout.transformData = function (hash) {
+ var json = { panes: {}, center: {} } // init return object
+ , data, branch, optKey, keys, key, val, i, c;
+
+ if (typeof hash !== "object") return json; // no options passed
+
+ // convert all 'flat-keys' to 'sub-key' format
+ for (optKey in hash) {
+ branch = json;
+ data = $.layout.optionsMap.layout;
+ val = hash[ optKey ];
+ keys = optKey.split("__"); // eg: west__size or north__fxSettings__duration
+ c = keys.length - 1;
+ // convert underscore-delimited to subkeys
+ for (i=0; i <= c; i++) {
+ key = keys[i];
+ if (i === c)
+ branch[key] = val;
+ else if (!branch[key])
+ branch[key] = {}; // create the subkey
+ // recurse to sub-key for next loop - if not done
+ branch = branch[key];
+ }
+ }
+
+ return json;
+}
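+
+/* Illustrative example of the flat-to-nested conversion above:
+ *   $.layout.transformData({ west__size: 250, north__fxSettings__duration: 300 })
+ * returns an object equivalent to:
+ *   { panes: {}, center: {}, west: { size: 250 }, north: { fxSettings: { duration: 300 } } }
+ */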
+
+// INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
+$.layout.backwardCompatibility = {
+ // data used by renameOldOptions()
+ map: {
+ // OLD Option Name: NEW Option Name
+ applyDefaultStyles: "applyDemoStyles"
+ , resizeNestedLayout: "resizeChildLayout"
+ , resizeWhileDragging: "livePaneResizing"
+ , resizeContentWhileDragging: "liveContentResizing"
+ , triggerEventsWhileDragging: "triggerEventsDuringLiveResize"
+ , maskIframesOnResize: "maskContents"
+ , useStateCookie: "stateManagement.enabled"
+ , "cookie.autoLoad": "stateManagement.autoLoad"
+ , "cookie.autoSave": "stateManagement.autoSave"
+ , "cookie.keys": "stateManagement.stateKeys"
+ , "cookie.name": "stateManagement.cookie.name"
+ , "cookie.domain": "stateManagement.cookie.domain"
+ , "cookie.path": "stateManagement.cookie.path"
+ , "cookie.expires": "stateManagement.cookie.expires"
+ , "cookie.secure": "stateManagement.cookie.secure"
+ }
+ /**
+ * @param {Object} opts
+ */
+, renameOptions: function (opts) {
+ var map = $.layout.backwardCompatibility.map
+ , oldData, newData, value
+ ;
+ for (var itemPath in map) {
+ oldData = getBranch( itemPath );
+ value = oldData.branch[ oldData.key ]
+ if (value !== undefined) {
+ newData = getBranch( map[itemPath], true )
+ newData.branch[ newData.key ] = value;
+ delete oldData.branch[ oldData.key ];
+ }
+ }
+
+ /**
+ * @param {string} path
+ * @param {boolean=} [create=false] Create path if does not exist
+ */
+ function getBranch (path, create) {
+ var a = path.split(".") // split keys into array
+ , c = a.length - 1
+ , D = { branch: opts, key: a[c] } // init branch at top & set key (last item)
+ , i = 0, k, undef;
+ for (; i<c; i++) { // skip the last key (data)
+ k = a[i];
+ if (D.branch[ k ] == undefined) { // child-key does not exist
+ if (create) {
+ D.branch = D.branch[ k ] = {}; // create child-branch
+ }
+ else // can't go any farther
+ D.branch = {}; // branch is undefined
+ }
+ else
+ D.branch = D.branch[ k ]; // get child-branch
+ }
+ return D;
+ };
+ }
+ /**
+ * @param {Object} opts
+ */
+, renameAllOptions: function (opts) {
+ var ren = $.layout.backwardCompatibility.renameOptions;
+ // rename root (layout) options
+ ren( opts );
+ // rename 'defaults' to 'panes'
+ if (opts.defaults) {
+ if (typeof opts.panes !== "object")
+ opts.panes = {};
+ $.extend(true, opts.panes, opts.defaults);
+ delete opts.defaults;
+ }
+ // rename options in the options.panes key
+ if (opts.panes) ren( opts.panes );
+ // rename options inside *each pane key*, eg: options.west
+ $.each($.layout.config.allPanes, function (i, pane) {
+ if (opts[pane]) ren( opts[pane] );
+ });
+ return opts;
+ }
+};
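+
+/* Illustrative example of the renaming above:
+ *   $.layout.backwardCompatibility.renameAllOptions({
+ *       applyDefaultStyles: true
+ *   ,   defaults: { resizeWhileDragging: true }
+ *   })
+ * returns an object equivalent to:
+ *   { applyDemoStyles: true, panes: { livePaneResizing: true } }
+ */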
+
+
+
+/* ============================================================
+ * BEGIN WIDGET: $( selector ).layout( {options} );
+ * ============================================================
+ */
+$.fn.layout = function (opts) {
+ var
+
+ // local aliases to global data
+ browser = $.layout.browser
+, lang = $.layout.language // internal alias
+, _c = $.layout.config
+
+ // local aliases to utility methods
+, cssW = $.layout.cssWidth
+, cssH = $.layout.cssHeight
+, elDims = $.layout.getElementDimensions
+, elCSS = $.layout.getElementCSS
+, evtObj = $.layout.getEventObject
+, evtPane = $.layout.parsePaneName
+
+/**
+ * options - populated by initOptions()
+ */
+, options = $.extend(true, {}, $.layout.defaults)
+, effects = options.effects = $.extend(true, {}, $.layout.effects)
+
+/**
+ * layout-state object
+ */
+, state = {
+ // generate unique ID to use for event.namespace so can unbind only events added by 'this layout'
+ id: "layout"+ $.now() // code uses alias: sID
+ , initialized: false
+ , container: {} // init all keys
+ , north: {}
+ , south: {}
+ , east: {}
+ , west: {}
+ , center: {}
+ }
+
+/**
+ * parent/child-layout pointers
+ */
+//, hasParentLayout = false - exists ONLY inside Instance so can be set externally
+, children = {
+ north: null
+ , south: null
+ , east: null
+ , west: null
+ , center: null
+ }
+
+/*
+ * ###########################
+ * INTERNAL HELPER FUNCTIONS
+ * ###########################
+ */
+
+ /**
+ * Manages all internal timers
+ */
+, timer = {
+ data: {}
+ , set: function (s, fn, ms) { timer.clear(s); timer.data[s] = setTimeout(fn, ms); }
+ , clear: function (s) { var t=timer.data; if (t[s]) {clearTimeout(t[s]); delete t[s];} }
+ }
+
+, _log = function (msg, popup) {
+ $.layout.msg( options.name +' / '+ msg, (popup && options.showErrorMessages) );
+ }
+
+ /**
+ * Executes a Callback function after a trigger event, like resize, open or close
+ *
+ * @param {?string} pane This is passed only so we can pass the 'pane object' to the callback
+ * @param {(string|function())} fn Accepts a function name, OR a comma-delimited array: [0]=function name, [1]=argument
+ */
+, _runCallbacks = function (evtName, pane, skipBoundEvents) {
+ var o = pane ? options[pane] : options
+ // names like onopen and onopen_end are interchangeable in options...
+ , long = evtName + (evtName.match(/_/) ? "" : "_end")
+ , short = long.match(/_end$/) ? long.substr(0, long.length - 4) : ""
+ , fn = o[long]
+ , retVal = "NC" // NC = No Callback
+ , args = []
+ ;
+ if (!fn && short)
+ fn = o[short];
+
+ // first trigger the callback set in the options
+ if (fn) {
+ //try {
+ // convert function name (string) to function object
+ if (isStr( fn )) {
+ if (fn.match(/,/)) {
+ // function name cannot contain a comma,
+ // so must be a function name AND a parameter to pass
+ args = fn.split(",")
+ , fn = eval(args[0]);
+ }
+ else // just the name of an external function?
+ fn = eval(fn);
+ }
+ // execute the callback, if exists
+ if ($.isFunction( fn )) {
+ if (args.length)
+ retVal = fn(args[1]); // pass the argument parsed from 'list'
+ else if (pane && $Ps[pane])
+ // pass data: pane-name, pane-element, pane-state, pane-options, and layout-name
+ retVal = fn( pane, $Ps[pane], state[pane], options[pane], options.name );
+ else // must be a layout/container callback - pass suitable info
+ retVal = fn( Instance, state, options, options.name );
+ }
+ //}
+ //catch (ex) {}
+ }
+
+ // trigger additional events bound directly to the pane
+ if (!skipBoundEvents && retVal !== false) {
+ if (pane) { // PANE events can be bound to each pane-elements
+ $Ps[pane].triggerHandler('layoutpane'+ long, [ pane, $Ps[pane], state[pane], options[pane], options.name ]);
+ if (short)
+ $Ps[pane].triggerHandler('layoutpane'+ short, [ pane, $Ps[pane], state[pane], options[pane], options.name ]);
+ }
+ else // LAYOUT events can be bound to the container-element
+ $N.triggerHandler('layout'+ long, [ pane, $Ps[pane], state[pane], options[pane], options.name ]);
+ }
+
+ // ALWAYS resizeChildLayout after a resize event - even during initialization
+ if (evtName === "onresize_end" || evtName === "onsizecontent_end")
+ resizeChildLayout(pane);
+
+ return retVal;
+ }
+
+
+ /**
+ * cure iframe display issues in IE & other browsers
+ */
+, _fixIframe = function (pane) {
+ if (browser.mozilla) return; // skip FireFox - it auto-refreshes iframes onShow
+ var $P = $Ps[pane];
+ // if the 'pane' is an iframe, do it
+ if (state[pane].tagName === "IFRAME")
+ $P.css(_c.hidden).css(_c.visible);
+ else // ditto for any iframes INSIDE the pane
+ $P.find('IFRAME').css(_c.hidden).css(_c.visible);
+ }
+
+ /**
+ * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
+ * @param {number=} outerSize (optional) Can pass a width, allowing calculations BEFORE element is resized
+ * @return {number} Returns the innerHeight/Width of el by subtracting padding and borders
+ */
+, cssSize = function (pane, outerSize) {
+ var fn = _c[pane].dir=="horz" ? cssH : cssW;
+ return fn($Ps[pane], outerSize);
+ }
+
+ /**
+ * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
+ * @return {Object} Returns hash of minWidth & minHeight
+ */
+, cssMinDims = function (pane) {
+ // minWidth/Height means CSS width/height = 1px
+ var $P = $Ps[pane]
+ , dir = _c[pane].dir
+ , d = {
+ minWidth: 1001 - cssW($P, 1000)
+ , minHeight: 1001 - cssH($P, 1000)
+ }
+ ;
+ if (dir === "horz") d.minSize = d.minHeight;
+ if (dir === "vert") d.minSize = d.minWidth;
+ return d;
+ }
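+
+ /* Worked example (illustrative numbers): if a pane's left+right borders and padding
+ * total 22px, then cssW($P, 1000) = 978 and minWidth = 1001 - 978 = 23 - ie, the
+ * smallest outer-width that still leaves a 1px CSS width.
+ */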
+
+ // TODO: see if these methods can be made more useful...
+ // TODO: *maybe* return cssW/H from these so caller can use this info
+
+ /**
+ * @param {(string|!Object)} el
+ * @param {number=} outerWidth
+ * @param {boolean=} [autoHide=false]
+ */
+, setOuterWidth = function (el, outerWidth, autoHide) {
+ var $E = el, w;
+ if (isStr(el)) $E = $Ps[el]; // west
+ else if (!el.jquery) $E = $(el);
+ w = cssW($E, outerWidth);
+ $E.css({ width: w });
+ if (w > 0) {
+ if (autoHide && $E.data('autoHidden') && $E.innerHeight() > 0) {
+ $E.show().data('autoHidden', false);
+ if (!browser.mozilla) // FireFox refreshes iframes - IE does not
+ // make hidden, then visible to 'refresh' display after animation
+ $E.css(_c.hidden).css(_c.visible);
+ }
+ }
+ else if (autoHide && !$E.data('autoHidden'))
+ $E.hide().data('autoHidden', true);
+ }
+
+ /**
+ * @param {(string|!Object)} el
+ * @param {number=} outerHeight
+ * @param {boolean=} [autoHide=false]
+ */
+, setOuterHeight = function (el, outerHeight, autoHide) {
+ var $E = el, h;
+ if (isStr(el)) $E = $Ps[el]; // west
+ else if (!el.jquery) $E = $(el);
+ h = cssH($E, outerHeight);
+ $E.css({ height: h, visibility: "visible" }); // may have been 'hidden' by sizeContent
+ if (h > 0 && $E.innerWidth() > 0) {
+ if (autoHide && $E.data('autoHidden')) {
+ $E.show().data('autoHidden', false);
+ if (!browser.mozilla) // FireFox refreshes iframes - IE does not
+ $E.css(_c.hidden).css(_c.visible);
+ }
+ }
+ else if (autoHide && !$E.data('autoHidden'))
+ $E.hide().data('autoHidden', true);
+ }
+
+ /**
+ * @param {(string|!Object)} el
+ * @param {number=} outerSize
+ * @param {boolean=} [autoHide=false]
+ */
+, setOuterSize = function (el, outerSize, autoHide) {
+ if (_c[pane].dir=="horz") // pane = north or south
+ setOuterHeight(el, outerSize, autoHide);
+ else // pane = east or west
+ setOuterWidth(el, outerSize, autoHide);
+ }
+
+
+ /**
+ * Converts any 'size' params to a pixel/integer size, if not already
+ * If 'auto' or a decimal/percentage is passed as 'size', a pixel-size is calculated
+ *
+ * @param {string} pane
+ * @param {(string|number)=} size
+ * @param {string=} [dir]
+ * @return {number}
+ */
+, _parseSize = function (pane, size, dir) {
+ if (!dir) dir = _c[pane].dir;
+
+ if (isStr(size) && size.match(/%/))
+ size = (size === '100%') ? -1 : parseInt(size, 10) / 100; // convert % to decimal
+
+ if (size === 0)
+ return 0;
+ else if (size >= 1)
+ return parseInt(size, 10);
+
+ var o = options, avail = 0;
+ if (dir=="horz") // north or south or center.minHeight
+ avail = sC.innerHeight - ($Ps.north ? o.north.spacing_open : 0) - ($Ps.south ? o.south.spacing_open : 0);
+ else if (dir=="vert") // east or west or center.minWidth
+ avail = sC.innerWidth - ($Ps.west ? o.west.spacing_open : 0) - ($Ps.east ? o.east.spacing_open : 0);
+
+ if (size === -1) // -1 == 100%
+ return avail;
+ else if (size > 0) // percentage, eg: .25
+ return round(avail * size);
+ else if (pane=="center")
+ return 0;
+ else { // size < 0 || size=='auto' || size==Missing || size==Invalid
+ // auto-size the pane
+ var dim = (dir === "horz" ? "height" : "width")
+ , $P = $Ps[pane]
+ , $C = dim === 'height' ? $Cs[pane] : false
+ , vis = $.layout.showInvisibly($P) // show pane invisibly if hidden
+ , szP = $P.css(dim) // SAVE current pane size
+ , szC = $C ? $C.css(dim) : 0 // SAVE current content size
+ ;
+ $P.css(dim, "auto");
+ if ($C) $C.css(dim, "auto");
+ size = (dim === "height") ? $P.outerHeight() : $P.outerWidth(); // MEASURE
+ $P.css(dim, szP).css(vis); // RESET size & visibility
+ if ($C) $C.css(dim, szC);
+ return size;
+ }
+ }
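+
+ /* Worked example (illustrative numbers): for a west pane with sC.innerWidth = 1000 and
+ * both east and west panes open with spacing_open = 6, avail = 1000 - 6 - 6 = 988, so:
+ *   _parseSize("west", "25%")  =>  round(988 * 0.25) = 247
+ *   _parseSize("west", 250)    =>  250    (integer pixel-sizes pass straight through)
+ *   _parseSize("west", "auto") =>  the pane's measured outerWidth
+ */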
+
+ /**
+ * Calculates current 'size' (outer-width or outer-height) of a border-pane - optionally with 'pane-spacing' added
+ *
+ * @param {(string|!Object)} pane
+ * @param {boolean=} [inclSpace=false]
+ * @return {number} Returns EITHER Width for east/west panes OR Height for north/south panes - adjusted for boxModel & browser
+ */
+, getPaneSize = function (pane, inclSpace) {
+ var
+ $P = $Ps[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , oSp = (inclSpace ? o.spacing_open : 0)
+ , cSp = (inclSpace ? o.spacing_closed : 0)
+ ;
+ if (!$P || s.isHidden)
+ return 0;
+ else if (s.isClosed || (s.isSliding && inclSpace))
+ return cSp;
+ else if (_c[pane].dir === "horz")
+ return $P.outerHeight() + oSp;
+ else // dir === "vert"
+ return $P.outerWidth() + oSp;
+ }
+
+ /**
+ * Calculate min/max pane dimensions and limits for resizing
+ *
+ * @param {string} pane
+ * @param {boolean=} [slide=false]
+ */
+, setSizeLimits = function (pane, slide) {
+ if (!isInitialized()) return;
+ var
+ o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , dir = c.dir
+ , side = c.side.toLowerCase()
+ , type = c.sizeType.toLowerCase()
+ , isSliding = (slide != undefined ? slide : s.isSliding) // only open() passes 'slide' param
+ , $P = $Ps[pane]
+ , paneSpacing = o.spacing_open
+ // measure the pane on the *opposite side* from this pane
+ , altPane = _c.oppositeEdge[pane]
+ , altS = state[altPane]
+ , $altP = $Ps[altPane]
+ , altPaneSize = (!$altP || altS.isVisible===false || altS.isSliding ? 0 : (dir=="horz" ? $altP.outerHeight() : $altP.outerWidth()))
+ , altPaneSpacing = ((!$altP || altS.isHidden ? 0 : options[altPane][ altS.isClosed !== false ? "spacing_closed" : "spacing_open" ]) || 0)
+ // limitSize prevents this pane from 'overlapping' opposite pane
+ , containerSize = (dir=="horz" ? sC.innerHeight : sC.innerWidth)
+ , minCenterDims = cssMinDims("center")
+ , minCenterSize = dir=="horz" ? max(options.center.minHeight, minCenterDims.minHeight) : max(options.center.minWidth, minCenterDims.minWidth)
+ // if pane is 'sliding', then ignore center and alt-pane sizes - because it 'overlays' them
+ , limitSize = (containerSize - paneSpacing - (isSliding ? 0 : (_parseSize("center", minCenterSize, dir) + altPaneSize + altPaneSpacing)))
+ , minSize = s.minSize = max( _parseSize(pane, o.minSize), cssMinDims(pane).minSize )
+ , maxSize = s.maxSize = min( (o.maxSize ? _parseSize(pane, o.maxSize) : 100000), limitSize )
+ , r = s.resizerPosition = {} // used to set resizing limits
+ , top = sC.insetTop
+ , left = sC.insetLeft
+ , W = sC.innerWidth
+ , H = sC.innerHeight
+ , rW = o.spacing_open // subtract resizer-width to get top/left position for south/east
+ ;
+ switch (pane) {
+ case "north": r.min = top + minSize;
+ r.max = top + maxSize;
+ break;
+ case "west": r.min = left + minSize;
+ r.max = left + maxSize;
+ break;
+ case "south": r.min = top + H - maxSize - rW;
+ r.max = top + H - minSize - rW;
+ break;
+ case "east": r.min = left + W - maxSize - rW;
+ r.max = left + W - minSize - rW;
+ break;
+ };
+ }
+
+ /**
+ * Returns data for setting the size/position of center pane. Also used to set Height for east/west panes
+ *
+ * @return JSON Returns a hash of all dimensions: top, bottom, left, right, (outer) width and (outer) height
+ */
+, calcNewCenterPaneDims = function () {
+ var d = {
+ top: getPaneSize("north", true) // true = include 'spacing' value for pane
+ , bottom: getPaneSize("south", true)
+ , left: getPaneSize("west", true)
+ , right: getPaneSize("east", true)
+ , width: 0
+ , height: 0
+ };
+
+ // NOTE: sC = state.container
+ // calc center-pane outer dimensions
+ d.width = sC.innerWidth - d.left - d.right; // outerWidth
+ d.height = sC.innerHeight - d.bottom - d.top; // outerHeight
+ // add the 'container border/padding' to get final positions relative to the container
+ d.top += sC.insetTop;
+ d.bottom += sC.insetBottom;
+ d.left += sC.insetLeft;
+ d.right += sC.insetRight;
+
+ return d;
+ }
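+
+ /* Worked example (illustrative numbers): with sC.innerWidth = 1000, sC.innerHeight = 600,
+ * an open 50px north pane (+6px spacing), an open 200px west pane (+6px spacing),
+ * and no south/east panes:
+ *   top = 56, bottom = 0, left = 206, right = 0
+ *   width  = 1000 - 206 - 0 = 794   // center outerWidth
+ *   height = 600  - 0 - 56  = 544   // center outerHeight
+ * The container insets are then added to the four position values.
+ */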
+
+
+ /**
+ * @param {!Object} el
+ * @param {boolean=} [allStates=false]
+ */
+, getHoverClasses = function (el, allStates) {
+ var
+ $El = $(el)
+ , type = $El.data("layoutRole")
+ , pane = $El.data("layoutEdge")
+ , o = options[pane]
+ , root = o[type +"Class"]
+ , _pane = "-"+ pane // eg: "-west"
+ , _open = "-open"
+ , _closed = "-closed"
+ , _slide = "-sliding"
+ , _hover = "-hover " // NOTE the trailing space
+ , _state = $El.hasClass(root+_closed) ? _closed : _open
+ , _alt = _state === _closed ? _open : _closed
+ , classes = (root+_hover) + (root+_pane+_hover) + (root+_state+_hover) + (root+_pane+_state+_hover)
+ ;
+ if (allStates) // when 'removing' classes, also remove alternate-state classes
+ classes += (root+_alt+_hover) + (root+_pane+_alt+_hover);
+
+ if (type=="resizer" && $El.hasClass(root+_slide))
+ classes += (root+_slide+_hover) + (root+_pane+_slide+_hover);
+
+ return $.trim(classes);
+ }
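+ /* Illustrative example: for an *open* west resizer using the default resizerClass,
+ * getHoverClasses() returns:
+ *   "ui-layout-resizer-hover ui-layout-resizer-west-hover ui-layout-resizer-open-hover ui-layout-resizer-west-open-hover"
+ */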
+, addHover = function (evt, el) {
+ var $E = $(el || this);
+ if (evt && $E.data("layoutRole") === "toggler")
+ evt.stopPropagation(); // prevent triggering 'slide' on Resizer-bar
+ $E.addClass( getHoverClasses($E) );
+ }
+, removeHover = function (evt, el) {
+ var $E = $(el || this);
+ $E.removeClass( getHoverClasses($E, true) );
+ }
+
+, onResizerEnter = function (evt) { // ALSO called by toggler.mouseenter
+ if ($.fn.disableSelection)
+ $("body").disableSelection();
+ }
+, onResizerLeave = function (evt, el) {
+ var
+ e = el || this // el is only passed when called by the timer
+ , pane = $(e).data("layoutEdge")
+ , name = pane +"ResizerLeave"
+ ;
+ timer.clear(pane+"_openSlider"); // cancel slideOpen timer, if set
+ timer.clear(name); // cancel enableSelection timer - may re/set below
+ // this method calls itself on a timer because it needs to allow
+ // enough time for dragging to kick-in and set the isResizing flag
+ // dragging has a 100ms delay set, so this delay must be >100
+ if (!el) // 1st call - mouseleave event
+ timer.set(name, function(){ onResizerLeave(evt, e); }, 200);
+ // if user is resizing, then dragStop will enableSelection(), so can skip it here
+ else if (!state[pane].isResizing && $.fn.enableSelection) // 2nd call - by timer
+ $("body").enableSelection();
+ }
+
+/*
+ * ###########################
+ * INITIALIZATION METHODS
+ * ###########################
+ */
+
+ /**
+ * Initialize the layout - called automatically whenever an instance of layout is created
+ *
+ * @see none - triggered onInit
+ * @return mixed true = fully initialized | false = panes not initialized (yet) | 'cancel' = abort
+ */
+, _create = function () {
+ // initialize config/options
+ initOptions();
+ var o = options;
+
+ // TEMP state so isInitialized returns true during init process
+ state.creatingLayout = true;
+
+ // init plugins for this layout, if there are any (eg: stateManagement)
+ runPluginCallbacks( Instance, $.layout.onCreate );
+
+ // options & state have been initialized, so now run beforeLoad callback
+ // onload will CANCEL layout creation if it returns false
+ if (false === _runCallbacks("onload_start"))
+ return 'cancel';
+
+ // initialize the container element
+ _initContainer();
+
+ // bind hotkey function - keyDown - if required
+ initHotkeys();
+
+ // bind window.onunload
+ $(window).bind("unload."+ sID, unload);
+
+ // init plugins for this layout, if there are any (eg: customButtons)
+ runPluginCallbacks( Instance, $.layout.onLoad );
+
+ // if layout elements are hidden, then layout WILL NOT complete initialization!
+ // initLayoutElements will set initialized=true and run the onload callback IF successful
+ if (o.initPanes) _initLayoutElements();
+
+ delete state.creatingLayout;
+
+ return state.initialized;
+ }
+
+ /**
+ * Initialize the layout IF not already
+ *
+ * @see All methods in Instance run this test
+ * @return boolean true = layoutElements have been initialized | false = panes are not initialized (yet)
+ */
+, isInitialized = function () {
+ if (state.initialized || state.creatingLayout) return true; // already initialized
+ else return _initLayoutElements(); // try to init panes NOW
+ }
+
+ /**
+ * Initialize the layout elements (panes, handles, etc) - called by _create() and isInitialized()
+ *
+ * @see _create() & isInitialized
+ * @return {boolean} true = layout elements initialized successfully | false = could not initialize (yet)
+ */
+, _initLayoutElements = function (retry) {
+ // initialize config/options
+ var o = options;
+
+ // CANNOT init panes inside a hidden container!
+ if (!$N.is(":visible")) {
+ // handle Chrome bug where popup window 'has no height'
+ // if layout is BODY element, try again in 50ms
+ // SEE: http://layout.jquery-dev.net/samples/test_popup_window.html
+ if ( !retry && browser.webkit && $N[0].tagName === "BODY" )
+ setTimeout(function(){ _initLayoutElements(true); }, 50);
+ return false;
+ }
+
+ // a center pane is required, so make sure it exists
+ if (!getPane("center").length) {
+ if (options.showErrorMessages)
+ _log( lang.errCenterPaneMissing, true );
+ return false;
+ }
+
+ // TEMP state so isInitialized returns true during init process
+ state.creatingLayout = true;
+
+ // update Container dims
+ $.extend(sC, elDims( $N ));
+
+ // initialize all layout elements
+ initPanes(); // size & position panes - calls initHandles() - which calls initResizable()
+
+ if (o.scrollToBookmarkOnLoad) {
+ var l = self.location;
+ if (l.hash) l.replace( l.hash ); // scrollTo Bookmark
+ }
+
+ // check to see if this layout is 'nested' inside a pane
+ if (Instance.hasParentLayout)
+ o.resizeWithWindow = false;
+ // bind resizeAll() for 'this layout instance' to window.resize event
+ else if (o.resizeWithWindow)
+ $(window).bind("resize."+ sID, windowResize);
+
+ delete state.creatingLayout;
+ state.initialized = true;
+
+ // init plugins for this layout, if there are any
+ runPluginCallbacks( Instance, $.layout.onReady );
+
+ // now run the onload callback, if exists
+ _runCallbacks("onload_end");
+
+ return true; // elements initialized successfully
+ }
+
+ /**
+ * Initialize nested layouts - called when _initLayoutElements completes
+ *
+ * NOT CURRENTLY USED
+ *
+ * @see _initLayoutElements
+ * @return An object pointer to the instance created
+ */
+, _initChildLayouts = function () {
+ $.each(_c.allPanes, function (idx, pane) {
+ if (options[pane].initChildLayout)
+ createChildLayout( pane );
+ });
+ }
+
+ /**
+ * Initialize nested layouts for a specific pane - can optionally pass layout-options
+ *
+ * @see _initChildLayouts
+ * @param {string} pane The pane being opened, ie: north, south, east, or west
+ * @param {Object=} [opts] Layout-options - if passed, will OVERRIDE options[pane].childOptions
+ * @return undefined - a pointer to the child-layout instance (or null) is stored in Instance[pane].child
+ */
+, createChildLayout = function (evt_or_pane, opts) {
+ var pane = evtPane.call(this, evt_or_pane)
+ , $P = $Ps[pane]
+ , C = children
+ ;
+ if ($P) {
+ var $C = $Cs[pane]
+ , o = opts || options[pane].childOptions
+ , d = "layout"
+ // determine which element is supposed to be the 'child container'
+ // if pane has a 'containerSelector' OR a 'content-div', use those instead of the pane
+ , $Cont = o.containerSelector ? $P.find( o.containerSelector ) : ($C || $P)
+ , containerFound = $Cont.length
+ // see if a child-layout ALREADY exists on this element
+ , child = containerFound ? (C[pane] = $Cont.data(d) || null) : null
+ ;
+ // if no layout exists, but childOptions are set, try to create the layout now
+ if (!child && containerFound && o)
+ child = C[pane] = $Cont.eq(0).layout(o) || null;
+ if (child)
+ child.hasParentLayout = true; // set parent-flag in child
+ }
+ Instance[pane].child = C[pane]; // ALWAYS set pane-object pointer, even if null
+ }
+
+, windowResize = function () {
+ var delay = Number(options.resizeWithWindowDelay);
+ if (delay < 10) delay = 100; // MUST have a delay!
+ // resizing uses a delay-loop because the resize event fires repeatedly - except in FF, but delay anyway
+ timer.clear("winResize"); // if already running
+ timer.set("winResize", function(){
+ timer.clear("winResize");
+ timer.clear("winResizeRepeater");
+ var dims = elDims( $N );
+ // only trigger resizeAll() if container has changed size
+ if (dims.innerWidth !== sC.innerWidth || dims.innerHeight !== sC.innerHeight)
+ resizeAll();
+ }, delay);
+ // ALSO set fixed-delay timer, if not already running
+ if (!timer.data["winResizeRepeater"]) setWindowResizeRepeater();
+ }
+
+, setWindowResizeRepeater = function () {
+ var delay = Number(options.resizeWithWindowMaxDelay);
+ if (delay > 0)
+ timer.set("winResizeRepeater", function(){ setWindowResizeRepeater(); resizeAll(); }, delay);
+ }
+
+, unload = function () {
+ var o = options;
+
+ _runCallbacks("onunload_start");
+
+ // trigger plugin callbacks for this layout (eg: stateManagement)
+ runPluginCallbacks( Instance, $.layout.onUnload );
+
+ _runCallbacks("onunload_end");
+ }
+
+ /**
+ * Validate and initialize container CSS and events
+ *
+ * @see _create()
+ */
+, _initContainer = function () {
+ var
+ N = $N[0]
+ , tag = sC.tagName = N.tagName
+ , id = sC.id = N.id
+ , cls = sC.className = N.className
+ , o = options
+ , name = o.name
+ , fullPage= (tag === "BODY")
+ , props = "overflow,position,margin,padding,border"
+ , css = "layoutCSS"
+ , CSS = {}
+ , hid = "hidden" // used A LOT!
+ // see if this container is a 'pane' inside an outer-layout
+ , parent = $N.data("parentLayout") // parent-layout Instance
+ , pane = $N.data("layoutEdge") // pane-name in parent-layout
+ , isChild = parent && pane
+ ;
+ // sC -> state.container
+ sC.selector = $N.selector.split(".slice")[0];
+ sC.ref = (o.name ? o.name +' layout / ' : '') + tag + (id ? "#"+id : cls ? '.['+cls+']' : ''); // used in messages
+
+ $N .data({
+ layout: Instance
+ , layoutContainer: sID // FLAG to indicate this is a layout-container - contains unique internal ID
+ })
+ .addClass(o.containerClass)
+ ;
+ var layoutMethods = {
+ destroy: ''
+ , initPanes: ''
+ , resizeAll: 'resizeAll'
+ , resize: 'resizeAll'
+ }
+ , name;
+ // loop hash and bind all methods - include layoutID namespacing
+ for (name in layoutMethods) {
+ $N.bind("layout"+ name.toLowerCase() +"."+ sID, Instance[ layoutMethods[name] || name ]);
+ }
+
+ // if this container is another layout's 'pane', then set child/parent pointers
+ if (isChild) {
+ // update parent flag
+ Instance.hasParentLayout = true;
+ // set pointers to THIS child-layout (Instance) in parent-layout
+ // NOTE: parent.PANE.child is an ALIAS to parent.children.PANE
+ parent[pane].child = parent.children[pane] = $N.data("layout");
+ }
+
+ // SAVE original container CSS for use in destroy()
+ if (!$N.data(css)) {
+			// handle props like overflow differently for BODY & HTML - they have 'system default' values
+ if (fullPage) {
+ CSS = $.extend( elCSS($N, props), {
+ height: $N.css("height")
+ , overflow: $N.css("overflow")
+ , overflowX: $N.css("overflowX")
+ , overflowY: $N.css("overflowY")
+ });
+ // ALSO SAVE <HTML> CSS
+ var $H = $("html");
+ $H.data(css, {
+ height: "auto" // FF would return a fixed px-size!
+ , overflow: $H.css("overflow")
+ , overflowX: $H.css("overflowX")
+ , overflowY: $H.css("overflowY")
+ });
+ }
+ else // handle props normally for non-body elements
+ CSS = elCSS($N, props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY");
+
+ $N.data(css, CSS);
+ }
+
+ try { // format html/body if this is a full page layout
+ if (fullPage) {
+ $("html").css({
+ height: "100%"
+ , overflow: hid
+ , overflowX: hid
+ , overflowY: hid
+ });
+ $("body").css({
+ position: "relative"
+ , height: "100%"
+ , overflow: hid
+ , overflowX: hid
+ , overflowY: hid
+ , margin: 0
+ , padding: 0 // TODO: test whether body-padding could be handled?
+ , border: "none" // a body-border creates problems because it cannot be measured!
+ });
+
+ // set current layout-container dimensions
+ $.extend(sC, elDims( $N ));
+ }
+ else { // set required CSS for overflow and position
+ // ENSURE container will not 'scroll'
+				CSS = { overflow: hid, overflowX: hid, overflowY: hid };
+ var
+ p = $N.css("position")
+ , h = $N.css("height")
+ ;
+ // if this is a NESTED layout, then container/outer-pane ALREADY has position and height
+ if (!isChild) {
+ if (!p || !p.match(/fixed|absolute|relative/))
+ CSS.position = "relative"; // container MUST have a 'position'
+ /*
+ if (!h || h=="auto")
+ CSS.height = "100%"; // container MUST have a 'height'
+ */
+ }
+ $N.css( CSS );
+
+ // set current layout-container dimensions
+ if ( $N.is(":visible") ) {
+ $.extend(sC, elDims( $N ));
+ if (o.showErrorMessages && sC.innerHeight < 1)
+ _log( lang.errContainerHeight.replace(/CONTAINER/, sC.ref), true );
+ }
+ }
+ } catch (ex) {}
+ }
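+
+	/* The layoutMethods hash above also exposes these methods as namespaced events on the
+	 * layout-container, so they can be triggered without a reference to the Instance.
+	 * A sketch, assuming the layout was created on <body>:
+	 *
+	 *	$("body").trigger("layoutresizeall");	// -> Instance.resizeAll
+	 *	$("body").trigger("layoutresize");	// alias for resizeAll (see hash above)
+	 */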
+
+ /**
+ * Bind layout hotkeys - if options enabled
+ *
+ * @see _create() and addPane()
+ * @param {string=} [panes=""] The edge(s) to process
+ */
+, initHotkeys = function (panes) {
+ panes = panes ? panes.split(",") : _c.borderPanes;
+ // bind keyDown to capture hotkeys, if option enabled for ANY pane
+ $.each(panes, function (i, pane) {
+ var o = options[pane];
+ if (o.enableCursorHotkey || o.customHotkey) {
+ $(document).bind("keydown."+ sID, keyDown); // only need to bind this ONCE
+ return false; // BREAK - binding was done
+ }
+ });
+ }
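+
+	/* Hotkeys are opt-in per border-pane; keyDown is bound only once, and only if at
+	 * least one pane enables it. Illustrative config using the options checked above:
+	 *
+	 *	$("body").layout({ west: { enableCursorHotkey: true }, east: { customHotkey: "E" } });
+	 */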
+
+ /**
+ * Build final OPTIONS data
+ *
+ * @see _create()
+ */
+, initOptions = function () {
+ var data, d, pane, key, val, i, c, o;
+
+ // reprocess user's layout-options to have correct options sub-key structure
+ opts = $.layout.transformData( opts ); // panes = default subkey
+
+ // auto-rename old options for backward compatibility
+ opts = $.layout.backwardCompatibility.renameAllOptions( opts );
+
+ // if user-options has 'panes' key (pane-defaults), process it...
+ if (!$.isEmptyObject(opts.panes)) {
+ // REMOVE any pane-defaults that MUST be set per-pane
+ data = $.layout.optionsMap.noDefault;
+ for (i=0, c=data.length; i<c; i++) {
+ key = data[i];
+ delete opts.panes[key]; // OK if does not exist
+ }
+ // REMOVE any layout-options specified under opts.panes
+ data = $.layout.optionsMap.layout;
+ for (i=0, c=data.length; i<c; i++) {
+ key = data[i];
+ delete opts.panes[key]; // OK if does not exist
+ }
+ }
+
+ // MOVE any NON-layout-options to opts.panes
+ data = $.layout.optionsMap.layout;
+ var rootKeys = $.layout.config.optionRootKeys;
+ for (key in opts) {
+ val = opts[key];
+ if ($.inArray(key, rootKeys) < 0 && $.inArray(key, data) < 0) {
+ if (!opts.panes[key])
+ opts.panes[key] = $.isPlainObject(val) ? $.extend(true, {}, val) : val;
+ delete opts[key]
+ }
+ }
+
+ // START by updating ALL options from opts
+ $.extend(true, options, opts);
+
+ // CREATE final options (and config) for EACH pane
+ $.each(_c.allPanes, function (i, pane) {
+
+ // apply 'pane-defaults' to CONFIG.[PANE]
+ _c[pane] = $.extend( true, {}, _c.panes, _c[pane] );
+
+ d = options.panes;
+ o = options[pane];
+
+ // center-pane uses SOME keys in defaults.panes branch
+ if (pane === 'center') {
+ // ONLY copy keys from opts.panes listed in: $.layout.optionsMap.center
+ data = $.layout.optionsMap.center; // list of 'center-pane keys'
+ for (i=0, c=data.length; i<c; i++) { // loop the list...
+ key = data[i];
+ // only need to use pane-default if pane-specific value not set
+ if (!opts.center[key] && (opts.panes[key] || !o[key]))
+ o[key] = d[key]; // pane-default
+ }
+ }
+ else {
+ // border-panes use ALL keys in defaults.panes branch
+ o = options[pane] = $.extend({}, d, o); // re-apply pane-specific opts AFTER pane-defaults
+ createFxOptions( pane );
+ // ensure all border-pane-specific base-classes exist
+ if (!o.resizerClass) o.resizerClass = "ui-layout-resizer";
+ if (!o.togglerClass) o.togglerClass = "ui-layout-toggler";
+ }
+ // ensure we have base pane-class (ALL panes)
+ if (!o.paneClass) o.paneClass = "ui-layout-pane";
+ });
+
+ // update options.zIndexes if a zIndex-option specified
+ var zo = opts.zIndex
+ , z = options.zIndexes;
+ if (zo > 0) {
+ z.pane_normal = zo;
+ z.content_mask = max(zo+1, z.content_mask); // MIN = +1
+ z.resizer_normal = max(zo+2, z.resizer_normal); // MIN = +2
+ }
+
+ function createFxOptions ( pane ) {
+ var o = options[pane]
+ , d = options.panes;
+ // ensure fxSettings key to avoid errors
+ if (!o.fxSettings) o.fxSettings = {};
+ if (!d.fxSettings) d.fxSettings = {};
+
+ $.each(["_open","_close","_size"], function (i,n) {
+ var
+ sName = "fxName"+ n
+ , sSpeed = "fxSpeed"+ n
+ , sSettings = "fxSettings"+ n
+ // recalculate fxName according to specificity rules
+ , fxName = o[sName] =
+ o[sName] // options.west.fxName_open
+ || d[sName] // options.panes.fxName_open
+ || o.fxName // options.west.fxName
+ || d.fxName // options.panes.fxName
+ || "none" // MEANS $.layout.defaults.panes.fxName == "" || false || null || 0
+ ;
+ // validate fxName to ensure is valid effect - MUST have effect-config data in options.effects
+ if (fxName === "none" || !$.effects || !$.effects[fxName] || !options.effects[fxName])
+ fxName = o[sName] = "none"; // effect not loaded OR unrecognized fxName
+
+ // set vars for effects subkeys to simplify logic
+ var fx = options.effects[fxName] || {} // effects.slide
+ , fx_all = fx.all || null // effects.slide.all
+ , fx_pane = fx[pane] || null // effects.slide.west
+ ;
+ // create fxSpeed[_open|_close|_size]
+ o[sSpeed] =
+ o[sSpeed] // options.west.fxSpeed_open
+					||	d[sSpeed]	// options.panes.fxSpeed_open
+ || o.fxSpeed // options.west.fxSpeed
+ || d.fxSpeed // options.panes.fxSpeed
+ || null // DEFAULT - let fxSetting.duration control speed
+ ;
+ // create fxSettings[_open|_close|_size]
+ o[sSettings] = $.extend(
+ {}
+ , fx_all // effects.slide.all
+ , fx_pane // effects.slide.west
+ , d.fxSettings // options.panes.fxSettings
+ , o.fxSettings // options.west.fxSettings
+ , d[sSettings] // options.panes.fxSettings_open
+ , o[sSettings] // options.west.fxSettings_open
+ );
+ });
+
+ // DONE creating action-specific-settings for this pane,
+			// so DELETE the generic options - they are no longer meaningful
+ delete o.fxName;
+ delete o.fxSpeed;
+ delete o.fxSettings;
+ }
+
+ // DELETE 'panes' key now that we are done - values were copied to EACH pane
+ delete options.panes;
+ }
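+
+	/* Option layering, in the order applied above - illustrative values only:
+	 *
+	 *	$("body").layout({
+	 *		resizeWithWindowDelay: 200	// layout-option - stays at the root
+	 *	,	fxName: "slide"			// pane-option - moved into opts.panes (pane-defaults)
+	 *	,	west: { fxName: "none" }	// pane-specific - overrides the pane-default for west only
+	 *	});
+	 *	// result: west's fxName_open/_close/_size resolve to "none", the other border-panes
+	 *	// resolve to "slide" (if that effect is loaded) - the generic fxName keys are then deleted
+	 */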
+
+ /**
+	 * Find the DOM element for a pane, based on its paneSelector option
+ *
+ * @see _initElements()
+ * @param {string} pane The pane to process
+ */
+, getPane = function (pane) {
+ var sel = options[pane].paneSelector
+ if (sel.substr(0,1)==="#") // ID selector
+ // NOTE: elements selected 'by ID' DO NOT have to be 'children'
+ return $N.find(sel).eq(0);
+ else { // class or other selector
+ var $P = $N.children(sel).eq(0);
+ // look for the pane nested inside a 'form' element
+ return $P.length ? $P : $N.children("form:first").children(sel).eq(0);
+ }
+ }
+
+, initPanes = function () {
+ // NOTE: do north & south FIRST so we can measure their height - do center LAST
+ $.each(_c.allPanes, function (idx, pane) {
+ addPane( pane, true );
+ });
+
+ // init the pane-handles NOW in case we have to hide or close the pane below
+ initHandles();
+
+ // now that all panes have been initialized and initially-sized,
+ // make sure there is really enough space available for each pane
+ $.each(_c.borderPanes, function (i, pane) {
+ if ($Ps[pane] && state[pane].isVisible) { // pane is OPEN
+ setSizeLimits(pane);
+ makePaneFit(pane); // pane may be Closed, Hidden or Resized by makePaneFit()
+ }
+ });
+ // size center-pane AGAIN in case we 'closed' a border-pane in loop above
+ sizeMidPanes("center");
+
+ // Chrome/Webkit sometimes fires callbacks BEFORE it completes resizing!
+ // Before RC30.3, there was a 10ms delay here, but that caused layout
+		// to load asynchronously, which is BAD, so try skipping delay for now
+
+ // process pane contents and callbacks, and init/resize child-layout if exists
+ $.each(_c.allPanes, function (i, pane) {
+ var o = options[pane];
+ if ($Ps[pane]) {
+ if (state[pane].isVisible) { // pane is OPEN
+ sizeContent(pane);
+ // trigger pane.onResize if triggerEventsOnLoad = true
+ if (o.triggerEventsOnLoad)
+ _runCallbacks("onresize_end", pane);
+					else	// resizeChildLayout runs automatically when onresize fires, otherwise call it explicitly
+						// resize child - IF inner-layout already exists (created before this layout)
+						resizeChildLayout(pane);
+ }
+ // init childLayout - even if pane is not visible
+ if (o.initChildLayout && o.childOptions)
+ createChildLayout(pane);
+ }
+ });
+ }
+
+ /**
+ * Add a pane to the layout - subroutine of initPanes()
+ *
+ * @see initPanes()
+ * @param {string} pane The pane to process
+	 * @param {boolean=}	[force=false]	Add the pane even if the layout is not yet initialized
+ */
+, addPane = function (pane, force) {
+ if (!force && !isInitialized()) return;
+ var
+ o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , fx = s.fx
+ , dir = c.dir
+ , spacing = o.spacing_open || 0
+ , isCenter = (pane === "center")
+ , CSS = {}
+ , $P = $Ps[pane]
+ , size, minSize, maxSize
+ ;
+ // if pane-pointer already exists, remove the old one first
+ if ($P)
+ removePane( pane, false, true, false );
+ else
+ $Cs[pane] = false; // init
+
+ $P = $Ps[pane] = getPane(pane);
+ if (!$P.length) {
+ $Ps[pane] = false; // logic
+ return;
+ }
+
+ // SAVE original Pane CSS
+ if (!$P.data("layoutCSS")) {
+ var props = "position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border";
+ $P.data("layoutCSS", elCSS($P, props));
+ }
+
+ // create alias for pane data in Instance - initHandles will add more
+ Instance[pane] = { name: pane, pane: $Ps[pane], content: $Cs[pane], options: options[pane], state: state[pane], child: children[pane] };
+
+ // add classes, attributes & events
+ $P .data({
+ parentLayout: Instance // pointer to Layout Instance
+ , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
+ , layoutEdge: pane
+ , layoutRole: "pane"
+ })
+ .css(c.cssReq).css("zIndex", options.zIndexes.pane_normal)
+ .css(o.applyDemoStyles ? c.cssDemo : {}) // demo styles
+ .addClass( o.paneClass +" "+ o.paneClass+"-"+pane ) // default = "ui-layout-pane ui-layout-pane-west" - may be a dupe of 'paneSelector'
+ .bind("mouseenter."+ sID, addHover )
+ .bind("mouseleave."+ sID, removeHover )
+ ;
+ var paneMethods = {
+ hide: ''
+ , show: ''
+ , toggle: ''
+ , close: ''
+ , open: ''
+ , slideOpen: ''
+ , slideClose: ''
+ , slideToggle: ''
+ , size: 'manualSizePane'
+ , sizePane: 'manualSizePane'
+ , sizeContent: ''
+ , sizeHandles: ''
+ , enableClosable: ''
+ , disableClosable: ''
+ , enableSlideable: ''
+ , disableSlideable: ''
+ , enableResizable: ''
+ , disableResizable: ''
+ , swapPanes: 'swapPanes'
+ , swap: 'swapPanes'
+ , move: 'swapPanes'
+ , removePane: 'removePane'
+ , remove: 'removePane'
+ , createChildLayout: ''
+ , resizeChildLayout: ''
+ , resizeAll: 'resizeAll'
+ , resizeLayout: 'resizeAll'
+ }
+ , name;
+ // loop hash and bind all methods - include layoutID namespacing
+ for (name in paneMethods) {
+ $P.bind("layoutpane"+ name.toLowerCase() +"."+ sID, Instance[ paneMethods[name] || name ]);
+ }
+
+ // see if this pane has a 'scrolling-content element'
+ initContent(pane, false); // false = do NOT sizeContent() - called later
+
+ if (!isCenter) {
+ // call _parseSize AFTER applying pane classes & styles - but before making visible (if hidden)
+ // if o.size is auto or not valid, then MEASURE the pane and use that as its 'size'
+ size = s.size = _parseSize(pane, o.size);
+ minSize = _parseSize(pane,o.minSize) || 1;
+ maxSize = _parseSize(pane,o.maxSize) || 100000;
+ if (size > 0) size = max(min(size, maxSize), minSize);
+
+ // state for border-panes
+ s.isClosed = false; // true = pane is closed
+ s.isSliding = false; // true = pane is currently open by 'sliding' over adjacent panes
+ s.isResizing= false; // true = pane is in process of being resized
+ s.isHidden = false; // true = pane is hidden - no spacing, resizer or toggler is visible!
+
+ // array for 'pin buttons' whose classNames are auto-updated on pane-open/-close
+ if (!s.pins) s.pins = [];
+ }
+ // states common to ALL panes
+ s.tagName = $P[0].tagName;
+		s.edge		= pane;		// useful if pane is (or about to be) 'swapped' - easy to find out where it is (or is going)
+ s.noRoom = false; // true = pane 'automatically' hidden due to insufficient room - will unhide automatically
+ s.isVisible = true; // false = pane is invisible - closed OR hidden - simplify logic
+
+ // set css-position to account for container borders & padding
+ switch (pane) {
+ case "north": CSS.top = sC.insetTop;
+ CSS.left = sC.insetLeft;
+ CSS.right = sC.insetRight;
+ break;
+ case "south": CSS.bottom = sC.insetBottom;
+ CSS.left = sC.insetLeft;
+ CSS.right = sC.insetRight;
+ break;
+ case "west": CSS.left = sC.insetLeft; // top, bottom & height set by sizeMidPanes()
+ break;
+ case "east": CSS.right = sC.insetRight; // ditto
+ break;
+ case "center": // top, left, width & height set by sizeMidPanes()
+ }
+
+ if (dir === "horz") // north or south pane
+ CSS.height = cssH($P, size);
+ else if (dir === "vert") // east or west pane
+ CSS.width = cssW($P, size);
+ //else if (isCenter) {}
+
+ $P.css(CSS); // apply size -- top, bottom & height will be set by sizeMidPanes
+ if (dir != "horz") sizeMidPanes(pane, true); // true = skipCallback
+
+ // close or hide the pane if specified in settings
+ if (o.initClosed && o.closable && !o.initHidden)
+ close(pane, true, true); // true, true = force, noAnimation
+ else if (o.initHidden || o.initClosed)
+ hide(pane); // will be completely invisible - no resizer or spacing
+ else if (!s.noRoom)
+ // make the pane visible - in case was initially hidden
+ $P.css("display","block");
+ // ELSE setAsOpen() - called later by initHandles()
+
+ // RESET visibility now - pane will appear IF display:block
+ $P.css("visibility","visible");
+
+ // check option for auto-handling of pop-ups & drop-downs
+ if (o.showOverflowOnHover)
+ $P.hover( allowOverflow, resetOverflow );
+
+ // if manually adding a pane AFTER layout initialization, then...
+ if (state.initialized) {
+ initHandles( pane );
+ initHotkeys( pane );
+ resizeAll(); // will sizeContent if pane is visible
+ if (s.isVisible) { // pane is OPEN
+ if (o.triggerEventsOnLoad)
+ _runCallbacks("onresize_end", pane);
+				else	// resizeChildLayout runs automatically when onresize fires, otherwise call it explicitly
+					// resize child - IF inner-layout already exists (created before this layout)
+					resizeChildLayout(pane); // a previously existing childLayout
+ }
+ if (o.initChildLayout && o.childOptions)
+ createChildLayout(pane);
+ }
+ }
+
+ /**
+ * Initialize module objects, styling, size and position for all resize bars and toggler buttons
+ *
+ * @see _create()
+ * @param {string=} [panes=""] The edge(s) to process
+ */
+, initHandles = function (panes) {
+ panes = panes ? panes.split(",") : _c.borderPanes;
+
+ // create toggler DIVs for each pane, and set object pointers for them, eg: $R.north = north toggler DIV
+ $.each(panes, function (i, pane) {
+ var $P = $Ps[pane];
+ $Rs[pane] = false; // INIT
+ $Ts[pane] = false;
+ if (!$P) return; // pane does not exist - skip
+
+ var
+ o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , rClass = o.resizerClass
+ , tClass = o.togglerClass
+ , side = c.side.toLowerCase()
+ , spacing = (s.isVisible ? o.spacing_open : o.spacing_closed)
+ , _pane = "-"+ pane // used for classNames
+ , _state = (s.isVisible ? "-open" : "-closed") // used for classNames
+ , I = Instance[pane]
+ // INIT RESIZER BAR
+ , $R = I.resizer = $Rs[pane] = $("<div></div>")
+ // INIT TOGGLER BUTTON
+ , $T = I.toggler = (o.closable ? $Ts[pane] = $("<div></div>") : false)
+ ;
+
+ //if (s.isVisible && o.resizable) ... handled by initResizable
+ if (!s.isVisible && o.slidable)
+ $R.attr("title", o.sliderTip).css("cursor", o.sliderCursor);
+
+ $R // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "paneLeft-resizer"
+ .attr("id", (o.paneSelector.substr(0,1)=="#" ? o.paneSelector.substr(1) + "-resizer" : ""))
+ .data({
+ parentLayout: Instance
+ , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
+ , layoutEdge: pane
+ , layoutRole: "resizer"
+ })
+ .css(_c.resizers.cssReq).css("zIndex", options.zIndexes.resizer_normal)
+ .css(o.applyDemoStyles ? _c.resizers.cssDemo : {}) // add demo styles
+ .addClass(rClass +" "+ rClass+_pane)
+ .hover(addHover, removeHover) // ALWAYS add hover-classes, even if resizing is not enabled - handle with CSS instead
+ .hover(onResizerEnter, onResizerLeave) // ALWAYS NEED resizer.mouseleave to balance toggler.mouseenter
+ .appendTo($N) // append DIV to container
+ ;
+
+ if ($T) {
+ $T // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "#paneLeft-toggler"
+ .attr("id", (o.paneSelector.substr(0,1)=="#" ? o.paneSelector.substr(1) + "-toggler" : ""))
+ .data({
+ parentLayout: Instance
+ , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
+ , layoutEdge: pane
+ , layoutRole: "toggler"
+ })
+ .css(_c.togglers.cssReq) // add base/required styles
+ .css(o.applyDemoStyles ? _c.togglers.cssDemo : {}) // add demo styles
+ .addClass(tClass +" "+ tClass+_pane)
+ .hover(addHover, removeHover) // ALWAYS add hover-classes, even if toggling is not enabled - handle with CSS instead
+ .bind("mouseenter", onResizerEnter) // NEED toggler.mouseenter because mouseenter MAY NOT fire on resizer
+ .appendTo($R) // append SPAN to resizer DIV
+ ;
+ // ADD INNER-SPANS TO TOGGLER
+ if (o.togglerContent_open) // ui-layout-open
+ $("<span>"+ o.togglerContent_open +"</span>")
+ .data({
+ layoutEdge: pane
+ , layoutRole: "togglerContent"
+ })
+ .data("layoutRole", "togglerContent")
+ .data("layoutEdge", pane)
+ .addClass("content content-open")
+ .css("display","none")
+ .appendTo( $T )
+ //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-open instead!
+ ;
+ if (o.togglerContent_closed) // ui-layout-closed
+ $("<span>"+ o.togglerContent_closed +"</span>")
+ .data({
+ layoutEdge: pane
+ , layoutRole: "togglerContent"
+ })
+ .addClass("content content-closed")
+ .css("display","none")
+ .appendTo( $T )
+ //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-closed instead!
+ ;
+ // ADD TOGGLER.click/.hover
+ enableClosable(pane);
+ }
+
+ // add Draggable events
+ initResizable(pane);
+
+ // ADD CLASSNAMES & SLIDE-BINDINGS - eg: class="resizer resizer-west resizer-open"
+ if (s.isVisible)
+ setAsOpen(pane); // onOpen will be called, but NOT onResize
+ else {
+ setAsClosed(pane); // onClose will be called
+ bindStartSlidingEvent(pane, true); // will enable events IF option is set
+ }
+
+ });
+
+ // SET ALL HANDLE DIMENSIONS
+ sizeHandles();
+ }
+
+
+ /**
+ * Initialize scrolling ui-layout-content div - if exists
+ *
+	 * @see  addPane() - or externally after an Ajax injection
+ * @param {string} [pane] The pane to process
+ * @param {boolean=} [resize=true] Size content after init
+ */
+, initContent = function (pane, resize) {
+ if (!isInitialized()) return;
+ var
+ o = options[pane]
+ , sel = o.contentSelector
+ , I = Instance[pane]
+ , $P = $Ps[pane]
+ , $C
+ ;
+ if (sel) $C = I.content = $Cs[pane] = (o.findNestedContent)
+ ? $P.find(sel).eq(0) // match 1-element only
+ : $P.children(sel).eq(0)
+ ;
+ if ($C && $C.length) {
+ $C.data("layoutRole", "content");
+ // SAVE original Pane CSS
+ if (!$C.data("layoutCSS"))
+ $C.data("layoutCSS", elCSS($C, "height"));
+ $C.css( _c.content.cssReq );
+ if (o.applyDemoStyles) {
+ $C.css( _c.content.cssDemo ); // add padding & overflow: auto to content-div
+ $P.css( _c.content.cssDemoPane ); // REMOVE padding/scrolling from pane
+ }
+ state[pane].content = {}; // init content state
+ if (resize !== false) sizeContent(pane);
+ // sizeContent() is called AFTER init of all elements
+ }
+ else
+ I.content = $Cs[pane] = false;
+ }
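+
+	/* A 'content-div' is opted into per pane via contentSelector - the pane keeps a fixed
+	 * size while the inner div scrolls. Illustrative config, using the default content
+	 * class named in the header comment above; per that header, initContent can also be
+	 * re-run after an Ajax injection replaces the pane markup:
+	 *
+	 *	$("body").layout({ west: { contentSelector: ".ui-layout-content", findNestedContent: false } });
+	 */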
+
+
+ /**
+ * Add resize-bars to all panes that specify it in options
+	 * - dependency: $.fn.draggable - will skip if not found
+ *
+ * @see _create()
+ * @param {string=} [panes=""] The edge(s) to process
+ */
+, initResizable = function (panes) {
+ var draggingAvailable = $.layout.plugins.draggable
+ , side // set in start()
+ ;
+ panes = panes ? panes.split(",") : _c.borderPanes;
+
+ $.each(panes, function (idx, pane) {
+ var o = options[pane];
+ if (!draggingAvailable || !$Ps[pane] || !o.resizable) {
+ o.resizable = false;
+ return true; // skip to next
+ }
+
+ var s = state[pane]
+ , z = options.zIndexes
+ , c = _c[pane]
+ , side = c.dir=="horz" ? "top" : "left"
+ , opEdge = _c.oppositeEdge[pane]
+ , masks = pane +",center,"+ opEdge + (c.dir=="horz" ? ",west,east" : "")
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , base = o.resizerClass
+ , lastPos = 0 // used when live-resizing
+ , r, live // set in start because may change
+ // 'drag' classes are applied to the ORIGINAL resizer-bar while dragging is in process
+ , resizerClass = base+"-drag" // resizer-drag
+ , resizerPaneClass = base+"-"+pane+"-drag" // resizer-north-drag
+ // 'helper' class is applied to the CLONED resizer-bar while it is being dragged
+ , helperClass = base+"-dragging" // resizer-dragging
+ , helperPaneClass = base+"-"+pane+"-dragging" // resizer-north-dragging
+ , helperLimitClass = base+"-dragging-limit" // resizer-drag
+ , helperPaneLimitClass = base+"-"+pane+"-dragging-limit" // resizer-north-drag
+ , helperClassesSet = false // logic var
+ ;
+
+ if (!s.isClosed)
+ $R.attr("title", o.resizerTip)
+ .css("cursor", o.resizerCursor); // n-resize, s-resize, etc
+
+ $R.draggable({
+ containment: $N[0] // limit resizing to layout container
+ , axis: (c.dir=="horz" ? "y" : "x") // limit resizing to horz or vert axis
+ , delay: 0
+ , distance: 1
+ , grid: o.resizingGrid
+ // basic format for helper - style it using class: .ui-draggable-dragging
+ , helper: "clone"
+ , opacity: o.resizerDragOpacity
+ , addClasses: false // avoid ui-state-disabled class when disabled
+ //, iframeFix: o.draggableIframeFix // TODO: consider using when bug is fixed
+ , zIndex: z.resizer_drag
+
+ , start: function (e, ui) {
+ // REFRESH options & state pointers in case we used swapPanes
+ o = options[pane];
+ s = state[pane];
+ // re-read options
+ live = o.livePaneResizing;
+
+ // ondrag_start callback - will CANCEL hide if returns false
+ // TODO: dragging CANNOT be cancelled like this, so see if there is a way?
+ if (false === _runCallbacks("ondrag_start", pane)) return false;
+
+ s.isResizing = true; // prevent pane from closing while resizing
+ timer.clear(pane+"_closeSlider"); // just in case already triggered
+
+ // SET RESIZER LIMITS - used in drag()
+ setSizeLimits(pane); // update pane/resizer state
+ r = s.resizerPosition;
+				lastPos = ui.position[ side ];
+
+ $R.addClass( resizerClass +" "+ resizerPaneClass ); // add drag classes
+ helperClassesSet = false; // reset logic var - see drag()
+
+ // DISABLE TEXT SELECTION (probably already done by resizer.mouseOver)
+ $('body').disableSelection();
+
+ // MASK PANES CONTAINING IFRAMES, APPLETS OR OTHER TROUBLESOME ELEMENTS
+ showMasks( masks );
+ }
+
+ , drag: function (e, ui) {
+ if (!helperClassesSet) { // can only add classes after clone has been added to the DOM
+ //$(".ui-draggable-dragging")
+ ui.helper
+ .addClass( helperClass +" "+ helperPaneClass ) // add helper classes
+ .css({ right: "auto", bottom: "auto" }) // fix dir="rtl" issue
+ .children().css("visibility","hidden") // hide toggler inside dragged resizer-bar
+ ;
+ helperClassesSet = true;
+ // draggable bug!? RE-SET zIndex to prevent E/W resize-bar showing through N/S pane!
+ if (s.isSliding) $Ps[pane].css("zIndex", z.pane_sliding);
+ }
+ // CONTAIN RESIZER-BAR TO RESIZING LIMITS
+ var limit = 0;
+ if (ui.position[side] < r.min) {
+ ui.position[side] = r.min;
+ limit = -1;
+ }
+ else if (ui.position[side] > r.max) {
+ ui.position[side] = r.max;
+ limit = 1;
+ }
+ // ADD/REMOVE dragging-limit CLASS
+ if (limit) {
+ ui.helper.addClass( helperLimitClass +" "+ helperPaneLimitClass ); // at dragging-limit
+ window.defaultStatus = (limit>0 && pane.match(/north|west/)) || (limit<0 && pane.match(/south|east/)) ? lang.maxSizeWarning : lang.minSizeWarning;
+ }
+ else {
+ ui.helper.removeClass( helperLimitClass +" "+ helperPaneLimitClass ); // not at dragging-limit
+ window.defaultStatus = "";
+ }
+ // DYNAMICALLY RESIZE PANES IF OPTION ENABLED
+ // won't trigger unless resizer has actually moved!
+ if (live && Math.abs(ui.position[side] - lastPos) >= o.liveResizingTolerance) {
+ lastPos = ui.position[side];
+ resizePanes(e, ui, pane)
+ }
+ }
+
+ , stop: function (e, ui) {
+ $('body').enableSelection(); // RE-ENABLE TEXT SELECTION
+ window.defaultStatus = ""; // clear 'resizing limit' message from statusbar
+ $R.removeClass( resizerClass +" "+ resizerPaneClass ); // remove drag classes from Resizer
+ s.isResizing = false;
+ resizePanes(e, ui, pane, true, masks); // true = resizingDone
+ }
+
+ });
+ });
+
+ /**
+ * resizePanes
+ *
+ * Sub-routine called from stop() - and drag() if livePaneResizing
+ *
+ * @param {!Object} evt
+ * @param {!Object} ui
+ * @param {string} pane
+ * @param {boolean=} [resizingDone=false]
+ */
+ var resizePanes = function (evt, ui, pane, resizingDone, masks) {
+ var dragPos = ui.position
+ , c = _c[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , resizerPos
+ ;
+ switch (pane) {
+ case "north": resizerPos = dragPos.top; break;
+ case "west": resizerPos = dragPos.left; break;
+ case "south": resizerPos = sC.offsetHeight - dragPos.top - o.spacing_open; break;
+ case "east": resizerPos = sC.offsetWidth - dragPos.left - o.spacing_open; break;
+ };
+ // remove container margin from resizer position to get the pane size
+ var newSize = resizerPos - sC["inset"+ c.side];
+
+ // Disable OR Resize Mask(s) created in drag.start
+ if (!resizingDone) {
+ // ensure we meet liveResizingTolerance criteria
+ if (Math.abs(newSize - s.size) < o.liveResizingTolerance)
+ return; // SKIP resize this time
+ // resize the pane
+ manualSizePane(pane, newSize, false, true); // true = noAnimation
+ sizeMasks(); // resize all visible masks
+ }
+ else { // resizingDone
+ // ondrag_end callback
+ if (false !== _runCallbacks("ondrag_end", pane))
+ manualSizePane(pane, newSize, false, true); // true = noAnimation
+ hideMasks(); // hide all masks, which include panes with 'content/iframe-masks'
+ if (s.isSliding && masks) // RE-SHOW only 'object-masks' so objects won't show through sliding pane
+ showMasks( masks, true ); // true = onlyForObjects
+ }
+ };
+ }
+
+ /**
+ * sizeMask
+ *
+ * Needed to overlay a DIV over an IFRAME-pane because mask CANNOT be *inside* the pane
+ * Called when mask created, and during livePaneResizing
+ */
+, sizeMask = function () {
+ var $M = $(this)
+ , pane = $M.data("layoutMask") // eg: "west"
+ , s = state[pane]
+ ;
+ // only masks over an IFRAME-pane need manual resizing
+ if (s.tagName == "IFRAME" && s.isVisible) // no need to mask closed/hidden panes
+ $M.css({
+ top: s.offsetTop
+ , left: s.offsetLeft
+ , width: s.outerWidth
+ , height: s.outerHeight
+ });
+ /* ALT Method...
+ var $P = $Ps[pane];
+ $M.css( $P.position() ).css({ width: $P[0].offsetWidth, height: $P[0].offsetHeight });
+ */
+ }
+, sizeMasks = function () {
+ $Ms.each( sizeMask ); // resize all 'visible' masks
+ }
+
+, showMasks = function (panes, onlyForObjects) {
+ var a = panes ? panes.split(",") : $.layout.config.allPanes
+ , z = options.zIndexes
+ , o, s;
+ $.each(a, function(i,p){
+ s = state[p];
+ o = options[p];
+ if (s.isVisible && ( (!onlyForObjects && o.maskContents) || o.maskObjects )) {
+ getMasks(p).each(function(){
+ sizeMask.call(this);
+					this.style.zIndex = s.isSliding ? z.pane_sliding+1 : z.pane_normal+1;
+ this.style.display = "block";
+ });
+ }
+ });
+ }
+
+, hideMasks = function () {
+ // ensure no pane is resizing - could be a timing issue
+ var skip;
+ $.each( $.layout.config.borderPanes, function(i,p){
+ if (state[p].isResizing) {
+ skip = true;
+ return false; // BREAK
+ }
+ });
+ if (!skip)
+ $Ms.hide(); // hide ALL masks
+ }
+
+, getMasks = function (pane) {
+ var $Masks = $([])
+ , $M, i = 0, c = $Ms.length
+ ;
+ for (; i<c; i++) {
+ $M = $Ms.eq(i);
+ if ($M.data("layoutMask") === pane)
+ $Masks = $Masks.add( $M );
+ }
+ if ($Masks.length)
+ return $Masks;
+ else
+ return createMasks(pane);
+ }
+
+ /**
+ * createMasks
+ *
+ * Generates both DIV (ALWAYS used) and IFRAME (optional) elements as masks
+ * An IFRAME mask is created *under* the DIV when maskObjects=true, because a DIV cannot mask an applet
+ */
+, createMasks = function (pane) {
+ var
+ $P = $Ps[pane]
+ , s = state[pane]
+ , o = options[pane]
+ , z = options.zIndexes
+ //, objMask = o.maskObjects && s.tagName != "IFRAME" // check for option
+ , $Masks = $([])
+ , isIframe, el, $M, css, i
+ ;
+ if (!o.maskContents && !o.maskObjects) return $Masks;
+ // if o.maskObjects=true, then loop TWICE to create BOTH kinds of mask, else only create a DIV
+ for (i=0; i < (o.maskObjects ? 2 : 1); i++) {
+ isIframe = o.maskObjects && i==0;
+ el = document.createElement( isIframe ? "iframe" : "div" );
+ $M = $(el).data("layoutMask", pane); // add data to relate mask to pane
+ el.className = "ui-layout-mask ui-layout-mask-"+ pane; // for user styling
+ css = el.style;
+ // styles common to both DIVs and IFRAMES
+ css.display = "block";
+ css.position = "absolute";
+ if (isIframe) { // IFRAME-only props
+ el.frameborder = 0;
+ el.src = "about:blank";
+ css.opacity = 0;
+ css.filter = "Alpha(Opacity='0')";
+ css.border = 0;
+ }
+ // if pane is an IFRAME, then must mask the pane itself
+ if (s.tagName == "IFRAME") {
+ // NOTE sizing done by a subroutine so can be called during live-resizing
+ css.zIndex = z.pane_normal+1; // 1-higher than pane
+ $N.append( el ); // append to LAYOUT CONTAINER
+ }
+ // otherwise put masks *inside the pane* to mask its contents
+ else {
+ $M.addClass("ui-layout-mask-inside-pane");
+ css.zIndex = o.maskZindex || z.content_mask; // usually 1, but customizable
+ css.top = 0;
+ css.left = 0;
+ css.width = "100%";
+ css.height = "100%";
+ $P.append( el ); // append INSIDE pane element
+ }
+ // add to return object
+ $Masks = $Masks.add( el );
+ // add Mask to cached array so can be resized & reused
+ $Ms = $Ms.add( el );
+ }
+ return $Masks;
+ }
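+
+	/* Mask layering produced above (per the zIndex logic): a DIV mask is always created
+	 * when maskContents or maskObjects is set; an IFRAME mask is created UNDER the DIV
+	 * only when maskObjects=true, because a DIV cannot cover applets/objects. Panes that
+	 * are themselves IFRAMEs are masked from the layout-container at pane_normal+1; all
+	 * other masks sit inside the pane at o.maskZindex || zIndexes.content_mask.
+	 * Illustrative opt-in:
+	 *
+	 *	$("body").layout({ west: { maskContents: true, maskObjects: true } });
+	 */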
+
+
+ /**
+ * Destroy this layout and reset all elements
+ *
+	 * @param {boolean=}	[destroyChildren=false]		Destroy Child-Layouts first?
+ */
+, destroy = function (destroyChildren) {
+ // UNBIND layout events and remove global object
+ $(window).unbind("."+ sID); // resize & unload
+ $(document).unbind("."+ sID); // keyDown (hotkeys)
+
+ // need to look for parent layout BEFORE we remove the container data, else skips a level
+ //var parentPane = Instance.hasParentLayout ? $.layout.getParentPaneInstance( $N ) : null;
+
+ // reset layout-container
+ $N .clearQueue()
+ .removeData("layout")
+ .removeData("layoutContainer")
+ .removeClass(options.containerClass)
+ .unbind("."+ sID) // remove ALL Layout events
+ ;
+
+ // remove all mask elements that have been created
+ $Ms.remove();
+
+ // loop all panes to remove layout classes, attributes and bindings
+ $.each(_c.allPanes, function (i, pane) {
+ removePane( pane, false, true, destroyChildren ); // true = skipResize
+ });
+
+ // do NOT reset container CSS if is a 'pane' (or 'content') in an outer-layout - ie, THIS layout is 'nested'
+ var css = "layoutCSS";
+ if ($N.data(css) && !$N.data("layoutRole")) // RESET CSS
+ $N.css( $N.data(css) ).removeData(css);
+
+ // for full-page layouts, also reset the <HTML> CSS
+ if (sC.tagName === "BODY" && ($N = $("html")).data(css)) // RESET <HTML> CSS
+ $N.css( $N.data(css) ).removeData(css);
+
+ // trigger plugins for this layout, if there are any
+ runPluginCallbacks( Instance, $.layout.onDestroy );
+
+ // trigger state-management and onunload callback
+ unload();
+
+ // clear the Instance of everything except for container & options (so could recreate)
+ // RE-CREATE: myLayout = myLayout.container.layout( myLayout.options );
+		for (var n in Instance)
+ if (!n.match(/^(container|options)$/)) delete Instance[ n ];
+ // add a 'destroyed' flag to make it easy to check
+ Instance.destroyed = true;
+
+ // if this is a child layout, CLEAR the child-pointer in the parent
+ /* for now the pointer REMAINS, but with only container, options and destroyed keys
+ if (parentPane) {
+ var layout = parentPane.pane.data("parentLayout");
+ parentPane.child = layout.children[ parentPane.name ] = null;
+ }
+ */
+
+ return Instance; // for coding convenience
+ }
+
+ /**
+ * Remove a pane from the layout - subroutine of destroy()
+ *
+ * @see destroy()
+ * @param {string} pane The pane to process
+ * @param {boolean=} [remove=false] Remove the DOM element?
+	 * @param {boolean=}	[skipResize=false]	Skip calling resizeAll()?
+	 * @param {boolean=}	[destroyChild]		Destroy the child-layout, if one exists? Defaults to options[pane].destroyChildLayout
+ */
+, removePane = function (evt_or_pane, remove, skipResize, destroyChild) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $P = $Ps[pane]
+ , $C = $Cs[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ ;
+ //alert( '$P.length = '+ $P.length );
+ // NOTE: elements can still exist even after remove()
+		// so check for missing data(), which is cleared by remove()
+ if ($P && $.isEmptyObject( $P.data() )) $P = false;
+ if ($C && $.isEmptyObject( $C.data() )) $C = false;
+ if ($R && $.isEmptyObject( $R.data() )) $R = false;
+ if ($T && $.isEmptyObject( $T.data() )) $T = false;
+
+ if ($P) $P.stop(true, true);
+
+ // check for a child layout
+ var o = options[pane]
+ , s = state[pane]
+ , d = "layout"
+ , css = "layoutCSS"
+ , child = children[pane] || ($P ? $P.data(d) : 0) || ($C ? $C.data(d) : 0) || null
+ , destroy = destroyChild !== undefined ? destroyChild : o.destroyChildLayout
+ ;
+
+ // FIRST destroy the child-layout(s)
+ if (destroy && child && !child.destroyed) {
+ child.destroy(true); // tell child-layout to destroy ALL its child-layouts too
+ if (child.destroyed) // destroy was successful
+ child = null; // clear pointer for logic below
+ }
+
+ if ($P && remove && !child)
+ $P.remove();
+ else if ($P && $P[0]) {
+ // create list of ALL pane-classes that need to be removed
+ var root = o.paneClass // default="ui-layout-pane"
+ , pRoot = root +"-"+ pane // eg: "ui-layout-pane-west"
+ , _open = "-open"
+ , _sliding= "-sliding"
+ , _closed = "-closed"
+ , classes = [ root, root+_open, root+_closed, root+_sliding, // generic classes
+ pRoot, pRoot+_open, pRoot+_closed, pRoot+_sliding ] // pane-specific classes
+ ;
+ $.merge(classes, getHoverClasses($P, true)); // ADD hover-classes
+ // remove all Layout classes from pane-element
+ $P .removeClass( classes.join(" ") ) // remove ALL pane-classes
+ .removeData("parentLayout")
+ .removeData("layoutPane")
+ .removeData("layoutRole")
+ .removeData("layoutEdge")
+ .removeData("autoHidden") // in case set
+ .unbind("."+ sID) // remove ALL Layout events
+ // TODO: remove these extra unbind commands when jQuery is fixed
+ //.unbind("mouseenter"+ sID)
+ //.unbind("mouseleave"+ sID)
+ ;
+ // do NOT reset CSS if this pane/content is STILL the container of a nested layout!
+ // the nested layout will reset its 'container' CSS when/if it is destroyed
+ if ($C && $C.data(d)) {
+ // a content-div may not have a specific width, so give it one to contain the Layout
+ $C.width( $C.width() );
+ child.resizeAll(); // now resize the Layout
+ }
+ else if ($C)
+ $C.css( $C.data(css) ).removeData(css).removeData("layoutRole");
+ // remove pane AFTER content in case there was a nested layout
+ if (!$P.data(d))
+ $P.css( $P.data(css) ).removeData(css);
+ }
+
+ // REMOVE pane resizer and toggler elements
+ if ($T) $T.remove();
+ if ($R) $R.remove();
+
+ // CLEAR all pointers and state data
+ Instance[pane] = $Ps[pane] = $Cs[pane] = $Rs[pane] = $Ts[pane] = children[pane] = false;
+ s = { removed: true };
+
+ if (!skipResize)
+ resizeAll();
+ }
+
+
+/*
+ * ###########################
+ * ACTION METHODS
+ * ###########################
+ */
+
+, _hidePane = function (pane) {
+ var $P = $Ps[pane]
+ , o = options[pane]
+ , s = $P[0].style
+ ;
+ if (o.useOffscreenClose) {
+ if (!$P.data(_c.offscreenReset))
+ $P.data(_c.offscreenReset, { left: s.left, right: s.right });
+ $P.css( _c.offscreenCSS );
+ }
+ else
+ $P.hide().removeData(_c.offscreenReset);
+ }
+
+, _showPane = function (pane) {
+ var $P = $Ps[pane]
+ , o = options[pane]
+ , off = _c.offscreenCSS
+ , old = $P.data(_c.offscreenReset)
+ , s = $P[0].style
+ ;
+ $P .show() // ALWAYS show, just in case
+ .removeData(_c.offscreenReset);
+ if (o.useOffscreenClose && old) {
+ if (s.left == off.left)
+ s.left = old.left;
+ if (s.right == off.right)
+ s.right = old.right;
+ }
+ }
+
+
+ /**
+ * Completely 'hides' a pane, including its spacing - as if it does not exist
+	 * The pane is not actually removed from the DOM, so 'show' can be used to un-hide it
+ *
+ * @param {string} pane The pane being hidden, ie: north, south, east, or west
+ * @param {boolean=} [noAnimation=false]
+ */
+, hide = function (evt_or_pane, noAnimation) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ ;
+ if (!$P || s.isHidden) return; // pane does not exist OR is already hidden
+
+ // onhide_start callback - will CANCEL hide if returns false
+ if (state.initialized && false === _runCallbacks("onhide_start", pane)) return;
+
+ s.isSliding = false; // just in case
+
+ // now hide the elements
+ if ($R) $R.hide(); // hide resizer-bar
+ if (!state.initialized || s.isClosed) {
+ s.isClosed = true; // to trigger open-animation on show()
+ s.isHidden = true;
+ s.isVisible = false;
+ if (!state.initialized)
+ _hidePane(pane); // no animation when loading page
+ sizeMidPanes(_c[pane].dir === "horz" ? "" : "center");
+ if (state.initialized || o.triggerEventsOnLoad)
+ _runCallbacks("onhide_end", pane);
+ }
+ else {
+ s.isHiding = true; // used by onclose
+ close(pane, false, noAnimation); // adjust all panes to fit
+ }
+ }
+
+ /**
+ * Show a hidden pane - show as 'closed' by default unless openPane = true
+ *
+ * @param {string} pane The pane being opened, ie: north, south, east, or west
+ * @param {boolean=} [openPane=false]
+ * @param {boolean=} [noAnimation=false]
+ * @param {boolean=} [noAlert=false]
+ */
+, show = function (evt_or_pane, openPane, noAnimation, noAlert) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ ;
+ if (!$P || !s.isHidden) return; // pane does not exist OR is not hidden
+
+ // onshow_start callback - will CANCEL show if returns false
+ if (false === _runCallbacks("onshow_start", pane)) return;
+
+ s.isSliding = false; // just in case
+ s.isShowing = true; // used by onopen/onclose
+ //s.isHidden = false; - will be set by open/close - if not cancelled
+
+ // now show the elements
+ //if ($R) $R.show(); - will be shown by open/close
+ if (openPane === false)
+ close(pane, true); // true = force
+ else
+ open(pane, false, noAnimation, noAlert); // adjust all panes to fit
+ }
+
+
+ /**
+ * Toggles a pane open/closed by calling either open or close
+ *
+ * @param {string} pane The pane being toggled, ie: north, south, east, or west
+ * @param {boolean=} [slide=false]
+ */
+, toggle = function (evt_or_pane, slide) {
+ if (!isInitialized()) return;
+ var evt = evtObj(evt_or_pane)
+ , pane = evtPane.call(this, evt_or_pane)
+ , s = state[pane]
+ ;
+		if (evt) // called from $R.dblclick OR triggerPaneEvent
+ evt.stopImmediatePropagation();
+ if (s.isHidden)
+ show(pane); // will call 'open' after unhiding it
+ else if (s.isClosed)
+ open(pane, !!slide);
+ else
+ close(pane);
+ }
+
+
+ /**
+ * Utility method used during init or other auto-processes
+ *
+ * @param {string} pane The pane being closed
+ * @param {boolean=} [setHandles=false]
+ */
+, _closePane = function (pane, setHandles) {
+ var
+ $P = $Ps[pane]
+ , s = state[pane]
+ ;
+ _hidePane(pane);
+ s.isClosed = true;
+ s.isVisible = false;
+ // UNUSED: if (setHandles) setAsClosed(pane, true); // true = force
+ }
+
+ /**
+ * Close the specified pane (animation optional), and resize all other panes as needed
+ *
+ * @param {string} pane The pane being closed, ie: north, south, east, or west
+ * @param {boolean=} [force=false]
+ * @param {boolean=} [noAnimation=false]
+ * @param {boolean=} [skipCallback=false]
+ */
+, close = function (evt_or_pane, force, noAnimation, skipCallback) {
+ var pane = evtPane.call(this, evt_or_pane);
+ // if pane has been initialized, but NOT the complete layout, close pane instantly
+ if (!state.initialized && $Ps[pane]) {
+ _closePane(pane); // INIT pane as closed
+ return;
+ }
+ if (!isInitialized()) return;
+
+ var
+ $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , doFX, isShowing, isHiding, wasSliding;
+
+ // QUEUE in case another action/animation is in progress
+ $N.queue(function( queueNext ){
+
+ if ( !$P
+ || (!o.closable && !s.isShowing && !s.isHiding) // invalid request // (!o.resizable && !o.closable) ???
+ || (!force && s.isClosed && !s.isShowing) // already closed
+ ) return queueNext();
+
+ // onclose_start callback - will CANCEL hide if returns false
+ // SKIP if just 'showing' a hidden pane as 'closed'
+ var abort = !s.isShowing && false === _runCallbacks("onclose_start", pane);
+
+ // transfer logic vars to temp vars
+ isShowing = s.isShowing;
+ isHiding = s.isHiding;
+ wasSliding = s.isSliding;
+ // now clear the logic vars (REQUIRED before aborting)
+ delete s.isShowing;
+ delete s.isHiding;
+
+ if (abort) return queueNext();
+
+ doFX = !noAnimation && !s.isClosed && (o.fxName_close != "none");
+ s.isMoving = true;
+ s.isClosed = true;
+ s.isVisible = false;
+ // update isHidden BEFORE sizing panes
+ if (isHiding) s.isHidden = true;
+ else if (isShowing) s.isHidden = false;
+
+ if (s.isSliding) // pane is being closed, so UNBIND trigger events
+ bindStopSlidingEvents(pane, false); // will set isSliding=false
+ else // resize panes adjacent to this one
+ sizeMidPanes(_c[pane].dir === "horz" ? "" : "center", false); // false = NOT skipCallback
+
+ // if this pane has a resizer bar, move it NOW - before animation
+ setAsClosed(pane);
+
+ // CLOSE THE PANE
+ if (doFX) { // animate the close
+ // mask panes with objects
+ var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
+ showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
+ lockPaneForFX(pane, true); // need to set left/top so animation will work
+ $P.hide( o.fxName_close, o.fxSettings_close, o.fxSpeed_close, function () {
+ lockPaneForFX(pane, false); // undo
+ if (s.isClosed) close_2();
+ queueNext();
+ });
+ }
+ else { // hide the pane without animation
+ _hidePane(pane);
+ close_2();
+ queueNext();
+ };
+ });
+
+ // SUBROUTINE
+ function close_2 () {
+ s.isMoving = false;
+ bindStartSlidingEvent(pane, true); // will enable if o.slidable = true
+
+ // if opposite-pane was autoClosed, see if it can be autoOpened now
+ var altPane = _c.oppositeEdge[pane];
+ if (state[ altPane ].noRoom) {
+ setSizeLimits( altPane );
+ makePaneFit( altPane );
+ }
+
+ // hide any masks shown while closing
+ hideMasks();
+
+ if (!skipCallback && (state.initialized || o.triggerEventsOnLoad)) {
+ // onclose callback - UNLESS just 'showing' a hidden pane as 'closed'
+ if (!isShowing) _runCallbacks("onclose_end", pane);
+ // onhide OR onshow callback
+ if (isShowing) _runCallbacks("onshow_end", pane);
+ if (isHiding) _runCallbacks("onhide_end", pane);
+ }
+ }
+ }
+
+ /**
+ * @param {string} pane The pane just closed, ie: north, south, east, or west
+ */
+, setAsClosed = function (pane) {
+ var
+ $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , side = _c[pane].side.toLowerCase()
+ , inset = "inset"+ _c[pane].side
+ , rClass = o.resizerClass
+ , tClass = o.togglerClass
+ , _pane = "-"+ pane // used for classNames
+ , _open = "-open"
+ , _sliding= "-sliding"
+ , _closed = "-closed"
+ ;
+ $R
+ .css(side, sC[inset]) // move the resizer
+ .removeClass( rClass+_open +" "+ rClass+_pane+_open )
+ .removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
+ .addClass( rClass+_closed +" "+ rClass+_pane+_closed )
+ .unbind("dblclick."+ sID)
+ ;
+ // DISABLE 'resizing' when closed - do this BEFORE bindStartSlidingEvent?
+ if (o.resizable && $.layout.plugins.draggable)
+ $R
+ .draggable("disable")
+ .removeClass("ui-state-disabled") // do NOT apply disabled styling - not suitable here
+ .css("cursor", "default")
+ .attr("title","")
+ ;
+
+ // if pane has a toggler button, adjust that too
+ if ($T) {
+ $T
+ .removeClass( tClass+_open +" "+ tClass+_pane+_open )
+ .addClass( tClass+_closed +" "+ tClass+_pane+_closed )
+ .attr("title", o.togglerTip_closed) // may be blank
+ ;
+ // toggler-content - if exists
+ $T.children(".content-open").hide();
+ $T.children(".content-closed").css("display","block");
+ }
+
+ // sync any 'pin buttons'
+ syncPinBtns(pane, false);
+
+ if (state.initialized) {
+ // resize 'length' and position togglers for adjacent panes
+ sizeHandles();
+ }
+ }
+
+ /**
+ * Open the specified pane (animation optional), and resize all other panes as needed
+ *
+ * @param {string} pane The pane being opened, ie: north, south, east, or west
+ * @param {boolean=} [slide=false]
+ * @param {boolean=} [noAnimation=false]
+ * @param {boolean=} [noAlert=false]
+ */
+, open = function (evt_or_pane, slide, noAnimation, noAlert) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , doFX, isShowing
+ ;
+ // QUEUE in case another action/animation is in progress
+ $N.queue(function( queueNext ){
+
+ if ( !$P
+ || (!o.resizable && !o.closable && !s.isShowing) // invalid request
+ || (s.isVisible && !s.isSliding) // already open
+ ) return queueNext();
+
+ // pane can ALSO be unhidden by just calling show(), so handle this scenario
+ if (s.isHidden && !s.isShowing) {
+ queueNext(); // call before show() because it needs the queue free
+ show(pane, true);
+ return;
+ }
+
+ if (o.autoResize && s.size != o.size) // resize pane to original size set in options
+ sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
+ else
+ // make sure there is enough space available to open the pane
+ setSizeLimits(pane, slide);
+
+ // onopen_start callback - will CANCEL open if returns false
+ var cbReturn = _runCallbacks("onopen_start", pane);
+
+ if (cbReturn === "abort")
+ return queueNext();
+
+ // update pane-state again in case options were changed in onopen_start
+ if (cbReturn !== "NC") // NC = "No Callback"
+ setSizeLimits(pane, slide);
+
+ if (s.minSize > s.maxSize) { // INSUFFICIENT ROOM FOR PANE TO OPEN!
+ syncPinBtns(pane, false); // make sure pin-buttons are reset
+ if (!noAlert && o.noRoomToOpenTip)
+ alert(o.noRoomToOpenTip);
+ return queueNext(); // ABORT
+ }
+
+ if (slide) // START Sliding - will set isSliding=true
+ bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
+ else if (s.isSliding) // PIN PANE (stop sliding) - open pane 'normally' instead
+ bindStopSlidingEvents(pane, false); // UNBIND trigger events - will set isSliding=false
+ else if (o.slidable)
+ bindStartSlidingEvent(pane, false); // UNBIND trigger events
+
+ s.noRoom = false; // will be reset by makePaneFit if 'noRoom'
+ makePaneFit(pane);
+
+ // transfer logic var to temp var
+ isShowing = s.isShowing;
+ // now clear the logic var
+ delete s.isShowing;
+
+ doFX = !noAnimation && s.isClosed && (o.fxName_open != "none");
+ s.isMoving = true;
+ s.isVisible = true;
+ s.isClosed = false;
+ // update isHidden BEFORE sizing panes - WHY??? Old?
+ if (isShowing) s.isHidden = false;
+
+ if (doFX) { // ANIMATE
+ // mask panes with objects
+ var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
+ if (s.isSliding) masks += ","+ _c.oppositeEdge[pane];
+ showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
+ lockPaneForFX(pane, true); // need to set left/top so animation will work
+ $P.show( o.fxName_open, o.fxSettings_open, o.fxSpeed_open, function() {
+ lockPaneForFX(pane, false); // undo
+ if (s.isVisible) open_2(); // continue
+ queueNext();
+ });
+ }
+ else { // no animation
+ _showPane(pane);// just show pane and...
+ open_2(); // continue
+ queueNext();
+ };
+ });
+
+ // SUBROUTINE
+ function open_2 () {
+ s.isMoving = false;
+
+ // cure iframe display issues
+ _fixIframe(pane);
+
+ // NOTE: if isSliding, then other panes are NOT 'resized'
+ if (!s.isSliding) { // resize all panes adjacent to this one
+ hideMasks(); // remove any masks shown while opening
+ sizeMidPanes(_c[pane].dir=="vert" ? "center" : "", false); // false = NOT skipCallback
+ }
+
+ // set classes, position handles and execute callbacks...
+ setAsOpen(pane);
+ };
+
+ }
+
+ /**
+ * @param {string} pane The pane just opened, ie: north, south, east, or west
+ * @param {boolean=} [skipCallback=false]
+ */
+, setAsOpen = function (pane, skipCallback) {
+ var
+ $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , side = _c[pane].side.toLowerCase()
+ , inset = "inset"+ _c[pane].side
+ , rClass = o.resizerClass
+ , tClass = o.togglerClass
+ , _pane = "-"+ pane // used for classNames
+ , _open = "-open"
+ , _closed = "-closed"
+ , _sliding= "-sliding"
+ ;
+ $R
+ .css(side, sC[inset] + getPaneSize(pane)) // move the resizer
+ .removeClass( rClass+_closed +" "+ rClass+_pane+_closed )
+ .addClass( rClass+_open +" "+ rClass+_pane+_open )
+ ;
+		if (s.isSliding)
+			$R.addClass( rClass+_sliding +" "+ rClass+_pane+_sliding );
+		else // in case 'was sliding'
+			$R.removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding );
+
+ if (o.resizerDblClickToggle)
+ $R.bind("dblclick", toggle );
+ removeHover( 0, $R ); // remove hover classes
+ if (o.resizable && $.layout.plugins.draggable)
+ $R .draggable("enable")
+ .css("cursor", o.resizerCursor)
+ .attr("title", o.resizerTip);
+ else if (!s.isSliding)
+			$R.css("cursor", "default"); // not resizable, so clear the resize-cursor (n-resize, s-resize, etc)
+
+ // if pane also has a toggler button, adjust that too
+ if ($T) {
+ $T .removeClass( tClass+_closed +" "+ tClass+_pane+_closed )
+ .addClass( tClass+_open +" "+ tClass+_pane+_open )
+ .attr("title", o.togglerTip_open); // may be blank
+ removeHover( 0, $T ); // remove hover classes
+ // toggler-content - if exists
+ $T.children(".content-closed").hide();
+ $T.children(".content-open").css("display","block");
+ }
+
+ // sync any 'pin buttons'
+ syncPinBtns(pane, !s.isSliding);
+
+ // update pane-state dimensions - BEFORE resizing content
+ $.extend(s, elDims($P));
+
+ if (state.initialized) {
+ // resize resizer & toggler sizes for all panes
+ sizeHandles();
+ // resize content every time pane opens - to be sure
+ sizeContent(pane, true); // true = remeasure headers/footers, even if 'pane.isMoving'
+ }
+
+ if (!skipCallback && (state.initialized || o.triggerEventsOnLoad) && $P.is(":visible")) {
+ // onopen callback
+ _runCallbacks("onopen_end", pane);
+ // onshow callback - TODO: should this be here?
+ if (s.isShowing) _runCallbacks("onshow_end", pane);
+
+ // ALSO call onresize because layout-size *may* have changed while pane was closed
+ if (state.initialized)
+ _runCallbacks("onresize_end", pane);
+ }
+
+ // TODO: Somehow sizePane("north") is being called after this point???
+ }
+
+
+ /**
+ * slideOpen / slideClose / slideToggle
+ *
+	 * Pass-through methods for sliding
+ */
+, slideOpen = function (evt_or_pane) {
+ if (!isInitialized()) return;
+ var evt = evtObj(evt_or_pane)
+ , pane = evtPane.call(this, evt_or_pane)
+ , s = state[pane]
+ , delay = options[pane].slideDelay_open
+ ;
+ // prevent event from triggering on NEW resizer binding created below
+ if (evt) evt.stopImmediatePropagation();
+
+ if (s.isClosed && evt && evt.type === "mouseenter" && delay > 0)
+ // trigger = mouseenter - use a delay
+ timer.set(pane+"_openSlider", open_NOW, delay);
+ else
+ open_NOW(); // will unbind events if is already open
+
+ /**
+ * SUBROUTINE for timed open
+ */
+ function open_NOW () {
+ if (!s.isClosed) // skip if no longer closed!
+ bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
+ else if (!s.isMoving)
+ open(pane, true); // true = slide - open() will handle binding
+ };
+ }
+
+, slideClose = function (evt_or_pane) {
+ if (!isInitialized()) return;
+ var evt = evtObj(evt_or_pane)
+ , pane = evtPane.call(this, evt_or_pane)
+ , o = options[pane]
+ , s = state[pane]
+ , delay = s.isMoving ? 1000 : 300 // MINIMUM delay - option may override
+ ;
+ if (s.isClosed || s.isResizing)
+ return; // skip if already closed OR in process of resizing
+ else if (o.slideTrigger_close === "click")
+ close_NOW(); // close immediately onClick
+ else if (o.preventQuickSlideClose && s.isMoving)
+ return; // handle Chrome quick-close on slide-open
+ else if (o.preventPrematureSlideClose && evt && $.layout.isMouseOverElem(evt, $Ps[pane]))
+ return; // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
+ else if (evt) // trigger = mouseleave - use a delay
+ // 1 sec delay if 'opening', else .3 sec
+ timer.set(pane+"_closeSlider", close_NOW, max(o.slideDelay_close, delay));
+		else // called programmatically
+ close_NOW();
+
+ /**
+ * SUBROUTINE for timed close
+ */
+ function close_NOW () {
+ if (s.isClosed) // skip 'close' if already closed!
+ bindStopSlidingEvents(pane, false); // UNBIND trigger events - TODO: is this needed here?
+ else if (!s.isMoving)
+ close(pane); // close will handle unbinding
+ };
+ }
+
+ /**
+ * @param {string} pane The pane being opened, ie: north, south, east, or west
+ */
+, slideToggle = function (evt_or_pane) {
+ var pane = evtPane.call(this, evt_or_pane);
+ toggle(pane, true);
+ }
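+
+	/* The slide methods are public on the Instance and are also bound as pane-events by
+	 * addPane (see the paneMethods hash). A usage sketch, assuming `myLayout` holds the
+	 * layout instance:
+	 *
+	 *	myLayout.slideOpen("west");		// slide open over adjacent panes
+	 *	myLayout.slideToggle("west");		// open or close depending on current state
+	 *	myLayout.west.pane.trigger("layoutpaneslidetoggle");	// event equivalent bound in addPane
+	 */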
+
+
+ /**
+ * Must set left/top on East/South panes so animation will work properly
+ *
+ * @param {string} pane The pane to lock, 'east' or 'south' - any other is ignored!
+ * @param {boolean} doLock true = set left/top, false = remove
+ */
+, lockPaneForFX = function (pane, doLock) {
+ var $P = $Ps[pane]
+ , s = state[pane]
+ , o = options[pane]
+ , z = options.zIndexes
+ ;
+ if (doLock) {
+ $P.css({ zIndex: z.pane_animate }); // overlay all elements during animation
+ if (pane=="south")
+ $P.css({ top: sC.insetTop + sC.innerHeight - $P.outerHeight() });
+ else if (pane=="east")
+ $P.css({ left: sC.insetLeft + sC.innerWidth - $P.outerWidth() });
+ }
+ else { // animation DONE - RESET CSS
+ // TODO: see if this can be deleted. It causes a quick-close when sliding in Chrome
+ $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
+ if (pane=="south")
+ $P.css({ top: "auto" });
+ // if pane is positioned 'off-screen', then DO NOT screw with it!
+ else if (pane=="east" && !$P.css("left").match(/\-99999/))
+ $P.css({ left: "auto" });
+ // fix anti-aliasing in IE - only needed for animations that change opacity
+ if (browser.msie && o.fxOpacityFix && o.fxName_open != "slide" && $P.css("filter") && $P.css("opacity") == 1)
+ $P[0].style.removeAttribute('filter');
+ }
+ }
+
+
+ /**
+	 * Toggle sliding functionality of a specific pane on/off by adding/removing the 'slide open' trigger
+ *
+ * @see open(), close()
+ * @param {string} pane The pane to enable/disable, 'north', 'south', etc.
+ * @param {boolean} enable Enable or Disable sliding?
+ */
+, bindStartSlidingEvent = function (pane, enable) {
+ var o = options[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , evtName = o.slideTrigger_open.toLowerCase()
+ ;
+ if (!$R || (enable && !o.slidable)) return;
+
+ // make sure we have a valid event
+ if (evtName.match(/mouseover/))
+ evtName = o.slideTrigger_open = "mouseenter";
+ else if (!evtName.match(/click|dblclick|mouseenter/))
+ evtName = o.slideTrigger_open = "click";
+
+ $R
+ // add or remove event
+ [enable ? "bind" : "unbind"](evtName +'.'+ sID, slideOpen)
+ // set the appropriate cursor & title/tip
+ .css("cursor", enable ? o.sliderCursor : "default")
+ .attr("title", enable ? o.sliderTip : "")
+ ;
+ }
+
+ /**
+ * Add or remove 'mouseleave' events to 'slide close' when pane is 'sliding' open or closed
+ * Also increases zIndex when pane is sliding open
+ * See bindStartSlidingEvent for code to control 'slide open'
+ *
+ * @see slideOpen(), slideClose()
+ * @param {string} pane The pane to process, 'north', 'south', etc.
+ * @param {boolean} enable Enable or Disable events?
+ */
+, bindStopSlidingEvents = function (pane, enable) {
+ var o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , z = options.zIndexes
+ , evtName = o.slideTrigger_close.toLowerCase()
+ , action = (enable ? "bind" : "unbind")
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ ;
+ s.isSliding = enable; // logic
+ timer.clear(pane+"_closeSlider"); // just in case
+
+ // remove 'slideOpen' event from resizer
+ // ALSO will raise the zIndex of the pane & resizer
+ if (enable) bindStartSlidingEvent(pane, false);
+
+ // RE/SET zIndex - increases when pane is sliding-open, resets to normal when not
+ $P.css("zIndex", enable ? z.pane_sliding : z.pane_normal);
+ $R.css("zIndex", enable ? z.pane_sliding+2 : z.resizer_normal); // NOTE: mask = pane_sliding+1
+
+ // make sure we have a valid event
+ if (!evtName.match(/click|mouseleave/))
+ evtName = o.slideTrigger_close = "mouseleave"; // also catches 'mouseout'
+
+ // add/remove slide triggers
+		$R[action](evtName, slideClose); // base event, bound to the resizer
+ // need extra events for mouseleave
+ if (evtName === "mouseleave") {
+ // also close on pane.mouseleave
+ $P[action]("mouseleave."+ sID, slideClose);
+ // cancel timer when mouse moves between 'pane' and 'resizer'
+ $R[action]("mouseenter."+ sID, cancelMouseOut);
+ $P[action]("mouseenter."+ sID, cancelMouseOut);
+ }
+
+ if (!enable)
+ timer.clear(pane+"_closeSlider");
+ else if (evtName === "click" && !o.resizable) {
+ // IF pane is not resizable (which already has a cursor and tip)
+			// then set a cursor & title/tip on the resizer when sliding
+ $R.css("cursor", enable ? o.sliderCursor : "default");
+ $R.attr("title", enable ? o.togglerTip_open : ""); // use Toggler-tip, eg: "Close Pane"
+ }
+
+ // SUBROUTINE for mouseleave timer clearing
+ function cancelMouseOut (evt) {
+ timer.clear(pane+"_closeSlider");
+ evt.stopPropagation();
+ }
+ }
+
+
+ /**
+ * Hides/closes a pane if there is insufficient room - reverses this when there is room again
+ * MUST have already called setSizeLimits() before calling this method
+ *
+ * @param {string} pane The pane being resized
+ * @param {boolean=} [isOpening=false] Called from onOpen?
+ * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
+ * @param {boolean=} [force=false]
+ */
+, makePaneFit = function (pane, isOpening, skipCallback, force) {
+ var
+ o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , isSidePane = c.dir==="vert"
+ , hasRoom = false
+ ;
+ // special handling for center & east/west panes
+ if (pane === "center" || (isSidePane && s.noVerticalRoom)) {
+ // see if there is enough room to display the pane
+ // ERROR: hasRoom = s.minHeight <= s.maxHeight && (isSidePane || s.minWidth <= s.maxWidth);
+ hasRoom = (s.maxHeight >= 0);
+ if (hasRoom && s.noRoom) { // previously hidden due to noRoom, so show now
+ _showPane(pane);
+ if ($R) $R.show();
+ s.isVisible = true;
+ s.noRoom = false;
+ if (isSidePane) s.noVerticalRoom = false;
+ _fixIframe(pane);
+ }
+ else if (!hasRoom && !s.noRoom) { // not currently hidden, so hide now
+ _hidePane(pane);
+ if ($R) $R.hide();
+ s.isVisible = false;
+ s.noRoom = true;
+ }
+ }
+
+ // see if there is enough room to fit the border-pane
+ if (pane === "center") {
+ // ignore center in this block
+ }
+ else if (s.minSize <= s.maxSize) { // pane CAN fit
+ hasRoom = true;
+ if (s.size > s.maxSize) // pane is too big - shrink it
+ sizePane(pane, s.maxSize, skipCallback, force, true); // true = noAnimation
+ else if (s.size < s.minSize) // pane is too small - enlarge it
+ sizePane(pane, s.minSize, skipCallback, force, true);
+ // need s.isVisible because new pseudoClose method keeps pane visible, but off-screen
+ else if ($R && s.isVisible && $P.is(":visible")) {
+ // make sure resizer-bar is positioned correctly
+ // handles situation where nested layout was 'hidden' when initialized
+ var side = c.side.toLowerCase()
+ , pos = s.size + sC["inset"+ c.side]
+ ;
+ if ($.layout.cssNum($R, side) != pos) $R.css( side, pos );
+ }
+
+ // if was previously hidden due to noRoom, then RESET because NOW there is room
+ if (s.noRoom) {
+ // s.noRoom state will be set by open or show
+ if (s.wasOpen && o.closable) {
+ if (o.autoReopen)
+ open(pane, false, true, true); // true = noAnimation, true = noAlert
+ else // leave the pane closed, so just update state
+ s.noRoom = false;
+ }
+ else
+ show(pane, s.wasOpen, true, true); // true = noAnimation, true = noAlert
+ }
+ }
+ else { // !hasRoom - pane CANNOT fit
+ if (!s.noRoom) { // pane not set as noRoom yet, so hide or close it now...
+ s.noRoom = true; // update state
+ s.wasOpen = !s.isClosed && !s.isSliding;
+ if (s.isClosed){} // SKIP
+ else if (o.closable) // 'close' if possible
+ close(pane, true, true); // true = force, true = noAnimation
+ else // 'hide' pane if cannot just be closed
+ hide(pane, true); // true = noAnimation
+ }
+ }
+ }
+
+
+ /**
+ * sizePane / manualSizePane
+ * sizePane is called only by internal methods whenever a pane needs to be resized
+ * manualSizePane is an exposed flow-through method allowing extra code when pane is 'manually resized'
+ *
+ * @param {string} pane The pane being resized
+ * @param {number} size The *desired* new size for this pane - will be validated
+ * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
+ * @param {boolean=} [noAnimation=false]
+ */
+, manualSizePane = function (evt_or_pane, size, skipCallback, noAnimation) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , o = options[pane]
+ , s = state[pane]
+ // if resizing callbacks have been delayed and resizing is now DONE, force resizing to complete...
+ , forceResize = o.livePaneResizing && !s.isResizing
+ ;
+ // ANY call to manualSizePane disables autoResize - ie, percentage sizing
+ o.autoResize = false;
+ // flow-through...
+ sizePane(pane, size, skipCallback, forceResize, noAnimation); // will animate resize if option enabled
+ }
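+
+	/*
+	 * Illustrative usage sketch - manualSizePane is exposed publicly as 'sizePane'
+	 * on the Instance object below; 'myLayout' is an assumed instance name:
+	 *
+	 * @example myLayout.sizePane("west", 250);     // outer-size in pixels
+	 * @example myLayout.sizePane("south", "20%");  // percentage of the container
+	 * @example myLayout.sizePane("north", "auto"); // percentages & 'auto' are handled by _parseSize()
+	 */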
+
+ /**
+ * @param {string} pane The pane being resized
+ * @param {number} size The *desired* new size for this pane - will be validated
+ * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
+	* @param {boolean=} [force=false]			Force resizing even if it does not seem necessary
+ * @param {boolean=} [noAnimation=false]
+ */
+, sizePane = function (evt_or_pane, size, skipCallback, force, noAnimation) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane) // probably NEVER called from event?
+ , o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , side = _c[pane].side.toLowerCase()
+ , dimName = _c[pane].sizeType.toLowerCase()
+ , inset = "inset"+ _c[pane].side
+ , skipResizeWhileDragging = s.isResizing && !o.triggerEventsDuringLiveResize
+ , doFX = noAnimation !== true && o.animatePaneSizing
+ , oldSize, newSize
+ ;
+ // QUEUE in case another action/animation is in progress
+ $N.queue(function( queueNext ){
+ // calculate 'current' min/max sizes
+ setSizeLimits(pane); // update pane-state
+ oldSize = s.size;
+ size = _parseSize(pane, size); // handle percentages & auto
+ size = max(size, _parseSize(pane, o.minSize));
+ size = min(size, s.maxSize);
+ if (size < s.minSize) { // not enough room for pane!
+ queueNext(); // call before makePaneFit() because it needs the queue free
+ makePaneFit(pane, false, skipCallback); // will hide or close pane
+ return;
+ }
+
+ // IF newSize is same as oldSize, then nothing to do - abort
+ if (!force && size === oldSize)
+ return queueNext();
+
+ // onresize_start callback CANNOT cancel resizing because this would break the layout!
+ if (!skipCallback && state.initialized && s.isVisible)
+ _runCallbacks("onresize_start", pane);
+
+ // resize the pane, and make sure its visible
+ newSize = cssSize(pane, size);
+
+ if (doFX && $P.is(":visible")) { // ANIMATE
+ var fx = $.layout.effects.size[pane] || $.layout.effects.size.all
+ , easing = o.fxSettings_size.easing || fx.easing
+ , z = options.zIndexes
+ , props = {};
+ props[ dimName ] = newSize +'px';
+ s.isMoving = true;
+ // overlay all elements during animation
+ $P.css({ zIndex: z.pane_animate })
+ .show().animate( props, o.fxSpeed_size, easing, function(){
+ // reset zIndex after animation
+ $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
+ s.isMoving = false;
+ sizePane_2(); // continue
+ queueNext();
+ });
+ }
+ else { // no animation
+ $P.css( dimName, newSize ); // resize pane
+ // if pane is visible, then
+ if ($P.is(":visible"))
+ sizePane_2(); // continue
+ else {
+ // pane is NOT VISIBLE, so just update state data...
+ // when pane is *next opened*, it will have the new size
+ s.size = size; // update state.size
+ $.extend(s, elDims($P)); // update state dimensions
+ }
+ queueNext();
+ };
+
+ });
+
+ // SUBROUTINE
+ function sizePane_2 () {
+ /* Panes are sometimes not sized precisely in some browsers!?
+ * This code will resize the pane up to 3 times to nudge the pane to the correct size
+ */
+ var actual = dimName==='width' ? $P.outerWidth() : $P.outerHeight()
+ , tries = [{
+ pane: pane
+ , count: 1
+ , target: size
+ , actual: actual
+ , correct: (size === actual)
+ , attempt: size
+ , cssSize: newSize
+ }]
+			, lastTry = tries[0]
+			, thisTry
+ , msg = 'Inaccurate size after resizing the '+ pane +'-pane.'
+ ;
+ while ( !lastTry.correct ) {
+ thisTry = { pane: pane, count: lastTry.count+1, target: size };
+
+ if (lastTry.actual > size)
+ thisTry.attempt = max(0, lastTry.attempt - (lastTry.actual - size));
+ else // lastTry.actual < size
+ thisTry.attempt = max(0, lastTry.attempt + (size - lastTry.actual));
+
+ thisTry.cssSize = cssSize(pane, thisTry.attempt);
+ $P.css( dimName, thisTry.cssSize );
+
+ thisTry.actual = dimName=='width' ? $P.outerWidth() : $P.outerHeight();
+ thisTry.correct = (size === thisTry.actual);
+
+ // if showDebugMessages, log attempts and alert the user of this *non-fatal error*
+ if (options.showDebugMessages) {
+ if ( tries.length === 1) {
+ _log(msg, false);
+ _log(lastTry, false);
+ }
+ _log(thisTry, false);
+ }
+
+				// after 4 tries, this is as close as it's gonna get!
+ if (tries.length > 3) break;
+
+ tries.push( thisTry );
+ lastTry = tries[ tries.length - 1 ];
+ }
+ // END TESTING CODE
+
+ // update pane-state dimensions
+ s.size = size;
+ $.extend(s, elDims($P));
+
+ if (s.isVisible && $P.is(":visible")) {
+ // reposition the resizer-bar
+ if ($R) $R.css( side, size + sC[inset] );
+ // resize the content-div
+ sizeContent(pane);
+ }
+
+ if (!skipCallback && !skipResizeWhileDragging && state.initialized && s.isVisible)
+ _runCallbacks("onresize_end", pane);
+
+ // resize all the adjacent panes, and adjust their toggler buttons
+ // when skipCallback passed, it means the controlling method will handle 'other panes'
+ if (!skipCallback) {
+ // also no callback if live-resize is in progress and NOT triggerEventsDuringLiveResize
+ if (!s.isSliding) sizeMidPanes(_c[pane].dir=="horz" ? "" : "center", skipResizeWhileDragging, force);
+ sizeHandles();
+ }
+
+ // if opposite-pane was autoClosed, see if it can be autoOpened now
+ var altPane = _c.oppositeEdge[pane];
+ if (size < oldSize && state[ altPane ].noRoom) {
+ setSizeLimits( altPane );
+ makePaneFit( altPane, false, skipCallback );
+ }
+
+ // DEBUG - ALERT user/developer so they know there was a sizing problem
+ if (options.showDebugMessages && tries.length > 1)
+ _log(msg +'\nSee the Error Console for details.', true);
+ }
+ }
+
+ /**
+ * @see initPanes(), sizePane(), resizeAll(), open(), close(), hide()
+	* @param {string}	panes				The pane(s) being resized, comma-delimited string
+ * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
+ * @param {boolean=} [force=false]
+ */
+, sizeMidPanes = function (panes, skipCallback, force) {
+ panes = (panes ? panes : "east,west,center").split(",");
+
+ $.each(panes, function (i, pane) {
+ if (!$Ps[pane]) return; // NO PANE - skip
+ var
+ o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , isCenter= (pane=="center")
+ , hasRoom = true
+ , CSS = {}
+ , newCenter = calcNewCenterPaneDims()
+ ;
+ // update pane-state dimensions
+ $.extend(s, elDims($P));
+
+ if (pane === "center") {
+ if (!force && s.isVisible && newCenter.width === s.outerWidth && newCenter.height === s.outerHeight)
+ return true; // SKIP - pane already the correct size
+ // set state for makePaneFit() logic
+ $.extend(s, cssMinDims(pane), {
+ maxWidth: newCenter.width
+ , maxHeight: newCenter.height
+ });
+ CSS = newCenter;
+ // convert OUTER width/height to CSS width/height
+ CSS.width = cssW($P, CSS.width);
+ // NEW - allow pane to extend 'below' visible area rather than hide it
+ CSS.height = cssH($P, CSS.height);
+ hasRoom = CSS.width >= 0 && CSS.height >= 0; // height >= 0 = ALWAYS TRUE NOW
+ // during layout init, try to shrink east/west panes to make room for center
+ if (!state.initialized && o.minWidth > s.outerWidth) {
+ var
+ reqPx = o.minWidth - s.outerWidth
+ , minE = options.east.minSize || 0
+ , minW = options.west.minSize || 0
+ , sizeE = state.east.size
+ , sizeW = state.west.size
+ , newE = sizeE
+ , newW = sizeW
+ ;
+ if (reqPx > 0 && state.east.isVisible && sizeE > minE) {
+ newE = max( sizeE-minE, sizeE-reqPx );
+ reqPx -= sizeE-newE;
+ }
+ if (reqPx > 0 && state.west.isVisible && sizeW > minW) {
+ newW = max( sizeW-minW, sizeW-reqPx );
+ reqPx -= sizeW-newW;
+ }
+ // IF we found enough extra space, then resize the border panes as calculated
+ if (reqPx === 0) {
+ if (sizeE != minE)
+ sizePane('east', newE, true, force, true); // true = skipCallback/noAnimation - initPanes will handle when done
+ if (sizeW != minW)
+ sizePane('west', newW, true, force, true);
+ // now start over!
+ sizeMidPanes('center', skipCallback, force);
+ return; // abort this loop
+ }
+ }
+ }
+ else { // for east and west, set only the height, which is same as center height
+ // set state.min/maxWidth/Height for makePaneFit() logic
+ if (s.isVisible && !s.noVerticalRoom)
+				$.extend(s, elDims($P), cssMinDims(pane));
+ if (!force && !s.noVerticalRoom && newCenter.height === s.outerHeight)
+ return true; // SKIP - pane already the correct size
+ // east/west have same top, bottom & height as center
+ CSS.top = newCenter.top;
+ CSS.bottom = newCenter.bottom;
+ // NEW - allow pane to extend 'below' visible area rather than hide it
+ CSS.height = cssH($P, newCenter.height);
+ s.maxHeight = CSS.height;
+ hasRoom = (s.maxHeight >= 0); // ALWAYS TRUE NOW
+ if (!hasRoom) s.noVerticalRoom = true; // makePaneFit() logic
+ }
+
+ if (hasRoom) {
+ // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
+ if (!skipCallback && state.initialized)
+ _runCallbacks("onresize_start", pane);
+
+ $P.css(CSS); // apply the CSS to pane
+ sizeHandles(pane); // also update resizer length
+ if (s.noRoom && !s.isClosed && !s.isHidden)
+ makePaneFit(pane); // will re-open/show auto-closed/hidden pane
+ if (s.isVisible) {
+ $.extend(s, elDims($P)); // update pane dimensions
+ if (state.initialized) sizeContent(pane); // also resize the contents, if exists
+ }
+ }
+ else if (!s.noRoom && s.isVisible) // no room for pane
+ makePaneFit(pane); // will hide or close pane
+
+ if (!s.isVisible)
+ return true; // DONE - next pane
+
+ /*
+ * Extra CSS for IE6 or IE7 in Quirks-mode - add 'width' to NORTH/SOUTH panes
+ * Normally these panes have only 'left' & 'right' positions so pane auto-sizes
+ * ALSO required when pane is an IFRAME because will NOT default to 'full width'
+ */
+ if (pane === "center") { // finished processing midPanes
+ var b = $.layout.browser;
+ var fix = b.isIE6 || (b.msie && !$.support.boxModel);
+ if ($Ps.north && (fix || state.north.tagName=="IFRAME"))
+ $Ps.north.css("width", cssW($Ps.north, sC.innerWidth));
+ if ($Ps.south && (fix || state.south.tagName=="IFRAME"))
+ $Ps.south.css("width", cssW($Ps.south, sC.innerWidth));
+ }
+
+ // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
+ if (!skipCallback && state.initialized)
+ _runCallbacks("onresize_end", pane);
+ });
+ }
+
+
+ /**
+ * @see window.onresize(), callbacks or custom code
+ */
+, resizeAll = function () {
+ if (!state.initialized) {
+ _initLayoutElements();
+ return; // no need to resize since we just initialized!
+ }
+ var oldW = sC.innerWidth
+ , oldH = sC.innerHeight
+ ;
+ // cannot size layout when 'container' is hidden or collapsed
+	if ( !$N.is(":visible") ) return;
+ $.extend( state.container, elDims( $N ) ); // UPDATE container dimensions
+ if (!sC.outerHeight) return;
+
+ // onresizeall_start will CANCEL resizing if returns false
+	// state.container has already been set, so user can access this info for calculations
+ if (false === _runCallbacks("onresizeall_start")) return false;
+
+ var // see if container is now 'smaller' than before
+ shrunkH = (sC.innerHeight < oldH)
+ , shrunkW = (sC.innerWidth < oldW)
+ , $P, o, s, dir
+ ;
+ // NOTE special order for sizing: S-N-E-W
+ $.each(["south","north","east","west"], function (i, pane) {
+ if (!$Ps[pane]) return; // no pane - SKIP
+ s = state[pane];
+ o = options[pane];
+ dir = _c[pane].dir;
+
+ if (o.autoResize && s.size != o.size) // resize pane to original size set in options
+ sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
+ else {
+ setSizeLimits(pane);
+ makePaneFit(pane, false, true, true); // true=skipCallback/forceResize
+ }
+ });
+
+ sizeMidPanes("", true, true); // true=skipCallback, true=forceResize
+ sizeHandles(); // reposition the toggler elements
+
+ // trigger all individual pane callbacks AFTER layout has finished resizing
+ o = options; // reuse alias
+ $.each(_c.allPanes, function (i, pane) {
+ $P = $Ps[pane];
+ if (!$P) return; // SKIP
+ if (state[pane].isVisible) // undefined for non-existent panes
+ _runCallbacks("onresize_end", pane); // callback - if exists
+ });
+
+ _runCallbacks("onresizeall_end");
+ //_triggerLayoutEvent(pane, 'resizeall');
+ }
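+
+	/*
+	 * Illustrative usage sketch - resizeAll is exposed on the Instance object below
+	 * and is normally triggered by window.onresize; 'myLayout' is an assumed instance name:
+	 *
+	 * @example myLayout.resizeAll(); // eg: after custom code changes the container's dimensions
+	 */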
+
+ /**
+ * Whenever a pane resizes or opens that has a nested layout, trigger resizeAll
+ *
+ * @param {string} pane The pane just resized or opened
+ */
+, resizeChildLayout = function (evt_or_pane) {
+ var pane = evtPane.call(this, evt_or_pane);
+ if (!options[pane].resizeChildLayout) return;
+ var $P = $Ps[pane]
+ , $C = $Cs[pane]
+ , d = "layout"
+ , P = Instance[pane]
+ , L = children[pane]
+ ;
+ // user may have manually set EITHER instance pointer, so handle that
+ if (P.child && !L) {
+ // have to reverse the pointers!
+ var el = P.child.container;
+ L = children[pane] = (el ? el.data(d) : 0) || null; // set pointer _directly_ to layout instance
+ }
+
+ // if a layout-pointer exists, see if child has been destroyed
+ if (L && L.destroyed)
+ L = children[pane] = null; // clear child pointers
+ // no child layout pointer is set - see if there is a child layout NOW
+ if (!L) L = children[pane] = $P.data(d) || ($C ? $C.data(d) : 0) || null; // set/update child pointers
+
+ // ALWAYS refresh the pane.child alias
+ P.child = children[pane];
+
+ if (L) L.resizeAll();
+ }
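+
+	/*
+	 * Illustrative configuration sketch - resizeChildLayout (above) only acts when the
+	 * pane-option of the same name is enabled; the selector is an example only:
+	 *
+	 * @example $("#container").layout({ center: { resizeChildLayout: true } });
+	 */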
+
+
+ /**
+ * IF pane has a content-div, then resize all elements inside pane to fit pane-height
+ *
+ * @param {string=} [panes=""] The pane(s) being resized
+ * @param {boolean=} [remeasure=false] Should the content (header/footer) be remeasured?
+ */
+, sizeContent = function (evt_or_panes, remeasure) {
+ if (!isInitialized()) return;
+
+ var panes = evtPane.call(this, evt_or_panes);
+ panes = panes ? panes.split(",") : _c.allPanes;
+
+ $.each(panes, function (idx, pane) {
+ var
+ $P = $Ps[pane]
+ , $C = $Cs[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , m = s.content // m = measurements
+ ;
+ if (!$P || !$C || !$P.is(":visible")) return true; // NOT VISIBLE - skip
+
+ // if content-element was REMOVED, update OR remove the pointer
+ if (!$C.length) {
+ initContent(pane, false); // false = do NOT sizeContent() - already there!
+			if (!$C) return; // no replacement element found - the pointer has been removed
+ }
+
+ // onsizecontent_start will CANCEL resizing if returns false
+ if (false === _runCallbacks("onsizecontent_start", pane)) return;
+
+ // skip re-measuring offsets if live-resizing
+ if ((!s.isMoving && !s.isResizing) || o.liveContentResizing || remeasure || m.top == undefined) {
+ _measure();
+ // if any footers are below pane-bottom, they may not measure correctly,
+ // so allow pane overflow and re-measure
+ if (m.hiddenFooters > 0 && $P.css("overflow") === "hidden") {
+ $P.css("overflow", "visible");
+ _measure(); // remeasure while overflowing
+ $P.css("overflow", "hidden");
+ }
+ }
+ // NOTE: spaceAbove/Below *includes* the pane paddingTop/Bottom, but not pane.borders
+ var newH = s.innerHeight - (m.spaceAbove - s.css.paddingTop) - (m.spaceBelow - s.css.paddingBottom);
+
+ if (!$C.is(":visible") || m.height != newH) {
+ // size the Content element to fit new pane-size - will autoHide if not enough room
+ setOuterHeight($C, newH, true); // true=autoHide
+ m.height = newH; // save new height
+ };
+
+ if (state.initialized)
+ _runCallbacks("onsizecontent_end", pane);
+
+ function _below ($E) {
+ return max(s.css.paddingBottom, (parseInt($E.css("marginBottom"), 10) || 0));
+ };
+
+ function _measure () {
+ var
+ ignore = options[pane].contentIgnoreSelector
+ , $Fs = $C.nextAll().not(ignore || ':lt(0)') // not :lt(0) = ALL
+ , $Fs_vis = $Fs.filter(':visible')
+ , $F = $Fs_vis.filter(':last')
+ ;
+ m = {
+ top: $C[0].offsetTop
+ , height: $C.outerHeight()
+ , numFooters: $Fs.length
+ , hiddenFooters: $Fs.length - $Fs_vis.length
+ , spaceBelow: 0 // correct if no content footer ($E)
+				};
+ m.spaceAbove = m.top; // just for state - not used in calc
+ m.bottom = m.top + m.height;
+ if ($F.length)
+				//spaceBelow = (LastFooter.top + LastFooter.height) [footerBottom] - Content.bottom + max(LastFooter.marginBottom, pane.paddingBottom)
+ m.spaceBelow = ($F[0].offsetTop + $F.outerHeight()) - m.bottom + _below($F);
+ else // no footer - check marginBottom on Content element itself
+ m.spaceBelow = _below($C);
+ };
+ });
+ }
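+
+	/*
+	 * Illustrative usage sketch - sizeContent is exposed on the Instance object below;
+	 * 'myLayout' is an assumed instance name:
+	 *
+	 * @example myLayout.sizeContent("west");        // re-fit the content-div inside the west pane
+	 * @example myLayout.sizeContent("north,south"); // accepts a comma-delimited list of panes
+	 */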
+
+
+ /**
+ * Called every time a pane is opened, closed, or resized to slide the togglers to 'center' and adjust their length if necessary
+ *
+ * @see initHandles(), open(), close(), resizeAll()
+ * @param {string=} [panes=""] The pane(s) being resized
+ */
+, sizeHandles = function (evt_or_panes) {
+		var panes = evtPane.call(this, evt_or_panes);
+ panes = panes ? panes.split(",") : _c.borderPanes;
+
+ $.each(panes, function (i, pane) {
+ var
+ o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , $TC
+ ;
+ if (!$P || !$R) return;
+
+ var
+ dir = _c[pane].dir
+ , _state = (s.isClosed ? "_closed" : "_open")
+ , spacing = o["spacing"+ _state]
+ , togAlign = o["togglerAlign"+ _state]
+ , togLen = o["togglerLength"+ _state]
+ , paneLen
+ , left
+ , offset
+ , CSS = {}
+ ;
+
+ if (spacing === 0) {
+ $R.hide();
+ return;
+ }
+ else if (!s.noRoom && !s.isHidden) // skip if resizer was hidden for any reason
+ $R.show(); // in case was previously hidden
+
+ // Resizer Bar is ALWAYS same width/height of pane it is attached to
+ if (dir === "horz") { // north/south
+ //paneLen = $P.outerWidth(); // s.outerWidth ||
+ paneLen = sC.innerWidth; // handle offscreen-panes
+ s.resizerLength = paneLen;
+				left = $.layout.cssNum($P, "left");
+ $R.css({
+ width: cssW($R, paneLen) // account for borders & padding
+ , height: cssH($R, spacing) // ditto
+ , left: left > -9999 ? left : sC.insetLeft // handle offscreen-panes
+ });
+ }
+ else { // east/west
+ paneLen = $P.outerHeight(); // s.outerHeight ||
+ s.resizerLength = paneLen;
+ $R.css({
+ height: cssH($R, paneLen) // account for borders & padding
+ , width: cssW($R, spacing) // ditto
+ , top: sC.insetTop + getPaneSize("north", true) // TODO: what if no North pane?
+ //, top: $.layout.cssNum($Ps["center"], "top")
+ });
+ }
+
+ // remove hover classes
+ removeHover( o, $R );
+
+ if ($T) {
+ if (togLen === 0 || (s.isSliding && o.hideTogglerOnSlide)) {
+ $T.hide(); // always HIDE the toggler when 'sliding'
+ return;
+ }
+ else
+ $T.show(); // in case was previously hidden
+
+ if (!(togLen > 0) || togLen === "100%" || togLen > paneLen) {
+ togLen = paneLen;
+ offset = 0;
+ }
+ else { // calculate 'offset' based on options.PANE.togglerAlign_open/closed
+ if (isStr(togAlign)) {
+ switch (togAlign) {
+ case "top":
+ case "left": offset = 0;
+ break;
+ case "bottom":
+ case "right": offset = paneLen - togLen;
+ break;
+ case "middle":
+ case "center":
+ default: offset = round((paneLen - togLen) / 2); // 'default' catches typos
+ }
+ }
+ else { // togAlign = number
+ var x = parseInt(togAlign, 10); //
+ if (togAlign >= 0) offset = x;
+ else offset = paneLen - togLen + x; // NOTE: x is negative!
+ }
+ }
+
+ if (dir === "horz") { // north/south
+ var width = cssW($T, togLen);
+ $T.css({
+ width: width // account for borders & padding
+ , height: cssH($T, spacing) // ditto
+ , left: offset // TODO: VERIFY that toggler positions correctly for ALL values
+ , top: 0
+ });
+ // CENTER the toggler content SPAN
+ $T.children(".content").each(function(){
+ $TC = $(this);
+ $TC.css("marginLeft", round((width-$TC.outerWidth())/2)); // could be negative
+ });
+ }
+ else { // east/west
+ var height = cssH($T, togLen);
+ $T.css({
+ height: height // account for borders & padding
+ , width: cssW($T, spacing) // ditto
+ , top: offset // POSITION the toggler
+ , left: 0
+ });
+ // CENTER the toggler content SPAN
+ $T.children(".content").each(function(){
+ $TC = $(this);
+ $TC.css("marginTop", round((height-$TC.outerHeight())/2)); // could be negative
+ });
+ }
+
+ // remove ALL hover classes
+ removeHover( 0, $T );
+ }
+
+ // DONE measuring and sizing this resizer/toggler, so can be 'hidden' now
+ if (!state.initialized && (o.initHidden || s.noRoom)) {
+ $R.hide();
+ if ($T) $T.hide();
+ }
+ });
+ }
+
+
+ /**
+ * @param {string} pane
+ */
+, enableClosable = function (evt_or_pane) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $T = $Ts[pane]
+ , o = options[pane]
+ ;
+ if (!$T) return;
+ o.closable = true;
+ $T .bind("click."+ sID, function(evt){ evt.stopPropagation(); toggle(pane); })
+ .css("visibility", "visible")
+ .css("cursor", "pointer")
+ .attr("title", state[pane].isClosed ? o.togglerTip_closed : o.togglerTip_open) // may be blank
+ .show();
+ }
+ /**
+ * @param {string} pane
+ * @param {boolean=} [hide=false]
+ */
+, disableClosable = function (evt_or_pane, hide) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $T = $Ts[pane]
+ ;
+ if (!$T) return;
+ options[pane].closable = false;
+		// if closable is disabled, then pane MUST be open!
+ if (state[pane].isClosed) open(pane, false, true);
+ $T .unbind("."+ sID)
+ .css("visibility", hide ? "hidden" : "visible") // instead of hide(), which creates logic issues
+ .css("cursor", "default")
+ .attr("title", "");
+ }
+
+
+ /**
+ * @param {string} pane
+ */
+, enableSlidable = function (evt_or_pane) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $R = $Rs[pane]
+ ;
+ if (!$R || !$R.data('draggable')) return;
+ options[pane].slidable = true;
+		if (state[pane].isClosed)
+ bindStartSlidingEvent(pane, true);
+ }
+ /**
+ * @param {string} pane
+ */
+, disableSlidable = function (evt_or_pane) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $R = $Rs[pane]
+ ;
+ if (!$R) return;
+ options[pane].slidable = false;
+ if (state[pane].isSliding)
+ close(pane, false, true);
+ else {
+ bindStartSlidingEvent(pane, false);
+ $R .css("cursor", "default")
+ .attr("title", "");
+ removeHover(null, $R[0]); // in case currently hovered
+ }
+ }
+
+
+ /**
+ * @param {string} pane
+ */
+, enableResizable = function (evt_or_pane) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $R = $Rs[pane]
+ , o = options[pane]
+ ;
+ if (!$R || !$R.data('draggable')) return;
+ o.resizable = true;
+ $R.draggable("enable");
+ if (!state[pane].isClosed)
+ $R .css("cursor", o.resizerCursor)
+ .attr("title", o.resizerTip);
+ }
+ /**
+ * @param {string} pane
+ */
+, disableResizable = function (evt_or_pane) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $R = $Rs[pane]
+ ;
+ if (!$R || !$R.data('draggable')) return;
+ options[pane].resizable = false;
+ $R .draggable("disable")
+ .css("cursor", "default")
+ .attr("title", "");
+ removeHover(null, $R[0]); // in case currently hovered
+ }
+
+
+ /**
+ * Move a pane from source-side (eg, west) to target-side (eg, east)
+ * If pane exists on target-side, move that to source-side, ie, 'swap' the panes
+ *
+ * @param {string} pane1 The pane/edge being swapped
+ * @param {string} pane2 ditto
+ */
+, swapPanes = function (evt_or_pane1, pane2) {
+ if (!isInitialized()) return;
+ var pane1 = evtPane.call(this, evt_or_pane1);
+ // change state.edge NOW so callbacks can know where pane is headed...
+ state[pane1].edge = pane2;
+ state[pane2].edge = pane1;
+ // run these even if NOT state.initialized
+ if (false === _runCallbacks("onswap_start", pane1)
+ || false === _runCallbacks("onswap_start", pane2)
+ ) {
+ state[pane1].edge = pane1; // reset
+ state[pane2].edge = pane2;
+ return;
+ }
+
+ var
+ oPane1 = copy( pane1 )
+ , oPane2 = copy( pane2 )
+ , sizes = {}
+ ;
+ sizes[pane1] = oPane1 ? oPane1.state.size : 0;
+ sizes[pane2] = oPane2 ? oPane2.state.size : 0;
+
+ // clear pointers & state
+ $Ps[pane1] = false;
+ $Ps[pane2] = false;
+ state[pane1] = {};
+ state[pane2] = {};
+
+ // ALWAYS remove the resizer & toggler elements
+ if ($Ts[pane1]) $Ts[pane1].remove();
+ if ($Ts[pane2]) $Ts[pane2].remove();
+ if ($Rs[pane1]) $Rs[pane1].remove();
+ if ($Rs[pane2]) $Rs[pane2].remove();
+ $Rs[pane1] = $Rs[pane2] = $Ts[pane1] = $Ts[pane2] = false;
+
+ // transfer element pointers and data to NEW Layout keys
+ move( oPane1, pane2 );
+ move( oPane2, pane1 );
+
+ // cleanup objects
+ oPane1 = oPane2 = sizes = null;
+
+ // make panes 'visible' again
+ if ($Ps[pane1]) $Ps[pane1].css(_c.visible);
+ if ($Ps[pane2]) $Ps[pane2].css(_c.visible);
+
+ // fix any size discrepancies caused by swap
+ resizeAll();
+
+ // run these even if NOT state.initialized
+ _runCallbacks("onswap_end", pane1);
+ _runCallbacks("onswap_end", pane2);
+
+ return;
+
+ function copy (n) { // n = pane
+ var
+ $P = $Ps[n]
+ , $C = $Cs[n]
+ ;
+ return !$P ? false : {
+ pane: n
+ , P: $P ? $P[0] : false
+ , C: $C ? $C[0] : false
+ , state: $.extend(true, {}, state[n])
+ , options: $.extend(true, {}, options[n])
+ }
+ };
+
+ function move (oPane, pane) {
+ if (!oPane) return;
+ var
+ P = oPane.P
+ , C = oPane.C
+ , oldPane = oPane.pane
+ , c = _c[pane]
+ , side = c.side.toLowerCase()
+ , inset = "inset"+ c.side
+ // save pane-options that should be retained
+ , s = $.extend({}, state[pane])
+ , o = options[pane]
+ // RETAIN side-specific FX Settings - more below
+ , fx = { resizerCursor: o.resizerCursor }
+ , re, size, pos
+ ;
+ $.each("fxName,fxSpeed,fxSettings".split(","), function (i, k) {
+ fx[k +"_open"] = o[k +"_open"];
+ fx[k +"_close"] = o[k +"_close"];
+ fx[k +"_size"] = o[k +"_size"];
+ });
+
+ // update object pointers and attributes
+ $Ps[pane] = $(P)
+ .data({
+ layoutPane: Instance[pane] // NEW pointer to pane-alias-object
+ , layoutEdge: pane
+ })
+ .css(_c.hidden)
+ .css(c.cssReq)
+ ;
+ $Cs[pane] = C ? $(C) : false;
+
+ // set options and state
+ options[pane] = $.extend({}, oPane.options, fx);
+ state[pane] = $.extend({}, oPane.state);
+
+ // change classNames on the pane, eg: ui-layout-pane-east ==> ui-layout-pane-west
+ re = new RegExp(o.paneClass +"-"+ oldPane, "g");
+ P.className = P.className.replace(re, o.paneClass +"-"+ pane);
+
+ // ALWAYS regenerate the resizer & toggler elements
+ initHandles(pane); // create the required resizer & toggler
+
+ // if moving to different orientation, then keep 'target' pane size
+ if (c.dir != _c[oldPane].dir) {
+ size = sizes[pane] || 0;
+ setSizeLimits(pane); // update pane-state
+ size = max(size, state[pane].minSize);
+ // use manualSizePane to disable autoResize - not useful after panes are swapped
+ manualSizePane(pane, size, true, true); // true/true = skipCallback/noAnimation
+ }
+ else // move the resizer here
+ $Rs[pane].css(side, sC[inset] + (state[pane].isVisible ? getPaneSize(pane) : 0));
+
+
+ // ADD CLASSNAMES & SLIDE-BINDINGS
+ if (oPane.state.isVisible && !s.isVisible)
+ setAsOpen(pane, true); // true = skipCallback
+ else {
+ setAsClosed(pane);
+ bindStartSlidingEvent(pane, true); // will enable events IF option is set
+ }
+
+ // DESTROY the object
+ oPane = null;
+ };
+ }
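+
+	/*
+	 * Illustrative usage sketch for swapPanes (exposed on the Instance object below);
+	 * 'myLayout' is an assumed instance name:
+	 *
+	 * @example myLayout.swapPanes("west", "east"); // swap the two panes, including options & state
+	 */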
+
+
+ /**
+ * INTERNAL method to sync pin-buttons when pane is opened or closed
+ * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes
+ *
+ * @see open(), setAsOpen(), setAsClosed()
+ * @param {string} pane These are the params returned to callbacks by layout()
+ * @param {boolean} doPin True means set the pin 'down', False means 'up'
+ */
+, syncPinBtns = function (pane, doPin) {
+ if ($.layout.plugins.buttons)
+ $.each(state[pane].pins, function (i, selector) {
+ $.layout.buttons.setPinState(Instance, $(selector), pane, doPin);
+ });
+ }
+
+; // END var DECLARATIONS
+
+ /**
+ * Capture keys when enableCursorHotkey - toggle pane if hotkey pressed
+ *
+ * @see document.keydown()
+ */
+ function keyDown (evt) {
+ if (!evt) return true;
+ var code = evt.keyCode;
+ if (code < 33) return true; // ignore special keys: ENTER, TAB, etc
+
+ var
+ PANE = {
+ 38: "north" // Up Cursor - $.ui.keyCode.UP
+ , 40: "south" // Down Cursor - $.ui.keyCode.DOWN
+ , 37: "west" // Left Cursor - $.ui.keyCode.LEFT
+ , 39: "east" // Right Cursor - $.ui.keyCode.RIGHT
+ }
+ , ALT = evt.altKey // no worky!
+ , SHIFT = evt.shiftKey
+ , CTRL = evt.ctrlKey
+ , CURSOR = (CTRL && code >= 37 && code <= 40)
+ , o, k, m, pane
+ ;
+
+ if (CURSOR && options[PANE[code]].enableCursorHotkey) // valid cursor-hotkey
+ pane = PANE[code];
+ else if (CTRL || SHIFT) // check to see if this matches a custom-hotkey
+ $.each(_c.borderPanes, function (i, p) { // loop each pane to check its hotkey
+ o = options[p];
+ k = o.customHotkey;
+ m = o.customHotkeyModifier; // if missing or invalid, treated as "CTRL+SHIFT"
+ if ((SHIFT && m=="SHIFT") || (CTRL && m=="CTRL") || (CTRL && SHIFT)) { // Modifier matches
+ if (k && code === (isNaN(k) || k <= 9 ? k.toUpperCase().charCodeAt(0) : k)) { // Key matches
+ pane = p;
+ return false; // BREAK
+ }
+ }
+ });
+
+ // validate pane
+ if (!pane || !$Ps[pane] || !options[pane].closable || state[pane].isHidden)
+ return true;
+
+ toggle(pane);
+
+ evt.stopPropagation();
+ evt.returnValue = false; // CANCEL key
+ return false;
+ };
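+
+	/*
+	 * Illustrative hotkey configuration sketch - option names are taken from the keyDown
+	 * logic above; the container selector and key values are examples only:
+	 *
+	 * @example $("#container").layout({
+	 *		west:  { enableCursorHotkey: true }        // CTRL + Left-cursor toggles the west pane
+	 *	,	south: { customHotkey: "S"                 // custom key-char (or key-code)
+	 *			   , customHotkeyModifier: "SHIFT" }   // "CTRL" or "SHIFT" - missing/invalid means CTRL+SHIFT
+	 * });
+	 */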
+
+
+/*
+ * ######################################
+ * UTILITY METHODS
+ * called externally or by initButtons
+ * ######################################
+ */
+
+ /**
+ * Change/reset a pane overflow setting & zIndex to allow popups/drop-downs to work
+ *
+ * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
+ */
+ function allowOverflow (el) {
+ if (!isInitialized()) return;
+ if (this && this.tagName) el = this; // BOUND to element
+ var $P;
+ if (isStr(el))
+ $P = $Ps[el];
+ else if ($(el).data("layoutRole"))
+ $P = $(el);
+ else
+ $(el).parents().each(function(){
+ if ($(this).data("layoutRole")) {
+ $P = $(this);
+ return false; // BREAK
+ }
+ });
+ if (!$P || !$P.length) return; // INVALID
+
+ var
+ pane = $P.data("layoutEdge")
+ , s = state[pane]
+ ;
+
+ // if pane is already raised, then reset it before doing it again!
+ // this would happen if allowOverflow is attached to BOTH the pane and an element
+ if (s.cssSaved)
+ resetOverflow(pane); // reset previous CSS before continuing
+
+ // if pane is raised by sliding or resizing, or its closed, then abort
+ if (s.isSliding || s.isResizing || s.isClosed) {
+ s.cssSaved = false;
+ return;
+ }
+
+ var
+ newCSS = { zIndex: (options.zIndexes.resizer_normal + 1) }
+ , curCSS = {}
+ , of = $P.css("overflow")
+ , ofX = $P.css("overflowX")
+ , ofY = $P.css("overflowY")
+ ;
+ // determine which, if any, overflow settings need to be changed
+ if (of != "visible") {
+ curCSS.overflow = of;
+ newCSS.overflow = "visible";
+ }
+ if (ofX && !ofX.match(/visible|auto/)) {
+ curCSS.overflowX = ofX;
+ newCSS.overflowX = "visible";
+ }
+ if (ofY && !ofY.match(/visible|auto/)) {
+			curCSS.overflowY = ofY;
+ newCSS.overflowY = "visible";
+ }
+
+ // save the current overflow settings - even if blank!
+ s.cssSaved = curCSS;
+
+ // apply new CSS to raise zIndex and, if necessary, make overflow 'visible'
+ $P.css( newCSS );
+
+ // make sure the zIndex of all other panes is normal
+ $.each(_c.allPanes, function(i, p) {
+ if (p != pane) resetOverflow(p);
+ });
+
+ };
+ /**
+ * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
+ */
+ function resetOverflow (el) {
+ if (!isInitialized()) return;
+ if (this && this.tagName) el = this; // BOUND to element
+ var $P;
+ if (isStr(el))
+ $P = $Ps[el];
+ else if ($(el).data("layoutRole"))
+ $P = $(el);
+ else
+ $(el).parents().each(function(){
+ if ($(this).data("layoutRole")) {
+ $P = $(this);
+ return false; // BREAK
+ }
+ });
+ if (!$P || !$P.length) return; // INVALID
+
+ var
+ pane = $P.data("layoutEdge")
+ , s = state[pane]
+ , CSS = s.cssSaved || {}
+ ;
+ // reset the zIndex
+ if (!s.isSliding && !s.isResizing)
+ $P.css("zIndex", options.zIndexes.pane_normal);
+
+ // reset Overflow - if necessary
+ $P.css( CSS );
+
+ // clear var
+ s.cssSaved = false;
+ };
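+
+	/*
+	 * Illustrative usage sketch for allowOverflow/resetOverflow - both are exposed on the
+	 * Instance object below; 'myLayout' and the selector are example names only:
+	 *
+	 * @example myLayout.allowOverflow("north");  // pass a pane name...
+	 * @example myLayout.resetOverflow("north");
+	 * @example $("#myMenuBar").mouseover( myLayout.allowOverflow ); // ...or bind to an element event
+	 */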
+
+/*
+ * #####################
+ * CREATE/RETURN LAYOUT
+ * #####################
+ */
+
+ // validate that container exists
+ var $N = $(this).eq(0); // FIRST matching Container element
+ if (!$N.length) {
+ if (options.showErrorMessages)
+ _log( lang.errContainerMissing, true );
+ return null;
+ };
+
+ // Users retrieve Instance of a layout with: $N.layout() OR $N.data("layout")
+ // return the Instance-pointer if layout has already been initialized
+ if ($N.data("layoutContainer") && $N.data("layout"))
+ return $N.data("layout"); // cached pointer
+
+ // init global vars
+ var
+ $Ps = {} // Panes x5 - set in initPanes()
+ , $Cs = {} // Content x5 - set in initPanes()
+ , $Rs = {} // Resizers x4 - set in initHandles()
+ , $Ts = {} // Togglers x4 - set in initHandles()
+ , $Ms = $([]) // Masks - up to 2 masks per pane (IFRAME + DIV)
+ // aliases for code brevity
+ , sC = state.container // alias for easy access to 'container dimensions'
+ , sID = state.id // alias for unique layout ID/namespace - eg: "layout435"
+ ;
+
+ // create Instance object to expose data & option Properties, and primary action Methods
+ var Instance = {
+ // layout data
+ options: options // property - options hash
+ , state: state // property - dimensions hash
+ // object pointers
+ , container: $N // property - object pointers for layout container
+ , panes: $Ps // property - object pointers for ALL Panes: panes.north, panes.center
+ , contents: $Cs // property - object pointers for ALL Content: contents.north, contents.center
+ , resizers: $Rs // property - object pointers for ALL Resizers, eg: resizers.north
+ , togglers: $Ts // property - object pointers for ALL Togglers, eg: togglers.north
+ // border-pane open/close
+ , hide: hide // method - ditto
+ , show: show // method - ditto
+ , toggle: toggle // method - pass a 'pane' ("north", "west", etc)
+ , open: open // method - ditto
+ , close: close // method - ditto
+ , slideOpen: slideOpen // method - ditto
+ , slideClose: slideClose // method - ditto
+ , slideToggle: slideToggle // method - ditto
+ // pane actions
+ , setSizeLimits: setSizeLimits // method - pass a 'pane' - update state min/max data
+	,	_sizePane:			sizePane		// method - intended for use by plugins only!
+ , sizePane: manualSizePane // method - pass a 'pane' AND an 'outer-size' in pixels or percent, or 'auto'
+ , sizeContent: sizeContent // method - pass a 'pane'
+ , swapPanes: swapPanes // method - pass TWO 'panes' - will swap them
+ // pane element methods
+ , initContent: initContent // method - ditto
+ , addPane: addPane // method - pass a 'pane'
+ , removePane: removePane // method - pass a 'pane' to remove from layout, add 'true' to delete the pane-elem
+	,	createChildLayout:	createChildLayout	// method - pass a 'pane' and (optional) layout-options (OVERRIDES options[pane].childOptions)
+ // special pane option setting
+ , enableClosable: enableClosable // method - pass a 'pane'
+ , disableClosable: disableClosable // method - ditto
+ , enableSlidable: enableSlidable // method - ditto
+ , disableSlidable: disableSlidable // method - ditto
+ , enableResizable: enableResizable // method - ditto
+	,	disableResizable:	disableResizable	// method - ditto
+ // utility methods for panes
+ , allowOverflow: allowOverflow // utility - pass calling element (this)
+ , resetOverflow: resetOverflow // utility - ditto
+ // layout control
+ , destroy: destroy // method - no parameters
+ , initPanes: isInitialized // method - no parameters
+ , resizeAll: resizeAll // method - no parameters
+ // callback triggering
+ , runCallbacks: _runCallbacks // method - pass evtName & pane (if a pane-event), eg: trigger("onopen", "west")
+ // alias collections of options, state and children - created in addPane and extended elsewhere
+ , hasParentLayout: false // set by initContainer()
+ , children: children // pointers to child-layouts, eg: Instance.children["west"]
+ , north: false // alias group: { name: pane, pane: $Ps[pane], options: options[pane], state: state[pane], child: children[pane] }
+ , south: false // ditto
+ , west: false // ditto
+ , east: false // ditto
+ , center: false // ditto
+ };
+
+ // create the border layout NOW
+ if (_create() === 'cancel') // onload_start callback returned false to CANCEL layout creation
+ return null;
+ else // true OR false -- if layout-elements did NOT init (hidden or do not exist), can auto-init later
+ return Instance; // return the Instance object
+
+}
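+
+/*
+ * Illustrative usage sketch - element IDs and option values below are examples only:
+ *
+ * @example var myLayout = $("#container").layout({
+ *		west:  { size: 200, closable: true, slidable: true }
+ *	,	north: { size: "auto" }
+ * });
+ * @example myLayout.toggle("west");             // methods exposed by the Instance object
+ * @example var same = $("#container").layout(); // returns the cached Instance pointer
+ */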
+
+
+
+
+/**
+ * jquery.layout.state 1.0
+ * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
+ *
+ * Copyright (c) 2010
+ * Kevin Dalman (http://allpro.net)
+ *
+ * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
+ * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
*
- * $Date: 2010-07-13 08:00:00 (Wed, 14 July 2010) $
- * $Rev: 30293 $
+ * @dependencies: UI Layout 1.3.0.rc30.1 or higher
+ * @dependencies: $.ui.cookie (above)
+ *
+ * @support: http://groups.google.com/group/jquery-ui-layout
+ */
+/*
+ * State-management options stored in options.stateManagement, which includes a .cookie hash
+ * Default options saves ALL KEYS for ALL PANES, ie: pane.size, pane.isClosed, pane.isHidden
+ *
+ * // STATE/COOKIE OPTIONS
+ * @example $(el).layout({
+ stateManagement: {
+ enabled: true
+ , stateKeys: "east.size,west.size,east.isClosed,west.isClosed"
+ , cookie: { name: "appLayout", path: "/" }
+ }
+ })
+ * @example $(el).layout({ stateManagement__enabled: true }) // enable auto-state-management using cookies
+ * @example $(el).layout({ stateManagement__cookie: { name: "appLayout", path: "/" } })
+ * @example $(el).layout({ stateManagement__cookie__name: "appLayout", stateManagement__cookie__path: "/" })
+ *
+ * // STATE/COOKIE METHODS
+ * @example myLayout.saveCookie( "west.isClosed,north.size,south.isHidden", {expires: 7} );
+ * @example myLayout.loadCookie();
+ * @example myLayout.deleteCookie();
+ * @example var JSON = myLayout.readState(); // CURRENT Layout State
+ * @example var JSON = myLayout.readCookie(); // SAVED Layout State (from cookie)
+ * @example var JSON = myLayout.state.stateData; // LAST LOADED Layout State (cookie saved in layout.state hash)
+ *
+ * CUSTOM STATE-MANAGEMENT (eg, saved in a database)
+ * @example var JSON = myLayout.readState( "west.isClosed,north.size,south.isHidden" );
+ * @example myLayout.loadState( JSON );
+ */
+
+/**
+ * UI COOKIE UTILITY
+ *
+ * A $.cookie OR $.ui.cookie namespace *should be standard*, but until then...
+ * This creates $.ui.cookie so Layout does not need the cookie.jquery.js plugin
+ * NOTE: This utility is REQUIRED by the layout.state plugin
+ *
+ * Cookie methods in Layout are created as part of State Management
+ */
+if (!$.ui) $.ui = {};
+$.ui.cookie = {
+
+	// cookieEnabled is not in DOM specs, but DOES work in all browsers, including IE6
+ acceptsCookies: !!navigator.cookieEnabled
+
+, read: function (name) {
+ var
+ c = document.cookie
+ , cs = c ? c.split(';') : []
+ , pair // loop var
+ ;
+ for (var i=0, n=cs.length; i < n; i++) {
+ pair = $.trim(cs[i]).split('='); // name=value pair
+ if (pair[0] == name) // found the layout cookie
+ return decodeURIComponent(pair[1]);
+
+ }
+ return null;
+ }
+
+, write: function (name, val, cookieOpts) {
+ var
+ params = ''
+ , date = ''
+ , clear = false
+ , o = cookieOpts || {}
+ , x = o.expires
+ ;
+ if (x && x.toUTCString)
+ date = x;
+ else if (x === null || typeof x === 'number') {
+ date = new Date();
+ if (x > 0)
+ date.setDate(date.getDate() + x);
+ else {
+ date.setFullYear(1970);
+ clear = true;
+ }
+ }
+ if (date) params += ';expires='+ date.toUTCString();
+ if (o.path) params += ';path='+ o.path;
+ if (o.domain) params += ';domain='+ o.domain;
+ if (o.secure) params += ';secure';
+ document.cookie = name +'='+ (clear ? "" : encodeURIComponent( val )) + params; // write or clear cookie
+ }
+
+, clear: function (name) {
+ $.ui.cookie.write(name, '', {expires: -1});
+ }
+
+};
+// if cookie.jquery.js is not loaded, create an alias to replicate it
+// this may be useful to other plugins or code dependent on that plugin
+if (!$.cookie) $.cookie = function (k, v, o) {
+ var C = $.ui.cookie;
+ if (v === null)
+ C.clear(k);
+ else if (v === undefined)
+ return C.read(k);
+ else
+ C.write(k, v, o);
+};
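+
+/*
+ * Illustrative usage sketch for the cookie utility above - the cookie name/value are examples only:
+ *
+ * @example $.ui.cookie.write("myCookie", "someValue", { expires: 7, path: "/" });
+ * @example var val = $.ui.cookie.read("myCookie"); // returns null if the cookie does not exist
+ * @example $.ui.cookie.clear("myCookie");          // writes an already-expired cookie
+ */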
+
+
+// tell Layout that the state plugin is available
+$.layout.plugins.stateManagement = true;
+
+// Add State-Management options to layout.defaults
+$.layout.config.optionRootKeys.push("stateManagement");
+$.layout.defaults.stateManagement = {
+ enabled: false // true = enable state-management, even if not using cookies
+, autoSave: true // Save a state-cookie when page exits?
+, autoLoad: true // Load the state-cookie when Layout inits?
+ // List state-data to save - must be pane-specific
+, stateKeys: "north.size,south.size,east.size,west.size,"+
+ "north.isClosed,south.isClosed,east.isClosed,west.isClosed,"+
+ "north.isHidden,south.isHidden,east.isHidden,west.isHidden"
+, cookie: {
+ name: "" // If not specified, will use Layout.name, else just "Layout"
+ , domain: "" // blank = current domain
+ , path: "" // blank = current page, '/' = entire website
+ , expires: "" // 'days' to keep cookie - leave blank for 'session cookie'
+ , secure: false
+ }
+};
+// Set stateManagement as a layout-option, NOT a pane-option
+$.layout.optionsMap.layout.push("stateManagement");
+
+/*
+ * State Management methods
+ */
+$.layout.state = {
+
+ /**
+ * Get the current layout state and save it to a cookie
+ *
+ * myLayout.saveCookie( keys, cookieOpts )
+ *
+ * @param {Object} inst
+ * @param {(string|Array)=} keys
+ * @param {Object=} opts
+ */
+ saveCookie: function (inst, keys, cookieOpts) {
+ var o = inst.options
+ , oS = o.stateManagement
+ , oC = $.extend(true, {}, oS.cookie, cookieOpts || null)
+ , data = inst.state.stateData = inst.readState( keys || oS.stateKeys ) // read current panes-state
+ ;
+ $.ui.cookie.write( oC.name || o.name || "Layout", $.layout.state.encodeJSON(data), oC );
+ return $.extend(true, {}, data); // return COPY of state.stateData data
+ }
+
+ /**
+ * Remove the state cookie
+ *
+ * @param {Object} inst
+ */
+, deleteCookie: function (inst) {
+ var o = inst.options;
+ $.ui.cookie.clear( o.stateManagement.cookie.name || o.name || "Layout" );
+ }
+
+ /**
+ * Read & return data from the cookie - as JSON
+ *
+ * @param {Object} inst
+ */
+, readCookie: function (inst) {
+ var o = inst.options;
+ var c = $.ui.cookie.read( o.stateManagement.cookie.name || o.name || "Layout" );
+ // convert cookie string back to a hash and return it
+ return c ? $.layout.state.decodeJSON(c) : {};
+ }
+
+ /**
+ * Get data from the cookie and USE IT to loadState
+ *
+ * @param {Object} inst
+ */
+, loadCookie: function (inst) {
+ var c = $.layout.state.readCookie(inst); // READ the cookie
+ if (c) {
+ inst.state.stateData = $.extend(true, {}, c); // SET state.stateData
+ inst.loadState(c); // LOAD the retrieved state
+ }
+ return c;
+ }
+
+ /**
+ * Update layout options from the cookie, if one exists
+ *
+ * @param {Object} inst
+ * @param {Object=} stateData
+ * @param {boolean=} animate
+ */
+, loadState: function (inst, stateData, animate) {
+ stateData = $.layout.transformData( stateData ); // panes = default subkey
+ if ($.isEmptyObject( stateData )) return;
+ $.extend(true, inst.options, stateData); // update layout options
+ // if layout has already been initialized, then UPDATE layout state
+ if (inst.state.initialized) {
+			var pane, vis, o, s, h, c, state
+ , noAnimate = (animate===false)
+ ;
+ $.each($.layout.config.borderPanes, function (idx, pane) {
+ state = inst.state[pane];
+ o = stateData[ pane ];
+ if (typeof o != 'object') return; // no key, continue
+ s = o.size;
+ c = o.initClosed;
+ h = o.initHidden;
+ vis = state.isVisible;
+ // resize BEFORE opening
+ if (!vis)
+ inst.sizePane(pane, s, false, false);
+ if (h === true) inst.hide(pane, noAnimate);
+ else if (c === false) inst.open (pane, false, noAnimate);
+ else if (c === true) inst.close(pane, false, noAnimate);
+ else if (h === false) inst.show (pane, false, noAnimate);
+ // resize AFTER any other actions
+ if (vis)
+ inst.sizePane(pane, s, false, noAnimate); // animate resize if option passed
+ });
+ };
+ }
+
+ /**
+ * Get the *current layout state* and return it as a hash
+ *
+ * @param {Object=} inst
+ * @param {(string|Array)=} keys
+ */
+, readState: function (inst, keys) {
+ var
+ data = {}
+ , alt = { isClosed: 'initClosed', isHidden: 'initHidden' }
+ , state = inst.state
+ , panes = $.layout.config.allPanes
+ , pair, pane, key, val
+ ;
+ if (!keys) keys = inst.options.stateManagement.stateKeys; // if called by user
+ if ($.isArray(keys)) keys = keys.join(",");
+ // convert keys to an array and change delimiters from '__' to '.'
+ keys = keys.replace(/__/g, ".").split(',');
+ // loop keys and create a data hash
+ for (var i=0, n=keys.length; i < n; i++) {
+ pair = keys[i].split(".");
+ pane = pair[0];
+ key = pair[1];
+ if ($.inArray(pane, panes) < 0) continue; // bad pane!
+ val = state[ pane ][ key ];
+ if (val == undefined) continue;
+ if (key=="isClosed" && state[pane]["isSliding"])
+ val = true; // if sliding, then *really* isClosed
+ ( data[pane] || (data[pane]={}) )[ alt[key] ? alt[key] : key ] = val;
+ }
+ return data;
+ }
+
+ /**
+	 * Stringify a JSON hash so it can be saved in a cookie or db-field
+ */
+, encodeJSON: function (JSON) {
+ return parse(JSON);
+ function parse (h) {
+ var D=[], i=0, k, v, t; // k = key, v = value
+ for (k in h) {
+ v = h[k];
+ t = typeof v;
+ if (t == 'string') // STRING - add quotes
+ v = '"'+ v +'"';
+ else if (t == 'object') // SUB-KEY - recurse into it
+ v = parse(v);
+ D[i++] = '"'+ k +'":'+ v;
+ }
+ return '{'+ D.join(',') +'}';
+ };
+ }
+
+ /**
+ * Convert stringified JSON back to a hash object
+	 * @see $.parseJSON(), added in jQuery 1.4.1
+ */
+, decodeJSON: function (str) {
+ try { return $.parseJSON ? $.parseJSON(str) : window["eval"]("("+ str +")") || {}; }
+ catch (e) { return {}; }
+ }
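+
+	/*
+	 * Illustrative round-trip sketch for the JSON helpers above:
+	 *
+	 * @example var str = $.layout.state.encodeJSON({ west: { size: 200, isClosed: false } });
+	 *		// str == '{"west":{"size":200,"isClosed":false}}'
+	 * @example var data = $.layout.state.decodeJSON( str ); // back to a hash
+	 */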
+
+
+, _create: function (inst) {
+ var _ = $.layout.state;
+ // ADD State-Management plugin methods to inst
+ $.extend( inst, {
+ // readCookie - update options from cookie - returns hash of cookie data
+ readCookie: function () { return _.readCookie(inst); }
+ // deleteCookie
+ , deleteCookie: function () { _.deleteCookie(inst); }
+ // saveCookie - optionally pass keys-list and cookie-options (hash)
+ , saveCookie: function (keys, cookieOpts) { return _.saveCookie(inst, keys, cookieOpts); }
+ // loadCookie - readCookie and use to loadState() - returns hash of cookie data
+ , loadCookie: function () { return _.loadCookie(inst); }
+ // loadState - pass a hash of state to use to update options
+ , loadState: function (stateData, animate) { _.loadState(inst, stateData, animate); }
+ // readState - returns hash of current layout-state
+ , readState: function (keys) { return _.readState(inst, keys); }
+ // add JSON utility methods too...
+ , encodeJSON: _.encodeJSON
+ , decodeJSON: _.decodeJSON
+ });
+
+ // init state.stateData key, even if plugin is initially disabled
+ inst.state.stateData = {};
+
+ // read and load cookie-data per options
+ var oS = inst.options.stateManagement;
+ if (oS.enabled) {
+ if (oS.autoLoad) // update the options from the cookie
+ inst.loadCookie();
+ else // don't modify options - just store cookie data in state.stateData
+ inst.state.stateData = inst.readCookie();
+ }
+ }
+
+, _unload: function (inst) {
+ var oS = inst.options.stateManagement;
+ if (oS.enabled) {
+ if (oS.autoSave) // save a state-cookie automatically
+ inst.saveCookie();
+ else // don't save a cookie, but do store state-data in state.stateData key
+ inst.state.stateData = inst.readState();
+ }
+ }
+
+};
+
+// add state initialization method to Layout's onCreate array of functions
+$.layout.onCreate.push( $.layout.state._create );
+$.layout.onUnload.push( $.layout.state._unload );
+
+
+
+
+/**
+ * jquery.layout.buttons 1.0
+ * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
+ *
+ * Copyright (c) 2010
+ * Kevin Dalman (http://allpro.net)
+ *
+ * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
+ * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
+ *
+ * @dependencies: UI Layout 1.3.0.rc30.1 or higher
+ *
+ * @support: http://groups.google.com/group/jquery-ui-layout
+ *
+ * Docs: [ to come ]
+ * Tips: [ to come ]
+ */
+
+// tell Layout that the state plugin is available
+$.layout.plugins.buttons = true;
+
+// Add buttons options to layout.defaults
+$.layout.defaults.autoBindCustomButtons = false;
+// Specify autoBindCustomButtons as a layout-option, NOT a pane-option
+$.layout.optionsMap.layout.push("autoBindCustomButtons");
+
+var lang = $.layout.language;
+
+/*
+ * Button methods
+ */
+$.layout.buttons = {
+
+ /**
+ * Searches for .ui-layout-button-xxx elements and auto-binds them as layout-buttons
+ *
+ * @see _create()
+ *
+ * @param {Object} inst Layout Instance object
+ */
+ init: function (inst) {
+ var pre = "ui-layout-button-"
+ , layout = inst.options.name || ""
+ , name;
+ $.each("toggle,open,close,pin,toggle-slide,open-slide".split(","), function (i, action) {
+ $.each($.layout.config.borderPanes, function (ii, pane) {
+ $("."+pre+action+"-"+pane).each(function(){
+ // if button was previously 'bound', data.layoutName was set, but is blank if layout has no 'name'
+ name = $(this).data("layoutName") || $(this).attr("layoutName");
+ if (name == undefined || name === layout)
+ inst.bindButton(this, action, pane);
+ });
+ });
+ });
+ }
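+
+ /* Editor's note - illustrative markup only: init() above binds any element whose
+  * class follows the "ui-layout-button-{action}-{pane}" pattern, eg:
+  *
+  *   <button class="ui-layout-button-toggle-west">Toggle West Pane</button>
+  *
+  * Optionally scope a button to one layout by setting its 'layoutName' data/attribute
+  * to match that layout's options.name.
+  */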
+
+ /**
+ * Helper function to validate params received by addButton utilities
+ *
+ * Two classes are added to the element, based on the buttonClass...
+ * The type of button is appended to create the 2nd className:
+ * - ui-layout-button-pin // action btnClass
+ * - ui-layout-button-pin-west // action btnClass + pane
+ * - ui-layout-button-toggle
+ * - ui-layout-button-open
+ * - ui-layout-button-close
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+ * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
+ * @param {string} action The button action: 'toggle', 'open', 'close' or 'pin'
+ *
+ * @return {Array.<Object>} If the params are valid, the element matching 'selector' in a jQuery wrapper - otherwise an empty jQuery object
+ */
+, get: function (inst, selector, pane, action) {
+ var $E = $(selector)
+ , o = inst.options
+ , err = o.showErrorMessages
+ ;
+ if (!$E.length) { // element not found
+ if (err) $.layout.msg(lang.errButton + lang.selector +": "+ selector, true);
+ }
+ else if ($.inArray(pane, $.layout.config.borderPanes) < 0) { // invalid 'pane' specified
+ if (err) $.layout.msg(lang.errButton + lang.pane +": "+ pane, true);
+ $E = $(""); // NO BUTTON
+ }
+ else { // VALID
+ var btn = o[pane].buttonClass +"-"+ action;
+ $E .addClass( btn +" "+ btn +"-"+ pane )
+ .data("layoutName", o.name); // add layout identifier - even if blank!
+ }
+ return $E;
+ }
+
+
+ /**
+ * NEW syntax for binding layout-buttons - will eventually replace addToggle, addOpen, etc.
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+ * @param {string} action
+ * @param {string} pane
+ */
+, bind: function (inst, selector, action, pane) {
+ var _ = $.layout.buttons;
+ switch (action.toLowerCase()) {
+ case "toggle": _.addToggle (inst, selector, pane); break;
+ case "open": _.addOpen (inst, selector, pane); break;
+ case "close": _.addClose (inst, selector, pane); break;
+ case "pin": _.addPin (inst, selector, pane); break;
+ case "toggle-slide": _.addToggle (inst, selector, pane, true); break;
+ case "open-slide": _.addOpen (inst, selector, pane, true); break;
+ }
+ return inst;
+ }
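+
+ /* Editor's sketch - illustrative usage: bind() is exposed on each layout instance
+  * as bindButton() (see _load below); the selectors here are hypothetical:
+  *
+  *   myLayout.bindButton("#btn-close-south", "close", "south");
+  *   myLayout.bindButton(".toggler-west", "toggle-slide", "west");
+  */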
+
+ /**
+ * Add a custom Toggler button for a pane
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+ * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
+ * @param {boolean=} slide true = slide-open, false = pin-open
+ */
+, addToggle: function (inst, selector, pane, slide) {
+ $.layout.buttons.get(inst, selector, pane, "toggle")
+ .click(function(evt){
+ inst.toggle(pane, !!slide);
+ evt.stopPropagation();
+ });
+ return inst;
+ }
+
+ /**
+ * Add a custom Open button for a pane
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+ * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
+ * @param {boolean=} slide true = slide-open, false = pin-open
+ */
+, addOpen: function (inst, selector, pane, slide) {
+ $.layout.buttons.get(inst, selector, pane, "open")
+ .attr("title", lang.Open)
+ .click(function (evt) {
+ inst.open(pane, !!slide);
+ evt.stopPropagation();
+ });
+ return inst;
+ }
+
+ /**
+ * Add a custom Close button for a pane
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+ * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
+ */
+, addClose: function (inst, selector, pane) {
+ $.layout.buttons.get(inst, selector, pane, "close")
+ .attr("title", lang.Close)
+ .click(function (evt) {
+ inst.close(pane);
+ evt.stopPropagation();
+ });
+ return inst;
+ }
+
+ /**
+ * Add a custom Pin button for a pane
+ *
+ * Four classes are added to the element, based on the buttonClass for the associated pane...
+ * Assuming the default buttonClass and the pin is 'up', these classes are added for a west-pane pin:
+ * - ui-layout-button-pin
+ * - ui-layout-button-pin-west
+ * - ui-layout-button-pin-up
+ * - ui-layout-button-pin-west-up
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+ * @param {string} pane Name of the pane the pin is for: 'north', 'south', etc.
+ */
+, addPin: function (inst, selector, pane) {
+ var _ = $.layout.buttons
+ , $E = _.get(inst, selector, pane, "pin");
+ if ($E.length) {
+ var s = inst.state[pane];
+ $E.click(function (evt) {
+ _.setPinState(inst, $(this), pane, (s.isSliding || s.isClosed));
+ if (s.isSliding || s.isClosed) inst.open( pane ); // change from sliding to open
+ else inst.close( pane ); // slide-closed
+ evt.stopPropagation();
+ });
+ // add up/down pin attributes and classes
+ _.setPinState(inst, $E, pane, (!s.isClosed && !s.isSliding));
+ // add this pin to the pane data so we can 'sync it' automatically
+ // PANE.pins key is an array so we can store multiple pins for each pane
+ s.pins.push( selector ); // just save the selector string
+ }
+ return inst;
+ }
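+
+ /* Editor's sketch - illustrative usage via the instance wrappers added in _load(),
+  * with a hypothetical '#pin-west' selector:
+  *
+  *   myLayout.addPinBtn("#pin-west", "west");         // deprecated wrapper, or...
+  *   myLayout.bindButton("#pin-west", "pin", "west");
+  *
+  * The pin element then toggles between the '-up' and '-down' classes applied by setPinState() below.
+  */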
+
+ /**
+ * Change the class of the pin button to make it look 'up' or 'down'
+ *
+ * @see addPin(), syncPins()
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {Array.<Object>} $Pin The pin-span element in a jQuery wrapper
+ * @param {string} pane Name of the pane the pin is for: 'north', 'south', etc.
+ * @param {boolean} doPin true = set the pin 'down', false = set it 'up'
+ */
+, setPinState: function (inst, $Pin, pane, doPin) {
+ var updown = $Pin.attr("pin");
+ if (updown && doPin === (updown=="down")) return; // already in correct state
+ var
+ pin = inst.options[pane].buttonClass +"-pin"
+ , side = pin +"-"+ pane
+ , UP = pin +"-up "+ side +"-up"
+ , DN = pin +"-down "+side +"-down"
+ ;
+ $Pin
+ .attr("pin", doPin ? "down" : "up") // logic
+ .attr("title", doPin ? lang.Unpin : lang.Pin)
+ .removeClass( doPin ? UP : DN )
+ .addClass( doPin ? DN : UP )
+ ;
+ }
+
+ /**
+ * INTERNAL function to sync 'pin buttons' when pane is opened or closed
+ * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes
+ *
+ * @see open(), close()
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {string} pane Name of the pane whose pin-buttons should be synced: 'north', 'south', etc.
+ * @param {boolean} doPin True means set the pin 'down', False means 'up'
+ */
+, syncPinBtns: function (inst, pane, doPin) {
+ // REAL METHOD IS _INSIDE_ LAYOUT - THIS IS HERE JUST FOR REFERENCE
+ $.each(inst.state[pane].pins, function (i, selector) {
+ $.layout.buttons.setPinState(inst, $(selector), pane, doPin);
+ });
+ }
+
+
+, _load: function (inst) {
+ var _ = $.layout.buttons;
+ // ADD Button methods to Layout Instance
+ // Note: sel = jQuery Selector string
+ $.extend( inst, {
+ bindButton: function (sel, action, pane) { return _.bind(inst, sel, action, pane); }
+ // DEPRECATED METHODS
+ , addToggleBtn: function (sel, pane, slide) { return _.addToggle(inst, sel, pane, slide); }
+ , addOpenBtn: function (sel, pane, slide) { return _.addOpen(inst, sel, pane, slide); }
+ , addCloseBtn: function (sel, pane) { return _.addClose(inst, sel, pane); }
+ , addPinBtn: function (sel, pane) { return _.addPin(inst, sel, pane); }
+ });
+
+ // init state array to hold pin-buttons
+ for (var i=0; i<4; i++) {
+ var pane = $.layout.config.borderPanes[i];
+ inst.state[pane].pins = [];
+ }
+
+ // auto-init buttons onLoad if option is enabled
+ if ( inst.options.autoBindCustomButtons )
+ _.init(inst);
+ }
+
+, _unload: function (inst) {
+ // TODO: unbind all buttons???
+ }
+
+};
+
+// add initialization method to Layout's onLoad array of functions
+$.layout.onLoad.push( $.layout.buttons._load );
+//$.layout.onUnload.push( $.layout.buttons._unload );
+
+
+
+/**
+ * jquery.layout.browserZoom 1.0
+ * $Date: 2011-12-29 08:00:00 (Thu, 29 Dec 2011) $
+ *
+ * Copyright (c) 2012
+ * Kevin Dalman (http://allpro.net)
+ *
+ * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
+ * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
+ *
+ * @dependencies: UI Layout 1.3.0.rc30.1 or higher
+ *
+ * @support: http://groups.google.com/group/jquery-ui-layout
+ *
+ * @todo: Extend logic to handle problematic zooming in other browsers
+ * @todo: Add hotkey/mousewheel bindings to _instantly_ respond to these zoom events
+ */
+
+// tell Layout that the plugin is available
+$.layout.plugins.browserZoom = true;
+
+$.layout.defaults.browserZoomCheckInterval = 1000;
+$.layout.optionsMap.layout.push("browserZoomCheckInterval");
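+
+// Editor's note - illustrative usage only: the check interval is a layout-option
+// (default 1000, presumably milliseconds), eg:
+//   var myLayout = $("body").layout({ browserZoomCheckInterval: 500 });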
+
+/*
+ * browserZoom methods
*/
-(function($){$.fn.layout=function(opts){var lang={Pane:"Pane",Open:"Open",Close:"Close",Resize:"Resize",Slide:"Slide Open",Pin:"Pin",Unpin:"Un-Pin",selector:"selector",msgNoRoom:"Not enough room to show this pane.",errContainerMissing:"UI Layout Initialization Error\n\nThe specified layout-container does not exist.",errCenterPaneMissing:"UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element.",errContainerHeight:"UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!",errButton:"Error Adding Button \n\nInvalid "};var options={name:"",scrollToBookmarkOnLoad:true,resizeWithWindow:true,resizeWithWindowDelay:200,resizeWithWindowMaxDelay:0,onresizeall_start:null,onresizeall_end:null,onload:null,onunload:null,autoBindCustomButtons:false,zIndex:null,defaults:{applyDemoStyles:false,closable:true,resizable:true,slidable:true,initClosed:false,initHidden:false,contentSelector:".ui-layout-content",contentIgnoreSelector:".ui-layout-ignore",findNestedContent:false,paneClass:"ui-layout-pane",resizerClass:"ui-layout-resizer",togglerClass:"ui-layout-toggler",buttonClass:"ui-layout-button",minSize:0,maxSize:0,spacing_open:6,spacing_closed:6,togglerLength_open:50,togglerLength_closed:50,togglerAlign_open:"center",togglerAlign_closed:"center",togglerTip_open:lang.Close,togglerTip_closed:lang.Open,togglerContent_open:"",togglerContent_closed:"",resizerDblClickToggle:true,autoResize:true,autoReopen:true,resizerDragOpacity:1,maskIframesOnResize:true,resizeNestedLayout:true,resizeWhileDragging:false,resizeContentWhileDragging:false,noRoomToOpenTip:lang.msgNoRoom,resizerTip:lang.Resize,sliderTip:lang.Slide,sliderCursor:"pointer",slideTrigger_open:"click",slideTrigger_close:"mouseleave",hideTogglerOnSlide:false,preventQuickSlideClose:!!($.browser.webkit||$.browser.safari),showOverflowOnHover:false,enableCursorHotkey:true,customHotkeyModifier:"SHIFT",fxName:"slide",fxSpeed:null,fxSettings:{},fxOpacityFix:true,triggerEventsOnLoad:false,triggerEventsWhileDragging:true,onshow_start:null,onshow_end:null,onhide_start:null,onhide_end:null,onopen_start:null,onopen_end:null,onclose_start:null,onclose_end:null,onresize_start:null,onresize_end:null,onsizecontent_start:null,onsizecontent_end:null,onswap_start:null,onswap_end:null,ondrag_start:null,ondrag_end:null},north:{paneSelector:".ui-layout-north",size:"auto",resizerCursor:"n-resize",customHotkey:""},south:{paneSelector:".ui-layout-south",size:"auto",resizerCursor:"s-resize",customHotkey:""},east:{paneSelector:".ui-layout-east",size:200,resizerCursor:"e-resize",customHotkey:""},west:{paneSelector:".ui-layout-west",size:200,resizerCursor:"w-resize",customHotkey:""},center:{paneSelector:".ui-layout-center",minWidth:0,minHeight:0},useStateCookie:false,cookie:{name:"",autoSave:true,autoLoad:true,domain:"",path:"",expires:"",secure:false,keys:"north.size,south.size,east.size,west.size,north.isClosed,south.isClosed,east.isClosed,west.isClosed,north.isHidden,south.isHidden,east.isHidden,west.isHidden"}};var effects={slide:{all:{duration:"fast"},north:{direction:"up"},south:{direction:"down"},east:{direction:"right"},west:{direction:"left"}},drop:{all:{duration:"slow"},north:{direction:"up"},south:{direction:"down"},east:{direction:"right"},west:{direction:"left"}},scale:{all:{duration:"fast"}}};var state={id:"layout"+new Date().getTime(),initialized:false,container:{},north:{},south:{},east:{},west:{},center:{},cookie:{}};var 
_c={allPanes:"north,south,west,east,center",borderPanes:"north,south,west,east",altSide:{north:"south",south:"north",east:"west",west:"east"},hidden:{visibility:"hidden"},visible:{visibility:"visible"},zIndex:{pane_normal:1,resizer_normal:2,iframe_mask:2,pane_sliding:100,pane_animate:1000,resizer_drag:10000},resizers:{cssReq:{position:"absolute",padding:0,margin:0,fontSize:"1px",textAlign:"left",overflow:"hidden"},cssDemo:{background:"#DDD",border:"none"}},togglers:{cssReq:{position:"absolute",display:"block",padding:0,margin:0,overflow:"hidden",textAlign:"center",fontSize:"1px",cursor:"pointer",zIndex:1},cssDemo:{background:"#AAA"}},content:{cssReq:{position:"relative"},cssDemo:{overflow:"auto",padding:"10px"},cssDemoPane:{overflow:"hidden",padding:0}},panes:{cssReq:{position:"absolute",margin:0},cssDemo:{padding:"10px",background:"#FFF",border:"1px solid #BBB",overflow:"auto"}},north:{side:"Top",sizeType:"Height",dir:"horz",cssReq:{top:0,bottom:"auto",left:0,right:0,width:"auto"},pins:[]},south:{side:"Bottom",sizeType:"Height",dir:"horz",cssReq:{top:"auto",bottom:0,left:0,right:0,width:"auto"},pins:[]},east:{side:"Right",sizeType:"Width",dir:"vert",cssReq:{left:"auto",right:0,top:"auto",bottom:"auto",height:"auto"},pins:[]},west:{side:"Left",sizeType:"Width",dir:"vert",cssReq:{left:0,right:"auto",top:"auto",bottom:"auto",height:"auto"},pins:[]},center:{dir:"center",cssReq:{left:"auto",right:"auto",top:"auto",bottom:"auto",height:"auto",width:"auto"}}};var timer={data:{},set:function(s,fn,ms){timer.clear(s);timer.data[s]=setTimeout(fn,ms)},clear:function(s){var t=timer.data;if(t[s]){clearTimeout(t[s]);delete t[s]}}};var isStr=function(o){try{return typeof o=="string"||(typeof o=="object"&&o.constructor.toString().match(/string/i)!==null)}catch(e){return false}};var str=function(o){return isStr(o)?$.trim(o):o==undefined||o==null?"":o};var min=function(x,y){return Math.min(x,y)};var max=function(x,y){return Math.max(x,y)};var _transformData=function(d){var a,json={cookie:{},defaults:{fxSettings:{}},north:{fxSettings:{}},south:{fxSettings:{}},east:{fxSettings:{}},west:{fxSettings:{}},center:{fxSettings:{}}};d=d||{};if(d.effects||d.cookie||d.defaults||d.north||d.south||d.west||d.east||d.center){json=$.extend(true,json,d)}else{$.each(d,function(key,val){a=key.split("__");if(!a[1]||json[a[0]]){json[a[1]?a[0]:"defaults"][a[1]?a[1]:a[0]]=val}})}return json};var _queue=function(action,pane,param){var tried=[];$.each(_c.borderPanes.split(","),function(i,p){if(_c[p].isMoving){bindCallback(p);return false}});function bindCallback(p){var c=_c[p];if(!c.doCallback){c.doCallback=true;c.callback=action+","+pane+","+(param?1:0)}else{tried.push(p);var cbPane=c.callback.split(",")[1];if(cbPane!=pane&&!$.inArray(cbPane,tried)>=0){bindCallback(cbPane)}}}};var _dequeue=function(pane){var c=_c[pane];_c.isLayoutBusy=false;delete c.isMoving;if(!c.doCallback||!c.callback){return}c.doCallback=false;var cb=c.callback.split(","),param=(cb[2]>0?true:false);if(cb[0]=="open"){open(cb[1],param)}else{if(cb[0]=="close"){close(cb[1],param)}}if(!c.doCallback){c.callback=null}};var _execCallback=function(pane,v_fn){if(!v_fn){return}var fn;try{if(typeof v_fn=="function"){fn=v_fn}else{if(!isStr(v_fn)){return}else{if(v_fn.match(/,/)){var args=v_fn.split(","),fn=eval(args[0]);if(typeof fn=="function"&&args.length>1){return fn(args[1])}}else{fn=eval(v_fn)}}}if(typeof fn=="function"){if(pane&&$Ps[pane]){return fn(pane,$Ps[pane],$.extend({},state[pane]),options[pane],options.name)}else{return 
fn(Instance,$.extend({},state),options,options.name)}}}catch(ex){}};var _showInvisibly=function($E,force){if(!$E){return{}}if(!$E.jquery){$E=$($E)}var CSS={display:$E.css("display"),visibility:$E.css("visibility")};if(force||CSS.display=="none"){$E.css({display:"block",visibility:"hidden"});return CSS}else{return{}}};var _fixIframe=function(pane){if(state.browser.mozilla){return}var $P=$Ps[pane];if(state[pane].tagName=="IFRAME"){$P.css(_c.hidden).css(_c.visible)}else{$P.find("IFRAME").css(_c.hidden).css(_c.visible)}};var _cssNum=function($E,prop){if(!$E.jquery){$E=$($E)}var CSS=_showInvisibly($E);var val=parseInt($.curCSS($E[0],prop,true),10)||0;$E.css(CSS);return val};var _borderWidth=function(E,side){if(E.jquery){E=E[0]}var b="border"+side.substr(0,1).toUpperCase()+side.substr(1);return $.curCSS(E,b+"Style",true)=="none"?0:(parseInt($.curCSS(E,b+"Width",true),10)||0)};var cssW=function(el,outerWidth){var str=isStr(el),$E=str?$Ps[el]:$(el);if(isNaN(outerWidth)){outerWidth=str?getPaneSize(el):$E.outerWidth()}if(outerWidth<=0){return 0}if(!state.browser.boxModel){return outerWidth}var W=outerWidth-_borderWidth($E,"Left")-_borderWidth($E,"Right")-_cssNum($E,"paddingLeft")-_cssNum($E,"paddingRight");return W>0?W:0};var cssH=function(el,outerHeight){var str=isStr(el),$E=str?$Ps[el]:$(el);if(isNaN(outerHeight)){outerHeight=str?getPaneSize(el):$E.outerHeight()}if(outerHeight<=0){return 0}if(!state.browser.boxModel){return outerHeight}var H=outerHeight-_borderWidth($E,"Top")-_borderWidth($E,"Bottom")-_cssNum($E,"paddingTop")-_cssNum($E,"paddingBottom");return H>0?H:0};var cssSize=function(pane,outerSize){if(_c[pane].dir=="horz"){return cssH(pane,outerSize)}else{return cssW(pane,outerSize)}};var cssMinDims=function(pane){var dir=_c[pane].dir,d={minWidth:1001-cssW(pane,1000),minHeight:1001-cssH(pane,1000)};if(dir=="horz"){d.minSize=d.minHeight}if(dir=="vert"){d.minSize=d.minWidth}return d};var setOuterWidth=function(el,outerWidth,autoHide){var $E=el,w;if(isStr(el)){$E=$Ps[el]}else{if(!el.jquery){$E=$(el)}}w=cssW($E,outerWidth);$E.css({width:w});if(w>0){if(autoHide&&$E.data("autoHidden")&&$E.innerHeight()>0){$E.show().data("autoHidden",false);if(!state.browser.mozilla){$E.css(_c.hidden).css(_c.visible)}}}else{if(autoHide&&!$E.data("autoHidden")){$E.hide().data("autoHidden",true)}}};var setOuterHeight=function(el,outerHeight,autoHide){var $E=el,h;if(isStr(el)){$E=$Ps[el]}else{if(!el.jquery){$E=$(el)}}h=cssH($E,outerHeight);$E.css({height:h,visibility:"visible"});if(h>0&&$E.innerWidth()>0){if(autoHide&&$E.data("autoHidden")){$E.show().data("autoHidden",false);if(!state.browser.mozilla){$E.css(_c.hidden).css(_c.visible)}}}else{if(autoHide&&!$E.data("autoHidden")){$E.hide().data("autoHidden",true)}}};var setOuterSize=function(el,outerSize,autoHide){if(_c[pane].dir=="horz"){setOuterHeight(el,outerSize,autoHide)}else{setOuterWidth(el,outerSize,autoHide)}};var _parseSize=function(pane,size,dir){if(!dir){dir=_c[pane].dir}if(isStr(size)&&size.match(/%/)){size=parseInt(size)/100}if(size===0){return 0}else{if(size>=1){return parseInt(size,10)}else{if(size>0){var o=options,avail;if(dir=="horz"){avail=sC.innerHeight-($Ps.north?o.north.spacing_open:0)-($Ps.south?o.south.spacing_open:0)}else{if(dir=="vert"){avail=sC.innerWidth-($Ps.west?o.west.spacing_open:0)-($Ps.east?o.east.spacing_open:0)}}return Math.floor(avail*size)}else{if(pane=="center"){return 0}else{var 
$P=$Ps[pane],dim=(dir=="horz"?"height":"width"),vis=_showInvisibly($P),s=$P.css(dim);$P.css(dim,"auto");size=(dim=="height")?$P.outerHeight():$P.outerWidth();$P.css(dim,s).css(vis);return size}}}}};var getPaneSize=function(pane,inclSpace){var $P=$Ps[pane],o=options[pane],s=state[pane],oSp=(inclSpace?o.spacing_open:0),cSp=(inclSpace?o.spacing_closed:0);if(!$P||s.isHidden){return 0}else{if(s.isClosed||(s.isSliding&&inclSpace)){return cSp}else{if(_c[pane].dir=="horz"){return $P.outerHeight()+oSp}else{return $P.outerWidth()+oSp}}}};var setSizeLimits=function(pane,slide){var o=options[pane],s=state[pane],c=_c[pane],dir=c.dir,side=c.side.toLowerCase(),type=c.sizeType.toLowerCase(),isSliding=(slide!=undefined?slide:s.isSliding),$P=$Ps[pane],paneSpacing=o.spacing_open,altPane=_c.altSide[pane],altS=state[altPane],$altP=$Ps[altPane],altPaneSize=(!$altP||altS.isVisible===false||altS.isSliding?0:(dir=="horz"?$altP.outerHeight():$altP.outerWidth())),altPaneSpacing=((!$altP||altS.isHidden?0:options[altPane][altS.isClosed!==false?"spacing_closed":"spacing_open"])||0),containerSize=(dir=="horz"?sC.innerHeight:sC.innerWidth),minCenterDims=cssMinDims("center"),minCenterSize=dir=="horz"?max(options.center.minHeight,minCenterDims.minHeight):max(options.center.minWidth,minCenterDims.minWidth),limitSize=(containerSize-paneSpacing-(isSliding?0:(_parseSize("center",minCenterSize,dir)+altPaneSize+altPaneSpacing))),minSize=s.minSize=max(_parseSize(pane,o.minSize),cssMinDims(pane).minSize),maxSize=s.maxSize=min((o.maxSize?_parseSize(pane,o.maxSize):100000),limitSize),r=s.resizerPosition={},top=sC.insetTop,left=sC.insetLeft,W=sC.innerWidth,H=sC.innerHeight,rW=o.spacing_open;switch(pane){case"north":r.min=top+minSize;r.max=top+maxSize;break;case"west":r.min=left+minSize;r.max=left+maxSize;break;case"south":r.min=top+H-maxSize-rW;r.max=top+H-minSize-rW;break;case"east":r.min=left+W-maxSize-rW;r.max=left+W-minSize-rW;break}};var calcNewCenterPaneDims=function(){var d={top:getPaneSize("north",true),bottom:getPaneSize("south",true),left:getPaneSize("west",true),right:getPaneSize("east",true),width:0,height:0};with(d){width=sC.innerWidth-left-right;height=sC.innerHeight-bottom-top;top+=sC.insetTop;bottom+=sC.insetBottom;left+=sC.insetLeft;right+=sC.insetRight}return d};var getElemDims=function($E){var d={},x=d.css={},i={},b,p,off=$E.offset();d.offsetLeft=off.left;d.offsetTop=off.top;$.each("Left,Right,Top,Bottom".split(","),function(idx,e){b=x["border"+e]=_borderWidth($E,e);p=x["padding"+e]=_cssNum($E,"padding"+e);i[e]=b+p;d["inset"+e]=p});d.offsetWidth=$E.innerWidth(true);d.offsetHeight=$E.innerHeight(true);d.outerWidth=$E.outerWidth();d.outerHeight=$E.outerHeight();d.innerWidth=d.outerWidth-i.Left-i.Right;d.innerHeight=d.outerHeight-i.Top-i.Bottom;x.width=$E.width();x.height=$E.height();return d};var getElemCSS=function($E,list){var CSS={},style=$E[0].style,props=list.split(","),sides="Top,Bottom,Left,Right".split(","),attrs="Color,Style,Width".split(","),p,s,a,i,j,k;for(i=0;i<props.length;i++){p=props[i];if(p.match(/(border|padding|margin)$/)){for(j=0;j<4;j++){s=sides[j];if(p=="border"){for(k=0;k<3;k++){a=attrs[k];CSS[p+s+a]=style[p+s+a]}}else{CSS[p+s]=style[p+s]}}}else{CSS[p]=style[p]}}return CSS};var getHoverClasses=function(el,allStates){var $El=$(el),type=$El.data("layoutRole"),pane=$El.data("layoutEdge"),o=options[pane],root=o[type+"Class"],_pane="-"+pane,_open="-open",_closed="-closed",_slide="-sliding",_hover="-hover 
",_state=$El.hasClass(root+_closed)?_closed:_open,_alt=_state==_closed?_open:_closed,classes=(root+_hover)+(root+_pane+_hover)+(root+_state+_hover)+(root+_pane+_state+_hover);if(allStates){classes+=(root+_alt+_hover)+(root+_pane+_alt+_hover)}if(type=="resizer"&&$El.hasClass(root+_slide)){classes+=(root+_slide+_hover)+(root+_pane+_slide+_hover)}return $.trim(classes)};var addHover=function(evt,el){var e=el||this;$(e).addClass(getHoverClasses(e))};var removeHover=function(evt,el){var e=el||this;$(e).removeClass(getHoverClasses(e,true))};var onResizerEnter=function(evt){$("body").disableSelection();addHover(evt,this)};var onResizerLeave=function(evt,el){var e=el||this,pane=$(e).data("layoutEdge"),name=pane+"ResizerLeave";timer.clear(name);if(!el){removeHover(evt,this);timer.set(name,function(){onResizerLeave(evt,e)},200)}else{if(!state[pane].isResizing){$("body").enableSelection()}}};var _create=function(){initOptions();var o=options;if(false===_execCallback(null,o.onload)){return false}if(!getPane("center").length){alert(lang.errCenterPaneMissing);return null}if(o.useStateCookie&&o.cookie.autoLoad){loadCookie()}state.browser={mozilla:$.browser.mozilla,webkit:$.browser.webkit||$.browser.safari,msie:$.browser.msie,isIE6:$.browser.msie&&$.browser.version==6,boxModel:$.support.boxModel};initContainer();initPanes();initResizable();sizeContent();if(o.scrollToBookmarkOnLoad){with(self.location){if(hash){replace(hash)}}}if(o.autoBindCustomButtons){initButtons()}initHotkeys();if(o.resizeWithWindow&&!$Container.data("layoutRole")){$(window).bind("resize."+sID,windowResize)}$(window).bind("unload."+sID,unload);state.initialized=true};var windowResize=function(){var delay=Number(options.resizeWithWindowDelay)||100;if(delay>0){timer.clear("winResize");timer.set("winResize",function(){timer.clear("winResize");timer.clear("winResizeRepeater");resizeAll()},delay);if(!timer.data.winResizeRepeater){setWindowResizeRepeater()}}};var setWindowResizeRepeater=function(){var delay=Number(options.resizeWithWindowMaxDelay);if(delay>0){timer.set("winResizeRepeater",function(){setWindowResizeRepeater();resizeAll()},delay)}};var unload=function(){var o=options;state.cookie=getState();if(o.useStateCookie&&o.cookie.autoSave){saveCookie()}_execCallback(null,o.onunload)};var initContainer=function(){var $C=$Container,tag=sC.tagName=$C.attr("tagName"),fullPage=(tag=="BODY"),props="position,margin,padding,border",CSS={};sC.selector=$C.selector.split(".slice")[0];sC.ref=tag+"/"+sC.selector;$C.data("layoutContainer",sID).data("layoutName",options.name);if(!$C.data("layoutCSS")){if(fullPage){CSS=$.extend(getElemCSS($C,props),{height:$C.css("height"),overflow:$C.css("overflow"),overflowX:$C.css("overflowX"),overflowY:$C.css("overflowY")});var $H=$("html");$H.data("layoutCSS",{height:"auto",overflow:$H.css("overflow"),overflowX:$H.css("overflowX"),overflowY:$H.css("overflowY")})}else{CSS=getElemCSS($C,props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY")}$C.data("layoutCSS",CSS)}try{if(fullPage){$("html").css({height:"100%",overflow:"hidden",overflowX:"hidden",overflowY:"hidden"});$("body").css({position:"relative",height:"100%",overflow:"hidden",overflowX:"hidden",overflowY:"hidden",margin:0,padding:0,border:"none"})}else{var 
CSS={overflow:"hidden"},p=$C.css("position"),h=$C.css("height");if(!$C.data("layoutRole")){if(!p||!p.match(/fixed|absolute|relative/)){CSS.position="relative"}}$C.css(CSS);if($C.is(":visible")&&$C.innerHeight()<2){alert(lang.errContainerHeight.replace(/CONTAINER/,sC.ref))}}}catch(ex){}$.extend(state.container,getElemDims($C))};var initHotkeys=function(){$.each(_c.borderPanes.split(","),function(i,pane){var o=options[pane];if(o.enableCursorHotkey||o.customHotkey){$(document).bind("keydown."+sID,keyDown);return false}})};var initOptions=function(){opts=_transformData(opts);var newOpts={applyDefaultStyles:"applyDemoStyles"};renameOpts(opts.defaults);$.each(_c.allPanes.split(","),function(i,pane){renameOpts(opts[pane])});if(opts.effects){$.extend(effects,opts.effects);delete opts.effects}$.extend(options.cookie,opts.cookie);var globals="name,zIndex,scrollToBookmarkOnLoad,resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay,onresizeall,onresizeall_start,onresizeall_end,onload,onunload,autoBindCustomButtons,useStateCookie";$.each(globals.split(","),function(i,key){if(opts[key]!==undefined){options[key]=opts[key]}else{if(opts.defaults[key]!==undefined){options[key]=opts.defaults[key];delete opts.defaults[key]}}});$.each("paneSelector,resizerCursor,customHotkey".split(","),function(i,key){delete opts.defaults[key]});$.extend(true,options.defaults,opts.defaults);_c.center=$.extend(true,{},_c.panes,_c.center);var z=options.zIndex;if(z===0||z>0){_c.zIndex.pane_normal=z;_c.zIndex.resizer_normal=z+1;_c.zIndex.iframe_mask=z+1}$.extend(options.center,opts.center);var o_Center=$.extend(true,{},options.defaults,opts.defaults,options.center);var optionsCenter=("paneClass,contentSelector,applyDemoStyles,triggerEventsOnLoad,showOverflowOnHover,onresize,onresize_start,onresize_end,resizeNestedLayout,resizeContentWhileDragging,onsizecontent,onsizecontent_start,onsizecontent_end").split(",");$.each(optionsCenter,function(i,key){options.center[key]=o_Center[key]});var o,defs=options.defaults;$.each(_c.borderPanes.split(","),function(i,pane){_c[pane]=$.extend(true,{},_c.panes,_c[pane]);o=options[pane]=$.extend(true,{},options.defaults,options[pane],opts.defaults,opts[pane]);if(!o.paneClass){o.paneClass="ui-layout-pane"}if(!o.resizerClass){o.resizerClass="ui-layout-resizer"}if(!o.togglerClass){o.togglerClass="ui-layout-toggler"}$.each(["_open","_close",""],function(i,n){var sName="fxName"+n,sSpeed="fxSpeed"+n,sSettings="fxSettings"+n;o[sName]=opts[pane][sName]||opts[pane].fxName||opts.defaults[sName]||opts.defaults.fxName||o[sName]||o.fxName||defs[sName]||defs.fxName||"none";var fxName=o[sName];if(fxName=="none"||!$.effects||!$.effects[fxName]||(!effects[fxName]&&!o[sSettings]&&!o.fxSettings)){fxName=o[sName]="none"}var fx=effects[fxName]||{},fx_all=fx.all||{},fx_pane=fx[pane]||{};o[sSettings]=$.extend({},fx_all,fx_pane,defs.fxSettings||{},defs[sSettings]||{},o.fxSettings,o[sSettings],opts.defaults.fxSettings,opts.defaults[sSettings]||{},opts[pane].fxSettings,opts[pane][sSettings]||{});o[sSpeed]=opts[pane][sSpeed]||opts[pane].fxSpeed||opts.defaults[sSpeed]||opts.defaults.fxSpeed||o[sSpeed]||o[sSettings].duration||o.fxSpeed||o.fxSettings.duration||defs.fxSpeed||defs.fxSettings.duration||fx_pane.duration||fx_all.duration||"normal"})});function renameOpts(O){for(var key in newOpts){if(O[key]!=undefined){O[newOpts[key]]=O[key];delete O[key]}}}};var getPane=function(pane){var sel=options[pane].paneSelector;if(sel.substr(0,1)==="#"){return $Container.find(sel).eq(0)}else{var 
$P=$Container.children(sel).eq(0);return $P.length?$P:$Container.children("form:first").children(sel).eq(0)}};var initPanes=function(){$.each(_c.allPanes.split(","),function(idx,pane){var o=options[pane],s=state[pane],c=_c[pane],fx=s.fx,dir=c.dir,spacing=o.spacing_open||0,isCenter=(pane=="center"),CSS={},$P,$C,size,minSize,maxSize;$Cs[pane]=false;$P=$Ps[pane]=getPane(pane);if(!$P.length){$Ps[pane]=false;return true}if(!$P.data("layoutCSS")){var props="position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border";$P.data("layoutCSS",getElemCSS($P,props))}$P.data("layoutName",options.name).data("layoutRole","pane").data("layoutEdge",pane).css(c.cssReq).css("zIndex",_c.zIndex.pane_normal).css(o.applyDemoStyles?c.cssDemo:{}).addClass(o.paneClass+" "+o.paneClass+"-"+pane).bind("mouseenter."+sID,addHover).bind("mouseleave."+sID,removeHover);initContent(pane,false);if(!isCenter){size=s.size=_parseSize(pane,o.size);minSize=_parseSize(pane,o.minSize)||1;maxSize=_parseSize(pane,o.maxSize)||100000;if(size>0){size=max(min(size,maxSize),minSize)}}s.tagName=$P.attr("tagName");s.edge=pane;s.noRoom=false;s.isVisible=true;if(!isCenter){s.isClosed=false;s.isSliding=false;s.isResizing=false;s.isHidden=false}switch(pane){case"north":CSS.top=sC.insetTop;CSS.left=sC.insetLeft;CSS.right=sC.insetRight;break;case"south":CSS.bottom=sC.insetBottom;CSS.left=sC.insetLeft;CSS.right=sC.insetRight;break;case"west":CSS.left=sC.insetLeft;break;case"east":CSS.right=sC.insetRight;break;case"center":}if(dir=="horz"){CSS.height=max(1,cssH(pane,size))}else{if(dir=="vert"){CSS.width=max(1,cssW(pane,size))}}$P.css(CSS);if(dir!="horz"){sizeMidPanes(pane,true)}$P.css({visibility:"visible",display:"block"});if(o.initClosed&&o.closable){close(pane,true,true)}else{if(o.initHidden||o.initClosed){hide(pane)}}if(o.showOverflowOnHover){$P.hover(allowOverflow,resetOverflow)}});initHandles();$.each(_c.borderPanes.split(","),function(i,pane){if($Ps[pane]&&state[pane].isVisible){setSizeLimits(pane);makePaneFit(pane)}});sizeMidPanes("center");$.each(_c.allPanes.split(","),function(i,pane){var o=options[pane];if($Ps[pane]&&o.triggerEventsOnLoad&&state[pane].isVisible){_execCallback(pane,o.onresize_end||o.onresize)}});if($Container.innerHeight()<2){alert(lang.errContainerHeight.replace(/CONTAINER/,sC.ref))}};var initHandles=function(panes){if(!panes||panes=="all"){panes=_c.borderPanes}$.each(panes.split(","),function(i,pane){var $P=$Ps[pane];$Rs[pane]=false;$Ts[pane]=false;if(!$P){return}var o=options[pane],s=state[pane],c=_c[pane],rClass=o.resizerClass,tClass=o.togglerClass,side=c.side.toLowerCase(),spacing=(s.isVisible?o.spacing_open:o.spacing_closed),_pane="-"+pane,_state=(s.isVisible?"-open":"-closed"),$R=$Rs[pane]=$("<div></div>"),$T=(o.closable?$Ts[pane]=$("<div></div>"):false);if(s.isVisible&&o.resizable){}else{if(!s.isVisible&&o.slidable){$R.attr("title",o.sliderTip).css("cursor",o.sliderCursor)}}$R.attr("id",(o.paneSelector.substr(0,1)=="#"?o.paneSelector.substr(1)+"-resizer":"")).data("layoutRole","resizer").data("layoutEdge",pane).css(_c.resizers.cssReq).css("zIndex",_c.zIndex.resizer_normal).css(o.applyDemoStyles?_c.resizers.cssDemo:{}).addClass(rClass+" "+rClass+_pane).appendTo($Container);if($T){$T.attr("id",(o.paneSelector.substr(0,1)=="#"?o.paneSelector.substr(1)+"-toggler":"")).data("layoutRole","toggler").data("layoutEdge",pane).css(_c.togglers.cssReq).css(o.applyDemoStyles?_c.togglers.cssDemo:{}).addClass(tClass+" 
"+tClass+_pane).appendTo($R).click(function(evt){toggle(pane);evt.stopPropagation()}).hover(addHover,removeHover);if(o.togglerContent_open){$("<span>"+o.togglerContent_open+"</span>").data("layoutRole","togglerContent").data("layoutEdge",pane).addClass("content content-open").css("display","none").appendTo($T).hover(addHover,removeHover)}if(o.togglerContent_closed){$("<span>"+o.togglerContent_closed+"</span>").data("layoutRole","togglerContent").data("layoutEdge",pane).addClass("content content-closed").css("display","none").appendTo($T).hover(addHover,removeHover)}}if(s.isVisible){setAsOpen(pane)}else{setAsClosed(pane);bindStartSlidingEvent(pane,true)}});sizeHandles("all")};var initContent=function(pane,resize){var o=options[pane],sel=o.contentSelector,$P=$Ps[pane],$C;if(sel){$C=$Cs[pane]=(o.findNestedContent)?$P.find(sel).eq(0):$P.children(sel).eq(0)}if($C&&$C.length){$C.css(_c.content.cssReq);if(o.applyDemoStyles){$C.css(_c.content.cssDemo);$P.css(_c.content.cssDemoPane)}state[pane].content={};if(resize!==false){sizeContent(pane)}}else{$Cs[pane]=false}};var initButtons=function(){var pre="ui-layout-button-",name;$.each("toggle,open,close,pin,toggle-slide,open-slide".split(","),function(i,action){$.each(_c.borderPanes.split(","),function(ii,pane){$("."+pre+action+"-"+pane).each(function(){name=$(this).data("layoutName")||$(this).attr("layoutName");if(name==undefined||name==options.name){if(action.substr("-slide")>0){bindButton(this,action.split("-")[0],pane,true)}else{bindButton(this,action,pane)}}})})})};var initResizable=function(panes){var draggingAvailable=(typeof $.fn.draggable=="function"),$Frames,side;if(!panes||panes=="all"){panes=_c.borderPanes}$.each(panes.split(","),function(idx,pane){var o=options[pane],s=state[pane],c=_c[pane],side=(c.dir=="horz"?"top":"left"),r,live;if(!draggingAvailable||!$Ps[pane]||!o.resizable){o.resizable=false;return true}var $P=$Ps[pane],$R=$Rs[pane],base=o.resizerClass,resizerClass=base+"-drag",resizerPaneClass=base+"-"+pane+"-drag",helperClass=base+"-dragging",helperPaneClass=base+"-"+pane+"-dragging",helperLimitClass=base+"-dragging-limit",helperClassesSet=false;if(!s.isClosed){$R.attr("title",o.resizerTip).css("cursor",o.resizerCursor)}$R.hover(onResizerEnter,onResizerLeave);$R.draggable({containment:$Container[0],axis:(c.dir=="horz"?"y":"x"),delay:0,distance:1,helper:"clone",opacity:o.resizerDragOpacity,addClasses:false,zIndex:_c.zIndex.resizer_drag,start:function(e,ui){o=options[pane];s=state[pane];live=o.resizeWhileDragging;if(false===_execCallback(pane,o.ondrag_start)){return false}_c.isLayoutBusy=true;s.isResizing=true;timer.clear(pane+"_closeSlider");setSizeLimits(pane);r=s.resizerPosition;$R.addClass(resizerClass+" "+resizerPaneClass);helperClassesSet=false;$Frames=$(o.maskIframesOnResize===true?"iframe":o.maskIframesOnResize).filter(":visible");var id,i=0;$Frames.each(function(){id="ui-layout-mask-"+(++i);$(this).data("layoutMaskID",id);$('<div id="'+id+'" class="ui-layout-mask ui-layout-mask-'+pane+'"/>').css({background:"#fff",opacity:"0.001",zIndex:_c.zIndex.iframe_mask,position:"absolute",width:this.offsetWidth+"px",height:this.offsetHeight+"px"}).css($(this).position()).appendTo(this.parentNode)});$("body").disableSelection()},drag:function(e,ui){if(!helperClassesSet){ui.helper.addClass(helperClass+" "+helperPaneClass).children().css("visibility","hidden");helperClassesSet=true;if(s.isSliding){$Ps[pane].css("zIndex",_c.zIndex.pane_sliding)}}var 
limit=0;if(ui.position[side]<r.min){ui.position[side]=r.min;limit=-1}else{if(ui.position[side]>r.max){ui.position[side]=r.max;limit=1}}if(limit){ui.helper.addClass(helperLimitClass);window.defaultStatus="Panel has reached its "+((limit>0&&pane.match(/north|west/))||(limit<0&&pane.match(/south|east/))?"maximum":"minimum")+" size"}else{ui.helper.removeClass(helperLimitClass);window.defaultStatus=""}if(live){resizePanes(e,ui,pane)}},stop:function(e,ui){$("body").enableSelection();window.defaultStatus="";$R.removeClass(resizerClass+" "+resizerPaneClass+" "+helperLimitClass);s.isResizing=false;_c.isLayoutBusy=false;resizePanes(e,ui,pane,true)}});var resizePanes=function(e,ui,pane,resizingDone){var dragPos=ui.position,c=_c[pane],resizerPos,newSize,i=0;switch(pane){case"north":resizerPos=dragPos.top;break;case"west":resizerPos=dragPos.left;break;case"south":resizerPos=sC.offsetHeight-dragPos.top-o.spacing_open;break;case"east":resizerPos=sC.offsetWidth-dragPos.left-o.spacing_open;break}if(resizingDone){$("div.ui-layout-mask").each(function(){this.parentNode.removeChild(this)});if(false===_execCallback(pane,o.ondrag_end||o.ondrag)){return false}}else{$Frames.each(function(){$("#"+$(this).data("layoutMaskID")).css($(this).position()).css({width:this.offsetWidth+"px",height:this.offsetHeight+"px"})})}newSize=resizerPos-sC["inset"+c.side];manualSizePane(pane,newSize)}})};var destroy=function(){$(window).unbind("."+sID);$(document).unbind("."+sID);window[sID]=null;var fullPage=(sC.tagName=="BODY"),_open="-open",_sliding="-sliding",_closed="-closed",$P,root,pRoot,pClasses;$.each(_c.allPanes.split(","),function(i,pane){$P=$Ps[pane];if(!$P){return true}if(pane!="center"){if($Ts[pane]){$Ts[pane].remove()}$Rs[pane].remove()}root=options[pane].paneClass;pRoot=root+"-"+pane;pClasses=[root,root+_open,root+_closed,root+_sliding,pRoot,pRoot+_open,pRoot+_closed,pRoot+_sliding];$.merge(pClasses,getHoverClasses($P,true));$P.removeClass(pClasses.join(" ")).removeData("layoutRole").removeData("layoutEdge").unbind("."+sID).unbind("mouseenter").unbind("mouseleave");if(!$P.data("layoutContainer")){$P.css($P.data("layoutCSS"))}});$Container.removeData("layoutContainer");if(!$Container.data("layoutEdge")){$Container.css($Container.data("layoutCSS"))}if(fullPage){$("html").css($("html").data("layoutCSS"))}unload();var n=options.name;if(n&&window[n]){window[n]=null}};var hide=function(pane,noAnimation){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane];if(!$P||s.isHidden){return}if(state.initialized&&false===_execCallback(pane,o.onhide_start)){return}s.isSliding=false;if($R){$R.hide()}if(!state.initialized||s.isClosed){s.isClosed=true;s.isHidden=true;s.isVisible=false;$P.hide();sizeMidPanes(_c[pane].dir=="horz"?"all":"center");if(state.initialized||o.triggerEventsOnLoad){_execCallback(pane,o.onhide_end||o.onhide)}}else{s.isHiding=true;close(pane,false,noAnimation)}};var show=function(pane,openPane,noAnimation,noAlert){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane];if(!$P||!s.isHidden){return}if(false===_execCallback(pane,o.onshow_start)){return}s.isSliding=false;s.isShowing=true;if(openPane===false){close(pane,true)}else{open(pane,false,noAnimation,noAlert)}};var toggle=function(pane,slide){if(!isStr(pane)){pane.stopImmediatePropagation();pane=$(this).data("layoutEdge")}var s=state[str(pane)];if(s.isHidden){show(pane)}else{if(s.isClosed){open(pane,!!slide)}else{close(pane)}}};var _closePane=function(pane,setHandles){var $P=$Ps[pane],s=state[pane];$P.hide();s.isClosed=true;s.isVisible=false};var 
close=function(pane,force,noAnimation,skipCallback){if(!state.initialized){_closePane(pane);return}var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],doFX=!noAnimation&&!s.isClosed&&(o.fxName_close!="none"),isShowing=s.isShowing,isHiding=s.isHiding,wasSliding=s.isSliding;delete s.isShowing;delete s.isHiding;if(!$P||!o.closable){return}else{if(!force&&s.isClosed&&!isShowing){return}}if(_c.isLayoutBusy){_queue("close",pane,force);return}if(!isShowing&&false===_execCallback(pane,o.onclose_start)){return}_c[pane].isMoving=true;_c.isLayoutBusy=true;s.isClosed=true;s.isVisible=false;if(isHiding){s.isHidden=true}else{if(isShowing){s.isHidden=false}}if(s.isSliding){bindStopSlidingEvents(pane,false)}else{sizeMidPanes(_c[pane].dir=="horz"?"all":"center",false)}setAsClosed(pane);if(doFX){lockPaneForFX(pane,true);$P.hide(o.fxName_close,o.fxSettings_close,o.fxSpeed_close,function(){lockPaneForFX(pane,false);close_2()})}else{$P.hide();close_2()}function close_2(){if(s.isClosed){bindStartSlidingEvent(pane,true);var altPane=_c.altSide[pane];if(state[altPane].noRoom){setSizeLimits(altPane);makePaneFit(altPane)}if(!skipCallback&&(state.initialized||o.triggerEventsOnLoad)){if(!isShowing&&!wasSliding){_execCallback(pane,o.onclose_end||o.onclose)}if(isShowing){_execCallback(pane,o.onshow_end||o.onshow)}if(isHiding){_execCallback(pane,o.onhide_end||o.onhide)}}}_dequeue(pane)}};var setAsClosed=function(pane){var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],side=_c[pane].side.toLowerCase(),inset="inset"+_c[pane].side,rClass=o.resizerClass,tClass=o.togglerClass,_pane="-"+pane,_open="-open",_sliding="-sliding",_closed="-closed";$R.css(side,sC[inset]).removeClass(rClass+_open+" "+rClass+_pane+_open).removeClass(rClass+_sliding+" "+rClass+_pane+_sliding).addClass(rClass+_closed+" "+rClass+_pane+_closed).unbind("dblclick."+sID);if(o.resizable&&typeof $.fn.draggable=="function"){$R.draggable("disable").removeClass("ui-state-disabled").css("cursor","default").attr("title","")}if($T){$T.removeClass(tClass+_open+" "+tClass+_pane+_open).addClass(tClass+_closed+" "+tClass+_pane+_closed).attr("title",o.togglerTip_closed);$T.children(".content-open").hide();$T.children(".content-closed").css("display","block")}syncPinBtns(pane,false);if(state.initialized){sizeHandles("all")}};var open=function(pane,slide,noAnimation,noAlert){var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],doFX=!noAnimation&&s.isClosed&&(o.fxName_open!="none"),isShowing=s.isShowing;delete s.isShowing;if(!$P||(!o.resizable&&!o.closable)){return}else{if(s.isVisible&&!s.isSliding){return}}if(s.isHidden&&!isShowing){show(pane,true);return}if(_c.isLayoutBusy){_queue("open",pane,slide);return}if(false===_execCallback(pane,o.onopen_start)){return}setSizeLimits(pane,slide);if(s.minSize>s.maxSize){syncPinBtns(pane,false);if(!noAlert&&o.noRoomToOpenTip){alert(o.noRoomToOpenTip)}return}_c[pane].isMoving=true;_c.isLayoutBusy=true;if(slide){bindStopSlidingEvents(pane,true)}else{if(s.isSliding){bindStopSlidingEvents(pane,false)}else{if(o.slidable){bindStartSlidingEvent(pane,false)}}}s.noRoom=false;makePaneFit(pane);s.isVisible=true;s.isClosed=false;if(isShowing){s.isHidden=false}if(doFX){lockPaneForFX(pane,true);$P.show(o.fxName_open,o.fxSettings_open,o.fxSpeed_open,function(){lockPaneForFX(pane,false);open_2()})}else{$P.show();open_2()}function open_2(){if(s.isVisible){_fixIframe(pane);if(!s.isSliding){sizeMidPanes(_c[pane].dir=="vert"?"center":"all",false)}setAsOpen(pane)}_dequeue(pane)}};var 
setAsOpen=function(pane,skipCallback){var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],side=_c[pane].side.toLowerCase(),inset="inset"+_c[pane].side,rClass=o.resizerClass,tClass=o.togglerClass,_pane="-"+pane,_open="-open",_closed="-closed",_sliding="-sliding";$R.css(side,sC[inset]+getPaneSize(pane)).removeClass(rClass+_closed+" "+rClass+_pane+_closed).addClass(rClass+_open+" "+rClass+_pane+_open);if(s.isSliding){$R.addClass(rClass+_sliding+" "+rClass+_pane+_sliding)}else{$R.removeClass(rClass+_sliding+" "+rClass+_pane+_sliding)}if(o.resizerDblClickToggle){$R.bind("dblclick",toggle)}removeHover(0,$R);if(o.resizable&&typeof $.fn.draggable=="function"){$R.draggable("enable").css("cursor",o.resizerCursor).attr("title",o.resizerTip)}else{if(!s.isSliding){$R.css("cursor","default")}}if($T){$T.removeClass(tClass+_closed+" "+tClass+_pane+_closed).addClass(tClass+_open+" "+tClass+_pane+_open).attr("title",o.togglerTip_open);removeHover(0,$T);$T.children(".content-closed").hide();$T.children(".content-open").css("display","block")}syncPinBtns(pane,!s.isSliding);$.extend(s,getElemDims($P));if(state.initialized){sizeHandles("all");sizeContent(pane,true)}if(!skipCallback&&(state.initialized||o.triggerEventsOnLoad)&&$P.is(":visible")){_execCallback(pane,o.onopen_end||o.onopen);if(s.isShowing){_execCallback(pane,o.onshow_end||o.onshow)}if(state.initialized){_execCallback(pane,o.onresize_end||o.onresize)}}};var slideOpen=function(evt_or_pane){var type=typeof evt_or_pane,pane=(type=="string"?evt_or_pane:$(this).data("layoutEdge"));if(type=="object"){evt_or_pane.stopImmediatePropagation()}if(state[pane].isClosed){open(pane,true)}else{bindStopSlidingEvents(pane,true)}};var slideClose=function(evt_or_pane){var $E=(isStr(evt_or_pane)?$Ps[evt_or_pane]:$(this)),pane=$E.data("layoutEdge"),o=options[pane],s=state[pane],$P=$Ps[pane];if(s.isClosed||s.isResizing){return}else{if(o.slideTrigger_close=="click"){close_NOW()}else{if(o.preventQuickSlideClose&&_c.isLayoutBusy){return}else{timer.set(pane+"_closeSlider",close_NOW,300)}}}function close_NOW(e){if(s.isClosed){bindStopSlidingEvents(pane,false)}else{close(pane)}}};var slideToggle=function(pane){toggle(pane,true)};var lockPaneForFX=function(pane,doLock){var $P=$Ps[pane];if(doLock){$P.css({zIndex:_c.zIndex.pane_animate});if(pane=="south"){$P.css({top:sC.insetTop+sC.innerHeight-$P.outerHeight()})}else{if(pane=="east"){$P.css({left:sC.insetLeft+sC.innerWidth-$P.outerWidth()})}}}else{if(pane=="south"){$P.css({top:"auto"})}else{if(pane=="east"){$P.css({left:"auto"})}}var o=options[pane];if(state.browser.msie&&o.fxOpacityFix&&o.fxName_open!="slide"&&$P.css("filter")&&$P.css("opacity")==1){$P[0].style.removeAttribute("filter")}}};var bindStartSlidingEvent=function(pane,enable){var o=options[pane],z=_c.zIndex,$P=$Ps[pane],$R=$Rs[pane],trigger=o.slideTrigger_open;if(!$R||!o.slidable){return}if(trigger.match(/mouseover/)){trigger=o.slideTrigger_open="mouseenter"}else{if(!trigger.match(/click|dblclick|mouseenter/)){trigger=o.slideTrigger_open="click"}}$R.css("zIndex",!enable?z.pane_sliding:z.resizer_normal);$P.css("zIndex",!enable?z.pane_sliding:z.pane_normal);$R[enable?"bind":"unbind"](trigger+"."+sID,slideOpen).css("cursor",enable?o.sliderCursor:"default").attr("title",enable?o.sliderTip:"")};var bindStopSlidingEvents=function(pane,enable){var 
o=options[pane],s=state[pane],trigger=o.slideTrigger_close,action=(enable?"bind":"unbind"),$P=$Ps[pane],$R=$Rs[pane];s.isSliding=enable;timer.clear(pane+"_closeSlider");if(enable){bindStartSlidingEvent(pane,false)}if(!trigger.match(/click|mouseleave/)){trigger=o.slideTrigger_close="mouseleave"}$R[action](trigger,slideClose);if(trigger=="mouseleave"){$P[action]("mouseleave."+sID,slideClose);$R[action]("mouseenter."+sID,cancelMouseOut);$P[action]("mouseenter."+sID,cancelMouseOut)}if(!enable){timer.clear(pane+"_closeSlider")}else{if(trigger=="click"&&!o.resizable){$R.css("cursor",enable?o.sliderCursor:"default");$R.attr("title",enable?o.togglerTip_open:"")}}function cancelMouseOut(evt){timer.clear(pane+"_closeSlider");evt.stopPropagation()}};var makePaneFit=function(pane,isOpening,skipCallback,force){var o=options[pane],s=state[pane],c=_c[pane],$P=$Ps[pane],$R=$Rs[pane],isSidePane=c.dir=="vert",hasRoom=false;if(pane=="center"||(isSidePane&&s.noVerticalRoom)){hasRoom=s.minHeight<=s.maxHeight&&(isSidePane||s.minWidth<=s.maxWidth);if(hasRoom&&s.noRoom){$P.show();if($R){$R.show()}s.isVisible=true;s.noRoom=false;if(isSidePane){s.noVerticalRoom=false}_fixIframe(pane)}else{if(!hasRoom&&!s.noRoom){$P.hide();if($R){$R.hide()}s.isVisible=false;s.noRoom=true}}}if(pane=="center"){}else{if(s.minSize<=s.maxSize){hasRoom=true;if(s.size>s.maxSize){sizePane(pane,s.maxSize,skipCallback,force)}else{if(s.size<s.minSize){sizePane(pane,s.minSize,skipCallback,force)}else{if($R&&$P.is(":visible")){var side=c.side.toLowerCase(),pos=s.size+sC["inset"+c.side];if(_cssNum($R,side)!=pos){$R.css(side,pos)}}}}if(s.noRoom){if(s.wasOpen&&o.closable){if(o.autoReopen){open(pane,false,true,true)}else{s.noRoom=false}}else{show(pane,s.wasOpen,true,true)}}}else{if(!s.noRoom){s.noRoom=true;s.wasOpen=!s.isClosed&&!s.isSliding;if(o.closable){close(pane,true,true)}else{hide(pane,true)}}}}};var manualSizePane=function(pane,size,skipCallback){var o=options[pane],forceResize=o.resizeWhileDragging&&!_c.isLayoutBusy;o.autoResize=false;sizePane(pane,size,skipCallback,forceResize)};var sizePane=function(pane,size,skipCallback,force){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane],side=_c[pane].side.toLowerCase(),inset="inset"+_c[pane].side,skipResizeWhileDragging=_c.isLayoutBusy&&!o.triggerEventsWhileDragging,oldSize;setSizeLimits(pane);oldSize=s.size;size=_parseSize(pane,size);size=max(size,_parseSize(pane,o.minSize));size=min(size,s.maxSize);if(size<s.minSize){makePaneFit(pane,false,skipCallback);return}if(!force&&size==oldSize){return}if(!skipCallback&&state.initialized&&s.isVisible){_execCallback(pane,o.onresize_start)}$P.css(_c[pane].sizeType.toLowerCase(),max(1,cssSize(pane,size)));s.size=size;$.extend(s,getElemDims($P));if($R&&$P.is(":visible")){$R.css(side,size+sC[inset])}sizeContent(pane);if(!skipCallback&&!skipResizeWhileDragging&&state.initialized&&s.isVisible){_execCallback(pane,o.onresize_end||o.onresize);if(o.resizeNestedLayout&&$P.data("layoutContainer")){$P.layout().resizeAll()}}if(!skipCallback){if(!s.isSliding){sizeMidPanes(_c[pane].dir=="horz"?"all":"center",skipResizeWhileDragging,force)}sizeHandles("all")}var altPane=_c.altSide[pane];if(size<oldSize&&state[altPane].noRoom){setSizeLimits(altPane);makePaneFit(altPane,false,skipCallback)}};var sizeMidPanes=function(panes,skipCallback,force){if(!panes||panes=="all"){panes="east,west,center"}$.each(panes.split(","),function(i,pane){if(!$Ps[pane]){return}var 
o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane],isCenter=(pane=="center"),hasRoom=true,CSS={},d=calcNewCenterPaneDims();$.extend(s,getElemDims($P));if(pane=="center"){if(!force&&s.isVisible&&d.width==s.outerWidth&&d.height==s.outerHeight){return true}$.extend(s,cssMinDims(pane),{maxWidth:d.width,maxHeight:d.height});CSS=d;CSS.width=cssW(pane,d.width);CSS.height=cssH(pane,d.height);hasRoom=CSS.width>0&&CSS.height>0;if(!hasRoom&&!state.initialized&&o.minWidth>0){var reqPx=o.minWidth-s.outerWidth,minE=options.east.minSize||0,minW=options.west.minSize||0,sizeE=state.east.size,sizeW=state.west.size,newE=sizeE,newW=sizeW;if(reqPx>0&&state.east.isVisible&&sizeE>minE){newE=max(sizeE-minE,sizeE-reqPx);reqPx-=sizeE-newE}if(reqPx>0&&state.west.isVisible&&sizeW>minW){newW=max(sizeW-minW,sizeW-reqPx);reqPx-=sizeW-newW}if(reqPx==0){if(sizeE!=minE){sizePane("east",newE,true)}if(sizeW!=minW){sizePane("west",newW,true)}sizeMidPanes("center",skipCallback,force);return}}}else{$.extend(s,getElemDims($P),cssMinDims(pane));if(!force&&!s.noVerticalRoom&&d.height==s.outerHeight){return true}CSS.top=d.top;CSS.bottom=d.bottom;CSS.height=cssH(pane,d.height);s.maxHeight=max(0,CSS.height);hasRoom=(s.maxHeight>0);if(!hasRoom){s.noVerticalRoom=true}}if(hasRoom){if(!skipCallback&&state.initialized){_execCallback(pane,o.onresize_start)}$P.css(CSS);$.extend(s,getElemDims($P));if(s.noRoom){makePaneFit(pane)}if(state.initialized){sizeContent(pane)}}else{if(!s.noRoom&&s.isVisible){makePaneFit(pane)}}if(pane=="center"){var b=state.browser;var fix=b.isIE6||(b.msie&&!b.boxModel);if($Ps.north&&(fix||state.north.tagName=="IFRAME")){$Ps.north.css("width",cssW($Ps.north,sC.innerWidth))}if($Ps.south&&(fix||state.south.tagName=="IFRAME")){$Ps.south.css("width",cssW($Ps.south,sC.innerWidth))}}if(!skipCallback&&state.initialized&&s.isVisible){_execCallback(pane,o.onresize_end||o.onresize);if(o.resizeNestedLayout&&$P.data("layoutContainer")){$P.layout().resizeAll()}}})};var resizeAll=function(){var oldW=sC.innerWidth,oldH=sC.innerHeight;$.extend(state.container,getElemDims($Container));if(!sC.outerHeight){return}if(false===_execCallback(null,options.onresizeall_start)){return false}var shrunkH=(sC.innerHeight<oldH),shrunkW=(sC.innerWidth<oldW),$P,o,s,dir;$.each(["south","north","east","west"],function(i,pane){if(!$Ps[pane]){return}s=state[pane];o=options[pane];dir=_c[pane].dir;if(o.autoResize&&s.size!=o.size){sizePane(pane,o.size,true,true)}else{setSizeLimits(pane);makePaneFit(pane,false,true,true)}});sizeMidPanes("all",true,true);sizeHandles("all");o=options;$.each(_c.allPanes.split(","),function(i,pane){$P=$Ps[pane];if(!$P){return}if(state[pane].isVisible){_execCallback(pane,o[pane].onresize_end||o[pane].onresize)}if(o[pane].resizeNestedLayout&&$P.data("layoutContainer")){$P.layout().resizeAll()}});_execCallback(null,o.onresizeall_end||o.onresizeall)};var sizeContent=function(panes,remeasure){if(!panes||panes=="all"){panes=_c.allPanes}$.each(panes.split(","),function(idx,pane){var $P=$Ps[pane],$C=$Cs[pane],o=options[pane],s=state[pane],m=s.content;if(!$P||!$C||!$P.is(":visible")){return true}if(false===_execCallback(null,o.onsizecontent_start)){return}if(!_c.isLayoutBusy||m.top==undefined||remeasure||o.resizeContentWhileDragging){_measure();if(m.hiddenFooters>0&&$P.css("overflow")=="hidden"){$P.css("overflow","visible");_measure();$P.css("overflow","hidden")}}var 
newH=s.innerHeight-(m.spaceAbove-s.css.paddingTop)-(m.spaceBelow-s.css.paddingBottom);if(!$C.is(":visible")||m.height!=newH){setOuterHeight($C,newH,true);m.height=newH}if(state.initialized){_execCallback(pane,o.onsizecontent_end||o.onsizecontent);if(o.resizeNestedLayout&&$C.data("layoutContainer")){$C.layout().resizeAll()}}function _below($E){return max(s.css.paddingBottom,(parseInt($E.css("marginBottom"))||0))}function _measure(){var ignore=options[pane].contentIgnoreSelector,$Fs=$C.nextAll().not(ignore||":lt(0)"),$Fs_vis=$Fs.filter(":visible"),$F=$Fs_vis.filter(":last");m={top:$C[0].offsetTop,height:$C.outerHeight(),numFooters:$Fs.length,hiddenFooters:$Fs.length-$Fs_vis.length,spaceBelow:0};m.spaceAbove=m.top;m.bottom=m.top+m.height;if($F.length){m.spaceBelow=($F[0].offsetTop+$F.outerHeight())-m.bottom+_below($F)}else{m.spaceBelow=_below($C)}}})};var sizeHandles=function(panes){if(!panes||panes=="all"){panes=_c.borderPanes}$.each(panes.split(","),function(i,pane){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],$TC;if(!$P||!$R){return}var dir=_c[pane].dir,_state=(s.isClosed?"_closed":"_open"),spacing=o["spacing"+_state],togAlign=o["togglerAlign"+_state],togLen=o["togglerLength"+_state],paneLen,offset,CSS={};if(spacing==0){$R.hide();return}else{if(!s.noRoom&&!s.isHidden){$R.show()}}if(dir=="horz"){paneLen=$P.outerWidth();s.resizerLength=paneLen;$R.css({width:max(1,cssW($R,paneLen)),height:max(0,cssH($R,spacing)),left:_cssNum($P,"left")})}else{paneLen=$P.outerHeight();s.resizerLength=paneLen;$R.css({height:max(1,cssH($R,paneLen)),width:max(0,cssW($R,spacing)),top:sC.insetTop+getPaneSize("north",true)})}removeHover(o,$R);if($T){if(togLen==0||(s.isSliding&&o.hideTogglerOnSlide)){$T.hide();return}else{$T.show()}if(!(togLen>0)||togLen=="100%"||togLen>paneLen){togLen=paneLen;offset=0}else{if(isStr(togAlign)){switch(togAlign){case"top":case"left":offset=0;break;case"bottom":case"right":offset=paneLen-togLen;break;case"middle":case"center":default:offset=Math.floor((paneLen-togLen)/2)}}else{var x=parseInt(togAlign);if(togAlign>=0){offset=x}else{offset=paneLen-togLen+x}}}if(dir=="horz"){var width=cssW($T,togLen);$T.css({width:max(0,width),height:max(1,cssH($T,spacing)),left:offset,top:0});$T.children(".content").each(function(){$TC=$(this);$TC.css("marginLeft",Math.floor((width-$TC.outerWidth())/2))})}else{var height=cssH($T,togLen);$T.css({height:max(0,height),width:max(1,cssW($T,spacing)),top:offset,left:0});$T.children(".content").each(function(){$TC=$(this);$TC.css("marginTop",Math.floor((height-$TC.outerHeight())/2))})}removeHover(0,$T)}if(!state.initialized&&o.initHidden){$R.hide();if($T){$T.hide()}}})};var swapPanes=function(pane1,pane2){state[pane1].edge=pane2;state[pane2].edge=pane1;var cancelled=false;if(false===_execCallback(pane1,options[pane1].onswap_start)){cancelled=true}if(!cancelled&&false===_execCallback(pane2,options[pane2].onswap_start)){cancelled=true}if(cancelled){state[pane1].edge=pane1;state[pane2].edge=pane2;return}var 
oPane1=copy(pane1),oPane2=copy(pane2),sizes={};sizes[pane1]=oPane1?oPane1.state.size:0;sizes[pane2]=oPane2?oPane2.state.size:0;$Ps[pane1]=false;$Ps[pane2]=false;state[pane1]={};state[pane2]={};if($Ts[pane1]){$Ts[pane1].remove()}if($Ts[pane2]){$Ts[pane2].remove()}if($Rs[pane1]){$Rs[pane1].remove()}if($Rs[pane2]){$Rs[pane2].remove()}$Rs[pane1]=$Rs[pane2]=$Ts[pane1]=$Ts[pane2]=false;move(oPane1,pane2);move(oPane2,pane1);oPane1=oPane2=sizes=null;if($Ps[pane1]){$Ps[pane1].css(_c.visible)}if($Ps[pane2]){$Ps[pane2].css(_c.visible)}resizeAll();_execCallback(pane1,options[pane1].onswap_end||options[pane1].onswap);_execCallback(pane2,options[pane2].onswap_end||options[pane2].onswap);return;function copy(n){var $P=$Ps[n],$C=$Cs[n];return !$P?false:{pane:n,P:$P?$P[0]:false,C:$C?$C[0]:false,state:$.extend({},state[n]),options:$.extend({},options[n])}}function move(oPane,pane){if(!oPane){return}var P=oPane.P,C=oPane.C,oldPane=oPane.pane,c=_c[pane],side=c.side.toLowerCase(),inset="inset"+c.side,s=$.extend({},state[pane]),o=options[pane],fx={resizerCursor:o.resizerCursor},re,size,pos;$.each("fxName,fxSpeed,fxSettings".split(","),function(i,k){fx[k]=o[k];fx[k+"_open"]=o[k+"_open"];fx[k+"_close"]=o[k+"_close"]});$Ps[pane]=$(P).data("layoutEdge",pane).css(_c.hidden).css(c.cssReq);$Cs[pane]=C?$(C):false;options[pane]=$.extend({},oPane.options,fx);state[pane]=$.extend({},oPane.state);re=new RegExp(o.paneClass+"-"+oldPane,"g");P.className=P.className.replace(re,o.paneClass+"-"+pane);initHandles(pane);initResizable(pane);if(c.dir!=_c[oldPane].dir){size=sizes[pane]||0;setSizeLimits(pane);size=max(size,state[pane].minSize);manualSizePane(pane,size,true)}else{$Rs[pane].css(side,sC[inset]+(state[pane].isVisible?getPaneSize(pane):0))}if(oPane.state.isVisible&&!s.isVisible){setAsOpen(pane,true)}else{setAsClosed(pane,true);bindStartSlidingEvent(pane,true)}oPane=null}};function keyDown(evt){if(!evt){return true}var code=evt.keyCode;if(code<33){return true}var PANE={38:"north",40:"south",37:"west",39:"east"},ALT=evt.altKey,SHIFT=evt.shiftKey,CTRL=evt.ctrlKey,CURSOR=(CTRL&&code>=37&&code<=40),o,k,m,pane;if(CURSOR&&options[PANE[code]].enableCursorHotkey){pane=PANE[code]}else{if(CTRL||SHIFT){$.each(_c.borderPanes.split(","),function(i,p){o=options[p];k=o.customHotkey;m=o.customHotkeyModifier;if((SHIFT&&m=="SHIFT")||(CTRL&&m=="CTRL")||(CTRL&&SHIFT)){if(k&&code==(isNaN(k)||k<=9?k.toUpperCase().charCodeAt(0):k)){pane=p;return false}}})}}if(!pane||!$Ps[pane]||!options[pane].closable||state[pane].isHidden){return true}toggle(pane);evt.stopPropagation();evt.returnValue=false;return false}function allowOverflow(el){if(this&&this.tagName){el=this}var $P;if(isStr(el)){$P=$Ps[el]}else{if($(el).data("layoutRole")){$P=$(el)}else{$(el).parents().each(function(){if($(this).data("layoutRole")){$P=$(this);return false}})}}if(!$P||!$P.length){return}var pane=$P.data("layoutEdge"),s=state[pane];if(s.cssSaved){resetOverflow(pane)}if(s.isSliding||s.isResizing||s.isClosed){s.cssSaved=false;return}var newCSS={zIndex:(_c.zIndex.pane_normal+2)},curCSS={},of=$P.css("overflow"),ofX=$P.css("overflowX"),ofY=$P.css("overflowY");if(of!="visible"){curCSS.overflow=of;newCSS.overflow="visible"}if(ofX&&!ofX.match(/visible|auto/)){curCSS.overflowX=ofX;newCSS.overflowX="visible"}if(ofY&&!ofY.match(/visible|auto/)){curCSS.overflowY=ofX;newCSS.overflowY="visible"}s.cssSaved=curCSS;$P.css(newCSS);$.each(_c.allPanes.split(","),function(i,p){if(p!=pane){resetOverflow(p)}})}function resetOverflow(el){if(this&&this.tagName){el=this}var 
$P;if(isStr(el)){$P=$Ps[el]}else{if($(el).data("layoutRole")){$P=$(el)}else{$(el).parents().each(function(){if($(this).data("layoutRole")){$P=$(this);return false}})}}if(!$P||!$P.length){return}var pane=$P.data("layoutEdge"),s=state[pane],CSS=s.cssSaved||{};if(!s.isSliding&&!s.isResizing){$P.css("zIndex",_c.zIndex.pane_normal)}$P.css(CSS);s.cssSaved=false}function getBtn(selector,pane,action){var $E=$(selector);if(!$E.length){alert(lang.errButton+lang.selector+": "+selector)}else{if(_c.borderPanes.indexOf(pane)==-1){alert(lang.errButton+lang.Pane.toLowerCase()+": "+pane)}else{var btn=options[pane].buttonClass+"-"+action;$E.addClass(btn+" "+btn+"-"+pane).data("layoutName",options.name);return $E}}return false}function bindButton(selector,action,pane){switch(action.toLowerCase()){case"toggle":addToggleBtn(selector,pane);break;case"open":addOpenBtn(selector,pane);break;case"close":addCloseBtn(selector,pane);break;case"pin":addPinBtn(selector,pane);break;case"toggle-slide":addToggleBtn(selector,pane,true);break;case"open-slide":addOpenBtn(selector,pane,true);break}}function addToggleBtn(selector,pane,slide){var $E=getBtn(selector,pane,"toggle");if($E){$E.click(function(evt){toggle(pane,!!slide);evt.stopPropagation()})}}function addOpenBtn(selector,pane,slide){var $E=getBtn(selector,pane,"open");if($E){$E.attr("title",lang.Open).click(function(evt){open(pane,!!slide);evt.stopPropagation()})}}function addCloseBtn(selector,pane){var $E=getBtn(selector,pane,"close");if($E){$E.attr("title",lang.Close).click(function(evt){close(pane);evt.stopPropagation()})}}function addPinBtn(selector,pane){var $E=getBtn(selector,pane,"pin");if($E){var s=state[pane];$E.click(function(evt){setPinState($(this),pane,(s.isSliding||s.isClosed));if(s.isSliding||s.isClosed){open(pane)}else{close(pane)}evt.stopPropagation()});setPinState($E,pane,(!s.isClosed&&!s.isSliding));_c[pane].pins.push(selector)}}function syncPinBtns(pane,doPin){$.each(_c[pane].pins,function(i,selector){setPinState($(selector),pane,doPin)})}function setPinState($Pin,pane,doPin){var updown=$Pin.attr("pin");if(updown&&doPin==(updown=="down")){return}var pin=options[pane].buttonClass+"-pin",side=pin+"-"+pane,UP=pin+"-up "+side+"-up",DN=pin+"-down "+side+"-down";$Pin.attr("pin",doPin?"down":"up").attr("title",doPin?lang.Unpin:lang.Pin).removeClass(doPin?UP:DN).addClass(doPin?DN:UP)}function isCookiesEnabled(){return(navigator.cookieEnabled!=0)}function getCookie(opts){var o=$.extend({},options.cookie,opts||{}),name=o.name||options.name||"Layout",c=document.cookie,cs=c?c.split(";"):[],pair;for(var i=0,n=cs.length;i<n;i++){pair=$.trim(cs[i]).split("=");if(pair[0]==name){return decodeJSON(decodeURIComponent(pair[1]))}}return""}function saveCookie(keys,opts){var o=$.extend({},options.cookie,opts||{}),name=o.name||options.name||"Layout",params="",date="",clear=false;if(o.expires.toUTCString){date=o.expires}else{if(typeof o.expires=="number"){date=new Date();if(o.expires>0){date.setDate(date.getDate()+o.expires)}else{date.setYear(1970);clear=true}}}if(date){params+=";expires="+date.toUTCString()}if(o.path){params+=";path="+o.path}if(o.domain){params+=";domain="+o.domain}if(o.secure){params+=";secure"}if(clear){state.cookie={};document.cookie=name+"="+params}else{state.cookie=getState(keys||o.keys);document.cookie=name+"="+encodeURIComponent(encodeJSON(state.cookie))+params}return $.extend({},state.cookie)}function deleteCookie(){saveCookie("",{expires:-1})}function loadCookie(opts){var o=getCookie(opts);if(o){state.cookie=$.extend({},o);loadState(o)}return 
o}function loadState(opts){$.extend(true,options,opts)}function getState(keys){var data={},alt={isClosed:"initClosed",isHidden:"initHidden"},pair,pane,key,val;if(!keys){keys=options.cookie.keys}if($.isArray(keys)){keys=keys.join(",")}keys=keys.replace(/__/g,".").split(",");for(var i=0,n=keys.length;i<n;i++){pair=keys[i].split(".");pane=pair[0];key=pair[1];if(_c.allPanes.indexOf(pane)<0){continue}val=state[pane][key];if(val==undefined){continue}if(key=="isClosed"&&state[pane]["isSliding"]){val=true}(data[pane]||(data[pane]={}))[alt[key]?alt[key]:key]=val}return data}function encodeJSON(JSON){return parse(JSON);function parse(h){var D=[],i=0,k,v,t;for(k in h){v=h[k];t=typeof v;if(t=="string"){v='"'+v+'"'}else{if(t=="object"){v=parse(v)}}D[i++]='"'+k+'":'+v}return"{"+D.join(",")+"}"}}function decodeJSON(str){try{return window["eval"]("("+str+")")||{}}catch(e){return{}}}var $Container=$(this).eq(0);if(!$Container.length){return null}if($Container.data("layoutContainer")){return $.extend({},window[$Container.data("layoutContainer")])}var $Ps={},$Cs={},$Rs={},$Ts={},sC=state.container,sID=state.id;_create();var Instance={options:options,state:state,container:$Container,panes:$Ps,contents:$Cs,resizers:$Rs,togglers:$Ts,toggle:toggle,hide:hide,show:show,open:open,close:close,slideOpen:slideOpen,slideClose:slideClose,slideToggle:slideToggle,initContent:initContent,sizeContent:sizeContent,sizePane:manualSizePane,swapPanes:swapPanes,resizeAll:resizeAll,destroy:destroy,setSizeLimits:setSizeLimits,bindButton:bindButton,addToggleBtn:addToggleBtn,addOpenBtn:addOpenBtn,addCloseBtn:addCloseBtn,addPinBtn:addPinBtn,allowOverflow:allowOverflow,resetOverflow:resetOverflow,encodeJSON:encodeJSON,decodeJSON:decodeJSON,getState:getState,getCookie:getCookie,saveCookie:saveCookie,deleteCookie:deleteCookie,loadCookie:loadCookie,loadState:loadState,cssWidth:cssW,cssHeight:cssH};window[sID]=Instance;return Instance}})(jQuery);
+$.layout.browserZoom = {
+
+ _init: function (inst) {
+ // abort if browser does not need this check
+ if ($.layout.browserZoom.ratio() !== false)
+ $.layout.browserZoom._setTimer(inst);
+ }
+
+, _setTimer: function (inst) {
+ // abort if layout destroyed or browser does not need this check
+ if (inst.destroyed) return;
+ var o = inst.options
+ , s = inst.state
+ // don't need the check if inst has a parentLayout, but check occasionally in case the parent was destroyed!
+ // MINIMUM 100ms interval, for performance
+ , ms = inst.hasParentLayout ? 5000 : Math.max( o.browserZoomCheckInterval, 100 )
+ ;
+ // set the timer
+ setTimeout(function(){
+ if (inst.destroyed || !o.resizeWithWindow) return;
+ var d = $.layout.browserZoom.ratio();
+ if (d !== s.browserZoom) {
+ s.browserZoom = d;
+ inst.resizeAll();
+ }
+ // set a NEW timeout
+ $.layout.browserZoom._setTimer(inst);
+ }
+ , ms );
+ }
+
+, ratio: function () {
+ var w = window
+ , s = screen
+ , d = document
+ , dE = d.documentElement || d.body
+ , b = $.layout.browser
+ , v = b.version
+ , r, sW, cW
+ ;
+ // we can ignore all browsers that fire window.resize event onZoom
+ if ((b.msie && v > 8)
+ || !b.msie
+ ) return false; // don't need to track zoom
+
+ if (s.deviceXDPI)
+ return calc(s.deviceXDPI, s.systemXDPI);
+ // everything below is just for future reference!
+ if (b.webkit && (r = d.body.getBoundingClientRect))
+ return calc((r.left - r.right), d.body.offsetWidth);
+ if (b.webkit && (sW = w.outerWidth))
+ return calc(sW, w.innerWidth);
+ if ((sW = s.width) && (cW = dE.clientWidth))
+ return calc(sW, cW);
+ return false; // no match, so cannot - or don't need to - track zoom
+
+ function calc (x,y) { return (parseInt(x,10) / parseInt(y,10) * 100).toFixed(); }
+ }
+
+};
+// add initialization method to Layout's onReady array of functions
+$.layout.onReady.push( $.layout.browserZoom._init );
+
+
+
+})( jQuery ); \ No newline at end of file
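
The zoom check above reduces to one piece of arithmetic: calc(x, y) yields x / y * 100, rounded to a whole percentage, with 100 meaning "not zoomed". A minimal Scala sketch of that formula, with made-up DPI values (the zoomRatio helper is hypothetical, for illustration only, and is not part of this patch):

{{{
// Same ratio as browserZoom.ratio()'s calc() helper: device DPI over system DPI, as a percentage.
def zoomRatio(deviceXDPI: Int, systemXDPI: Int): Int =
  math.round(deviceXDPI.toDouble / systemXDPI * 100).toInt

zoomRatio(96, 96)   // == 100, unzoomed
zoomRatio(120, 96)  // == 125, IE reports a larger device DPI when zoomed to 125%
}}}
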
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
new file mode 100644
index 0000000000..bc29efb3e6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
new file mode 100644
index 0000000000..8313f4975b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 6fb83c133e..5a1779bba5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -106,7 +106,7 @@ a[href]:hover {
font-size: 24pt;
text-shadow: black 0px 2px 0px;
/* text-shadow: black 0px 0px 0px;*/
-text-decoration: none;
+text-decoration: none;
}
#definition #owner {
@@ -162,7 +162,7 @@ text-decoration: none;
padding-left: 15px;
background: url("arrow-right.png") no-repeat 0 3px transparent;
}
-
+
.toggleContainer.open .toggle {
background: url("arrow-down.png") no-repeat 0 3px transparent;
}
@@ -205,6 +205,11 @@ dl.attributes > dt {
font-style: italic;
}
+dl.attributes > dt.implicit {
+ font-weight: bold;
+ color: darkgreen;
+}
+
dl.attributes > dd {
display: block;
padding-left: 10em;
@@ -241,6 +246,17 @@ dl.attributes > dd {
color: white;
}
+#inheritedMembers > div.conversion > h3 {
+ background: #dadada url("conversionbg.gif") repeat-x bottom left; /* gray */
+ height: 17px;
+ font-style: italic;
+ font-size: 12pt;
+}
+
+#inheritedMembers > div.conversion > h3 * {
+ color: white;
+}
+
/* Member cells */
div.members > ol {
@@ -310,10 +326,21 @@ div.members > ol > li:last-child {
font-weight: bold;
}
-.signature .symbol .params .implicit {
+.signature .symbol > .implicit {
+ display: inline-block;
+ font-weight: bold;
+ text-decoration: underline;
+ color: darkgreen;
+}
+
+.signature .symbol .params > .implicit {
font-style: italic;
}
+.signature .symbol .implicit.deprecated {
+ text-decoration: line-through;
+}
+
.signature .symbol .name.deprecated {
text-decoration: line-through;
}
@@ -369,15 +396,15 @@ div.members > ol > li:last-child {
.cmt {}
.cmt p {
- margin: 0.7em 0;
+ margin: 0.7em 0;
}
.cmt p:first-child {
- margin-top: 0;
+ margin-top: 0;
}
.cmt p:last-child {
- margin-bottom: 0;
+ margin-bottom: 0;
}
.cmt h3,
@@ -539,7 +566,7 @@ div.fullcommenttop .block {
margin-bottom: 5px
}
-div.fullcomment div.block ol li p,
+div.fullcomment div.block ol li p,
div.fullcomment div.block ol li {
display:inline
}
@@ -583,10 +610,10 @@ div.fullcomment dl.paramcmts > dd {
/* Members filter tool */
#textfilter {
- position: relative;
- display: block;
+ position: relative;
+ display: block;
height: 20px;
- margin-bottom: 5px;
+ margin-bottom: 5px;
}
#textfilter > .pre {
@@ -600,7 +627,7 @@ div.fullcomment dl.paramcmts > dd {
}
#textfilter > .input {
- display: block;
+ display: block;
position: absolute;
top: 0;
right: 20px;
@@ -608,10 +635,10 @@ div.fullcomment dl.paramcmts > dd {
}
#textfilter > .input > input {
- height: 20px;
- padding: 1px;
- font-weight: bold;
- color: #000000;
+ height: 20px;
+ padding: 1px;
+ font-weight: bold;
+ color: #000000;
background: #ffffff url("filterboxbarbg.png") repeat-x top left;
width: 100%;
}
@@ -660,6 +687,13 @@ div.fullcomment dl.paramcmts > dd {
display: inline-block;
}
+#mbrsel > div > a {
+ position:relative;
+ top: -8px;
+ font-size: 11px;
+ text-shadow: #ffffff 0 1px 0;
+}
+
#mbrsel > div > ol#linearization {
display: table;
margin-left: 70px;
@@ -683,9 +717,32 @@ div.fullcomment dl.paramcmts > dd {
text-shadow: #ffffff 0 1px 0;
}
+#mbrsel > div > ol#implicits {
+ display: table;
+ margin-left: 70px;
+}
+
+#mbrsel > div > ol#implicits > li.in {
+ text-decoration: none;
+ float: left;
+ padding-right: 10px;
+ margin-right: 5px;
+ background: url(selected-right-implicits.png) no-repeat;
+ background-position: right 0px;
+}
+
+#mbrsel > div > ol#implicits > li.in > span{
+ color: #404040;
+ float: left;
+ padding: 1px 0 1px 10px;
+ background: url(selected-implicits.png) no-repeat;
+ background-position: 0px 0px;
+ text-shadow: #ffffff 0 1px 0;
+}
+
#mbrsel > div > ol > li {
/* padding: 3px 10px;*/
- line-height: 16pt;
+ line-height: 16pt;
display: inline-block;
cursor: pointer;
}
@@ -709,10 +766,10 @@ div.fullcomment dl.paramcmts > dd {
}
#mbrsel > div > ol > li.out {
- text-decoration: none;
- float: left;
- padding-right: 10px;
- margin-right: 5px;
+ text-decoration: none;
+ float: left;
+ padding-right: 10px;
+ margin-right: 5px;
}
#mbrsel > div > ol > li.out > span{
@@ -739,10 +796,10 @@ div.fullcomment dl.paramcmts > dd {
#mbrsel .showall {
color: #4C4C4C;
line-height: 16px;
- font-weight: bold;
+ font-weight: bold;
}
#mbrsel .showall span {
color: #4C4C4C;
- font-weight: bold;
+ font-weight: bold;
}*/ \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index 3cdd9a7f27..33fbd83bee 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -2,30 +2,60 @@
// code by Gilles Dubochet with contributions by Pedro Furlanetto
$(document).ready(function(){
- var isHiddenClass;
- if (document.title == 'scala.AnyRef') {
- isHiddenClass = function (name) {
- return name == 'scala.Any';
- };
- } else {
- isHiddenClass = function (name) {
- return name == 'scala.Any' || name == 'scala.AnyRef';
- };
- }
+ var isHiddenClass = function (name) {
+ return name == 'scala.Any' ||
+ name == 'scala.AnyRef' ||
+ name == 'scala.Predef.any2stringfmt' ||
+ name == 'scala.Predef.any2stringadd' ||
+ name == 'scala.Predef.any2ArrowAssoc' ||
+ name == 'scala.Predef.any2Ensuring'
+ };
+
+ $("#linearization li:gt(0)").filter(function(){
+ return isHiddenClass($(this).attr("name"));
+ }).removeClass("in").addClass("out");
- $("#linearization li").filter(function(){
+ $("#implicits li").filter(function(){
return isHiddenClass($(this).attr("name"));
}).removeClass("in").addClass("out");
-
+
// Pre-filter members
filter();
// Member filter box
var input = $("#textfilter input");
input.bind("keyup", function(event) {
- if (event.keyCode == 27)
- input.val(""); // escape key
- filter(true);
+
+ switch ( event.keyCode ) {
+
+ case 27: // escape key
+ input.val("");
+ filter(true);
+ break;
+
+ case 38: // up
+ input.val("");
+ filter(false);
+ window.scrollTo(0, $("body").offset().top);
+ input.focus();
+ break;
+
+ case 33: //page up
+ input.val("");
+ filter(false);
+ break;
+
+ case 34: //page down
+ input.val("");
+ filter(false);
+ break;
+
+ default:
+ window.scrollTo(0, $("#mbrsel").offset().top);
+ filter(true);
+ break;
+
+ }
});
input.focus(function(event) {
input.select();
@@ -35,13 +65,13 @@ $(document).ready(function(){
filter();
});
$(document).keydown(function(event) {
- if(!event.altKey && !event.ctrlKey &&
- (event.keyCode == 27 || (event.keyCode >= 48 && event.keyCode <= 90)) &&
- document.activeElement != $("#textfilter input")[0]) {
- $("#textfilter input").focus();
+
+ if (event.keyCode == 9) { // tab
+ $("#index-input", window.parent.document).focus();
+ input.attr("value", "");
+ return false;
}
});
- $("#textfilter input").focus();
$("#linearization li").click(function(){
if ($(this).hasClass("in")) {
@@ -54,17 +84,38 @@ $(document).ready(function(){
};
filter();
});
- $("#ancestors > ol > li.hideall").click(function() {
+
+ $("#implicits li").click(function(){
+ if ($(this).hasClass("in")) {
+ $(this).removeClass("in");
+ $(this).addClass("out");
+ }
+ else if ($(this).hasClass("out")) {
+ $(this).removeClass("out");
+ $(this).addClass("in");
+ };
+ filter();
+ });
+
+ $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() {
$("#linearization li.in").removeClass("in").addClass("out");
$("#linearization li:first").removeClass("out").addClass("in");
+ $("#implicits li.in").removeClass("in").addClass("out");
filter();
})
- $("#ancestors > ol > li.showall").click(function() {
- var filtered =
+ $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() {
+ var filteredLinearization =
$("#linearization li.out").filter(function() {
return ! isHiddenClass($(this).attr("name"));
});
- filtered.removeClass("out").addClass("in");
+ filteredLinearization.removeClass("out").addClass("in");
+
+ var filteredImplicits =
+ $("#implicits li.out").filter(function() {
+ return ! isHiddenClass($(this).attr("name"));
+ });
+ filteredImplicits.removeClass("out").addClass("in");
+
filter();
});
$("#visbl > ol > li.public").click(function() {
@@ -108,8 +159,10 @@ $(document).ready(function(){
});
/* Add toggle arrows */
- var docAllSigs = $("#template li").has(".fullcomment").find(".signature");
-
+ //var docAllSigs = $("#template li").has(".fullcomment").find(".signature");
+ // trying to speed things up a little bit
+ var docAllSigs = $("#template li[fullComment=yes] .signature");
+
function commentToggleFct(signature){
var parent = signature.parent();
var shortComment = $(".shortcomment", parent);
@@ -129,7 +182,7 @@ $(document).ready(function(){
docAllSigs.click(function() {
commentToggleFct($(this));
});
-
+
/* Linear super types and known subclasses */
function toggleShowContentFct(outerElement){
var content = $(".hiddenContent", outerElement);
@@ -148,20 +201,22 @@ $(document).ready(function(){
$(".toggleContainer").click(function() {
toggleShowContentFct($(this));
});
-
+
// Set parent window title
windowTitle();
});
function orderAlpha() {
$("#template > div.parent").hide();
- $("#ancestors").show();
+ $("#template > div.conversion").hide();
+ $("#mbrsel > div[id=ancestors]").show();
filter();
};
function orderInherit() {
$("#template > div.parent").show();
- $("#ancestors").hide();
+ $("#template > div.conversion").show();
+ $("#mbrsel > div[id=ancestors]").hide();
filter();
};
@@ -177,6 +232,9 @@ function initInherit() {
$("#inheritedMembers > div.parent").each(function(){
parents[$(this).attr("name")] = $(this);
});
+ $("#inheritedMembers > div.conversion").each(function(){
+ parents[$(this).attr("name")] = $(this);
+ });
$("#types > ol > li").each(function(){
var mbr = $(this);
this.mbrText = mbr.find("> .fullcomment .cmt").text();
@@ -216,21 +274,29 @@ function initInherit() {
$("#inheritedMembers > div.parent").each(function() {
if ($("> div.members", this).length == 0) { $(this).remove(); };
});
+ $("#inheritedMembers > div.conversion").each(function() {
+ if ($("> div.members", this).length == 0) { $(this).remove(); };
+ });
};
-function filter(scrollToMember) {
+/* filter used to take boolean scrollToMember */
+function filter() {
var query = $.trim($("#textfilter input").val()).toLowerCase();
query = query.replace(/[-[\]{}()*+?.,\\^$|#]/g, "\\$&").replace(/\s+/g, "|");
var queryRegExp = new RegExp(query, "i");
var privateMembersHidden = $("#visbl > ol > li.public").hasClass("in");
var orderingAlphabetic = $("#order > ol > li.alpha").hasClass("in");
- var hiddenSuperclassElements = orderingAlphabetic ? $("#linearization > li.out") : $("#linearization > li:gt(0)");
- var hiddenSuperclasses = hiddenSuperclassElements.map(function() {
+ var hiddenSuperclassElementsLinearization = orderingAlphabetic ? $("#linearization > li.out") : $("#linearization > li:gt(0)");
+ var hiddenSuperclassesLinearization = hiddenSuperclassElementsLinearization.map(function() {
+ return $(this).attr("name");
+ }).get();
+ var hiddenSuperclassElementsImplicits = orderingAlphabetic ? $("#implicits > li.out") : $("#implicits > li");
+ var hiddenSuperclassesImplicits = hiddenSuperclassElementsImplicits.map(function() {
return $(this).attr("name");
}).get();
var hideInheritedMembers;
-
+
if(orderingAlphabetic) {
$("#inheritedMembers").hide();
hideInheritedMembers = true;
@@ -242,9 +308,10 @@ function filter(scrollToMember) {
$("#allMembers > .members").each(filterFunc);
hideInheritedMembers = false;
$("#inheritedMembers > .parent > .members").each(filterFunc);
+ $("#inheritedMembers > .conversion > .members").each(filterFunc);
}
-
+
function filterFunc() {
var membersVisible = false;
var members = $(this);
@@ -262,12 +329,18 @@ function filter(scrollToMember) {
ownerIndex = name.lastIndexOf(".");
}
var owner = name.slice(0, ownerIndex);
- for (var i = 0; i < hiddenSuperclasses.length; i++) {
- if (hiddenSuperclasses[i] == owner) {
+ for (var i = 0; i < hiddenSuperclassesLinearization.length; i++) {
+ if (hiddenSuperclassesLinearization[i] == owner) {
mbr.hide();
return;
}
- }
+ };
+ for (var i = 0; i < hiddenSuperclassesImplicits.length; i++) {
+ if (hiddenSuperclassesImplicits[i] == owner) {
+ mbr.hide();
+ return;
+ }
+ };
}
if (query && !(queryRegExp.test(name) || queryRegExp.test(this.mbrText))) {
mbr.hide();
@@ -276,17 +349,13 @@ function filter(scrollToMember) {
mbr.show();
membersVisible = true;
});
-
+
if (membersVisible)
members.show();
else
members.hide();
};
- if (scrollToMember) {
- window.scrollTo(0, $("#mbrsel").offset().top);
- }
-
return false;
};
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
index 6eb14a4907..6488847049 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -167,6 +167,8 @@ trait MemberEntity extends Entity {
/** Whether this member is abstract. */
def isAbstract: Boolean
+ /** If this member originates from an implicit conversion, this points to the conversion it came from */
+ def byConversion: Option[ImplicitConversion]
}
object MemberEntity {
// Oh contravariance, contravariance, wherefore art thou contravariance?
@@ -246,6 +248,8 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
* other entity of the pair is the companion. */
def companion: Option[DocTemplateEntity]
+ /** The implicit conversions of this template (only classes and traits are affected; objects and packages are not) */
+ def conversions: List[ImplicitConversion]
}
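
The two hooks added above, byConversion on members and conversions on templates, are what lets a front-end tell regular members apart from members contributed by a conversion. A minimal sketch of a consumer, assuming only the entity API shown in this file (the membersByOrigin helper is hypothetical, not part of the patch):

{{{
import scala.tools.nsc.doc.model._

// Bucket a template's members by the implicit conversion they came through;
// the None bucket holds members declared or inherited the normal way.
def membersByOrigin(tpl: DocTemplateEntity): Map[Option[ImplicitConversion], List[MemberEntity]] =
  tpl.members.groupBy(_.byConversion)
}}}
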
@@ -413,3 +417,106 @@ trait Annotation extends Entity {
def arguments: List[ValueArgument]
}
+
+/** An implicit conversion that contributes members to a documented template */
+trait ImplicitConversion {
+
+ /** The source of the implicit conversion */
+ def source: DocTemplateEntity
+
+ /** The result type after the conversion */
+ def targetType: TypeEntity
+
+ /** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */
+ def convertorMethod: Either[MemberEntity, String]
+
+ /** A short name of the conversion */
+ def conversionShortName: String
+
+ /** A qualified name uniquely identifying the conversion (currently: the conversion method's qualified name) */
+ def conversionQualifiedName: String
+
+ /** The template that declares the conversion method */
+ def convertorOwner: TemplateEntity
+
+ /** The constraints that the conversion puts on the type parameters */
+ def constraints: List[Constraint]
+
+ /** The members made available through this implicit conversion */
+ def members: List[MemberEntity]
+}
+
+/** A trait that encapsulates a constraint necessary for implicit conversion */
+trait Constraint {
+ // /** The implicit conversion during which this constraint appears */
+ // def conversion: ImplicitConversion
+}
+
+/** A constraint requiring an implicit value of a certain type to be in scope */
+trait ImplicitInScopeConstraint extends Constraint {
+ /** The type of the implicit value required */
+ def implicitType: TypeEntity
+
+ /** toString for debugging */
+ override def toString = "an implicit _: " + implicitType.name + " must be in scope"
+}
+
+trait TypeClassConstraint extends ImplicitInScopeConstraint with TypeParamConstraint {
+ /** Type class name */
+ def typeClassEntity: TemplateEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a class of type " + typeClassEntity.qualifiedName + " (" +
+ typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+trait KnownTypeClassConstraint extends TypeClassConstraint {
+ /** Type explanation, takes the type parameter name and generates the explanation */
+ def typeExplanation: (String) => String
+
+ /** toString for debugging */
+ override def toString = typeExplanation(typeParamName) + " (" + typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+/** A constraint involving a type parameter */
+trait TypeParamConstraint extends Constraint {
+ /** The type parameter involved */
+ def typeParamName: String
+}
+
+trait EqualTypeParamConstraint extends TypeParamConstraint {
+ /** The rhs */
+ def rhs: TypeEntity
+ /** toString for debugging */
+ override def toString = typeParamName + " is " + rhs.name + " (" + typeParamName + " =:= " + rhs.name + ")"
+}
+
+trait BoundedTypeParamConstraint extends TypeParamConstraint {
+ /** The lower bound */
+ def lowerBound: TypeEntity
+
+ /** The upper bound */
+ def upperBound: TypeEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a superclass of " + lowerBound.name + " and a subclass of " +
+ upperBound.name + " (" + typeParamName + " >: " + lowerBound.name + " <: " + upperBound.name + ")"
+}
+
+trait LowerBoundedTypeParamConstraint extends TypeParamConstraint {
+ /** The lower bound */
+ def lowerBound: TypeEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a superclass of " + lowerBound.name + " (" + typeParamName + " >: " +
+ lowerBound.name + ")"
+}
+
+trait UpperBoundedTypeParamConstraint extends TypeParamConstraint {
+ /** The upper bound */
+ def upperBound: TypeEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a subclass of " + upperBound.name + " (" + typeParamName + " <: " +
+ upperBound.name + ")"
+} \ No newline at end of file
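
Each constraint trait above ships a debugging toString that already spells out the intended wording; a renderer would typically pattern match on the subtypes instead. A rough sketch of such a matcher, assuming only the traits declared above (the render helper itself is hypothetical):

{{{
import scala.tools.nsc.doc.model._

// Turn a constraint into a one-line description by matching on the subtypes above;
// anything unknown falls back to the debugging toString.
def render(c: Constraint): String = c match {
  case b: BoundedTypeParamConstraint      => b.typeParamName + " >: " + b.lowerBound.name + " <: " + b.upperBound.name
  case l: LowerBoundedTypeParamConstraint => l.typeParamName + " >: " + l.lowerBound.name
  case u: UpperBoundedTypeParamConstraint => u.typeParamName + " <: " + u.upperBound.name
  case e: EqualTypeParamConstraint        => e.typeParamName + " =:= " + e.rhs.name
  case t: TypeClassConstraint             => t.typeParamName + ": " + t.typeClassEntity.name
  case i: ImplicitInScopeConstraint       => "an implicit " + i.implicitType.name + " must be in scope"
  case other                              => other.toString
}
}}}
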
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
index ef3c2beffb..6392de22ff 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -8,6 +8,7 @@ package doc
package model
import scala.collection._
+import language.reflectiveCalls
object IndexModelFactory {
@@ -15,7 +16,7 @@ object IndexModelFactory {
lazy val firstLetterIndex: Map[Char, SymbolMap] = {
- val result = new mutable.HashMap[Char,SymbolMap] {
+ object result extends mutable.HashMap[Char,SymbolMap] {
/* Owner template ordering */
implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 670c9bbb3b..9062203dcd 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -17,7 +17,7 @@ import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
class ModelFactory(val global: Global, val settings: doc.Settings) {
- thisFactory: ModelFactory with CommentFactory with TreeFactory =>
+ thisFactory: ModelFactory with ModelFactoryImplicitSupport with CommentFactory with TreeFactory =>
import global._
import definitions.{ ObjectClass, RootPackage, EmptyPackage, NothingClass, AnyClass, AnyValClass, AnyRefClass }
@@ -42,7 +42,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
memberSym.isOmittablePrefix || (closestPackage(memberSym) == closestPackage(templateSym))
}
- private lazy val noSubclassCache = Set(AnyClass, AnyRefClass, ObjectClass)
+ private lazy val noSubclassCache = Set[Symbol](AnyClass, AnyRefClass, ObjectClass)
/** */
def makeModel: Option[Universe] = {
@@ -95,7 +95,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isDocTemplate = false
}
- abstract class MemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
+ abstract class MemberImpl(sym: Symbol, implConv: ImplicitConversionImpl = null, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
lazy val comment =
if (inTpl == null) None else thisFactory.comment(sym, inTpl)
override def inTemplate = inTpl
@@ -128,7 +128,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
if (sym.isSealed) fgs += Paragraph(Text("sealed"))
if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
- if (!sym.isTrait && (sym hasFlag Flags.DEFERRED)) fgs += Paragraph(Text("abstract"))
+ /* Resetting the DEFERRED flag is a little trick here for refined types (example from scala.collection):
+ * {{{
+ * implicit def traversable2ops[T](t: collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+ * def isParallel = ...
+ * }}}
+ * the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have
+ * any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */
+ if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (implConv eq null)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
fgs.toList
}
@@ -162,7 +169,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
case NullaryMethodType(res) => resultTpe(res)
case _ => tpe
}
- makeTypeInTemplateContext(resultTpe(sym.tpe), inTemplate, sym)
+ val tpe = if (implConv eq null) sym.tpe else implConv.toType memberInfo sym
+ makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym)
}
def isDef = false
def isVal = false
@@ -173,15 +181,17 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isAliasType = false
def isAbstractType = false
def isAbstract =
- ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED))) ||
+ // for the explanation of implConv == null see comment on flags
+ ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (implConv == null)) ||
sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
def isTemplate = false
+ def byConversion = if (implConv ne null) Some(implConv) else None
}
/** The instantiation of `TemplateImpl` triggers the creation of the following entities:
* All ancestors of the template and all non-package members.
*/
- abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
+ abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, null, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
//if (inTpl != null) println("mbr " + sym + " in " + (inTpl.toRoot map (_.sym)).mkString(" > "))
if (settings.verbose.value)
inform("Creating doc template for " + sym)
@@ -221,9 +231,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
Some(makeType(RefinedType(tps, EmptyScope), inTpl))
}
}
- val linearization: List[(TemplateEntity, TypeEntity)] = {
- sym.ancestors map { ancestor =>
- val typeEntity = makeType(sym.info.baseType(ancestor), this)
+
+ protected def linearizationFromSymbol(symbol: Symbol) = {
+ symbol.ancestors map { ancestor =>
+ val typeEntity = makeType(symbol.info.baseType(ancestor), this)
val tmplEntity = makeTemplate(ancestor) match {
case tmpl: DocTemplateImpl => tmpl registerSubClass this ; tmpl
case tmpl => tmpl
@@ -232,6 +243,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
+ val linearization = linearizationFromSymbol(sym)
def linearizationTemplates = linearization map { _._1 }
def linearizationTypes = linearization map { _._2 }
@@ -245,16 +257,20 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def subClasses = if (subClassesCache == null) Nil else subClassesCache.toList
- protected lazy val memberSyms =
+ val conversions = if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil
+
+ lazy val memberSyms =
// Only this class's constructors are part of its members, inherited constructors are not.
sym.info.members.filter(s => localShouldDocument(s) && (!s.isConstructor || s.owner == sym) && !isPureBridge(sym) )
- val members = memberSyms flatMap (makeMember(_, this))
- val templates = members collect { case c: DocTemplateEntity => c }
- val methods = members collect { case d: Def => d }
- val values = members collect { case v: Val => v }
- val abstractTypes = members collect { case t: AbstractType => t }
- val aliasTypes = members collect { case t: AliasType => t }
+ val members = (memberSyms.flatMap(makeMember(_, null, this))) :::
+ (conversions.flatMap((_.members))) // also take in the members from implicit conversions
+
+ val templates = members collect { case c: DocTemplateEntity => c }
+ val methods = members collect { case d: Def => d }
+ val values = members collect { case v: Val => v }
+ val abstractTypes = members collect { case t: AbstractType => t }
+ val aliasTypes = members collect { case t: AliasType => t }
override def isTemplate = true
def isDocTemplate = true
def companion = sym.companionSymbol match {
@@ -268,23 +284,33 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class PackageImpl(sym: Symbol, inTpl: => PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
+ override val linearization = {
+ val symbol = sym.info.members.find {
+ s => s.isPackageObject
+ } getOrElse sym
+ linearizationFromSymbol(symbol)
+ }
val packages = members collect { case p: Package => p }
}
abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
- abstract class NonTemplateMemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
+ abstract class NonTemplateMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl) extends MemberImpl(sym, implConv, inTpl) with NonTemplateMemberEntity {
override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
- lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
+ lazy val definitionName =
+ if (implConv == null) optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
+ else optimize(implConv.conversionQualifiedName + "#" + name)
def isUseCase = sym.isSynthetic
def isBridge = sym.isBridge
}
- abstract class NonTemplateParamMemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends NonTemplateMemberImpl(sym, inTpl) {
- def valueParams =
- sym.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
+ abstract class NonTemplateParamMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl) extends NonTemplateMemberImpl(sym, implConv, inTpl) {
+ def valueParams = {
+ val info = if (implConv eq null) sym.info else implConv.toType memberInfo sym
+ info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
}}
+ }
}
abstract class ParameterImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
@@ -356,7 +382,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
override def qualifiedName = "_root_"
override def inheritedFrom = Nil
override def isRootPackage = true
- override protected lazy val memberSyms =
+ override lazy val memberSyms =
(bSym.info.members ++ EmptyPackage.info.members) filter { s =>
s != EmptyPackage && s != RootPackage
}
@@ -380,11 +406,13 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else if (bSym.isPackage)
makeTemplate(bSym.owner) match {
case inPkg: PackageImpl => makePackage(bSym, inPkg) getOrElse (new NoDocTemplateImpl(bSym, inPkg))
+ case inNoDocTpl: NoDocTemplateImpl => new NoDocTemplateImpl(bSym, inNoDocTpl)
case _ => throw new Error("'" + bSym + "' must be in a package")
}
else if (templateShouldDocument(bSym))
makeTemplate(bSym.owner) match {
case inDTpl: DocTemplateImpl => makeDocTemplate(bSym, inDTpl)
+ case inNoDocTpl: NoDocTemplateImpl => new NoDocTemplateImpl(bSym, inNoDocTpl)
case _ => throw new Error("'" + bSym + "' must be in documentable template")
}
else
@@ -454,18 +482,19 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
/** */
- def makeMember(aSym: Symbol, inTpl: => DocTemplateImpl): List[MemberImpl] = {
+ // TODO: Should be able to override the type
+ def makeMember(aSym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl): List[MemberImpl] = {
def makeMember0(bSym: Symbol, _useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
if (bSym.isGetter && bSym.isLazy)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with Val {
override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor.
thisFactory.comment(bSym.accessed, inTpl) // This hack should be removed after analyser is fixed.
override def isLazyVal = true
override def useCaseOf = _useCaseOf
})
else if (bSym.isGetter && bSym.accessed.isMutable)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with Val {
override def isVar = true
override def useCaseOf = _useCaseOf
})
@@ -481,36 +510,36 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
else bSym
}
- Some(new NonTemplateParamMemberImpl(cSym, inTpl) with HigherKindedImpl with Def {
+ Some(new NonTemplateParamMemberImpl(cSym, implConv, inTpl) with HigherKindedImpl with Def {
override def isDef = true
override def useCaseOf = _useCaseOf
})
}
- else if (bSym.isConstructor)
- Some(new NonTemplateParamMemberImpl(bSym, inTpl) with Constructor {
+ else if (bSym.isConstructor && (implConv == null))
+ Some(new NonTemplateParamMemberImpl(bSym, implConv, inTpl) with Constructor {
override def isConstructor = true
def isPrimary = sym.isPrimaryConstructor
override def useCaseOf = _useCaseOf
})
else if (bSym.isGetter) // Scala field accessor or Java field
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with Val {
override def isVal = true
override def useCaseOf = _useCaseOf
})
else if (bSym.isAbstractType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with AbstractType {
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with TypeBoundsImpl with HigherKindedImpl with AbstractType {
override def isAbstractType = true
override def useCaseOf = _useCaseOf
})
- else if (bSym.isAliasType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with HigherKindedImpl with AliasType {
+ else if (bSym.isAliasType && bSym != AnyRefClass)
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with HigherKindedImpl with AliasType {
override def isAliasType = true
def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym)
override def useCaseOf = _useCaseOf
})
else if (bSym.isPackage)
inTpl match { case inPkg: PackageImpl => makePackage(bSym, inPkg) }
- else if ((bSym.isClass || bSym.isModule) && templateShouldDocument(bSym))
+ else if ((bSym.isClass || bSym.isModule || bSym == AnyRefClass) && templateShouldDocument(bSym))
Some(makeDocTemplate(bSym, inTpl))
else
None
@@ -520,16 +549,16 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
Nil
else {
val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) =>
- docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
+ docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
bSym
}
val member = makeMember0(aSym, None)
- if (allSyms.isEmpty)
- member.toList
- else
- // Use cases replace the original definitions - SI-5054
- allSyms flatMap { makeMember0(_, member) }
+ if (allSyms.isEmpty)
+ member.toList
+ else
+ // Use cases replace the original definitions - SI-5054
+ allSyms flatMap { makeMember0(_, member) }
}
}
@@ -639,9 +668,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// nameBuffer append stripPrefixes.foldLeft(pre.prefixString)(_ stripPrefix _)
// }
val bSym = normalizeTemplate(aSym)
- if (bSym.isNonClassType)
+ if (bSym.isNonClassType) {
nameBuffer append bSym.decodedName
- else {
+ } else {
val tpl = makeTemplate(bSym)
val pos0 = nameBuffer.length
refBuffer += pos0 -> (tpl, tpl.name.length)
@@ -654,7 +683,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
/* Refined types */
case RefinedType(parents, defs) =>
- val ignoreParents = Set(AnyClass, ObjectClass)
+ val ignoreParents = Set[Symbol](AnyClass, ObjectClass)
val filtParents = parents filterNot (x => ignoreParents(x.typeSymbol)) match {
case Nil => parents
case ps => ps
@@ -674,9 +703,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
nameBuffer append '⇒'
appendType0(result)
/* Polymorphic types */
- case PolyType(tparams, result) => assert(tparams nonEmpty)
+ case PolyType(tparams, result) => assert(tparams.nonEmpty)
// throw new Error("Polymorphic type '" + tpe + "' cannot be printed as a type")
- def typeParamsToString(tps: List[Symbol]): String = if(tps isEmpty) "" else
+ def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else
tps.map{tparam =>
tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams)
}.mkString("[", ", ", "]")
@@ -692,8 +721,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def templateShouldDocument(aSym: Symbol): Boolean = {
- // TODO: document sourceless entities (e.g., Any, etc), based on a new Setting to be added
- (aSym.isPackageClass || (aSym.sourceFile != null)) && localShouldDocument(aSym) &&
+ // TODO: document sourceless entities (e.g., Any, etc), based on a new Setting to be added
+ (aSym.isPackageClass || (aSym.sourceFile != null)) && localShouldDocument(aSym) &&
( aSym.owner == NoSymbol || templateShouldDocument(aSym.owner) ) && !isEmptyJavaObject(aSym)
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
new file mode 100644
index 0000000000..c3525037cd
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -0,0 +1,520 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2012 LAMP/EPFL
+ *
+ * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
+ *
+ * @author Vlad Ureche
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import comment._
+
+import scala.collection._
+import scala.util.matching.Regex
+
+import symtab.Flags
+import io._
+
+import model.{ RootPackage => RootPackageEntity }
+
+/**
+ * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
+ *
+ * Let's take this as an example:
+ * {{{
+ * object Test {
+ * class A
+ *
+ * class B {
+ * def foo = 1
+ * }
+ *
+ * class C extends B {
+ * def bar = 2
+ * class D
+ * }
+ *
+ * implicit def conv(a: A) = new C
+ * }
+ * }}}
+ *
+ * Overview:
+ * - scaladoc-ing the above classes, `A` will get two more methods, foo and bar, in addition to its own members
+ * - the nested classes (specifically `D` above), abstract types, type aliases and constructor members are not added to
+ * `A` (see makeMember0 in ModelFactory, last 3 cases)
+ * - the members added by implicit conversion are always listed under the implicit conversion, not under the class they
+ * actually come from (`foo` will be listed as coming from the implicit conversion to `C` instead of `B`) - see
+ * `definitionName` in MemberImpl
+ *
+ * Internals:
+ * TODO: Give an overview here
+ */
+trait ModelFactoryImplicitSupport {
+ thisFactory: ModelFactory with CommentFactory with TreeFactory =>
+
+ import global._
+ import global.analyzer._
+ import global.definitions._
+ import settings.hardcoded
+
+ // debugging:
+ val DEBUG: Boolean = settings.docImplicitsDebug.value
+ val ERROR: Boolean = true // currently we show all errors
+ @inline final def debug(msg: => String) = if (DEBUG) println(msg)
+ @inline final def error(msg: => String) = if (ERROR) println(msg)
+
+ /** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope.
+ * For example, if an implicit conversion requires that there is a Numeric[T] in scope:
+ * {{{
+ * class A[T]
+ * class B extends A[Int]
+ * class C extends A[String]
+ * implicit def pimpA[T: Numeric](a: A[T]): D
+ * }}}
+ * For B, no constraints are generated as Numeric[Int] is already in the default scope. On the other hand, for the
+ * conversion from C to D, depending on -implicits-show-all, the conversion can:
+ * - not be generated at all, since there's no Numeric[String] in scope (if run without -implicits-show-all)
+ * - be generated with a *weird* constraint, Numeric[String], as the user might add it by hand (if the flag is enabled)
+ */
+ val implicitsShowAll: Boolean = settings.docImplicitsShowAll.value
+ class ImplicitNotFound(tpe: Type) extends Exception("No implicit of type " + tpe + " found in scope.")
+
+ /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
+
+ class ImplicitConversionImpl(
+ val sym: Symbol,
+ val convSym: Symbol,
+ val toType: Type,
+ val constrs: List[Constraint],
+ inTpl: => DocTemplateImpl)
+ extends ImplicitConversion {
+
+ def source: DocTemplateEntity = inTpl
+
+ def targetType: TypeEntity = makeType(toType, inTpl)
+
+ def convertorOwner: TemplateEntity =
+ if (convSym != NoSymbol)
+ makeTemplate(convSym.owner)
+ else {
+ error("Scaladoc implicits: Implicit conversion from " + sym.tpe + " to " + toType + " done by " + convSym + " = NoSymbol!")
+ makeRootPackage.get // surely the root package was created :)
+ }
+
+ def convertorMethod: Either[MemberEntity, String] = {
+ var convertor: MemberEntity = null
+
+ convertorOwner match {
+ case doc: DocTemplateImpl =>
+ val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m }
+ if (convertors.length == 1)
+ convertor = convertors.head
+ case _ =>
+ }
+ if (convertor ne null)
+ Left(convertor)
+ else
+ Right(convSym.nameString)
+ }
+
+ def conversionShortName = convSym.nameString
+
+ def conversionQualifiedName = convertorOwner.qualifiedName + "." + convSym.nameString
+
+ lazy val constraints: List[Constraint] = constrs
+
+ val members: List[MemberEntity] = {
+ // Obtain the members inherited by the implicit conversion
+ var memberSyms = toType.members.filter(implicitShouldDocument(_))
+ val existingMembers = sym.info.members
+
+ // Debugging part :)
+ debug(sym.nameString + "\n" + "=" * sym.nameString.length())
+ debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType)
+
+ // Members inherited by implicit conversions cannot override actual members
+ memberSyms = memberSyms.filterNot((sym1: Symbol) =>
+ existingMembers.exists(sym2 => sym1.name == sym2.name &&
+ !isDistinguishableFrom(toType.memberInfo(sym1), sym.info.memberInfo(sym2))))
+
+ debug(" -> full type: " + toType)
+ if (constraints.length != 0) {
+ debug(" -> constraints: ")
+ constraints foreach { constr => debug(" - " + constr) }
+ }
+ debug(" -> members:")
+ memberSyms foreach (sym => debug(" - "+ sym.decodedName +" : " + sym.info))
+ debug("")
+
+ memberSyms.flatMap((makeMember(_, this, inTpl)))
+ }
+ }
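
The filter above ("members inherited by implicit conversions cannot override actual members") is easiest to see on a small example; the classes below are hypothetical user code, not part of the patch:

{{{
// B already declares foo with an indistinguishable signature, so the foo brought in
// by bToA is dropped from B's scaladoc listing, while bar is kept under the conversion.
class A { def foo = 1; def bar = 2 }
class B { def foo = 1 }
object B { implicit def bToA(b: B): A = new A }
}}}
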
+
+ /* ============== MAKER METHODS ============== */
+
+ /**
+ * Make the implicit conversion objects
+ *
+ * A word about the scope of the implicit conversions: currently we look at a very basic context composed of the
+ * default Scala imports (Predef._ for example) and the companion object of the current class, if one exists. In the
+ * future we might want to extend this to more complex scopes.
+ */
+ def makeImplicitConversions(sym: Symbol, inTpl: => DocTemplateImpl): List[ImplicitConversion] =
+ // Nothing and Null are somewhat special -- they can be transformed by any implicit conversion available in scope.
+ // But we don't want that, so we'll simply refuse to find implicit conversions for Nothing and Null
+ if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil
+ else {
+ var context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
+
+ val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams)
+ var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
+ conversions = conversions.filterNot(_.members.isEmpty)
+
+ // Filter out specialized conversions from array
+ if (sym == ArrayClass)
+ conversions = conversions.filterNot((conv: ImplicitConversion) =>
+ hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
+
+ // Filter out non-sensical conversions from value types
+ if (isPrimitiveValueType(sym.tpe))
+ conversions = conversions.filter((ic: ImplicitConversion) =>
+ hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
+
+ // Put the class-specific conversions in front
+ val (ownConversions, commonConversions) =
+ conversions.partition(conv => !hardcoded.commonConversionTargets.contains(conv.conversionQualifiedName))
+
+ ownConversions ::: commonConversions
+ }
+
+ /** makeImplicitConversion performs the heavy lifting to get the implicit listing:
+ * - for each possible conversion function (also called view)
+ * * figures out the final result of the view (to what is our class transformed?)
+ * * figures out the necessary constraints on the type parameters (such as T <: Int) and the context (such as Numeric[T])
+ * * lists all inherited members
+ *
+ * What? In detail:
+ * - say we start from a class A[T1, T2, T3, T4]
+ * - we have an implicit function (view) in scope:
+ * def pimpA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): PimpedA
+ * - A is converted to PimpedA ONLY if a couple of constraints are satisfied:
+ * * T1 must be equal to Int
+ * * T2 must be equal to Foo[Bar[X]]
+ * * T3 must be upper bounded by Long
+ * * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope
+ * - the final type is PimpedA and A therefore inherits a couple of members from PimpedA
+ *
+ * How?
+ * some notes:
+ * - Scala's type inference will want to solve all type parameters down to actual types, but we only want constraints
+ * to maintain generality
+ * - therefore, allViewsFrom wraps type parameters into "untouchable" type variables that only gather constraints,
+ * but are never solved down to a type
+ * - these must be reverted back to the type parameters and the constraints must be extracted and simplified (this is
+ * done by the uniteConstraints and boundedTParamsConstraints. Be sure to check them out)
+ * - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4]
+ * appears as a constraint
+ */
+ def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: => DocTemplateImpl): List[ImplicitConversion] =
+ if (result.tree == EmptyTree) Nil
+ else {
+ // `result` will contain the type of the view (= implicit conversion method)
+ // the search introduces untouchable type variables, but we want to get back to type parameters
+ val viewFullType = result.tree.tpe
+ // set the previously implicit parameters to being explicit
+
+ val (viewSimplifiedType, viewImplicitTypes) = removeImplicitParameters(viewFullType)
+
+ // TODO: Isolate this corner case :) - Predef.<%< and put it in the testsuite
+ if (viewSimplifiedType.params.length != 1) {
+ // This is known to be caused by the `<%<` object in Predef:
+ // {{{
+ // sealed abstract class <%<[-From, +To] extends (From => To) with Serializable
+ // object <%< {
+ // implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
+ // }
+ // }}}
+ // so we just won't generate an implicit conversion for implicit methods that only take implicit parameters
+ return Nil
+ }
+
+ // type the view application so we get the exact type of the result (not the formal type)
+ val viewTree = result.tree.setType(viewSimplifiedType)
+ val appliedTree = new ApplyImplicitView(viewTree, List(Ident("<argument>") setType viewTree.tpe.paramTypes.head))
+ val appliedTreeTyped: Tree = {
+ val newContext = context.makeImplicit(context.ambiguousErrors)
+ newContext.macrosEnabled = false // [Eugene] I assume you want macro signature, not macro expansion
+ val newTyper = global.analyzer.newTyper(newContext)
+ newTyper.silent(_.typed(appliedTree, global.analyzer.EXPRmode, WildcardType), false) match {
+
+ case global.analyzer.SilentResultValue(t: Tree) => t
+ case global.analyzer.SilentTypeError(err) =>
+ global.reporter.warning(sym.pos, err.toString)
+ return Nil
+ }
+ }
+
+ // now we have the final type:
+ val toType = wildcardToNothing(typeVarToOriginOrWildcard(appliedTreeTyped.tpe.finalResultType))
+
+ try {
+ // Transform bound constraints into scaladoc constraints
+ val implParamConstraints = makeImplicitConstraints(viewImplicitTypes, sym, context, inTpl)
+ val boundsConstraints = makeBoundedConstraints(sym.typeParams, constrs, inTpl)
+ // TODO: no substitution constraints appear in the library and compiler scaladoc. Maybe they can be removed?
+ val substConstraints = makeSubstitutionConstraints(result.subst, inTpl)
+ val constraints = implParamConstraints ::: boundsConstraints ::: substConstraints
+
+ List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl))
+ } catch {
+ case i: ImplicitNotFound =>
+ //println(" Eliminating: " + toType)
+ Nil
+ }
+ }
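A minimal, self-contained sketch of the enrichment pattern described in the header comment of makeImplicitConversion; every name below (ViewDemo, PimpedA's pimpedMember, and so on) is illustrative and not part of this patch:

import scala.language.implicitConversions

// Illustrative sketch only: an enrichment (view) whose applicability
// conditions are exactly the kind of constraints extracted above.
object ViewDemo extends App {
  class A[T1, T2, T3, T4]
  class Foo[T]; class Bar[T]; class X
  class PimpedA(a: Any) { def pimpedMember: String = "enriched" }

  // The view applies only when T1 = Int, T2 = Foo[Bar[X]], T3 <: Long,
  // and Numeric[T4] evidence is available (here via a context bound).
  implicit def pimpA[T3 <: Long, T4: Numeric](a: A[Int, Foo[Bar[X]], T3, T4]): PimpedA =
    new PimpedA(a)

  val ok = new A[Int, Foo[Bar[X]], Long, Int]
  println(ok.pimpedMember)  // all constraints satisfied: prints "enriched"
  // new A[String, Foo[Bar[X]], Long, Int]().pimpedMember   // would not compile: T1 is not Int
}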
+
+ def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: => DocTemplateImpl): List[Constraint] =
+ types.flatMap((tpe:Type) => {
+ // TODO: Before creating constraints, map typeVarToOriginOrWildcard on the implicitTypes
+ val implType = typeVarToOriginOrWildcard(tpe)
+ val qualifiedName = implType.typeSymbol.ownerChain.reverse.map(_.nameString).mkString(".")
+
+ var available: Option[Boolean] = None
+
+ // see: https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/gm_fr0RKzC4
+ //
+ // println(implType + " => " + implType.isTrivial)
+ // var tpes: List[Type] = List(implType)
+ // while (!tpes.isEmpty) {
+ // val tpe = tpes.head
+ // tpes = tpes.tail
+ // tpe match {
+ // case TypeRef(pre, sym, args) =>
+ // tpes = pre :: args ::: tpes
+ // println(tpe + " => " + tpe.isTrivial)
+ // case _ =>
+ // println(tpe + " (of type" + tpe.getClass + ") => " + tpe.isTrivial)
+ // }
+ // }
+ // println("\n")
+
+ // look for type variables in the type. If there are none, we can decide if the implicit is there or not
+ if (implType.isTrivial) {
+ try {
+ context.flushBuffer() /* any errors here should not prevent future findings */
+ // TODO: Not sure this is the right thing to do -- seems similar to what scalac should be doing
+ val context2 = context.make(context.unit, context.tree, sym.owner, context.scope, context.imports)
+ val search = inferImplicit(EmptyTree, tpe, false, false, context2, false)
+ context.flushBuffer() /* any errors here should not prevent future findings */
+
+ available = Some(search.tree != EmptyTree)
+ } catch {
+ case _ =>
+ }
+ }
+
+ available match {
+ case Some(true) =>
+ Nil
+ case Some(false) if (!implicitsShowAll) =>
+ // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String])
+ throw new ImplicitNotFound(implType)
+ case _ =>
+ val typeParamNames = sym.typeParams.map(_.name)
+
+ // TODO: This is maybe the worst hack I ever did - it's as dirty as hell, but it seems to work, so until I
+ // learn more about symbols, it'll have to do.
+ implType match {
+ case TypeRef(pre, sym, List(TypeRef(NoPrefix, targ, Nil))) if (typeParamNames contains targ.name) =>
+ hardcoded.knownTypeClasses.get(qualifiedName) match {
+ case Some(explanation) =>
+ List(new KnownTypeClassConstraint {
+ val typeParamName = targ.nameString
+ val typeExplanation = explanation
+ val typeClassEntity = makeTemplate(sym)
+ val implicitType: TypeEntity = makeType(implType, inTpl)
+ })
+ case None =>
+ List(new TypeClassConstraint {
+ val typeParamName = targ.nameString
+ val typeClassEntity = makeTemplate(sym)
+ val implicitType: TypeEntity = makeType(implType, inTpl)
+ })
+ }
+ case _ =>
+ List(new ImplicitInScopeConstraint{
+ val implicitType: TypeEntity = makeType(implType, inTpl)
+ })
+ }
+ }
+ })
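As a rough, user-level analogue of the three outcomes distinguished above (evidence found, evidence impossible, evidence depending on a type parameter), the following snippet is illustrative only and does not touch Scaladoc itself:

object ConstraintOutcomes extends App {
  // 1. No type variables and evidence exists: nothing needs to be documented
  //    as a constraint.
  val intEvidence = implicitly[Numeric[Int]]

  // 2. No type variables and no evidence can exist (e.g. Numeric[String]):
  //    the conversion is dropped from the docs unless -implicits-show-all is
  //    set. The next line would not compile:
  // implicitly[Numeric[String]]

  // 3. The implicit type still mentions a type parameter T: it stays in the
  //    docs as a constraint such as "T is Numeric", to be satisfied at each
  //    use site.
  def needsEvidence[T: Numeric](x: T): T = x
  println(needsEvidence(21))
}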
+
+ def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: => DocTemplateImpl): List[Constraint] =
+ (subst.from zip subst.to) map {
+ case (from, to) =>
+ new EqualTypeParamConstraint {
+ error("Scaladoc implicits: Unexpected type substitution constraint from: " + from + " to: " + to)
+ val typeParamName = from.toString
+ val rhs = makeType(to, inTpl)
+ }
+ }
+
+ def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: => DocTemplateImpl): List[Constraint] =
+ (tparams zip constrs) flatMap {
+ case (tparam, constr) => {
+ uniteConstraints(constr) match {
+ case (loBounds, upBounds) => (loBounds filter (_ != NothingClass.tpe), upBounds filter (_ != AnyClass.tpe)) match {
+ case (Nil, Nil) =>
+ Nil
+ case (List(lo), List(up)) if (lo == up) =>
+ List(new EqualTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ val rhs = makeType(lo, inTpl)
+ })
+ case (List(lo), List(up)) =>
+ List(new BoundedTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ val lowerBound = makeType(lo, inTpl)
+ val upperBound = makeType(up, inTpl)
+ })
+ case (List(lo), Nil) =>
+ List(new LowerBoundedTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ val lowerBound = makeType(lo, inTpl)
+ })
+ case (Nil, List(up)) =>
+ List(new UpperBoundedTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ val upperBound = makeType(up, inTpl)
+ })
+ case other =>
+ // this is likely an error on the lub/glb side
+ error("Scaladoc implicits: Error computing lub/glb for: " + (tparam, constr) + ":\n" + other)
+ Nil
+ }
+ }
+ }
+ }
+
+ /**
+ * uniteConstraints takes a TypeConstraint instance and simplifies the constraints inside
+ *
+ * Normally TypeConstraint contains multiple lower and upper bounds, and we want to reduce this to a lower and an
+ * upper bound. Here are a couple of catches we need to be aware of:
+ * - before finding a view (implicit method in scope that maps class A[T1,T2,.. Tn] to something else) the type
+ * parameters are transformed into "untouchable" type variables so that type inference does not attempt to
+ * fully solve them down to a type but rather constrains them on both sides just enough for the view to be
+ * applicable -- now, we want to transform those type variables back to the original type parameters
+ * - some of the bounds fail type inference and therefore refer to Nothing => when performing unification (lub, glb)
+ * they start looking ugly => we (unsoundly) transform Nothing to WildcardType so we fool the unification algorithms
+ * into thinking there's nothing there
+ * - we don't want the wildcard types surviving the unification so we replace them back to Nothings
+ */
+ def uniteConstraints(constr: TypeConstraint): (List[Type], List[Type]) =
+ try {
+ (List(wildcardToNothing(lub(constr.loBounds map typeVarToOriginOrWildcard))),
+ List(wildcardToNothing(glb(constr.hiBounds map typeVarToOriginOrWildcard))))
+ } catch {
+ // does this actually ever happen? (probably when type vars occur in the bounds)
+ case x: Throwable => (constr.loBounds.distinct, constr.hiBounds.distinct)
+ }
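A hand-worked toy example of the reduction described above, using an assumed two-class hierarchy rather than the compiler's actual lub/glb:

// Toy example only (not the compiler's lub/glb). With the hierarchy
//   Nothing <: Dog <: Animal <: AnyRef <: Any
// a TypeConstraint carrying lower bounds List(Nothing, Dog) and upper bounds
// List(Any, Animal) reduces to one bound per side:
//   lower = lub(Nothing, Dog) = Dog
//   upper = glb(Any, Animal)  = Animal
// which can then be rendered as the single constraint  Dog <: T <: Animal.
trait Animal
class Dog extends Animal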
+
+ /**
+ * Make implicits explicit - not used currently
+ */
+ object implicitToExplicit extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case MethodType(params, resultType) =>
+ MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
+ case other =>
+ other
+ }
+ }
+
+ /**
+ * removeImplicitParameters transforms implicit parameters from the view result type into constraints and
+ * returns the simplified type of the view
+ *
+ * for the example view:
+ * implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * the implicit view result type is:
+ * (a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * and the simplified type will be:
+ * MyClass[T] => PimpedMyClass[T]
+ */
+ def removeImplicitParameters(viewType: Type): (Type, List[Type]) = {
+
+ val params = viewType.paramss.flatten
+ val (normalParams, implParams) = params.partition(!_.isImplicit)
+ val simplifiedType = MethodType(normalParams, viewType.finalResultType)
+ val implicitTypes = implParams.map(_.tpe)
+
+ (simplifiedType, implicitTypes)
+ }
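A compilable rendering of the example from the comment above; the class bodies and the object name are placeholders added for illustration only:

import scala.language.implicitConversions

class MyClass[T]
class PimpedMyClass[T]

object RemoveImplicitsDemo {
  // full view type:            (a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
  // simplified, documented as:  MyClass[T] => PimpedMyClass[T]
  // with the implicit section turned into the separate constraint "Numeric[T] in scope"
  implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T] =
    new PimpedMyClass[T]
}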
+
+ /**
+ * typeVarToOriginOrWildcard transforms the "untouchable" type variables into either their origins (the original
+ * type parameters) or into wildcard types if nothing matches
+ */
+ object typeVarToOriginOrWildcard extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case tv: TypeVar =>
+ if (tv.constr.inst.typeSymbol == NothingClass)
+ WildcardType
+ else
+ tv.origin //appliedType(tv.origin.typeConstructor, tv.typeArgs map this)
+ case other =>
+ if (other.typeSymbol == NothingClass)
+ WildcardType
+ else
+ other
+ }
+ }
+
+ /**
+ * wildcardToNothing transforms wildcard types back to Nothing
+ */
+ object wildcardToNothing extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case WildcardType =>
+ NothingClass.tpe
+ case other =>
+ other
+ }
+ }
+
+ /** implicitShouldDocument decides whether a member inherited by implicit conversion should be documented */
+ def implicitShouldDocument(aSym: Symbol): Boolean = {
+ // We shouldn't document:
+ // - constructors
+ // - common methods (in Any, AnyRef, Object) as they are automatically removed
+ // - private and protected members (not accessible following an implicit conversion)
+ // - members starting with _ (usually reserved for internal stuff)
+ localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != ObjectClass) &&
+ (aSym.owner != AnyClass) && (aSym.owner != AnyRefClass) &&
+ (!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) &&
+ (aSym.isMethod || aSym.isGetter || aSym.isSetter) &&
+ (aSym.nameString != "getClass")
+ }
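To make the filtering rules concrete, here is a hypothetical enrichment class annotated with which members would survive the filter, following the bullets above:

// Illustrative only: of the members below, richValue and isRich would be
// listed on the enriched type; the others are dropped per the rules above.
class RichThing(val underlying: Int) {       // the constructor itself is not documented
  def richValue: Int = underlying * 2        // kept: public method
  def isRich: Boolean = underlying > 0       // kept: public method
  private def helper: Int = underlying       // dropped: private
  protected def guarded: Int = underlying    // dropped: protected
  def _internal: Int = underlying            // dropped: name starts with '_'
}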
+
+ /* To put it very bluntly: checks whether a method added by an implicit conversion (with type t1) can still be called
+ * when the class already defines a member with type t2. We assume the names of the two members coincide.
+ *
+ * The trick here is that the resultType does not matter - the condition for removal is that the paramss have the same
+ * structure (A => B => C may not override (A, B) => C) and that every parameter type of the implicit conversion's
+ * member is a subtype of the corresponding parameter type of the existing member */
+ def isDistinguishableFrom(t1: Type, t2: Type): Boolean =
+ if (t1.paramss.map(_.length) == t2.paramss.map(_.length)) {
+ for ((t1p, t2p) <- t1.paramss.flatten zip t2.paramss.flatten)
+ if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p))
+ return true // if on the corresponding parameter you give a type that is in t1 but not in t2
+ // example:
+ //   def foo(a: Either[Int, Double]): Int = 3
+ //   def foo(b: Left[Int, Double]): Int = 6
+ //   foo(Right(4.5d)) prints out 3 :)
+ false
+ } else true // the member structure is different: foo(3, 5) vs foo(3)(5)
+} \ No newline at end of file
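The shadowing situation that isDistinguishableFrom guards against can be observed with plain overloads; the snippet below is illustrative and independent of Scaladoc:

// Illustrative only: overload resolution picks the member whose parameter
// types fit the argument, which is why isDistinguishableFrom compares the
// parameter types of the implicitly added member against the existing one.
object ShadowingDemo extends App {
  def foo(a: Either[Int, Double]): Int = 3
  def foo(b: Left[Int, Double]): Int = 6

  println(foo(Right(4.5d)))   // only the Either overload applies: prints 3
  println(foo(Left(1)))       // the more specific Left overload wins: prints 6
}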
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
index 988f2e0ba9..f948d53c8b 100755
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -52,7 +52,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
if (asym.isSetter) asym = asym.getter(asym.owner)
makeTemplate(asym.owner) match {
case docTmpl: DocTemplateImpl =>
- val mbrs: List[MemberImpl] = makeMember(asym,docTmpl)
+ val mbrs: List[MemberImpl] = makeMember(asym, null, docTmpl)
mbrs foreach { mbr => refs += ((start, (mbr,end))) }
case _ =>
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index bc5cd4a958..e6bc76f676 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -13,6 +13,7 @@ import scala.collection._
import scala.util.matching.Regex
import scala.annotation.switch
import util.{NoPosition, Position}
+import language.postfixOps
/** The comment parser transforms raw comment strings into `Comment` objects.
* Call `parse` to run the parser. Note that the parser is stateless and
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
index 2ef34cdd96..0f89236861 100644
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
@@ -13,6 +13,7 @@ import util.FakePos
import dependencies._
import io.AbstractFile
+import language.implicitConversions
trait BuildManager {
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 12a3c4b3c6..edf0108e58 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -19,6 +19,7 @@ import scala.tools.nsc.io.Pickler._
import scala.tools.nsc.typechecker.DivergentImplicit
import scala.annotation.tailrec
import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
+import language.implicitConversions
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
@@ -351,6 +352,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
case item: WorkItem => Some(item.raiseMissing())
case _ => Some(())
}
+
+ // don't forget to service interrupt requests
+ val iqs = scheduler.dequeueAllInterrupts(_.execute())
+
debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size))
debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)"
.format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
@@ -530,6 +535,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
unit.defined.clear()
unit.synthetics.clear()
unit.toCheck.clear()
+ unit.checkedFeatures = Set()
unit.targetPos = NoPosition
unit.contexts.clear()
unit.problems.clear()
@@ -924,7 +930,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
val implicitlyAdded = viaView != NoSymbol
members.add(sym, pre, implicitlyAdded) { (s, st) =>
new TypeMember(s, st,
- context.isAccessible(s, pre, superAccess && !implicitlyAdded),
+ context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded),
inherited,
viaView)
}
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index 72e5ee42ed..49ba9d0aeb 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -269,7 +269,8 @@ self: scala.tools.nsc.Global =>
protected def isEligible(t: Tree) = !t.pos.isTransparent
override def traverse(t: Tree) {
t match {
- case tt : TypeTree if tt.original != null => traverse(tt.original)
+ case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) =>
+ traverse(tt.original)
case _ =>
if (t.pos includes pos) {
if (isEligible(t)) last = t
diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
index 34a0a4abb6..e2dcc48709 100644
--- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
@@ -79,7 +79,7 @@ trait ScratchPadMaker { self: Global =>
addSandbox(stat)
} else {
val resName = nextRes()
- val dispResName = resName filter ('$' !=)
+ val dispResName = resName filter ('$' != _)
patches += Patch(stat.pos.start, "val " + resName + " = ")
addSandbox(stat)
toPrint += resultString(nameType(dispResName, stat.tpe), resName)
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
index 40bbd3fa8e..d6102734ab 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
@@ -17,15 +17,11 @@ private[tests] trait CoreTestDefs
extends PresentationCompilerTestDef
with AskCompletionAt {
- object MemberPrinter {
- def apply(member: compiler.Member): String =
- "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`"
- }
-
- protected val marker = CompletionMarker
+ def memberPrinter(member: compiler.Member): String =
+ "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`"
override def runTest() {
- askAllSources(marker) { pos =>
+ askAllSources(CompletionMarker) { pos =>
askCompletionAt(pos)
} { (pos, members) =>
withResponseDelimiter {
@@ -35,7 +31,7 @@ private[tests] trait CoreTestDefs
reporter.println("retrieved %d members".format(members.size))
compiler ask { () =>
val filtered = members.filterNot(member => member.sym.name.toString == "getClass" || member.sym.isConstructor)
- reporter.println(filtered.map(MemberPrinter(_)).sortBy(_.toString()).mkString("\n"))
+ reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n"))
}
}
}
@@ -48,10 +44,8 @@ private[tests] trait CoreTestDefs
extends PresentationCompilerTestDef
with AskTypeAt {
- protected val marker = TypeMarker
-
override def runTest() {
- askAllSources(marker) { pos =>
+ askAllSources(TypeMarker) { pos =>
askTypeAt(pos)
} { (pos, tree) =>
withResponseDelimiter {
@@ -69,10 +63,8 @@ private[tests] trait CoreTestDefs
with AskTypeAt
with AskCompletionAt {
- protected val marker = HyperlinkMarker
-
override def runTest() {
- askAllSources(marker) { pos =>
+ askAllSources(HyperlinkMarker) { pos =>
askTypeAt(pos)(NullReporter)
} { (pos, tree) =>
if(tree.symbol == compiler.NoSymbol) {
@@ -81,7 +73,7 @@ private[tests] trait CoreTestDefs
else {
reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name)
val r = new Response[Position]
- // `tree.symbol.sourceFile` was discovered to be null when testing -Yvirtpatmat on the akka presentation test, where a position had shifted to point to `Int`
+ // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int`
// askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile!
val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null
val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null
@@ -90,8 +82,9 @@ private[tests] trait CoreTestDefs
compiler.askLinkPos(tree.symbol, source, r)
r.get match {
case Left(pos) =>
+ val resolvedPos = if (tree.symbol.pos.isDefined) tree.symbol.pos else pos
withResponseDelimiter {
- reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + tree.symbol.sourceFile.name)
+ reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + tree.symbol.sourceFile.name)
}
case Right(ex) =>
ex.printStackTrace()
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/FindOccurrences.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/FindOccurrences.scala
deleted file mode 100644
index e2a5a3e3ee..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/FindOccurrences.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.tools.nsc
-package interactive
-package tests.core
-
-import scala.tools.nsc.util.Position
-import scala.tools.nsc.util.SourceFile
-
-/** Find occurrences of `text` in the passed `sources`. */
-private[core] object FindOccurrences {
-
- def apply(sources: Seq[SourceFile])(text: String): Map[SourceFile, Seq[Position]] =
- allPositionsOf(sources, text)
-
- /** All positions of the given string in all source files. */
- private def allPositionsOf(sources: Seq[SourceFile], str: String): Map[SourceFile, Seq[Position]] =
- (for (s <- sources; p <- positionsOf(s, str)) yield p).groupBy(_.source)
-
- /** Return all positions of the given str in the given source file. */
- private def positionsOf(source: SourceFile, str: String): Seq[Position] = {
- val buf = new collection.mutable.ListBuffer[Position]
- var pos = source.content.indexOfSlice(str)
- while (pos >= 0) {
- buf += source.position(pos - 1) // we need the position before the first character of this marker
- pos = source.content.indexOfSlice(str, pos + 1)
- }
- buf.toList
- }
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
index c274e13976..d2baaf32c6 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
@@ -16,7 +16,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
* ask the type at all positions marked with `TypeMarker.marker` and println the result.
*/
private def askAllSourcesAsync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
- val positions = allPositionsOf(marker.marker).valuesIterator.toList.flatten
+ val positions = allPositionsOf(str = marker.marker)
val responses = for (pos <- positions) yield askAt(pos)
for ((pos, r) <- positions zip responses) withResponse(pos, r)(f)
@@ -26,13 +26,25 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
* response before going to the next one.
*/
private def askAllSourcesSync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
- val positions = allPositionsOf(marker.marker).valuesIterator.toList.flatten
+ val positions = allPositionsOf(str = marker.marker)
for (pos <- positions) withResponse(pos, askAt(pos))(f)
}
- private def allPositionsOf: String => Map[SourceFile, Seq[Position]] =
- FindOccurrences(sourceFiles) _
-
+ /** All positions of the given string in all source files. */
+ private def allPositionsOf(srcs: Seq[SourceFile] = sourceFiles, str: String): Seq[Position] =
+ for (s <- srcs; p <- positionsOf(s, str)) yield p
+
+ /** Return all positions of the given str in the given source file. */
+ private def positionsOf(source: SourceFile, str: String): Seq[Position] = {
+ val buf = new collection.mutable.ListBuffer[Position]
+ var pos = source.content.indexOfSlice(str)
+ while (pos >= 0) {
+ buf += source.position(pos - 1) // we need the position before the first character of this marker
+ pos = source.content.indexOfSlice(str, pos + 1)
+ }
+ buf.toList
+ }
+
private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) {
/** Return the filename:line:col version of this position. */
def showPos(pos: Position): String =
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
index 390363eca8..8b8be697cc 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
@@ -5,10 +5,6 @@ import scala.tools.nsc.util.Position
trait PresentationCompilerTestDef {
- def compiler: Global
-
- protected val marker: TestMarker
-
private[tests] def runTest(): Unit
protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter) {
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
index 518cb7bd76..21e90fe57f 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
@@ -13,7 +13,7 @@ private[tests] object SourcesCollector {
* */
def apply(base: Path, filter: SourceFilter): Array[SourceFile] = {
assert(base.isDirectory)
- base.walk.filter(filter).map(source).toArray
+ base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name)
}
private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile))
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
index 3a605975f4..605ecee6c7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
@@ -19,11 +19,6 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
extends ClassLoader(parent)
with ScalaClassLoader
{
- // private val defined = mutable.Map[String, Class[_]]()
-
- override protected def trace =
- sys.props contains "scala.debug.classloader"
-
protected def classNameToPath(name: String): String =
if (name endsWith ".class") name
else name.replace('.', '/') + ".class"
@@ -68,29 +63,13 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
case null => super.classBytes(name)
case file => file.toByteArray
}
- override def loadClass(name: String, resolve: Boolean) = {
- classLoaderLog("load " + name + ".")
- super.loadClass(name, resolve)
- }
override def findClass(name: String): JClass = {
val bytes = classBytes(name)
- classLoaderLog("find %s: %s".format(name,
- if (bytes.isEmpty) "failed."
- else bytes.size + " bytes."
- ))
- if (bytes.isEmpty)
+ if (bytes.length == 0)
throw new ClassNotFoundException(name)
- else {
- val clazz = defineClass(name, bytes, 0, bytes.length)
- // defined(name) = clazz
- clazz
- }
+ else
+ defineClass(name, bytes, 0, bytes.length)
}
- // Don't know how to construct an URL for something which exists only in memory
- // override def getResource(name: String): URL = findAbstractFile(name) match {
- // case null => super.getResource(name)
- // case file => new URL(...)
- // }
private val packages = mutable.Map[String, Package]()
diff --git a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
index 42a47896a2..fc68998225 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
@@ -20,24 +20,6 @@ trait CodeHandlers[T] {
// Expressions are composed of operators and operands.
def expr(code: String): T
- // A declaration introduces names and assigns them types.
- // It can form part of a class definition (§5.1) or of a refinement in a compound type (§3.2.7).
- // (Ed: aka abstract members.)
- //
- // ‘val’ ValDcl | ‘var’ VarDcl | ‘def’ FunDcl | ‘type’ {nl} TypeDcl
- def decl(code: String): T
-
- // A definition introduces names that denote terms or types.
- // It can form part of an object or class definition or it can be local to a block.
- // (Ed: aka concrete members.)
- //
- // ‘val’ PatDef | ‘var’ VarDef | ‘def’ FunDef | ‘type’ {nl} TypeDef |
- // [‘case’] ‘class’ ClassDef | [‘case’] ‘object’ ObjectDef | ‘trait’ TraitDef
- def defn(code: String): T
-
- // An import clause has the form import p.I where p is a stable identifier (§3.1) and I is an import expression.
- def impt(code: String): T
-
// Statements occur as parts of blocks and templates.
// A statement can be an import, a definition or an expression, or it can be empty.
// Statements used in the template of a class definition can also be declarations.
@@ -53,9 +35,6 @@ trait CodeHandlers[T] {
}
def expr(code: String) = try Some(self.expr(code)) catch handler
- def decl(code: String) = try Some(self.decl(code)) catch handler
- def defn(code: String) = try Some(self.defn(code)) catch handler
- def impt(code: String) = try Some(self.impt(code)) catch handler
def stmt(code: String) = try Some(self.stmt(code)) catch handler
def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
index 86f48b9d69..7a3e1913b6 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
@@ -14,13 +14,11 @@ import Completion._
trait Completion {
type ExecResult
def resetVerbosity(): Unit
- def execute(line: String): Option[ExecResult]
def completer(): ScalaCompleter
}
object NoCompletion extends Completion {
type ExecResult = Nothing
def resetVerbosity() = ()
- def execute(line: String) = None
def completer() = NullCompleter
}
@@ -44,8 +42,6 @@ object Completion {
&& !(code startsWith "./")
&& !(code startsWith "..")
)
- private val pathStarts = """/ \ ./ ../ ~/""" split ' ' toSet
- def looksLikePath(code: String) = (code != null) && (pathStarts exists (code startsWith _))
object Forwarder {
def apply(forwardTo: () => Option[CompletionAware]): CompletionAware = new CompletionAware {
def completions(verbosity: Int) = forwardTo() map (_ completions verbosity) getOrElse Nil
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
index c33675a83a..b3bbeb3169 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
@@ -12,40 +12,15 @@ import scala.reflect.NameTransformer
* will supply their own candidates and resolve their own paths.
*/
trait CompletionAware {
- /** The delimiters which are meaningful when this CompletionAware
- * object is in control.
- */
- // TODO
- // def delimiters(): List[Char] = List('.')
-
/** The complete list of unqualified Strings to which this
* object will complete.
*/
def completions(verbosity: Int): List[String]
- /** Default filter to apply to completions.
- */
- def filterNotFunction(s: String): Boolean = false
-
- /** Default sort.
- */
- def sortFunction(s1: String, s2: String): Boolean = s1 < s2
-
- /** Default map.
- */
- def mapFunction(s: String) = NameTransformer decode s
-
/** The next completor in the chain.
*/
def follow(id: String): Option[CompletionAware] = None
- /** What to return if this completion is given as a command. It
- * returns None by default, which means to allow the repl to interpret
- * the line normally. Returning Some(_) means the line will never
- * reach the scala interpreter.
- */
- def execute(id: String): Option[Any] = None
-
/** A list of useful information regarding a specific uniquely
* identified completion. This is specifically written for the
* following situation, but should be useful elsewhere too:
@@ -75,45 +50,13 @@ trait CompletionAware {
else comps
else follow(parsed.bufferHead) map (_ completionsFor parsed.bufferTail) getOrElse Nil
- results filterNot filterNotFunction map mapFunction sortWith (sortFunction _)
- }
-
- /** TODO - unify this and completionsFor under a common traverser.
- */
- def executionFor(parsed: Parsed): Option[Any] = {
- import parsed._
-
- if (isUnqualified && !isLastDelimiter && (completions(verbosity) contains buffer)) execute(buffer)
- else if (!isQualified) None
- else follow(bufferHead) flatMap (_ executionFor bufferTail)
+ results.sorted
}
}
object CompletionAware {
val Empty = new CompletionAware { def completions(verbosity: Int) = Nil }
- /** Artificial object demonstrating completion */
- // lazy val replVars = CompletionAware(
- // Map[String, CompletionAware](
- // "ids" -> CompletionAware(() => unqualifiedIds, completionAware _),
- // "synthVars" -> CompletionAware(() => allBoundNames filter isSynthVarName map (_.toString)),
- // "types" -> CompletionAware(() => allSeenTypes map (_.toString)),
- // "implicits" -> CompletionAware(() => allImplicits map (_.toString))
- // )
- // )
-
- // class Forwarder(underlying: CompletionAware) extends CompletionAware {
- // override def completions() = underlying.completions()
- // override def filterNotFunction(s: String) = underlying.filterNotFunction(s)
- // override def sortFunction(s1: String, s2: String) = underlying.sortFunction(s1, s2)
- // override def mapFunction(s: String) = underlying.mapFunction(s)
- // override def follow(id: String) = underlying.follow(id)
- // override def execute(id: String) = underlying.execute(id)
- // override def completionsFor(parsed: Parsed) = underlying.completionsFor(parsed)
- // override def executionFor(parsed: Parsed) = underlying.executionFor(parsed)
- // }
- //
-
def unapply(that: Any): Option[CompletionAware] = that match {
case x: CompletionAware => Some((x))
case _ => None
diff --git a/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala b/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
deleted file mode 100644
index d889cadf47..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-// Coming soon
-trait Dossiers {
- val intp: IMain
-
- import intp._
- import intp.global._
- import definitions._
-
- trait Dossier {
- def symbol: Symbol
- def staticType: Type
-
- def id = name.toString
- def name = symbol.name
- def normalizedType = staticType.typeSymbolDirect.tpe.normalize
- def simpleNameOfType = staticType.typeSymbol.simpleName
- def staticTypeString = staticType.toString
-
- override def toString = "Dossier on %s:\n static type %s (normalized %s)".format(
- symbol, staticType, normalizedType
- )
- }
-
- class TypeDossier(val symbol: TypeSymbol, val staticType: Type) extends Dossier {
- override def toString = super.toString
- }
-
- class TermDossier(val symbol: TermSymbol, val staticType: Type, val value: AnyRef) extends Dossier {
- def runtimeClass: JClass = value.getClass
- def runtimeSymbol: Symbol = getClassIfDefined(runtimeClass.getName)
- def runtimeType: Type = runtimeSymbol.tpe
- def runtimeTypeString = TypeStrings.fromClazz(runtimeClass)
-
- def runtimeTypedParam = NamedParamClass(id, runtimeTypeString, value)
- def staticTypedParam = NamedParamClass(id, staticTypeString, value)
-
- def isRuntimeTypeTighter = runtimeSymbol.ancestors contains normalizedType.typeSymbol
-
- override def toString = super.toString + (
- "\n runtime type %s/%s\n value %s".format(
- runtimeType, runtimeTypeString, value
- )
- )
- }
-}
-
diff --git a/src/compiler/scala/tools/nsc/interpreter/Eval.scala b/src/compiler/scala/tools/nsc/interpreter/Eval.scala
deleted file mode 100644
index 6a59cbb6bf..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Eval.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-trait Eval {
- /** Executes code looking for an implicit conversion from the type
- * of the given identifier to CompletionAware.
- */
- // def completionAwareImplicit[T](id: String) = {
- // val f1string = "%s => %s".format(typeForIdent(id).get, classOf[CompletionAware].getName)
- // val code = """{
- // | def f(implicit x: (%s) = null): %s = x
- // | val f1 = f
- // | if (f1 == null) None else Some(f1(%s))
- // |}""".stripMargin.format(f1string, f1string, id)
- //
- // evalExpr[Option[CompletionAware]](code)
- // }
-
- // Coming soon
- // implicit def string2liftedcode(s: String): LiftedCode = new LiftedCode(s)
- // case class LiftedCode(code: String) {
- // val lifted: String = {
- // beQuietDuring { interpret(code) }
- // eval2[String]("({ " + code + " }).toString")
- // }
- // def >> : String = lifted
- // }
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index a2ce8439de..79b429e26a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -13,7 +13,6 @@ trait ExprTyper {
val repl: IMain
import repl._
- import replTokens.{ Tokenizer }
import global.{ reporter => _, Import => _, _ }
import definitions._
import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
@@ -22,8 +21,7 @@ trait ExprTyper {
object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] {
def applyRule[T](code: String, rule: UnitParser => T): T = {
reporter.reset()
- val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
- val scanner = new UnitParser(unit)
+ val scanner = newUnitParser(code)
val result = rule(scanner)
if (!reporter.hasErrors)
@@ -31,25 +29,11 @@ trait ExprTyper {
result
}
- def tokens(code: String) = {
- reporter.reset()
- val unit = new CompilationUnit(new BatchSourceFile("<tokens>", code))
- val in = new UnitScanner(unit)
- in.init()
-
- new Tokenizer(in) tokenIterator
- }
- def decl(code: String) = CodeHandlers.fail("todo")
- def defn(code: String) = CodeHandlers.fail("todo")
+ def defns(code: String) = stmts(code) collect { case x: DefTree => x }
def expr(code: String) = applyRule(code, _.expr())
- def impt(code: String) = applyRule(code, _.importExpr())
- def impts(code: String) = applyRule(code, _.importClause())
- def stmts(code: String) = applyRule(code, _.templateStatSeq(false)._2)
- def stmt(code: String) = stmts(code) match {
- case List(t) => t
- case xs => CodeHandlers.fail("Not a single statement: " + xs.mkString(", "))
- }
+ def stmts(code: String) = applyRule(code, _.templateStats())
+ def stmt(code: String) = stmts(code).last // guaranteed nonempty
}
/** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
@@ -62,77 +46,60 @@ trait ExprTyper {
else Some(trees)
}
}
- def tokens(line: String) = beQuietDuring(codeParser.tokens(line))
+ // def parsesAsExpr(line: String) = {
+ // import codeParser._
+ // (opt expr line).isDefined
+ // }
- // TODO: integrate these into a CodeHandler[Type].
+ def symbolOfLine(code: String): Symbol = {
+ def asExpr(): Symbol = {
+ val name = freshInternalVarName()
+ // Typing it with a lazy val would give us the right type, but runs
+ // into compiler bugs with things like existentials, so we compile it
+ // behind a def and strip the NullaryMethodType which wraps the expr.
+ val line = "def " + name + " = {\n" + code + "\n}"
+
+ interpretSynthetic(line) match {
+ case IR.Success =>
+ val sym0 = symbolOfTerm(name)
+ // drop NullaryMethodType
+ val sym = sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
+ if (sym.info.typeSymbol eq UnitClass) NoSymbol
+ else sym
+ case _ => NoSymbol
+ }
+ }
+ def asDefn(): Symbol = {
+ val old = repl.definedSymbolList.toSet
+
+ interpretSynthetic(code) match {
+ case IR.Success =>
+ repl.definedSymbolList filterNot old match {
+ case Nil => NoSymbol
+ case sym :: Nil => sym
+ case syms => NoSymbol.newOverloaded(NoPrefix, syms)
+ }
+ case _ => NoSymbol
+ }
+ }
+ beQuietDuring(asExpr()) orElse beQuietDuring(asDefn())
+ }
- // XXX literals.
- // 1) Identifiers defined in the repl.
- // 2) A path loadable via getModule.
- // 3) Try interpreting it as an expression.
private var typeOfExpressionDepth = 0
def typeOfExpression(expr: String, silent: Boolean = true): Type = {
- repltrace("typeOfExpression(" + expr + ")")
if (typeOfExpressionDepth > 2) {
repldbg("Terminating typeOfExpression recursion for expression: " + expr)
return NoType
}
-
- def asQualifiedImport: Type = {
- val name = expr.takeWhile(_ != '.')
- typeOfExpression(importedTermNamed(name).fullName + expr.drop(name.length), true)
- }
- def asModule: Type = getModuleIfDefined(expr).tpe
- def asExpr: Type = {
- val lhs = freshInternalVarName()
- val line = "lazy val " + lhs + " =\n" + expr
-
- interpret(line, true) match {
- case IR.Success => typeOfExpression(lhs, true)
- case _ => NoType
- }
- }
-
- def evaluate(): Type = {
- typeOfExpressionDepth += 1
- try typeOfTerm(expr) orElse asModule orElse asExpr orElse asQualifiedImport
- finally typeOfExpressionDepth -= 1
- }
-
+ typeOfExpressionDepth += 1
// Don't presently have a good way to suppress undesirable success output
// while letting errors through, so it is first trying it silently: if there
// is an error, and errors are desired, then it re-evaluates non-silently
// to induce the error message.
- beSilentDuring(evaluate()) orElse beSilentDuring(typeOfDeclaration(expr)) orElse {
- if (!silent)
- evaluate()
-
- NoType
+ try beSilentDuring(symbolOfLine(expr).tpe) match {
+ case NoType if !silent => symbolOfLine(expr).tpe // generate error
+ case tpe => tpe
}
+ finally typeOfExpressionDepth -= 1
}
- // Since people will be giving us ":t def foo = 5" even though that is not an
- // expression, we have a means of typing declarations too.
- private def typeOfDeclaration(code: String): Type = {
- repltrace("typeOfDeclaration(" + code + ")")
- val obname = freshInternalVarName()
-
- interpret("object " + obname + " {\n" + code + "\n}\n", true) match {
- case IR.Success =>
- val sym = symbolOfTerm(obname)
- if (sym == NoSymbol) NoType else {
- // TODO: bitmap$n is not marked synthetic.
- val decls = sym.tpe.decls.toList filterNot (x => x.isConstructor || x.isPrivate || (x.name.toString contains "$"))
- repltrace("decls: " + decls)
- if (decls.isEmpty) NoType
- else cleanMemberDecl(sym, decls.last.name)
- }
- case _ =>
- NoType
- }
- }
- // def compileAndTypeExpr(expr: String): Option[Typer] = {
- // class TyperRun extends Run {
- // override def stopPhase(name: String) = name == "superaccessors"
- // }
- // }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala
deleted file mode 100644
index e1eb938b3c..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** TODO
- * Spaces, dots, and other things in filenames are not correctly handled.
- * space-escaping, knowing when we're inside quotes, etc. would be nice.
- */
-
-import io.{ Directory, Path }
-
-/** This isn't 100% clean right now, but it works and is simple. Rather
- * than delegate to new objects on each '/' in the path, we treat the
- * buffer like a path and process it directly.
- */
-object FileCompletion {
- def executionFor(buffer: String): Option[Path] = {
- Some(Directory.Home match {
- case Some(d) if buffer startsWith "~" => d / buffer.tail
- case _ => Path(buffer)
- }) filter (_.exists)
- }
-
- private def fileCompletionForwarder(buffer: String, where: Directory): List[String] = {
- completionsFor(where.path + buffer) map (_ stripPrefix where.path) toList
- }
-
- private def homeCompletions(buffer: String): List[String] = {
- require(buffer startsWith "~/")
- val home = Directory.Home getOrElse (return Nil)
- fileCompletionForwarder(buffer.tail, home) map ("~" + _)
- }
- private def cwdCompletions(buffer: String): List[String] = {
- require(buffer startsWith "./")
- val cwd = Directory.Current getOrElse (return Nil)
- fileCompletionForwarder(buffer.tail, cwd) map ("." + _)
- }
-
- def completionsFor(buffer: String): List[String] =
- if (buffer startsWith "~/") homeCompletions(buffer)
- else if (buffer startsWith "./") cwdCompletions(buffer)
- else {
- val p = Path(buffer)
- val (dir, stub) =
- // don't want /foo/. expanding "."
- if (p.name == ".") (p.parent, ".")
- else if (p.isDirectory) (p.toDirectory, "")
- else (p.parent, p.name)
-
- dir.list filter (_.name startsWith stub) map (_.path) toList
- }
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
index 108d4377a8..de778e7469 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
@@ -12,17 +12,18 @@ import java.util.concurrent.locks.ReentrantLock
import scala.sys.process.Process
import session._
import scala.util.Properties.{ jdkHome, javaVersion }
-import scala.tools.util.{ Signallable, Javap }
+import scala.tools.util.{ Javap }
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
import scala.concurrent.ops
import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
import interpreter._
-import io.{ File, Sources, Directory }
+import io.{ File, Directory }
import scala.reflect.NameTransformer._
import util.ScalaClassLoader
import ScalaClassLoader._
import scala.tools.util._
+import language.{implicitConversions, existentials}
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
@@ -48,33 +49,68 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
var settings: Settings = _
var intp: IMain = _
- override def echoCommandMessage(msg: String): Unit =
- intp.reporter.printMessage(msg)
+ @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
+ @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i
- def isAsync = !settings.Yreplsync.value
- lazy val power = new Power(intp, new StdReplVals(this))
- lazy val NoType = intp.global.NoType
+ /** Having inherited the difficult "var-ness" of the repl instance,
+ * I'm trying to work around it by moving operations into a class from
+ * which it will appear a stable prefix.
+ */
+ private def onIntp[T](f: IMain => T): T = f(intp)
+
+ class IMainOps[T <: IMain](val intp: T) {
+ import intp._
+ import global._
+
+ def printAfterTyper(msg: => String) =
+ intp.reporter printUntruncatedMessage afterTyper(msg)
+
+ /** Strip NullaryMethodType artifacts. */
+ private def replInfo(sym: Symbol) = {
+ sym.info match {
+ case NullaryMethodType(restpe) if sym.isAccessor => restpe
+ case info => info
+ }
+ }
+ def echoTypeStructure(sym: Symbol) =
+ printAfterTyper("" + deconstruct.show(replInfo(sym)))
+
+ def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
+ if (verbose) ILoop.this.echo("// Type signature")
+ printAfterTyper("" + replInfo(sym))
+
+ if (verbose) {
+ ILoop.this.echo("\n// Internal Type structure")
+ echoTypeStructure(sym)
+ }
+ }
+ }
+ implicit def stabilizeIMain(intp: IMain) = new IMainOps[intp.type](intp)
- // TODO
- // object opt extends AestheticSettings
- //
- @deprecated("Use `intp` instead.", "2.9.0")
- def interpreter = intp
+ /** TODO -
+ * -n normalize
+ * -l label with case class parameter names
+ * -c complete - leave nothing out
+ */
+ private def typeCommandInternal(expr: String, verbose: Boolean): Result = {
+ onIntp { intp =>
+ val sym = intp.symbolOfLine(expr)
+ if (sym.exists) intp.echoTypeSignature(sym, verbose)
+ else ""
+ }
+ }
- @deprecated("Use `intp` instead.", "2.9.0")
- def interpreter_= (i: Interpreter): Unit = intp = i
+ override def echoCommandMessage(msg: String) {
+ intp.reporter printUntruncatedMessage msg
+ }
+ def isAsync = !settings.Yreplsync.value
+ lazy val power = new Power(intp, new StdReplVals(this))
def history = in.history
/** The context class loader at the time this object was created */
protected val originalClassLoader = Thread.currentThread.getContextClassLoader
- // Install a signal handler so we can be prodded.
- private val signallable =
- if (isReplDebug)
- Signallable("Dump repl state.")(dumpCommand())
- else null
-
// classpath entries added via :cp
var addedClasspath: String = ""
@@ -103,32 +139,15 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
if (intp ne null) {
intp.close()
intp = null
- removeSigIntHandler()
}
}
class ILoopInterpreter extends IMain(settings, out) {
outer =>
- private class ThreadStoppingLineManager(classLoader: ClassLoader) extends Line.Manager(classLoader) {
- override def onRunaway(line: Line[_]): Unit = {
- val template = """
- |// She's gone rogue, captain! Have to take her out!
- |// Calling Thread.stop on runaway %s with offending code:
- |// scala> %s""".stripMargin
-
- echo(template.format(line.thread, line.code))
- // XXX no way to suppress the deprecation warning
- line.thread.stop()
- in.redrawLine()
- }
- }
override lazy val formatting = new Formatting {
def prompt = ILoop.this.prompt
}
- override protected def createLineManager(classLoader: ClassLoader): Line.Manager =
- new ThreadStoppingLineManager(classLoader)
-
override protected def parentClassLoader =
settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader )
}
@@ -238,47 +257,21 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand),
cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
- nullary("keybindings", "show how ctrl-[A-Z] and other keys are bound", keybindingsCommand),
cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
nullary("power", "enable power user mode", powerCmd),
nullary("quit", "exit the interpreter", () => Result(false, None)),
nullary("replay", "reset execution and replay all previous commands", replay),
nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
- // nullary("reset", "reset the interpreter, forgetting session values but retaining session types", replay),
shCommand,
nullary("silent", "disable/enable automatic printing of results", verbosity),
- cmd("type", "<expr>", "display the type of an expression without evaluating it", typeCommand),
+ cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand)
)
/** Power user commands */
lazy val powerCommands: List[LoopCommand] = List(
- nullary("dump", "displays a view of the interpreter's internal state", dumpCommand),
- nullary("vals", "gives information about the power mode repl vals", valsCommand),
- cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand),
- cmd("wrap", "<method>", "name of method to wrap around each repl line", wrapCommand) withLongHelp ("""
- |:wrap
- |:wrap clear
- |:wrap <method>
- |
- |Installs a wrapper around each line entered into the repl.
- |Currently it must be the simple name of an existing method
- |with the specific signature shown in the following example.
- |
- |def timed[T](body: => T): T = {
- | val start = System.nanoTime
- | try body
- | finally println((System.nanoTime - start) + " nanos elapsed.")
- |}
- |:wrap timed
- |
- |If given no argument, :wrap names the wrapper installed.
- |An argument of clear will remove the wrapper if any is active.
- |Note that wrappers do not compose (a new one replaces the old
- |one) and also that the :phase command uses the same machinery,
- |so setting :wrap will clear any :phase setting.
- """.stripMargin.trim)
+ cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
)
private def dumpCommand(): Result = {
@@ -321,10 +314,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- private def implicitsCommand(line: String): Result = {
- val intp = ILoop.this.intp
+ private def implicitsCommand(line: String): Result = onIntp { intp =>
import intp._
- import global.{ Symbol, afterTyper }
+ import global._
def p(x: Any) = intp.reporter.printMessage("" + x)
@@ -435,14 +427,14 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
// Still todo: modules.
- private def typeCommand(line: String): Result = {
- if (line.trim == "") ":type <expression>"
- else {
- val tp = intp.typeOfExpression(line, false)
- if (tp == NoType) "" // the error message was already printed
- else intp.global.afterTyper(tp.toString)
+ private def typeCommand(line0: String): Result = {
+ line0.trim match {
+ case "" => ":type [-v] <expression>"
+ case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true)
+ case s => typeCommandInternal(s, false)
}
}
+
private def warningsCommand(): Result = {
intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
}
@@ -460,60 +452,39 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
else res.show()
}
}
- private def keybindingsCommand(): Result = {
- if (in.keyBindings.isEmpty) "Key bindings unavailable."
- else {
- echo("Reading jline properties for default key bindings.")
- echo("Accuracy not guaranteed: treat this as a guideline only.\n")
- in.keyBindings foreach (x => echo ("" + x))
- }
- }
+
private def wrapCommand(line: String): Result = {
def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T"
- val intp = ILoop.this.intp
- val g: intp.global.type = intp.global
- import g._
-
- words(line) match {
- case Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => "Current execution wrapper: " + s
- }
- case "clear" :: Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
- }
- case wrapper :: Nil =>
- intp.typeOfExpression(wrapper) match {
- case PolyType(List(targ), MethodType(List(arg), restpe)) =>
- intp setExecutionWrapper intp.pathToTerm(wrapper)
- "Set wrapper to '" + wrapper + "'"
- case tp =>
- failMsg + (
- if (tp == g.NoType) "\nFound: <unknown>"
- else "\nFound: <unknown>"
- )
- }
- case _ => failMsg
+ onIntp { intp =>
+ import intp._
+ import global._
+
+ words(line) match {
+ case Nil =>
+ intp.executionWrapper match {
+ case "" => "No execution wrapper is set."
+ case s => "Current execution wrapper: " + s
+ }
+ case "clear" :: Nil =>
+ intp.executionWrapper match {
+ case "" => "No execution wrapper is set."
+ case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
+ }
+ case wrapper :: Nil =>
+ intp.typeOfExpression(wrapper) match {
+ case PolyType(List(targ), MethodType(List(arg), restpe)) =>
+ intp setExecutionWrapper intp.pathToTerm(wrapper)
+ "Set wrapper to '" + wrapper + "'"
+ case tp =>
+ failMsg + "\nFound: <unknown>"
+ }
+ case _ => failMsg
+ }
}
}
private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent"
private def phaseCommand(name: String): Result = {
- // This line crashes us in TreeGen:
- //
- // if (intp.power.phased set name) "..."
- //
- // Exception in thread "main" java.lang.AssertionError: assertion failed: ._7.type
- // at scala.Predef$.assert(Predef.scala:99)
- // at scala.tools.nsc.ast.TreeGen.mkAttributedQualifier(TreeGen.scala:69)
- // at scala.tools.nsc.ast.TreeGen.mkAttributedQualifier(TreeGen.scala:44)
- // at scala.tools.nsc.ast.TreeGen.mkAttributedRef(TreeGen.scala:101)
- // at scala.tools.nsc.ast.TreeGen.mkAttributedStableRef(TreeGen.scala:143)
- //
- // But it works like so, type annotated.
val phased: Phased = power.phased
import phased.NoPhaseName
@@ -554,14 +525,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private val crashRecovery: PartialFunction[Throwable, Boolean] = {
case ex: Throwable =>
- if (settings.YrichExes.value) {
- val sources = implicitly[Sources]
- echo("\n" + ex.getMessage)
- echo(
- if (isReplDebug) "[searching " + sources.path + " for exception contexts...]"
- else "[searching for exception contexts...]"
- )
- }
echo(intp.global.throwableAsString(ex))
ex match {
@@ -814,31 +777,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") {
interpretStartingWith(intp.mostRecentVar + code)
}
- else {
- def runCompletion =
- try in.completion execute code map (intp bindValue _)
- catch { case ex: Exception => None }
-
- /** Due to my accidentally letting file completion execution sneak ahead
- * of actual parsing this now operates in such a way that the scala
- * interpretation always wins. However to avoid losing useful file
- * completion I let it fail and then check the others. So if you
- * type /tmp it will echo a failure and then give you a Directory object.
- * It's not pretty: maybe I'll implement the silence bits I need to avoid
- * echoing the failure.
- */
- if (intp isParseable code) {
- val (code, result) = reallyInterpret
- if (power != null && code == IR.Error)
- runCompletion
-
- result
- }
- else runCompletion match {
- case Some(_) => None // completion hit: avoid the latent error
- case _ => reallyInterpret._2 // trigger the latent error
- }
+ else if (code.trim startsWith "//") {
+ // line comment, do nothing
+ None
}
+ else
+ reallyInterpret._2
}
// runs :load `file` on any files passed via -i
@@ -917,7 +861,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
/** process command-line arguments and do as they request */
def process(args: Array[String]): Boolean = {
- val command = new CommandLine(args.toList, msg => echo("scala: " + msg))
+ val command = new CommandLine(args.toList, echo)
def neededHelp(): String =
(if (command.settings.help.value) command.usageMsg + "\n" else "") +
(if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
@@ -930,13 +874,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
@deprecated("Use `process` instead", "2.9.0")
- def main(args: Array[String]): Unit = {
- if (isReplDebug)
- System.out.println(new java.util.Date)
-
- process(args)
- }
- @deprecated("Use `process` instead", "2.9.0")
def main(settings: Settings): Unit = process(settings)
}
@@ -999,32 +936,4 @@ object ILoop {
}
}
def run(lines: List[String]): String = run(lines map (_ + "\n") mkString)
-
- // provide the enclosing type T
- // in order to set up the interpreter's classpath and parent class loader properly
- def breakIf[T: Manifest](assertion: => Boolean, args: NamedParam*): Unit =
- if (assertion) break[T](args.toList)
-
- // start a repl, binding supplied args
- def break[T: Manifest](args: List[NamedParam]): Unit = savingContextLoader {
- val msg = if (args.isEmpty) "" else " Binding " + args.size + " value%s.".format(
- if (args.size == 1) "" else "s"
- )
- echo("Debug repl starting." + msg)
- val repl = new ILoop {
- override def prompt = "\ndebug> "
- }
- repl.settings = new Settings(echo)
- repl.settings.embeddedDefaults[T]
- repl.createInterpreter()
- repl.in = new JLineReader(new JLineCompletion(repl))
-
- // rebind exit so people don't accidentally call sys.exit by way of predef
- repl.quietRun("""def exit = println("Type :quit to resume program execution.")""")
- args foreach (p => repl.bind(p.name, p.tpe, p.value))
- repl.loop()
-
- echo("\nDebug repl exiting.")
- repl.closeInterpreter()
- }
}
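
Editor's note: the crashRecovery handler kept above is just a PartialFunction[Throwable, Boolean]. A minimal, self-contained sketch of that shape follows; the names and behaviour here are illustrative, not the repl's own.

object CrashRecoverySketch {
  // report the failure and tell the read/eval loop whether to continue
  val crashRecovery: PartialFunction[Throwable, Boolean] = {
    case ex: Throwable =>
      Console.err.println(ex.toString)   // stand-in for echo(intp.global.throwableAsString(ex))
      true                               // keep the loop alive after reporting
  }

  def main(args: Array[String]): Unit = {
    def risky(): Boolean = throw new RuntimeException("boom")
    val keepGoing =
      try risky()
      catch { case t: Throwable => crashRecovery(t) }
    println("continue loop: " + keepGoing)
  }
}
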
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
index 2f02748e8f..9072eaae46 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package interpreter
import util.Position
-import scala.tools.util.SignalManager
import scala.util.control.Exception.ignoring
/**
@@ -33,37 +32,6 @@ trait ILoopInit {
echoAndRefresh(msg)
}
- /** Try to install sigint handler: ignore failure. Signal handler
- * will interrupt current line execution if any is in progress.
- *
- * Attempting to protect the repl from accidental exit, we only honor
- * a single ctrl-C if the current buffer is empty: otherwise we look
- * for a second one within a short time.
- */
- protected def installSigIntHandler() {
- def onExit() {
- Console.println("") // avoiding "shell prompt in middle of line" syndrome
- sys.exit(1)
- }
- ignoring(classOf[Exception]) {
- SignalManager("INT") = {
- if (intp == null || intp.lineManager == null)
- onExit()
- else if (intp.lineManager.running)
- intp.lineManager.cancel()
- else if (in.currentLine != "") {
- // non-empty buffer, so make them hit ctrl-C a second time
- SignalManager("INT") = onExit()
- io.timer(5)(installSigIntHandler()) // and restore original handler if they don't
- }
- else onExit()
- }
- }
- }
- protected def removeSigIntHandler() {
- squashAndLog("removeSigIntHandler")(SignalManager("INT") = null)
- }
-
private val initLock = new java.util.concurrent.locks.ReentrantLock()
private val initCompilerCondition = initLock.newCondition() // signal the compiler is initialized
private val initLoopCondition = initLock.newCondition() // signal the whole repl is initialized
@@ -100,14 +68,12 @@ trait ILoopInit {
withLock { while (!initIsComplete) initLoopCondition.await() }
}
// private def warningsThunks = List(
- // () => intp.bind("lastWarnings", "" + manifest[List[(Position, String)]], intp.lastWarnings _),
+ // () => intp.bind("lastWarnings", "" + typeTag[List[(Position, String)]], intp.lastWarnings _),
// )
protected def postInitThunks = List[Option[() => Unit]](
Some(intp.setContextClassLoader _),
- if (isReplPower) Some(() => enablePowerMode(true)) else None,
- // do this last to avoid annoying uninterruptible startups
- Some(installSigIntHandler _)
+ if (isReplPower) Some(() => enablePowerMode(true)) else None
).flatten
// ++ (
// warningsThunks
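
Editor's note: with the signal handler gone, ILoopInit's startup synchronization rests on the ReentrantLock/Condition pair shown above. A hedged, standalone sketch of that handshake (InitHandshakeSketch, signalComplete and awaitComplete are my names, not the repl's):

import java.util.concurrent.locks.ReentrantLock

object InitHandshakeSketch {
  private val lock       = new ReentrantLock()
  private val ready      = lock.newCondition()
  private var isComplete = false                 // guarded by lock

  private def withLock[T](body: => T): T = {
    lock.lock()
    try body finally lock.unlock()
  }

  def signalComplete(): Unit = withLock { isComplete = true; ready.signalAll() }
  def awaitComplete(): Unit  = withLock { while (!isComplete) ready.await() }

  def main(args: Array[String]): Unit = {
    new Thread(new Runnable {
      def run(): Unit = { Thread.sleep(100); signalComplete() }
    }).start()
    awaitComplete()                              // blocks until the background thread signals
    println("initialization observed")
  }
}
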
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index c0f7d8412a..956e282b26 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -13,17 +13,18 @@ import scala.sys.BooleanProp
import io.VirtualDirectory
import scala.tools.nsc.io.AbstractFile
import reporters._
-import reporters.{Reporter => NscReporter}
import symtab.Flags
import scala.reflect.internal.Names
import scala.tools.util.PathResolver
-import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional, Indenter }
+import scala.tools.nsc.util.ScalaClassLoader
import ScalaClassLoader.URLClassLoader
-import Exceptional.unwrap
+import scala.tools.nsc.util.Exceptional.unwrap
import scala.collection.{ mutable, immutable }
import scala.util.control.Exception.{ ultimately }
import IMain._
import java.util.concurrent.Future
+import typechecker.Analyzer
+import language.implicitConversions
/** directory to save .class files to */
private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("(memory)", None) {
@@ -91,7 +92,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* on the future.
*/
private var _classLoader: AbstractFileClassLoader = null // active classloader
- private var _lineManager: Line.Manager = null // logic for individual lines
private val _compiler: Global = newCompiler(settings, reporter) // our private compiler
private val nextReqId = {
@@ -139,7 +139,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
lazy val formatting: Formatting = new Formatting {
val prompt = Properties.shellPromptString
}
- lazy val reporter: ConsoleReporter = new ReplReporter(this)
+ lazy val reporter: ReplReporter = new ReplReporter(this)
import formatting._
import reporter.{ printMessage, withoutTruncating }
@@ -155,6 +155,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
catch AbstractOrMissingHandler()
}
+ private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
+
// argument is a thunk to execute after init is done
def initialize(postInitSignal: => Unit) {
synchronized {
@@ -193,8 +195,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
import global._
import definitions.{
- ScalaPackage, JavaLangPackage, PredefModule, RootClass,
- getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass
+ ScalaPackage, JavaLangPackage, RootClass,
+ getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass,
+ termMember, typeMember
}
private implicit def privateTreeOps(t: Tree): List[Tree] = {
@@ -203,8 +206,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}).toList
}
- implicit def installReplTypeOps(tp: Type): ReplTypeOps = new ReplTypeOps(tp)
- class ReplTypeOps(tp: Type) {
+ implicit class ReplTypeOps(tp: Type) {
def orElse(other: => Type): Type = if (tp ne NoType) tp else other
def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
}
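
Editor's note: the hunk above trades an implicit def plus wrapper class for a 2.10 implicit class. A minimal sketch of the same declaration form, using String since the compiler's Type is not available outside the repl:

object ImplicitClassSketch {
  // one declaration replaces `implicit def installX(...)` plus `class X(...)`
  implicit class OrElseOps(s: String) {
    def orElseIfEmpty(other: => String): String = if (s.nonEmpty) s else other
  }

  def main(args: Array[String]): Unit = {
    println("" orElseIfEmpty "fallback")       // fallback
    println("value" orElseIfEmpty "fallback")  // value
  }
}
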
@@ -226,10 +228,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
import naming._
- // object dossiers extends {
- // val intp: imain.type = imain
- // } with Dossiers { }
- // import dossiers._
+ object deconstruct extends {
+ val global: imain.global.type = imain.global
+ } with StructuredTypeStrings
lazy val memberHandlers = new {
val intp: imain.type = imain
@@ -265,21 +266,15 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def executionWrapper = _executionWrapper
def setExecutionWrapper(code: String) = _executionWrapper = code
def clearExecutionWrapper() = _executionWrapper = ""
- def lineManager = _lineManager
/** interpreter settings */
lazy val isettings = new ISettings(this)
- /** Create a line manager. Overridable. */
- protected def noLineManager = ReplPropsKludge.noThreadCreation(settings)
- protected def createLineManager(classLoader: ClassLoader): Line.Manager = new Line.Manager(classLoader)
-
/** Instantiate a compiler. Overridable. */
- protected def newCompiler(settings: Settings, reporter: NscReporter) = {
+ protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
settings.outputDirs setSingleOutput virtualDirectory
settings.exposeEmptyPackage.value = true
-
- Global(settings, reporter)
+ new Global(settings, reporter) with ReplGlobal
}
/** Parent classloader. Overridable. */
@@ -305,19 +300,14 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
ensureClassLoader()
}
final def ensureClassLoader() {
- if (_classLoader == null) {
+ if (_classLoader == null)
_classLoader = makeClassLoader()
- _lineManager = if (noLineManager) null else createLineManager(_classLoader)
- }
}
def classLoader: AbstractFileClassLoader = {
ensureClassLoader()
_classLoader
}
private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) {
- private[IMain] var traceClassLoading = isReplTrace
- override protected def trace = super.trace || traceClassLoading
-
/** Overridden here to try translating a simple name to the generated
* class name if the original attempt fails. This method is used by
* getResourceAsStream as well as findClass.
@@ -345,7 +335,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
classLoader.setAsContext()
// this is risky, but it's our only possibility to make default reflexive mirror to work with REPL
- // so far we have only used the default mirror to create a few manifests for the compiler
+ // so far we have only used the default mirror to create a few tags for the compiler
// so it shouldn't be in conflict with our classloader, especially since it respects its parent
scala.reflect.mirror.classLoader = classLoader
}
@@ -363,7 +353,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def flatName(id: String) = optFlatName(id) getOrElse id
def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
- def allDefinedNames = definedNameMap.keys.toList sortBy (_.toString)
+ def allDefinedNames = definedNameMap.keys.toList.sorted
def pathToType(id: String): String = pathToName(newTypeName(id))
def pathToTerm(id: String): String = pathToName(newTermName(id))
def pathToName(name: Name): String = {
@@ -521,7 +511,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
trees.last match {
case _:Assign => // we don't want to include assignments
case _:TermTree | _:Ident | _:Select => // ... but do want other unnamed terms.
- val varName = if (synthetic) freshInternalVarName() else ("" + freshUserTermName())
+ val varName = if (synthetic) freshInternalVarName() else freshUserVarName()
val rewrittenLine = (
// In theory this would come out the same without the 1-specific test, but
// it's a cushion against any more sneaky parse-tree position vs. code mismatches:
@@ -587,8 +577,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* e.g. that there were no parse errors.
*/
def interpret(line: String): IR.Result = interpret(line, false)
+ def interpretSynthetic(line: String): IR.Result = interpret(line, true)
def interpret(line: String, synthetic: Boolean): IR.Result = {
def loadAndRunReq(req: Request) = {
+ classLoader.setAsContext()
val (result, succeeded) = req.loadAndRun
/** To our displeasure, ConsoleReporter offers only printMessage,
@@ -660,7 +652,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
result
}
def directBind(p: NamedParam): IR.Result = directBind(p.name, p.tpe, p.value)
- def directBind[T: Manifest](name: String, value: T): IR.Result = directBind((name, value))
+ def directBind[T: ClassTag](name: String, value: T): IR.Result = directBind((name, value))
def rebind(p: NamedParam): IR.Result = {
val name = p.name
@@ -676,11 +668,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
if (ids.isEmpty) IR.Success
else interpret("import " + ids.mkString(", "))
- def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
- def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
- def bind[T: Manifest](name: String, value: T): IR.Result = bind((name, value))
- def bindValue(x: Any): IR.Result = bindValue("" + freshUserTermName(), x)
- def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
+ def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
+ def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
+ def bind[T: TypeTag](name: String, value: T): IR.Result = bind((name, value))
+ def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x)
+ def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x)
+ def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
/** Reset this interpreter, forgetting all user-specified requests. */
def reset() {
@@ -717,33 +710,14 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val printName = sessionNames.print
val resultName = sessionNames.result
- class LineExceptional(ex: Throwable) extends Exceptional(ex) {
- private def showReplInternal = isettings.showInternalStackTraces
-
- override def spanFn(frame: JavaStackFrame) =
- if (showReplInternal) super.spanFn(frame)
- else !(frame.className startsWith evalPath)
-
- override def contextPrelude = super.contextPrelude + (
- if (showReplInternal) ""
- else "/* The repl internal portion of the stack trace is elided. */\n"
- )
- }
def bindError(t: Throwable) = {
if (!bindExceptions) // avoid looping if already binding
throw t
val unwrapped = unwrap(t)
withLastExceptionLock[String]({
- if (opt.richExes) {
- val ex = new LineExceptional(unwrapped)
- directBind[Exceptional]("lastException", ex)
- ex.contextHead + "\n(access lastException for the full trace)"
- }
- else {
- directBind[Throwable]("lastException", unwrapped)
- util.stackTraceString(unwrapped)
- }
+ directBind[Throwable]("lastException", unwrapped)
+ util.stackTraceString(unwrapped)
}, util.stackTraceString(unwrapped))
}
@@ -799,9 +773,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
*/
def resolvePathToSymbol(accessPath: String): Symbol = {
val readRoot = getRequiredModule(readPath) // the outermost wrapper
- (accessPath split '.').foldLeft(readRoot) { (sym, name) =>
- if (name == "") sym else
- afterTyper(sym.info member newTermName(name))
+ (accessPath split '.').foldLeft(readRoot: Symbol) {
+ case (sym, "") => sym
+ case (sym, name) => afterTyper(termMember(sym, name))
}
}
/** We get a bunch of repeated warnings for reasons I haven't
@@ -825,7 +799,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
def lastWarnings: List[(Position, String)] = (
if (lastRun == null) Nil
- else removeDupWarnings(lastRun.deprecationWarnings.reverse) ++ lastRun.uncheckedWarnings.reverse
+ else removeDupWarnings(lastRun.allConditionalWarnings flatMap (_.warnings))
)
private var lastRun: Run = _
private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
@@ -852,6 +826,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** handlers for each tree in this request */
val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
+ def defHandlers = handlers collect { case x: MemberDefHandler => x }
/** all (public) names defined by these statements */
val definedNames = handlers flatMap (_.definedNames)
@@ -863,6 +838,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def termNames = handlers flatMap (_.definesTerm)
def typeNames = handlers flatMap (_.definesType)
def definedOrImported = handlers flatMap (_.definedOrImported)
+ def definedSymbolList = defHandlers flatMap (_.definedSymbols)
+
+ def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name))
+ def definedTermSymbol(name: String) = definedSymbols(newTermName(name))
/** Code to import bound names from previous lines - accessPath is code to
* append to objectName to access anything bound by request.
@@ -915,8 +894,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
}
- def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
-
private object ResultObjectSourceCode extends CodeAssembler[MemberHandler] {
/** We only want to generate this code when the result
* is a value which can be referred to as-is.
@@ -970,6 +947,16 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
typeOf
typesOfDefinedTerms
+ // Assign symbols to the original trees
+ // TODO - just use the new trees.
+ defHandlers foreach { dh =>
+ val name = dh.member.name
+ definedSymbols get name foreach { sym =>
+ dh.member setSymbol sym
+ repldbg("Set symbol of " + name + " to " + sym.defString)
+ }
+ }
+
// compile the result-extraction object
beQuietDuring {
savingSettings(_.nowarn.value = true) {
@@ -986,46 +973,28 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbol.simpleName)
- private def typeMap[T](f: Type => T): Map[Name, T] = {
- termNames ++ typeNames map (x => x -> f(cleanMemberDecl(resultSymbol, x))) toMap
- }
+ private def typeMap[T](f: Type => T) =
+ mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))
/** Types of variables defined by this request. */
- lazy val compilerTypeOf = typeMap[Type](x => x)
+ lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType
/** String representations of same. */
lazy val typeOf = typeMap[String](tp => afterTyper(tp.toString))
// lazy val definedTypes: Map[Name, Type] = {
// typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
// }
- lazy val definedSymbols: Map[Name, Symbol] = (
+ lazy val definedSymbols = (
termNames.map(x => x -> applyToResultMember(x, x => x)) ++
- typeNames.map(x => x -> compilerTypeOf.get(x).map(_.typeSymbol).getOrElse(NoSymbol))
- ).toMap
+ typeNames.map(x => x -> compilerTypeOf(x).typeSymbol)
+ ).toMap[Name, Symbol] withDefaultValue NoSymbol
- lazy val typesOfDefinedTerms: Map[Name, Type] =
- termNames map (x => x -> applyToResultMember(x, _.tpe)) toMap
+ lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))
/** load and run the code using reflection */
def loadAndRun: (String, Boolean) = {
- if (lineManager == null) return {
- try { ("" + (lineRep call sessionNames.print), true) }
- catch { case ex => (lineRep.bindError(ex), false) }
- }
- import interpreter.Line._
-
- try {
- val execution = lineManager.set(originalLine)(lineRep call sessionNames.print)
- execution.await()
-
- execution.state match {
- case Done => ("" + execution.get(), true)
- case Threw => (lineRep.bindError(execution.caught()), false)
- case Cancelled => ("Execution interrupted by signal.\n", false)
- case Running => ("Execution still running! Seems impossible.", false)
- }
- }
- finally lineManager.clear()
+ try { ("" + (lineRep call sessionNames.print), true) }
+ catch { case ex => (lineRep.bindError(ex), false) }
}
override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
@@ -1075,18 +1044,21 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
def valueOfTerm(id: String): Option[AnyRef] =
- requestForIdent(id) flatMap (_.getEval)
+ requestForName(newTermName(id)) flatMap (_.getEval)
def classOfTerm(id: String): Option[JClass] =
valueOfTerm(id) map (_.getClass)
def typeOfTerm(id: String): Type = newTermName(id) match {
case nme.ROOTPKG => definitions.RootClass.tpe
- case name => requestForName(name) flatMap (_.compilerTypeOf get name) getOrElse NoType
+ case name => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name)
}
+ def symbolOfType(id: String): Symbol =
+ requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id)
+
def symbolOfTerm(id: String): Symbol =
- requestForIdent(id) flatMap (_.definedSymbols get newTermName(id)) getOrElse NoSymbol
+ requestForIdent(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id)
def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
classOfTerm(id) flatMap { clazz =>
@@ -1116,32 +1088,30 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
}
- object replTokens extends {
- val global: imain.global.type = imain.global
- } with ReplTokens { }
-
- private object exprTyper extends {
+ object exprTyper extends {
val repl: IMain.this.type = imain
} with ExprTyper { }
def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
+
+ def symbolOfLine(code: String): Symbol =
+ exprTyper.symbolOfLine(code)
+
def typeOfExpression(expr: String, silent: Boolean = true): Type =
exprTyper.typeOfExpression(expr, silent)
- def prettyPrint(code: String) =
- replTokens.prettyPrint(exprTyper tokens code)
-
protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
- def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName
- def definedTypes = onlyTypes(allDefinedNames)
- def definedSymbols = prevRequests.toSet flatMap ((x: Request) => x.definedSymbols.values)
+ def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName
+ def definedTypes = onlyTypes(allDefinedNames)
+ def definedSymbols = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol]
+ def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) filterNot (s => isInternalTermName(s.name))
// Terms with user-given names (i.e. not res0 and not synthetic)
def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))
- private def findName(name: Name) = definedSymbols find (_.name == name)
+ private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol
/** Translate a repl-defined identifier into a Symbol.
*/
@@ -1150,15 +1120,19 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def types(name: String): Symbol = {
val tpname = newTypeName(name)
- findName(tpname) getOrElse getClassIfDefined(tpname)
+ findName(tpname) orElse getClassIfDefined(tpname)
}
def terms(name: String): Symbol = {
val termname = newTypeName(name)
- findName(termname) getOrElse getModuleIfDefined(termname)
+ findName(termname) orElse getModuleIfDefined(termname)
}
- def types[T: ClassManifest] : Symbol = types(classManifest[T].erasure.getName)
- def terms[T: ClassManifest] : Symbol = terms(classManifest[T].erasure.getName)
- def apply[T: ClassManifest] : Symbol = apply(classManifest[T].erasure.getName)
+ // [Eugene to Paul] possibly you could make use of TypeTags here
+ def types[T: ClassTag] : Symbol = types(classTag[T].erasure.getName)
+ def terms[T: ClassTag] : Symbol = terms(classTag[T].erasure.getName)
+ def apply[T: ClassTag] : Symbol = apply(classTag[T].erasure.getName)
+
+ def classSymbols = allDefSymbols collect { case x: ClassSymbol => x }
+ def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x }
/** the previous requests this interpreter has processed */
private var executingRequest: Request = _
@@ -1167,7 +1141,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
private val definedNameMap = mutable.Map[Name, Request]()
private val directlyBoundNames = mutable.Set[Name]()
- private def allHandlers = prevRequestList flatMap (_.handlers)
+ def allHandlers = prevRequestList flatMap (_.handlers)
+ def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x }
+ def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol)
+
def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last
def prevRequestList = prevRequests.toList
def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct
@@ -1204,12 +1181,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def isShow = code.lines exists (_.trim endsWith "// show")
def isShowRaw = code.lines exists (_.trim endsWith "// raw")
- // checking for various debug signals
- if (isShowRaw)
- replTokens withRawTokens prettyPrint(code)
- else if (repllog.isTrace || isShow)
- prettyPrint(code)
-
// old style
beSilentDuring(parse(code)) foreach { ts =>
ts foreach { t =>
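
Editor's note: two small idioms recur through the IMain changes above: maps built withDefaultValue (compilerTypeOf, definedSymbols) and Option.fold replacing flatMap/getOrElse chains (typeOfTerm, symbolOfTerm). A hedged sketch with plain strings in place of compiler Types and Symbols:

object LookupIdioms {
  // withDefaultValue: missing keys yield a sentinel instead of throwing
  val typeOfName: Map[String, String] = Map("x" -> "Int") withDefaultValue "<notype>"

  // Option.fold: one expression covers both the empty and the defined case
  def lookup(id: String): String = typeOfName.get(id).fold("<notype>")(identity)

  def main(args: Array[String]): Unit = {
    println(typeOfName("y"))   // <notype>, via the default value
    println(lookup("x"))       // Int, via fold
  }
}
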
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
index 55706f4fd2..241ba5fa4a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -22,7 +22,6 @@ trait InteractiveReader {
def history: History
def completion: Completion
- def keyBindings: List[KeyBinding]
def eraseLine(): Unit
def redrawLine(): Unit
def currentLine: String
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
index a86462ad5f..b1e6a9d7d9 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -194,14 +194,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
// literal Ints, Strings, etc.
object literals extends CompletionAware {
- def simpleParse(code: String): Tree = {
- val unit = new CompilationUnit(new util.BatchSourceFile("<console>", code))
- val scanner = new syntaxAnalyzer.UnitParser(unit)
- val tss = scanner.templateStatSeq(false)._2
-
- if (tss.size == 1) tss.head else EmptyTree
- }
-
+ def simpleParse(code: String): Tree = newUnitParser(code).templateStats().last
def completions(verbosity: Int) = Nil
override def follow(id: String) = simpleParse(id) match {
@@ -286,24 +279,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
if (parsed.isEmpty) xs map ("." + _) else xs
}
- // chasing down results which won't parse
- // This used to work fine, now it reports a type error before any
- // exception gets to us. See SI-5657. Don't have time to deal with
- // it, so disabling everything.
- def execute(line: String): Option[ExecResult] = {
- return None // disabled
-
- val parsed = Parsed(line)
- def noDotOrSlash = line forall (ch => ch != '.' && ch != '/')
-
- if (noDotOrSlash) None // we defer all unqualified ids to the repl.
- else {
- (ids executionFor parsed) orElse
- (rootClass executionFor parsed) orElse
- (FileCompletion executionFor line)
- }
- }
-
// generic interface for querying (e.g. interpreter loop, testing)
def completions(buf: String): List[String] =
topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
@@ -367,15 +342,9 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
if (!looksLikeInvocation(buf)) None
else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
- def regularCompletion = tryCompletion(mkDotted, topLevelFor)
- def fileCompletion =
- if (!looksLikePath(buf)) None
- else tryCompletion(mkUndelimited, FileCompletion completionsFor _.buffer)
-
def tryAll = (
lastResultCompletion
- orElse regularCompletion
- orElse fileCompletion
+ orElse tryCompletion(mkDotted, topLevelFor)
getOrElse Candidates(cursor, Nil)
)
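
Editor's note: tryAll above now reduces to a single orElse/getOrElse chain over optional completion sources. A self-contained sketch of that fallback shape, with hypothetical sources standing in for lastResultCompletion and topLevelFor:

object CompletionFallback {
  def lastResultCompletion(buf: String): Option[List[String]] =
    if (buf startsWith ".") Some(List("res0" + buf)) else None

  def regularCompletion(buf: String): Option[List[String]] =
    if (buf.nonEmpty) Some(List(buf + "Length", buf + "Size")) else None

  def complete(buf: String): List[String] = (
    lastResultCompletion(buf)
      orElse regularCompletion(buf)
      getOrElse Nil
  )

  def main(args: Array[String]): Unit = {
    println(complete(".toString"))  // served by the last-result source
    println(complete("foo"))        // falls through to the regular source
    println(complete(""))           // no source applies: empty list
  }
}
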
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
index 99f6b627eb..758f6e2abc 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
@@ -22,9 +22,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
lazy val completion = _completion
lazy val history: JLineHistory = JLineHistory()
- lazy val keyBindings =
- try KeyBinding parse slurp(term.getDefaultBindings)
- catch { case _: Exception => Nil }
private def term = consoleReader.getTerminal()
def reset() = term.reset()
diff --git a/src/compiler/scala/tools/nsc/interpreter/KeyBinding.scala b/src/compiler/scala/tools/nsc/interpreter/KeyBinding.scala
deleted file mode 100644
index a7ca3a77f3..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/KeyBinding.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-case class KeyBinding(name: String, code: Int, aliases: List[String], description: String) {
- def nameString = if (aliases.nonEmpty) aliases mkString ", " else name
- override def toString = "%3d %s: %s".format(code, nameString, description)
-}
-
-object KeyBinding {
- def parse(bindings: String): List[KeyBinding] = {
- def loop(xs: List[String]): List[KeyBinding] = {
- val (comment, lines) = xs span (_ startsWith "#")
- val description = comment map (_ drop 1 trim) mkString " "
- val (aliases, desc) = description span (_ != ':') match {
- case (x, y) => (
- x split ',' map (_.trim) toList,
- if (y == "") "" else y.tail.trim
- )
- }
- lines match {
- case Nil => Nil
- case hd :: tl =>
- val kb = (hd indexOf '=') match {
- case -1 => KeyBinding(hd, -1, aliases, desc)
- case idx => KeyBinding(hd drop idx + 1, hd take idx toInt, aliases, desc)
- }
- kb :: loop(tl)
- }
- }
- // This is verrrrrrrry specific to the current contents
- // of the keybindings.properties in jline.
- loop(bindings split "\\n" map (_.trim) dropWhile (_ != "") filterNot (_ == "") toList) sortBy (_.code)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Line.scala b/src/compiler/scala/tools/nsc/interpreter/Line.scala
deleted file mode 100644
index 3062c95dae..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Line.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import java.util.concurrent.locks.ReentrantLock
-import scala.tools.nsc.util.Exceptional
-import Exceptional.unwrap
-import Line._
-
-/** Encapsulation of a single line in the repl. The concurrency
- * infrastructure arose to deal with signals so SIGINT could be
- * trapped without losing the repl session, but it will be useful
- * in ways beyond that. Each line obtains a thread and the repl
- * waits on a condition indicating that either the line has
- * completed or failed.
- */
-class Line[+T](val code: String, classLoader: ClassLoader, body: => T) {
- private var _state: State = Running
- private var _result: Option[Any] = None
- private var _caught: Option[Throwable] = None
- private val lock = new ReentrantLock()
- private val finished = lock.newCondition()
-
- private def withLock[T](body: => T) = {
- lock.lock()
- try body
- finally lock.unlock()
- }
- private def setState(state: State) = withLock {
- _state = state
- finished.signal()
- }
- // private because it should be called by the manager.
- private def cancel() = if (running) setState(Cancelled)
-
- private def runAndSetState[T](body: => T) {
- try { _result = Some(body) ; setState(Done) }
- catch { case t => _caught = Some(t) ; setState(Threw) }
- }
-
- // This is where the line thread is created and started.
- private val _thread: Thread =
- io.newThread(_ setContextClassLoader classLoader)(runAndSetState(body))
-
- def state = _state
- def thread = _thread
- def alive = thread.isAlive
- def runaway = !success && alive
- def success = _state == Done
- def running = _state == Running
-
- def caught() = { await() ; _caught.orNull }
- def get() = {
- await()
- _result getOrElse sys.error("Called get with no result. Code: " + code)
- }
- def await() = withLock { while (running) finished.await() }
-}
-
-object Line {
- // seconds to let a runaway thread live before calling stop()
- private val HUNTER_KILLER_DELAY = 5
-
- // A line opens in state Running, and will eventually
- // transition to Threw (an exception was caught), Cancelled
- // (the line was explicitly cancelled, presumably by SIGINT)
- // or Done (success).
- sealed abstract class State
- case object Running extends State
- case object Threw extends State
- case object Cancelled extends State
- case object Done extends State
-
- class Manager(classLoader: ClassLoader) {
- /** Override to add behavior for runaway lines. This method will
- * be called if a line thread is still running five seconds after
- * it has been cancelled.
- */
- def onRunaway(line: Line[_]): Unit = ()
-
- private var _current: Option[Line[_]] = None
- def current = _current
-
- def clear() = {
- _current foreach (_.cancel())
- _current = None
- }
- def set[T](code: String)(body: => T) = {
- val line = new Line(code, classLoader, body)
- _current = Some(line)
- line
- }
- def running = _current.isDefined
- def cancel() = {
- current foreach { line =>
- line.thread.interrupt()
- line.cancel()
- if (line.runaway)
- io.timer(HUNTER_KILLER_DELAY) { if (line.alive) onRunaway(line) }
- }
- }
- }
-}
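
Editor's note: the deleted Line machinery ran each line on its own thread via io.newThread, with the repl's class loader installed as that thread's context loader. A hedged approximation of just that helper using plain java.lang.Thread (newThread here is my name, not the removed io utility):

object ContextLoaderThread {
  def newThread(loader: ClassLoader)(body: => Unit): Thread = {
    val t = new Thread(new Runnable { def run(): Unit = body })
    t.setContextClassLoader(loader)   // what `_ setContextClassLoader classLoader` arranged
    t.setDaemon(true)
    t.start()
    t
  }

  def main(args: Array[String]): Unit = {
    val t = newThread(getClass.getClassLoader) {
      println("context loader: " + Thread.currentThread.getContextClassLoader)
    }
    t.join()
  }
}
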
diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
index 9469baa4e2..3520a60ee5 100644
--- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -8,6 +8,7 @@ package interpreter
import collection.{ mutable, immutable }
import mutable.ListBuffer
+import language.implicitConversions
class ProcessResult(val line: String) {
import sys.process._
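
Editor's note: the only change to LoopCommands (and to several files below) is an `import language.implicitConversions`, the SIP-18 feature import that silences the implicit-conversion feature warning under -feature. A minimal sketch of what the import enables, written against the stable scala.language path:

object FeatureImportSketch {
  import scala.language.implicitConversions   // without this, -feature warns on the implicit def

  implicit def intToLabel(n: Int): String = "#" + n

  def main(args: Array[String]): Unit = {
    val label: String = 42   // conversion applied
    println(label)           // #42
  }
}
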
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
index 099034fe97..a29eb3ac6d 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -9,6 +9,7 @@ package interpreter
import scala.collection.{ mutable, immutable }
import scala.PartialFunction.cond
import scala.reflect.internal.Chars
+import language.implicitConversions
trait MemberHandlers {
val intp: IMain
@@ -63,6 +64,7 @@ trait MemberHandlers {
}
sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
+ def symbol = if (member.symbol eq null) NoSymbol else member.symbol
def name: Name = member.name
def mods: Modifiers = member.mods
def keyword = member.keyword
@@ -71,6 +73,7 @@ trait MemberHandlers {
override def definesImplicit = member.mods.isImplicit
override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
+ override def definedSymbols = if (symbol eq NoSymbol) Nil else List(symbol)
}
/** Class to handle one member among all the members included
@@ -88,6 +91,7 @@ trait MemberHandlers {
def importedNames = List[Name]()
def definedNames = definesTerm.toList ++ definesType.toList
def definedOrImported = definedNames ++ importedNames
+ def definedSymbols = List[Symbol]()
def extraCodeToEvaluate(req: Request): String = ""
def resultExtractionCode(req: Request): String = ""
@@ -120,7 +124,7 @@ trait MemberHandlers {
private def vparamss = member.vparamss
private def isMacro = member.mods.hasFlag(scala.reflect.internal.Flags.MACRO)
// true if not a macro and 0-arity
- override def definesValue = !isMacro && (vparamss.isEmpty || vparamss.head.isEmpty && vparamss.tail.isEmpty)
+ override def definesValue = !isMacro && flattensToEmpty(vparamss)
override def resultExtractionCode(req: Request) =
if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
}
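
Editor's note: definesValue above now delegates the zero-arity test to a flattensToEmpty helper. A hedged sketch of what such a check does (the real helper lives elsewhere in the repl package; this is only an illustration):

object ArityCheck {
  def flattensToEmpty(xss: Seq[Seq[_]]): Boolean = xss forall (_.isEmpty)

  def main(args: Array[String]): Unit = {
    println(flattensToEmpty(Nil))                   // true: no parameter lists
    println(flattensToEmpty(List(Nil)))             // true: a single empty parameter list
    println(flattensToEmpty(List(List("x: Int"))))  // false: the method takes arguments
  }
}
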
diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
index e92888d89b..a3cbfffc3b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
@@ -7,24 +7,25 @@ package scala.tools.nsc
package interpreter
import NamedParam._
+import language.implicitConversions
trait NamedParamCreator {
protected def freshName: () => String
def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value)
- def apply[T: Manifest](name: String, x: T): NamedParam = new Typed[T](name, x)
- def apply[T: Manifest](x: T): NamedParam = apply(freshName(), x)
+ def apply[T: TypeTag](name: String, x: T): NamedParam = new Typed[T](name, x)
+ def apply[T: TypeTag](x: T): NamedParam = apply(freshName(), x)
def clazz(name: String, x: Any): NamedParam = new Untyped(name, x)
def clazz(x: Any): NamedParam = clazz(freshName(), x)
- implicit def namedValue[T: Manifest](name: String, x: T): NamedParam = apply(name, x)
- implicit def tuple[T: Manifest](pair: (String, T)): NamedParam = apply(pair._1, pair._2)
+ implicit def namedValue[T: TypeTag](name: String, x: T): NamedParam = apply(name, x)
+ implicit def tuple[T: TypeTag](pair: (String, T)): NamedParam = apply(pair._1, pair._2)
}
object NamedParam extends NamedParamCreator {
- class Typed[T: Manifest](val name: String, val value: T) extends NamedParam {
- val tpe = TypeStrings.fromManifest[T]
+ class Typed[T: TypeTag](val name: String, val value: T) extends NamedParam {
+ val tpe = TypeStrings.fromTag[T]
}
class Untyped(val name: String, val value: Any) extends NamedParam {
val tpe = TypeStrings.fromValue(value)
@@ -43,4 +44,4 @@ trait NamedParam {
def tpe: String
def value: Any
override def toString = name + ": " + tpe
-}
\ No newline at end of file
+}
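
Editor's note: NamedParam's Manifest context bounds become TypeTag bounds above. A hedged sketch of the same pattern against the released scala-reflect API (typeOf); the patch itself targets in-flight names such as TypeStrings.fromTag, so treat this as an approximation that assumes scala-reflect is on the classpath.

import scala.reflect.runtime.universe._

object TypeTagSketch {
  // derive a type string from the static type, much as NamedParam.Typed does with its tag
  def typeStringOf[T: TypeTag](value: T): String = typeOf[T].toString

  def main(args: Array[String]): Unit = {
    println(typeStringOf(List(1, 2, 3)))   // List[Int]
    println(typeStringOf("repl"))          // String (or java.lang.String on older versions)
  }
}
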
diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
index d164d1cbb0..f39c025a86 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
@@ -7,6 +7,7 @@ package scala.tools.nsc
package interpreter
import scala.collection.{ mutable, immutable }
+import language.implicitConversions
/** Mix this into an object and use it as a phasing
* swiss army knife.
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
index cc06100f5f..9c4c05f1ee 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala
@@ -13,6 +13,7 @@ import session.{ History }
import scala.io.Codec
import java.net.{ URL, MalformedURLException }
import io.{ Path }
+import language.implicitConversions
/** Collecting some power mode examples.
@@ -41,10 +42,10 @@ Lost after 18/flatten {
/** A class for methods to be injected into the intp in power mode.
*/
-class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: ReplValsImpl) {
+class Power[ReplValsImpl <: ReplVals : TypeTag](val intp: IMain, replVals: ReplValsImpl) {
import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
import intp.global._
- import definitions.{ manifestToType, manifestToSymbol, getClassIfDefined, getModuleIfDefined }
+ import definitions.{ compilerTypeFromTag, compilerSymbolFromTag, getClassIfDefined, getModuleIfDefined }
abstract class SymSlurper {
def isKeep(sym: Symbol): Boolean
@@ -161,7 +162,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
}
trait LowPriorityInternalInfo {
- implicit def apply[T: Manifest] : InternalInfo[T] = new InternalInfo[T](None)
+ implicit def apply[T: TypeTag] : InternalInfo[T] = new InternalInfo[T](None)
}
object InternalInfo extends LowPriorityInternalInfo { }
@@ -172,21 +173,21 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
* of the conveniences exist on that wrapper.
*/
trait LowPriorityInternalInfoWrapper {
- implicit def apply[T: Manifest] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
+ implicit def apply[T: TypeTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
}
object InternalInfoWrapper extends LowPriorityInternalInfoWrapper {
}
- class InternalInfoWrapper[T: Manifest](value: Option[T] = None) {
+ class InternalInfoWrapper[T: TypeTag](value: Option[T] = None) {
def ? : InternalInfo[T] = new InternalInfo[T](value)
}
/** Todos...
- * translate manifest type arguments into applied types
+ * translate tag type arguments into applied types
* customizable symbol filter (had to hardcode no-spec to reduce noise)
*/
- class InternalInfo[T: Manifest](value: Option[T] = None) {
- private def newInfo[U: Manifest](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
+ class InternalInfo[T: TypeTag](value: Option[T] = None) {
+ private def newInfo[U: TypeTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
private def isImplClass(s: Symbol) = s.name.toString endsWith "$class"
@@ -197,8 +198,8 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
|| s.isAnonOrRefinementClass
|| s.isAnonymousFunction
)
- def symbol = manifestToSymbol(fullManifest)
- def tpe = manifestToType(fullManifest)
+ def symbol = compilerSymbolFromTag(tag)
+ def tpe = compilerTypeFromTag(tag)
def name = symbol.name
def companion = symbol.companionSymbol
def info = symbol.info
@@ -225,19 +226,19 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage)
def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember
- def fullManifest = manifest[T]
- def erasure = fullManifest.erasure
+ def tag = typeTag[T]
+ def erasure = tag.erasure
def shortClass = erasure.getName split "[$.]" last
def baseClasses = tpe.baseClasses
- def baseClassDecls = baseClasses map (x => (x, x.info.decls.toList.sortBy(_.name.toString))) toMap
+ def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name))
def ancestors = baseClasses drop 1
def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol)
def baseTypes = tpe.baseTypeSeq.toList
- def <:<[U: Manifest](other: U) = tpe <:< newInfo(other).tpe
- def lub[U: Manifest](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe))
- def glb[U: Manifest](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe))
+ def <:<[U: TypeTag](other: U) = tpe <:< newInfo(other).tpe
+ def lub[U: TypeTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe))
+ def glb[U: TypeTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe))
override def toString = value match {
case Some(x) => "%s (%s)".format(x, shortClass)
@@ -320,8 +321,6 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
}
class RichReplString(s: String) {
- // pretty print the string
- def pp() { intp.prettyPrint(s) }
// make an url out of the string
def u: URL = (
if (s contains ":") new URL(s)
@@ -336,7 +335,6 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
}
class RichReplURL(url: URL)(implicit codec: Codec) {
def slurp(): String = io.Streamable.slurp(url)
- def pp() { intp prettyPrint slurp() }
}
class RichSymbolList(syms: List[Symbol]) {
def sigs = syms map (_.defString)
@@ -361,11 +359,10 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
else if (s1 isLess s2) -1
else 1
}
- implicit lazy val powerNameOrdering: Ordering[Name] = Ordering[String] on (_.toString)
implicit lazy val powerSymbolOrdering: Ordering[Symbol] = Ordering[Name] on (_.name)
implicit lazy val powerTypeOrdering: Ordering[Type] = Ordering[Symbol] on (_.typeSymbol)
- implicit def replInternalInfo[T: Manifest](x: T): InternalInfoWrapper[T] = new InternalInfoWrapper[T](Some(x))
+ implicit def replInternalInfo[T: TypeTag](x: T): InternalInfoWrapper[T] = new InternalInfoWrapper[T](Some(x))
implicit def replEnhancedStrings(s: String): RichReplString = new RichReplString(s)
implicit def replMultiPrinting[T: Prettifier](xs: TraversableOnce[T]): MultiPrettifierClass[T] =
new MultiPrettifierClass[T](xs.toSeq)
@@ -380,10 +377,13 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
}
trait ReplUtilities {
- def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
- def clazz[T: Manifest] = getClassIfDefined(manifest[T].erasure.getName)
- def info[T: Manifest] = InternalInfo[T]
- def ?[T: Manifest] = InternalInfo[T]
+ // [Eugene to Paul] needs review!
+ // def module[T: TypeTag] = getModuleIfDefined(typeTag[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
+ // def clazz[T: TypeTag] = getClassIfDefined(typeTag[T].erasure.getName)
+ def module[T: TypeTag] = typeTag[T].sym.suchThat(_.isPackage)
+ def clazz[T: TypeTag] = typeTag[T].sym.suchThat(_.isClass)
+ def info[T: TypeTag] = InternalInfo[T]
+ def ?[T: TypeTag] = InternalInfo[T]
def url(s: String) = {
try new URL(s)
catch { case _: MalformedURLException =>
@@ -409,8 +409,8 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { }
def context(code: String) = analyzer.rootContext(unit(code))
- def source(code: String) = new BatchSourceFile("<console>", code)
- def unit(code: String) = new CompilationUnit(source(code))
+ def source(code: String) = newSourceFile(code)
+ def unit(code: String) = newCompilationUnit(code)
def trees(code: String) = parse(code) getOrElse Nil
def typeOf(id: String) = intp.typeOfExpression(id)
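
Editor's note: power mode keeps its derived orderings (Ordering[Name] on (_.name), and so on). A standalone sketch of the Ordering#on derivation with a plain case class in place of compiler Symbols:

object DerivedOrdering {
  case class Member(name: String, arity: Int)

  // order members by name, reusing the existing String ordering
  implicit val byName: Ordering[Member] = Ordering[String] on (_.name)

  def main(args: Array[String]): Unit = {
    val ms = List(Member("zip", 1), Member("apply", 2), Member("map", 1))
    println(ms.sorted map (_.name))   // List(apply, map, zip)
  }
}
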
diff --git a/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala
deleted file mode 100644
index dc4582eb17..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-class SeqCompletion[T](elems: Seq[T]) extends CompletionAware {
- lazy val completions = elems.indices.toList map ("(%d)" format _)
- def completions(verbosity: Int) = completions
- private def elemAt(name: String) =
- if (completions contains name) Some(elems(name drop 1 dropRight 1 toInt)) else None
-
- override def execute(name: String) = elemAt(name)
- override def follow(name: String) = elemAt(name) map (x => ProductCompletion(x))
-}
-
-/** TODO - deal with non-case products by giving them _1 _2 etc. */
-class ProductCompletion(root: Product) extends CompletionAware {
- lazy val caseFields: List[Any] = root.productIterator.toList
- lazy val caseNames: List[String] = ByteCode caseParamNamesForPath root.getClass.getName getOrElse Nil
- private def isValid = caseFields.length == caseNames.length
-
- private def fieldForName(s: String) = (completions indexOf s) match {
- case idx if idx > -1 && isValid => Some(caseFields(idx))
- case _ => None
- }
-
- lazy val completions = caseNames
- def completions(verbosity: Int) = completions
- override def execute(name: String) = fieldForName(name)
- override def follow(name: String) = fieldForName(name) map (x => ProductCompletion(x))
-}
-
-object ProductCompletion {
- /** TODO: other traversables. */
- def apply(elem: Any): CompletionAware = elem match {
- case x: Seq[_] => new SeqCompletion[Any](x)
- case x: Product => new ProductCompletion(x)
- // case x: Map[_, _] =>
- case _ => CompletionAware.Empty
- }
-}
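
Editor's note: for reference, the completion idea the deleted ProductCompletion implemented boils down to walking a Product's fields. A tiny hedged sketch of that core, without the CompletionAware wiring:

object ProductFields {
  case class Point(x: Int, y: Int)

  def fieldValues(p: Product): List[Any] = p.productIterator.toList

  def main(args: Array[String]): Unit = {
    println(fieldValues(Point(1, 2)))   // List(1, 2)
  }
}
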
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
new file mode 100644
index 0000000000..05321dd7e6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
@@ -0,0 +1,57 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import reporters._
+import typechecker.Analyzer
+
+/** A layer on top of Global so I can guarantee some extra
+ * functionality for the repl. It doesn't do much yet.
+ */
+trait ReplGlobal extends Global {
+ // This exists mostly because using the reporter too early leads to deadlock.
+ private def echo(msg: String) { Console println msg }
+
+ override def abort(msg: String): Nothing = {
+ echo("ReplGlobal.abort: " + msg)
+ super.abort(msg)
+ }
+
+ override lazy val analyzer = new {
+ val global: ReplGlobal.this.type = ReplGlobal.this
+ } with Analyzer {
+ override def newTyper(context: Context): Typer = new Typer(context) {
+ override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ val res = super.typed(tree, mode, pt)
+ tree match {
+ case Ident(name) if !tree.symbol.hasPackageFlag && !name.toString.startsWith("$") =>
+ repldbg("typed %s: %s".format(name, res.tpe))
+ case _ =>
+ }
+ res
+ }
+ }
+ }
+
+ object replPhase extends SubComponent {
+ val global: ReplGlobal.this.type = ReplGlobal.this
+ val phaseName = "repl"
+ val runsAfter = List[String]("typer")
+ val runsRightAfter = None
+ def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
+ def apply(unit: CompilationUnit) {
+ repldbg("Running replPhase on " + unit.body)
+ // newNamer(rootContext(unit)).enterSym(unit.body)
+ }
+ }
+ }
+
+ override protected def computePhaseDescriptors: List[SubComponent] = {
+ addToPhasesSet(replPhase, "repl")
+ super.computePhaseDescriptors
+ }
+}
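
Editor's note: ReplGlobal is a thin trait mixed over Global that intercepts a few methods and defers to super. A minimal sketch of that layering pattern outside the compiler (Base and Logging are illustrative stand-ins, not compiler classes):

object LayeredTraitSketch {
  class Base {
    def abort(msg: String): Nothing = sys.error(msg)
  }
  trait Logging extends Base {
    override def abort(msg: String): Nothing = {
      Console.err.println("about to abort: " + msg)   // extra repl-style reporting
      super.abort(msg)                                // then the normal behaviour
    }
  }

  def main(args: Array[String]): Unit = {
    val g = new Base with Logging
    try g.abort("boom")
    catch { case e: RuntimeException => println("caught: " + e.getMessage) }
  }
}
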
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
index 99489b7c99..5eb1e0ae18 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
@@ -13,7 +13,6 @@ class ReplProps {
val jlineDebug = bool("scala.tools.jline.internal.Log.debug")
val jlineTrace = bool("scala.tools.jline.internal.Log.trace")
- val noThreads = bool("scala.repl.no-threads")
val info = bool("scala.repl.info")
val debug = bool("scala.repl.debug")
@@ -24,9 +23,3 @@ class ReplProps {
val powerInitCode = Prop[JFile]("scala.repl.power.initcode")
val powerBanner = Prop[JFile]("scala.repl.power.banner")
}
-
-object ReplPropsKludge {
- // !!! short term binary compatibility hack for 2.9.1 to put this
- // here - needed a not previously existing object.
- def noThreadCreation(settings: Settings) = replProps.noThreads || settings.Yreplsync.value
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala b/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
index 130af990ad..fb61dfb672 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
@@ -9,7 +9,11 @@ package interpreter
import reporters._
import IMain._
+/** Like ReplGlobal, a layer for ensuring extra functionality.
+ */
class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.in, new ReplStrippingWriter(intp)) {
+ def printUntruncatedMessage(msg: String) = withoutTruncating(printMessage(msg))
+
override def printMessage(msg: String) {
// Avoiding deadlock if the compiler starts logging before
// the lazy val is complete.
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
index 175f6263ad..0c9f4fcd47 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
@@ -11,15 +11,6 @@ import scala.PartialFunction.cond
import scala.reflect.internal.Chars
trait ReplStrings {
- // Longest common prefix
- def longestCommonPrefix(xs: List[String]): String = {
- if (xs.isEmpty || xs.contains("")) ""
- else xs.head.head match {
- case ch =>
- if (xs.tail forall (_.head == ch)) "" + ch + longestCommonPrefix(xs map (_.tail))
- else ""
- }
- }
/** Convert a string into code that can recreate the string.
* This requires replacing all special characters by escape
* codes. It does not add the surrounding " marks. */
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplTokens.scala b/src/compiler/scala/tools/nsc/interpreter/ReplTokens.scala
deleted file mode 100644
index 1c7b256c33..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplTokens.scala
+++ /dev/null
@@ -1,285 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import util.{ BatchSourceFile, Indenter }
-import scala.tools.nsc.ast.parser.Tokens._
-import java.lang.Integer.toOctalString
-
-/** This began as an attempt at a completely minimal
- * pretty printer for a token stream, but as it turns out
- * it's "minimal, pretty, scala: pick any two." So
- * now it's an unattractive hybrid between minimalism
- * and other things. Still, it's a big improvement on the
- * way I was printing source in the repl, so in it goes.
- *
- * @author Paul Phillips
- */
-abstract class ReplTokens {
- val global: Global
-
- import global._
- import syntaxAnalyzer.{ UnitScanner, token2name }
-
- // Mostly, this means print <NL> so we can see where
- // semicolon inference took place.
- private var rawTokens: Boolean = false
- def withRawTokens[T](body: => T): T = {
- rawTokens = true
- try body
- finally rawTokens = false
- }
- // There's the seed of a good idea in here, but you wouldn't
- // know it from the current implementation. The objects are
- // trying to depict what feelings of coziness a given token
- // might have toward its immediate neighbors. But it lacks
- // sufficient granularity and a good resolution mechanism.
- sealed abstract class Cozy(l: => Boolean, r: => Boolean) {
- def left = l
- def right = r
- }
- object Cozy {
- def unapply(x: Cozy) = Some((x.left, x.right))
- }
- case object |--*--| extends Cozy(false, false)
- case object <*--| extends Cozy(true, false)
- case object |--*> extends Cozy(false, true)
- case object <*> extends Cozy(true, true)
-
- @annotation.switch def escapedChar(ch: Char): String = ch match {
- case '\b' => "\\b"
- case '\t' => "\\t"
- case '\n' => "\\n"
- case '\f' => "\\f"
- case '\r' => "\\r"
- case '"' => "\\\""
- case '\'' => "\\\'"
- case '\\' => "\\\\"
- case _ => String.valueOf(ch)
- }
- def escape(text: String): String = {
- text map { ch =>
- if (ch.isControl) "\\0" + toOctalString(ch)
- else escapedChar(ch)
- } mkString ""
- }
- private class Arrow(code: Int) {
- def ->(str: String): (Int, ReplToken) = (code, Token(code)(str))
- def ->(tok: ReplToken): (Int, ReplToken) = (code, tok)
- }
- private val symbolTokenMap = {
- implicit def liftToken(code: Int): Arrow = new Arrow(code)
-
- Map[Int, ReplToken](
- AT -> At,
- CASECLASS -> "case class",
- CASEOBJECT -> "case object",
- COLON -> Colon,
- COMMA -> Comma,
- DOT -> Dot,
- EOF -> Eof,
- ERROR -> "<error>",
- FALSE -> False,
- IMPORT -> Import,
- LBRACE -> LBrace,
- LBRACKET -> LBracket,
- LPAREN -> LParen,
- NEWLINE -> Newline,
- NEWLINES -> Newlines,
- NULL -> Null,
- RBRACE -> RBrace,
- RBRACKET -> RBracket,
- RPAREN -> RParen,
- SEMI -> Semi,
- SUBTYPE -> Subtype,
- SUPERTYPE -> Supertype,
- TRUE -> True,
- VIEWBOUND -> ViewBound,
- XMLSTART -> "<xmlstart>"
- )
- }
- def isAlphaId(t: ReplToken) = t match {
- case Id(name) => name forall (ch => ch.isDigit || ch.isLetter || ch == '_')
- case _ => false
- }
- def isOperatorId(t: ReplToken) = t match {
- case Id(name) => !isAlphaId(t)
- case _ => false
- }
-
- sealed abstract class ReplToken(val tokenString: String, val cozy: Cozy) {
- def this(str: String) = this(str, |--*--| )
-
- def insistsOnSpace = false
- def cozyRight(other: ReplToken) = (cozy.right || other.cozy.left)
- def cozyLeft(other: ReplToken) = (cozy.left || other.cozy.right)
-
- final def <--?-->(other: ReplToken) = {
- !(insistsOnSpace || other.insistsOnSpace) && (
- (this cozyRight other) ||
- (other cozyLeft this)
- )
- }
-
- // to show invisibles
- def rawString = tokenString
- override def toString = (
- if (rawTokens) rawString
- else tokenString
- )
- }
- trait InsistCozyRight extends ReplToken {
- final override def cozyRight(other: ReplToken) = true
- }
- trait InsistCozyLeft extends ReplToken {
- final override def cozyLeft(other: ReplToken) = true
- }
- trait InsistCozy extends InsistCozyLeft with InsistCozyRight { }
- trait InsistSpaced extends ReplToken {
- final override def insistsOnSpace = true
- }
- trait CozyWithLetters extends ReplToken {
- override def cozyRight(other: ReplToken) = isAlphaId(other) || super.cozyRight(other)
- override def cozyLeft(other: ReplToken) = isAlphaId(other) || super.cozyLeft(other)
- }
- trait Brackets extends ReplToken {
- private def isCozyToken(t: ReplToken) = t == LBracket || t == RBracket || isAlphaId(t)
- override def cozyRight(other: ReplToken) = isCozyToken(other) || super.cozyRight(other)
- override def cozyLeft(other: ReplToken) = isCozyToken(other) || super.cozyLeft(other)
- }
-
- case class Token(value: Int)(str: String) extends ReplToken(str) { }
- case class Id(name: String) extends ReplToken(name) { }
- case class Lit[T](value: T) extends ReplToken(value match {
- case s: String => "\"" + s + "\""
- case _ => "" + value
- })
- case object At extends ReplToken("@") with InsistCozyRight { }
- case object Colon extends ReplToken(":", <*--|)
- case object Comma extends ReplToken(",", <*--|) with InsistCozyLeft { }
- case object Dot extends ReplToken(".", <*>) with InsistCozy { }
- case object Eof extends ReplToken("EOF")
- case object ErrorToken extends ReplToken("<internal error>")
- case object False extends ReplToken("false")
- case object Import extends ReplToken("import")
- case object LBrace extends ReplToken("{") with InsistSpaced { }
- case object LBracket extends ReplToken("[") with Brackets { }
- case object LParen extends ReplToken("(", |--*>)
- case object Newline extends ReplToken("\n", <*>) with InsistCozy { override def rawString = "<NL>\n" }
- case object Newlines extends ReplToken("\n\n", <*>) with InsistCozy { override def rawString = "<NLS>\n\n" }
- case object Null extends ReplToken("null")
- case object RBrace extends ReplToken("}", |--*>) with InsistSpaced { }
- case object RBracket extends ReplToken("]") with Brackets { }
- case object RParen extends ReplToken(")", <*--|)
- case object Semi extends ReplToken(";", <*--|)
- case object Subtype extends ReplToken("<:") with InsistSpaced { }
- case object Supertype extends ReplToken(">:") with InsistSpaced { }
- case object True extends ReplToken("true")
- case object ViewBound extends ReplToken("<%") with InsistSpaced { }
-
- class Tokenizer(in: UnitScanner) {
- private def translate(tokenCode: Int): ReplToken = tokenCode match {
- case IDENTIFIER | BACKQUOTED_IDENT => Id("" + in.name)
- case CHARLIT | INTLIT | LONGLIT => Lit(in.intVal)
- case DOUBLELIT | FLOATLIT => Lit(in.floatVal)
- case STRINGLIT => Lit(escape(in.strVal))
- case SYMBOLLIT => Lit(scala.Symbol(in.strVal))
- case _ =>
- symbolTokenMap.getOrElse(
- tokenCode,
- token2name get tokenCode match {
- case Some(name) => Token(tokenCode)("" + name)
- case _ => Token(tokenCode)("<unknown: " + tokenCode + ">")
- }
- )
- }
- def tokenIterator: Iterator[ReplToken] = (
- Iterator continually {
- try translate(in.token)
- finally in.nextToken()
- } takeWhile (_ ne Eof)
- )
- }
-
- def prettyPrintRaw(tokens: TraversableOnce[ReplToken]) {
- withRawTokens(prettyPrint(tokens))
- }
-
- def prettyPrint(tokens: TraversableOnce[ReplToken]) {
- new TokenPrinter prettyPrint tokens
- }
-
- private class TokenPrinter {
- type TokenTriple = (ReplToken, ReplToken, ReplToken)
- val writer = new Indenter
- var prev: List[ReplToken] = Nil
-
- def isIdentPart(t: ReplToken) = t match {
- case Dot | Id(_) => true
- case _ => false
- }
- def prevNonIdent = prev dropWhile isIdentPart match {
- case Nil => ErrorToken
- case t :: _ => t
- }
- def inImport = prevNonIdent == Import
-
- def printToken(left: ReplToken, token: ReplToken) = token match {
- case LBrace =>
- writer openIndent (
- if (writer.atStartOfLine) token
- else " " + token
- )
- case RBrace =>
- writer.closeIndent(token)
- case tok @ (Newline | Newlines) =>
- writer.nextIndent(tok)
- case _ =>
- writer print (
- if (writer.atStartOfLine) token
- else if (left <--?--> token) token
- else " " + token
- )
- }
-
- def prettyPrint(tokens: TraversableOnce[ReplToken]) {
- val it = Iterator(Newline) ++ tokens.toIterator ++ Iterator(Newline) sliding 3 map { x =>
- val List(x1, x2, x3) = x
- ((x1, x2, x3))
- }
- prettyPrint(it)
- }
- def prettyPrint(it: Iterator[TokenTriple]) {
- while (it.hasNext) it.next match {
- // special casing to avoid newline on empty blocks
- case (left, LBrace, RBrace) =>
- it.next
- writer print " { }"
- // special casing to avoid newlines on import x.{ y, z, q }
- case (left, LBrace, _) if inImport =>
- writer print LBrace
- def loop() {
- if (it.hasNext) {
- val (_, tok, _) = it.next
- if (tok != Comma) {
- writer print " "
- }
- writer print tok
- if (tok != RBrace)
- loop()
- }
- }
- loop()
- case (left, token, right) =>
- printToken(left, token)
-
- if (it.hasNext) prev ::= token
- else printToken(token, right)
- }
- }
- }
-}
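
The comment near the top of the deleted block above sketches the idea in prose: each token carries a notion of how "cozy" it is with its left and right neighbours, and the printer inserts a space only when nobody is cozy and nobody insists on one. A minimal, self-contained sketch of that idea (hypothetical names and spacing choices, not the deleted ReplToken hierarchy):

object CozySketch {
  // Each token states whether it tolerates touching its left/right neighbour.
  sealed abstract class Tok(val s: String, val cozyLeft: Boolean, val cozyRight: Boolean,
                            val insistsOnSpace: Boolean = false)
  case class Ident(name: String) extends Tok(name, false, false)
  case object Dot    extends Tok(".",  true,  true)                          // "a.b"
  case object Comma  extends Tok(",",  true,  false)                         // "a, b"
  case object LParen extends Tok("(",  true,  true)
  case object RParen extends Tok(")",  true,  false)
  case object Arrow  extends Tok("=>", false, false, insistsOnSpace = true)  // always spaced

  // A space goes in only when one side insists on it, or when neither side is cozy.
  private def spaced(l: Tok, r: Tok): Boolean =
    l.insistsOnSpace || r.insistsOnSpace || !(l.cozyRight || r.cozyLeft)

  def render(tokens: List[Tok]): String = tokens match {
    case Nil           => ""
    case first :: rest =>
      rest.foldLeft((first.s, first)) { case ((acc, prev), t) =>
        (if (spaced(prev, t)) acc + " " + t.s else acc + t.s, t)
      }._1
  }

  def main(args: Array[String]): Unit = {
    val toks = List(Ident("xs"), Dot, Ident("map"), LParen, Ident("x"), Arrow, Ident("x"), RParen)
    println(render(toks))   // prints: xs.map(x => x)
  }
}

The deleted code goes further (a <--?--> resolution between both neighbours, the InsistCozy* mixins, bracket special cases), which is exactly the "granularity and resolution mechanism" its own comment says is still missing.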
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
index e293c0fed9..280247f20c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
@@ -6,6 +6,9 @@
package scala.tools.nsc
package interpreter
+import scala.reflect.{mirror => rm}
+import language.implicitConversions
+
/** A class which the repl utilizes to expose predefined objects.
* The base implementation is empty; the standard repl implementation
* is StdReplVals.
@@ -25,7 +28,8 @@ class StdReplVals(final val r: ILoop) extends ReplVals {
final lazy val phased = power.phased
final lazy val analyzer = global.analyzer
- final lazy val treedsl = new { val global: intp.global.type = intp.global } with ast.TreeDSL { }
+ object treedsl extends { val global: intp.global.type = intp.global } with ast.TreeDSL { }
+
final lazy val typer = analyzer.newTyper(
analyzer.rootContext(
power.unit("").asInstanceOf[analyzer.global.CompilationUnit]
@@ -33,13 +37,15 @@ class StdReplVals(final val r: ILoop) extends ReplVals {
)
def lastRequest = intp.lastRequest
- final lazy val replImplicits = new power.Implicits2 {
+ class ReplImplicits extends power.Implicits2 {
import intp.global._
- private val manifestFn = ReplVals.mkManifestToType[intp.global.type](global)
- implicit def mkManifestToType(sym: Symbol) = manifestFn(sym)
+ private val tagFn = ReplVals.mkCompilerTypeFromTag[intp.global.type](global)
+ implicit def mkCompilerTypeFromTag(sym: Symbol) = tagFn(sym)
}
+ final lazy val replImplicits = new ReplImplicits
+
def typed[T <: analyzer.global.Tree](tree: T): T = typer.typed(tree).asInstanceOf[T]
}
@@ -48,29 +54,29 @@ object ReplVals {
* not being seen as the same type as bar.global.Type even though
* the globals are the same. Dependent method types to the rescue.
*/
- def mkManifestToType[T <: Global](global: T) = {
+ def mkCompilerTypeFromTag[T <: Global](global: T) = {
import global._
import definitions._
- /** We can't use definitions.manifestToType directly because we're passing
+ /** We can't use definitions.compilerTypeFromTag directly because we're passing
* it to map and the compiler refuses to perform eta expansion on a method
* with a dependent return type. (Can this be relaxed?) To get around this
 * I have this forwarder which widens the type and then casts the result back
* to the dependent type.
*/
- def manifestToType(m: Manifest[_]): Global#Type =
- definitions.manifestToType(m)
+ def compilerTypeFromTag(t: rm.TypeTag[_]): Global#Type =
+ definitions.compilerTypeFromTag(t)
- class AppliedTypeFromManifests(sym: Symbol) {
- def apply[M](implicit m1: Manifest[M]): Type =
+ class AppliedTypeFromTags(sym: Symbol) {
+ def apply[M](implicit m1: rm.TypeTag[M]): Type =
if (sym eq NoSymbol) NoType
- else appliedType(sym, manifestToType(m1).asInstanceOf[Type])
+ else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type])
- def apply[M1, M2](implicit m1: Manifest[M1], m2: Manifest[M2]): Type =
+ def apply[M1, M2](implicit m1: rm.TypeTag[M1], m2: rm.TypeTag[M2]): Type =
if (sym eq NoSymbol) NoType
- else appliedType(sym, manifestToType(m1).asInstanceOf[Type], manifestToType(m2).asInstanceOf[Type])
+ else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type], compilerTypeFromTag(m2).asInstanceOf[Type])
}
- (sym: Symbol) => new AppliedTypeFromManifests(sym)
+ (sym: Symbol) => new AppliedTypeFromTags(sym)
}
}
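
The "dependent method types to the rescue" comment in this hunk combines two tricks worth seeing in isolation: a dependent method type, so the returned function is typed against the caller's own global, and a widening forwarder, because the compiler refuses to eta-expand a method whose result type is dependent. A hedged sketch with hypothetical names (Universe standing in for Global, mkTypeFactory for mkCompilerTypeFromTag):

object DependentSketch {
  trait Universe { type Type; def mkType(name: String): Type }

  object StringUniverse extends Universe {
    type Type = String
    def mkType(name: String) = "type " + name
  }

  // Dependent method type: the result type mentions the parameter `u`, so callers
  // get back a function producing u.Type rather than some widened Universe#Type.
  def mkTypeFactory(u: Universe): String => u.Type = {
    // Widened forwarder: this one can be eta-expanded / passed to map...
    def widened(name: String): Universe#Type = u.mkType(name)
    // ...and the result is cast back to the dependent type at the edge.
    (name: String) => widened(name).asInstanceOf[u.Type]
  }

  def main(args: Array[String]): Unit = {
    val f = mkTypeFactory(StringUniverse)
    val t: StringUniverse.Type = f("Int")   // statically StringUniverse.Type, i.e. String
    println(t)                              // type Int
  }
}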
diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
index 59a7b9b5d2..2e735e3b9b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
@@ -7,18 +7,21 @@ package scala.tools.nsc
package interpreter
class RichClass[T](val clazz: Class[T]) {
- def toManifest: Manifest[T] = Manifest[T](ClassManifest[T](clazz).tpe)
+ def toTag: ClassTag[T] = ClassTag[T](clazz)
def toTypeString: String = TypeStrings.fromClazz(clazz)
// Sadly isAnonymousClass does not return true for scala anonymous
// classes because our naming scheme is not doing well against the
// jvm's many assumptions.
- def isScalaAnonymous = clazz.isAnonymousClass || (clazz.getName contains "$anon$")
+ def isScalaAnonymous = (
+ try clazz.isAnonymousClass || (clazz.getName contains "$anon$")
+ catch { case _: java.lang.InternalError => false } // good ol' "Malformed class name"
+ )
/** It's not easy... to be... me... */
- def supermans: List[Manifest[_]] = supers map (_.toManifest)
- def superNames: List[String] = supers map (_.getName)
- def interfaces: List[JClass] = supers filter (_.isInterface)
+ def supermans: List[ClassTag[_]] = supers map (_.toTag)
+ def superNames: List[String] = supers map (_.getName)
+ def interfaces: List[JClass] = supers filter (_.isInterface)
def hasAncestorName(f: String => Boolean) = superNames exists f
def hasAncestor(f: JClass => Boolean) = supers exists f
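
The new isScalaAnonymous wraps the check because Class.isAnonymousClass can throw java.lang.InternalError("Malformed class name") on some Scala-generated class names. A small sketch of the same defensive shape, usable outside the repl (hypothetical helper object):

object AnonCheck {
  def isScalaAnonymous(clazz: Class[_]): Boolean =
    try clazz.isAnonymousClass || (clazz.getName contains "$anon$")
    catch { case _: InternalError => false }   // the "Malformed class name" case

  def main(args: Array[String]): Unit = {
    val anon = new Runnable { def run() = () }
    println(isScalaAnonymous(anon.getClass))   // true, via the "$anon$" name check
    println(isScalaAnonymous(classOf[String])) // false
  }
}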
diff --git a/src/compiler/scala/tools/nsc/interpreter/Runner.scala b/src/compiler/scala/tools/nsc/interpreter/Runner.scala
deleted file mode 100644
index f9f75da3c6..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Runner.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-object Runner {
- def main(args: Array[String]): Unit = new ILoop process args
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
index 992bef8056..a57b047bc0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -18,7 +18,6 @@ extends InteractiveReader
{
val history = NoHistory
val completion = NoCompletion
- val keyBindings: List[KeyBinding] = Nil
def init() = ()
def reset() = ()
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
index 872ac00bfd..5d5123811e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
@@ -11,6 +11,142 @@ import r.TypeVariable
import scala.reflect.NameTransformer
import NameTransformer._
import scala.reflect.{mirror => rm}
+import typechecker.DestructureTypes
+import scala.tools.util.StringOps.ojoin
+
+/** A more principled system for turning types into strings.
+ */
+trait StructuredTypeStrings extends DestructureTypes {
+ val global: Global
+ import global._
+ import definitions._
+
+ case class LabelAndType(label: String, typeName: String) { }
+ object LabelAndType {
+ val empty = LabelAndType("", "")
+ }
+ case class Grouping(ldelim: String, mdelim: String, rdelim: String, labels: Boolean) {
+ def join(elems: String*): String = (
+ if (elems.isEmpty) ""
+ else elems.mkString(ldelim, mdelim, rdelim)
+ )
+ }
+ val NoGrouping = Grouping("", "", "", false)
+ val ListGrouping = Grouping("(", ", ", ")", false)
+ val ProductGrouping = Grouping("(", ", ", ")", true)
+ val ParamGrouping = Grouping("(", ", ", ")", true)
+ val BlockGrouping = Grouping(" { ", "; ", "}", false)
+
+ private implicit def lowerName(n: Name): String = "" + n
+ private def str(level: Int)(body: => String): String = " " * level + body
+ private def block(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
+ val l1 = str(level)(name + grouping.ldelim)
+ val l2 = nodes.map(_ show level + 1)
+ val l3 = str(level)(grouping.rdelim)
+
+ l1 +: l2 :+ l3 mkString "\n"
+ }
+ private def maybeBlock(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
+ import grouping._
+ val threshold = 70
+
+ val try1 = str(level)(name + grouping.join(nodes map (_.show(0, grouping.labels)): _*))
+ if (try1.length < threshold) try1
+ else block(level, grouping)(name, nodes)
+ }
+ private def shortClass(x: Any) = {
+ if (opt.debug) {
+ val name = (x.getClass.getName split '.').last
+ val isAnon = name.reverse takeWhile (_ != '$') forall (_.isDigit)
+ val str = if (isAnon) name else (name split '$').last
+
+ " // " + str
+ }
+ else ""
+ }
+
+ sealed abstract class TypeNode {
+ def grouping: Grouping
+ def nodes: List[TypeNode]
+
+ def show(indent: Int, showLabel: Boolean): String = maybeBlock(indent, grouping)(mkPrefix(showLabel), nodes)
+ def show(indent: Int): String = show(indent, true)
+ def show(): String = show(0)
+
+ def withLabel(l: String): this.type = modifyNameInfo(_.copy(label = l))
+ def withType(t: String): this.type = modifyNameInfo(_.copy(typeName = t))
+
+ def label = nameInfo.label
+ def typeName = nameInfo.typeName
+
+ protected def mkPrefix(showLabel: Boolean) = {
+ val pre = if (showLabel && label != "") label + " = " else ""
+ pre + typeName
+ }
+ override def toString = show() // + "(toString)"
+ private var nameInfo: LabelAndType = LabelAndType.empty
+ private def modifyNameInfo(f: LabelAndType => LabelAndType): this.type = {
+ nameInfo = f(nameInfo)
+ this
+ }
+ }
+ case class TypeAtom[T](atom: T) extends TypeNode {
+ def grouping = NoGrouping
+ def nodes = Nil
+ override protected def mkPrefix(showLabel: Boolean) =
+ super.mkPrefix(showLabel) + atom + shortClass(atom)
+ }
+ case class TypeProduct(nodes: List[TypeNode]) extends TypeNode {
+ def grouping: Grouping = ProductGrouping
+ def emptyTypeName = ""
+ override def typeName = if (nodes.isEmpty) emptyTypeName else super.typeName
+ }
+
+ /** For a NullaryMethod, in = TypeEmpty; for MethodType(Nil, _) in = TypeNil */
+ class NullaryFunction(out: TypeNode) extends TypeProduct(List(out)) {
+ override def typeName = "NullaryMethodType"
+ }
+ class MonoFunction(in: TypeNode, out: TypeNode) extends TypeProduct(List(in, out)) {
+ override def typeName = "MethodType"
+ }
+ class PolyFunction(in: TypeNode, out: TypeNode) extends TypeProduct(List(in, out)) {
+ override def typeName = "PolyType"
+ }
+
+ class TypeList(nodes: List[TypeNode]) extends TypeProduct(nodes) {
+ override def grouping = ListGrouping
+ override def emptyTypeName = "Nil"
+ override def typeName = "List"
+ }
+ class TypeScope(nodes: List[TypeNode]) extends TypeProduct(nodes) {
+ override def grouping = BlockGrouping
+ override def typeName = "Scope"
+ override def emptyTypeName = "EmptyScope"
+ }
+
+ object TypeEmpty extends TypeNode {
+ override def grouping = NoGrouping
+ override def nodes = Nil
+ override def label = ""
+ override def typeName = ""
+ override def show(indent: Int, showLabel: Boolean) = ""
+ }
+
+ object intoNodes extends DestructureType[TypeNode] {
+ def withLabel(node: TypeNode, label: String): TypeNode = node withLabel label
+ def withType(node: TypeNode, typeName: String): TypeNode = node withType typeName
+
+ def wrapEmpty = TypeEmpty
+ def wrapSequence(nodes: List[TypeNode]) = new TypeList(nodes)
+ def wrapProduct(nodes: List[TypeNode]) = new TypeProduct(nodes)
+ def wrapPoly(in: TypeNode, out: TypeNode) = new PolyFunction(in, out)
+ def wrapMono(in: TypeNode, out: TypeNode) = if (in == wrapEmpty) new NullaryFunction(out) else new MonoFunction(in, out)
+ def wrapAtom[U](value: U) = new TypeAtom(value)
+ }
+
+ def show(tp: Type): String = intoNodes(tp).show
+}
+
/** Logic for turning a type into a String. The goal is to be
* able to take some arbitrary object 'x' and obtain the most precise
@@ -56,8 +192,8 @@ trait TypeStrings {
else enclClass.getName + "." + (name stripPrefix enclPre)
)
}
- def scalaName(m: ClassManifest[_]): String = scalaName(m.erasure)
- def anyClass(x: Any): JClass = if (x == null) null else x.getClass
+ def scalaName(m: ClassTag[_]): String = scalaName(m.erasure)
+ def anyClass(x: Any): JClass = if (x == null) null else x.getClass
private def brackets(tps: String*): String =
if (tps.isEmpty) ""
@@ -73,25 +209,25 @@ trait TypeStrings {
brackets(clazz.getTypeParameters map tvarString: _*)
}
- private def tparamString[T: Manifest] : String = {
+ private def tparamString[T: TypeTag] : String = {
// [Eugene to Paul] needs review!!
- def typeArguments: List[rm.Type] = manifest[T].tpe.typeArguments
+ def typeArguments: List[rm.Type] = typeTag[T].tpe.typeArguments
def typeVariables: List[java.lang.Class[_]] = typeArguments map (targ => rm.typeToClass(targ))
brackets(typeArguments map (jc => tvarString(List(jc))): _*)
}
/** Going for an overabundance of caution right now. Later these types
- * can be a lot more precise, but right now the manifests have a habit of
+ * can be a lot more precise, but right now the tags have a habit of
* introducing material which is not syntactically valid as scala source.
* When this happens it breaks the repl. It would be nice if we mandated
- * that manifest toString methods (or some other method, since it's bad
+ * that tag toString methods (or some other method, since it's bad
* practice to rely on toString for correctness) generated the VALID string
* representation of the type.
*/
- def fromTypedValue[T: Manifest](x: T): String = fromManifest[T]
- def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value))
- def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz)
- def fromManifest[T: Manifest] : String = scalaName(manifest[T].erasure) + tparamString[T]
+ def fromTypedValue[T: TypeTag](x: T): String = fromTag[T]
+ def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value))
+ def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz)
+ def fromTag[T: TypeTag] : String = scalaName(typeTag[T].erasure) + tparamString[T]
/** Reducing fully qualified noise for some common packages.
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala
deleted file mode 100644
index 9979814afb..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import xml.{ XML, Group, Node, NodeSeq }
-import XMLCompletion._
-import scala.collection.{ mutable, immutable }
-
-class XMLCompletion(root: Node) extends CompletionAware {
- private val nodeCache = new mutable.HashMap[String, Node]
- private def getNode(s: String): Option[Node] = {
- completions // make sure cache is populated
- nodeCache get s
- }
-
- lazy val completions: List[String] = {
- def children = root.child.toList
- def uniqueTags = children groupBy (_.label) filter (_._2.size == 1) map (_._1)
- val uniqs = uniqueTags.toList
-
- children.foldLeft(List[String]())((res, node) => {
- val name = node.label
- def count = res filter (_ startsWith (name + "[")) size // ]
- val suffix = if (uniqs contains name) "" else "[%d]" format (count + 1)
- val s = name + suffix
-
- nodeCache(s) = node
-
- s :: res
- }).sorted
- }
- def completions(verbosity: Int) = completions
-
- override def execute(id: String) = getNode(id)
- override def follow(id: String) = getNode(id) map (x => new XMLCompletion(x))
-}
-
-object XMLCompletion {
- def apply(x: Node) = new XMLCompletion(x)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
index e78e92c8f8..7d2610e3c8 100644
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/package.scala
@@ -5,6 +5,8 @@
package scala.tools.nsc
+import language.implicitConversions
+
/** The main REPL related classes and values are as follows.
* In addition to standard compiler classes Global and Settings, there are:
*
@@ -33,6 +35,8 @@ package object interpreter extends ReplConfig with ReplStrings {
val IR = Results
+ implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+
private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
import collection.JavaConverters._
xs.asScala.toList map ("" + _)
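
The implicit def postfixOps = language.postfixOps line (here and in the io package object below) works because SIP-18 feature checks look for an implicit value of the corresponding scala.languageFeature type in scope; exposing one from a package object enables the feature for the whole package without per-file imports. A hedged sketch under a hypothetical package name:

package object sketchpkg {
  // Any implicit of type languageFeature.postfixOps in scope silences the
  // -feature warning for postfix operator notation.
  implicit def postfixOps: languageFeature.postfixOps = scala.language.postfixOps
}

package sketchpkg {
  object Demo {
    def main(args: Array[String]): Unit = {
      val n = (List(1, 2, 3) size)   // postfix selection; no feature warning in this package
      println(n)                     // 3
    }
  }
}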
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
index a09182319c..795ca79668 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
@@ -45,11 +45,5 @@ object JLineHistory {
override def toString = "History(size = " + size + ", index = " + index + ")"
}
- def apply(): JLineHistory =
- try { new JLineFileHistory }
- catch { case x: Exception =>
- Console.println("Error creating file history: memory history only. " + x)
- util.Exceptional(x).show()
- new SimpleHistory()
- }
-}
\ No newline at end of file
+ def apply(): JLineHistory = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() }
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/package.scala b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
index 8fbba2f05e..4e5b08c8cb 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/package.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
@@ -5,6 +5,7 @@
package scala.tools.nsc
package interpreter
+import language.implicitConversions
/** Files having to do with the state of a repl session:
* lines of text entered, types and terms defined, etc.
diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
index b51cf1228c..deb914f806 100644
--- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
@@ -10,6 +10,7 @@ package io
import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream }
import java.net.URL
import scala.collection.mutable.ArrayBuffer
+import scala.reflect.api.RequiredFile
/**
* @author Philippe Altherr
@@ -81,7 +82,7 @@ object AbstractFile {
* <code>global.settings.encoding.value</code>.
* </p>
*/
-abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
+abstract class AbstractFile extends AnyRef with RequiredFile with Iterable[AbstractFile] {
/** Returns the name of this abstract file. */
def name: String
diff --git a/src/compiler/scala/tools/nsc/io/ClassAndJarInfo.scala b/src/compiler/scala/tools/nsc/io/ClassAndJarInfo.scala
deleted file mode 100644
index d0a0b17494..0000000000
--- a/src/compiler/scala/tools/nsc/io/ClassAndJarInfo.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.net.{ URL, URLClassLoader }
-import java.io.IOException
-import collection.JavaConverters._
-
-/** A convenience class for finding the jar with the bytecode for
- * a given Class object and similar common tasks.
- */
-class ClassAndJarInfo[T: ClassManifest] {
- val man = classManifest[T]
- def clazz = man.erasure
- def internalName = clazz.getName.replace('.', '/')
-
- def resourceURL = new URLClassLoader(Array[URL]()) getResource internalName + ".class"
-
- def baseOfPath(path: String) = path indexOf '!' match {
- case -1 => path stripSuffix internalName + ".class"
- case idx => path take idx
- }
-
- def simpleClassName = clazz.getName split """[$.]""" last
- def classUrl = clazz getResource simpleClassName + ".class"
- def codeSource = protectionDomain.getCodeSource()
- def jarManifest = (
- try new JManifest(jarManifestUrl.openStream())
- catch { case _: IOException => new JManifest() }
- )
- def jarManifestMainAttrs = jarManifest.getMainAttributes().asScala
- def jarManifestUrl = new URL(baseOfPath("" + classUrl) + "!/META-INF/MANIFEST.MF")
- def locationFile = File(locationUrl.toURI.getPath())
- def locationUrl = if (codeSource == null) new URL("file:///") else codeSource.getLocation()
- def protectionDomain = clazz.getProtectionDomain()
- def rootClasspath = rootPossibles find (_.exists)
- def rootFromLocation = Path(locationUrl.toURI.getPath())
- def rootFromResource = Path(baseOfPath(classUrl.getPath) stripPrefix "file:")
- def rootPossibles = Iterator(rootFromResource, rootFromLocation)
-}
diff --git a/src/compiler/scala/tools/nsc/io/File.scala b/src/compiler/scala/tools/nsc/io/File.scala
index cc512493d9..06cb20e4ac 100644
--- a/src/compiler/scala/tools/nsc/io/File.scala
+++ b/src/compiler/scala/tools/nsc/io/File.scala
@@ -15,6 +15,7 @@ import java.io.{
BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable }
import java.nio.channels.{ Channel, FileChannel }
import scala.io.Codec
+import language.{reflectiveCalls, implicitConversions}
object File {
def pathSeparator = java.io.File.pathSeparator
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index bbed5a9e20..b322df986c 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -11,6 +11,7 @@ import java.util.jar._
import collection.JavaConverters._
import Attributes.Name
import util.ClassPath
+import language.implicitConversions
// Attributes.Name instances:
//
diff --git a/src/compiler/scala/tools/nsc/io/NullPrintStream.scala b/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
deleted file mode 100644
index 52c7ddc74b..0000000000
--- a/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.io.{ PrintStream, ByteArrayOutputStream }
-
-/** A sink for when you want to discard all output.
- */
-class NullPrintStream extends PrintStream(new ByteArrayOutputStream()) { }
-
-object NullPrintStream extends NullPrintStream {
- def setOut() = Console setOut this
- def setErr() = Console setErr this
- def setOutAndErr() = { setOut() ; setErr() }
- def sinkingOutAndErr[T](body: => T): T =
- Console.withOut(this) {
- Console.withErr(this) {
- body
- }
- }
-
- def sinkingSystemOutAndErr[T](body: => T): T = {
- val savedOut = System.out
- val savedErr = System.err
- System setOut NullPrintStream
- System setErr NullPrintStream
- try body
- finally {
- System setOut savedOut
- System setErr savedErr
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala
index a1b8e5e4d5..b8cf15bfcf 100644
--- a/src/compiler/scala/tools/nsc/io/Path.scala
+++ b/src/compiler/scala/tools/nsc/io/Path.scala
@@ -11,6 +11,7 @@ import java.io.{
BufferedInputStream, BufferedOutputStream, RandomAccessFile }
import java.net.{ URI, URL }
import scala.util.Random.alphanumeric
+import language.implicitConversions
/** An abstraction for filesystem paths. The differences between
* Path, File, and Directory are primarily to communicate intent.
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
index 80b6e086da..416b84eec6 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -3,6 +3,7 @@ package scala.tools.nsc.io
import annotation.unchecked
import Lexer._
import java.io.Writer
+import language.implicitConversions
/** An abstract class for writing and reading Scala objects to and
 * from a legible representation. The presentation follows the following grammar:
@@ -168,14 +169,11 @@ object Pickler {
case class ~[+S, +T](fst: S, snd: T)
 /** A wrapper class to be able to use `~` as an infix method */
- class TildeDecorator[S](x: S) {
+ implicit class TildeDecorator[S](x: S) {
/** Infix method that forms a `~`-pair. */
def ~ [T](y: T): S ~ T = new ~ (x, y)
}
- /** An implicit wrapper that adds `~` as a method to any value. */
- implicit def tildeDecorator[S](x: S): TildeDecorator[S] = new TildeDecorator(x)
-
/** A converter from binary functions to functions over `~`-pairs
*/
implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) }
@@ -418,7 +416,7 @@ object Pickler {
iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector")
/** A pickler for array values */
- implicit def array[T : ClassManifest : Pickler]: Pickler[Array[T]] =
+ implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] =
iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array")
}
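
The Pickler change above is the Scala 2.10 implicit class idiom: the wrapper class and its separate implicit def collapse into a single declaration. A self-contained sketch of the same ~-pair builder (hypothetical object and method names):

object TildeSketch {
  // The pair type itself, named so that `S ~ T` reads infix, as in Pickler.
  case class ~[+S, +T](fst: S, snd: T)

  // One declaration replaces "class TildeDecorator + implicit def tildeDecorator".
  implicit class TildeOps[S](x: S) {
    def ~[T](y: T): S ~ T = new ~(x, y)
  }

  def main(args: Array[String]): Unit = {
    val pair = 1 ~ "one" ~ 2.0   // ((1 ~ "one") ~ 2.0)
    println(pair)                // ~(~(1,one),2.0)
  }
}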
diff --git a/src/compiler/scala/tools/nsc/io/PlainFile.scala b/src/compiler/scala/tools/nsc/io/PlainFile.scala
index 83b8cc32c4..21276e8740 100644
--- a/src/compiler/scala/tools/nsc/io/PlainFile.scala
+++ b/src/compiler/scala/tools/nsc/io/PlainFile.scala
@@ -51,6 +51,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
override def output = givenPath.toFile.outputStream()
override def sizeOption = Some(givenPath.length.toInt)
+ override def toString = path
override def hashCode(): Int = fpath.hashCode
override def equals(that: Any): Boolean = that match {
case x: PlainFile => fpath == x.fpath
diff --git a/src/compiler/scala/tools/nsc/io/Sources.scala b/src/compiler/scala/tools/nsc/io/Sources.scala
deleted file mode 100644
index 25d27acae8..0000000000
--- a/src/compiler/scala/tools/nsc/io/Sources.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-package scala.tools.nsc
-package io
-
-import util.ClassPath
-import java.util.concurrent.{ Future, ConcurrentHashMap, ExecutionException }
-import java.util.zip.ZipException
-import collection.JavaConverters._
-import Properties.{ envOrElse, propOrElse }
-
-class Sources(val path: String) {
- val expandedPath = ClassPath.join(ClassPath expandPath path: _*)
- val cache = new ConcurrentHashMap[String, List[Fileish]]
- def allNames = cache.keys.asScala.toList.sorted
- def apply(name: String) = get(name)
- def size = cache.asScala.values map (_.length) sum
- def isEmpty = path == ""
-
- private var debug = false
- private def dbg(msg: => Any) = if (debug) Console println msg
- private val partitioned = ClassPath toPaths expandedPath partition (_.isDirectory)
-
- val dirs = partitioned._1 map (_.toDirectory)
- val jars = partitioned._2 filter Jar.isJarOrZip map (_.toFile)
- val (isDone, force) = (
- if (path == "") (() => true, () => ())
- else {
- val f1 = spawn(calculateDirs())
- val f2 = spawn(calculateJars())
- val fn1 = () => { f1.isDone() && f2.isDone() }
- val fn2 = () => { f1.get() ; f2.get() ; () }
-
- (fn1, fn2)
- }
- )
-
- private def catchZip(body: => Unit): Unit = {
- try body
- catch { case x: ZipException => dbg("Caught: " + x) }
- }
-
- private def calculateDirs() =
- dirs foreach { d => dbg(d) ; catchZip(addSources(d.deepFiles map (x => Fileish(x)))) }
-
- private def calculateJars() =
- jars foreach { j => dbg(j) ; catchZip(addSources(new Jar(j).fileishIterator)) }
-
- private def addSources(fs: TraversableOnce[Fileish]) =
- fs foreach { f => if (f.isSourceFile) add(f.name, f) }
-
- private def get(key: String): List[Fileish] =
- if (cache containsKey key) cache.get(key) else Nil
-
- private def add(key: String, value: Fileish) = {
- if (cache containsKey key) cache.replace(key, value :: cache.get(key))
- else cache.put(key, List(value))
- }
- override def toString = "Sources(%d dirs, %d jars, %d sources)".format(
- dirs.size, jars.size, cache.asScala.values map (_.length) sum
- )
-}
-
-trait LowPrioritySourcesImplicits {
- self: Sources.type =>
-
- implicit def fallbackSources: Sources = defaultSources
-}
-
-object Sources extends LowPrioritySourcesImplicits {
- val empty = new Sources("")
-
- private def libraryInits = ClassPath.scalaLibrary.toList flatMap (_.toAbsolute.parents)
- private def librarySourceDir = libraryInits map (_ / "src") find (_.isDirectory)
- private def expandedSourceDir = librarySourceDir.toList flatMap (ClassPath expandDir _.path)
-
- private val initialPath = sys.props.traceSourcePath.value
- private val initialSources = apply(expandedSourceDir :+ initialPath: _*)
-
- def defaultSources = {
- val path = sys.props.traceSourcePath.value
- if (path == "") empty
- else if (path == initialPath) initialSources
- else apply(expandedSourceDir :+ path: _*)
- }
-
- def apply(paths: String*): Sources = new Sources(ClassPath.join(paths: _*))
-}
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 88679e6dce..d29030603e 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -8,8 +8,11 @@ package scala.tools.nsc
import java.util.concurrent.{ Future, Callable }
import java.util.{ Timer, TimerTask }
import java.util.jar.{ Attributes }
+import language.implicitConversions
package object io {
+ implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+
type JManifest = java.util.jar.Manifest
type JFile = java.io.File
@@ -20,9 +23,6 @@ package object io {
def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body)
def submit(runnable: Runnable) = daemonThreadPool submit runnable
- def runnableFn(f: () => Unit): Runnable = runnable(f())
- def callableFn[T](f: () => T): Callable[T] = callable(f())
- def spawnFn[T](f: () => T): Future[T] = spawn(f())
// Create, start, and return a daemon thread
def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body)
@@ -41,4 +41,4 @@ package object io {
alarm.schedule(tt, seconds * 1000)
alarm
}
-}
\ No newline at end of file
+}
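
The callable/spawn helpers retained above wrap a by-name body as a java.util.concurrent.Callable and hand it to a thread pool, getting a java Future back; the deleted Sources.scala above used exactly this to scan directories and jars in the background. A hedged sketch with its own small pool (hypothetical names; the real package object submits to a shared daemon pool):

object SpawnSketch {
  import java.util.concurrent.{ Callable, Executors, Future, TimeUnit }

  private val pool = Executors.newFixedThreadPool(2)

  def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
  def spawn[T](body: => T): Future[T]      = pool submit callable(body)

  def main(args: Array[String]): Unit = {
    val f1 = spawn { Thread.sleep(100); "dirs scanned" }
    val f2 = spawn { Thread.sleep(100); "jars scanned" }
    println(f1.get() + ", " + f2.get())   // get() blocks until both background tasks finish
    pool.shutdown()
    pool.awaitTermination(1, TimeUnit.SECONDS)
  }
}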
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 06b06c50a6..f71e067366 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -12,6 +12,7 @@ import scala.tools.nsc.util.OffsetPosition
import scala.collection.mutable.ListBuffer
import symtab.Flags
import JavaTokens._
+import language.implicitConversions
trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val global : Global
@@ -788,23 +789,24 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val idefs = members.toList ::: (sdefs flatMap forwarders)
(sdefs, idefs)
}
-
+ def annotationParents = List(
+ gen.scalaAnnotationDot(tpnme.Annotation),
+ Select(javaLangDot(nme.annotation), tpnme.Annotation),
+ gen.scalaAnnotationDot(tpnme.ClassfileAnnotation)
+ )
def annotationDecl(mods: Modifiers): List[Tree] = {
accept(AT)
accept(INTERFACE)
val pos = in.currentPos
val name = identForType()
- val parents = List(scalaDot(tpnme.Annotation),
- Select(javaLangDot(nme.annotation), tpnme.Annotation),
- scalaDot(tpnme.ClassfileAnnotation))
val (statics, body) = typeBody(AT, name)
def getValueMethodType(tree: Tree) = tree match {
case DefDef(_, nme.value, _, _, tpt, _) => Some(tpt.duplicate)
case _ => None
}
- var templ = makeTemplate(parents, body)
+ var templ = makeTemplate(annotationParents, body)
for (stat <- templ.body; tpt <- getValueMethodType(stat))
- templ = makeTemplate(parents, makeConstructor(List(tpt)) :: templ.body)
+ templ = makeTemplate(annotationParents, makeConstructor(List(tpt)) :: templ.body)
addCompanionObject(statics, atPos(pos) {
ClassDef(mods, name, List(), templ)
})
@@ -870,7 +872,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
skipAhead()
accept(RBRACE)
}
- ValDef(Modifiers(Flags.JAVA | Flags.STATIC), name, enumType, blankExpr)
+ // The STABLE flag is to signal to namer that this was read from a
+ // java enum, and so should be given a Constant type (thereby making
+ // it usable in annotations.)
+ ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name, enumType, blankExpr)
}
}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index 45b63b0ca0..d47756e757 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -10,6 +10,7 @@ import scala.tools.nsc.util._
import scala.reflect.internal.Chars._
import JavaTokens._
import scala.annotation.switch
+import language.implicitConversions
// Todo merge these better with Scanners
trait JavaScanners extends ast.parser.ScannersCommon {
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index 371f4bc4d8..72e6f32af1 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -10,6 +10,7 @@ import transform.ExplicitOuter
import ast.{ TreePrinters, Trees }
import java.io.{ StringWriter, PrintWriter }
import annotation.elidable
+import language.postfixOps
/** Ancillary bits of ParallelMatching which are better off
* out of the way.
@@ -29,11 +30,10 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
object Types {
import definitions._
- implicit def enrichType(x: Type): RichType = new RichType(x)
- val subrangeTypes = Set(ByteClass, ShortClass, CharClass, IntClass)
+ val subrangeTypes = Set[Symbol](ByteClass, ShortClass, CharClass, IntClass)
- class RichType(undecodedTpe: Type) {
+ implicit class RichType(undecodedTpe: Type) {
def tpe = decodedEqualsType(undecodedTpe)
def isAnyRef = tpe <:< AnyRefClass.tpe
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index e1ff88557e..b29fac225a 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -9,6 +9,7 @@ package matching
import transform.ExplicitOuter
import symtab.Flags
import scala.collection.mutable
+import language.implicitConversions
trait Matrix extends MatrixAdditions {
self: ExplicitOuter with ParallelMatching =>
@@ -255,4 +256,4 @@ trait Matrix extends MatrixAdditions {
recordSyntheticSym(owner.newVariable(n, pos, flagsLong) setInfo tpe)
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 43aad9f591..7346d9c59f 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -14,6 +14,7 @@ import transform.ExplicitOuter
import symtab.Flags
import mutable.ListBuffer
import annotation.elidable
+import language.postfixOps
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
index 56297f0195..b2a721c586 100644
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
@@ -8,6 +8,7 @@ package matching
import transform.ExplicitOuter
import PartialFunction._
+import language.postfixOps
trait PatternBindings extends ast.TreeDSL
{
@@ -133,4 +134,4 @@ trait PatternBindings extends ast.TreeDSL
}
val NoBinding: Bindings = new Bindings(Nil)
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala
index a908062b2f..88d600c113 100644
--- a/src/compiler/scala/tools/nsc/package.scala
+++ b/src/compiler/scala/tools/nsc/package.scala
@@ -6,11 +6,6 @@
package scala.tools
package object nsc {
- @deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
- type InterpreterSettings = interpreter.ISettings
- @deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
- val InterpreterResults = interpreter.Results
-
type Phase = scala.reflect.internal.Phase
val NoPhase = scala.reflect.internal.NoPhase
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 9254ec8628..48c4a9b5b3 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -138,7 +138,7 @@ object Plugin {
} yield entry)).distinct
val loader = loaderFor(alljars)
- alljars map (loadFrom(_, loader)) flatten
+ (alljars map (loadFrom(_, loader))).flatten
}
/** Instantiate a plugin class, given the class and
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index da913a1601..ba042b7b78 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -70,7 +70,7 @@ trait Plugins {
}
}
- val plugs = pick(roughPluginsList, Set(), phasesSet map (_.phaseName) toSet)
+ val plugs = pick(roughPluginsList, Set(), (phasesSet map (_.phaseName)).toSet)
/** Verify requirements are present. */
for (req <- settings.require.value ; if !(plugs exists (_.name == req)))
@@ -112,5 +112,5 @@ trait Plugins {
def pluginOptionsHelp: String =
(for (plug <- roughPluginsList ; help <- plug.optionsHelp) yield {
"\nOptions for plugin '%s':\n%s\n".format(plug.name, help)
- }) mkString
+ }).mkString
}
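
This hunk and several nearby ones (Plugin.scala above, AbsSettings.scala and MutableSettings.scala below) rewrite trailing postfix calls such as xs map f flatten into the explicit (xs map f).flatten, presumably because postfix operator notation now draws a -feature warning and can parse surprisingly against whatever follows it. A tiny illustration of the two spellings (hypothetical values):

object PostfixSketch {
  def main(args: Array[String]): Unit = {
    val groups = List(List(1, 2), List(3), Nil)

    // Postfix form: needs `import scala.language.postfixOps` to compile warning-free.
    // val flat1 = groups map (_.map(_ * 10)) flatten

    // Explicit form: no feature import, no parsing surprises.
    val flat2 = (groups map (_.map(_ * 10))).flatten
    println(flat2)   // List(10, 20, 30)
  }
}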
diff --git a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
deleted file mode 100644
index f55d0684c8..0000000000
--- a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2002-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package reporters
-
-import scala.tools.util.AbstractTimer
-
-/**
- * This class implements a timer that uses a Reporter to issue
- * timings.
- */
-class ReporterTimer(reporter: Reporter) extends AbstractTimer {
- def issue(msg: String, duration: Long) =
- reporter.info(null, "[" + msg + " in " + duration + "ms]", false)
-}
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index 9cea935a63..c1dad2da82 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -35,7 +35,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
case s: AbsSettings => this.userSetSettings == s.userSetSettings
case _ => false
}
- override def toString() = "Settings {\n%s}\n" format (userSetSettings map (" " + _ + "\n") mkString)
+ override def toString() = "Settings {\n%s}\n" format (userSetSettings map (" " + _ + "\n")).mkString
def toConciseString = userSetSettings.mkString("(", " ", ")")
def checkDependencies =
diff --git a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
index c010c6a3ea..2baff0bb1c 100644
--- a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
@@ -23,14 +23,14 @@ trait AestheticSettings {
def deprecation = settings.deprecation.value
def experimental = settings.Xexperimental.value
def fatalWarnings = settings.fatalWarnings.value
+ def feature = settings.feature.value
def future = settings.future.value
def logClasspath = settings.Ylogcp.value
def printStats = settings.Ystatistics.value
- def richExes = settings.YrichExes.value || sys.props.traceSourcePath.isSet
def target = settings.target.value
def unchecked = settings.unchecked.value
def verbose = settings.verbose.value
- def virtPatmat = settings.YvirtPatmat.value
+ def virtPatmat = !settings.XoldPatmat.value
/** Derived values */
def jvm = target startsWith "jvm"
diff --git a/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala b/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala
deleted file mode 100644
index 4de0c2332c..0000000000
--- a/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-/** TODO.
- */
-class ImmutableSettings
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index ea12300785..c4dd9a2a36 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -136,7 +136,7 @@ class MutableSettings(val errorFn: String => Unit)
val (p, args) = StringOps.splitWhere(s, _ == ':', true) getOrElse (return None)
// any non-Nil return value means failure and we return s unmodified
- tryToSetIfExists(p, args split "," toList, (s: Setting) => s.tryToSetColon _)
+ tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
}
// if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo")
@@ -183,8 +183,8 @@ class MutableSettings(val errorFn: String => Unit)
* The class loader defining `T` should provide resources `app.class.path`
* and `boot.class.path`. These resources should contain the application
* and boot classpaths in the same form as would be passed on the command line.*/
- def embeddedDefaults[T: Manifest]: Unit =
- embeddedDefaults(implicitly[Manifest[T]].erasure.getClassLoader)
+ def embeddedDefaults[T: ClassTag]: Unit =
+ embeddedDefaults(classTag[T].erasure.getClassLoader)
/** Initializes these settings for embedded use by a class from the given class loader.
* The class loader for `T` should provide resources `app.class.path`
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index e9a7e3dab4..88a89d54eb 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -11,6 +11,7 @@ package settings
import annotation.elidable
import scala.tools.util.PathResolver.Defaults
import scala.collection.mutable
+import language.{implicitConversions, existentials}
trait ScalaSettings extends AbsScalaSettings
with StandardScalaSettings
@@ -40,11 +41,11 @@ trait ScalaSettings extends AbsScalaSettings
protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce)
/** Internal use - syntax enhancements. */
- private class EnableSettings[T <: Setting](val s: T) {
- def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = true))
+ private class EnableSettings[T <: BooleanSetting](val s: T) {
+ def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = s.value))
def andThen(f: s.T => Unit): s.type = s withPostSetHook (setting => f(setting.value))
}
- private implicit def installEnableSettings[T <: Setting](s: T) = new EnableSettings(s)
+ private implicit def installEnableSettings[T <: BooleanSetting](s: T) = new EnableSettings(s)
/** Disable a setting */
def disable(s: Setting) = allSettings -= s
@@ -62,6 +63,7 @@ trait ScalaSettings extends AbsScalaSettings
val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
val d = OutputSetting (outputDirs, ".")
val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.")
+ val language = MultiStringSetting("-language", "feature", "Enable one or more language features.")
/**
* -X "Advanced" settings
@@ -87,7 +89,7 @@ trait ScalaSettings extends AbsScalaSettings
val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.")
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
- val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode.")
+ val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.")
@@ -106,6 +108,9 @@ trait ScalaSettings extends AbsScalaSettings
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
+ val XoldPatmat = BooleanSetting ("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.")
+ val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
+
/** Compatibility stubs for options whose value name did
* not previously match the option name.
*/
@@ -134,6 +139,7 @@ trait ScalaSettings extends AbsScalaSettings
val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error")
val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.")
val inlineHandlers = BooleanSetting ("-Yinline-handlers", "Perform exception handler inlining when possible.")
+ val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally suppressed due to high volume)")
val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
val log = PhasesSetting ("-Ylog", "Log operations during")
val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.")
@@ -141,9 +147,6 @@ trait ScalaSettings extends AbsScalaSettings
val noimports = BooleanSetting ("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.")
val nopredef = BooleanSetting ("-Yno-predef", "Compile without importing Predef.")
val noAdaptedArgs = BooleanSetting ("-Yno-adapted-args", "Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.")
- val Yprofile = PhasesSetting ("-Yprofile", "(Requires jvm -agentpath to contain yjgpagent) Profile CPU usage of")
- val YprofileMem = BooleanSetting ("-Yprofile-memory", "Profile memory, get heap snapshot after each compiler run (requires yjpagent, see above).")
- val YprofileClass = StringSetting ("-Yprofile-class", "class", "Name of profiler class.", "scala.tools.util.YourkitProfiling")
val Yrecursion = IntSetting ("-Yrecursion", "Set recursion depth used when locking symbols.", 0, Some((0, Int.MaxValue)), (_: String) => None)
val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.")
val Xshowtrees = BooleanSetting ("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs in formatted form.")
@@ -162,22 +165,18 @@ trait ScalaSettings extends AbsScalaSettings
val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
val refinementMethodDispatch
= ChoiceSetting ("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
- val globalClass = StringSetting ("-Yglobal-class", "class", "subclass of scala.tools.nsc.Global to use for compiler", "")
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
- val YrichExes = BooleanSetting ("-Yrich-exceptions", "Fancier exceptions. Set source search path with -D" + sys.SystemProperties.traceSourcePath.key)
val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
- val Ymacrocopypaste = BooleanSetting ("-Ymacro-copypaste", "Dump macro expansions in copypasteable representation.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
 val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
+ val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
- val YvirtPatmat = BooleanSetting ("-Yvirtpatmat", "Translate pattern matches into flatMap/orElse calls. See scala.MatchingStrategy.")
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
- val YnoProductN = BooleanSetting ("-Yno-productN", "Do not add ProductN to case classes")
def stop = stopAfter
@@ -189,7 +188,8 @@ trait ScalaSettings extends AbsScalaSettings
val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.")
val Yissuedebug = BooleanSetting("-Yissue-debug", "Print stack traces when a context issues an error.")
- val Ymacrodebug = BooleanSetting("-Ymacro-debug", "Trace macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions.")
+ val YmacrodebugLite = BooleanSetting("-Ymacro-debug-lite", "Trace essential macro-related activities.")
+ val YmacrodebugVerbose = BooleanSetting("-Ymacro-debug-verbose", "Trace all macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions.")
val Ypmatdebug = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.")
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
@@ -203,7 +203,7 @@ trait ScalaSettings extends AbsScalaSettings
val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enabling experimentalSettings
// Feature extensions
- val Xmacros = BooleanSetting("-Xmacros", "Enable macros.")
+ val Xmacros = BooleanSetting("-Xmacros", "Enable macros.") // [Martin] Can be retired now.
val XmacroSettings = MultiStringSetting("-Xmacro-settings", "option", "Custom settings for macros.")
 val XmacroPrimaryClasspath = PathSetting("-Xmacro-primary-classpath", "Classpath to load macros implementations from, defaults to compilation classpath (aka \"library classpath\").", "")
val XmacroFallbackClasspath = PathSetting("-Xmacro-fallback-classpath", "Classpath to load macros implementations from if they cannot be loaded from library classpath.", "")
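
The EnableSettings change earlier in this file's diff tightens the enabling combinator: instead of hard-coding toEnable foreach (_.value = true), the post-set hook now copies s.value, so turning the parent setting off propagates as well, and the enrichment is restricted to BooleanSetting. A hedged sketch of the pattern with a toy setting class (hypothetical names, not the compiler's Setting hierarchy):

object EnablingSketch {
  final class BoolSetting(val name: String) {
    private var hooks: List[BoolSetting => Unit] = Nil
    private var _value = false
    def value: Boolean = _value
    def value_=(b: Boolean): Unit = { _value = b; hooks foreach (h => h(this)) }
    def withPostSetHook(f: BoolSetting => Unit): this.type = { hooks ::= f; this }
    // Registers a hook that copies this setting's new value onto its dependents.
    def enabling(toEnable: List[BoolSetting]): this.type =
      withPostSetHook(s => toEnable foreach (t => t.value = s.value))
  }

  def main(args: Array[String]): Unit = {
    val inline   = new BoolSetting("-Yinline")
    val closelim = new BoolSetting("-Yclosure-elim")
    val optimise = new BoolSetting("-optimise") enabling List(inline, closelim)

    optimise.value = true
    println((inline.value, closelim.value))   // (true,true)
    optimise.value = false                    // the value-propagating hook also turns them off
    println((inline.value, closelim.value))   // (false,false)
  }
}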
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index c5b477c7bd..1bb0948168 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -32,6 +32,7 @@ trait StandardScalaSettings {
val deprecation = BooleanSetting ("-deprecation", "Emit warning and location for usages of deprecated APIs.")
val encoding = StringSetting ("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding)
val explaintypes = BooleanSetting ("-explaintypes", "Explain type errors in more detail.")
+ val feature = BooleanSetting ("-feature", "Emit warning and location for usages of features that should be imported explicitly.")
val g = ChoiceSetting ("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars")
val help = BooleanSetting ("-help", "Print a synopsis of standard options")
val make = ChoiceSetting ("-make", "policy", "Recompilation detection policy", List("all", "changed", "immediate", "transitive", "transitivenocp"), "all")
@@ -39,7 +40,7 @@ trait StandardScalaSettings {
val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.")
val optimise: BooleanSetting // depends on post hook which mutates other settings
val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.")
- val target = ChoiceSetting ("-target", "target", "Target platform for object files.", List("jvm-1.5", "msil"), "jvm-1.5")
+ val target = ChoiceSetting ("-target", "target", "Target platform for object files.", List("jvm-1.5", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"), "jvm-1.5")
val unchecked = BooleanSetting ("-unchecked", "Enable detailed unchecked (erasure) warnings.")
val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.")
val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 7eb04eaf40..15edac16d5 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -51,6 +51,46 @@ abstract class SymbolLoaders {
enterIfNew(owner, module, completer)
}
+ /** Enter package with given `name` into scope of `root`
+ * and give it `completer` as type.
+ */
+ def enterPackage(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
+ val pname = newTermName(name)
+ val preExisting = root.info.decls lookup pname
+ if (preExisting != NoSymbol) {
+ // Some jars (often, obfuscated ones) include a package and
+ // object with the same name. Rather than render them unusable,
+ // offer a setting to resolve the conflict one way or the other.
+ // This was motivated by the desire to use YourKit probes, which
+ // require yjp.jar at runtime. See SI-2089.
+ if (settings.termConflict.isDefault)
+ throw new TypeError(
+ root+" contains object and package with same name: "+
+ name+"\none of them needs to be removed from classpath"
+ )
+ else if (settings.termConflict.value == "package") {
+ global.warning(
+ "Resolving package/object name conflict in favor of package " +
+ preExisting.fullName + ". The object will be inaccessible."
+ )
+ root.info.decls.unlink(preExisting)
+ }
+ else {
+ global.warning(
+ "Resolving package/object name conflict in favor of object " +
+ preExisting.fullName + ". The package will be inaccessible."
+ )
+ return NoSymbol
+ }
+ }
+ // todo: find out why the initialization sequence for pkg/pkg.moduleClass differs from enterModule
+ val pkg = root.newPackage(pname)
+ pkg.moduleClass setInfo completer
+ pkg setInfo pkg.moduleClass.tpe
+ root.info.decls enter pkg
+ pkg
+ }
+
/** Enter class and module with given `name` into scope of `root`
* and give them `completer` as type.
*/
@@ -171,40 +211,6 @@ abstract class SymbolLoaders {
class PackageLoader(classpath: ClassPath[platform.BinaryRepr]) extends SymbolLoader {
protected def description = "package loader "+ classpath.name
- def enterPackage(root: Symbol, name: String, completer: SymbolLoader) {
- val preExisting = root.info.decls.lookup(newTermName(name))
- if (preExisting != NoSymbol) {
- // Some jars (often, obfuscated ones) include a package and
- // object with the same name. Rather than render them unusable,
- // offer a setting to resolve the conflict one way or the other.
- // This was motivated by the desire to use YourKit probes, which
- // require yjp.jar at runtime. See SI-2089.
- if (settings.termConflict.isDefault)
- throw new TypeError(
- root+" contains object and package with same name: "+
- name+"\none of them needs to be removed from classpath"
- )
- else if (settings.termConflict.value == "package") {
- global.warning(
- "Resolving package/object name conflict in favor of package " +
- preExisting.fullName + ". The object will be inaccessible."
- )
- root.info.decls.unlink(preExisting)
- }
- else {
- global.warning(
- "Resolving package/object name conflict in favor of object " +
- preExisting.fullName + ". The package will be inaccessible."
- )
- return
- }
- }
- val pkg = root.newPackage(newTermName(name))
- pkg.moduleClass.setInfo(completer)
- pkg.setInfo(pkg.moduleClass.tpe)
- root.info.decls.enter(pkg)
- }
-
protected def doComplete(root: Symbol) {
assert(root.isPackageClass, root)
root.setInfo(new PackageClassInfoType(newScope, root))
@@ -232,6 +238,16 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) {
val start = startTimer(classReadNanos)
classfileParser.parse(classfile, root)
+ if (root.associatedFile eq null) {
+ root match {
+ // In fact, the ModuleSymbol forwards its setter to the module class
+ case _: ClassSymbol | _: ModuleSymbol =>
+ debuglog("ClassfileLoader setting %s.associatedFile = %s".format(root.name, classfile))
+ root.associatedFile = classfile
+ case _ =>
+ debuglog("Not setting associatedFile to %s because %s is a %s".format(classfile, root.name, root.shortSymbolClass))
+ }
+ }
stopTimer(classReadNanos, start)
}
override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
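
The SymbolLoaders hunk above hoists enterPackage out of PackageLoader, makes it return a Symbol (NoSymbol when the conflicting object wins), and has the classfile loader record the associatedFile. A rough standalone sketch of the three-way package/object conflict policy; the names (TermConflictSketch, Resolution) are invented and only mirror the branches shown above:

    object TermConflictSketch {
      sealed trait Resolution
      case object Error       extends Resolution // settings.termConflict.isDefault: refuse to continue
      case object KeepPackage extends Resolution // settings.termConflict.value == "package"
      case object KeepObject  extends Resolution // any other explicit value: keep the object

      def resolve(policy: Resolution, name: String): String = policy match {
        case Error       => throw new RuntimeException("contains object and package with same name: " + name)
        case KeepPackage => "unlink the object, enter the package " + name
        case KeepObject  => "keep the object, do not enter the package " + name
      }

      // the SI-2089 / yjp.jar scenario mentioned in the comment above
      def main(args: Array[String]): Unit =
        println(resolve(KeepPackage, "yjp"))
    }
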
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index fb85ebeeb0..75b486ca7d 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -6,7 +6,4 @@
package scala.tools.nsc
package symtab
-import ast.{Trees, TreePrinters, DocComments}
-import util._
-
abstract class SymbolTable extends reflect.internal.SymbolTable \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
index e62070a239..3cf5cc2f54 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
@@ -7,6 +7,8 @@ package scala.tools.nsc
package symtab
import scala.collection.{ mutable, immutable }
+import language.implicitConversions
+import language.postfixOps
/** Printing the symbol graph (for those symbols attached to an AST node)
* after each phase.
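
The two imports added above exist to silence the new -feature warnings for postfix operators and implicit conversions. A self-contained sketch of the same pattern (FeatureImportSketch and its members are invented names):

    object FeatureImportSketch {
      import language.implicitConversions
      import language.postfixOps

      // would warn under -feature without the implicitConversions import
      implicit def intToText(i: Int): String = i.toString

      def main(args: Array[String]): Unit = {
        val s: String = 42                 // uses the implicit conversion
        println(List(s, "done") mkString)  // postfix call, silenced by postfixOps
      }
    }
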
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 3d3cea75d6..7373a610d7 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -9,7 +9,6 @@ package classfile
import java.io.{ File, IOException }
import java.lang.Integer.toHexString
-
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
import scala.annotation.switch
@@ -24,91 +23,86 @@ import scala.tools.nsc.io.AbstractFile
abstract class ClassfileParser {
val global: Global
import global._
-
+ import definitions.{ AnnotationClass, ClassfileAnnotationClass }
import scala.reflect.internal.ClassfileConstants._
import Flags._
protected var in: AbstractFileReader = _ // the class file reader
protected var clazz: Symbol = _ // the class symbol containing dynamic members
protected var staticModule: Symbol = _ // the module symbol containing static members
- protected var instanceDefs: Scope = _ // the scope of all instance definitions
- protected var staticDefs: Scope = _ // the scope of all static definitions
+ protected var instanceScope: Scope = _ // the scope of all instance definitions
+ protected var staticScope: Scope = _ // the scope of all static definitions
protected var pool: ConstantPool = _ // the classfile's constant pool
protected var isScala: Boolean = _ // does class file describe a scala class?
protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
- protected var hasMeta: Boolean = _ // does class file contain jaco meta attribute?s
protected var busy: Option[Symbol] = None // lock to detect recursive reads
- private var externalName: Name = _ // JVM name of the current class
+ protected var currentClass: Name = _ // JVM name of the current class
protected var classTParams = Map[Name,Symbol]()
protected var srcfile0 : Option[AbstractFile] = None
+ protected def moduleClass: Symbol = staticModule.moduleClass
def srcfile = srcfile0
- private object metaParser extends MetaParser {
- val global: ClassfileParser.this.global.type = ClassfileParser.this.global
- }
+ private def currentIsTopLevel = currentClass.toString.indexOf('$') < 0
private object unpickler extends scala.reflect.internal.pickling.UnPickler {
val global: ClassfileParser.this.global.type = ClassfileParser.this.global
}
- def parse(file: AbstractFile, root: Symbol) = try {
- debuglog("[class] >> " + root.fullName)
+ private def handleMissing(e: MissingRequirementError) = {
+ if (settings.debug.value) e.printStackTrace
+ throw new IOException("Missing dependency '" + e.req + "', required by " + in.file)
+ }
+ private def handleError(e: Exception) = {
+ if (settings.debug.value) e.printStackTrace()
+ throw new IOException("class file '%s' is broken\n(%s/%s)".format(
+ in.file,
+ e.getClass,
+ if (e.getMessage eq null) "" else e.getMessage)
+ )
+ }
+ private def mismatchError(c: Symbol) = {
+ throw new IOException("class file '%s' has location not matching its contents: contains ".format(in.file) + c)
+ }
- def handleMissing(e: MissingRequirementError) = {
- if (settings.debug.value) e.printStackTrace
- throw new IOException("Missing dependency '" + e.req + "', required by " + in.file)
+ private def parseErrorHandler[T]: PartialFunction[Throwable, T] = {
+ case e: MissingRequirementError => handleMissing(e)
+ case e: RuntimeException => handleError(e)
+ }
+ @inline private def pushBusy[T](sym: Symbol)(body: => T): T = {
+ busy match {
+ case Some(`sym`) => throw new IOException("unsatisfiable cyclic dependency in '%s'".format(sym))
+ case Some(sym1) => throw new IOException("illegal class file dependency between '%s' and '%s'".format(sym, sym1))
+ case _ => ()
}
- def handleError(e: Exception) = {
- if (settings.debug.value) e.printStackTrace()
- throw new IOException("class file '" + in.file + "' is broken\n(" + {
- if (e.getMessage() != null) e.getMessage()
- else e.getClass.toString
- } + ")")
- }
- assert(!busy.isDefined, {
- val (s1, s2) = (busy.get, root)
- if (s1 eq s2) "unsatisfiable cyclic dependency in '%s'".format(s1)
- else "illegal class file dependency between '%s' and '%s'".format(s1, s2)
- })
-
- busy = Some(root)
- /*root match {
- case cs: ClassSymbol =>
- cs.classFile = file
- case ms: ModuleSymbol =>
- ms.moduleClass.asInstanceOf[ClassSymbol].classFile = file
- case _ =>
- println("Skipping class: " + root + ": " + root.getClass)
- }
-*/
- this.in = new AbstractFileReader(file)
- if (root.isModule) {
- this.clazz = root.companionClass
- this.staticModule = root
- } else {
- this.clazz = root
- this.staticModule = root.companionModule
- }
- this.isScala = false
- this.hasMeta = false
- try {
+ busy = Some(sym)
+ try body
+ catch parseErrorHandler
+ finally busy = None
+ }
+ @inline private def raiseLoaderLevel[T](body: => T): T = {
+ loaders.parentsLevel += 1
+ try body
+ finally loaders.parentsLevel -= 1
+ }
+
+ def parse(file: AbstractFile, root: Symbol): Unit = {
+ debuglog("[class] >> " + root.fullName)
+
+ pushBusy(root) {
+ this.in = new AbstractFileReader(file)
+ this.clazz = if (root.isModule) root.companionClass else root
+ this.staticModule = clazz.companionModule
+ this.isScala = false
+
parseHeader
this.pool = new ConstantPool
parseClass()
}
- catch {
- case e: MissingRequirementError => handleMissing(e)
- case e: RuntimeException => handleError(e)
- }
- } finally {
- busy = None
}
- protected def statics: Symbol = staticModule.moduleClass
-
private def parseHeader() {
val magic = in.nextInt
if (magic != JAVA_MAGIC)
@@ -501,12 +495,11 @@ abstract class ClassfileParser {
var sawPrivateConstructor = false
def parseClass() {
- val jflags = in.nextChar
+ val jflags = in.nextChar
val isAnnotation = hasAnnotation(jflags)
- var sflags = toScalaClassFlags(jflags)
- var nameIdx = in.nextChar
- externalName = pool.getClassName(nameIdx)
- val c = if (externalName.toString.indexOf('$') < 0) pool.getClassSymbol(nameIdx) else clazz
+ var sflags = toScalaClassFlags(jflags)
+ var nameIdx = in.nextChar
+ currentClass = pool.getClassName(nameIdx)
/** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
* Updates the read pointer of 'in'. */
@@ -516,39 +509,37 @@ abstract class ClassfileParser {
val ifaces = in.nextChar
in.bp += ifaces * 2 // .. and iface count interfaces
List(definitions.AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information
- } else {
- try {
- loaders.parentsLevel += 1
- val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
- else pool.getSuperClass(in.nextChar).tpe
- val ifaceCount = in.nextChar
- var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
- if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
- superType :: ifaces
- } finally {
- loaders.parentsLevel -= 1
- }
+ }
+ else raiseLoaderLevel {
+ val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
+ else pool.getSuperClass(in.nextChar).tpe
+ val ifaceCount = in.nextChar
+ var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
+ if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
+ superType :: ifaces
}
}
- if (c != clazz && externalName.toString.indexOf("$") < 0) {
- if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
- else throw new IOException("class file '" + in.file + "' contains wrong " + c)
+ val c = if (currentIsTopLevel) pool.getClassSymbol(nameIdx) else clazz
+ if (currentIsTopLevel) {
+ if (c != clazz) {
+ if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
+ else mismatchError(c)
+ }
}
addEnclosingTParams(clazz)
- parseInnerClasses() // also sets the isScala / isScalaRaw / hasMeta flags, see r15956
+ parseInnerClasses() // also sets the isScala / isScalaRaw flags, see r15956
// get the class file parser to reuse scopes.
- instanceDefs = newScope
- staticDefs = newScope
+ instanceScope = newScope
+ staticScope = newScope
- val classInfo = ClassInfoType(parseParents, instanceDefs, clazz)
- val staticInfo = ClassInfoType(List(), staticDefs, statics)
+ val classInfo = ClassInfoType(parseParents, instanceScope, clazz)
+ val staticInfo = ClassInfoType(List(), staticScope, moduleClass)
- if (!isScala && !isScalaRaw) {
-// println("Entering inner classes for " + clazz)
+ if (!isScala && !isScalaRaw)
enterOwnInnerClasses
- }
+
val curbp = in.bp
skipMembers() // fields
skipMembers() // methods
@@ -556,34 +547,31 @@ abstract class ClassfileParser {
clazz setFlag sflags
setPrivateWithin(clazz, jflags)
setPrivateWithin(staticModule, jflags)
- if (!hasMeta || isScalaRaw) {
- clazz.setInfo(classInfo)
- }
- statics setInfo staticInfo
- staticModule.setInfo(statics.tpe)
+ clazz.setInfo(classInfo)
+ moduleClass setInfo staticInfo
+ staticModule.setInfo(moduleClass.tpe)
staticModule.setFlag(JAVA)
staticModule.moduleClass.setFlag(JAVA)
// attributes now depend on having infos set already
parseAttributes(clazz, classInfo)
- loaders.pendingLoadActions = { () =>
+ def queueLoad() {
in.bp = curbp
- val fieldCount = in.nextChar
- for (i <- 0 until fieldCount) parseField()
+ 0 until in.nextChar foreach (_ => parseField())
sawPrivateConstructor = false
- val methodCount = in.nextChar
- for (i <- 0 until methodCount) parseMethod()
- if (!sawPrivateConstructor &&
- (instanceDefs.lookup(nme.CONSTRUCTOR) == NoSymbol &&
- (sflags & INTERFACE) == 0L))
- {
- //Console.println("adding constructor to " + clazz);//DEBUG
- instanceDefs enter clazz.newClassConstructor(NoPosition)
- }
- ()
- } :: loaders.pendingLoadActions
+ 0 until in.nextChar foreach (_ => parseMethod())
+ val needsConstructor = (
+ !sawPrivateConstructor
+ && instanceScope.lookup(nme.CONSTRUCTOR) == NoSymbol
+ && (sflags & INTERFACE) == 0
+ )
+ if (needsConstructor)
+ instanceScope enter clazz.newClassConstructor(NoPosition)
+ }
+
+ loaders.pendingLoadActions ::= (queueLoad _)
if (loaders.parentsLevel == 0) {
- while (!loaders.pendingLoadActions.isEmpty) {
+ while (loaders.pendingLoadActions.nonEmpty) {
val item = loaders.pendingLoadActions.head
loaders.pendingLoadActions = loaders.pendingLoadActions.tail
item()
@@ -627,12 +615,11 @@ abstract class ClassfileParser {
// sealed java enums (experimental)
if (isEnum && opt.experimental) {
- // need to give singleton type
- sym setInfo info.narrow
- if (!sym.superClass.isSealed)
- sym.superClass setFlag SEALED
+ val enumClass = sym.owner.linkedClassOfClass
+ if (!enumClass.isSealed)
+ enumClass setFlag (SEALED | ABSTRACT)
- sym.superClass addChild sym
+ enumClass addChild sym
}
}
}
@@ -646,8 +633,6 @@ abstract class ClassfileParser {
sawPrivateConstructor = true
in.skip(2); skipAttributes()
} else {
- if ((jflags & JAVA_ACC_BRIDGE) != 0)
- sflags |= BRIDGE
if ((sflags & PRIVATE) != 0L && global.settings.optimise.value) {
in.skip(4); skipAttributes()
} else {
@@ -658,7 +643,7 @@ abstract class ClassfileParser {
info match {
case MethodType(params, restpe) =>
// if this is a non-static inner class, remove the explicit outer parameter
- val newParams = innerClasses.get(externalName) match {
+ val newParams = innerClasses.get(currentClass) match {
case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) =>
assert(params.head.tpe.typeSymbol == clazz.owner, params.head.tpe.typeSymbol + ": " + clazz.owner)
params.tail
@@ -732,7 +717,7 @@ abstract class ClassfileParser {
}
val newtparam = sym.newExistential(newTypeName("?"+i), sym.pos) setInfo bounds
existentials += newtparam
- xs += newtparam.tpe //@M should probably be .tpeHK
+ xs += newtparam.tpeHK
i += 1
case _ =>
xs += sig2type(tparams, skiptvs)
@@ -746,7 +731,7 @@ abstract class ClassfileParser {
} else {
// raw type - existentially quantify all type parameters
val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
- val t = typeRef(pre, classSym, eparams.map(_.tpe))
+ val t = typeRef(pre, classSym, eparams.map(_.tpeHK))
val res = newExistentialType(eparams, t)
if (settings.debug.value && settings.verbose.value)
println("raw type " + classSym + " -> " + res)
@@ -774,8 +759,12 @@ abstract class ClassfileParser {
// make unbounded Array[T] where T is a type variable into Array[T with Object]
// (this is necessary because such arrays have a representation which is incompatible
      // with arrays of primitive types.)
- if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe))
+ // NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
+ // if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
+ // see also RestrictJavaArraysMap (when compiling java sources directly)
+ if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe)) {
elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
+ }
definitions.arrayType(elemtp)
case '(' =>
@@ -843,7 +832,7 @@ abstract class ClassfileParser {
while (index < end) {
        parents += sig2type(tparams, false) // here the variance doesn't matter
}
- ClassInfoType(parents.toList, instanceDefs, sym)
+ ClassInfoType(parents.toList, instanceScope, sym)
}
GenPolyType(ownTypeParams, tpe)
} // sigToType
@@ -870,9 +859,8 @@ abstract class ClassfileParser {
sym.setInfo(newType)
if (settings.debug.value && settings.verbose.value)
println("" + sym + "; signature = " + sig + " type = " + newType)
- hasMeta = true
- } else
- in.skip(attrLen)
+ }
+ else in.skip(attrLen)
case tpnme.SyntheticATTR =>
sym.setFlag(SYNTHETIC)
in.skip(attrLen)
@@ -896,10 +884,6 @@ abstract class ClassfileParser {
in.skip(attrLen)
case tpnme.ScalaATTR =>
isScalaRaw = true
- case tpnme.JacoMetaATTR =>
- val meta = pool.getName(in.nextChar).toString.trim()
- metaParser.parse(meta, sym, symtype)
- this.hasMeta = true
// Attribute on methods of java annotation classes when that method has a default
case tpnme.AnnotationDefaultATTR =>
sym.addAnnotation(definitions.AnnotationDefaultAttr)
@@ -1065,19 +1049,18 @@ abstract class ClassfileParser {
}
// begin parseAttributes
- val attrCount = in.nextChar
- for (i <- 0 until attrCount) parseAttribute()
+ for (i <- 0 until in.nextChar) parseAttribute()
}
/** Enter own inner classes in the right scope. It needs the scopes to be set up,
   * and, implicitly, the current class' superclasses.
*/
private def enterOwnInnerClasses() {
- def className(name: Name): Name = {
+ def className(name: Name): Name =
name.subName(name.lastPos('.') + 1, name.length)
- }
- def enterClassAndModule(entry: InnerClassEntry, completer: global.loaders.SymbolLoader, jflags: Int) {
+ def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) {
+ val completer = new global.loaders.ClassfileLoader(file)
val name = entry.originalName
var sflags = toScalaClassFlags(jflags)
val owner = getOwner(jflags)
@@ -1086,6 +1069,8 @@ abstract class ClassfileParser {
val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
innerModule.moduleClass setInfo global.loaders.moduleClassLoader
+ List(innerClass, innerModule.moduleClass) foreach (_.associatedFile = file)
+
scope enter innerClass
scope enter innerModule
@@ -1103,11 +1088,11 @@ abstract class ClassfileParser {
for (entry <- innerClasses.values) {
// create a new class member for immediate inner classes
- if (entry.outerName == externalName) {
+ if (entry.outerName == currentClass) {
val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
throw new AssertionError(entry.externalName)
}
- enterClassAndModule(entry, new global.loaders.ClassfileLoader(file), entry.jflags)
+ enterClassAndModule(entry, file, entry.jflags)
}
}
}
@@ -1126,11 +1111,6 @@ abstract class ClassfileParser {
val attrLen = in.nextInt
attrName match {
case tpnme.SignatureATTR =>
- if (!isScala)
- hasMeta = true
- in.skip(attrLen)
- case tpnme.JacoMetaATTR =>
- this.hasMeta = true
in.skip(attrLen)
case tpnme.ScalaSignatureATTR =>
isScala = true
@@ -1195,10 +1175,10 @@ abstract class ClassfileParser {
def innerSymbol(externalName: Name, innerName: Name, static: Boolean): Symbol = {
def getMember(sym: Symbol, name: Name): Symbol =
if (static)
- if (sym == clazz) staticDefs.lookup(name)
+ if (sym == clazz) staticScope.lookup(name)
else sym.companionModule.info.member(name)
else
- if (sym == clazz) instanceDefs.lookup(name)
+ if (sym == clazz) instanceScope.lookup(name)
else sym.info.member(name)
innerClasses.get(externalName) match {
@@ -1221,16 +1201,13 @@ abstract class ClassfileParser {
s
case None =>
- val cls = classNameToSymbol(externalName)
- cls
- //if (static) cls.companionClass else cls
+ classNameToSymbol(externalName)
}
}
get(externalName) match {
case Some(entry) =>
- val clazz = innerSymbol(entry.externalName, entry.originalName, isStatic(entry.jflags))
- clazz
+ innerSymbol(entry.externalName, entry.originalName, isStatic(entry.jflags))
case None =>
classNameToSymbol(externalName)
}
@@ -1264,10 +1241,10 @@ abstract class ClassfileParser {
}
protected def getOwner(flags: Int): Symbol =
- if (isStatic(flags)) statics else clazz
+ if (isStatic(flags)) moduleClass else clazz
protected def getScope(flags: Int): Scope =
- if (isStatic(flags)) staticDefs else instanceDefs
+ if (isStatic(flags)) staticScope else instanceScope
private def setPrivateWithin(sym: Symbol, jflags: Int) {
if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
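
The largest change above replaces parse's inline bookkeeping with two bracketing helpers, pushBusy (cycle/duplicate detection around the busy field) and raiseLoaderLevel (balanced increment/decrement of loaders.parentsLevel), and renames externalName/instanceDefs/staticDefs to currentClass/instanceScope/staticScope. A standalone sketch of that acquire-run-restore shape, with invented names and plain strings in place of compiler symbols:

    object BracketSketch {
      private var busy: Option[String] = None
      private var parentsLevel = 0

      def pushBusy[T](name: String)(body: => T): T = {
        busy match {
          case Some(`name`) => throw new IllegalStateException("unsatisfiable cyclic dependency in '" + name + "'")
          case Some(other)  => throw new IllegalStateException("illegal dependency between '" + name + "' and '" + other + "'")
          case None         => ()
        }
        busy = Some(name)
        try body finally busy = None    // always cleared, even if parsing throws
      }

      def raiseLoaderLevel[T](body: => T): T = {
        parentsLevel += 1
        try body finally parentsLevel -= 1
      }

      def main(args: Array[String]): Unit =
        println(pushBusy("Foo.class")(raiseLoaderLevel("parsed at level " + parentsLevel)))
    }
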
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 775a7a9d38..862a3ffdc7 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -165,7 +165,7 @@ abstract class ICodeReader extends ClassfileParser {
else if (name == fulltpnme.RuntimeNull)
definitions.NullClass
else if (nme.isImplClassName(name)) {
- val iface = definitions.getClass(nme.interfaceName(name))
+ val iface = definitions.getClass(tpnme.interfaceName(name))
log("forcing " + iface.owner + " at phase: " + phase + " impl: " + iface.implClass)
iface.owner.info // force the mixin type-transformer
definitions.getClass(name)
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
deleted file mode 100644
index eb8e7a14a5..0000000000
--- a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
+++ /dev/null
@@ -1,166 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-package classfile
-
-import java.util.{StringTokenizer, NoSuchElementException}
-
-import scala.collection.mutable.ListBuffer
-
-abstract class MetaParser{
-
- val global: Global
- import global._
-
- private var scanner: StringTokenizer = _
- private var owner: Symbol = _
- private var ownertype: Type = _
- private var token: String = _
- private var locals: Scope = null
-
- def parse(meta: String, sym: Symbol, symtype: Type) {
- //System.out.println("parse meta for " + sym + ":" + meta + ", locals = " + locals);//DEBUG
- this.scanner = new StringTokenizer(meta, "()[], \t<;", true)
- this.owner = sym
- this.ownertype = symtype
- nextToken()
- if (token == "class") parseClass()
- else if (token == "method") parseMethod()
- else if (token == "field") parseField()
- else if (token == "constr") parseConstr()
- else owner.setInfo(symtype);
- }
-
- protected def nextToken() {
- try {
- do { token = scanner.nextToken().trim() } while (token.length() == 0)
- } catch {
- case ex: NoSuchElementException => token = ""
- }
- }
-
- protected def parseType(): Type = {
- val str = token
- nextToken()
- val sym = locals.lookup(newTypeName(str))
- if (sym != NoSymbol) sym.tpe
- else {
- val tp = definitions.getRequiredClass(str).tpe;
- if (token != "[") tp
- else {
- val args = new ListBuffer[Type];
- do {
- nextToken(); args += parseType();
- } while (token == ",");
- nextToken();
- appliedType(tp, args.toList)
- }
- }
- }
-
- protected def parseTypeParam(): Symbol = {
- val vflag =
- if (token == "+") { nextToken(); Flags.COVARIANT }
- else if (token == "-") { nextToken(); Flags.CONTRAVARIANT }
- else 0;
- assert(token startsWith "?", token)
- val sym = owner.newTypeParameter(newTypeName(token)).setFlag(vflag)
- nextToken()
- val lo =
- if (token == ">") { nextToken(); parseType() }
- else definitions.NothingClass.tpe
- val hi =
- if (token == "<") { nextToken(); parseType() }
- else definitions.AnyClass.tpe
- sym.setInfo(TypeBounds(lo, hi))
- locals enter sym;
- sym
- }
-
- protected def parseTypeParams(): List[Symbol] = {
- nextToken()
- val syms = new ListBuffer[Symbol]
- if (token != "]") {
- syms += parseTypeParam()
- while (token == ",") {
- nextToken(); syms += parseTypeParam();
- }
- }
- assert(token == "]")
- syms.toList
- }
-
- protected def parseParams(): List[Type] = {
- nextToken()
- val tps = new ListBuffer[Type]
- if (token != ")") {
- tps += parseType()
- while (token == ",") {
- nextToken(); tps += parseType()
- }
- }
- assert(token == ")")
- tps.toList
- }
-
- protected def parseClass() {
- locals = newScope
- def parse(): Type = {
- nextToken()
- if (token == "[") {
- PolyType(parseTypeParams(), parse())
- } else if (token == "extends") {
- val tps = new ListBuffer[Type]
- do {
- nextToken(); tps += parseType()
- } while (token == "with");
- ownertype match {
- case ClassInfoType(parents, decls, clazz) =>
- ClassInfoType(tps.toList, decls, clazz)
- }
- } else ownertype
- }
- owner.setInfo(parse())
- assert(token == ";")
- }
-
- protected def parseMethod() {
- val globals = locals
- locals = if (locals eq null) newScope else newNestedScope(locals)
- def parse(): Type = {
- nextToken();
- if (token == "[") PolyType(parseTypeParams(), parse())
- else if (token == "(") {
- val formals = parseParams()
- MethodType(owner.newSyntheticValueParams(formals), parse())
- }
- else parseType()
- }
- owner.setInfo(parse())
- locals = globals
- assert(token == ";")
- }
-
- protected def parseField() {
- nextToken()
- owner.setInfo(parseType())
- assert(token == ";")
- }
-
- protected def parseConstr() {
- def parse(): Type = {
- nextToken()
- if (token == "(") {
- val formals = parseParams()
- MethodType(owner.newSyntheticValueParams(formals), parse())
- }
- else owner.owner.tpe
- }
- owner.setInfo(parse())
- assert(token == ";")
- }
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index edbe6df472..192cc94b90 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -62,9 +62,16 @@ abstract class Pickler extends SubComponent {
// when we pickle it: so let's report an error instead. We know next
// to nothing about what happened, but our supposition is a lot better
// than "bad type: <error>" in terms of explanatory power.
- for (t <- unit.body ; if t.isErroneous) {
- unit.error(t.pos, "erroneous or inaccessible type")
- return
+ for (t <- unit.body) {
+ if (t.isErroneous) {
+ unit.error(t.pos, "erroneous or inaccessible type")
+ return
+ }
+
+ if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
+ unit.error(t.pos, "macro has not been expanded")
+ return
+ }
}
pickle(unit.body)
@@ -149,7 +156,7 @@ abstract class Pickler extends SubComponent {
putChildren(sym, children.toList sortBy (_.sealedSortName))
}
- for (annot <- sym.annotations filter (ann => ann.isStatic && !ann.isErroneous) reverse)
+ for (annot <- (sym.annotations filter (ann => ann.isStatic && !ann.isErroneous)).reverse)
putAnnotation(sym, annot)
}
else if (sym != NoSymbol) {
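
The Pickler hunk above now also refuses to pickle a unit that still contains an unexpanded term macro. A standalone sketch of that fail-fast scan, using an invented Node model instead of compiler Trees:

    object PrePickleCheckSketch {
      sealed trait Node { def isErroneous: Boolean; def isUnexpandedMacro: Boolean }
      case class Leaf(isErroneous: Boolean = false, isUnexpandedMacro: Boolean = false) extends Node

      def validate(nodes: List[Node]): Either[String, Unit] =
        nodes.collectFirst {
          case n if n.isErroneous       => "erroneous or inaccessible type"
          case n if n.isUnexpandedMacro => "macro has not been expanded"
        }.toLeft(())

      def main(args: Array[String]): Unit = {
        println(validate(List(Leaf())))                          // Right(()): safe to pickle
        println(validate(List(Leaf(isUnexpandedMacro = true))))  // Left(macro has not been expanded)
      }
    }
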
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index 4b847fa94a..028d6f2484 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -12,6 +12,7 @@ import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute,
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.UnPickler
import ch.epfl.lamp.compiler.msil.Type.TMVarUsage
+import language.implicitConversions
/**
* @author Nikolay Mihaylov
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 5a11926048..e5fc98f23c 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -66,7 +66,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
private def newImplClass(iface: Symbol): Symbol = {
val inClass = iface.owner.isClass
- val implName = nme.implClassName(iface.name)
+ val implName = tpnme.implClassName(iface.name)
val implFlags = (iface.flags & ~(INTERFACE | lateINTERFACE)) | IMPLCLASS
val impl0 = (
@@ -204,10 +204,9 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
def transformMixinInfo(tp: Type): Type = tp match {
case ClassInfoType(parents, decls, clazz) =>
- if (clazz.needsImplClass) {
- clazz setFlag lateINTERFACE
- implClass(clazz) // generate an impl class
- }
+ if (clazz.needsImplClass)
+ implClass(clazz setFlag lateINTERFACE) // generate an impl class
+
val parents1 = parents match {
case Nil => Nil
case hd :: tl =>
@@ -215,14 +214,12 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
if (clazz.isTrait) erasedTypeRef(ObjectClass) :: tl
else parents
}
- val decls1 = scopeTransform(clazz) { decls filter (sym =>
- if (clazz.isInterface) isInterfaceMember(sym)
- else (!sym.isType || sym.isClass))
- }
-
- //if (!clazz.isPackageClass) System.out.println("Decls of "+clazz+" after explicitOuter = " + decls1);//DEBUG
- //if ((parents1 eq parents) && (decls1 eq decls)) tp
- //else
+ val decls1 = scopeTransform(clazz)(
+ decls filter (sym =>
+ if (clazz.isInterface) isInterfaceMember(sym)
+ else sym.isClass || sym.isTerm
+ )
+ )
ClassInfoType(parents1, decls1, clazz)
case _ =>
tp
@@ -242,27 +239,29 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
}
}
- private def ifaceMemberDef(tree: Tree): Tree =
- if (!tree.isDef || !isInterfaceMember(tree.symbol)) EmptyTree
- else if (needsImplMethod(tree.symbol)) DefDef(tree.symbol, EmptyTree)
- else tree
+ private def createMemberDef(tree: Tree, isForInterface: Boolean)(create: Tree => Tree) = {
+ val isInterfaceTree = tree.isDef && isInterfaceMember(tree.symbol)
+ if (isInterfaceTree && needsImplMethod(tree.symbol))
+ create(tree)
+ else if (isInterfaceTree == isForInterface)
+ tree
+ else
+ EmptyTree
+ }
+ private def implMemberDef(tree: Tree): Tree = createMemberDef(tree, false)(implMethodDef)
+ private def ifaceMemberDef(tree: Tree): Tree = createMemberDef(tree, true)(t => DefDef(t.symbol, EmptyTree))
private def ifaceTemplate(templ: Template): Template =
treeCopy.Template(templ, templ.parents, emptyValDef, templ.body map ifaceMemberDef)
- private def implMethodDef(tree: Tree, ifaceMethod: Symbol): Tree =
- implMethodMap.get(ifaceMethod) match {
- case Some(implMethod) =>
- tree.symbol = implMethod
- new ChangeOwnerAndReturnTraverser(ifaceMethod, implMethod)(tree)
- case None =>
- abort("implMethod missing for " + ifaceMethod)
- }
-
- private def implMemberDef(tree: Tree): Tree =
- if (!tree.isDef || !isInterfaceMember(tree.symbol)) tree
- else if (needsImplMethod(tree.symbol)) implMethodDef(tree, tree.symbol)
- else EmptyTree
+ /** Transforms the member tree containing the implementation
+ * into a member of the impl class.
+ */
+ private def implMethodDef(tree: Tree): Tree = (
+ implMethodMap get tree.symbol
+ map (impl => new ChangeOwnerAndReturnTraverser(tree.symbol, impl)(tree setSymbol impl))
+ getOrElse abort("implMethod missing for " + tree.symbol)
+ )
/** Add mixin constructor definition
* def $init$(): Unit = ()
@@ -309,8 +308,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
// body until now, because the typer knows that Any has no
// constructor and won't accept a call to super.init.
assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
- val superCall = Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)
- Block(List(superCall), expr)
+ Block(List(Apply(gen.mkSuperSelect, Nil)), expr)
case Block(stats, expr) =>
// needs `hasSymbol` check because `supercall` could be a block (named / default args)
@@ -334,14 +332,12 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
case Template(parents, self, body) =>
val parents1 = sym.owner.info.parents map (t => TypeTree(t) setPos tree.pos)
treeCopy.Template(tree, parents1, emptyValDef, body)
- case This(_) =>
- if (sym.needsImplClass) {
- val impl = implClass(sym)
- var owner = currentOwner
- while (owner != sym && owner != impl) owner = owner.owner;
- if (owner == impl) This(impl) setPos tree.pos
- else tree
- } else tree
+ case This(_) if sym.needsImplClass =>
+ val impl = implClass(sym)
+ var owner = currentOwner
+ while (owner != sym && owner != impl) owner = owner.owner;
+ if (owner == impl) This(impl) setPos tree.pos
+ else tree
/* !!!
case Super(qual, mix) =>
val mix1 = mix
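
The AddInterfaces hunk above folds ifaceMemberDef and implMemberDef into one createMemberDef helper: an interface member that needs an impl method is handed to the supplied constructor, otherwise the tree is kept or dropped depending on which side is being built. A simplified standalone model with invented names (Member, strings standing in for trees):

    object MemberSplitSketch {
      case class Member(name: String, isInterfaceMember: Boolean, needsImplMethod: Boolean)

      def createMemberDef(m: Member, isForInterface: Boolean)(create: Member => String): Option[String] =
        if (m.isInterfaceMember && m.needsImplMethod) Some(create(m))  // both sides get something
        else if (m.isInterfaceMember == isForInterface) Some(m.name)   // kept as-is on the matching side
        else None                                                      // dropped (EmptyTree)

      def ifaceMemberDef(m: Member) = createMemberDef(m, isForInterface = true)(x => "abstract " + x.name)
      def implMemberDef(m: Member)  = createMemberDef(m, isForInterface = false)(x => "impl$" + x.name)

      def main(args: Array[String]): Unit = {
        val m = Member("foo", isInterfaceMember = true, needsImplMethod = true)
        println(ifaceMemberDef(m)) // Some(abstract foo): declaration stays in the interface
        println(implMemberDef(m))  // Some(impl$foo): the body moves to the impl class
      }
    }
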
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index eea87c8ba6..bbdf10a021 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -9,6 +9,7 @@ package transform
import symtab._
import Flags._
import scala.collection._
+import language.postfixOps
abstract class CleanUp extends Transform with ast.TreeDSL {
import global._
@@ -610,7 +611,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val ntree = typedWithPos(tree.pos)(safeREF(staticFieldSym))
super.transform(ntree)
- // This transform replaces Array(Predef.wrapArray(Array(...)), <manifest>)
+ // This transform replaces Array(Predef.wrapArray(Array(...)), <tag>)
// with just Array(...)
case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(array)), _))
if (wrapRefArrayMeth.symbol == Predef_wrapRefArray &&
@@ -677,7 +678,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// need to create a new block with inits and the old term
treeCopy.Block(term, newStaticInits.toList, term)
}
- case None =>
+ case _ =>
// create new static ctor
val staticCtorSym = currentClass.newStaticConstructor(template.pos)
val rhs = Block(newStaticInits.toList, Literal(Constant(())))
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 4f833c82d3..bc4483923a 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -362,9 +362,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val tree =
If(
Apply(
- Select(
- Apply(gen.mkAttributedRef(specializedFlag), List()),
- definitions.getMember(definitions.BooleanClass, nme.UNARY_!)),
+ CODE.NOT (
+ Apply(gen.mkAttributedRef(specializedFlag), List())),
List()),
Block(stats, Literal(Constant())),
EmptyTree)
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index e2ce3b62b4..ea66dbedd6 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package transform
-import scala.tools.reflect.SigParser
import scala.reflect.internal.ClassfileConstants._
import scala.collection.{ mutable, immutable }
import symtab._
@@ -66,16 +65,6 @@ abstract class Erasure extends AddInterfaces
}
}
- // for debugging signatures: traces logic given system property
- // performance: get the value here
- val traceSignatures = (sys.BooleanProp keyExists "scalac.sigs.trace").value
- private object traceSig extends util.Tracer(() => traceSignatures) {
- override def stringify(x: Any) = x match {
- case tp: Type => super.stringify(dropAllRefinements(tp))
- case _ => super.stringify(x)
- }
- }
-
override protected def verifyJavaErasure = settings.Xverify.value || settings.debug.value
def needsJavaSig(tp: Type) = !settings.Ynogenericsig.value && NeedsSigCollector.collect(tp)
@@ -173,21 +162,6 @@ abstract class Erasure extends AddInterfaces
}
}
- /** Run the signature parser to catch bogus signatures.
- */
- def isValidSignature(sym: Symbol, sig: String) = (
- /** Since we're using a sun internal class for signature validation,
- * we have to allow for it not existing or otherwise malfunctioning:
- * in which case we treat every signature as valid. Medium term we
- * should certainly write independent signature validation.
- */
- SigParser.isParserAvailable && (
- if (sym.isMethod) SigParser verifyMethod sig
- else if (sym.isTerm) SigParser verifyType sig
- else SigParser verifyClass sig
- )
- )
-
private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
case RefinedType(parents, _) => parents map (_.normalize)
case tp => tp :: Nil
@@ -199,7 +173,7 @@ abstract class Erasure extends AddInterfaces
def javaSig(sym0: Symbol, info: Type): Option[String] = beforeErasure {
val isTraitSignature = sym0.enclClass.isTrait
- def superSig(parents: List[Type]) = traceSig("superSig", parents) {
+ def superSig(parents: List[Type]) = {
val ps = (
if (isTraitSignature) {
// java is unthrilled about seeing interfaces inherit from classes
@@ -210,10 +184,10 @@ abstract class Erasure extends AddInterfaces
}
else parents
)
- ps map boxedSig mkString
+ (ps map boxedSig).mkString
}
def boxedSig(tp: Type) = jsig(tp, primitiveOK = false)
- def boundsSig(bounds: List[Type]) = traceSig("boundsSig", bounds) {
+ def boundsSig(bounds: List[Type]) = {
val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait)
val classPart = isClass match {
case Nil => ":" // + boxedSig(ObjectClass.tpe)
@@ -222,7 +196,7 @@ abstract class Erasure extends AddInterfaces
classPart :: (isTrait map boxedSig) mkString ":"
}
def paramSig(tsym: Symbol) = tsym.name + boundsSig(hiBounds(tsym.info.bounds))
- def polyParamSig(tparams: List[Symbol]) = traceSig("polyParamSig", tparams) (
+ def polyParamSig(tparams: List[Symbol]) = (
if (tparams.isEmpty) ""
else tparams map paramSig mkString ("<", "", ">")
)
@@ -315,22 +289,11 @@ abstract class Erasure extends AddInterfaces
else jsig(etp)
}
}
- val result = traceSig("javaSig", (sym0, info)) {
- if (needsJavaSig(info)) {
- try Some(jsig(info, toplevel = true))
- catch { case ex: UnknownSig => None }
- }
- else None
- }
- // Debugging: immediately verify signatures when tracing.
- if (traceSignatures) {
- result foreach { sig =>
- if (!isValidSignature(sym0, sig))
- println("**** invalid signature for " + sym0 + ": " + sig)
- }
+ if (needsJavaSig(info)) {
+ try Some(jsig(info, toplevel = true))
+ catch { case ex: UnknownSig => None }
}
-
- result
+ else None
}
class UnknownSig extends Exception
@@ -480,17 +443,22 @@ abstract class Erasure extends AddInterfaces
// TODO: should we do this for user-defined unapplies as well?
// does the first argument list have exactly one argument -- for user-defined unapplies we can't be sure
def maybeWrap(bridgingCall: Tree): Tree = {
- val canReturnNone = afterErasure(
- member.isSynthetic
- && (member.name == nme.unapply || member.name == nme.unapplySeq)
- && !(member.tpe <:< other.tpe) // no static guarantees (TODO: is the subtype test ever true?)
- )
- if (canReturnNone) {
- import CODE._
+ val guardExtractor = ( // can't statically know which member is going to be selected, so don't let this depend on member.isSynthetic
+ (member.name == nme.unapply || member.name == nme.unapplySeq)
+ && !afterErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
+
+ import CODE._
+ val _false = FALSE_typed
+ val pt = member.tpe.resultType
+ lazy val zero =
+ if (_false.tpe <:< pt) _false
+ else if (NoneModule.tpe <:< pt) REF(NoneModule)
+ else EmptyTree
+
+ if (guardExtractor && (zero ne EmptyTree)) {
val typeTest = gen.mkIsInstanceOf(REF(bridge.firstParam), member.tpe.params.head.tpe)
- IF (typeTest) THEN bridgingCall ELSE REF(NoneModule)
- }
- else bridgingCall
+ IF (typeTest) THEN bridgingCall ELSE zero
+ } else bridgingCall
}
val rhs = member.tpe match {
case MethodType(Nil, ConstantType(c)) => Literal(c)
@@ -618,8 +586,12 @@ abstract class Erasure extends AddInterfaces
// See SI-4731 for one example of how this occurs.
log("Attempted to cast to Unit: " + tree)
tree.duplicate setType pt
- }
- else gen.mkAttributedCast(tree, pt)
+ } else if (tree.tpe != null && tree.tpe.typeSymbol == ArrayClass && pt.typeSymbol == ArrayClass) {
+ // See SI-2386 for one example of when this might be necessary.
+ val needsExtraCast = isPrimitiveValueType(tree.tpe.typeArgs.head) && !isPrimitiveValueType(pt.typeArgs.head)
+ val tree1 = if (needsExtraCast) gen.mkRuntimeCall(nme.toObjectArray, List(tree)) else tree
+ gen.mkAttributedCast(tree1, pt)
+ } else gen.mkAttributedCast(tree, pt)
}
/** Adapt `tree` to expected type `pt`.
@@ -640,7 +612,8 @@ abstract class Erasure extends AddInterfaces
else if (isPrimitiveValueType(tree.tpe) && !isPrimitiveValueType(pt)) {
adaptToType(box(tree, pt.toString), pt)
} else if (tree.tpe.isInstanceOf[MethodType] && tree.tpe.params.isEmpty) {
- assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
+ // [H] this assert fails when trying to typecheck tree !(SomeClass.this.bitmap) for single lazy val
+ //assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
// } else if (pt <:< tree.tpe)
// cast(tree, pt)
@@ -665,6 +638,7 @@ abstract class Erasure extends AddInterfaces
*/
private def adaptMember(tree: Tree): Tree = {
//Console.println("adaptMember: " + tree);
+ val x = 2 + 2
tree match {
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_asInstanceOf =>
@@ -993,7 +967,7 @@ abstract class Erasure extends AddInterfaces
}
// Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
else if (isPrimitiveValueClass(qual.tpe.typeSymbol))
- global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual)))
+ global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveErasureTag(qual.tpe.widen, tree.pos, true))))
else
tree
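
Among the Erasure changes above, the cast helper now inserts a toObjectArray runtime call when casting an array of a primitive value type to an array of references (SI-2386). A standalone illustration of why the extra step is needed; ArrayCastSketch and its parameters are invented:

    object ArrayCastSketch {
      // mirrors the needsExtraCast condition in the hunk above
      def needsExtraCast(srcElemIsPrimitive: Boolean, dstElemIsPrimitive: Boolean): Boolean =
        srcElemIsPrimitive && !dstElemIsPrimitive

      def main(args: Array[String]): Unit = {
        val ints: Array[Int] = Array(1, 2, 3)
        // An Array[Int] is an int[] on the JVM; viewing it as an Array[AnyRef]
        // requires a boxing copy, which is what the runtime call provides.
        println(needsExtraCast(srcElemIsPrimitive = true, dstElemIsPrimitive = false)) // true
        val boxed: Array[AnyRef] = ints.map(i => Int.box(i): AnyRef)
        println(boxed.mkString(", "))
      }
    }
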
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 16c7c3c3ff..9cffb6a1e1 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -149,7 +149,7 @@ abstract class ExplicitOuter extends InfoTransform
if (sym.owner.isTrait && ((sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isModule)) { // 5
sym.makeNotPrivate(sym.owner)
}
- if (sym.owner.isTrait) sym setNotFlag PROTECTED // 6
+ if (sym.owner.isTrait && sym.isProtected) sym setFlag notPROTECTED // 6
if (sym.isClassConstructor && isInner(sym.owner)) { // 1
val p = sym.newValueParameter(innerClassConstructorParamName, sym.pos)
.setInfo(sym.owner.outerClass.thisType)
@@ -357,7 +357,7 @@ abstract class ExplicitOuter extends InfoTransform
*/
def mixinOuterAccessorDef(mixinClass: Symbol): Tree = {
val outerAcc = outerAccessor(mixinClass) overridingSymbol currentClass
- def mixinPrefix = currentClass.thisType baseType mixinClass prefix;
+ def mixinPrefix = (currentClass.thisType baseType mixinClass).prefix
assert(outerAcc != NoSymbol, "No outer accessor for inner mixin " + mixinClass + " in " + currentClass)
// I added the mixinPrefix.typeArgs.nonEmpty condition to address the
// crash in SI-4970. I feel quite sure this can be improved.
@@ -448,8 +448,8 @@ abstract class ExplicitOuter extends InfoTransform
override def transform(tree: Tree): Tree = {
val sym = tree.symbol
if (sym != null && sym.isType) { //(9)
- sym setNotFlag PRIVATE
- sym setNotFlag PROTECTED
+ if (sym.isPrivate) sym setFlag notPRIVATE
+ if (sym.isProtected) sym setFlag notPROTECTED
}
tree match {
case Template(parents, self, decls) =>
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 3515c1d521..8556cc9ddc 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -44,17 +44,28 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
* in `extensionMethod` if the first name has the wrong type. We thereby gain a level of insensitivity
   * to how overloaded types are ordered between phases and picklings.
*/
- private def extensionNames(imeth: Symbol): Stream[Name] =
- imeth.owner.info.decl(imeth.name).tpe match {
+ private def extensionNames(imeth: Symbol): Stream[Name] = {
+ val decl = imeth.owner.info.decl(imeth.name)
+
+ // Bridge generation is done at phase `erasure`, but new scopes are only generated
+ // for the phase after that. So bridges are visible in earlier phases.
+ //
+ // `info.member(imeth.name)` filters these out, but we need to use `decl`
+ // to restrict ourselves to members defined in the current class, so we
+ // must do the filtering here.
+ val declTypeNoBridge = decl.filter(sym => !sym.isBridge).tpe
+
+ declTypeNoBridge match {
case OverloadedType(_, alts) =>
val index = alts indexOf imeth
assert(index >= 0, alts+" does not contain "+imeth)
def altName(index: Int) = newTermName("extension"+index+"$"+imeth.name)
- altName(index) #:: ((0 until alts.length).toStream filter (index !=) map altName)
+ altName(index) #:: ((0 until alts.length).toStream filter (index != _) map altName)
case tpe =>
assert(tpe != NoType, imeth.name+" not found in "+imeth.owner+"'s decls: "+imeth.owner.info.decls)
Stream(newTermName("extension$"+imeth.name))
}
+ }
/** Return the extension method that corresponds to given instance method `meth`.
*/
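
extensionNames above now filters out bridge symbols before inspecting the declaration's type; the naming scheme itself is unchanged. A standalone sketch of that scheme, with counts and indices passed in directly instead of being read from symbol info:

    object ExtensionNameSketch {
      def extensionNames(name: String, overloadCount: Int, index: Int): Stream[String] =
        if (overloadCount <= 1) Stream("extension$" + name)
        else ("extension" + index + "$" + name) #::
             (0 until overloadCount).toStream.filter(index != _).map(i => "extension" + i + "$" + name)

      def main(args: Array[String]): Unit = {
        println(extensionNames("plus", overloadCount = 1, index = 0).toList)
        // List(extension$plus)
        println(extensionNames("plus", overloadCount = 3, index = 1).toList)
        // List(extension1$plus, extension0$plus, extension2$plus): preferred name first, alternatives after
      }
    }
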
diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
index 0905fa86c6..880f0f0157 100644
--- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
@@ -37,7 +37,7 @@ trait InfoTransform extends Transform {
val changesBaseClasses = InfoTransform.this.changesBaseClasses
def transform(sym: Symbol, tpe: Type): Type = transformInfo(sym, tpe)
}
- infoTransformers.insert(infoTransformer)
+ infoTransformers insert infoTransformer
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 6bddfe8d57..718e58b855 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -108,28 +108,30 @@ abstract class LambdaLift extends InfoTransform {
* }
*/
private def markFree(sym: Symbol, enclosure: Symbol): Boolean = {
- debuglog("mark free: " + sym + " of " + sym.owner + " marked free in " + enclosure)
- if (enclosure == sym.owner.logicallyEnclosingMember) true
- else if (enclosure.isPackageClass || !markFree(sym, enclosure.skipConstructor.owner.logicallyEnclosingMember)) false
- else {
- val ss = symSet(free, enclosure)
- if (!ss(sym)) {
- ss addEntry sym
- renamable addEntry sym
- beforePickler {
- // The param symbol in the MethodType should not be renamed, only the symbol in scope. This way,
- // parameter names for named arguments are not changed. Example: without cloning the MethodType,
- // def closure(x: Int) = { () => x }
- // would have the signature
- // closure: (x$1: Int)() => Int
- if (sym.isParameter && sym.owner.info.paramss.exists(_ contains sym))
- sym.owner modifyInfo (_ cloneInfo sym.owner)
+ debuglog("mark free: " + sym.fullLocationString + " marked free in " + enclosure)
+ (enclosure == sym.owner.logicallyEnclosingMember) || {
+ debuglog("%s != %s".format(enclosure, sym.owner.logicallyEnclosingMember))
+ if (enclosure.isPackageClass || !markFree(sym, enclosure.skipConstructor.owner.logicallyEnclosingMember)) false
+ else {
+ val ss = symSet(free, enclosure)
+ if (!ss(sym)) {
+ ss addEntry sym
+ renamable addEntry sym
+ beforePickler {
+ // The param symbol in the MethodType should not be renamed, only the symbol in scope. This way,
+ // parameter names for named arguments are not changed. Example: without cloning the MethodType,
+ // def closure(x: Int) = { () => x }
+ // would have the signature
+ // closure: (x$1: Int)() => Int
+ if (sym.isParameter && sym.owner.info.paramss.exists(_ contains sym))
+ sym.owner modifyInfo (_ cloneInfo sym.owner)
+ }
+ changedFreeVars = true
+ debuglog("" + sym + " is free in " + enclosure);
+ if (sym.isVariable) sym setFlag CAPTURED
}
- changedFreeVars = true
- debuglog("" + sym + " is free in " + enclosure);
- if (sym.isVariable) sym setFlag CAPTURED
+ !enclosure.isClass
}
- !enclosure.isClass
}
}
@@ -159,7 +161,7 @@ abstract class LambdaLift extends InfoTransform {
// for that failure. There should be exactly one method for any given
// entity which always gives the right answer.
if (sym.isImplClass)
- localImplClasses((sym.owner, nme.interfaceName(sym.name))) = sym
+ localImplClasses((sym.owner, tpnme.interfaceName(sym.name))) = sym
else {
renamable addEntry sym
if (sym.isTrait)
@@ -229,7 +231,7 @@ abstract class LambdaLift extends InfoTransform {
def renameTrait(traitSym: Symbol, implSym: Symbol) {
val originalImplName = implSym.name
renameSym(traitSym)
- implSym setName nme.implClassName(traitSym.name)
+ implSym setName tpnme.implClassName(traitSym.name)
debuglog("renaming impl class in step with %s: %s => %s".format(traitSym, originalImplName, implSym.name))
}
@@ -273,8 +275,11 @@ abstract class LambdaLift extends InfoTransform {
if (ps.isEmpty) searchIn(enclosure.skipConstructor.owner)
else ps.head
}
- debuglog("proxy " + sym + " in " + sym.owner + " from " + currentOwner.ownerChain.mkString(" -> ") +
- " " + sym.owner.logicallyEnclosingMember)
+ debuglog("proxy %s from %s has logical enclosure %s".format(
+ sym.debugLocationString,
+ currentOwner.debugLocationString,
+ sym.owner.logicallyEnclosingMember.debugLocationString)
+ )
if (isSameOwnerEnclosure(sym)) sym
else searchIn(currentOwner)
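
markFree above is restructured into a short-circuit form: the base case first, then a block that recurses outward, records the free symbol, and reports whether propagation should continue. A rough standalone model of that shape only (scopes are plain strings; this is not the compiler's free-variable bookkeeping):

    object MarkFreeShapeSketch {
      // enclosures lists the enclosing members from the innermost outward
      def markFree(sym: String, owner: String, enclosures: List[String], record: String => Unit): Boolean =
        enclosures match {
          case Nil => false
          case enclosure :: outer =>
            (enclosure == owner) || {
              markFree(sym, owner, outer, record) && {
                record(enclosure + " captures " + sym)
                true
              }
            }
        }

      def main(args: Array[String]): Unit = {
        markFree("x", "outerMethod", List("innerClosure", "outerClosure", "outerMethod"), println)
        // prints "outerClosure captures x" then "innerClosure captures x"
      }
    }
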
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 85ba539993..e8387c80f5 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -12,7 +12,8 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
import CODE._
val phaseName: String = "lazyvals"
- val FLAGS_PER_WORD: Int
+ private val FLAGS_PER_BYTE: Int = 8 // Byte
+ private def bitmapKind = ByteClass
def newTransformer(unit: CompilationUnit): Transformer =
new LazyValues(unit)
@@ -53,7 +54,6 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
private val lazyVals = perRunCaches.newMap[Symbol, Int]() withDefaultValue 0
import symtab.Flags._
- import lazyVals._
/** Perform the following transformations:
* - for a lazy accessor inside a method, make it check the initialization bitmap
@@ -68,8 +68,20 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
curTree = tree
tree match {
+
+ case Block(_, _) =>
+ val block1 = super.transform(tree)
+ val Block(stats, expr) = block1
+ val stats1 = stats.flatMap(_ match {
+ case Block(List(d1@DefDef(_, n1, _, _, _, _)), d2@DefDef(_, n2, _, _, _, _)) if (nme.newLazyValSlowComputeName(n2) == n1) =>
+ List(d1, d2)
+ case stat =>
+ List(stat)
+ })
+ treeCopy.Block(block1, stats1, expr)
+
case DefDef(_, _, _, _, _, rhs) => atOwner(tree.symbol) {
- val res = if (!sym.owner.isClass && sym.isLazy) {
+ val (res, slowPathDef) = if (!sym.owner.isClass && sym.isLazy) {
val enclosingClassOrDummyOrMethod = {
val enclMethod = sym.enclMethod
@@ -84,13 +96,14 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
}
val idx = lazyVals(enclosingClassOrDummyOrMethod)
lazyVals(enclosingClassOrDummyOrMethod) = idx + 1
- val rhs1 = mkLazyDef(enclosingClassOrDummyOrMethod, super.transform(rhs), idx, sym)
+ val (rhs1, sDef) = mkLazyDef(enclosingClassOrDummyOrMethod, transform(rhs), idx, sym)
sym.resetFlag((if (lazyUnit(sym)) 0 else LAZY) | ACCESSOR)
- rhs1
- } else
- super.transform(rhs)
-
- deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
+ (rhs1, sDef)
+ } else
+ (transform(rhs), EmptyTree)
+
+ val ddef1 = deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
+ if (slowPathDef != EmptyTree) Block(slowPathDef, ddef1) else ddef1
}
case Template(_, _, body) => atOwner(currentOwner) {
@@ -126,7 +139,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
case ValDef(_, _, _, _) if !sym.owner.isModule && !sym.owner.isClass =>
deriveValDef(tree) { rhs0 =>
- val rhs = super.transform(rhs0)
+ val rhs = transform(rhs0)
if (LocalLazyValFinder.find(rhs)) typed(addBitmapDefs(sym, rhs)) else rhs
}
@@ -175,6 +188,24 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
case _ => prependStats(bmps, rhs)
}
}
+
+ def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree): Tree = {
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, STABLE | PRIVATE)
+ defSym setInfo MethodType(List(), lzyVal.tpe.resultType)
+ defSym.owner = lzyVal.owner
+ if (bitmaps.contains(lzyVal))
+ bitmaps(lzyVal).map(_.owner = defSym)
+ val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
+ DEF(defSym).mkTree(addBitmapDefs(lzyVal, BLOCK(rhs, retVal))) setSymbol defSym
+ }
+
+
+ def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree): (Tree, Tree) = {
+ val slowPathDef: Tree = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal)
+ (If(cond, Apply(ID(slowPathDef.symbol), List()), retVal), slowPathDef)
+ }
/** return a 'lazified' version of rhs. Rhs should conform to the
* following schema:
@@ -185,33 +216,38 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* <rhs> when the lazy value has type Unit (for which there is no field
   * to cache its value).
*
- * The result will be a tree of the form
- * {
- * if ((bitmap$n & MASK) == 0) {
+ * As for normal lazy val members (see Mixin), the result will be a tree of the form
+ * { if ((bitmap$n & MASK) == 0) this.l$compute()
+ * else l$
+ *
+ * def l$compute() = { synchronized(enclosing_class_or_dummy) {
+ * if ((bitmap$n & MASK) == 0) {
* l$ = <rhs>
* bitmap$n = bitmap$n | MASK
+ * }}
+ * l$
* }
- * l$
* }
- * where bitmap$n is an int value acting as a bitmap of initialized values. It is
- * the 'n' is (offset / 32), the MASK is (1 << (offset % 32)). If the value has type
- * unit, no field is used to cache the value, so the resulting code is:
+ * where bitmap$n is a byte value acting as a bitmap of initialized values. The
+ * 'n' is (offset / 8) and the MASK is (1 << (offset % 8)). If the value has type
+ * Unit, no field is used to cache the value, so l$compute looks as follows:
* {
- * if ((bitmap$n & MASK) == 0) {
+ * def l$compute() = { synchronized(enclosing_class_or_dummy) {
+ * if ((bitmap$n & MASK) == 0) {
* <rhs>;
* bitmap$n = bitmap$n | MASK
- * }
+ * }}
* ()
+ * }
* }
*/
- private def mkLazyDef(methOrClass: Symbol, tree: Tree, offset: Int, lazyVal: Symbol): Tree = {
+ private def mkLazyDef(methOrClass: Symbol, tree: Tree, offset: Int, lazyVal: Symbol): (Tree, Tree) = {
val bitmapSym = getBitmapFor(methOrClass, offset)
- val mask = LIT(1 << (offset % FLAGS_PER_WORD))
+ val mask = LIT(1 << (offset % FLAGS_PER_BYTE))
val bitmapRef = if (methOrClass.isClass) Select(This(methOrClass), bitmapSym) else Ident(bitmapSym)
def mkBlock(stmt: Tree) = BLOCK(stmt, mkSetFlag(bitmapSym, mask, bitmapRef), UNIT)
-
val (block, res) = tree match {
case Block(List(assignment), res) if !lazyUnit(lazyVal) =>
(mkBlock(assignment), res)
@@ -219,16 +255,13 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
(mkBlock(rhs), UNIT)
}
- val cond = (bitmapRef INT_& mask) INT_== ZERO
-
- atPos(tree.pos)(localTyper.typed {
- def body = gen.mkDoubleCheckedLocking(methOrClass.enclClass, cond, List(block), Nil)
- BLOCK(body, res)
- })
+ val cond = (bitmapRef GEN_& (mask, bitmapKind)) GEN_== (ZERO, bitmapKind)
+ val lazyDefs = mkFastPathBody(methOrClass.enclClass, lazyVal, cond, List(block), Nil, res)
+ (atPos(tree.pos)(localTyper.typed {lazyDefs._1 }), atPos(tree.pos)(localTyper.typed {lazyDefs._2 }))
}
private def mkSetFlag(bmp: Symbol, mask: Tree, bmpRef: Tree): Tree =
- bmpRef === (bmpRef INT_| mask)
+ bmpRef === (bmpRef GEN_| (mask, bitmapKind))
val bitmaps = mutable.Map[Symbol, List[Symbol]]() withDefaultValue Nil
@@ -236,12 +269,12 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* given offset.
*/
private def getBitmapFor(meth: Symbol, offset: Int): Symbol = {
- val n = offset / FLAGS_PER_WORD
+ val n = offset / FLAGS_PER_BYTE
val bmps = bitmaps(meth)
if (bmps.length > n)
bmps(n)
else {
- val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(IntClass.tpe)
+ val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteClass.tpe)
beforeTyper {
sym addAnnotation VolatileAttr
}
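To make the new shape concrete, the following is a hand-written, illustrative equivalent of what LazyVals now emits for a lazy val defined inside a method: a byte-sized bitmap, an unsynchronized fast path, and a small slow-path method that performs the synchronized double-check. Names such as bitmap0 and valueSlowCompute stand in for the compiler-generated bitmap$0 / l$compute; this is a sketch of the scheme, not the transform's literal output (the real bitmap is additionally marked volatile).

    object LocalLazyValSketch {
      def compute(): Int = {
        var bitmap0: Byte = 0        // one byte now covers up to 8 local lazy vals
        var valueCache: Int = 0

        // slow path: synchronized double-check, kept small so the fast path can inline
        def valueSlowCompute(): Int = {
          LocalLazyValSketch.synchronized {
            if ((bitmap0 & 1) == 0) {
              valueCache = 42                     // <rhs> of the lazy val
              bitmap0 = (bitmap0 | 1).toByte
            }
          }
          valueCache
        }

        // fast path: unsynchronized bitmap test, falling back to the compute method
        def value: Int = if ((bitmap0 & 1) == 0) valueSlowCompute() else valueCache

        value + value
      }
    }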
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 0e4975c04c..79b9317f20 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -29,9 +29,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** Map a lazy, mixed-in field accessor to its trait member accessor */
private val initializer = perRunCaches.newMap[Symbol, Symbol]
- /** Deferred bitmaps that will be added during the transformation of a class */
- private val deferredBitmaps = perRunCaches.newMap[Symbol, List[Tree]]() withDefaultValue Nil
-
// --------- helper functions -----------------------------------------------
/** A member of a trait is implemented statically if its implementation after the
@@ -123,7 +120,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol =
afterPickler {
- var bcs = base.info.baseClasses.dropWhile(mixinClass !=).tail
+ var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail
var sym: Symbol = NoSymbol
debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe +
" " + mixinClass + " " + base.info.baseClasses + "/" + bcs)
@@ -495,6 +492,19 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* fields count as fields defined by the class itself.
*/
private val fieldOffset = perRunCaches.newMap[Symbol, Int]()
+
+ private val bitmapKindForCategory = perRunCaches.newMap[Name, ClassSymbol]()
+
+ // ByteClass, IntClass, LongClass
+ private def bitmapKind(field: Symbol): ClassSymbol = bitmapKindForCategory(bitmapCategory(field))
+
+ private def flagsPerBitmap(field: Symbol): Int = bitmapKind(field) match {
+ case BooleanClass => 1
+ case ByteClass => 8
+ case IntClass => 32
+ case LongClass => 64
+ }
+
/** The first transform; called in a pre-order traversal at phase mixin
* (that is, every node is processed before its children).
@@ -613,12 +623,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if (field.accessed hasAnnotation TransientAttr) {
if (isNormal) BITMAP_TRANSIENT
else BITMAP_CHECKINIT_TRANSIENT
- }
- else if (field hasFlag PRIVATE | notPRIVATE) {
- if (isNormal) BITMAP_PRIVATE
- else BITMAP_CHECKINIT
- }
- else {
+ } else {
if (isNormal) BITMAP_NORMAL
else BITMAP_CHECKINIT
}
@@ -686,11 +691,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else newDefs ::: (stats filter isNotDuplicate)
}
- def addDeferredBitmap(clazz: Symbol, tree: Tree) {
- // Append the set of deferred defs
- deferredBitmaps(clazz) ::= typedPos(clazz.pos)(tree)
- }
-
/** If `stat` is a superaccessor, complete it by adding a right-hand side.
* Note: superaccessors are always abstract until this point.
* The method to call in a superaccessor is stored in the accessor symbol's alias field.
@@ -708,108 +708,116 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
stat
}
- import lazyVals._
-
- /**
- * Private or transient lazy vals use bitmaps that are private for the class context,
- * unlike public or protected vals, which can use inherited bitmaps.
- * Similarly fields in the checkinit mode use private bitmaps.
- */
- def isLocalBitmapField(field: Symbol) = (
- field.accessed.hasAnnotation(TransientAttr)
- || field.hasFlag(PRIVATE | notPRIVATE)
- || isCheckInitField(field)
- )
-
/**
* Return the bitmap field for 'offset'. Depending on the hierarchy it is possible to reuse
* the bitmap of its parents. If that does not exist yet we create one.
*/
- def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol, searchParents: Boolean = true): Symbol = {
+ def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol): Symbol = {
val category = bitmapCategory(field)
- val bitmapName = nme.newBitmapName(category, offset / FLAGS_PER_WORD)
- val sym = clazz0.info.member(bitmapName)
+ val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field))
+ val sym = clazz0.info.decl(bitmapName)
assert(!sym.isOverloaded, sym)
-
+
def createBitmap: Symbol = {
- val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo IntClass.tpe
+ val bitmapKind = bitmapKindForCategory(category)
+ val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe
beforeTyper(sym addAnnotation VolatileAttr)
category match {
case nme.BITMAP_TRANSIENT | nme.BITMAP_CHECKINIT_TRANSIENT => sym addAnnotation TransientAttr
case _ =>
}
- category match {
- case nme.BITMAP_NORMAL if field.isLazy => sym setFlag PROTECTED
- case _ => sym setFlag PrivateLocal
+ val init = bitmapKind match {
+ case BooleanClass => VAL(sym) === FALSE
+ case _ => VAL(sym) === ZERO
}
-
+
+ sym setFlag PrivateLocal
clazz0.info.decls.enter(sym)
- if (clazz0 == clazz)
- addDef(clazz.pos, VAL(sym) === ZERO)
- else {
- //FIXME: the assertion below will not work because of the way bitmaps are added.
- // They should be added during infoTransform, so that in separate compilation, bitmap
- // is a member of clazz and doesn't fail the condition couple lines below.
- // This works, as long as we assume that the previous classes were compiled correctly.
- //assert(clazz0.sourceFile != null)
- addDeferredBitmap(clazz0, VAL(sym) === ZERO)
- }
+ addDef(clazz0.pos, init)
sym
}
if (sym ne NoSymbol)
sym
- else if (searchParents && !isLocalBitmapField(field))
- bitmapForParents(clazz0, offset, field) getOrElse createBitmap
else
createBitmap
}
-
- def bitmapForParents(clazz0: Symbol, offset: Int, valSym: Symbol): Option[Symbol] = {
- def requiredBitmaps(fs: Int): Int = if (fs == 0) -1 else (fs - 1) / FLAGS_PER_WORD
- val bitmapNum = offset / FLAGS_PER_WORD
-
- // filter private and transient
- // since we do not inherit normal values (in checkinit mode) also filter them out
- // !!! Not sure how that comment relates to this code...
- superClassesToCheck(clazz0) foreach { cl =>
- val fields0 = usedBits(cl)
-
- if (requiredBitmaps(fields0) < bitmapNum) {
- val fields1 = cl.info.decls filter isNonLocalFieldWithBitmap size;
- return {
- if (requiredBitmaps(fields0 + fields1) >= bitmapNum)
- Some(bitmapFor(cl, offset, valSym, false))
- else None // Don't waste time, since we won't find bitmap anyway
- }
- }
- }
- None
+
+ def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = {
+ def realOffset = offset % flagsPerBitmap(sym)
+ if (kind == LongClass ) LIT(1L << realOffset) else LIT(1 << realOffset)
}
/** Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
- def mkSetFlag(clazz: Symbol, offset: Int, valSym: Symbol): Tree = {
- val bmp = bitmapFor(clazz, offset, valSym)
- val mask = LIT(1 << (offset % FLAGS_PER_WORD))
- def x = This(clazz) DOT bmp
+ def mkSetFlag(clazz: Symbol, offset: Int, valSym: Symbol, kind: ClassSymbol): Tree = {
+ val bmp = bitmapFor(clazz, offset, valSym)
+ def mask = maskForOffset(offset, valSym, kind)
+ def x = This(clazz) DOT bmp
+ def newValue = if (kind == BooleanClass) TRUE else (x GEN_| (mask, kind))
- x === (x INT_| mask)
+ x === newValue
}
/** Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
* precise comparison operator depending on the value of 'equalToZero'.
*/
- def mkTest(clazz: Symbol, mask: Tree, bitmapSym: Symbol, equalToZero: Boolean): Tree = {
- def lhs = (This(clazz) DOT bitmapSym) INT_& mask
- if (equalToZero) lhs INT_== ZERO
- else lhs INT_!= ZERO
+ def mkTest(clazz: Symbol, mask: Tree, bitmapSym: Symbol, equalToZero: Boolean, kind: ClassSymbol): Tree = {
+ val bitmapTree = (This(clazz) DOT bitmapSym)
+ def lhs = bitmapTree GEN_& (mask, kind)
+ kind match {
+ case BooleanClass =>
+ if (equalToZero) NOT(bitmapTree)
+ else bitmapTree
+ case _ =>
+ if (equalToZero) lhs GEN_== (ZERO, kind)
+ else lhs GEN_!= (ZERO, kind)
+ }
+ }
+
+ def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = {
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, PRIVATE)
+ val params = defSym newSyntheticValueParams args.map(_.symbol.tpe)
+ defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType)
+ val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
+ val strictSubst = new TreeSymSubstituterWithCopying(args.map(_.symbol), params)
+ addDef(position(defSym), DEF(defSym).mkTree(strictSubst(BLOCK(rhs, retVal))) setSymbol defSym)
+ defSym
+ }
+
+ def mkFastPathLazyBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree): Tree = {
+ mkFastPathBody(clazz, lzyVal, cond, syncBody, stats, retVal, gen.mkAttributedThis(clazz), List())
+ }
+
+ def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Tree = {
+ val slowPathSym: Symbol = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal, attrThis, args)
+ If(cond, fn (This(clazz), slowPathSym, args.map(arg => Ident(arg.symbol)): _*), retVal)
}
+
+
+ /** Always copy the tree if we are going to perform sym substitution,
+ * otherwise we will side-effect on the tree that is used in the fast path
+ */
+ class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
+ override def transform(tree: Tree): Tree =
+ if (tree.hasSymbol && from.contains(tree.symbol))
+ super.transform(tree.duplicate)
+ else super.transform(tree.duplicate)
+
+ override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
+ }
/** return a 'lazified' version of rhs. It uses double-checked locking to ensure
- * initialization is performed at most once. Private fields used only in this
- * initializer are subsequently set to null.
+ * initialization is performed at most once. For performance, the double-checked
+ * locking is split in two: the fast path checks the bitmap without synchronizing,
+ * and if that check fails the lazy val is initialized inside the synchronized
+ * block (the slow path). Keeping the fast path small should let the inliner
+ * optimize it.
+ * Private fields used only in this initializer are subsequently set to null.
*
* @param clazz The class symbol
* @param init The tree which initializes the field ( f = <rhs> )
@@ -817,56 +825,64 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param offset The offset of this field in the flags bitmap
*
* The result will be a tree of the form
- * {
- * if ((bitmap$n & MASK) == 0) {
- * synchronized(this) {
- * if ((bitmap$n & MASK) == 0) {
- * init // l$ = <rhs>
- * bitmap$n = bimap$n | MASK
- * }
- * }
- * this.f1 = null
- * ... this.fn = null
+ * { if ((bitmap$n & MASK) == 0) this.l$compute()
+ * else l$
+ *
+ * ...
+ * def l$compute() = { synchronized(this) {
+ * if ((bitmap$n & MASK) == 0) {
+ * init // l$ = <rhs>
+ * bitmap$n = bitmap$n | MASK
+ * }}
+ * l$
* }
- * l$
+ *
+ * ...
+ * this.f1 = null
+ * ... this.fn = null
* }
- * where bitmap$n is an int value acting as a bitmap of initialized values. It is
- * the 'n' is (offset / 32), the MASK is (1 << (offset % 32)).
+ * where bitmap$n is a byte, int or long value acting as a bitmap of initialized values.
+ * The kind of the bitmap determines how many lazy val bits it can hold.
+ * For an Int bitmap that is 32, so 'n' in the code above is (offset / 32)
+ * and the MASK is (1 << (offset % 32)).
+ * If the class contains only a single lazy val, the bitmap is represented
+ * as a Boolean and the condition check is a simple Boolean test.
*/
def mkLazyDef(clazz: Symbol, lzyVal: Symbol, init: List[Tree], retVal: Tree, offset: Int): Tree = {
def nullify(sym: Symbol) = Select(This(clazz), sym.accessedOrSelf) === LIT(null)
val bitmapSym = bitmapFor(clazz, offset, lzyVal)
- val mask = LIT(1 << (offset % FLAGS_PER_WORD))
- def cond = mkTest(clazz, mask, bitmapSym, true)
+ val kind = bitmapKind(lzyVal)
+ val mask = maskForOffset(offset, lzyVal, kind)
+ def cond = mkTest(clazz, mask, bitmapSym, true, kind)
val nulls = lazyValNullables(lzyVal).toList sortBy (_.id) map nullify
- def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal), UNIT)
+ def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal, kind), UNIT)
if (nulls.nonEmpty)
log("nulling fields inside " + lzyVal + ": " + nulls)
- val result = gen.mkDoubleCheckedLocking(clazz, cond, syncBody, nulls)
- typedPos(init.head.pos)(BLOCK(result, retVal))
+ typedPos(init.head.pos)(mkFastPathLazyBody(clazz, lzyVal, cond, syncBody, nulls, retVal))
}
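As the comment above notes, a class with a single lazy val gets a Boolean bitmap, so the guard degenerates to a plain flag test (the BooleanClass branches of mkTest and mkSetFlag). A hand-written sketch of that case, with ordinary names in place of the compiler-generated ones:

    class SingleLazyValSketch {
      @volatile private[this] var bitmap0: Boolean = false
      private[this] var valueCache: String = _

      // slow path: synchronized re-check, only reached while uninitialized
      private def valueSlowCompute(): String = {
        this.synchronized {
          if (!bitmap0) {
            valueCache = "computed once"   // <rhs>
            bitmap0 = true                 // mkSetFlag: newValue is simply TRUE
          }
        }
        valueCache
      }

      // fast path: NOT(bitmap) instead of a mask test
      def value: String = if (!bitmap0) valueSlowCompute() else valueCache
    }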
- def mkInnerClassAccessorDoubleChecked(attrThis: Tree, rhs: Tree): Tree =
+ def mkInnerClassAccessorDoubleChecked(attrThis: Tree, rhs: Tree, moduleSym: Symbol, args: List[Tree]): Tree =
rhs match {
case Block(List(assign), returnTree) =>
val Assign(moduleVarRef, _) = assign
val cond = Apply(Select(moduleVarRef, nme.eq), List(NULL))
- val doubleSynchrTree = gen.mkDoubleCheckedLocking(attrThis, cond, List(assign), Nil)
- Block(List(doubleSynchrTree), returnTree)
+ mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
case _ =>
assert(false, "Invalid getter " + rhs + " for module in class " + clazz)
EmptyTree
}
def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = {
- val bitmapSym = bitmapFor(clazz, offset, fieldSym.getter(fieldSym.owner))
- val mask = LIT(1 << (offset % FLAGS_PER_WORD))
+ val sym = fieldSym.getter(fieldSym.owner)
+ val bitmapSym = bitmapFor(clazz, offset, sym)
+ val kind = bitmapKind(sym)
+ val mask = maskForOffset(offset, sym, kind)
val msg = "Uninitialized field: " + unit.source + ": " + pos.line
val result =
- IF (mkTest(clazz, mask, bitmapSym, false)) .
+ IF (mkTest(clazz, mask, bitmapSym, false, kind)) .
THEN (retVal) .
ELSE (THROW(UninitializedErrorClass, LIT(msg)))
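For completeness, the checkinit accessor built by mkCheckedAccessor uses the same bitmap test, only inverted: read the field if the bit is set, otherwise throw. An illustrative hand-written equivalent (assuming scala.UninitializedFieldError as the class behind UninitializedErrorClass; names and the message text are stand-ins):

    class CheckInitSketch {
      private[this] var bitmap0: Byte = 0
      private[this] var xCache: Int = _

      def x: Int =
        if ((bitmap0 & 1) != 0) xCache
        else throw new UninitializedFieldError("Uninitialized field: CheckInitSketch.scala: 7")

      def x_=(v: Int): Unit = {
        xCache = v
        bitmap0 = (bitmap0 | 1).toByte   // the setter records initialization
      }
    }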
@@ -887,14 +903,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if (sym.isLazy && !isEmpty && !clazz.isImplClass) {
assert(fieldOffset contains sym, sym)
- deriveDefDef(stat)(rhs =>
- if (isUnit)
- mkLazyDef(clazz, sym, List(rhs), UNIT, fieldOffset(sym))
- else {
- val Block(stats, res) = rhs
+ deriveDefDef(stat) {
+ case t if isUnit => mkLazyDef(clazz, sym, List(t), UNIT, fieldOffset(sym))
+
+ case Block(stats, res) =>
mkLazyDef(clazz, sym, stats, Select(This(clazz), res.symbol), fieldOffset(sym))
- }
- )
+
+ case t => t // pass specialized lazy vals through
+ }
}
else if (needsInitFlag(sym) && !isEmpty && !clazz.hasFlag(IMPLCLASS | TRAIT)) {
assert(fieldOffset contains sym, sym)
@@ -911,7 +927,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else if (settings.checkInit.value && !clazz.isTrait && sym.isSetter) {
val getter = sym.getter(clazz)
if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
- deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter))), UNIT))
+ deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))), UNIT))
else stat
}
else if (sym.isModule && (!clazz.isTrait || clazz.isImplClass) && !sym.isBridge) {
@@ -921,7 +937,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// Martin to Hubert: I think this can be replaced by selfRef(tree.pos)
// @PP: It does not seem so, it crashes for me trying to bootstrap.
if (clazz.isImplClass) gen.mkAttributedIdent(stat.vparamss.head.head.symbol) else gen.mkAttributedThis(clazz),
- rhs
+ rhs, sym, stat.vparamss.head
)
)
)
@@ -939,7 +955,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val sym = clazz.info decl lhs.symbol.getterName suchThat (_.isGetter)
if (needsInitAndHasOffset(sym)) {
debuglog("adding checked getter for: " + sym + " " + lhs.symbol.flagString)
- List(localTyper typed mkSetFlag(clazz, fieldOffset(sym), sym))
+ List(localTyper typed mkSetFlag(clazz, fieldOffset(sym), sym, bitmapKind(sym)))
}
else Nil
}
@@ -964,23 +980,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def addInitBits(clazz: Symbol, rhs: Tree): Tree =
new AddInitBitsTransformer(clazz) transform rhs
- def isNonLocalFieldWithBitmap(field: Symbol) =
- isFieldWithBitmap(field) && !isLocalBitmapField(field)
-
def isCheckInitField(field: Symbol) =
needsInitFlag(field) && !field.isDeferred
def superClassesToCheck(clazz: Symbol) =
clazz.ancestors filterNot (_ hasFlag TRAIT | JAVA)
- /**
- * Return the number of bits used by superclass fields.
- */
- def usedBits(clazz0: Symbol): Int =
- superClassesToCheck(clazz0) flatMap (_.info.decls) count { f =>
- f.owner != clazz0 && isNonLocalFieldWithBitmap(f)
- }
-
// begin addNewDefs
/** Fill the map from fields to offset numbers.
@@ -988,25 +993,27 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* code generation easier later.
*/
def buildBitmapOffsets() {
- def fold(zero: Int, fields: List[Symbol]) = {
- var idx = zero
+ def fold(fields: List[Symbol], category: Name) = {
+ var idx = 0
fields foreach { f =>
- idx += 1
fieldOffset(f) = idx
+ idx += 1
}
+
+ if (idx == 0) ()
+ else if (idx == 1) bitmapKindForCategory(category) = BooleanClass
+ else if (idx < 9) bitmapKindForCategory(category) = ByteClass
+ else if (idx < 33) bitmapKindForCategory(category) = IntClass
+ else bitmapKindForCategory(category) = LongClass
}
clazz.info.decls.toList groupBy bitmapCategory foreach {
case (nme.NO_NAME, _) => ()
- case (nme.BITMAP_NORMAL, fields) => fold(usedBits(clazz), fields)
- case (_, fields) => fold(0, fields)
+ case (category, fields) => fold(fields, category)
}
}
buildBitmapOffsets()
var stats1 = addCheckedGetters(clazz, stats)
- // add deferred bitmaps
- deferredBitmaps remove clazz foreach { d => stats1 = add(stats1, d) }
-
def accessedReference(sym: Symbol) = sym.tpe match {
case MethodType(Nil, ConstantType(c)) => Literal(c)
case _ =>
@@ -1052,7 +1059,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val getter = sym.getter(clazz)
if (!needsInitFlag(getter)) init
- else Block(init, mkSetFlag(clazz, fieldOffset(getter), getter), UNIT)
+ else Block(init, mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)), UNIT)
}
}
else if (needsInitFlag(sym))
@@ -1069,7 +1076,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val rhs = gen.newModule(sym, vdef.symbol.tpe)
val assignAndRet = gen.mkAssignAndReturn(vdef.symbol, rhs)
val attrThis = gen.mkAttributedThis(clazz)
- val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet)
+ val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet, sym, List())
addDefDef(sym, rhs1)
}
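The sizing rules spread across flagsPerBitmap, maskForOffset and buildBitmapOffsets amount to a small piece of arithmetic. The following standalone sketch (all names invented) reproduces the thresholds and the offset-to-mask mapping so they can be checked outside the compiler; for example, kindFor(12) yields an Int-sized bitmap and slotFor(10, IntKind) is (0, 1024).

    object BitmapSizing {
      sealed trait Kind { def flags: Int }
      case object BooleanKind extends Kind { val flags = 1 }
      case object ByteKind    extends Kind { val flags = 8 }
      case object IntKind     extends Kind { val flags = 32 }
      case object LongKind    extends Kind { val flags = 64 }

      // mirrors buildBitmapOffsets: 1 field -> Boolean, up to 8 -> Byte,
      // up to 32 -> Int, otherwise Long
      def kindFor(fieldCount: Int): Kind =
        if (fieldCount <= 1) BooleanKind
        else if (fieldCount <= 8) ByteKind
        else if (fieldCount <= 32) IntKind
        else LongKind

      // mirrors bitmapFor/maskForOffset: which bitmap a field lands in and the
      // mask used to test and set its bit
      def slotFor(offset: Int, kind: Kind): (Int, Long) =
        (offset / kind.flags, 1L << (offset % kind.flags))
    }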
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index b85ae26cf1..f2e109a5ad 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -8,6 +8,7 @@ package transform
import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
+import language.postfixOps
/** Specialize code on types.
*
@@ -66,8 +67,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
import definitions.{
RootClass, BooleanClass, UnitClass, ArrayClass,
- ScalaValueClasses, isPrimitiveValueClass, isScalaValueType,
- SpecializedClass, AnyRefClass, ObjectClass, AnyRefModule,
+ ScalaValueClasses, isPrimitiveValueClass, isPrimitiveValueType,
+ SpecializedClass, UnspecializedClass, AnyRefClass, ObjectClass, AnyRefModule,
GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass
}
@@ -144,7 +145,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def includes(t1: TypeEnv, t2: TypeEnv) = t1 forall {
case (sym, tpe) =>
t2 get sym exists { t2tp =>
- (tpe == t2tp) || !(isScalaValueType(tpe) || isScalaValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
+ (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
}
}
@@ -265,7 +266,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* specialized type.
*/
def survivingArgs(sym: Symbol, args: List[Type]): List[Type] =
- for ((tvar, tpe) <- sym.info.typeParams.zip(args) if !tvar.isSpecialized || !isScalaValueType(tpe))
+ for ((tvar, tpe) <- sym.info.typeParams.zip(args) if !tvar.isSpecialized || !isPrimitiveValueType(tpe))
yield tpe
val specializedType = new TypeMap {
@@ -373,12 +374,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* - members with specialized type parameters found in the given environment
* - constructors of specialized classes
* - normalized members whose type bounds appear in the environment
+ * Specialization is suppressed, however, for:
+ * - any member with the @unspecialized annotation, or which has an
+ * enclosing member with the annotation.
*/
- private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = {
- specializedTypeVars(sym).intersect(env.keySet).diff(wasSpecializedForTypeVars(sym)).nonEmpty ||
- (sym.isClassConstructor && (sym.enclClass.typeParams exists (_.isSpecialized))) ||
- (isNormalizedMember(sym) && info(sym).typeBoundsIn(env))
- }
+ private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = (
+ !sym.ownerChain.exists(_ hasAnnotation UnspecializedClass) && (
+ specializedTypeVars(sym).intersect(env.keySet).diff(wasSpecializedForTypeVars(sym)).nonEmpty
+ || sym.isClassConstructor && (sym.enclClass.typeParams exists (_.isSpecialized))
+ || isNormalizedMember(sym) && info(sym).typeBoundsIn(env)
+ )
+ )
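This guard is what gives the @unspecialized annotation its effect: a member carrying it (or nested inside a member that carries it) is skipped even when its type parameter is @specialized. A minimal usage sketch, assuming scala.annotation.unspecialized as the annotation referenced by UnspecializedClass:

    import scala.annotation.unspecialized

    class Vec[@specialized(Int, Double) T](val data: Array[T]) {
      // specialized variants are generated for this accessor
      def head: T = data(0)

      // no specialized copies: needsSpecialization now skips this member
      @unspecialized
      def describe: String = "Vec of length " + data.length
    }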
def isNormalizedMember(m: Symbol) = m.isSpecialized && (info get m exists {
case NormalizedMember(_) => true
@@ -442,7 +448,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Type parameters that survive when specializing in the specified environment. */
def survivingParams(params: List[Symbol], env: TypeEnv) =
- params.filter(p => !p.isSpecialized || !isScalaValueType(env(p)))
+ params.filter(p => !p.isSpecialized || !isPrimitiveValueType(env(p)))
/** Produces the symbols from type parameters `syms` of the original owner,
* in the given type environment `env`. The new owner is `nowner`.
@@ -660,16 +666,19 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// log("other concrete " + m)
forwardToOverload(m)
- } else if (m.isValue && !m.isMethod) { // concrete value definition
+ } else if (m.isMethod && m.hasFlag(LAZY)) {
+ forwardToOverload(m)
+
+ } else if (m.isValue && !m.isMethod && !m.hasFlag(LAZY)) { // concrete value definition
def mkAccessor(field: Symbol, name: Name) = {
- val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR | LAZY)
+ val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR)
// we rely on the super class to initialize param accessors
val sym = sClass.newMethod(name, field.pos, newFlags)
info(sym) = SpecializedAccessor(field)
sym
}
def overrideIn(clazz: Symbol, sym: Symbol) = {
- val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR | LAZY)
+ val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR)
val sym1 = sym.cloneSymbol(clazz, newFlags)
sym1 modifyInfo (_ asSeenFrom (clazz.tpe, sym1.owner))
}
@@ -746,9 +755,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
-
+
val subclasses = specializations(clazz.info.typeParams) filter satisfiable
- subclasses foreach { env =>
+ subclasses foreach {
+ env =>
val spc = specializedClass(env, decls1)
val existing = clazz.owner.info.decl(spc.name)
@@ -819,7 +829,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// concise printing of type env
private def pp(env: TypeEnv): String = {
- env.toList.sortBy(_._1.name.toString) map {
+ env.toList.sortBy(_._1.name) map {
case (k, v) =>
val vsym = v.typeSymbol
if (k == vsym) "" + k.name
@@ -874,7 +884,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Return the specialized overload of `m`, in the given environment. */
private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
- val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | ACCESSOR | LAZY)
+ val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR)
// this method properly duplicates the symbol's info
( sym.cloneSymbol(owner, newFlags, specializedName(sym, env))
modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
@@ -922,7 +932,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (currentRun compiles overriding)
checkOverriddenTParams(overridden)
- val env = unify(overridden.info, overriding.info, emptyEnv, false)
+ val env = unify(overridden.info, overriding.info, emptyEnv, false, true)
def atNext = afterSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol) {
@@ -984,8 +994,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* Fails if such an environment cannot be found.
*
* If `strict` is true, a UnifyError is thrown if unification is impossible.
+ *
+ * If `tparams` is true, then the method tries to unify over type params in polytypes as well.
*/
- private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean): TypeEnv = (tp1, tp2) match {
+ private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean, tparams: Boolean = false): TypeEnv = (tp1, tp2) match {
case (TypeRef(_, sym1, _), _) if sym1.isSpecialized =>
debuglog("Unify " + tp1 + ", " + tp2)
if (isPrimitiveValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
@@ -1014,17 +1026,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("Unify polytypes " + tp1 + " and " + tp2)
if (strict && tparams1.length != tparams2.length)
unifyError(tp1, tp2)
+ else if (tparams && tparams1.length == tparams2.length)
+ unify(res1 :: tparams1.map(_.info), res2 :: tparams2.map(_.info), env, strict)
else
unify(res1, res2, env, strict)
- case (PolyType(_, res), other) => unify(res, other, env, strict)
- case (ThisType(_), ThisType(_)) => env
- case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict)
- case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict)
- case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict)
- case (_, ThisType(_)) => unify(tp1, tp2.widen, env, strict)
- case (RefinedType(_, _), RefinedType(_, _)) => env
- case (AnnotatedType(_, tp1, _), tp2) => unify(tp2, tp1, env, strict)
- case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict)
+ case (PolyType(_, res), other) => unify(res, other, env, strict)
+ case (ThisType(_), ThisType(_)) => env
+ case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict)
+ case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict)
+ case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict)
+ case (_, ThisType(_)) => unify(tp1, tp2.widen, env, strict)
+ case (RefinedType(_, _), RefinedType(_, _)) => env
+ case (AnnotatedType(_, tp1, _), tp2) => unify(tp2, tp1, env, strict)
+ case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict)
+ case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => unify(List(lo1, hi1), List(lo2, hi2), env, strict)
case _ =>
debuglog("don't know how to unify %s [%s] with %s [%s]".format(tp1, tp1.getClass, tp2, tp2.getClass))
env
@@ -1310,7 +1325,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else None
} else None
}
-
+
+ def reportError[T](body: =>T)(handler: TypeError => T): T =
+ try body
+ catch {
+ case te: TypeError =>
+ reporter.error(tree.pos, te.msg)
+ handler(te)
+ }
+
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
@@ -1319,11 +1342,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (found.typeSymbol ne tpt.tpe.typeSymbol) {
// the ctor can be specialized
debuglog("** instantiated specialized type: " + found)
- try localTyper.typedPos(tree.pos)(New(found, transformTrees(args): _*))
- catch {
- case te: TypeError =>
- reporter.error(tree.pos, te.msg)
- super.transform(tree)
+ reportError {
+ localTyper.typedPos(tree.pos)(New(found, transformTrees(args): _*))
+ } {
+ _ => super.transform(tree)
}
} else super.transform(tree)
@@ -1341,11 +1363,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val residualTargs = symbol.info.typeParams zip targs collect {
case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
}
- if (specMember.info.typeParams.isEmpty) {
- // See SI-5583. Don't know why it happens now if it didn't before.
- if (residualTargs.nonEmpty)
- log("!!! Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
-
+ // See SI-5583. Don't know why it happens now if it didn't before.
+ if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
+ log("!!! Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
localTyper.typed(sel)
}
else {
@@ -1368,26 +1388,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
tree
case Select(qual, name) =>
- debuglog("[%s] looking at Select: %s sym: %s: %s [tree.tpe: %s]".format(
- tree.pos.safeLine, tree, symbol, symbol.info, tree.tpe))
+ debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
//log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) {
// log("!!! unifying " + (symbol, symbol.tpe) + " and " + (tree, tree.tpe))
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
// log("!!! found env: " + env + "; overloads: " + overloads(symbol))
- debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
if (!env.isEmpty) {
+ // debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
val specMember = overload(symbol, env)
- //log("!!! found member: " + specMember)
if (specMember.isDefined) {
- // log("** routing " + tree + " to " + specMember.get.sym.fullName)
localTyper.typedOperator(atPos(tree.pos)(Select(transform(qual), specMember.get.sym.name)))
- } else {
+ }
+ else {
val qual1 = transform(qual)
val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
if (specMember ne NoSymbol) {
- // log("** using spec member " + specMember + ": " + specMember.tpe)
val tree1 = atPos(tree.pos)(Select(qual1, specMember))
if (specMember.isMethod)
localTyper.typedOperator(tree1)
@@ -1430,10 +1447,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
if (symbol.isConstructor) {
- val t = atOwner(symbol) {
- val superRef: Tree = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
- forwardCtorCall(tree.pos, superRef, vparamss, symbol.owner)
- }
+ val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
+
if (symbol.isPrimaryConstructor)
localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
else // duplicate the original constructor
@@ -1483,13 +1498,21 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
})
debuglog("created special overload tree " + t)
debuglog("created " + t)
- localTyper.typed(t)
+ reportError {
+ localTyper.typed(t)
+ } {
+ _ => super.transform(tree)
+ }
case fwd @ Forward(_) =>
debuglog("forward: " + fwd + ", " + ddef)
val rhs1 = forwardCall(tree.pos, gen.mkAttributedRef(symbol.owner.thisType, fwd.target), vparamss)
debuglog("-->d completed forwarder to specialized overload: " + fwd.target + ": " + rhs1)
- localTyper.typed(deriveDefDef(tree)(_ => rhs1))
+ reportError {
+ localTyper.typed(deriveDefDef(tree)(_ => rhs1))
+ } {
+ _ => super.transform(tree)
+ }
case SpecializedAccessor(target) =>
val rhs1 = if (symbol.isGetter)
@@ -1563,7 +1586,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// val (_, origtparams) = splitParams(source.typeParams)
val env = typeEnv(symbol)
val boundTvars = env.keySet
- val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isScalaValueType(env(tparam)))
+ val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isPrimitiveValueType(env(tparam)))
if (origtparams.nonEmpty || symbol.typeParams.nonEmpty)
debuglog("substituting " + origtparams + " for " + symbol.typeParams)
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 9915f7e9fc..4c9d855413 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -154,7 +154,10 @@ abstract class TailCalls extends Transform {
def isTransformed = isEligible && accessed(label)
def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
- def newThis(pos: Position) = method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
+ def newThis(pos: Position) = logResult("Creating new `this` during tailcalls\n method: %s\n current class: %s".format(
+ method.ownerChain.mkString(" -> "), currentClass.ownerChain.mkString(" -> "))) {
+ method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
+ }
override def toString(): String = (
"" + method.name + " tparams: " + tparams + " tailPos: " + tailPos +
@@ -222,7 +225,7 @@ abstract class TailCalls extends Transform {
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
else if (!isRecursiveCall) {
- if (receiverIsSuper) failHere("it contains a recursive call targetting a supertype")
+ if (receiverIsSuper) failHere("it contains a recursive call targeting supertype " + receiver.tpe)
else failHere(defaultReason)
}
else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments")
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 11f06a0541..8af12f3f10 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -8,6 +8,7 @@ package transform
import symtab.Flags._
import scala.collection.{ mutable, immutable }
+import language.postfixOps
/*<export> */
/** - uncurry all symbol and tree types (@see UnCurryPhase) -- this includes normalizing all proper types.
@@ -71,14 +72,21 @@ abstract class UnCurry extends InfoTransform
}
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
-
- private var needTryLift = false
- private var inPattern = false
+ private var needTryLift = false
+ private var inPattern = false
private var inConstructorFlag = 0L
- private val byNameArgs = new mutable.HashSet[Tree]
- private val noApply = new mutable.HashSet[Tree]
- private val newMembers = mutable.ArrayBuffer[Tree]()
- private val repeatedParams = mutable.Map[Symbol, List[ValDef]]()
+ private val byNameArgs = mutable.HashSet[Tree]()
+ private val noApply = mutable.HashSet[Tree]()
+ private val newMembers = mutable.Map[Symbol, mutable.Buffer[Tree]]()
+ private val repeatedParams = mutable.Map[Symbol, List[ValDef]]()
+
+ /** Add a new synthetic member for `currentOwner` */
+ private def addNewMember(t: Tree): Unit =
+ newMembers.getOrElseUpdate(currentOwner, mutable.Buffer()) += t
+
+ /** Process synthetic members for `owner`. They are removed from `newMembers` as a side effect. */
+ @inline private def useNewMembers[T](owner: Symbol)(f: List[Tree] => T): T =
+ f(newMembers.remove(owner).getOrElse(Nil).toList)
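A standalone model of this bookkeeping (strings stand in for Symbols and Trees): synthetic members are queued per owner and drained exactly once when that owner's template is rewritten, so members registered for one class are only ever added to that class's template.

    import scala.collection.mutable

    object PendingMembersSketch {
      private val pending = mutable.Map[String, mutable.Buffer[String]]()

      def addNewMember(owner: String, member: String): Unit =
        pending.getOrElseUpdate(owner, mutable.Buffer()) += member

      // hands the queued members for `owner` to `f`, removing them as a side effect
      def useNewMembers[T](owner: String)(f: List[String] => T): T =
        f(pending.remove(owner).map(_.toList).getOrElse(Nil))

      def main(args: Array[String]): Unit = {
        addNewMember("C", "varargs forwarder for C.m")
        println(useNewMembers("C")(_.size))   // 1
        println(useNewMembers("C")(_.size))   // 0: already drained
      }
    }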
@inline private def withInPattern[T](value: Boolean)(body: => T): T = {
inPattern = value
@@ -86,30 +94,40 @@ abstract class UnCurry extends InfoTransform
finally inPattern = !value
}
+ private def newFunction0(body: Tree): Tree = {
+ val result = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function]
+ log("Change owner from %s to %s in %s".format(currentOwner, result.symbol, result.body))
+ result.body changeOwner (currentOwner -> result.symbol)
+ transformFunction(result)
+ }
+
private lazy val serialVersionUIDAnnotation =
AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
private var nprinted = 0
- override def transform(tree: Tree): Tree = try { //debug
- postTransform(mainTransform(tree))
- } catch {
- case ex: Throwable =>
- if (nprinted < 10) {
- Console.println("exception when traversing " + tree)
- nprinted += 1
- }
- throw ex
- }
+ // I don't have a clue why I'm catching TypeErrors here, but it's better
+ // than spewing stack traces at end users for internal errors. Examples
+ // which hit at this point should not be hard to come by, but the immediate
+ // motivation can be seen in continuations-neg/t3718.
+ override def transform(tree: Tree): Tree = (
+ try postTransform(mainTransform(tree))
+ catch { case ex: TypeError =>
+ unit.error(ex.pos, ex.msg)
+ debugStack(ex)
+ EmptyTree
+ }
+ )
/* Is tree a reference `x` to a call by name parameter that needs to be converted to
* x.apply()? Note that this is not the case if `x` is used as an argument to another
* call by name parameter.
*/
- def isByNameRef(tree: Tree): Boolean =
- tree.isTerm && tree.hasSymbol &&
- isByNameParamType(tree.symbol.tpe) &&
- !byNameArgs(tree)
+ def isByNameRef(tree: Tree) = (
+ tree.isTerm
+ && !byNameArgs(tree)
+ && tree.hasSymbolWhich(s => isByNameParamType(s.tpe))
+ )
/** Uncurry a type of a tree node.
* This function is sensitive to whether or not we are in a pattern -- when in a pattern
@@ -236,23 +254,21 @@ abstract class UnCurry extends InfoTransform
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
+ case _ if fun.tpe.typeSymbol == PartialFunctionClass =>
+ // only get here when running under -Xoldpatmat
+ synthPartialFunction(fun)
case _ =>
- def owner = fun.symbol.owner
- def targs = fun.tpe.typeArgs
- def isPartial = fun.tpe.typeSymbol == PartialFunctionClass
- assert(!(opt.virtPatmat && isPartial)) // empty-selector matches have already been translated into instantiations of anonymous (partial) functions
-
- def parents =
- if (isFunctionType(fun.tpe)) List(abstractFunctionForFunctionType(fun.tpe), SerializableClass.tpe)
- else if (isPartial) List(appliedType(AbstractPartialFunctionClass, targs: _*), SerializableClass.tpe)
- else List(ObjectClass.tpe, fun.tpe, SerializableClass.tpe)
-
- val anonClass = owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
+ val parents = (
+ if (isFunctionType(fun.tpe)) addSerializable(abstractFunctionForFunctionType(fun.tpe))
+ else addSerializable(ObjectClass.tpe, fun.tpe)
+ )
+ val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+ val targs = fun.tpe.typeArgs
val (formals, restpe) = (targs.init, targs.last)
- def applyMethodDef = {
+ val applyMethodDef = {
val methSym = anonClass.newMethod(nme.apply, fun.pos, FINAL)
methSym setInfoAndEnter MethodType(methSym newSyntheticValueParams formals, restpe)
@@ -268,71 +284,115 @@ abstract class UnCurry extends InfoTransform
methDef
}
- // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
- def applyOrElseMethodDef = {
- val methSym = anonClass.newMethod(fun.pos, nme.applyOrElse) setFlag (FINAL | OVERRIDE)
+ localTyper.typedPos(fun.pos) {
+ Block(
+ List(ClassDef(anonClass, NoMods, List(List()), List(List()), List(applyMethodDef), fun.pos)),
+ Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
+ }
- val List(argtpe) = formals
- val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe)
- val B1 = methSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.lower(restpe)
- val methFormals = List(A1.tpe, functionType(List(A1.tpe), B1.tpe))
- val params@List(x, default) = methSym newSyntheticValueParams methFormals
- methSym setInfoAndEnter polyType(List(A1, B1), MethodType(params, B1.tpe))
+ }
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), List(x))
- val body = localTyper.typedPos(fun.pos) { import CODE._
- gen.mkUncheckedMatch(gen.withDefaultCase(substParam(fun.body), scrut => REF(default) APPLY (REF(x))))
- }
- body.changeOwner(fun.symbol -> methSym)
+ def synthPartialFunction(fun: Function) = {
+ if (!settings.XoldPatmat.value) debugwarn("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.")
+
+ val targs = fun.tpe.typeArgs
+ val (formals, restpe) = (targs.init, targs.last)
+
+ val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
+ val parents = addSerializable(appliedType(AbstractPartialFunctionClass, targs: _*))
+ anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+
+ // duplicate before applyOrElseMethodDef is run so that it does not mess up our trees and label symbols (we have a fresh set)
+ // otherwise `TreeSymSubstituter(fun.vparams map (_.symbol), params)` won't work as the subst has been run already
+ val bodyForIDA = {
+ val duped = fun.body.duplicate
+ val oldParams = new mutable.ListBuffer[Symbol]()
+ val newParams = new mutable.ListBuffer[Symbol]()
+
+ val oldSyms0 =
+ duped filter {
+ case l@LabelDef(_, params, _) =>
+ params foreach {p =>
+ val oldSym = p.symbol
+ p.symbol = oldSym.cloneSymbol
+ oldParams += oldSym
+ newParams += p.symbol
+ }
+ true
+ case _ => false
+ } map (_.symbol)
+ val oldSyms = oldParams.toList ++ oldSyms0
+ val newSyms = newParams.toList ++ (oldSyms0 map (_.cloneSymbol))
+ // println("duping "+ oldSyms +" --> "+ (newSyms map (_.ownerChain)))
- val methDef = DefDef(methSym, body)
+ val substLabels = new TreeSymSubstituter(oldSyms, newSyms)
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
+ substLabels(duped)
+ }
- // duplicate before applyOrElseMethodDef is run so we start with the same symbols as applyOrElseMethodDef
- // otherwise `TreeSymSubstituter(fun.vparams map (_.symbol), params)` won't work as the subst has been run already
- val bodyForIDA = fun.body.duplicate
- def isDefinedAtMethodDef = {
- val methSym = anonClass.newMethod(nme.isDefinedAt, fun.pos, FINAL)
- val params = methSym newSyntheticValueParams formals
- methSym setInfoAndEnter MethodType(params, BooleanClass.tpe)
-
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
- def doSubst(x: Tree) = substParam(resetLocalAttrs(x)) // see pos/t1761 for why `resetLocalAttrs`
- object isDefinedAtTransformer extends gen.MatchMatcher {
- // TODO: optimize duplication, but make sure ValDef's introduced by wrap are treated correctly
- override def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = { import CODE._
- gen.mkUncheckedMatch(
- if (cases exists treeInfo.isDefaultCase) TRUE_typed
- else
- doSubst(wrap(
- Match(selector,
- (cases map (c => deriveCaseDef(c)(x => TRUE_typed))) :+ (
- DEFAULT ==> FALSE_typed)
- )))
- )
+ // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
+ val applyOrElseMethodDef = {
+ val methSym = anonClass.newMethod(fun.pos, nme.applyOrElse) setFlag (FINAL | OVERRIDE)
+
+ val List(argtpe) = formals
+ val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe)
+ val B1 = methSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.lower(restpe)
+ val methFormals = List(A1.tpe, functionType(List(A1.tpe), B1.tpe))
+ val params@List(x, default) = methSym newSyntheticValueParams methFormals
+ methSym setInfoAndEnter polyType(List(A1, B1), MethodType(params, B1.tpe))
+
+ val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), List(x))
+ val body = localTyper.typedPos(fun.pos) { import CODE._
+ def defaultAction(scrut: Tree) = REF(default) APPLY (REF(x))
+
+ substParam(fun.body) match {
+ case orig@Match(selector, cases) =>
+ if (cases exists treeInfo.isDefaultCase) orig
+ else {
+ val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
+ Match(/*gen.mkUnchecked*/(selector), cases :+ defaultCase)
}
- }
- val body = isDefinedAtTransformer(bodyForIDA)
- body.changeOwner(fun.symbol -> methSym)
- DefDef(methSym, body)
}
+ }
+ body.changeOwner(fun.symbol -> methSym)
- val members =
- if (isPartial) List(applyOrElseMethodDef, isDefinedAtMethodDef)
- else List(applyMethodDef)
+ val methDef = DefDef(methSym, body)
+
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ methDef.tpt setType localTyper.packedType(body, methSym)
+ methDef
+ }
+
+ val isDefinedAtMethodDef = {
+ val methSym = anonClass.newMethod(nme.isDefinedAt, fun.pos, FINAL)
+ val params = methSym newSyntheticValueParams formals
+ methSym setInfoAndEnter MethodType(params, BooleanClass.tpe)
+
+ val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
+ def doSubst(x: Tree) = substParam(resetLocalAttrsKeepLabels(x)) // see pos/t1761 for why `resetLocalAttrs`, but must keep label symbols around
+
+ val body = bodyForIDA match {
+ case Match(selector, cases) =>
+ if (cases exists treeInfo.isDefaultCase) TRUE_typed
+ else
+ doSubst(Match(/*gen.mkUnchecked*/(selector),
+ (cases map (c => deriveCaseDef(c)(x => TRUE_typed))) :+ (
+ DEFAULT ==> FALSE_typed)))
- localTyper.typedPos(fun.pos) {
- Block(
- List(ClassDef(anonClass, NoMods, List(List()), List(List()), members, fun.pos)),
- Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
- }
}
+ body.changeOwner(fun.symbol -> methSym)
+
+ DefDef(methSym, body)
+ }
+
+ localTyper.typedPos(fun.pos) {
+ Block(
+ List(ClassDef(anonClass, NoMods, List(List()), List(List()), List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
+ Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
+ }
+ }
def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = {
val isJava = fun.isJavaDefined
@@ -358,30 +418,35 @@ abstract class UnCurry extends InfoTransform
def sequenceToArray(tree: Tree) = {
val toArraySym = tree.tpe member nme.toArray
assert(toArraySym != NoSymbol)
- def getClassTag(tp: Type): Tree = {
- val tag = localTyper.resolveClassTag(tree, tp)
+ def getArrayTag(tp: Type): Tree = {
+ val tag = localTyper.resolveArrayTag(tp, tree.pos)
// Don't want bottom types getting any further than this (SI-4024)
- if (tp.typeSymbol.isBottomClass) getClassTag(AnyClass.tpe)
+ if (tp.typeSymbol.isBottomClass) getArrayTag(AnyClass.tpe)
else if (!tag.isEmpty) tag
- else if (tp.bounds.hi ne tp) getClassTag(tp.bounds.hi)
- else localTyper.TyperErrorGen.MissingClassTagError(tree, tp)
+ else if (tp.bounds.hi ne tp) getArrayTag(tp.bounds.hi)
+ else localTyper.TyperErrorGen.MissingArrayTagError(tree, tp)
+ }
+ def traversableArrayTag(tpe: Type): Tree = {
+ (tpe baseType TraversableClass).typeArgs match {
+ case targ :: _ => getArrayTag(targ)
+ case _ => EmptyTree
+ }
}
afterUncurry {
localTyper.typedPos(pos) {
- Apply(gen.mkAttributedSelect(tree, toArraySym),
- List(getClassTag(tree.tpe.baseType(TraversableClass).typeArgs.head)))
+ gen.mkMethodCall(tree, toArraySym, Nil, List(traversableArrayTag(tree.tpe)))
}
}
}
var suffix: Tree =
if (treeInfo isWildcardStarArgList args) {
- val Typed(tree, _) = args.last;
+ val Typed(tree, _) = args.last
if (isJava)
if (tree.tpe.typeSymbol == ArrayClass) tree
else sequenceToArray(tree)
else
- if (tree.tpe.typeSymbol isSubClass TraversableClass) tree // @PP: I suspect this should be SeqClass
+ if (tree.tpe.typeSymbol isSubClass SeqClass) tree
else arrayToSequence(tree, varargsElemType)
}
else {
@@ -392,7 +457,8 @@ abstract class UnCurry extends InfoTransform
}
afterUncurry {
- if (isJava && isPrimitiveArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
+ if (isJava && !isReferenceArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
+ // The array isn't statically known to be a reference array, so call ScalaRuntime.toObjectArray.
suffix = localTyper.typedPos(pos) {
gen.mkRuntimeCall(nme.toObjectArray, List(suffix))
}
@@ -404,22 +470,26 @@ abstract class UnCurry extends InfoTransform
val args1 = if (isVarArgTypes(formals)) transformVarargs(formals.last.typeArgs.head) else args
map2(formals, args1) { (formal, arg) =>
- if (!isByNameParamType(formal)) {
+ if (!isByNameParamType(formal))
arg
- } else if (isByNameRef(arg)) {
+ else if (isByNameRef(arg)) {
byNameArgs += arg
- arg setType functionType(List(), arg.tpe)
- } else {
- if (opt.verboseDebug) {
- val posstr = arg.pos.source.path + ":" + arg.pos.line
- val permstr = if (fun.isPrivate) "private" else "notprivate"
- log("byname | %s | %s | %s".format(posstr, fun.fullName, permstr))
- }
+ arg setType functionType(Nil, arg.tpe)
+ }
+ else {
+ log("byname | %s | %s | %s".format(
+ arg.pos.source.path + ":" + arg.pos.line, fun.fullName,
+ if (fun.isPrivate) "private" else "")
+ )
- val result = localTyper.typed(
- Function(Nil, arg) setPos arg.pos).asInstanceOf[Function]
- new ChangeOwnerTraverser(currentOwner, result.symbol).traverse(arg)
- transformFunction(result)
+ arg match {
+ // don't add a thunk for a by-name argument if the argument is already an application
+ // of a Function0. We can then drop the application and use the existing Function0.
+ case Apply(Select(recv, nme.apply), Nil) if recv.tpe.typeSymbol isSubClass FunctionClass(0) =>
+ recv
+ case _ =>
+ newFunction0(arg)
+ }
}
}
}
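A small source-level illustration of the by-name handling this hunk adjusts (plain Scala, not compiler trees): uncurry wraps each by-name argument in a Function0 thunk, and after this change an argument that is already `f.apply()` on a Function0 reuses `f` instead of allocating a fresh thunk.

    object ByNameThunkSketch {
      def log(msg: => String): Unit = println(msg)   // by-name parameter

      def demo(existing: () => String): Unit = {
        // an arbitrary expression is wrapped in a fresh () => ... thunk
        log("fresh " + System.nanoTime())
        // `existing.apply()` is recognized and `existing` itself is passed,
        // so no additional Function0 is allocated here
        log(existing.apply())
      }

      def main(args: Array[String]): Unit =
        demo(() => "already a Function0")
    }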
@@ -506,7 +576,12 @@ abstract class UnCurry extends InfoTransform
if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions.value)))
replaceElidableTree(tree)
else translateSynchronized(tree) match {
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case dd @ DefDef(mods, name, tparams, _, tpt, rhs) =>
+ // Remove default argument trees from parameter ValDefs, SI-4812
+ val vparamssNoRhs = dd.vparamss mapConserve (_ mapConserve {p =>
+ treeCopy.ValDef(p, p.mods, p.name, p.tpt, EmptyTree)
+ })
+
if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
withNeedLift(false) {
@@ -524,10 +599,10 @@ abstract class UnCurry extends InfoTransform
}
treeCopy.DefDef(
dd, mods, name, transformTypeDefs(tparams),
- transformValDefss(vparamss), transform(tpt), rhs1)
+ transformValDefss(vparamssNoRhs), transform(tpt), rhs1)
}
} else {
- super.transform(dd)
+ super.transform(treeCopy.DefDef(dd, mods, name, tparams, vparamssNoRhs, tpt, rhs))
}
}
case ValDef(_, _, _, rhs) =>
@@ -613,9 +688,8 @@ abstract class UnCurry extends InfoTransform
tree match {
/* Some uncurry post transformations add members to templates.
- * When inside a template, the following sequence is available:
- * - newMembers
- * Any entry in this sequence will be added into the template
+ *
+ * Members registered by `addNewMember` for the current template are added
* once the template transformation has finished.
*
* In particular, this case will add:
@@ -623,8 +697,10 @@ abstract class UnCurry extends InfoTransform
*/
case Template(_, _, _) =>
localTyper = typer.atOwner(tree, currentClass)
- try deriveTemplate(tree)(transformTrees(newMembers.toList) ::: _)
- finally newMembers.clear()
+ useNewMembers(currentClass) {
+ newMembers =>
+ deriveTemplate(tree)(transformTrees(newMembers) ::: _)
+ }
case dd @ DefDef(_, _, _, vparamss0, _, rhs0) =>
val flatdd = copyDefDef(dd)(
@@ -666,12 +742,12 @@ abstract class UnCurry extends InfoTransform
case Apply(Apply(fn, args), args1) =>
treeCopy.Apply(tree, fn, args ::: args1)
case Ident(name) =>
- assert(name != tpnme.WILDCARD_STAR)
+ assert(name != tpnme.WILDCARD_STAR, tree)
applyUnary()
case Select(_, _) | TypeApply(_, _) =>
applyUnary()
- case ret @ Return(expr) if (isNonLocalReturn(ret)) =>
- debuglog("non local return in "+ret.symbol+" from "+currentOwner.enclMethod)
+ case ret @ Return(expr) if isNonLocalReturn(ret) =>
+ log("non-local return from %s to %s".format(currentOwner.enclMethod, ret.symbol))
atPos(ret.pos)(nonLocalReturnThrow(expr, ret.symbol))
case TypeTree() =>
tree
@@ -696,7 +772,7 @@ abstract class UnCurry extends InfoTransform
/* Called during post transform, after the method argument lists have been flattened.
* It looks for the method in the `repeatedParams` map, and generates a Java-style
- * varargs forwarder. It then adds the forwarder to the `newMembers` sequence.
+ * varargs forwarder.
*/
private def addJavaVarargsForwarders(dd: DefDef, flatdd: DefDef): DefDef = {
if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol))
@@ -719,10 +795,10 @@ abstract class UnCurry extends InfoTransform
)
}
- val reps = repeatedParams(dd.symbol)
- val rpsymbols = reps.map(_.symbol).toSet
- val theTyper = typer.atOwner(dd, currentClass)
- val flatparams = flatdd.vparamss.head
+ val reps = repeatedParams(dd.symbol)
+ val rpsymbols = reps.map(_.symbol).toSet
+ val theTyper = typer.atOwner(dd, currentClass)
+ val flatparams = flatdd.vparamss.head
// create the type
val forwformals = flatparams map {
@@ -773,8 +849,7 @@ abstract class UnCurry extends InfoTransform
case None =>
// enter symbol into scope
currentClass.info.decls enter forwsym
- // add the method to `newMembers`
- newMembers += forwtree
+ addNewMember(forwtree)
}
flatdd
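The UnCurry hunks above replace the single shared `newMembers` buffer (previously cleared in a `finally`) with members registered through `addNewMember` and consumed once per template through `useNewMembers`, keyed by the current class. A minimal standalone sketch of that per-owner bookkeeping, using hypothetical `Owner`/`Member` stand-ins rather than the real compiler types or method signatures:

import scala.collection.mutable

object NewMembersSketch {
  type Owner  = String
  type Member = String

  private val pending = mutable.Map.empty[Owner, mutable.ListBuffer[Member]]

  // Register a member for the given owner.
  def addNewMember(owner: Owner, m: Member): Unit =
    pending.getOrElseUpdate(owner, mutable.ListBuffer.empty[Member]) += m

  // Hand the accumulated members to `body` and drop them afterwards,
  // so one template's members cannot leak into another.
  def useNewMembers[T](owner: Owner)(body: List[Member] => T): T = {
    val ms = pending.remove(owner).map(_.toList).getOrElse(Nil)
    body(ms)
  }
}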
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 18c7635b1e..a77df71312 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -24,6 +24,7 @@ trait Analyzer extends AnyRef
with NamesDefaults
with TypeDiagnostics
with ContextErrors
+ with StdAttachments
{
val global : Global
import global._
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index edc69be827..affa9cd63b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -9,7 +9,6 @@ package typechecker
import scala.collection.{ mutable, immutable }
import scala.tools.util.StringOps.{ countElementsAsString, countAsString }
import symtab.Flags.{ PRIVATE, PROTECTED }
-import scala.tools.util.EditDistance.similarString
trait ContextErrors {
self: Analyzer =>
@@ -129,11 +128,11 @@ trait ContextErrors {
val retyped = typed (tree.duplicate setType null)
val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
- if (foundDecls.isEmpty) found
+ if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found
else {
// The members arrive marked private, presumably because there was no
// expected type and so they're considered members of an anon class.
- foundDecls foreach (_ resetFlag (PRIVATE | PROTECTED))
+ foundDecls foreach (_.makePublic)
// TODO: if any of the found parents match up with required parents after normalization,
// print the error so that they match. The major beneficiary there would be
// java.lang.Object vs. AnyRef.
@@ -171,36 +170,7 @@ trait ContextErrors {
NormalTypeError(tree, "reference to " + name + " is ambiguous;\n" + msg)
def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context) = {
- /*** Disabled pending investigation of performance impact.
-
- // This laborious determination arrived at to keep the tests working.
- val calcSimilar = (
- name.length > 2 && (
- startingIdentCx.reportErrors
- || startingIdentCx.enclClassOrMethod.reportErrors
- )
- )
- // avoid calculating if we're in "silent" mode.
- // name length check to limit unhelpful suggestions for e.g. "x" and "b1"
- val similar = {
- if (!calcSimilar) ""
- else {
- val allowed = (
- startingIdentCx.enclosingContextChain
- flatMap (ctx => ctx.scope.toList ++ ctx.imports.flatMap(_.allImportedSymbols))
- filter (sym => sym.isTerm == name.isTermName)
- filterNot (sym => sym.isPackage || sym.isSynthetic || sym.hasMeaninglessName)
- )
- val allowedStrings = (
- allowed.map("" + _.name).distinct.sorted
- filterNot (s => (s contains '$') || (s contains ' '))
- )
- similarString("" + name, allowedStrings)
- }
- }
- */
- val similar = ""
- NormalTypeError(tree, "not found: "+decodeWithKind(name, owner) + similar)
+ NormalTypeError(tree, "not found: "+decodeWithKind(name, owner))
}
// typedAppliedTypeTree
@@ -344,6 +314,11 @@ trait ContextErrors {
setError(tree)
}
+ def MacroPartialApplicationError(tree: Tree) = {
+ issueNormalTypeError(tree, "macros cannot be partially applied")
+ setError(tree)
+ }
+
//typedReturn
def ReturnOutsideOfDefError(tree: Tree) = {
issueNormalTypeError(tree, "return outside method definition")
@@ -483,7 +458,7 @@ trait ContextErrors {
val keep = missing take 3 map (_.name)
".\nUnspecified value parameter%s %s".format(
if (missing.tail.isEmpty) "" else "s",
- if (missing drop 3 nonEmpty) (keep :+ "...").mkString(", ")
+ if ((missing drop 3).nonEmpty) (keep :+ "...").mkString(", ")
else keep.mkString("", ", ", ".")
)
}
@@ -504,6 +479,9 @@ trait ContextErrors {
def ApplyWithoutArgsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, fun.tpe+" does not take parameters")
+ def DynamicVarArgUnsupported(tree: Tree, name: String) =
+ issueNormalTypeError(tree, name+ " does not support passing a vararg parameter")
+
//checkClassType
def TypeNotAStablePrefixError(tpt: Tree, pre: Type) = {
issueNormalTypeError(tpt, "type "+pre+" is not a stable prefix")
@@ -584,9 +562,9 @@ trait ContextErrors {
def AbstractExistentiallyOverParamerizedTpeError(tree: Tree, tp: Type) =
issueNormalTypeError(tree, "can't existentially abstract over parameterized type " + tp)
- // classTagTree
- def MissingClassTagError(tree: Tree, tp: Type) = {
- issueNormalTypeError(tree, "cannot find class tag for element type "+tp)
+ // resolveArrayTag
+ def MissingArrayTagError(tree: Tree, tp: Type) = {
+ issueNormalTypeError(tree, "cannot find array tag for element type "+tp)
setError(tree)
}
@@ -614,19 +592,23 @@ trait ContextErrors {
setError(tree)
}
- // checkNoDoubleDefs...
- // @PP: I hacked the filename in (context0.unit) to work around SI-4893. It would be
- // much better if every symbol could offer some idea of where it came from, else
- // the obviously untrue claim that something has been defined twice can only frustrate.
- // There's no direct test because partest doesn't work, but to reproduce, separately
- // compile the next two lines:
- // package object foo { val x: Class[_] = null }
- // package foo
def DefDefinedTwiceError(sym0: Symbol, sym1: Symbol) = {
+ // Most of this hard work is associated with SI-4893.
val isBug = sym0.isAbstractType && sym1.isAbstractType && (sym0.name startsWith "_$")
- issueSymbolTypeError(sym0, sym1+" is defined twice in " + context0.unit
- + ( if (isBug) "\n(this error is likely due to a bug in the scala compiler involving wildcards in package objects)" else "" )
+ val addendums = List(
+ if (sym0.associatedFile eq sym1.associatedFile)
+ Some("conflicting symbols both originated in file '%s'".format(sym0.associatedFile.canonicalPath))
+ else if ((sym0.associatedFile ne null) && (sym1.associatedFile ne null))
+ Some("conflicting symbols originated in files '%s' and '%s'".format(sym0.associatedFile.canonicalPath, sym1.associatedFile.canonicalPath))
+ else None ,
+ if (isBug) Some("Note: this may be due to a bug in the compiler involving wildcards in package objects") else None
)
+ val addendum = addendums.flatten match {
+ case Nil => ""
+ case xs => xs.mkString("\n ", "\n ", "")
+ }
+
+ issueSymbolTypeError(sym0, sym1+" is defined twice" + addendum)
}
// cyclic errors
@@ -644,7 +626,7 @@ trait ContextErrors {
private def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = {
def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")")
- def resType = if (pt isWildcard) "" else " with expected result type " + pt
+ def resType = if (pt.isWildcard) "" else " with expected result type " + pt
def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt
def locals = alternatives(tree) flatMap (_.typeParams)
@@ -708,34 +690,44 @@ trait ContextErrors {
setError(tree)
}
- def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = {
+ // side-effect on the tree, break the overloaded type cycle in infer
+ @inline
+ private def setErrorOnLastTry(lastTry: Boolean, tree: Tree) = if (lastTry) setError(tree)
+
+ def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type, lastTry: Boolean) = {
issueNormalTypeError(tree,
applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
// since inferMethodAlternative modifies the state of the tree
// we have to set the type of tree to ErrorType only in the very last
- // fallback action that is done in the inference (tracking it manually is error prone).
+ // fallback action that is done in the inference.
// This avoids entering infinite loop in doTypeApply.
- if (implicitly[Context].reportErrors) setError(tree)
+ setErrorOnLastTry(lastTry, tree)
}
def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol,
- firstCompeting: Symbol, argtpes: List[Type], pt: Type) = {
- val msg0 =
- "argument types " + argtpes.mkString("(", ",", ")") +
- (if (pt == WildcardType) "" else " and expected result type " + pt)
- val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
- // discover last attempt in a similar way as for NoBestMethodAlternativeError
- if (implicitly[Context].ambiguousErrors) setError(tree)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ firstCompeting: Symbol, argtpes: List[Type], pt: Type, lastTry: Boolean) = {
+
+ if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous) {
+ val msg0 =
+ "argument types " + argtpes.mkString("(", ",", ")") +
+ (if (pt == WildcardType) "" else " and expected result type " + pt)
+ val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ setErrorOnLastTry(lastTry, tree)
+ } else setError(tree) // do not even try further attempts because they should all fail
+ // even if this is not the last attempt (because of the possibility of a StackOverflow on the horizon)
+
}
- def NoBestExprAlternativeError(tree: Tree, pt: Type) =
+ def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = {
issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt, isPossiblyMissingArgs(tree.symbol.tpe, pt))))
+ setErrorOnLastTry(lastTry, tree)
+ }
- def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type) = {
+ def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type, lastTry: Boolean) = {
val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt)
- setError(tree)
issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ setErrorOnLastTry(lastTry, tree)
}
// checkBounds
@@ -858,7 +850,7 @@ trait ContextErrors {
// (note that this is not a compilation error, it's an artifact of implicit search algorithm)
// normally, such "errors" are discarded by `isCyclicOrErroneous` in Implicits.scala
// but in our case this won't work, because isCyclicOrErroneous catches CyclicReference exceptions
- // while our error will manifest itself as a "recursive method needs a return type"
+ // while our error will present itself as a "recursive method needs a return type"
//
// hence we (together with reportTypeError in TypeDiagnostics) make sure that this CyclicReference
// evades all the handlers on its way and successfully reaches `isCyclicOrErroneous` in Implicits
@@ -892,8 +884,12 @@ trait ContextErrors {
val s1 = if (prevSym.isModule) "case class companion " else ""
val s2 = if (prevSym.isSynthetic) "(compiler-generated) " + s1 else ""
val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym
+ val where = if (currentSym.owner.isPackageClass != prevSym.owner.isPackageClass) {
+ val inOrOut = if (prevSym.owner.isPackageClass) "outside of" else "in"
+ " %s package object %s".format(inOrOut, ""+prevSym.effectiveOwner.name)
+ } else ""
- issueSymbolTypeError(currentSym, prevSym.name + " is already defined as " + s2 + s3)
+ issueSymbolTypeError(currentSym, prevSym.name + " is already defined as " + s2 + s3 + where)
}
def MaxParametersCaseClassError(tree: Tree) =
@@ -1052,14 +1048,24 @@ trait ContextErrors {
setError(arg)
} else arg
}
+
+ def WarnAfterNonSilentRecursiveInference(param: Symbol, arg: Tree)(implicit context: Context) = {
+ val note = "type-checking the invocation of "+ param.owner +" checks if the named argument expression '"+ param.name + " = ...' is a valid assignment\n"+
+ "in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for "+ param.name +"."
+ context.warning(arg.pos, note)
+ }
def UnknownParameterNameNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = {
issueNormalTypeError(arg, "unknown parameter name: " + name)
setError(arg)
}
- def DoubleParamNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = {
- issueNormalTypeError(arg, "parameter specified twice: "+ name)
+ def DoubleParamNamesDefaultError(arg: Tree, name: Name, pos: Int, otherName: Option[Name])(implicit context: Context) = {
+ val annex = otherName match {
+ case Some(oName) => "\nNote that '"+ oName +"' is not a parameter name of the invoked method."
+ case None => ""
+ }
+ issueNormalTypeError(arg, "parameter '"+ name +"' is already specified at parameter position "+ pos + annex)
setError(arg)
}
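The `lastTry` flag threaded through the alternative-error methods above means errors are still reported on every failed attempt, but the tree is only set to ErrorType on the final fallback, which is what avoids the doTypeApply loop mentioned in the comment. A miniature of that discipline, with hypothetical names and a plain mutable slot standing in for the tree:

object LastTrySketch {
  final class ResultSlot { var erroneous = false }

  def reportError(msg: String): Unit = println("error: " + msg)

  def noBestAlternative(slot: ResultSlot, msg: String, lastTry: Boolean): Unit = {
    reportError(msg)                        // report on every attempt
    if (lastTry) slot.erroneous = true      // poison the result only on the last try
  }
}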
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index fe1c90fe67..f4f081252f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -67,6 +67,7 @@ trait Contexts { self: Analyzer =>
val c = sc.make(unit, tree, sc.owner, sc.scope, sc.imports)
if (erasedTypes) c.setThrowErrors() else c.setReportErrors()
c.implicitsEnabled = !erasedTypes
+ c.enrichmentEnabled = c.implicitsEnabled
c
}
@@ -106,7 +107,7 @@ trait Contexts { self: Analyzer =>
var depth: Int = 0
var imports: List[ImportInfo] = List() // currently visible imports
var openImplicits: List[(Type,Tree)] = List() // types for which implicit arguments
- // are currently searched
+ // are currently searched
// for a named application block (Tree) the corresponding NamedApplyInfo
var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None
var prefix: Type = NoPrefix
@@ -120,6 +121,7 @@ trait Contexts { self: Analyzer =>
var diagnostic: List[String] = Nil // these messages are printed when issuing an error
var implicitsEnabled = false
var macrosEnabled = true
+ var enrichmentEnabled = false // to selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed
var checking = false
var retyping = false
@@ -192,8 +194,25 @@ trait Contexts { self: Analyzer =>
def withImplicitsDisabled[T](op: => T): T = {
val saved = implicitsEnabled
implicitsEnabled = false
+ val savedP = enrichmentEnabled
+ enrichmentEnabled = false
try op
- finally implicitsEnabled = saved
+ finally {
+ implicitsEnabled = saved
+ enrichmentEnabled = savedP
+ }
+ }
+
+ def withImplicitsDisabledAllowEnrichment[T](op: => T): T = {
+ val saved = implicitsEnabled
+ implicitsEnabled = false
+ val savedP = enrichmentEnabled
+ enrichmentEnabled = true
+ try op
+ finally {
+ implicitsEnabled = saved
+ enrichmentEnabled = savedP
+ }
}
def withMacrosEnabled[T](op: => T): T = {
@@ -246,6 +265,7 @@ trait Contexts { self: Analyzer =>
c.typingIndentLevel = typingIndentLevel
c.implicitsEnabled = this.implicitsEnabled
c.macrosEnabled = this.macrosEnabled
+ c.enrichmentEnabled = this.enrichmentEnabled
c.checking = this.checking
c.retyping = this.retyping
c.openImplicits = this.openImplicits
@@ -298,6 +318,7 @@ trait Contexts { self: Analyzer =>
def makeImplicit(reportAmbiguousErrors: Boolean) = {
val c = makeSilent(reportAmbiguousErrors)
c.implicitsEnabled = false
+ c.enrichmentEnabled = false
c
}
@@ -333,32 +354,30 @@ trait Contexts { self: Analyzer =>
private def unitError(pos: Position, msg: String) =
unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
-
- def issue(err: AbsTypeError) {
+
+ @inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) {
debugwarn("issue error: " + err.errMsg)
if (settings.Yissuedebug.value) (new Exception).printStackTrace()
- if (reportErrors) unitError(err.errPos, addDiagString(err.errMsg))
+ if (pf isDefinedAt err) pf(err)
else if (bufferErrors) { buffer += err }
else throw new TypeError(err.errPos, err.errMsg)
}
+ def issue(err: AbsTypeError) {
+ issueCommon(err) { case _ if reportErrors =>
+ unitError(err.errPos, addDiagString(err.errMsg))
+ }
+ }
+
def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) {
- debugwarn("issue ambiguous error: " + err.errMsg)
- if (settings.Yissuedebug.value) (new Exception).printStackTrace()
- if (ambiguousErrors) {
+ issueCommon(err) { case _ if ambiguousErrors =>
if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
unitError(err.errPos, err.errMsg)
- } else if (bufferErrors) { buffer += err }
- else throw new TypeError(err.errPos, err.errMsg)
+ }
}
def issueAmbiguousError(err: AbsTypeError) {
- debugwarn("issue ambiguous error: " + err.errMsg)
- if (settings.Yissuedebug.value) (new Exception).printStackTrace()
- if (ambiguousErrors)
- unitError(err.errPos, addDiagString(err.errMsg))
- else if (bufferErrors) { buffer += err }
- else throw new TypeError(err.errPos, err.errMsg)
+ issueCommon(err) { case _ if ambiguousErrors => unitError(err.errPos, addDiagString(err.errMsg)) }
}
// TODO remove
@@ -384,6 +403,17 @@ trait Contexts { self: Analyzer =>
case _ => outer.isLocal()
}
+ /** Fast path for some slow checks (ambiguous assignment in Refchecks, and
+ * existence of __match for MatchTranslation in virtpatmat.) This logic probably
+ * needs improvement.
+ */
+ def isNameInScope(name: Name) = (
+ enclosingContextChain exists (ctx =>
+ (ctx.scope.lookupEntry(name) != null)
+ || (ctx.owner.rawInfo.member(name) != NoSymbol)
+ )
+ )
+
// nextOuter determines which context is searched next for implicits
// (after `this`, which contributes `newImplicits` below.) In
// most cases, it is simply the outer context: if we're owned by
@@ -408,8 +438,8 @@ trait Contexts { self: Analyzer =>
def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain
- override def toString = "Context(%s@%s unit=%s scope=%s errors=%b)".format(
- owner.fullName, tree.shortClass, unit, scope.##, hasErrors
+ override def toString = "Context(%s@%s unit=%s scope=%s errors=%b, reportErrors=%b, throwErrors=%b)".format(
+ owner.fullName, tree.shortClass, unit, scope.##, hasErrors, reportErrors, throwErrors
)
/** Is `sub` a subclass of `base` or a companion object of such a subclass?
*/
@@ -568,16 +598,16 @@ trait Contexts { self: Analyzer =>
* it is accessible, and if it is imported there is not already a local symbol
* with the same names. Local symbols override imported ones. This fixes #2866.
*/
- private def isQualifyingImplicit(sym: Symbol, pre: Type, imported: Boolean) =
+ private def isQualifyingImplicit(name: Name, sym: Symbol, pre: Type, imported: Boolean) =
sym.isImplicit &&
isAccessible(sym, pre) &&
!(imported && {
- val e = scope.lookupEntry(sym.name)
+ val e = scope.lookupEntry(name)
(e ne null) && (e.owner == scope)
})
private def collectImplicits(syms: List[Symbol], pre: Type, imported: Boolean = false): List[ImplicitInfo] =
- for (sym <- syms if isQualifyingImplicit(sym, pre, imported)) yield
+ for (sym <- syms if isQualifyingImplicit(sym.name, sym, pre, imported)) yield
new ImplicitInfo(sym.name, pre, sym)
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
@@ -591,7 +621,7 @@ trait Contexts { self: Analyzer =>
var impls = collect(sels1) filter (info => info.name != from)
if (to != nme.WILDCARD) {
for (sym <- imp.importedSymbol(to).alternatives)
- if (isQualifyingImplicit(sym, pre, imported = true))
+ if (isQualifyingImplicit(to, sym, pre, imported = true))
impls = new ImplicitInfo(to, pre, sym) :: impls
}
impls
diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
new file mode 100644
index 0000000000..0b414801d6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
@@ -0,0 +1,206 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package typechecker
+
+/** A generic means of breaking down types into their subcomponents.
+ * Types are decomposed top down, and recognizable substructure is
+ * dispatched via self-apparently named methods. Those methods can
+ * be overridden for custom behavior, but only the abstract methods
+ * require implementations, each of which must create some unknown
+ * "Node" type from its inputs.
+ *
+ * - wrapProduct create Node from a product of Nodes
+ * - wrapSequence create Node from a sequence of Nodes
+ * - wrapAtom create Node from an arbitrary value
+ *
+ * This is a work in progress.
+ */
+trait DestructureTypes {
+ val global: Global
+ import global._
+ import definitions.{ NothingClass, AnyClass }
+
+ trait DestructureType[Node] extends (Type => Node) {
+ def withLabel(node: Node, label: String): Node
+ def withType(node: Node, typeName: String): Node
+
+ def wrapEmpty: Node
+ def wrapPoly(in: Node, out: Node): Node
+ def wrapMono(in: Node, out: Node): Node
+ def wrapProduct(nodes: List[Node]): Node
+ def wrapSequence(nodes: List[Node]): Node
+ def wrapAtom[U](value: U): Node
+
+ private implicit def liftToTerm(name: String): TermName = newTermName(name)
+
+ private val openSymbols = collection.mutable.Set[Symbol]()
+
+ private def nodeList[T](elems: List[T], mkNode: T => Node): Node =
+ if (elems.isEmpty) wrapEmpty else list(elems map mkNode)
+
+ private def scopeMemberList(elems: List[Symbol]): Node = nodeList(elems, wrapAtom)
+ private def typeList(elems: List[Type]): Node = nodeList(elems, this)
+ private def symbolList(elems: List[Symbol]): Node = nodeList(elems, wrapSymbolInfo)
+ private def treeList(elems: List[Tree]): Node = nodeList(elems, wrapTree)
+ private def annotationList(annots: List[AnnotationInfo]): Node = nodeList(annots, annotation)
+
+ private def assocsNode(ann: AnnotationInfo): Node = {
+ val (names, args) = ann.assocs.toIndexedSeq.unzip
+ if (names.isEmpty) wrapEmpty
+ else node("assocs", nodeList(names.indices.toList, (i: Int) => atom(names(i).toString, args(i))))
+ }
+ private def typeTypeName(tp: Type) = tp match {
+ case mt @ MethodType(_, _) if mt.isImplicit => "ImplicitMethodType"
+ case TypeRef(_, sym, _) => typeRefType(sym)
+ case _ => tp.kind
+ }
+
+ def wrapTree(tree: Tree): Node = withType(
+ tree match {
+ case x: NameTree => atom(x.name.toString, x)
+ case _ => wrapAtom(tree)
+ },
+ tree.printingPrefix
+ )
+ def wrapSymbol(label: String, sym: Symbol): Node = {
+ if (sym eq NoSymbol) wrapEmpty
+ else atom(label, sym)
+ }
+ def wrapInfo(sym: Symbol) = sym.info match {
+ case TypeBounds(lo, hi) => typeBounds(lo, hi)
+ case PolyType(tparams, restpe) => polyFunction(tparams, restpe)
+ case _ => wrapEmpty
+ }
+ def wrapSymbolInfo(sym: Symbol): Node = {
+ if ((sym eq NoSymbol) || openSymbols(sym)) wrapEmpty
+ else {
+ openSymbols += sym
+ try product(symbolType(sym), wrapAtom(sym.defString))
+ finally openSymbols -= sym
+ }
+ }
+
+ def list(nodes: List[Node]): Node = wrapSequence(nodes)
+ def product(tp: Type, nodes: Node*): Node = product(typeTypeName(tp), nodes: _*)
+ def product(typeName: String, nodes: Node*): Node = (
+ nodes.toList filterNot (_ == wrapEmpty) match {
+ case Nil => wrapEmpty
+ case xs => withType(wrapProduct(xs), typeName)
+ }
+ )
+
+ def atom[U](label: String, value: U): Node = node(label, wrapAtom(value))
+ def constant(label: String, const: Constant): Node = atom(label, const)
+
+ def scope(decls: Scope): Node = node("decls", scopeMemberList(decls.toList))
+ def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2))
+
+ def resultType(restpe: Type): Node = this("resultType", restpe)
+ def typeParams(tps: List[Symbol]): Node = node("typeParams", symbolList(tps))
+ def valueParams(params: List[Symbol]): Node = node("params", symbolList(params))
+ def typeArgs(tps: List[Type]): Node = node("args", typeList(tps))
+ def parentList(tps: List[Type]): Node = node("parents", typeList(tps))
+
+ def polyFunction(tparams: List[Symbol], restpe: Type): Node = wrapPoly(typeParams(tparams), resultType(restpe))
+ def monoFunction(params: List[Symbol], restpe: Type): Node = wrapMono(valueParams(params), resultType(restpe))
+ def nullaryFunction(restpe: Type): Node = wrapMono(wrapEmpty, this(restpe))
+
+ def prefix(pre: Type): Node = pre match {
+ case NoPrefix => wrapEmpty
+ case _ => this("pre", pre)
+ }
+ def typeBounds(lo0: Type, hi0: Type): Node = {
+ val lo = if ((lo0 eq WildcardType) || (lo0.typeSymbol eq NothingClass)) wrapEmpty else this("lo", lo0)
+ val hi = if ((hi0 eq WildcardType) || (hi0.typeSymbol eq AnyClass)) wrapEmpty else this("hi", hi0)
+
+ product("TypeBounds", lo, hi)
+ }
+
+ def annotation(ann: AnnotationInfo): Node = product(
+ "AnnotationInfo",
+ this("atp", ann.atp),
+ node("args", treeList(ann.args)),
+ assocsNode(ann)
+ )
+ def typeConstraint(constr: TypeConstraint): Node = product(
+ "TypeConstraint",
+ node("lo", typeList(constr.loBounds)),
+ node("hi", typeList(constr.hiBounds)),
+ this("inst", constr.inst)
+ )
+ def annotatedType(annotations: List[AnnotationInfo], underlying: Type) = product(
+ "AnnotatedType",
+ node("annotations", annotationList(annotations)),
+ this("underlying", underlying)
+ )
+
+ /** This imposes additional structure beyond that which is visible in
+ * the case class hierarchy. In particular, (too) many different constructs
+ * are encoded in TypeRefs; here they are partitioned somewhat before
+ * being dispatched.
+ *
+ * For example, a typical type parameter is encoded as TypeRef(NoPrefix, sym, Nil)
+ * with its upper and lower bounds stored in the info of the symbol. Viewing the
+ * TypeRef naively we are treated to both too much information (useless prefix, usually
+ * empty args) and too little (bounds hidden behind indirection.) So drop the prefix
+ * and promote the bounds.
+ */
+ def typeRef(tp: TypeRef) = {
+ val TypeRef(pre, sym, args) = tp
+ // Filtered down to elements with "interesting" content
+ product(
+ tp,
+ if (sym.isDefinedInPackage) wrapEmpty else prefix(pre),
+ wrapSymbolInfo(sym),
+ typeArgs(args),
+ if (tp ne tp.normalize) this("normalize", tp.normalize) else wrapEmpty
+ )
+ }
+
+ def symbolType(sym: Symbol) = (
+ if (sym.isRefinementClass) "Refinement"
+ else if (sym.isAliasType) "Alias"
+ else if (sym.isTypeSkolem) "TypeSkolem"
+ else if (sym.isTypeParameter) "TypeParam"
+ else if (sym.isAbstractType) "AbstractType"
+ else if (sym.isType) "TypeSymbol"
+ else "TermSymbol"
+ )
+ def typeRefType(sym: Symbol) = (
+ if (sym.isRefinementClass) "RefinementTypeRef"
+ else if (sym.isAliasType) "AliasTypeRef"
+ else if (sym.isTypeSkolem) "SkolemTypeRef"
+ else if (sym.isTypeParameter) "TypeParamTypeRef"
+ else if (sym.isAbstractType) "AbstractTypeRef"
+ else "TypeRef"
+ ) + ( if (sym.isFBounded) "(F-Bounded)" else "" )
+
+ def node(label: String, node: Node): Node = withLabel(node, label)
+ def apply(label: String, tp: Type): Node = withLabel(this(tp), label)
+
+ def apply(tp: Type): Node = tp match {
+ case AntiPolyType(pre, targs) => product(tp, prefix(pre), typeArgs(targs))
+ case ClassInfoType(parents, decls, clazz) => product(tp, parentList(parents), scope(decls), wrapAtom(clazz))
+ case ConstantType(const) => product(tp, constant("value", const))
+ case DeBruijnIndex(level, index, args) => product(tp, const("level" -> level), const("index" -> index), typeArgs(args))
+ case OverloadedType(pre, alts) => product(tp, prefix(pre), node("alts", typeList(alts map pre.memberType)))
+ case RefinedType(parents, decls) => product(tp, parentList(parents), scope(decls))
+ case SingleType(pre, sym) => product(tp, prefix(pre), wrapAtom(sym))
+ case SuperType(thistp, supertp) => product(tp, this("this", thistp), this("super", supertp))
+ case ThisType(clazz) => product(tp, wrapAtom(clazz))
+ case TypeVar(inst, constr) => product(tp, this("inst", inst), typeConstraint(constr))
+ case AnnotatedType(annotations, underlying, _) => annotatedType(annotations, underlying)
+ case ExistentialType(tparams, underlying) => polyFunction(tparams, underlying)
+ case PolyType(tparams, restpe) => polyFunction(tparams, restpe)
+ case MethodType(params, restpe) => monoFunction(params, restpe)
+ case NullaryMethodType(restpe) => nullaryFunction(restpe)
+ case TypeBounds(lo, hi) => typeBounds(lo, hi)
+ case tr @ TypeRef(pre, sym, args) => typeRef(tr)
+ case _ => wrapAtom(tp) // XXX see what this is
+ }
+ }
+}
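In the new DestructureTypes file above, only the wrap*/with* methods are abstract; every type-specific method composes them. A stripped-down standalone analogue over a toy expression type (omitting the label/type decorators; `Expr`, `Num`, `Add` and `Render` are hypothetical) shows the shape:

object DestructureSketch {
  sealed trait Expr
  case class Num(n: Int)         extends Expr
  case class Add(xs: List[Expr]) extends Expr

  trait Destructure[Node] extends (Expr => Node) {
    def wrapProduct(nodes: List[Node]): Node   // Node from a product of Nodes
    def wrapSequence(nodes: List[Node]): Node  // Node from a sequence of Nodes
    def wrapAtom[U](value: U): Node            // Node from an arbitrary value

    def apply(e: Expr): Node = e match {
      case Num(n)  => wrapAtom(n)
      case Add(xs) => wrapProduct(List(wrapAtom("Add"), wrapSequence(xs map (x => apply(x)))))
    }
  }

  // One concrete Node type: plain strings.
  object Render extends Destructure[String] {
    def wrapProduct(nodes: List[String])  = nodes.mkString("(", " ", ")")
    def wrapSequence(nodes: List[String]) = nodes.mkString("[", ", ", "]")
    def wrapAtom[U](value: U)             = value.toString
  }

  // Render(Add(List(Num(1), Num(2)))) == "(Add [1, 2])"
}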
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index f6d1e42c32..b7a6ea677e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -143,8 +143,8 @@ abstract class Duplicators extends Analyzer {
else
sym
- private def invalidate(tree: Tree) {
- debuglog("attempting to invalidate " + tree.symbol + ", owner - " + (if (tree.symbol ne null) tree.symbol.owner else "<NULL>"))
+ private def invalidate(tree: Tree, owner: Symbol = NoSymbol) {
+ debuglog("attempting to invalidate " + tree.symbol)
if (tree.isDef && tree.symbol != NoSymbol) {
debuglog("invalid " + tree.symbol)
invalidSyms(tree.symbol) = tree
@@ -158,18 +158,20 @@ abstract class Duplicators extends Analyzer {
newsym.setInfo(fixType(ldef.symbol.info))
ldef.symbol = newsym
debuglog("newsym: " + newsym + " info: " + newsym.info)
-
+
case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) =>
debuglog("ValDef " + name + " sym.info: " + vdef.symbol.info)
invalidSyms(vdef.symbol) = vdef
- val newsym = vdef.symbol.cloneSymbol(context.owner)
+ val newowner = if (owner != NoSymbol) owner else context.owner
+ val newsym = vdef.symbol.cloneSymbol(newowner)
newsym.setInfo(fixType(vdef.symbol.info))
vdef.symbol = newsym
- debuglog("newsym: " + newsym + " info: " + newsym.info)
-
+ debuglog("newsym: " + newsym + " info: " + newsym.info + ", owner: " + newsym.owner + ", " + newsym.owner.isClass)
+ if (newsym.owner.isClass) newsym.owner.info.decls enter newsym
+
case DefDef(_, name, tparams, vparamss, _, rhs) =>
// invalidate parameters
- invalidate(tparams ::: vparamss.flatten)
+ invalidateAll(tparams ::: vparamss.flatten)
tree.symbol = NoSymbol
case _ =>
@@ -178,15 +180,15 @@ abstract class Duplicators extends Analyzer {
}
}
- private def invalidate(stats: List[Tree]) {
- stats foreach invalidate
+ private def invalidateAll(stats: List[Tree], owner: Symbol = NoSymbol) {
+ stats.foreach(invalidate(_, owner))
}
def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = {
oldClassOwner = oldThis
newClassOwner = newThis
- invalidate(ddef.tparams)
- for (vdef <- ddef.vparamss.flatten) {
+ invalidateAll(ddef.tparams)
+ mforeach(ddef.vparamss) { vdef =>
invalidate(vdef)
vdef.tpe = null
}
@@ -239,15 +241,15 @@ abstract class Duplicators extends Analyzer {
case Block(stats, res) =>
debuglog("invalidating block")
- invalidate(stats)
+ invalidateAll(stats)
invalidate(res)
tree.tpe = null
super.typed(tree, mode, pt)
case ClassDef(_, _, _, tmpl @ Template(parents, _, stats)) =>
- // log("invalidating classdef " + tree.tpe)
+ // log("invalidating classdef " + tree)
tmpl.symbol = tree.symbol.newLocalDummy(tree.pos)
- invalidate(stats)
+ invalidateAll(stats, tree.symbol)
tree.tpe = null
super.typed(tree, mode, pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 92ce0e6de4..e1fb683aa9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -63,15 +63,18 @@ trait EtaExpansion { self: Analyzer =>
* @return ...
*/
def liftoutPrefix(tree: Tree): Tree = {
- def liftout(tree: Tree): Tree =
+ def liftout(tree: Tree, byName: Boolean): Tree =
if (treeInfo.isExprSafeToInline(tree)) tree
else {
val vname: Name = freshName()
// Problem with ticket #2351 here
defs += atPos(tree.pos) {
- ValDef(Modifiers(SYNTHETIC), vname.toTermName, TypeTree(), tree)
+ val rhs = if (byName) Function(List(), tree) else tree
+ ValDef(Modifiers(SYNTHETIC), vname.toTermName, TypeTree(), rhs)
+ }
+ atPos(tree.pos.focus) {
+ if (byName) Apply(Ident(vname), List()) else Ident(vname)
}
- Ident(vname) setPos tree.pos.focus
}
val tree1 = tree match {
// a partial application using named arguments has the following form:
@@ -85,11 +88,14 @@ trait EtaExpansion { self: Analyzer =>
defs ++= stats
liftoutPrefix(fun)
case Apply(fn, args) =>
- treeCopy.Apply(tree, liftoutPrefix(fn), args mapConserve (liftout)) setType null
+ val byName = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe))
+ // zipAll: with repeated params, there might be more args than params
+ val newArgs = args.zipAll(byName, EmptyTree, false) map { case (arg, byN) => liftout(arg, byN) }
+ treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
case TypeApply(fn, args) =>
treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
case Select(qual, name) =>
- treeCopy.Select(tree, liftout(qual), name) setSymbol NoSymbol setType null
+ treeCopy.Select(tree, liftout(qual, false), name) setSymbol NoSymbol setType null
case Ident(name) =>
tree
}
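The EtaExpansion change above lifts arguments bound to by-name parameters as nullary functions and re-applies them at the use site, rather than forcing them into a plain synthetic val. A standalone analogy of why that matters (all names here are hypothetical):

object ByNameLiftSketch {
  def sideEffect(): Int = { println("evaluated"); 42 }

  def f(x: Int, y: => Int): Int = if (x > 0) y else x

  // Lifting the by-name argument as a plain val forces sideEffect() as soon as
  // the lifted function is built, even if f never demands y.
  def liftedEager: Int => Int = { val y0 = sideEffect(); x => f(x, y0) }

  // Lifting it as a thunk and re-applying it at the call site preserves by-name
  // semantics: sideEffect() runs only when (and each time) f evaluates y.
  def liftedLazy: Int => Int = { val y0 = () => sideEffect(); x => f(x, y0()) }
}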
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 3a789b83b6..217cadaab8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -32,10 +32,10 @@ trait Implicits {
import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult =
- inferImplicit(tree, pt, reportAmbiguous, isView, context, true, NoPosition)
+ inferImplicit(tree, pt, reportAmbiguous, isView, context, true, tree.pos)
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult =
- inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, NoPosition)
+ inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, tree.pos)
/** Search for an implicit value. See the comment on `result` at the end of class `ImplicitSearch`
* for more info how the search is conducted.
@@ -94,6 +94,27 @@ trait Implicits {
result
}
+ /** Find all views from type `tp` (in which `tpars` are free)
+ *
+ * Note that the trees in the search results in the returned list share the same type variables.
+ * Ignore their constr field! The list of type constraints returned along with each tree specifies the constraints that
+ * must be met by the corresponding type parameter in `tpars` (for the returned implicit view to be valid).
+ *
+ * @arg tp from-type for the implicit conversion
+ * @arg context search implicits here
+ * @arg tpars symbols that should be considered free type variables
+ * (implicit search should not try to solve them, just track their constraints)
+ */
+ def allViewsFrom(tp: Type, context: Context, tpars: List[Symbol]): List[(SearchResult, List[TypeConstraint])] = {
+ // my untouchable typevars are better than yours (they can't be constrained by them)
+ val tvars = tpars map (TypeVar untouchable _)
+ val tpSubsted = tp.subst(tpars, tvars)
+
+ val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(false))
+
+ search.allImplicitsPoly(tvars)
+ }
+
private final val sizeLimit = 50000
private type Infos = List[ImplicitInfo]
private type Infoss = List[List[ImplicitInfo]]
@@ -145,7 +166,7 @@ trait Implicits {
}
def isCyclicOrErroneous =
- try containsError(tpe)
+ try sym.hasFlag(LOCKED) || containsError(tpe)
catch { case _: CyclicReference => true }
var useCountArg: Int = 0
@@ -193,13 +214,13 @@ trait Implicits {
override def hashCode = 1
}
- /** A constructor for types ?{ name: tp }, used in infer view to member
+ /** A constructor for types ?{ def/type name: tp }, used in infer view to member
* searches.
*/
def memberWildcardType(name: Name, tp: Type) = {
val result = refinedType(List(WildcardType), NoSymbol)
name match {
- case x: TermName => result.typeSymbol.newValue(x) setInfoAndEnter tp
+ case x: TermName => result.typeSymbol.newMethod(x) setInfoAndEnter tp
case x: TypeName => result.typeSymbol.newAbstractType(x) setInfoAndEnter tp
}
result
@@ -325,7 +346,11 @@ trait Implicits {
case _ => tp
}
def stripped(tp: Type): Type = {
- deriveTypeWithWildcards(freeTypeParametersNoSkolems.collect(tp))(tp)
+ // `t.typeSymbol` returns the symbol of the normalized type. If that normalized type
+ // is a `PolyType`, the symbol of the result type is collected. This is precisely
+ // what we require for SI-5318.
+ val syms = for (t <- tp; if t.typeSymbol.isTypeParameter) yield t.typeSymbol
+ deriveTypeWithWildcards(syms.distinct)(tp)
}
def sum(xs: List[Int]) = (0 /: xs)(_ + _)
def complexity(tp: Type): Int = tp.normalize match {
@@ -369,7 +394,7 @@ trait Implicits {
private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
(context.openImplicits find { case (tp, tree1) => tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
- // println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
+ //println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
throw DivergentImplicit
case None =>
try {
@@ -378,7 +403,7 @@ trait Implicits {
typedImplicit0(info, ptChecked)
} catch {
case ex: DivergentImplicit =>
- // println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
+ //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
if (context.openImplicits.tail.isEmpty) {
if (!(pt.isErroneous))
DivergingImplicitExpansionError(tree, pt, info.sym)(context)
@@ -510,7 +535,7 @@ trait Implicits {
private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
incCounter(plausiblyCompatibleImplicits)
- printTyping(
+ printTyping (
ptBlock("typedImplicit0",
"info.name" -> info.name,
"ptChecked" -> ptChecked,
@@ -533,7 +558,11 @@ trait Implicits {
val itree = atPos(pos.focus) {
if (info.pre == NoPrefix) Ident(info.name)
- else Select(gen.mkAttributedQualifier(info.pre), info.name)
+ else {
+ // SI-2405 Not info.name, which might be an aliased import
+ val implicitMemberName = info.sym.name
+ Select(gen.mkAttributedQualifier(info.pre), implicitMemberName)
+ }
}
printTyping("typedImplicit1 %s, pt=%s, from implicit %s:%s".format(
typeDebug.ptTree(itree), wildPt, info.name, info.tpe)
@@ -612,6 +641,7 @@ trait Implicits {
else {
val subst = new TreeTypeSubstituter(okParams, okArgs)
subst traverse itree2
+ notifyUndetparamsInferred(okParams, okArgs)
subst
}
@@ -689,7 +719,7 @@ trait Implicits {
def comesBefore(sym: Symbol, owner: Symbol) = {
val ownerPos = owner.pos.pointOrElse(Int.MaxValue)
sym.pos.pointOrElse(0) < ownerPos && (
- if (sym hasAccessorFlag) {
+ if (sym.hasAccessorFlag) {
val symAcc = sym.accessed // #3373
symAcc.pos.pointOrElse(0) < ownerPos &&
!(owner.ownerChain exists (o => (o eq sym) || (o eq symAcc))) // probably faster to iterate only once, don't feel like duplicating hasTransOwner for this case
@@ -742,12 +772,14 @@ trait Implicits {
* so that if there is a best candidate it can still be selected.
*/
private var divergence = false
- private val MaxDiverges = 1 // not sure if this should be > 1
- private val divergenceHandler = util.Exceptional.expiringHandler(MaxDiverges) {
- case x: DivergentImplicit =>
- divergence = true
- log("discarding divergent implicit during implicit search")
- SearchFailure
+ private val divergenceHandler: PartialFunction[Throwable, SearchResult] = {
+ var remaining = 1;
+ { case x: DivergentImplicit if remaining > 0 =>
+ remaining -= 1
+ divergence = true
+ log("discarding divergent implicit during implicit search")
+ SearchFailure
+ }
}
/** Sorted list of eligible implicits.
@@ -809,7 +841,7 @@ trait Implicits {
/** Returns all eligible ImplicitInfos and their SearchResults in a map.
*/
- def findAll() = eligible map (info => (info, typedImplicit(info, false))) toMap
+ def findAll() = mapFrom(eligible)(typedImplicit(_, false))
/** Returns the SearchResult of the best match.
*/
@@ -905,7 +937,7 @@ trait Implicits {
}
case None =>
if (pre.isStable) {
- val companion = sym.companionModule
+ val companion = companionSymbolOf(sym, context)
companion.moduleClass match {
case mc: ModuleClassSymbol =>
val infos =
@@ -1098,68 +1130,199 @@ trait Implicits {
implicitInfoss1
}
- // these should be lazy, otherwise we wouldn't be able to compile scala-library with starr
- private val TagSymbols = Set(ClassTagClass, TypeTagClass, ConcreteTypeTagClass)
- private val TagMaterializers = Map(
- ClassTagClass -> MacroInternal_materializeClassTag,
- TypeTagClass -> MacroInternal_materializeTypeTag,
+ private def TagSymbols = TagMaterializers.keySet
+ private val TagMaterializers = Map[Symbol, Symbol](
+ ArrayTagClass -> MacroInternal_materializeArrayTag,
+ ErasureTagClass -> MacroInternal_materializeErasureTag,
+ ClassTagClass -> MacroInternal_materializeClassTag,
+ TypeTagClass -> MacroInternal_materializeTypeTag,
ConcreteTypeTagClass -> MacroInternal_materializeConcreteTypeTag
)
- def tagOfType(pre: Type, tp: Type, tagClass: Symbol): SearchResult = {
- def success(arg: Tree) =
+ /** Creates a tree that will produce a tag of the requested flavor.
+ * An EmptyTree is returned if materialization fails.
+ */
+ private def tagOfType(pre: Type, tp: Type, tagClass: Symbol): SearchResult = {
+ def success(arg: Tree) = {
+ def isMacroException(msg: String): Boolean =
+ // [Eugene] very unreliable, ask Hubert about a better way
+ msg contains "exception during macro expansion"
+
+ def processMacroExpansionError(pos: Position, msg: String): SearchResult = {
+ // giving up and reporting all macro exceptions regardless of their source
+ // this might lead to an avalanche of errors if one of your implicit macros misbehaves
+ if (isMacroException(msg)) context.error(pos, msg)
+ failure(arg, "failed to typecheck the materialized tag: %n%s".format(msg), pos)
+ }
+
try {
val tree1 = typed(atPos(pos.focus)(arg))
- def isErroneous = tree exists (_.isErroneous)
- if (context.hasErrors) failure(tp, "failed to typecheck the materialized typetag: %n%s".format(context.errBuffer.head.errMsg), context.errBuffer.head.errPos)
+ if (context.hasErrors) processMacroExpansionError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
else new SearchResult(tree1, EmptyTreeTypeSubstituter)
} catch {
case ex: TypeError =>
- failure(arg, "failed to typecheck the materialized typetag: %n%s".format(ex.msg), ex.pos)
+ processMacroExpansionError(ex.pos, ex.msg)
}
-
- val prefix = (tagClass, pre) match {
- // ClassTags only exist for scala.reflect.mirror, so their materializer doesn't care about prefixes
- case (ClassTagClass, _) =>
- gen.mkAttributedRef(Reflect_mirror) setType singleType(Reflect_mirror.owner.thisPrefix, Reflect_mirror)
- // [Eugene to Martin] this is the crux of the interaction between implicits and reifiers
- // here we need to turn a (supposedly path-dependent) type into a tree that will be used as a prefix
- // I'm not sure if I've done this right - please, review
- case (_, SingleType(prePre, preSym)) =>
- gen.mkAttributedRef(prePre, preSym) setType pre
- // necessary only to compile typetags used inside the Universe cake
- case (_, ThisType(thisSym)) =>
- gen.mkAttributedThis(thisSym)
- case _ =>
- // if ``pre'' is not a PDT, e.g. if someone wrote
- // implicitly[scala.reflect.makro.Context#TypeTag[Int]]
- // then we need to fail, because we don't know the prefix to use during type reification
- return failure(tp, "tag error: unsupported prefix type %s (%s)".format(pre, pre.kind))
}
+ val prefix = (
+ // ClassTags only exist for scala.reflect.mirror, so their materializer
+ // doesn't care about prefixes
+ if ((tagClass eq ArrayTagClass) || (tagClass eq ErasureTagClass) || (tagClass eq ClassTagClass)) ReflectMirrorPrefix
+ else pre match {
+ // [Eugene to Martin] this is the crux of the interaction between
+ // implicits and reifiers here we need to turn a (supposedly
+ // path-dependent) type into a tree that will be used as a prefix I'm
+ // not sure if I've done this right - please, review
+ case SingleType(prePre, preSym) =>
+ gen.mkAttributedRef(prePre, preSym) setType pre
+ // necessary only to compile typetags used inside the Universe cake
+ case ThisType(thisSym) =>
+ gen.mkAttributedThis(thisSym)
+ case _ =>
+ // if ``pre'' is not a PDT, e.g. if someone wrote
+ // implicitly[scala.reflect.makro.Context#TypeTag[Int]]
+ // then we need to fail, because we don't know the prefix to use during type reification
+ return failure(tp, "tag error: unsupported prefix type %s (%s)".format(pre, pre.kind))
+ }
+ )
// todo. migrate hardcoded materialization in Implicits to corresponding implicit macros
- var materializer = atPos(pos.focus)(Apply(TypeApply(Ident(TagMaterializers(tagClass)), List(TypeTree(tp))), List(prefix)))
+ var materializer = atPos(pos.focus)(
+ gen.mkMethodCall(TagMaterializers(tagClass), List(tp), List(prefix))
+ )
if (settings.XlogImplicits.value) println("materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
- success(materializer)
+ if (context.macrosEnabled) success(materializer)
+ else failure(materializer, "macros are disabled")
+ }
+
+ private val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
+
+ /** Creates a tree that calls the relevant factory method in object
+ * reflect.Manifest for type 'tp'. An EmptyTree is returned if
+ * no manifest is found. todo: make this instantiate take type params as well?
+ */
+ private def manifestOfType(tp: Type, full: Boolean): SearchResult = {
+
+ /** Creates a tree that calls the factory method called constructor in object reflect.Manifest */
+ def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
+ if (args contains EmptyTree) EmptyTree
+ else typedPos(tree.pos.focus) {
+ val mani = gen.mkManifestFactoryCall(full, constructor, tparg, args.toList)
+ if (settings.debug.value) println("generated manifest: "+mani) // DEBUG
+ mani
+ }
+
+ /** Creates a tree representing one of the singleton manifests.*/
+ def findSingletonManifest(name: String) = typedPos(tree.pos.focus) {
+ Select(gen.mkAttributedRef(FullManifestModule), name)
+ }
+
+ /** Re-wraps a type in a manifest before calling inferImplicit on the result */
+ def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) =
+ inferImplicit(tree, appliedType(manifestClass, tp), true, false, context).tree
+
+ def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
+ def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = {
+ implicit def wrapResult(tree: Tree): SearchResult =
+ if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to))
+
+ val tp1 = tp0.normalize
+ tp1 match {
+ case ThisType(_) | SingleType(_, _) =>
+ // can't generate a reference to a value that's abstracted over by an existential
+ if (containsExistential(tp1)) EmptyTree
+ else manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1))
+ case ConstantType(value) =>
+ manifestOfType(tp1.deconst, full)
+ case TypeRef(pre, sym, args) =>
+ if (isPrimitiveValueClass(sym) || isPhantomClass(sym)) {
+ findSingletonManifest(sym.name.toString)
+ } else if (sym == ObjectClass || sym == AnyRefClass) {
+ findSingletonManifest("Object")
+ } else if (sym == RepeatedParamClass || sym == ByNameParamClass) {
+ EmptyTree
+ } else if (sym == ArrayClass && args.length == 1) {
+ manifestFactoryCall("arrayType", args.head, findManifest(args.head))
+ } else if (sym.isClass) {
+ val classarg0 = gen.mkClassOf(tp1)
+ val classarg = tp match {
+ case _: ExistentialType => gen.mkCast(classarg0, ClassType(tp))
+ case _ => classarg0
+ }
+ val suffix = classarg :: (args map findSubManifest)
+ manifestFactoryCall(
+ "classType", tp,
+ (if ((pre eq NoPrefix) || pre.typeSymbol.isStaticOwner) suffix
+ else findSubManifest(pre) :: suffix): _*)
+ } else if (sym.isExistentiallyBound && full) {
+ manifestFactoryCall("wildcardType", tp,
+ findManifest(tp.bounds.lo), findManifest(tp.bounds.hi))
+ }
+ // looking for a manifest of a type parameter that hasn't been inferred by now,
+ // can't do much, but let's not fail
+ else if (undetParams contains sym) {
+ // #3859: need to include the mapping from sym -> NothingClass.tpe in the SearchResult
+ mot(NothingClass.tpe, sym :: from, NothingClass.tpe :: to)
+ } else {
+ // a manifest should have been found by normal searchImplicit
+ EmptyTree
+ }
+ case RefinedType(parents, decls) => // !!! not yet: if !full || decls.isEmpty =>
+ // refinement is not generated yet
+ if (hasLength(parents, 1)) findManifest(parents.head)
+ else if (full) manifestFactoryCall("intersectionType", tp, parents map findSubManifest: _*)
+ else mot(erasure.intersectionDominator(parents), from, to)
+ case ExistentialType(tparams, result) =>
+ mot(tp1.skolemizeExistential, from, to)
+ case _ =>
+ EmptyTree
+/* !!! the following is almost right, but we have to splice nested manifest
+ * !!! types into this type. This requires a substantial extension of
+ * !!! reifiers.
+ val reifier = new Reifier()
+ val rtree = reifier.reifyTopLevel(tp1)
+ manifestFactoryCall("apply", tp, rtree)
+*/
+ }
+ }
+
+ val tagInScope =
+ if (full) context.withMacrosDisabled(resolveTypeTag(ReflectMirrorPrefix.tpe, tp, pos, true))
+ else context.withMacrosDisabled(resolveArrayTag(tp, pos))
+ if (tagInScope.isEmpty) mot(tp, Nil, Nil)
+ else {
+ val interop =
+ if (full) gen.mkMethodCall(ReflectPackage, nme.concreteTypeTagToManifest, List(tp), List(tagInScope))
+ else gen.mkMethodCall(ReflectPackage, nme.arrayTagToClassManifest, List(tp), List(tagInScope))
+ wrapResult(interop)
+ }
}
- /** The manifest corresponding to type `pt`, provided `pt` is an instance of Manifest.
+ def wrapResult(tree: Tree): SearchResult =
+ if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
+
+ /** The tag corresponding to type `pt`, provided `pt` is a flavor of a tag.
*/
private def implicitTagOrOfExpectedType(pt: Type): SearchResult = pt.dealias match {
- case TypeRef(pre, sym, args) if TagSymbols(sym) =>
- tagOfType(pre, args.head, sym)
+ case TypeRef(pre, sym, arg :: Nil) if ManifestSymbols(sym) =>
+ manifestOfType(arg, sym == FullManifestClass) match {
+ case SearchFailure if sym == OptManifestClass => wrapResult(gen.mkAttributedRef(NoManifest))
+ case result => result
+ }
+ case TypeRef(pre, sym, arg :: Nil) if TagSymbols(sym) =>
+ tagOfType(pre, arg, sym)
case tp@TypeRef(_, sym, _) if sym.isAbstractType =>
implicitTagOrOfExpectedType(tp.bounds.lo) // #3977: use tp (==pt.dealias), not pt (if pt is a type alias, pt.bounds.lo == pt)
case _ =>
searchImplicit(implicitsOfExpectedType, false)
// shouldn't we pass `pt` to `implicitsOfExpectedType`, or is the recursive case
- // for an abstract type really only meant for manifests?
+ // for an abstract type really only meant for tags?
}
/** The result of the implicit search:
* First search implicits visible in current context.
* If that fails, search implicits in expected type `pt`.
- * // [Eugene] the following two lines should be deleted after we migrate delegate manifest materialization to implicit macros
+ * // [Eugene] the following two lines should be deleted after we migrate delegate tag materialization to implicit macros
* If that fails, and `pt` is an instance of a ClassTag, try to construct a class tag.
* If that fails, and `pt` is an instance of a TypeTag, try to construct a type tag.
* If all fails return SearchFailure
@@ -1202,6 +1365,26 @@ trait Implicits {
def search(iss: Infoss, isLocal: Boolean) = applicableInfos(iss, isLocal).values
(search(context.implicitss, true) ++ search(implicitsOfExpectedType, false)).toList.filter(_.tree ne EmptyTree)
}
+
+ // find all implicits for some type that contains type variables
+ // collect the constraints that result from typing each implicit
+ def allImplicitsPoly(tvars: List[TypeVar]): List[(SearchResult, List[TypeConstraint])] = {
+ def resetTVars() = tvars foreach { _.constr = new TypeConstraint }
+
+ def eligibleInfos(iss: Infoss, isLocal: Boolean) = new ImplicitComputation(iss, if (isLocal) util.HashSet[Name](512) else null).eligible
+ val allEligibleInfos = (eligibleInfos(context.implicitss, true) ++ eligibleInfos(implicitsOfExpectedType, false)).toList
+
+ allEligibleInfos flatMap { ii =>
+ // each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit)
+ // thus, start each type var off with a fresh constraint for every typedImplicit
+ resetTVars()
+ // any previous errors should not affect us now
+ context.flushBuffer()
+ val res = typedImplicit(ii, false)
+ if (res.tree ne EmptyTree) List((res, tvars map (_.constr)))
+ else Nil
+ }
+ }
}
object ImplicitNotFoundMsg {
@@ -1210,7 +1393,7 @@ trait Implicits {
// where `X` refers to a type parameter of `sym`
def check(sym: Symbol): Option[String] =
sym.getAnnotation(ImplicitNotFoundClass).flatMap(_.stringArg(0) match {
- case Some(m) => new Message(sym, m) validate
+ case Some(m) => new Message(sym, m).validate
case None => Some("Missing argument `msg` on implicitNotFound annotation.")
})
@@ -1235,7 +1418,7 @@ trait Implicits {
val decls = typeParamNames.toSet
(refs &~ decls) match {
- case s if s isEmpty => None
+ case s if s.isEmpty => None
case unboundNames =>
val singular = unboundNames.size == 1
Some("The type parameter"+( if(singular) " " else "s " )+ unboundNames.mkString(", ") +
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 98b8d7673e..85c2aebfab 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -83,7 +83,7 @@ trait Infer {
def apply(tp: Type): Type = tp match {
case WildcardType | BoundedWildcardType(_) | NoType =>
throw new NoInstance("undetermined type")
- case tv @ TypeVar(origin, constr) =>
+ case tv @ TypeVar(origin, constr) if !tv.untouchable =>
if (constr.inst == NoType) {
throw new DeferredNoInstance(() =>
"no unique instantiation of type variable " + origin + " could be found")
@@ -305,9 +305,21 @@ trait Infer {
}
- def isCompatible(tp: Type, pt: Type): Boolean = {
+ /** "Compatible" means conforming after conversions.
+ * "Raising to a thunk" is not implicit; therefore, for purposes of applicability and
+ * specificity, an arg type `A` is considered compatible with cbn formal parameter type `=>A`.
+ * For this behavior, the type `pt` must have cbn params preserved; for instance, `formalTypes(removeByName = false)`.
+ *
+ * `isAsSpecific` no longer prefers A by testing applicability to A for both m(A) and m(=>A)
+ * since that induces a tie between m(=>A) and m(=>A,B*) [SI-3761]
+ */
+ private def isCompatible(tp: Type, pt: Type): Boolean = {
+ def isCompatibleByName(tp: Type, pt: Type): Boolean = pt match {
+ case TypeRef(_, ByNameParamClass, List(res)) if !isByNameParamType(tp) => isCompatible(tp, res)
+ case _ => false
+ }
val tp1 = normalize(tp)
- (tp1 weak_<:< pt) || isCoercible(tp1, pt)
+ (tp1 weak_<:< pt) || isCoercible(tp1, pt) || isCompatibleByName(tp, pt)
}
def isCompatibleArgs(tps: List[Type], pts: List[Type]) =
(tps corresponds pts)(isCompatible)
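Read at the source level, the isCompatible comment above says that an eagerly computed argument of type A is accepted where a by-name formal `=> A` is expected, with no implicit conversion involved. A tiny hypothetical example:

object ByNameCompatSketch {
  def byName(x: => Int): Int = x
  val ok: Int = byName(1 + 1)   // Int is compatible with the cbn formal `=> Int`
}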
@@ -446,7 +458,7 @@ trait Infer {
type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
- m collect {case (p, Some(a)) => (p, a)} unzip ))
+ (m collect {case (p, Some(a)) => (p, a)}).unzip ))
object Undets {
def unapply(m: Result): Some[(List[Symbol], List[Type], List[Symbol])] = Some(toLists{
@@ -499,7 +511,8 @@ trait Infer {
else Some(
if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
- else if ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden) targ
+ // this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
+ else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ
else targ.widen
)
))
@@ -616,7 +629,8 @@ trait Infer {
} else if (argPos.contains(pos)) { // parameter specified twice
namesOK = false
} else {
- positionalAllowed = false
+ if (index != pos)
+ positionalAllowed = false
argPos(index) = pos
}
index += 1
@@ -660,7 +674,7 @@ trait Infer {
case ExistentialType(tparams, qtpe) =>
isApplicable(undetparams, qtpe, argtpes0, pt)
case MethodType(params, _) =>
- val formals = formalTypes(params map { _.tpe }, argtpes0.length)
+ val formals = formalTypes(params map { _.tpe }, argtpes0.length, removeByName = false)
def tryTupleApply: Boolean = {
// if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
@@ -766,7 +780,7 @@ trait Infer {
isAsSpecific(res, ftpe2)
case mt: MethodType if mt.isImplicit =>
isAsSpecific(ftpe1.resultType, ftpe2)
- case MethodType(params, _) if params nonEmpty =>
+ case MethodType(params, _) if params.nonEmpty =>
var argtpes = params map (_.tpe)
if (isVarArgsList(params) && isVarArgsList(ftpe2.params))
argtpes = argtpes map (argtpe =>
@@ -776,7 +790,7 @@ trait Infer {
isAsSpecific(PolyType(tparams, res), ftpe2)
case PolyType(tparams, mt: MethodType) if mt.isImplicit =>
isAsSpecific(PolyType(tparams, mt.resultType), ftpe2)
- case PolyType(_, MethodType(params, _)) if params nonEmpty =>
+ case PolyType(_, MethodType(params, _)) if params.nonEmpty =>
isApplicable(List(), ftpe2, params map (_.tpe), WildcardType)
// case NullaryMethodType(res) =>
// isAsSpecific(res, ftpe2)
@@ -1074,7 +1088,7 @@ trait Infer {
*/
def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) {
val pt = widen(pt0)
- val ptparams = freeTypeParamsOfTerms.collect(pt)
+ val ptparams = freeTypeParamsOfTerms(pt)
val ctorTp = tree.tpe
val resTp = ctorTp.finalResultType
@@ -1189,6 +1203,50 @@ trait Infer {
}
}
+ /** Does `tp` contain any types that cannot be checked at run-time (i.e., after erasure, will isInstanceOf[erased(tp)] imply conceptualIsInstanceOf[tp]?)
+ * we should find a way to ask erasure: hey, is `tp` going to make it through you with all of its isInstanceOf resolving powers intact?
+ * TODO: at the very least, reduce duplication wrt checkCheckable
+ */
+ def containsUnchecked(tp: Type): Boolean = {
+ def check(tp: Type, bound: List[Symbol]): Boolean = {
+ def isSurroundingTypeParam(sym: Symbol) = {
+ val e = context.scope.lookupEntry(sym.name)
+ ( (e ne null)
+ && (e.sym == sym )
+ && !e.sym.isTypeParameterOrSkolem
+ && (e.owner == context.scope)
+ )
+ }
+ def isLocalBinding(sym: Symbol) = (
+ sym.isAbstractType && (
+ (bound contains sym)
+ || (sym.name == tpnme.WILDCARD)
+ || isSurroundingTypeParam(sym)
+ )
+ )
+ tp.normalize match {
+ case SingleType(pre, _) =>
+ check(pre, bound)
+ case TypeRef(_, ArrayClass, arg :: _) =>
+ check(arg, bound)
+ case tp @ TypeRef(pre, sym, args) =>
+ ( (sym.isAbstractType && !isLocalBinding(sym))
+ || (args exists (x => !isLocalBinding(x.typeSymbol)))
+ || check(pre, bound)
+ )
+ // case RefinedType(_, decls) if decls.nonEmpty =>
+ // patternWarning(tp, "refinement ")
+ case RefinedType(parents, _) =>
+ parents exists (p => check(p, bound))
+ case ExistentialType(quantified, tp1) =>
+ check(tp1, bound ::: quantified)
+ case _ =>
+ false
+ }
+ }
+ check(tp, Nil)
+ }
+
def checkCheckable(tree: Tree, tp: Type, kind: String) {
def patternWarning(tp0: Type, prefix: String) = {
context.unit.uncheckedWarning(tree.pos, prefix+tp0+" in type "+kind+tp+" is unchecked since it is eliminated by erasure")
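
For context, a standalone example (not part of the patch) of the kind of type `containsUnchecked` is after: one whose instance test loses information under erasure.

    object UncheckedDemo {
      def describe(x: Any): String = x match {
        // Only the List-ness of `x` survives erasure; the Int element type cannot
        // be verified at run time, which is exactly what makes this "unchecked".
        case _: List[Int] @unchecked => "a List (element type unknown at runtime)"
        case _                       => "something else"
      }

      def main(args: Array[String]): Unit =
        println(describe(List("not", "ints")))   // still reports "a List ..."
    }
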
@@ -1264,8 +1322,8 @@ trait Infer {
def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type): Type = {
val pt = widen(pt0)
- val ptparams = freeTypeParamsOfTerms.collect(pt)
- val tpparams = freeTypeParamsOfTerms.collect(pattp)
+ val ptparams = freeTypeParamsOfTerms(pt)
+ val tpparams = freeTypeParamsOfTerms(pattp)
def ptMatchesPattp = pt matchesPattern pattp.widen
def pattpMatchesPt = pattp matchesPattern pt
@@ -1318,7 +1376,7 @@ trait Infer {
def inferModulePattern(pat: Tree, pt: Type) =
if (!(pat.tpe <:< pt)) {
- val ptparams = freeTypeParamsOfTerms.collect(pt)
+ val ptparams = freeTypeParamsOfTerms(pt)
debuglog("free type params (2) = " + ptparams)
val ptvars = ptparams map freshVar
val pt1 = pt.instantiateTypeParams(ptparams, ptvars)
@@ -1335,19 +1393,6 @@ trait Infer {
}
}
- abstract class SymCollector extends TypeCollector(List[Symbol]()) {
- protected def includeCondition(sym: Symbol): Boolean
-
- def traverse(tp: Type) {
- tp.normalize match {
- case TypeRef(_, sym, _) =>
- if (includeCondition(sym) && !result.contains(sym)) result = sym :: result
- case _ =>
- }
- mapOver(tp)
- }
- }
-
object approximateAbstracts extends TypeMap {
def apply(tp: Type): Type = tp.normalize match {
case TypeRef(pre, sym, _) if sym.isAbstractType => WildcardType
@@ -1355,31 +1400,30 @@ trait Infer {
}
}
- /** A traverser to collect type parameters referred to in a type
+ /** Collects type parameters referred to in a type.
*/
- object freeTypeParamsOfTerms extends SymCollector {
+ def freeTypeParamsOfTerms(tp: Type): List[Symbol] = {
// An inferred type which corresponds to an unknown type
// constructor creates a file/declaration order-dependent crasher
// situation, the behavior of which depends on the state at the
// time the typevar is created. Until we can deal with these
// properly, we can avoid it by ignoring type parameters which
// have type constructors amongst their bounds. See SI-4070.
- protected def includeCondition(sym: Symbol) = (
- sym.isAbstractType
- && sym.owner.isTerm
- && !sym.info.bounds.exists(_.typeParams.nonEmpty)
- )
- }
-
- /** A traverser to collect type parameters referred to in a type
- */
- object freeTypeParametersNoSkolems extends SymCollector {
- protected def includeCondition(sym: Symbol): Boolean =
- sym.isTypeParameter && sym.owner.isTerm
- }
+ def isFreeTypeParamOfTerm(sym: Symbol) = (
+ sym.isAbstractType
+ && sym.owner.isTerm
+ && !sym.info.bounds.exists(_.typeParams.nonEmpty)
+ )
- object typeRefs extends SymCollector {
- protected def includeCondition(sym: Symbol): Boolean = true
+ // Intentionally *not* using `Type#typeSymbol` here, which would normalize `tp`
+ // and collect symbols from the result type of any resulting `PolyType`s, which
+ // are not free type parameters of `tp`.
+ //
+ // Contrast with `isFreeTypeParamNoSkolem`.
+ val syms = tp collect {
+ case TypeRef(_, sym, _) if isFreeTypeParamOfTerm(sym) => sym
+ }
+ syms.distinct
}
/* -- Overload Resolution ---------------------------------------------- */
@@ -1403,10 +1447,10 @@ trait Infer {
* If no alternative matches `pt`, take the parameterless one anyway.
*/
def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match {
- case OverloadedType(pre, alts) => tryTwice {
- val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
- val secondTry = alts0.isEmpty
- val alts1 = if (secondTry) alts else alts0
+ case OverloadedType(pre, alts) => tryTwice { isSecondTry =>
+ val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
+ val noAlternatives = alts0.isEmpty
+ val alts1 = if (noAlternatives) alts else alts0
//println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt)
def improves(sym1: Symbol, sym2: Symbol): Boolean =
@@ -1434,10 +1478,10 @@ trait Infer {
}
}
// todo: missing test case
- NoBestExprAlternativeError(tree, pt)
+ NoBestExprAlternativeError(tree, pt, isSecondTry)
} else if (!competing.isEmpty) {
- if (secondTry) { NoBestExprAlternativeError(tree, pt); setError(tree) }
- else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt)
+ if (noAlternatives) NoBestExprAlternativeError(tree, pt, isSecondTry)
+ else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt, isSecondTry)
} else {
// val applicable = alts1 filter (alt =>
// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
@@ -1516,10 +1560,10 @@ trait Infer {
* assignment expression.
*/
def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
- argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false): Unit = tree.tpe match {
+ argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false, lastInferAttempt: Boolean = true): Unit = tree.tpe match {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- tryTwice {
+ tryTwice { isSecondTry =>
debuglog("infer method alt "+ tree.symbol +" with alternatives "+
(alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt)
@@ -1541,13 +1585,10 @@ trait Infer {
if (improves(alt, best)) alt else best)
val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
if (best == NoSymbol) {
- if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt)
- else inferMethodAlternative(tree, undetparams, argtpes, WildcardType)
+ if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt, isSecondTry && lastInferAttempt)
+ else inferMethodAlternative(tree, undetparams, argtpes, WildcardType, lastInferAttempt = isSecondTry)
} else if (!competing.isEmpty) {
- if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous)
- AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt)
- else setError(tree)
- ()
+ AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt, isSecondTry && lastInferAttempt)
} else {
// checkNotShadowed(tree.pos, pre, best, applicable)
tree.setSymbol(best).setType(pre.memberType(best))
@@ -1561,29 +1602,28 @@ trait Infer {
*
* @param infer ...
*/
- def tryTwice(infer: => Unit): Unit = {
+ def tryTwice(infer: Boolean => Unit): Unit = {
if (context.implicitsEnabled) {
val saved = context.state
var fallback = false
context.setBufferErrors()
- val res = try {
- context.withImplicitsDisabled(infer)
+ try {
+ context.withImplicitsDisabled(infer(false))
if (context.hasErrors) {
fallback = true
context.restoreState(saved)
context.flushBuffer()
- infer
+ infer(true)
}
} catch {
case ex: CyclicReference => throw ex
case ex: TypeError => // recoverable cyclic references
context.restoreState(saved)
- if (!fallback) infer else ()
+ if (!fallback) infer(true) else ()
}
context.restoreState(saved)
- res
}
- else infer
+ else infer(true)
}
/** Assign <code>tree</code> the type of all polymorphic alternatives
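
Outside the compiler, the reworked `tryTwice` has roughly this shape: the callback is told whether it is on its final attempt, which is what now decides whether errors are surfaced. A hedged standalone sketch (the names and the Either result are invented):

    object TryTwiceSketch {
      // The first attempt runs in a restricted mode; if it fails, retry once with
      // the flag set so the callback knows errors from this attempt are the real ones.
      def tryTwice[A](attempt: Boolean => Either[String, A]): Either[String, A] =
        attempt(false) match {
          case Left(_) => attempt(true)
          case ok      => ok
        }

      def main(args: Array[String]): Unit = {
        val result = tryTwice[Int] { isLastTry =>
          if (!isLastTry) Left("first pass failed silently") else Right(42)
        }
        println(result)   // Right(42)
      }
    }
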
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 9608108a0d..c10901cdce 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -9,6 +9,8 @@ import scala.collection.mutable.ListBuffer
import scala.compat.Platform.EOL
import scala.reflect.makro.runtime.{Context => MacroContext}
import scala.reflect.runtime.Mirror
+import util.Statistics._
+import scala.reflect.makro.util._
/**
* Code to deal with macros, namely with:
@@ -35,13 +37,12 @@ import scala.reflect.runtime.Mirror
* (Expr(elems))
* (TypeTag(Int))
*/
-trait Macros { self: Analyzer =>
+trait Macros extends Traces {
+ self: Analyzer =>
+
import global._
import definitions._
-
- val macroDebug = settings.Ymacrodebug.value
- val macroCopypaste = settings.Ymacrocopypaste.value
- val macroTrace = scala.tools.nsc.util.trace when macroDebug
+ def globalSettings = global.settings
val globalMacroCache = collection.mutable.Map[Any, Any]()
val perRunMacroCache = perRunCaches.newMap[Symbol, collection.mutable.Map[Any, Any]]
@@ -75,7 +76,7 @@ trait Macros { self: Analyzer =>
case ThisType(sym) if sym == macroDef.owner =>
SingleType(SingleType(SingleType(NoPrefix, paramsCtx(0)), MacroContextPrefix), ExprValue)
case SingleType(NoPrefix, sym) =>
- vparamss.flatten.find(_.symbol == sym) match {
+ mfind(vparamss)(_.symbol == sym) match {
case Some(macroDefParam) =>
SingleType(SingleType(NoPrefix, param(macroDefParam)), ExprValue)
case _ =>
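
`mmap` and `mfind` are helpers for working on a `List[List[T]]` directly instead of flattening it first. A hedged standalone sketch of what such helpers amount to (the real definitions live in the compiler's collection utilities and may differ); shown only to make the calls in the hunk above concrete:

    object NestedListHelpersSketch {
      def mmap[A, B](xss: List[List[A]])(f: A => B): List[List[B]] =
        xss map (_ map f)

      // Finds the first matching element without building a flattened copy.
      def mfind[A](xss: List[List[A]])(p: A => Boolean): Option[A] =
        xss.iterator.flatMap(_.iterator).find(p)

      def main(args: Array[String]): Unit = {
        val vparamss = List(List(1, 2), List(3, 4))
        println(mmap(vparamss)(_ * 10))      // List(List(10, 20), List(30, 40))
        println(mfind(vparamss)(_ % 2 == 0)) // Some(2)
      }
    }
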
@@ -120,7 +121,7 @@ trait Macros { self: Analyzer =>
val paramsCtx = List(ctxParam)
val paramsThis = List(makeParam(nme.macroThis, macroDef.pos, implType(false, ownerTpe), SYNTHETIC))
val paramsTparams = tparams map param
- val paramssParams = vparamss map (_ map param)
+ val paramssParams = mmap(vparamss)(param)
var paramsss = List[List[List[Symbol]]]()
// tparams are no longer part of a signature, they get into macro implementations via context bounds
@@ -135,11 +136,11 @@ trait Macros { self: Analyzer =>
}
import SigGenerator._
- macroTrace("generating macroImplSigs for: ")(macroDef)
- macroTrace("tparams are: ")(tparams)
- macroTrace("vparamss are: ")(vparamss)
- macroTrace("retTpe is: ")(retTpe)
- macroTrace("macroImplSigs are: ")(paramsss, implRetTpe)
+ macroTraceVerbose("generating macroImplSigs for: ")(macroDef)
+ macroTraceVerbose("tparams are: ")(tparams)
+ macroTraceVerbose("vparamss are: ")(vparamss)
+ macroTraceVerbose("retTpe is: ")(retTpe)
+ macroTraceVerbose("macroImplSigs are: ")(paramsss, implRetTpe)
}
private def transformTypeTagEvidenceParams(paramss: List[List[Symbol]], transform: (Symbol, Symbol) => Option[Symbol]): List[List[Symbol]] = {
@@ -154,7 +155,7 @@ trait Macros { self: Analyzer =>
case TypeRef(SingleType(NoPrefix, contextParam), sym, List(tparam)) =>
var wannabe = sym
while (wannabe.isAliasType) wannabe = wannabe.info.typeSymbol
- if (wannabe != definitions.TypeTagClass)
+ if (wannabe != definitions.TypeTagClass && wannabe != definitions.ConcreteTypeTagClass)
List(param)
else
transform(param, tparam.typeSymbol) map (_ :: Nil) getOrElse Nil
@@ -182,10 +183,14 @@ trait Macros { self: Analyzer =>
*/
def typedMacroBody(typer: Typer, ddef: DefDef): Tree = {
import typer.context
- if (macroDebug) println("typechecking macro def %s at %s".format(ddef.symbol, ddef.pos))
+ macroLogVerbose("typechecking macro def %s at %s".format(ddef.symbol, ddef.pos))
- implicit def augmentString(s: String) = new AugmentedString(s)
- class AugmentedString(s: String) {
+ if (!typer.checkFeature(ddef.pos, MacrosFeature, immediate = true)) {
+ ddef.symbol setFlag IS_ERROR
+ return EmptyTree
+ }
+
+ implicit class AugmentedString(s: String) {
def abbreviateCoreAliases: String = { // hack!
var result = s
result = result.replace("c.mirror.TypeTag", "c.TypeTag")
@@ -262,7 +267,7 @@ trait Macros { self: Analyzer =>
val rhs = ddef.rhs
validatePreTyper(rhs)
- if (hasErrors) macroTrace("macro def failed to satisfy trivial preconditions: ")(macroDef)
+ if (hasErrors) macroTraceVerbose("macro def failed to satisfy trivial preconditions: ")(macroDef)
// we use typed1 instead of typed, because otherwise adapt is going to mess us up
// if adapt sees <qualifier>.<method>, it will want to perform eta-expansion and will fail
@@ -279,12 +284,7 @@ trait Macros { self: Analyzer =>
case Success(expanded) =>
try {
val typechecked = typer.typed1(expanded, EXPRmode, WildcardType)
- if (macroDebug) {
- println("typechecked1:")
- println(typechecked)
- println(showRaw(typechecked))
- }
-
+ macroLogVerbose("typechecked1:%n%s%n%s".format(typechecked, showRaw(typechecked)))
typechecked
} finally {
openMacros = openMacros.tail
@@ -307,7 +307,7 @@ trait Macros { self: Analyzer =>
var rhs1 = typecheckRhs(rhs)
def typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
hasErrors = hasErrors || typecheckedWithErrors
- if (typecheckedWithErrors) macroTrace("body of a macro def failed to typecheck: ")(ddef)
+ if (typecheckedWithErrors) macroTraceVerbose("body of a macro def failed to typecheck: ")(ddef)
val macroImpl = rhs1.symbol
macroDef withAnnotation AnnotationInfo(MacroImplAnnotation.tpe, List(rhs1), Nil)
@@ -325,7 +325,7 @@ trait Macros { self: Analyzer =>
validatePostTyper(rhs1)
}
if (hasErrors)
- macroTrace("macro def failed to satisfy trivial preconditions: ")(macroDef)
+ macroTraceVerbose("macro def failed to satisfy trivial preconditions: ")(macroDef)
}
if (!hasErrors) {
@@ -346,6 +346,16 @@ trait Macros { self: Analyzer =>
if (actparamss.length != reqparamss.length)
compatibilityError("number of parameter sections differ")
+ def checkSubType(slot: String, reqtpe: Type, acttpe: Type): Unit = {
+ val ok = if (macroDebugVerbose) {
+ if (reqtpe eq acttpe) println(reqtpe + " <: " + acttpe + "?" + EOL + "true")
+ withTypesExplained(reqtpe <:< acttpe)
+ } else reqtpe <:< acttpe
+ if (!ok) {
+ compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, reqtpe.toString.abbreviateCoreAliases, acttpe.toString.abbreviateCoreAliases))
+ }
+ }
+
if (!hasErrors) {
try {
for ((rparams, aparams) <- reqparamss zip actparamss) {
@@ -371,27 +381,20 @@ trait Macros { self: Analyzer =>
compatibilityError("types incompatible for parameter "+aparam.name+": corresponding is not a vararg parameter")
if (!hasErrors) {
var atpe = aparam.tpe.substSym(flatactparams, flatreqparams).instantiateTypeParams(tparams, tvars)
-
+ atpe = atpe.dealias // SI-5706
// strip the { type PrefixType = ... } refinement off the Context or otherwise we get compatibility errors
atpe = atpe match {
case RefinedType(List(tpe), Scope(sym)) if tpe == MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
case _ => atpe
}
-
- val ok = if (macroDebug) withTypesExplained(rparam.tpe <:< atpe) else rparam.tpe <:< atpe
- if (!ok) {
- compatibilityError("type mismatch for parameter "+rparam.name+": "+rparam.tpe.toString.abbreviateCoreAliases+" does not conform to "+atpe)
- }
+ checkSubType("parameter " + rparam.name, rparam.tpe, atpe)
}
}
}
}
if (!hasErrors) {
val atpe = actres.substSym(flatactparams, flatreqparams).instantiateTypeParams(tparams, tvars)
- val ok = if (macroDebug) withTypesExplained(atpe <:< reqres) else atpe <:< reqres
- if (!ok) {
- compatibilityError("type mismatch for return type : "+reqres.toString.abbreviateCoreAliases+" does not conform to "+(if (ddef.tpt.tpe != null) atpe.toString else atpe.toString.abbreviateCoreAliases))
- }
+ checkSubType("return type", atpe, reqres)
}
if (!hasErrors) {
val targs = solvedTypes(tvars, tparams, tparams map varianceInType(actres), false,
@@ -420,7 +423,7 @@ trait Macros { self: Analyzer =>
var actparamss = macroImpl.paramss
actparamss = transformTypeTagEvidenceParams(actparamss, (param, tparam) => None)
- val rettpe = if (ddef.tpt.tpe != null) ddef.tpt.tpe else computeMacroDefTypeFromMacroImpl(ddef, macroDef, macroImpl)
+ val rettpe = if (!ddef.tpt.isEmpty) typer.typedType(ddef.tpt).tpe else computeMacroDefTypeFromMacroImpl(ddef, macroDef, macroImpl)
val (reqparamsss0, reqres0) = macroImplSigs(macroDef, ddef.tparams, ddef.vparamss, rettpe)
var reqparamsss = reqparamsss0
@@ -429,7 +432,7 @@ trait Macros { self: Analyzer =>
val implicitParams = actparamss.flatten filter (_.isImplicit)
if (implicitParams.length > 0) {
reportError(implicitParams.head.pos, "macro implementations cannot have implicit parameters other than TypeTag evidences")
- macroTrace("macro def failed to satisfy trivial preconditions: ")(macroDef)
+ macroTraceVerbose("macro def failed to satisfy trivial preconditions: ")(macroDef)
}
if (!hasErrors) {
@@ -450,9 +453,9 @@ trait Macros { self: Analyzer =>
"\n found : "+showMeth(actparamss, actres, false)+
"\n"+addendum)
- macroTrace("considering " + reqparamsss.length + " possibilities of compatible macro impl signatures for macro def: ")(ddef.name)
+ macroTraceVerbose("considering " + reqparamsss.length + " possibilities of compatible macro impl signatures for macro def: ")(ddef.name)
val results = reqparamsss map (checkCompatibility(_, actparamss, reqres, actres))
- if (macroDebug) (reqparamsss zip results) foreach { case (reqparamss, result) =>
+ if (macroDebugVerbose) (reqparamsss zip results) foreach { case (reqparamss, result) =>
println("%s %s".format(if (result.isEmpty) "[ OK ]" else "[FAILED]", reqparamss))
result foreach (errorMsg => println(" " + errorMsg))
}
@@ -464,7 +467,7 @@ trait Macros { self: Analyzer =>
compatibilityError(mostRelevantMessage)
} else {
assert((results filter (_.isEmpty)).length == 1, results)
- if (macroDebug) (reqparamsss zip results) filter (_._2.isEmpty) foreach { case (reqparamss, result) =>
+ if (macroDebugVerbose) (reqparamsss zip results) filter (_._2.isEmpty) foreach { case (reqparamss, result) =>
println("typechecked macro impl as: " + reqparamss)
}
}
@@ -537,20 +540,20 @@ trait Macros { self: Analyzer =>
def unsigma(tpe: Type): Type = {
// unfortunately, we cannot dereference ``paramss'', because we're in the middle of inferring a type for ``macroDef''
// val defParamss = macroDef.paramss
- val defParamss = macroDdef.vparamss map (_ map (_.symbol))
+ val defParamss = mmap(macroDdef.vparamss)(_.symbol)
var implParamss = macroImpl.paramss
implParamss = transformTypeTagEvidenceParams(implParamss, (param, tparam) => None)
val implCtxParam = if (implParamss.length > 0 && implParamss(0).length > 0) implParamss(0)(0) else null
def implParamToDefParam(implParam: Symbol): Symbol = {
- val indices = (implParamss drop 1 zipWithIndex) map { case (implParams, index) => (index, implParams indexOf implParam) } filter (_._2 != -1) headOption;
+ val indices = (((implParamss drop 1).zipWithIndex) map { case (implParams, index) => (index, implParams indexOf implParam) } filter (_._2 != -1)).headOption
val defParam = indices flatMap {
case (plistIndex, pIndex) =>
if (defParamss.length <= plistIndex) None
else if (defParamss(plistIndex).length <= pIndex) None
else Some(defParamss(plistIndex)(pIndex))
}
- defParam orNull
+ defParam.orNull
}
class UnsigmaTypeMap extends TypeMap {
@@ -588,11 +591,11 @@ trait Macros { self: Analyzer =>
val libraryClassLoader = {
if (settings.XmacroPrimaryClasspath.value != "") {
- if (macroDebug) println("primary macro mirror: initializing from -Xmacro-primary-classpath: %s".format(settings.XmacroPrimaryClasspath.value))
+ macroLogVerbose("primary macro mirror: initializing from -Xmacro-primary-classpath: %s".format(settings.XmacroPrimaryClasspath.value))
val classpath = toURLs(settings.XmacroFallbackClasspath.value)
ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
} else {
- if (macroDebug) println("primary macro mirror: initializing from -cp: %s".format(global.classPath.asURLs))
+ macroLogVerbose("primary macro mirror: initializing from -cp: %s".format(global.classPath.asURLs))
val classpath = global.classPath.asURLs
var loader: ClassLoader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
@@ -618,7 +621,7 @@ trait Macros { self: Analyzer =>
throw new UnsupportedOperationException("Scala reflection not available on this platform")
val fallbackClassLoader = {
- if (macroDebug) println("fallback macro mirror: initializing from -Xmacro-fallback-classpath: %s".format(settings.XmacroFallbackClasspath.value))
+ macroLogVerbose("fallback macro mirror: initializing from -Xmacro-fallback-classpath: %s".format(settings.XmacroFallbackClasspath.value))
val classpath = toURLs(settings.XmacroFallbackClasspath.value)
ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
}
@@ -636,137 +639,181 @@ trait Macros { self: Analyzer =>
* @return Some(runtime) if macro implementation can be loaded successfully from either of the mirrors,
* None otherwise.
*/
- private def macroRuntime(macroDef: Symbol): Option[List[Any] => Any] = {
- macroTrace("looking for macro implementation: ")(macroDef)
- macroTrace("macroDef is annotated with: ")(macroDef.annotations)
-
- val ann = macroDef.getAnnotation(MacroImplAnnotation)
- if (ann == None) {
- macroTrace("@macroImpl annotation is missing (this means that macro definition failed to typecheck)")(macroDef)
- return None
- }
+ private type MacroRuntime = List[Any] => Any
+ private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, Option[MacroRuntime]]
+ private lazy val fastTrack: Map[Symbol, MacroRuntime] = {
+ import scala.reflect.api.Universe
+ import scala.reflect.makro.internal._
+ Map( // challenge: how can we factor out the common code? Does not seem to be easy.
+ MacroInternal_materializeArrayTag -> (args => {
+ assert(args.length == 3, args)
+ val c = args(0).asInstanceOf[MacroContext]
+ materializeArrayTag_impl(c)(args(1).asInstanceOf[c.Expr[Universe]])(args(2).asInstanceOf[c.TypeTag[_]])
+ }),
+ MacroInternal_materializeErasureTag -> (args => {
+ assert(args.length == 3, args)
+ val c = args(0).asInstanceOf[MacroContext]
+ materializeErasureTag_impl(c)(args(1).asInstanceOf[c.Expr[Universe]])(args(2).asInstanceOf[c.TypeTag[_]])
+ }),
+ MacroInternal_materializeClassTag -> (args => {
+ assert(args.length == 3, args)
+ val c = args(0).asInstanceOf[MacroContext]
+ materializeClassTag_impl(c)(args(1).asInstanceOf[c.Expr[Universe]])(args(2).asInstanceOf[c.TypeTag[_]])
+ }),
+ MacroInternal_materializeTypeTag -> (args => {
+ assert(args.length == 3, args)
+ val c = args(0).asInstanceOf[MacroContext]
+ materializeTypeTag_impl(c)(args(1).asInstanceOf[c.Expr[Universe]])(args(2).asInstanceOf[c.TypeTag[_]])
+ }),
+ MacroInternal_materializeConcreteTypeTag -> (args => {
+ assert(args.length == 3, args)
+ val c = args(0).asInstanceOf[MacroContext]
+ materializeConcreteTypeTag_impl(c)(args(1).asInstanceOf[c.Expr[Universe]])(args(2).asInstanceOf[c.TypeTag[_]])
+ })
+ )
+ }
+ private def macroRuntime(macroDef: Symbol): Option[MacroRuntime] = {
+ macroTraceVerbose("looking for macro implementation: ")(macroDef)
+ if (fastTrack contains macroDef) {
+ macroLogVerbose("macro expansion serviced by a fast track")
+ Some(fastTrack(macroDef))
+ } else {
+ macroRuntimesCache.getOrElseUpdate(macroDef, {
+ val runtime = {
+ macroTraceVerbose("looking for macro implementation: ")(macroDef)
+ macroTraceVerbose("macroDef is annotated with: ")(macroDef.annotations)
- val macroImpl = ann.get.args(0).symbol
- if (macroImpl == NoSymbol) {
- macroTrace("@macroImpl annotation is malformed (this means that macro definition failed to typecheck)")(macroDef)
- return None
- }
+ val ann = macroDef.getAnnotation(MacroImplAnnotation)
+ if (ann == None) {
+ macroTraceVerbose("@macroImpl annotation is missing (this means that macro definition failed to typecheck)")(macroDef)
+ return None
+ }
- if (macroDebug) println("resolved implementation %s at %s".format(macroImpl, macroImpl.pos))
- if (macroImpl.isErroneous) {
- macroTrace("macro implementation is erroneous (this means that either macro body or macro implementation signature failed to typecheck)")(macroDef)
- return None
- }
+ val macroImpl = ann.get.args(0).symbol
+ if (macroImpl == NoSymbol) {
+ macroTraceVerbose("@macroImpl annotation is malformed (this means that macro definition failed to typecheck)")(macroDef)
+ return None
+ }
- def loadMacroImpl(macroMirror: Mirror): Option[(Object, macroMirror.Symbol)] = {
- try {
- // this logic relies on the assumptions that were valid for the old macro prototype
- // namely that macro implementations can only be defined in top-level classes and modules
- // with the new prototype that materialized in a SIP, macros need to be statically accessible, which is different
- // for example, a macro def could be defined in a trait that is implemented by an object
- // there are some more clever cases when seemingly non-static method ends up being statically accessible
- // however, the code below doesn't account for these guys, because it'd take a look of time to get it right
- // for now I leave it as a todo and move along to more the important stuff
-
- macroTrace("loading implementation class from %s: ".format(macroMirror))(macroImpl.owner.fullName)
- macroTrace("classloader is: ")("%s of type %s".format(macroMirror.classLoader, if (macroMirror.classLoader != null) macroMirror.classLoader.getClass.toString else "primordial classloader"))
- def inferClasspath(cl: ClassLoader) = cl match {
- case cl: java.net.URLClassLoader => "[" + (cl.getURLs mkString ",") + "]"
- case null => "[" + scala.tools.util.PathResolver.Environment.javaBootClassPath + "]"
- case _ => "<unknown>"
- }
- macroTrace("classpath is: ")(inferClasspath(macroMirror.classLoader))
-
- // [Eugene] relies on the fact that macro implementations can only be defined in static classes
- // [Martin to Eugene] There's similar logic buried in Symbol#flatname. Maybe we can refactor?
- def classfile(sym: Symbol): String = {
- def recur(sym: Symbol): String = sym match {
- case sym if sym.owner.isPackageClass =>
- val suffix = if (sym.isModuleClass) "$" else ""
- sym.fullName + suffix
- case sym =>
- val separator = if (sym.owner.isModuleClass) "" else "$"
- recur(sym.owner) + separator + sym.javaSimpleName.toString
+ macroLogVerbose("resolved implementation %s at %s".format(macroImpl, macroImpl.pos))
+ if (macroImpl.isErroneous) {
+ macroTraceVerbose("macro implementation is erroneous (this means that either macro body or macro implementation signature failed to typecheck)")(macroDef)
+ return None
}
- if (sym.isClass || sym.isModule) recur(sym)
- else recur(sym.enclClass)
- }
+ def loadMacroImpl(macroMirror: Mirror): Option[(Object, macroMirror.Symbol)] = {
+ try {
+ // this logic relies on the assumptions that were valid for the old macro prototype
+ // namely that macro implementations can only be defined in top-level classes and modules
+ // with the new prototype that materialized in a SIP, macros need to be statically accessible, which is different
+ // for example, a macro def could be defined in a trait that is implemented by an object
+ // there are some more clever cases when a seemingly non-static method ends up being statically accessible
+ // however, the code below doesn't account for these guys, because it'd take a lot of time to get it right
+ // for now I leave it as a todo and move along to the more important stuff
+
+ macroTraceVerbose("loading implementation class from %s: ".format(macroMirror))(macroImpl.owner.fullName)
+ macroTraceVerbose("classloader is: ")("%s of type %s".format(macroMirror.classLoader, if (macroMirror.classLoader != null) macroMirror.classLoader.getClass.toString else "primordial classloader"))
+ def inferClasspath(cl: ClassLoader) = cl match {
+ case cl: java.net.URLClassLoader => "[" + (cl.getURLs mkString ",") + "]"
+ case null => "[" + scala.tools.util.PathResolver.Environment.javaBootClassPath + "]"
+ case _ => "<unknown>"
+ }
+ macroTraceVerbose("classpath is: ")(inferClasspath(macroMirror.classLoader))
+
+ // [Eugene] relies on the fact that macro implementations can only be defined in static classes
+ // [Martin to Eugene] There's similar logic buried in Symbol#flatname. Maybe we can refactor?
+ def classfile(sym: Symbol): String = {
+ def recur(sym: Symbol): String = sym match {
+ case sym if sym.owner.isPackageClass =>
+ val suffix = if (sym.isModuleClass) "$" else ""
+ sym.fullName + suffix
+ case sym =>
+ val separator = if (sym.owner.isModuleClass) "" else "$"
+ recur(sym.owner) + separator + sym.javaSimpleName.toString
+ }
- // [Eugene] this doesn't work for inner classes
- // neither does macroImpl.owner.javaClassName, so I had to roll my own implementation
- //val receiverName = macroImpl.owner.fullName
- val implClassName = classfile(macroImpl.owner)
- val implClassSymbol: macroMirror.Symbol = macroMirror.symbolForName(implClassName)
+ if (sym.isClass || sym.isModule) recur(sym)
+ else recur(sym.enclClass)
+ }
- if (macroDebug) {
- println("implClassSymbol is: " + implClassSymbol.fullNameString)
+ // [Eugene] this doesn't work for inner classes
+ // neither does macroImpl.owner.javaClassName, so I had to roll my own implementation
+ //val receiverName = macroImpl.owner.fullName
+ val implClassName = classfile(macroImpl.owner)
+ val implClassSymbol: macroMirror.Symbol = macroMirror.symbolForName(implClassName)
- if (implClassSymbol != macroMirror.NoSymbol) {
- val implClass = macroMirror.classToJava(implClassSymbol)
- val implSource = implClass.getProtectionDomain.getCodeSource
- println("implClass is %s from %s".format(implClass, implSource))
- println("implClassLoader is %s with classpath %s".format(implClass.getClassLoader, inferClasspath(implClass.getClassLoader)))
- }
- }
+ if (macroDebugVerbose) {
+ println("implClassSymbol is: " + implClassSymbol.fullNameString)
- val implObjSymbol = implClassSymbol.companionModule
- macroTrace("implObjSymbol is: ")(implObjSymbol.fullNameString)
+ if (implClassSymbol != macroMirror.NoSymbol) {
+ val implClass = macroMirror.classToJava(implClassSymbol)
+ val implSource = implClass.getProtectionDomain.getCodeSource
+ println("implClass is %s from %s".format(implClass, implSource))
+ println("implClassLoader is %s with classpath %s".format(implClass.getClassLoader, inferClasspath(implClass.getClassLoader)))
+ }
+ }
- if (implObjSymbol == macroMirror.NoSymbol) None
- else {
- // yet another reflection method that doesn't work for inner classes
- //val receiver = macroMirror.companionInstance(receiverClass)
- val implObj = try {
- val implObjClass = java.lang.Class.forName(implClassName, true, macroMirror.classLoader)
- implObjClass getField "MODULE$" get null
- } catch {
- case ex: NoSuchFieldException => macroTrace("exception when loading implObj: ")(ex); null
- case ex: NoClassDefFoundError => macroTrace("exception when loading implObj: ")(ex); null
- case ex: ClassNotFoundException => macroTrace("exception when loading implObj: ")(ex); null
- }
+ val implObjSymbol = implClassSymbol.companionModule
+ macroTraceVerbose("implObjSymbol is: ")(implObjSymbol.fullNameString)
+
+ if (implObjSymbol == macroMirror.NoSymbol) None
+ else {
+ // yet another reflection method that doesn't work for inner classes
+ //val receiver = macroMirror.companionInstance(receiverClass)
+ val implObj = try {
+ val implObjClass = java.lang.Class.forName(implClassName, true, macroMirror.classLoader)
+ implObjClass getField "MODULE$" get null
+ } catch {
+ case ex: NoSuchFieldException => macroTraceVerbose("exception when loading implObj: ")(ex); null
+ case ex: NoClassDefFoundError => macroTraceVerbose("exception when loading implObj: ")(ex); null
+ case ex: ClassNotFoundException => macroTraceVerbose("exception when loading implObj: ")(ex); null
+ }
- if (implObj == null) None
- else {
- val implMethSymbol = implObjSymbol.info.member(macroMirror.newTermName(macroImpl.name.toString))
- if (macroDebug) {
- println("implMethSymbol is: " + implMethSymbol.fullNameString)
- println("jimplMethSymbol is: " + macroMirror.methodToJava(implMethSymbol))
- }
+ if (implObj == null) None
+ else {
+ val implMethSymbol = implObjSymbol.info.member(macroMirror.newTermName(macroImpl.name.toString))
+ macroLogVerbose("implMethSymbol is: " + implMethSymbol.fullNameString)
+ macroLogVerbose("jimplMethSymbol is: " + macroMirror.methodToJava(implMethSymbol))
- if (implMethSymbol == macroMirror.NoSymbol) None
- else {
- if (macroDebug) println("successfully loaded macro impl as (%s, %s)".format(implObj, implMethSymbol))
- Some((implObj, implMethSymbol))
+ if (implMethSymbol == macroMirror.NoSymbol) None
+ else {
+ macroLogVerbose("successfully loaded macro impl as (%s, %s)".format(implObj, implMethSymbol))
+ Some((implObj, implMethSymbol))
+ }
+ }
+ }
+ } catch {
+ case ex: ClassNotFoundException =>
+ macroTraceVerbose("implementation class failed to load: ")(ex.toString)
+ None
}
}
- }
- } catch {
- case ex: ClassNotFoundException =>
- macroTrace("implementation class failed to load: ")(ex.toString)
- None
- }
- }
- val primary = loadMacroImpl(primaryMirror)
- primary match {
- case Some((implObj, implMethSymbol)) =>
- def runtime(args: List[Any]) = primaryMirror.invoke(implObj, implMethSymbol)(args: _*).asInstanceOf[Any]
- Some(runtime)
- case None =>
- if (settings.XmacroFallbackClasspath.value != "") {
- if (macroDebug) println("trying to load macro implementation from the fallback mirror: %s".format(settings.XmacroFallbackClasspath.value))
- val fallback = loadMacroImpl(fallbackMirror)
- fallback match {
+ val primary = loadMacroImpl(primaryMirror)
+ primary match {
case Some((implObj, implMethSymbol)) =>
- def runtime(args: List[Any]) = fallbackMirror.invoke(implObj, implMethSymbol)(args: _*).asInstanceOf[Any]
- Some(runtime)
+ def runtime(args: List[Any]) = primaryMirror.invoke(implObj, implMethSymbol)(args: _*).asInstanceOf[Any]
+ Some(runtime _)
case None =>
- None
+ if (settings.XmacroFallbackClasspath.value != "") {
+ macroLogVerbose("trying to load macro implementation from the fallback mirror: %s".format(settings.XmacroFallbackClasspath.value))
+ val fallback = loadMacroImpl(fallbackMirror)
+ fallback match {
+ case Some((implObj, implMethSymbol)) =>
+ def runtime(args: List[Any]) = fallbackMirror.invoke(implObj, implMethSymbol)(args: _*).asInstanceOf[Any]
+ Some(runtime _)
+ case None =>
+ None
+ }
+ } else {
+ None
+ }
}
- } else {
- None
}
+
+ if (runtime == None) macroDef setFlag IS_ERROR
+ runtime
+ })
}
}
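
The new lookup combines a fast-track table with a per-run memoising cache, so the expensive resolution runs at most once per macro definition. A hedged standalone sketch of that shape (all names invented; a plain mutable map stands in for the compiler's `perRunCaches`):

    import scala.collection.mutable

    object RuntimeCacheSketch {
      type Runtime = List[Any] => Any

      // Built-in entries are serviced directly, bypassing the lookup machinery.
      val fastTrack: Map[String, Runtime] =
        Map("builtin" -> (args => "fast(" + args.mkString(",") + ")"))

      // Memoises the result of the expensive lookup, including negative results.
      private val cache = mutable.Map.empty[String, Option[Runtime]]

      def expensiveLookup(name: String): Option[Runtime] = {
        println("resolving " + name + " ...")   // printed at most once per name
        if (name startsWith "macro") Some(args => name + args.mkString("(", ",", ")"))
        else None
      }

      def runtimeFor(name: String): Option[Runtime] =
        fastTrack.get(name) orElse cache.getOrElseUpdate(name, expensiveLookup(name))

      def main(args: Array[String]): Unit = {
        println(runtimeFor("builtin") map (_(List(1))))
        println(runtimeFor("macroFoo") map (_(List(1, 2))))
        println(runtimeFor("macroFoo") map (_(List(3))))   // served from the cache
        println(runtimeFor("missing"))                     // None, also cached
      }
    }
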
@@ -779,7 +826,7 @@ trait Macros { self: Analyzer =>
val prefix = Expr(prefixTree)(TypeTag.Nothing)
val expandee = expandeeTree
} with MacroContext {
- override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, openMacros.length - 1 /* exclude myself */)
+ override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, enclosingMacros.length - 1 /* exclude myself */)
}
/** Calculate the arguments to pass to a macro implementation when expanding the provided tree.
@@ -790,9 +837,12 @@ trait Macros { self: Analyzer =>
* @return list of runtime objects to pass to the implementation obtained by ``macroRuntime''
*/
private def macroArgs(typer: Typer, expandee: Tree): Option[List[Any]] = {
+ val macroDef = expandee.symbol
+ val runtime = macroRuntime(macroDef) orElse { return None }
var prefixTree: Tree = EmptyTree
- var typeArgs = List[Tree]()
- val exprArgs = new ListBuffer[List[Expr[_]]]
+ var typeArgs = List[Tree]()
+ val exprArgs = ListBuffer[List[Expr[_]]]()
+
def collectMacroArgs(tree: Tree): Unit = tree match {
case Apply(fn, args) =>
// todo. infer precise typetag for this Expr, namely the declared type of the corresponding macro impl argument
@@ -806,16 +856,15 @@ trait Macros { self: Analyzer =>
case _ =>
}
collectMacroArgs(expandee)
- val context = macroContext(typer, prefixTree, expandee)
+ val context = expandee.attachmentOpt[MacroAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefixTree, expandee))
var argss: List[List[Any]] = List(context) :: exprArgs.toList
- macroTrace("argss: ")(argss)
+ macroTraceVerbose("argss: ")(argss)
- val macroDef = expandee.symbol
val ann = macroDef.getAnnotation(MacroImplAnnotation).getOrElse(throw new Error("assertion failed. %s: %s".format(macroDef, macroDef.annotations)))
val macroImpl = ann.args(0).symbol
var paramss = macroImpl.paramss
val tparams = macroImpl.typeParams
- macroTrace("paramss: ")(paramss)
+ macroTraceVerbose("paramss: ")(paramss)
// we need to take care of all possible combos of nullary/empty-paramlist macro defs vs nullary/empty-arglist invocations
// nullary def + nullary invocation => paramss and argss match, everything is okay
@@ -824,10 +873,10 @@ trait Macros { self: Analyzer =>
// empty-paramlist def + empty-arglist invocation => paramss and argss match, everything is okay
// that's almost it, but we need to account for the fact that paramss might have context bounds that mask the empty last paramlist
val paramss_without_evidences = transformTypeTagEvidenceParams(paramss, (param, tparam) => None)
- val isEmptyParamlistDef = paramss_without_evidences.length != 0 && paramss_without_evidences.last.isEmpty
- val isEmptyArglistInvocation = argss.length != 0 && argss.last.isEmpty
+ val isEmptyParamlistDef = paramss_without_evidences.nonEmpty && paramss_without_evidences.last.isEmpty
+ val isEmptyArglistInvocation = argss.nonEmpty && argss.last.isEmpty
if (isEmptyParamlistDef && !isEmptyArglistInvocation) {
- if (macroDebug) println("isEmptyParamlistDef && !isEmptyArglistInvocation: appending a List() to argss")
+ macroLogVerbose("isEmptyParamlistDef && !isEmptyArglistInvocation: appending a List() to argss")
argss = argss :+ Nil
}
@@ -835,7 +884,7 @@ trait Macros { self: Analyzer =>
val numParamLists = paramss_without_evidences.length
val numArgLists = argss.length
if (numParamLists != numArgLists) {
- typer.context.error(expandee.pos, "macros cannot be partially applied")
+ typer.TyperErrorGen.MacroPartialApplicationError(expandee)
return None
}
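
The arity bookkeeping above (padding `argss` with a trailing `Nil` when the definition ends in an empty parameter list, then rejecting genuine partial application) can be sketched standalone as follows; the names and the Either result are invented for the example:

    object ArglistPaddingSketch {
      def align(paramss: List[List[String]], argss0: List[List[Any]]): Either[String, List[(List[String], List[Any])]] = {
        val isEmptyParamlistDef      = paramss.nonEmpty && paramss.last.isEmpty
        val isEmptyArglistInvocation = argss0.nonEmpty && argss0.last.isEmpty
        // def f(x: Int)() invoked as f(1): add the missing empty argument list.
        val argss = if (isEmptyParamlistDef && !isEmptyArglistInvocation) argss0 :+ Nil else argss0
        if (argss.length != paramss.length) Left("macros cannot be partially applied")
        else Right(paramss zip argss)
      }

      def main(args: Array[String]): Unit = {
        println(align(List(List("x"), Nil), List(List(1))))         // padded; Right(...)
        println(align(List(List("x"), List("y")), List(List(1))))   // Left(...)
      }
    }
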
@@ -855,9 +904,8 @@ trait Macros { self: Analyzer =>
// then T and U need to be inferred from the lexical scope of the call using ``asSeenFrom''
// whereas V won't be resolved by asSeenFrom and needs to be loaded directly from ``expandee'', which needs to contain a TypeApply node
// also, macro implementation reference may contain a regular type as a type argument, then we pass it verbatim
- paramss = transformTypeTagEvidenceParams(paramss, (param, tparam) => Some(tparam))
- if (paramss.lastOption map (params => !params.isEmpty && params.forall(_.isType)) getOrElse false) argss = argss :+ Nil
- val evidences = paramss.last takeWhile (_.isType) map (tparam => {
+ val resolved = collection.mutable.Map[Symbol, Type]()
+ paramss = transformTypeTagEvidenceParams(paramss, (param, tparam) => {
val TypeApply(_, implRefTargs) = ann.args(0)
var implRefTarg = implRefTargs(tparam.paramPos).tpe.typeSymbol
val tpe = if (implRefTarg.isTypeParameterOrSkolem) {
@@ -873,13 +921,28 @@ trait Macros { self: Analyzer =>
macroDef.owner)
} else
implRefTarg.tpe
- if (macroDebug) println("resolved tparam %s as %s".format(tparam, tpe))
- tpe
- }) map (tpe => {
- val ttag = TypeTag(tpe)
+ macroLogVerbose("resolved tparam %s as %s".format(tparam, tpe))
+ resolved(tparam) = tpe
+ param.tpe.typeSymbol match {
+ case definitions.TypeTagClass =>
+ // do nothing
+ case definitions.ConcreteTypeTagClass =>
+ if (!tpe.isConcrete) context.abort(context.enclosingPosition, "cannot create ConcreteTypeTag from a type %s having unresolved type parameters".format(tpe))
+ // otherwise do nothing
+ case _ =>
+ throw new Error("unsupported tpe: " + tpe)
+ }
+ Some(tparam)
+ })
+ val tags = paramss.last takeWhile (_.isType) map (resolved(_)) map (tpe => {
+ // generally speaking, it's impossible to calculate erasure from a tpe here
+ // the tpe might be compiled by this run, so its jClass might not exist yet
+ // hence I just pass `null` instead and leave this puzzle to macro programmers
+ val ttag = TypeTag(tpe, null)
if (ttag.isConcrete) ttag.toConcrete else ttag
})
- argss = argss.dropRight(1) :+ (evidences ++ argss.last)
+ if (paramss.lastOption map (params => !params.isEmpty && params.forall(_.isType)) getOrElse false) argss = argss :+ Nil
+ argss = argss.dropRight(1) :+ (tags ++ argss.last) // todo. add support for context bounds in argss
assert(argss.length == paramss.length, "argss: %s, paramss: %s".format(argss, paramss))
val rawArgss = for ((as, ps) <- argss zip paramss) yield {
@@ -887,7 +950,7 @@ trait Macros { self: Analyzer =>
else as
}
val rawArgs = rawArgss.flatten
- macroTrace("rawArgs: ")(rawArgs)
+ macroTraceVerbose("rawArgs: ")(rawArgs)
Some(rawArgs)
}
@@ -895,6 +958,7 @@ trait Macros { self: Analyzer =>
* See more information in comments to ``openMacros'' in ``scala.reflect.makro.Context''.
*/
var openMacros = List[MacroContext]()
+ def enclosingMacroPosition = openMacros map (_.macroApplication.pos) find (_ ne NoPosition) getOrElse NoPosition
/** Performs macro expansion:
* 1) Checks whether the expansion needs to be delayed (see ``mustDelayMacroExpansion'')
@@ -903,13 +967,13 @@ trait Macros { self: Analyzer =>
* 4) Checks that the result is a tree bound to this universe
* 5) Typechecks the result against the return type of the macro definition
*
- * If -Ymacro-debug is enabled, you will get detailed log of how exactly this function
+ * If -Ymacro-debug-lite is enabled, you will get basic notifications about macro expansion
+ * along with macro expansions logged in the form that can be copy/pasted verbatim into REPL.
+ *
+ * If -Ymacro-debug-verbose is enabled, you will get detailed log of how exactly this function
* performs class loading and method resolution in order to load the macro implementation.
* The log will also include other non-trivial steps of macro expansion.
*
- * If -Ymacro-copypaste is enabled along with -Ymacro-debug, you will get macro expansions
- * logged in the form that can be copy/pasted verbatim into REPL (useful for debugging!).
- *
* @return
* the expansion result if the expansion has been successful,
* the fallback method invocation if the expansion has been unsuccessful, but there is a fallback,
@@ -918,64 +982,74 @@ trait Macros { self: Analyzer =>
* the expandee with an error marker set if the expansion has been cancelled due to malformed arguments or implementation
* the expandee with an error marker set if there has been an error
*/
- def macroExpand(typer: Typer, expandee: Tree, pt: Type): Tree =
- macroExpand1(typer, expandee) match {
- case Success(expanded) =>
- try {
- var expectedTpe = expandee.tpe
-
- // [Eugene] weird situation. what's the conventional way to deal with it?
- val isNullaryInvocation = expandee match {
- case TypeApply(Select(_, _), _) => true
- case Select(_, _) => true
- case _ => false
- }
- if (isNullaryInvocation) expectedTpe match {
- case MethodType(Nil, restpe) =>
- macroTrace("nullary invocation of a method with an empty parameter list. unwrapping expectedTpe from " + expectedTpe + " to:")(restpe)
- expectedTpe = restpe
- case _ => ;
- }
+ def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
+ def fail(what: String, tree: Tree): Tree = {
+ val err = typer.context.errBuffer.head
+ this.fail(typer, tree, "failed to %s: %s at %s".format(what, err.errMsg, err.errPos))
+ return expandee
+ }
+ val start = startTimer(macroExpandNanos)
+ incCounter(macroExpandCount)
+ try {
+ macroExpand1(typer, expandee) match {
+ case Success(expanded) =>
+ try {
+ var expectedTpe = expandee.tpe
+
+ // [Eugene] weird situation. what's the conventional way to deal with it?
+ val isNullaryInvocation = expandee match {
+ case TypeApply(Select(_, _), _) => true
+ case TypeApply(Ident(_), _) => true
+ case Select(_, _) => true
+ case Ident(_) => true
+ case _ => false
+ }
+ if (isNullaryInvocation) expectedTpe match {
+ case NullaryMethodType(restpe) =>
+ macroTraceVerbose("nullary invocation of a nullary method. unwrapping expectedTpe from " + expectedTpe + " to: ")(restpe)
+ expectedTpe = restpe
+ case MethodType(Nil, restpe) =>
+ macroTraceVerbose("nullary invocation of a method with an empty parameter list. unwrapping expectedTpe from " + expectedTpe + " to: ")(restpe)
+ expectedTpe = restpe
+ case _ => ;
+ }
- var typechecked = typer.context.withImplicitsEnabled(typer.typed(expanded, EXPRmode, expectedTpe))
- if (macroDebug) {
- println("typechecked1:")
- println(typechecked)
- println(showRaw(typechecked))
- }
+ macroLogVerbose("typechecking1 against %s: %s".format(expectedTpe, expanded))
+ var typechecked = typer.context.withImplicitsEnabled(typer.typed(expanded, EXPRmode, expectedTpe))
+ if (typer.context.hasErrors) fail("typecheck against macro def return type", expanded)
+ macroLogVerbose("typechecked1:%n%s%n%s".format(typechecked, showRaw(typechecked)))
- typechecked = typer.context.withImplicitsEnabled(typer.typed(typechecked, EXPRmode, pt))
- if (macroDebug) {
- println("typechecked2:")
- println(typechecked)
- println(showRaw(typechecked))
- }
+ macroLogVerbose("typechecking2 against %s: %s".format(pt, expanded))
+ typechecked = typer.context.withImplicitsEnabled(typer.typed(typechecked, EXPRmode, pt))
+ if (typer.context.hasErrors) fail("typecheck against expected type", expanded)
+ macroLogVerbose("typechecked2:%n%s%n%s".format(typechecked, showRaw(typechecked)))
- typechecked
- } finally {
- openMacros = openMacros.tail
- }
- case Fallback(fallback) =>
- typer.context.withImplicitsEnabled(typer.typed(fallback, EXPRmode, pt))
- case Other(result) =>
- result
+ typechecked
+ } finally {
+ openMacros = openMacros.tail
+ }
+ case Fallback(fallback) =>
+ typer.context.withImplicitsEnabled(typer.typed(fallback, EXPRmode, pt))
+ case Other(result) =>
+ result
+ }
+ } finally {
+ stopTimer(macroExpandNanos, start)
}
+ }
private sealed abstract class MacroExpansionResult extends Product with Serializable
private case class Success(expanded: Tree) extends MacroExpansionResult
private case class Fallback(fallback: Tree) extends MacroExpansionResult
private case class Other(result: Tree) extends MacroExpansionResult
- private def Delay(expandee: Tree) = Other(expandee)
+ private def Delay(expanded: Tree) = Other(expanded)
private def Skip(expanded: Tree) = Other(expanded)
private def Cancel(expandee: Tree) = Other(expandee)
private def Failure(expandee: Tree) = Other(expandee)
private def fail(typer: Typer, expandee: Tree, msg: String = null) = {
- if (macroDebug || macroCopypaste) {
- var msg1 = if (msg contains "exception during macro expansion") msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
- if (macroDebug) msg1 = msg
- println("macro expansion has failed: %s".format(msg1))
- }
- val pos = if (expandee.pos != NoPosition) expandee.pos else openMacros.find(c => c.expandee.pos != NoPosition).map(_.expandee.pos).getOrElse(NoPosition)
+ def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
+ macroLogLite("macro expansion has failed: %s".format(msgForLog))
+ val pos = if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition
if (msg != null) typer.context.error(pos, msg)
typer.infer.setError(expandee)
Failure(expandee)
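
The timer and counter plumbing added to `macroExpand` above boils down to counting the call and timing the body in a try/finally so the clock stops even when expansion throws. A hedged standalone sketch (plain fields stand in for the compiler's `Statistics` timers):

    object TimedExpansionSketch {
      var expandCount = 0L
      var expandNanos = 0L

      def timed[A](body: => A): A = {
        expandCount += 1
        val start = System.nanoTime()
        try body
        finally expandNanos += System.nanoTime() - start
      }

      def main(args: Array[String]): Unit = {
        val r = timed { Thread.sleep(5); "expanded" }
        println(r + " after " + expandCount + " expansion(s), " + expandNanos / 1000 + " microseconds total")
      }
    }
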
@@ -984,185 +1058,206 @@ trait Macros { self: Analyzer =>
/** Does the same as ``macroExpand'', but without typechecking the expansion
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult = {
- // if a macro implementation is incompatible or any of the arguments are erroneous
- // there is no sense to expand the macro itself => it will only make matters worse
- if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
- val reason = if (expandee.symbol.isErroneous) "incompatible macro implementation" else "erroneous arguments"
- macroTrace("cancelled macro expansion because of %s: ".format(reason))(expandee)
- return Cancel(typer.infer.setError(expandee))
- }
+ private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult =
+ // InfoLevel.Verbose examines and prints out infos of symbols
+ // by means of their `this` references these symbols can climb up the lexical scope
+ // when these symbols are examined by a node printer
+ // they will enumerate and analyze their children (ask for infos and tpes)
+ // if one of those children involves macro expansion, things might get nasty
+ // that's why I'm temporarily turning this behavior off
+ withInfoLevel(nodePrinters.InfoLevel.Quiet) {
+ // if a macro implementation is incompatible or any of the arguments are erroneous
+ // there is no sense to expand the macro itself => it will only make matters worse
+ if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
+ val reason = if (expandee.symbol.isErroneous) "incompatible macro implementation" else "erroneous arguments"
+ macroTraceVerbose("cancelled macro expansion because of %s: ".format(reason))(expandee)
+ return Cancel(typer.infer.setError(expandee))
+ }
- if (!isDelayed(expandee)) {
- if (macroDebug || macroCopypaste) println("typechecking macro expansion %s at %s".format(expandee, expandee.pos))
+ macroRuntime(expandee.symbol) match {
+ case Some(runtime) =>
+ macroExpandWithRuntime(typer, expandee, runtime)
+ case None =>
+ macroExpandWithoutRuntime(typer, expandee)
+ }
+ }
+ /** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded
+ * Meant for internal use within the macro infrastructure, don't use it elsewhere.
+ */
+ private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroExpansionResult = {
+ def issueFreeError(sym: FreeSymbol) = {
+ val template = (
+ "Macro expansion contains free @kind@ variable %s. Have you forgotten to use %s? "
+ + "If you have troubles tracking free @kind@ variables, consider using -Xlog-free-@kind@s"
+ )
+ val forgotten = (
+ if (sym.isTerm) "eval when splicing this variable into a reifee"
+ else "c.TypeTag annotation for this type parameter"
+ )
+ typer.context.error(expandee.pos,
+ template.replaceAllLiterally("@kind@", sym.name.nameKind).format(
+ sym.name + " " + sym.origin, forgotten)
+ )
+ }
+ def macroExpandInternal = {
+ val wasDelayed = isDelayed(expandee)
val undetparams = calculateUndetparams(expandee)
- if (undetparams.size != 0) {
- macroTrace("macro expansion is delayed: ")(expandee)
- delayed += expandee -> (typer.context, undetparams)
- Delay(expandee)
- } else {
- val macroDef = expandee.symbol
- macroRuntime(macroDef) match {
- case Some(runtime) =>
- val savedInfolevel = nodePrinters.infolevel
- try {
- // InfoLevel.Verbose examines and prints out infos of symbols
- // by the means of this'es these symbols can climb up the lexical scope
- // when these symbols will be examined by a node printer
- // they will enumerate and analyze their children (ask for infos and tpes)
- // if one of those children involves macro expansion, things might get nasty
- // that's why I'm temporarily turning this behavior off
- nodePrinters.infolevel = nodePrinters.InfoLevel.Quiet
- val args = macroArgs(typer, expandee)
- args match {
- case Some(args) =>
- // adding stuff to openMacros is easy, but removing it is a nightmare
- // it needs to be sprinkled over several different code locations
- val (context: MacroContext) :: _ = args
- openMacros = context :: openMacros
- val expanded: MacroExpansionResult = try {
- val prevNumErrors = reporter.ERROR.count
- val expanded = runtime(args)
- val currNumErrors = reporter.ERROR.count
- if (currNumErrors != prevNumErrors) {
- fail(typer, expandee) // errors have been reported by the macro itself
- } else {
- expanded match {
- case expanded: Expr[_] =>
- if (macroDebug || macroCopypaste) {
- if (macroDebug) println("original:")
- println(expanded.tree)
- println(showRaw(expanded.tree))
- }
-
- freeTerms(expanded.tree) foreach (fte => typer.context.error(expandee.pos,
- ("macro expansion contains free term variable %s %s. "+
- "have you forgot to use eval when splicing this variable into a reifee? " +
- "if you have troubles tracking free term variables, consider using -Xlog-free-terms").format(fte.name, fte.origin)))
- freeTypes(expanded.tree) foreach (fty => typer.context.error(expandee.pos,
- ("macro expansion contains free type variable %s %s. "+
- "have you forgot to use c.TypeTag annotation for this type parameter? " +
- "if you have troubles tracking free type variables, consider using -Xlog-free-types").format(fty.name, fty.origin)))
-
- val currNumErrors = reporter.ERROR.count
- if (currNumErrors != prevNumErrors) {
- fail(typer, expandee)
- } else {
- // inherit the position from the first position-ful expandee in macro callstack
- // this is essential for sane error messages
- var tree = expanded.tree
- var position = openMacros.find(c => c.expandee.pos != NoPosition).map(_.expandee.pos).getOrElse(NoPosition)
- tree = atPos(position.focus)(tree)
-
- // now macro expansion gets typechecked against the macro definition return type
- // however, this happens in macroExpand, not here in macroExpand1
- Success(tree)
- }
- case expanded if expanded.isInstanceOf[Expr[_]] =>
- val msg = "macro must return a compiler-specific expr; returned value is Expr, but it doesn't belong to this compiler's universe"
- fail(typer, expandee, msg)
- case expanded =>
- val msg = "macro must return a compiler-specific expr; returned value is of class: %s".format(expanded.getClass)
- fail(typer, expandee, msg)
- }
- }
- } catch {
- case ex: Throwable =>
- openMacros = openMacros.tail
- throw ex
- }
- if (!expanded.isInstanceOf[Success]) openMacros = openMacros.tail
- expanded
- case None =>
- fail(typer, expandee) // error has been reported by macroArgs
- }
- } catch {
- case ex =>
- // [Eugene] any ideas about how to improve this one?
- val realex = ReflectionUtils.unwrapThrowable(ex)
- realex match {
- case realex: reflect.makro.runtime.AbortMacroException =>
- if (macroDebug || macroCopypaste) println("macro expansion has failed: %s".format(realex.msg))
- fail(typer, expandee) // error has been reported by abort
- case _ =>
- val message = {
- try {
- // the most reliable way of obtaining currently executing method
- // http://stackoverflow.com/questions/442747/getting-the-name-of-the-current-executing-method
- val currentMethodName = new Object(){}.getClass().getEnclosingMethod().getName
- val relevancyThreshold = realex.getStackTrace().indexWhere(este => este.getMethodName == currentMethodName)
- if (relevancyThreshold == -1) None
- else {
- var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
- var framesTillReflectiveInvocationOfMacroImpl = relevantElements.reverse.indexWhere(_.isNativeMethod) + 1
- relevantElements = relevantElements dropRight framesTillReflectiveInvocationOfMacroImpl
-
- realex.setStackTrace(relevantElements)
- val message = new java.io.StringWriter()
- realex.printStackTrace(new java.io.PrintWriter(message))
- Some(EOL + message)
- }
- } catch {
- // if the magic above goes boom, just fall back to uninformative, but better than nothing, getMessage
- case ex: Throwable =>
- None
- }
- } getOrElse realex.getMessage
- fail(typer, expandee, "exception during macro expansion: " + message)
- }
- } finally {
- nodePrinters.infolevel = savedInfolevel
- }
- case None =>
- def notFound() = {
- typer.context.error(expandee.pos, "macro implementation not found: " + macroDef.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)\n" +
- "if you do need to define macro implementations along with the rest of your program, consider two-phase compilation with -Xmacro-fallback-classpath " +
- "in the second phase pointing to the output of the first phase")
- None
+ val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty
+
+ def failExpansion(msg: String = null) = fail(typer, expandee, msg)
+ def performExpansion(args: List[Any]): MacroExpansionResult = {
+ val numErrors = reporter.ERROR.count
+ def hasNewErrors = reporter.ERROR.count > numErrors
+
+ val expanded = runtime(args)
+
+ if (hasNewErrors)
+ failExpansion() // errors have been reported by the macro itself
+ else expanded match {
+ case expanded: Expr[_] =>
+ macroLogVerbose("original:")
+ macroLogLite("" + expanded.tree + "\n" + showRaw(expanded.tree))
+
+ freeTerms(expanded.tree) foreach issueFreeError
+ freeTypes(expanded.tree) foreach issueFreeError
+ if (hasNewErrors) failExpansion()
+
+ // inherit the position from the first position-ful expandee in macro callstack
+ // this is essential for sane error messages
+ // now macro expansion gets typechecked against the macro definition return type
+ // however, this happens in macroExpand, not here in macroExpand1
+ else Success(atPos(enclosingMacroPosition.focus)(expanded.tree))
+ case _ =>
+ failExpansion(
+ "macro must return a compiler-specific expr; returned value is " + (
+                  if (expanded.isInstanceOf[Expr[_]]) "Expr, but it doesn't belong to this compiler's universe"
+                  else "of " + expanded.getClass
+ )
+ )
+ }
+ }
+
+ if (wasDelayed) {
+ if (nowDelayed) Delay(expandee)
+ else Skip(macroExpandAll(typer, expandee))
+ }
+ else {
+ macroLogLite("typechecking macro expansion %s at %s".format(expandee, expandee.pos))
+ macroArgs(typer, expandee).fold(failExpansion(): MacroExpansionResult) {
+ case args @ ((context: MacroContext) :: _) =>
+ if (nowDelayed) {
+ macroLogLite("macro expansion is delayed: %s".format(expandee))
+ delayed += expandee -> undetparams
+ // need to save typer context for `macroExpandAll`
+ // need to save macro context to preserve enclosures
+ expandee attach MacroAttachment(delayed = true, typerContext = typer.context, macroContext = Some(context))
+ Delay(expandee)
}
- def fallBackToOverridden(tree: Tree): Option[Tree] = {
- tree match {
- case Select(qual, name) if (macroDef.isTermMacro) =>
- macroDef.allOverriddenSymbols match {
- case first :: _ =>
- Some(Select(qual, name) setPos tree.pos setSymbol first)
- case _ =>
- macroTrace("macro is not overridden: ")(tree)
- notFound()
- }
- case Apply(fn, args) =>
- fallBackToOverridden(fn) match {
- case Some(fn1) => Some(Apply(fn1, args) setPos tree.pos)
- case _ => None
- }
- case TypeApply(fn, args) =>
- fallBackToOverridden(fn) match {
- case Some(fn1) => Some(TypeApply(fn1, args) setPos tree.pos)
- case _ => None
- }
- case _ =>
- macroTrace("unexpected tree in fallback: ")(tree)
- notFound()
+ else {
+ // adding stuff to openMacros is easy, but removing it is a nightmare
+ // it needs to be sprinkled over several different code locations
+ // why? https://github.com/scala/scala/commit/bd3eacbae21f39b1ac7fe8ade4ed71fa98e1a28d#L2R1137
+ // todo. will be improved
+ openMacros ::= context
+ var isSuccess = false
+ try performExpansion(args) match {
+ case x: Success => isSuccess = true ; x
+ case x => x
+ }
+ finally {
+ expandee.detach(classOf[MacroAttachment])
+ if (!isSuccess) openMacros = openMacros.tail
}
- }
- fallBackToOverridden(expandee) match {
- case Some(tree1) =>
- macroTrace("falling back to ")(tree1)
- currentRun.macroExpansionFailed = true
- Fallback(tree1)
- case None =>
- fail(typer, expandee)
}
}
}
- } else {
- val undetparams = calculateUndetparams(expandee)
- if (undetparams.size != 0)
- Delay(expandee)
- else
- Skip(macroExpandAll(typer, expandee))
+ }
+
+ try macroExpandInternal
+ catch { case ex => handleMacroExpansionException(typer, expandee, ex) }
+ }
+
+ private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroExpansionResult = {
+ val macroDef = expandee.symbol
+ def notFound() = {
+ typer.context.error(expandee.pos, "macro implementation not found: " + macroDef.name + " " +
+ "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)\n" +
+ "if you do need to define macro implementations along with the rest of your program, consider two-phase compilation with -Xmacro-fallback-classpath " +
+ "in the second phase pointing to the output of the first phase")
+ None
+ }
+ def fallBackToOverridden(tree: Tree): Option[Tree] = {
+ tree match {
+ case Select(qual, name) if (macroDef.isTermMacro) =>
+ macroDef.allOverriddenSymbols match {
+ case first :: _ =>
+ Some(Select(qual, name) setPos tree.pos setSymbol first)
+ case _ =>
+ macroTraceVerbose("macro is not overridden: ")(tree)
+ notFound()
+ }
+ case Apply(fn, args) =>
+ fallBackToOverridden(fn) match {
+ case Some(fn1) => Some(Apply(fn1, args) setPos tree.pos)
+ case _ => None
+ }
+ case TypeApply(fn, args) =>
+ fallBackToOverridden(fn) match {
+ case Some(fn1) => Some(TypeApply(fn1, args) setPos tree.pos)
+ case _ => None
+ }
+ case _ =>
+ macroTraceVerbose("unexpected tree in fallback: ")(tree)
+ notFound()
+ }
+ }
+ fallBackToOverridden(expandee) match {
+ case Some(tree1) =>
+ macroTraceLite("falling back to: ")(tree1)
+ currentRun.macroExpansionFailed = true
+ Fallback(tree1)
+ case None =>
+ fail(typer, expandee)
+ }
+ }
+
+ private def handleMacroExpansionException(typer: Typer, expandee: Tree, ex: Throwable): MacroExpansionResult = {
+ // [Eugene] any ideas about how to improve this one?
+ val realex = ReflectionUtils.unwrapThrowable(ex)
+ realex match {
+ case realex: reflect.makro.runtime.AbortMacroException =>
+ macroLogVerbose("macro expansion has failed: %s".format(realex.msg))
+ fail(typer, expandee) // error has been reported by abort
+ case err: TypeError =>
+ macroLogLite("macro expansion has failed: %s at %s".format(err.msg, err.pos))
+ throw err // error should be propagated, don't report
+ case _ =>
+ val message = {
+ try {
+ // [Eugene] is there a better way?
+ // [Paul] See Exceptional.scala and Origins.scala.
+ val relevancyThreshold = realex.getStackTrace().indexWhere(este => este.getMethodName == "macroExpand1")
+ if (relevancyThreshold == -1) None
+ else {
+ var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
+ def isMacroInvoker(este: StackTraceElement) = este.isNativeMethod || (este.getClassName != null && (este.getClassName contains "fastTrack"))
+ var threshold = relevantElements.reverse.indexWhere(isMacroInvoker) + 1
+ while (threshold != relevantElements.length && isMacroInvoker(relevantElements(relevantElements.length - threshold - 1))) threshold += 1
+ relevantElements = relevantElements dropRight threshold
+
+ realex.setStackTrace(relevantElements)
+ val message = new java.io.StringWriter()
+ realex.printStackTrace(new java.io.PrintWriter(message))
+ Some(EOL + message)
+ }
+ } catch {
+ // if the magic above goes boom, just fall back to uninformative, but better than nothing, getMessage
+ case ex: Throwable =>
+ None
+ }
+ } getOrElse realex.getMessage
+ fail(typer, expandee, "exception during macro expansion: " + message)
}
}
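
The exception handler above trims the reported stack trace so that only the frames belonging to the macro implementation survive: everything below the expansion entry point, plus the trailing reflective-invoker frames, is dropped. A minimal standalone sketch of that trimming idea, with an illustrative helper name and predicate rather than the compiler's own API:

    object StackTraceTrimming {
      // Keep frames up to and including the expansion entry point, then drop the
      // trailing frames contributed by the (reflective) invoker of the macro impl.
      def trim(ex: Throwable, entryPoint: String, isInvoker: StackTraceElement => Boolean): Throwable = {
        val trace  = ex.getStackTrace
        val cutoff = trace.indexWhere(_.getMethodName == entryPoint)
        if (cutoff == -1) ex // entry point not on the stack: leave the trace untouched
        else {
          val relevant      = trace.take(cutoff + 1)
          val invokerFrames = relevant.reverse.indexWhere(isInvoker) + 1 // 0 if no invoker frame is found
          ex.setStackTrace(relevant.dropRight(invokerFrames))
          ex
        }
      }
    }
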
@@ -1179,29 +1274,34 @@ trait Macros { self: Analyzer =>
* 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
*/
var hasPendingMacroExpansions = false
- private val delayed = perRunCaches.newWeakMap[Tree, (Context, collection.mutable.Set[Int])]
+ private val delayed = perRunCaches.newWeakMap[Tree, collection.mutable.Set[Int]]
private def isDelayed(expandee: Tree) = delayed contains expandee
private def calculateUndetparams(expandee: Tree): collection.mutable.Set[Int] =
- delayed.get(expandee).map(_._2).getOrElse {
- val calculated = collection.mutable.Set[Int]()
+ delayed.get(expandee).getOrElse {
+ val calculated = collection.mutable.Set[Symbol]()
expandee foreach (sub => {
- def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym.id
+ def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym
if (sub.symbol != null) traverse(sub.symbol)
if (sub.tpe != null) sub.tpe foreach (sub => traverse(sub.typeSymbol))
})
- calculated
+ macroLogVerbose("calculateUndetparams: %s".format(calculated))
+ calculated map (_.id)
}
private val undetparams = perRunCaches.newSet[Int]
- def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = undetparams ++= newUndets map (_.id)
+ def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = {
+ undetparams ++= newUndets map (_.id)
+ if (macroDebugVerbose) newUndets foreach (sym => println("undetParam added: %s".format(sym)))
+ }
def notifyUndetparamsInferred(undetNoMore: List[Symbol], inferreds: List[Type]): Unit = {
undetparams --= undetNoMore map (_.id)
+ if (macroDebugVerbose) (undetNoMore zip inferreds) foreach { case (sym, tpe) => println("undetParam inferred: %s as %s".format(sym, tpe))}
if (!delayed.isEmpty)
delayed.toList foreach {
- case (expandee, (_, undetparams)) if !undetparams.isEmpty =>
+ case (expandee, undetparams) if !undetparams.isEmpty =>
undetparams --= undetNoMore map (_.id)
if (undetparams.isEmpty) {
hasPendingMacroExpansions = true
- macroTrace("macro expansion is pending: ")(expandee)
+ macroTraceVerbose("macro expansion is pending: ")(expandee)
}
case _ =>
// do nothing
@@ -1217,9 +1317,12 @@ trait Macros { self: Analyzer =>
override def transform(tree: Tree) = super.transform(tree match {
// todo. expansion should work from the inside out
case wannabe if (delayed contains wannabe) && calculateUndetparams(wannabe).isEmpty =>
- val (context, _) = delayed(wannabe)
+ val context = wannabe.attachment[MacroAttachment].typerContext
delayed -= wannabe
- macroExpand(newTyper(context), wannabe, WildcardType)
+ context.implicitsEnabled = typer.context.implicitsEnabled
+ context.enrichmentEnabled = typer.context.enrichmentEnabled
+ context.macrosEnabled = typer.context.macrosEnabled
+ macroExpand(newTyper(context), wannabe, EXPRmode, WildcardType)
case _ =>
tree
})
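
The delayed-expansion bookkeeping above parks an expandee until all of its undetermined type parameters have been inferred, at which point macroExpandAll picks it up and retries the expansion. A compiler-free sketch of that pattern, with illustrative names and trees represented as plain strings:

    import scala.collection.mutable

    object DelayedExpansion {
      // tree -> ids of type parameters that are still undetermined
      private val delayed = mutable.Map.empty[String, mutable.Set[Int]]

      def delay(tree: String, undet: Set[Int]): Unit =
        delayed(tree) = mutable.Set(undet.toSeq: _*)

      // called when some parameters get inferred; returns the trees now ready to expand
      def notifyInferred(inferredIds: Set[Int]): List[String] = {
        delayed.values foreach (_ --= inferredIds)
        val ready = delayed.collect { case (tree, undet) if undet.isEmpty => tree }.toList
        delayed --= ready
        ready
      }
    }
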
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index e1c12adbcc..cf94f7d4d6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -8,6 +8,8 @@ package typechecker
import symtab.Flags._
import scala.collection.{ mutable, immutable }
import scala.tools.util.StringOps.{ ojoin }
+import scala.reflect.{ mirror => rm }
+import language.higherKinds
/** Logic related to method synthesis which involves cooperation between
* Namer and Typer.
@@ -20,50 +22,51 @@ trait MethodSynthesis {
import CODE._
object synthesisUtil {
- type M[T] = Manifest[T]
- type CM[T] = ClassManifest[T]
+ type CTT[T] = rm.ConcreteTypeTag[T]
+ type CT[T] = ClassTag[T]
def ValOrDefDef(sym: Symbol, body: Tree) =
if (sym.isLazy) ValDef(sym, body)
else DefDef(sym, body)
- def applyTypeInternal(manifests: List[M[_]]): Type = {
+ def applyTypeInternal(tags: List[CTT[_]]): Type = {
// [Eugene to Paul] needs review!!
- val symbols = manifests map manifestToSymbol
+ val symbols = tags map compilerSymbolFromTag
val container :: args = symbols
val tparams = container.typeConstructor.typeParams
// Conservative at present - if manifests were more usable this could do a lot more.
- require(symbols forall (_ ne NoSymbol), "Must find all manifests: " + symbols)
+ // [Eugene to Paul] all right, they are now. what do you have in mind?
+ require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols)
require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container)
require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args)
appliedType(container, args map (_.tpe): _*)
}
- def companionType[T](implicit m: M[T]) =
+ def companionType[T](implicit m: CTT[T]) =
getRequiredModule(m.erasure.getName).tpe
// Use these like `applyType[List, Int]` or `applyType[Map, Int, String]`
- def applyType[CC](implicit m1: M[CC]): Type =
+ def applyType[CC](implicit m1: CTT[CC]): Type =
applyTypeInternal(List(m1))
- def applyType[CC[X1], X1](implicit m1: M[CC[_]], m2: M[X1]): Type =
+ def applyType[CC[X1], X1](implicit m1: CTT[CC[_]], m2: CTT[X1]): Type =
applyTypeInternal(List(m1, m2))
- def applyType[CC[X1, X2], X1, X2](implicit m1: M[CC[_,_]], m2: M[X1], m3: M[X2]): Type =
+ def applyType[CC[X1, X2], X1, X2](implicit m1: CTT[CC[_,_]], m2: CTT[X1], m3: CTT[X2]): Type =
applyTypeInternal(List(m1, m2, m3))
- def applyType[CC[X1, X2, X3], X1, X2, X3](implicit m1: M[CC[_,_,_]], m2: M[X1], m3: M[X2], m4: M[X3]): Type =
+ def applyType[CC[X1, X2, X3], X1, X2, X3](implicit m1: CTT[CC[_,_,_]], m2: CTT[X1], m3: CTT[X2], m4: CTT[X3]): Type =
applyTypeInternal(List(m1, m2, m3, m4))
- def newMethodType[F](owner: Symbol)(implicit m: Manifest[F]): Type = {
- val fnSymbol = manifestToSymbol(m)
- assert(fnSymbol isSubClass FunctionClass(m.tpe.typeArguments.size - 1), (owner, m))
+ def newMethodType[F](owner: Symbol)(implicit t: CTT[F]): Type = {
+ val fnSymbol = compilerSymbolFromTag(t)
+ assert(fnSymbol isSubClass FunctionClass(t.tpe.typeArguments.size - 1), (owner, t))
// [Eugene to Paul] needs review!!
// val symbols = m.typeArguments map (m => manifestToSymbol(m))
// val formals = symbols.init map (_.typeConstructor)
- val formals = manifestToType(m).typeArguments
+ val formals = compilerTypeFromTag(t).typeArguments
val params = owner newSyntheticValueParams formals
MethodType(params, formals.last)
}
@@ -231,11 +234,16 @@ trait MethodSynthesis {
// TODO: need to shuffle annotations between wrapper and class.
val wrapper = ImplicitClassWrapper(cd)
val meth = wrapper.derivedSym
- val mdef = context.unit.synthetics(meth)
- context.unit.synthetics -= meth
- meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, false)
- cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, true)
- List(cd, mdef)
+ context.unit.synthetics get meth match {
+ case Some(mdef) =>
+ context.unit.synthetics -= meth
+ meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, false)
+ cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, true)
+ List(cd, mdef)
+ case _ =>
+ // Shouldn't happen, but let's give ourselves a reasonable error when it does
+ abort("No synthetics for " + meth + ": synthetics contains " + context.unit.synthetics.keys.mkString(", "))
+ }
case _ =>
List(stat)
}
@@ -245,13 +253,12 @@ trait MethodSynthesis {
else List(Getter(vd))
)
def beanAccessors(vd: ValDef): List[DerivedFromValDef] = {
+ val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil
if (forMSIL) Nil
- else if (vd.symbol hasAnnotation BeanPropertyAttr) {
- if (vd.mods.isMutable) List(BeanGetter(vd), BeanSetter(vd))
- else List(BeanGetter(vd))
- }
+ else if (vd.symbol hasAnnotation BeanPropertyAttr)
+ BeanGetter(vd) :: setter
else if (vd.symbol hasAnnotation BooleanBeanPropertyAttr)
- List(BooleanBeanGetter(vd))
+ BooleanBeanGetter(vd) :: setter
else Nil
}
def allValDefDerived(vd: ValDef) = {
@@ -371,13 +378,15 @@ trait MethodSynthesis {
def completer(sym: Symbol): Type = ??? // not needed
def createAndEnterSymbol(): Symbol = enterSyntheticSym(derivedTree)
def derivedSym: Symbol = {
- val result = enclClass.info decl name
+ // Only methods will do! Don't want to pick up any stray
+ // companion objects of the same name.
+ val result = enclClass.info decl name suchThat (_.isMethod)
assert(result != NoSymbol, "not found: "+name+" in "+enclClass+" "+enclClass.info.decls)
result
}
def derivedTree: DefDef =
factoryMeth(mods & flagsMask | flagsExtra, name, tree, symbolic = false)
- def flagsExtra: Long = METHOD | IMPLICIT
+ def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC
def flagsMask: Long = AccessFlags
def name: TermName = tree.name.toTermName
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
index 48068b58d4..3eff5ef024 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
@@ -105,7 +105,7 @@ trait Modes {
final def inFunMode(mode: Int) = (mode & FUNmode) != 0
final def inPolyMode(mode: Int) = (mode & POLYmode) != 0
final def inPatternMode(mode: Int) = (mode & PATTERNmode) != 0
-
+ final def inExprModeOr(mode: Int, others: Int) = (mode & (EXPRmode | others)) != 0
final def inExprModeButNot(mode: Int, prohibited: Int) =
(mode & (EXPRmode | prohibited)) == EXPRmode
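
Both tests are plain bitmask checks on the typer's mode word; a small self-contained illustration (the constant values below are made up for the example, only the bit logic matters):

    object ModeBitsDemo {
      final val EXPRmode = 0x001
      final val FUNmode  = 0x010
      final val HKmode   = 0x100

      // true if EXPRmode or any of `others` is present
      def inExprModeOr(mode: Int, others: Int) = (mode & (EXPRmode | others)) != 0
      // true if EXPRmode is present and none of `prohibited` are
      def inExprModeButNot(mode: Int, prohibited: Int) = (mode & (EXPRmode | prohibited)) == EXPRmode

      def main(args: Array[String]): Unit = {
        assert(inExprModeOr(FUNmode, FUNmode))                 // matches on `others` alone
        assert(inExprModeOr(EXPRmode, HKmode))                 // or on EXPRmode alone
        assert(inExprModeButNot(EXPRmode, FUNmode))            // EXPRmode without FUNmode
        assert(!inExprModeButNot(EXPRmode | FUNmode, FUNmode)) // rejected once FUNmode appears
      }
    }
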
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index ffd00751e0..4eba665b93 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -139,16 +139,9 @@ trait Namers extends MethodSynthesis {
|| vd.symbol.isLazy
)
- def setPrivateWithin[Sym <: Symbol](tree: Tree, sym: Sym, mods: Modifiers): Sym =
+ def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): T =
if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym
- else sym setPrivateWithin (
- typer.qualifyingClass(tree, mods.privateWithin, true) match {
- case None =>
- NoSymbol
- case Some(sym) =>
- sym
- }
- )
+ else sym setPrivateWithin typer.qualifyingClass(tree, mods.privateWithin, packageOK = true)
def setPrivateWithin(tree: MemberDef, sym: Symbol): Symbol =
setPrivateWithin(tree, sym, tree.mods)
@@ -160,24 +153,14 @@ trait Namers extends MethodSynthesis {
def moduleClassFlags(moduleFlags: Long) =
(moduleFlags & ModuleToClassFlags) | inConstructorFlag
- private def resetKeepingFlags(sym: Symbol, keeping: Long): Symbol = {
- val keep = sym.flags & keeping
- sym reset NoType
- sym setFlag keep
- }
-
def updatePosFlags(sym: Symbol, pos: Position, flags: Long): Symbol = {
debuglog("[overwrite] " + sym)
- resetKeepingFlags(sym, LOCKED)
- sym setFlag flags
- sym setPos pos
-
- if (sym.isModule && sym.moduleClass != NoSymbol)
- updatePosFlags(sym.moduleClass, pos, moduleClassFlags(flags))
+ val newFlags = (sym.flags & LOCKED) | flags
+ sym reset NoType setFlag newFlags setPos pos
+ sym.moduleClass andAlso (updatePosFlags(_, pos, moduleClassFlags(flags)))
if (sym.owner.isPackageClass) {
- val companion = companionSymbolOf(sym, context)
- if (companion != NoSymbol) {
+ companionSymbolOf(sym, context) andAlso { companion =>
val assignNoType = companion.rawInfo match {
case _: SymLoader => true
case tp => tp.isComplete && (runId(sym.validTo) != currentRunId)
@@ -400,9 +383,7 @@ trait Namers extends MethodSynthesis {
if (m.isModule && !m.isPackage && inCurrentScope(m) && (currentRun.canRedefine(m) || m.isSynthetic)) {
updatePosFlags(m, tree.pos, moduleFlags)
setPrivateWithin(tree, m)
- if (m.moduleClass != NoSymbol)
- setPrivateWithin(tree, m.moduleClass)
-
+ m.moduleClass andAlso (setPrivateWithin(tree, _))
context.unit.synthetics -= m
tree.symbol = m
}
@@ -475,8 +456,7 @@ trait Namers extends MethodSynthesis {
val defSym = context.prefix.member(to) filter (
sym => sym.exists && context.isAccessible(sym, context.prefix, false))
- if (defSym != NoSymbol)
- typer.permanentlyHiddenWarning(pos, to0, defSym)
+ defSym andAlso (typer.permanentlyHiddenWarning(pos, to0, _))
}
}
if (!tree.symbol.isSynthetic && expr.symbol != null && !expr.symbol.isInterpreterWrapper) {
@@ -493,12 +473,8 @@ trait Namers extends MethodSynthesis {
if (from != nme.WILDCARD && base != ErrorType) {
if (isValid(from)) {
- if (currentRun.compileSourceFor(expr, from)) {
- // side effecting, apparently
- typeSig(tree)
- }
// for Java code importing Scala objects
- else if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) {
+ if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) {
typer.TyperErrorGen.NotAMemberError(tree, expr, from)
typer.infer.setError(tree)
}
@@ -587,7 +563,15 @@ trait Namers extends MethodSynthesis {
assignAndEnterFinishedSymbol(tree)
else
enterGetterSetter(tree)
+
+ // When java enums are read from bytecode, they are known to have
+ // constant types by the jvm flag and assigned accordingly. When
+ // they are read from source, the java parser marks them with the
+ // STABLE flag, and now we receive that signal.
+ if (tree.symbol hasAllFlags STABLE | JAVA)
+ tree.symbol setInfo ConstantType(Constant(tree.symbol))
}
+
def enterLazyVal(tree: ValDef, lazyAccessor: Symbol): TermSymbol = {
// If the owner is not a class, this is a lazy val from a method,
// with no associated field. It has an accessor with $lzy appended to its name and
@@ -628,11 +612,12 @@ trait Namers extends MethodSynthesis {
def enterClassDef(tree: ClassDef) {
val ClassDef(mods, name, tparams, impl) = tree
+ val primaryConstructorArity = treeInfo.firstConstructorArgs(impl.body).size
tree.symbol = enterClassSymbol(tree)
tree.symbol setInfo completerOf(tree)
if (mods.isCase) {
- if (treeInfo.firstConstructorArgs(impl.body).size > MaxFunctionArity)
+ if (primaryConstructorArity > MaxFunctionArity)
MaxParametersCaseClassError(tree)
val m = ensureCompanionObject(tree, caseModuleDef)
@@ -647,7 +632,7 @@ trait Namers extends MethodSynthesis {
classAndNamerOfModule(m) = (tree, null)
}
val owner = tree.symbol.owner
- if (owner.isPackageObjectClass) {
+ if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
context.unit.warning(tree.pos,
"it is not recommended to define classes/objects inside of package objects.\n" +
"If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead."
@@ -656,8 +641,11 @@ trait Namers extends MethodSynthesis {
// Suggested location only.
if (mods.isImplicit) {
- log("enter implicit wrapper "+tree+", owner = "+owner)
- enterImplicitWrapper(tree)
+ if (primaryConstructorArity == 1) {
+ log("enter implicit wrapper "+tree+", owner = "+owner)
+ enterImplicitWrapper(tree)
+ }
+ else context.unit.error(tree.pos, "implicit classes must accept exactly one primary constructor parameter")
}
}
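
With this change the implicit wrapper method is only synthesized when the class has exactly one primary constructor parameter; any other arity is rejected with the error above. A minimal user-level example of the accepted shape (names are illustrative):

    object ImplicitWrapperExample {
      // exactly one primary constructor parameter: eligible for enterImplicitWrapper
      implicit class RichInt(val self: Int) {
        def squared: Int = self * self
      }

      // a zero- or two-parameter implicit class would now trigger the error
      // "implicit classes must accept exactly one primary constructor parameter"

      def main(args: Array[String]): Unit =
        println(3.squared) // prints 9
    }
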
@@ -669,10 +657,9 @@ trait Namers extends MethodSynthesis {
protected def enterExistingSym(sym: Symbol): Context = {
if (forInteractive && sym != null && sym.owner.isTerm) {
enterIfNotThere(sym)
- if (sym.isLazy) {
- val acc = sym.lazyAccessor
- if (acc != NoSymbol) enterIfNotThere(acc)
- }
+ if (sym.isLazy)
+ sym.lazyAccessor andAlso enterIfNotThere
+
defaultParametersOfMethod(sym) foreach { symRef => enterIfNotThere(symRef()) }
}
this.context
@@ -782,7 +769,10 @@ trait Namers extends MethodSynthesis {
val tpe1 = dropRepeatedParamType(tpe.deconst)
val tpe2 = tpe1.widen
- if (sym.isVariable || sym.isMethod && !sym.hasAccessorFlag)
+ // This infers Foo.type instead of "object Foo"
+ // See Infer#adjustTypeArgs for the polymorphic case.
+ if (tpe.typeSymbolDirect.isModuleClass) tpe1
+ else if (sym.isVariable || sym.isMethod && !sym.hasAccessorFlag)
if (tpe2 <:< pt) tpe2 else tpe1
else if (isHidden(tpe)) tpe2
// In an attempt to make pattern matches involving method local vals
@@ -801,8 +791,8 @@ trait Namers extends MethodSynthesis {
val typedBody =
if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
else defnTyper.computeType(tree.rhs, pt)
- val sym = if (owner.isMethod) owner else tree.symbol
- val typedDefn = widenIfNecessary(sym, typedBody, pt)
+
+ val typedDefn = widenIfNecessary(tree.symbol, typedBody, pt)
assignTypeToTree(tree, typedDefn)
}
@@ -1312,14 +1302,18 @@ trait Namers extends MethodSynthesis {
if (expr1.symbol != null && expr1.symbol.isRootPackage)
RootImportError(tree)
- val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
- checkSelectors(newImport)
- transformed(tree) = newImport
- // copy symbol and type attributes back into old expression
- // so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
- ImportType(expr1)
+ if (expr1.isErrorTyped)
+ ErrorType
+ else {
+ val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
+ checkSelectors(newImport)
+ transformed(tree) = newImport
+ // copy symbol and type attributes back into old expression
+ // so that the structure builder will find it.
+ expr.symbol = expr1.symbol
+ expr.tpe = expr1.tpe
+ ImportType(expr1)
+ }
}
val result =
@@ -1368,6 +1362,11 @@ trait Namers extends MethodSynthesis {
/** Convert Java generic array type T[] to (T with Object)[]
* (this is necessary because such arrays have a representation which is incompatible
* with arrays of primitive types.)
+ *
+ * @note the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
+ * if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
+ *
+ * see also sigToType
*/
private object RestrictJavaArraysMap extends TypeMap {
def apply(tp: Type): Type = tp match {
@@ -1424,6 +1423,8 @@ trait Namers extends MethodSynthesis {
fail(LazyAndEarlyInit)
if (sym.info.typeSymbol == FunctionClass(0) && sym.isValueParameter && sym.owner.isCaseClass)
fail(ByNameParameter)
+ if (sym.isTrait && sym.isFinal && !sym.isSubClass(AnyValClass))
+ checkNoConflict(ABSTRACT, FINAL)
if (sym.isDeferred) {
// Is this symbol type always allowed the deferred flag?
@@ -1547,13 +1548,13 @@ trait Namers extends MethodSynthesis {
* call this method?
*/
def companionSymbolOf(original: Symbol, ctx: Context): Symbol = {
- try original.companionSymbol match {
- case NoSymbol =>
+ try {
+ original.companionSymbol orElse {
ctx.lookup(original.name.companionName, original.owner).suchThat(sym =>
(original.isTerm || sym.hasModuleFlag) &&
(sym isCoDefinedWith original)
)
- case sym => sym
+ }
}
catch {
case e: InvalidCompanions =>
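
companionSymbolOf now leans on the symbol combinators orElse (fall back when the lookup comes up empty) and andAlso (run a side effect only on a real symbol), the same idiom used in several hunks above. A generic sketch of that null-object style, with a stand-in Sym type rather than the compiler's Symbol:

    object NullObjectCombinators {
      class Sym(val name: String) {
        def exists: Boolean = this ne NoSym
        // run a side effect only when the symbol is "real", then return it unchanged
        def andAlso(f: Sym => Unit): Sym = { if (exists) f(this); this }
        // fall back to an alternative when the symbol is missing
        def orElse(alt: => Sym): Sym = if (exists) this else alt
      }
      object NoSym extends Sym("<none>")

      def main(args: Array[String]): Unit = {
        val companion = (NoSym: Sym) orElse new Sym("companion of Foo")
        companion andAlso (s => println("found " + s.name)) // prints: found companion of Foo
      }
    }
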
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 4d84bf4af2..932e4548ef 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -39,16 +39,16 @@ trait NamesDefaults { self: Analyzer =>
def isNamed(arg: Tree) = nameOf(arg).isDefined
/** @param pos maps indices from old to new */
- def reorderArgs[T: ClassManifest](args: List[T], pos: Int => Int): List[T] = {
+ def reorderArgs[T: ArrayTag](args: List[T], pos: Int => Int): List[T] = {
val res = new Array[T](args.length)
foreachWithIndex(args)((arg, index) => res(pos(index)) = arg)
res.toList
}
/** @param pos maps indices from new to old (!) */
- def reorderArgsInv[T: ClassManifest](args: List[T], pos: Int => Int): List[T] = {
+ def reorderArgsInv[T: ArrayTag](args: List[T], pos: Int => Int): List[T] = {
val argsArray = args.toArray
- argsArray.indices map (i => argsArray(pos(i))) toList
+ (argsArray.indices map (i => argsArray(pos(i)))).toList
}
/** returns `true` if every element is equal to its index */
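
reorderArgs permutes a list of arguments into definition order via a mapping from written index to definition index; the context bound only exists so a properly typed array can be allocated (ArrayTag was a transitional name from this period, so the sketch below uses ClassTag, its surviving equivalent). A small usage sketch:

    import scala.reflect.ClassTag

    object ReorderDemo {
      // pos maps an argument's written position to its position in the definition
      def reorderArgs[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
        val res = new Array[T](args.length)
        args.zipWithIndex foreach { case (arg, i) => res(pos(i)) = arg }
        res.toList
      }

      def main(args: Array[String]): Unit = {
        // "c" was written first but belongs at definition index 2, and so on
        println(reorderArgs(List("c", "a", "b"), Map(0 -> 2, 1 -> 0, 2 -> 1)))
        // prints: List(a, b, c)
      }
    }
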
@@ -155,21 +155,23 @@ trait NamesDefaults { self: Analyzer =>
val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo qual.tpe
blockTyper.context.scope enter sym
val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
+ // it stays in Vegas: SI-5720, SI-5727
+ qual changeOwner (blockTyper.context.owner -> sym)
+ val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name)))
var baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
- // don't use treeCopy: it would assign opaque position.
- val f = Select(gen.mkAttributedRef(sym), selected)
- .setType(baseFun1.tpe).setSymbol(baseFun1.symbol)
+ // setSymbol below is important because the 'selected' function might be overloaded. by
+ // assigning the correct method symbol, typedSelect will just assign the type. the reason
+ // to still call 'typed' is to correctly infer singleton types, SI-5259.
+ val f = blockTyper.typedOperator(Select(newQual, selected).setSymbol(baseFun1.symbol))
if (funTargs.isEmpty) f
else TypeApply(f, funTargs).setType(baseFun.tpe)
}
val b = Block(List(vd), baseFunTransformed)
.setType(baseFunTransformed.tpe).setPos(baseFun.pos)
-
- val defaultQual = Some(atPos(qual.pos.focus)(gen.mkAttributedRef(sym)))
context.namedApplyBlockInfo =
- Some((b, NamedApplyInfo(defaultQual, defaultTargs, Nil, blockTyper)))
+ Some((b, NamedApplyInfo(Some(newQual), defaultTargs, Nil, blockTyper)))
b
}
@@ -258,19 +260,18 @@ trait NamesDefaults { self: Analyzer =>
def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
val context = blockTyper.context
val symPs = map2(args, paramTypes)((arg, tpe) => {
- val byName = isByNameParamType(tpe)
- val (argTpe, repeated) =
- if (isScalaRepeatedParamType(tpe)) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) =>
- (expr.tpe, true)
- case _ =>
- (seqType(arg.tpe), true)
- } else (arg.tpe, false)
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos)
- val valType = if (byName) functionType(List(), argTpe)
- else if (repeated) argTpe
- else argTpe
- s.setInfo(valType)
+ val byName = isByNameParamType(tpe)
+ val repeated = isScalaRepeatedParamType(tpe)
+ val argTpe = (
+ if (repeated) arg match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
+ case _ => seqType(arg.tpe)
+ }
+ else arg.tpe
+ ).widen // have to widen or types inferred from literal defaults will be singletons
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
+ if (byName) functionType(Nil, argTpe) else argTpe
+ )
(context.scope.enter(s), byName, repeated)
})
map2(symPs, args) {
@@ -336,7 +337,7 @@ trait NamesDefaults { self: Analyzer =>
// cannot call blockTyper.typedBlock here, because the method expr might be partially applied only
val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt)
res.setPos(res.pos.makeTransparent)
- val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos)
+ val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
context.namedApplyBlockInfo =
Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
@@ -444,21 +445,12 @@ trait NamesDefaults { self: Analyzer =>
}
}
- /** Fast path for ambiguous assignment check.
- */
- private def isNameInScope(context: Context, name: Name) = (
- context.enclosingContextChain exists (ctx =>
- (ctx.scope.lookupEntry(name) != null)
- || (ctx.owner.rawInfo.member(name) != NoSymbol)
- )
- )
-
/** A full type check is very expensive; let's make sure there's a name
* somewhere which could potentially be ambiguous before we go that route.
*/
private def isAmbiguousAssignment(typer: Typer, param: Symbol, arg: Tree) = {
import typer.context
- isNameInScope(context, param.name) && {
+ (context isNameInScope param.name) && {
// for named arguments, check whether the assignment expression would
// typecheck. if it does, report an ambiguous error.
val paramtpe = param.tpe.cloneInfo(param)
@@ -485,7 +477,18 @@ trait NamesDefaults { self: Analyzer =>
// instead of arg, but can't do that because eventually setType(ErrorType)
// is called, and EmptyTree can only be typed NoType. Thus we need to
// disable conforms as a view...
- try typer.silent(_.typed(arg, subst(paramtpe))) match {
+ val errsBefore = reporter.ERROR.count
+ try typer.silent { tpr =>
+ val res = tpr.typed(arg, subst(paramtpe))
+        // better warning for SI-5044: if `silent` was not actually silent, give the user a hint
+        // [H]: the reason `silent` is not silent is that the cyclic reference exception is
+        // thrown in a context completely different from `context` here. The exception happens while
+        // completing the type, and the TypeCompleter is created/run with a non-silent Namer `context`,
+        // and there is currently no way to connect the two without going through some global state.
+ if (errsBefore < reporter.ERROR.count)
+ WarnAfterNonSilentRecursiveInference(param, arg)(context)
+ res
+ } match {
case SilentResultValue(t) => !t.isErroneous // #4041
case _ => false
}
@@ -494,7 +497,7 @@ trait NamesDefaults { self: Analyzer =>
// CyclicReferences. Fix for #3685
case cr @ CyclicReference(sym, _) =>
(sym.name == param.name) && sym.accessedOrSelf.isVariable && {
- NameClashError(sym, arg)(typer.context)
+ NameClashError(sym, arg)(context)
true
}
}
@@ -514,7 +517,7 @@ trait NamesDefaults { self: Analyzer =>
// maps indices from (order written by user) to (order of definition)
val argPos = Array.fill(args.length)(-1)
var positionalAllowed = true
- val namelessArgs = mapWithIndex(args) { (arg, index) =>
+ val namelessArgs = mapWithIndex(args) { (arg, argIndex) =>
arg match {
case arg @ AssignOrNamedArg(Ident(name), rhs) =>
def matchesName(param: Symbol) = !param.isSynthetic && (
@@ -526,30 +529,35 @@ trait NamesDefaults { self: Analyzer =>
case _ => false
})
)
- val pos = params indexWhere matchesName
- if (pos == -1) {
+ val paramPos = params indexWhere matchesName
+ if (paramPos == -1) {
if (positionalAllowed) {
- argPos(index) = index
+ argPos(argIndex) = argIndex
// prevent isNamed from being true when calling doTypedApply recursively,
// treat the arg as an assignment of type Unit
Assign(arg.lhs, rhs) setPos arg.pos
}
else UnknownParameterNameNamesDefaultError(arg, name)
}
- else if (argPos contains pos)
- DoubleParamNamesDefaultError(arg, name)
- else if (isAmbiguousAssignment(typer, params(pos), arg))
+ else if (argPos contains paramPos) {
+ val existingArgIndex = argPos.indexWhere(_ == paramPos)
+ val otherName = args(paramPos) match {
+ case AssignOrNamedArg(Ident(oName), rhs) if oName != name => Some(oName)
+ case _ => None
+ }
+ DoubleParamNamesDefaultError(arg, name, existingArgIndex+1, otherName)
+ } else if (isAmbiguousAssignment(typer, params(paramPos), arg))
AmbiguousReferenceInNamesDefaultError(arg, name)
else {
// if the named argument is on the original parameter
// position, positional after named is allowed.
- if (index != pos)
+ if (argIndex != paramPos)
positionalAllowed = false
- argPos(index) = pos
+ argPos(argIndex) = paramPos
rhs
}
case _ =>
- argPos(index) = index
+ argPos(argIndex) = argIndex
if (positionalAllowed) arg
else PositionalAfterNamedNamesDefaultError(arg)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
deleted file mode 100644
index aff8368f75..0000000000
--- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
+++ /dev/null
@@ -1,1739 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Adriaan Moors
- */
-
-package scala.tools.nsc
-package typechecker
-
-import symtab._
-import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC}
-
-/** Translate pattern matching into method calls (these methods form a zero-plus monad), similar in spirit to how for-comprehensions are compiled.
- *
- * For each case, express all patterns as extractor calls, guards as 0-ary extractors, and sequence them using `flatMap`
- * (lifting the body of the case into the monad using `one`).
- *
- * Cases are combined into a pattern match using the `orElse` combinator (the implicit failure case is expressed using the monad's `zero`).
-
- * TODO:
- * - interaction with CPS
- * - Array patterns
- * - implement spec more closely (see TODO's)
- * - DCE
- * - use manifests for type testing
- *
- * (longer-term) TODO:
- * - user-defined unapplyProd
- * - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
- * - recover exhaustivity and unreachability checking using a variation on the type-safe builder pattern
- */
-trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
- import global._
- import definitions._
-
- val SYNTH_CASE = Flags.CASE | SYNTHETIC
-
- object vpmName {
- val one = newTermName("one")
- val drop = newTermName("drop")
- val flatMap = newTermName("flatMap")
- val get = newTermName("get")
- val guard = newTermName("guard")
- val isEmpty = newTermName("isEmpty")
- val orElse = newTermName("orElse")
- val outer = newTermName("<outer>")
- val runOrElse = newTermName("runOrElse")
- val zero = newTermName("zero")
- val _match = newTermName("__match") // don't call it __match, since that will trigger virtual pattern matching...
-
- def counted(str: String, i: Int) = newTermName(str+i)
- }
-
- object MatchTranslator {
- def apply(typer: Typer): MatchTranslation with CodegenCore = {
- import typer._
- // typing `_match` to decide which MatchTranslator to create adds 4% to quick.comp.timer
- newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
- case SilentResultValue(ms) => new PureMatchTranslator(typer, ms)
- case _ => new OptimizingMatchTranslator(typer)
- }
- }
- }
-
- class PureMatchTranslator(val typer: Typer, val matchStrategy: Tree) extends MatchTranslation with TreeMakers with PureCodegen
- class OptimizingMatchTranslator(val typer: Typer) extends MatchTranslation with TreeMakers with MatchOptimizations
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// talking to userland
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- /** Interface with user-defined match monad?
- * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
-
- type Matcher[P[_], M[+_], A] = {
- def flatMap[B](f: P[A] => M[B]): M[B]
- def orElse[B >: A](alternative: => M[B]): M[B]
- }
-
- abstract class MatchStrategy[P[_], M[+_]] {
- // runs the matcher on the given input
- def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U]
-
- def zero: M[Nothing]
- def one[T](x: P[T]): M[T]
- def guard[T](cond: P[Boolean], then: => P[T]): M[T]
- def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt
- }
-
- * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
-
-
- * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
-
- object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
- def zero = None
- def one[T](x: T) = Some(x)
- // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
- def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
- def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
- def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty
- }
-
- */
- trait MatchMonadInterface {
- val typer: Typer
- val matchOwner = typer.context.owner
-
- def inMatchMonad(tp: Type): Type
- def pureType(tp: Type): Type
- final def matchMonadResult(tp: Type): Type =
- tp.baseType(matchMonadSym).typeArgs match {
- case arg :: Nil => arg
- case _ => ErrorType
- }
-
- protected def matchMonadSym: Symbol
- }
-
- trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore =>
- import typer.{typed, context, silent, reallyExists}
-
- /** Implement a pattern match by turning its cases (including the implicit failure case)
- * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
- *
- * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape
- * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))`
- *
- * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
- * thus, you must typecheck the result (and that will in turn translate nested matches)
- * this could probably optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
- */
- def translateMatch(scrut: Tree, cases: List[CaseDef], pt: Type, scrutType: Type, matchFailGenOverride: Option[Tree => Tree] = None): Tree = {
- // we don't transform after typers
- // (that would require much more sophistication when generating trees,
- // and the only place that emits Matches after typers is for exception handling anyway)
- assert(phase.id <= currentRun.typerPhase.id, phase)
-
- val scrutSym = freshSym(scrut.pos, pureType(scrutType)) setFlag SYNTH_CASE
- // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
- combineCases(scrut, scrutSym, cases map translateCase(scrutSym, pt), pt, matchOwner, matchFailGenOverride)
- }
-
- // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
- // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
- // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
- // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
- // unlike translateMatch, we type our result before returning it
- def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
- // if they're already simple enough to be handled by the back-end, we're done
- if (caseDefs forall treeInfo.isCatchCase) caseDefs
- else {
- val swatches = { // switch-catches
- val bindersAndCases = caseDefs map { caseDef =>
- // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
- // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
- val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
- }
-
- for(cases <- emitTypeSwitch(bindersAndCases, pt) toList;
- if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
- cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
- }
-
- val catches = if (swatches nonEmpty) swatches else {
- val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
-
- val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
-
- List(
- atPos(pos) {
- CaseDef(
- Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
- EmptyTree,
- combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
- )
- })
- }
-
- typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
- }
-
-
-
- /** The translation of `pat if guard => body` has two aspects:
- * 1) the substitution due to the variables bound by patterns
- * 2) the combination of the extractor calls using `flatMap`.
- *
- * 2) is easy -- it looks like: `translatePattern_1.flatMap(translatePattern_2....flatMap(translatePattern_N.flatMap(translateGuard.flatMap((x_i) => success(Xbody(x_i)))))...)`
- * this must be right-leaning tree, as can be seen intuitively by considering the scope of bound variables:
- * variables bound by pat_1 must be visible from the function inside the left-most flatMap right up to Xbody all the way on the right
- * 1) is tricky because translatePattern_i determines the shape of translatePattern_i+1:
- * zoom in on `translatePattern_1.flatMap(translatePattern_2)` for example -- it actually looks more like:
- * `translatePattern_1(x_scrut).flatMap((x_1) => {y_i -> x_1._i}translatePattern_2)`
- *
- * `x_1` references the result (inside the monad) of the extractor corresponding to `pat_1`,
- * this result holds the values for the constructor arguments, which translatePattern_1 has extracted
- * from the object pointed to by `x_scrut`. The `y_i` are the symbols bound by `pat_1` (in order)
- * in the scope of the remainder of the pattern, and they must thus be replaced by:
- * - (for 1-ary unapply) x_1
- * - (for n-ary unapply, n > 1) selection of the i'th tuple component of `x_1`
- * - (for unapplySeq) x_1.apply(i)
- *
- * in the treemakers,
- *
- * Thus, the result type of `translatePattern_i`'s extractor must conform to `M[(T_1,..., T_n)]`.
- *
- * Operationally, phase 1) is a foldLeft, since we must consider the depth-first-flattening of
- * the transformed patterns from left to right. For every pattern ast node, it produces a transformed ast and
- * a function that will take care of binding and substitution of the next ast (to the right).
- *
- */
- def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
- translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
- }
-
- def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
- // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
- type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
- @inline def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
- @inline def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
-
- val pos = patTree.pos
-
- def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
- if (!extractor.isTyped) throw new TypeError(pos, "Could not typecheck extractor call: "+ extractor)
- // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
-
- // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
- // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
- // (it will later result in a type test when `tp` is not a subtype of `b.info`)
- // TODO: can we simplify this, together with the Bound case?
- (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) => b setInfo tp } // println("changing "+ b +" : "+ b.info +" -> "+ tp);
-
- // println("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
- // example check: List[Int] <:< ::[Int]
- // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- val (typeTestTreeMaker, patBinderOrCasted) =
- if (needsTypeTest(patBinder.info.widen, extractor.paramType)) {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, extractor.paramType, pos)
- (List(treeMaker), treeMaker.nextBinder)
- } else (Nil, patBinder)
-
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, pos), extractor.subBindersAndPatterns: _*)
- }
-
-
- object MaybeBoundTyped {
- /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
- * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
- * The returned type is the one inferred by inferTypedPattern (`owntype`)
- *
- * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
- */
- def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
- case Bound(subpatBinder, typed@Typed(expr, tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
- case Bind(_, typed@Typed(expr, tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
- case Typed(expr, tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
- case _ => None
- }
- }
-
- val (treeMakers, subpats) = patTree match {
- // skip wildcard trees -- no point in checking them
- case WildcardPattern() => noFurtherSubPats()
- case UnApply(unfun, args) =>
- // TODO: check unargs == args
- // println("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
- translateExtractorPattern(ExtractorCall(unfun, args))
-
- /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
- It consists of a stable identifier c, followed by element patterns p1, ..., pn.
- The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
-
- If the case class is monomorphic, then it must conform to the expected type of the pattern,
- and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
-
- If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
- The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
-
- The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
- A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
- **/
- case Apply(fun, args) =>
- ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
- error("cannot find unapply member for "+ fun +" with args "+ args)
- noFurtherSubPats()
- }
-
- /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
- The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
- This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
- **/
- // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
- case MaybeBoundTyped(subPatBinder, pt) =>
- // a typed pattern never has any subtrees
- noFurtherSubPats(TypeAndEqualityTestTreeMaker(subPatBinder, patBinder, pt, pos))
-
- /** A pattern binder x@p consists of a pattern variable x and a pattern p.
- The type of the variable x is the static type T of the pattern p.
- This pattern matches any value v matched by the pattern p,
- provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
- and it binds the variable name to that value.
- **/
- case Bound(subpatBinder, p) =>
- // replace subpatBinder by patBinder (as if the Bind was not there)
- withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
- // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
- (patBinder, p)
- )
-
- /** 8.1.4 Literal Patterns
- A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
- The type of L must conform to the expected type of the pattern.
-
- 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
- The pattern matches any value v such that r == v (§12.1).
- The type of r must conform to the expected type of the pattern.
- **/
- case Literal(Constant(_)) | Ident(_) | Select(_, _) =>
- noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
-
- case Alternative(alts) =>
- noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
-
- /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
- case class Foo(x: Int, y: String)
- case class Bar(z: Int)
-
- def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
- */
-
- case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
- // println("WARNING: Bind tree with unbound symbol "+ patTree)
- noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
-
- // case Star(_) | ArrayValue | This => error("stone age pattern relics encountered!")
-
- case _ =>
- error("unsupported pattern: "+ patTree +"(a "+ patTree.getClass +")")
- noFurtherSubPats()
- }
-
- treeMakers ++ subpats.flatMap { case (binder, pat) =>
- translatePattern(binder, pat) // recurse on subpatterns
- }
- }
-
- def translateGuard(guard: Tree): List[TreeMaker] =
- if (guard == EmptyTree) Nil
- else List(GuardTreeMaker(guard))
-
- // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one),
- // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand?
- // to enable this, probably need to move away from Option to a monad specific to pattern-match,
- // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad
- // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference
- // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account
- def translateBody(body: Tree, matchPt: Type): TreeMaker =
- BodyTreeMaker(body, matchPt)
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- object ExtractorCall {
- def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
-
- def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
-
- // THE PRINCIPLED SLOW PATH -- NOT USED
- // generate a call to the (synthetically generated) extractor of a case class
- // NOTE: it's an apply, not a select, since in general an extractor call may have multiple argument lists (including an implicit one)
- // that we need to preserve, so we supply the scrutinee as Ident(nme.SELECTOR_DUMMY),
- // and replace that dummy by a reference to the actual binder in translateExtractorPattern
- def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = {
- // TODO: can we rework the typer so we don't have to do all this twice?
- // undo rewrite performed in (5) of adapt
- val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- val origSym = orig.symbol
- val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe)
-
- if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) {
- None
- } else {
- // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy
- // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe)
- // can't always infer type arguments (pos/t602):
- /* case class Span[K <: Ordered[K]](low: Option[K]) {
- override def equals(x: Any): Boolean = x match {
- case Span((low0 @ _)) if low0 equals low => true
- }
- }*/
- // so... leave undetermined type params floating around if we have to
- // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala)
- // (see also run/virtpatmat_alts.scala)
- val savedUndets = context.undetparams
- val extractorCall = try {
- context.undetparams = Nil
- silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
- case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError()
- case _ =>
- // this fails to resolve overloading properly...
- // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway
-
- // println("funtpe after = "+ fun.tpe.finalResultType)
- // println("orig: "+(orig, orig.tpe))
- val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg,
- // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K])
- // println("tgt = "+ (tgt, tgt.tpe))
- val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType)
- // println("oper: "+ (oper, oper.tpe))
- Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway
- }
- } finally context.undetparams = savedUndets
-
- Some(this(extractorCall, args)) // TODO: simplify spliceApply?
- }
- }
- }
-
- abstract class ExtractorCall(val args: List[Tree]) {
- val nbSubPats = args.length
-
- // everything okay, captain?
- def isTyped : Boolean
-
- def isSeq: Boolean
- lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
-
- // to which type should the previous binder be cast?
- def paramType : Type
-
- // binder has been casted to paramType if necessary
- def treeMaker(binder: Symbol, pos: Position): TreeMaker
-
- // `subPatBinders` are the variables bound by this pattern in the following patterns
- // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
- lazy val subPatBinders = args map {
- case Bound(b, p) => b
- case p => freshSym(p.pos, prefix = "p")
- }
-
- lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
- case (b, Bound(_, p)) => (b, p)
- case bp => bp
- }
-
- def subPatTypes: List[Type] =
- if(isSeq) {
- val TypeRef(pre, SeqClass, args) = seqTp
- // do repeated-parameter expansion to match up with the expected number of arguments (in this case, subpatterns)
- formalTypes(rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args), nbSubPats)
- } else rawSubPatTypes
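- // Illustration (not from the original source; X is a hypothetical unapplySeq extractor): if rawSubPatTypes
- // is List(A, Seq[B]) and the pattern supplies four subpatterns, e.g. `case X(a, b1, b2, b3)`,
- // formalTypes expands the repeated parameter so that subPatTypes becomes List(A, B, B, B).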
-
- protected def rawSubPatTypes: List[Type]
-
- protected def seqTp = rawSubPatTypes.last baseType SeqClass
- protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
- protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
- protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
- protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
- protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
- protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
- protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
-
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require isSeq
- protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
- val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
- val nbIndexingIndices = indexingIndices.length
-
- // this error-condition has already been checked by checkStarPatOK:
- // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
- // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
- (((1 to firstIndexingBinder) map tupleSel(binder)) ++
- // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
- (indexingIndices map codegen.index(seqTree(binder))) ++
- // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
- (if(!lastIsStar) Nil else List(
- if(nbIndexingIndices == 0) seqTree(binder)
- else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
- }
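- // Illustration (not from the original source; X is a hypothetical unapplySeq extractor whose result
- // type is (A, Seq[B])): for `case X(a, b, rest @ _*)`, firstIndexingBinder == 1 and lastIsStar, so
- // the refs are roughly `binder._1`, `binder._2(0)`, `binder._2.drop(1)` -- the non-seq tuple
- // component, one indexed element, and the remainder for the star pattern.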
-
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
- protected def subPatRefs(binder: Symbol): List[Tree] =
- if (nbSubPats == 0) Nil
- else if (isSeq) subPatRefsSeq(binder)
- else ((1 to nbSubPats) map tupleSel(binder)).toList
-
- protected def lengthGuard(binder: Symbol): Option[Tree] =
- // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- if (!isSeq || (expectedLength < minLenToCheck)) None
- else { import CODE._
- // `binder.lengthCompare(expectedLength)`
- def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
-
- // the comparison to perform
- // when the last subpattern is a wildcard-star the expectedLength is but a lower bound
- // (otherwise equality is required)
- def compareOp: (Tree, Tree) => Tree =
- if (lastIsStar) _ INT_>= _
- else _ INT_== _
-
- // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
- Some((seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO))
- }
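- // Illustration (not from the original source; same hypothetical X with result type (A, Seq[B])):
- // for `case X(a, b1, b2)`, expectedLength == 2 and the generated guard is roughly
- //   binder._2 != null && binder._2.lengthCompare(2) == 0
- // with `>=` instead of `==` when the last subpattern is a wildcard-star.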
- }
-
- // TODO: to be called when there's a def unapplyProd(x: T): U
- // U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
- //
- // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
- class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
- // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
- /*override def equals(x$1: Any): Boolean = ...
- val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
- */
- // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
- // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
- // println("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
- // println("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
- private def constructorTp = fun.tpe
-
- def isTyped = fun.isTyped
-
- // to which type should the previous binder be cast?
- def paramType = constructorTp.finalResultType
-
- def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
- protected def rawSubPatTypes = constructorTp.paramTypes
-
- // binder has type paramType
- def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
- // checks binder ne null before chaining to the next extractor
- ProductExtractorTreeMaker(binder, lengthGuard(binder), Substitution(subPatBinders, subPatRefs(binder)))
- }
-
-/* TODO: remove special case when the following bug is fixed
-class Foo(x: Other) { x._1 } // BUG: can't refer to _1 if its defining class has not been type checked yet
-case class Other(y: String)
--- this is ok:
-case class Other(y: String)
-class Foo(x: Other) { x._1 } // no error in this order
-*/
- override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
- // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
- val caseAccs = binder.info.typeSymbol.caseFieldAccessors
- if (caseAccs isDefinedAt (i-1)) REF(binder) DOT caseAccs(i-1)
- else codegen.tupleSel(binder)(i)
- }
-
- override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
- }
-
- class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
- private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
-
- def tpe = extractorCall.tpe
- def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
- def paramType = tpe.paramTypes.head
- def resultType = tpe.finalResultType
- def isSeq = extractorCall.symbol.name == nme.unapplySeq
-
- def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
- // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
- val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder, Substitution(subPatBinders, subPatRefs(binder)))(resultType.typeSymbol == BooleanClass)
- }
-
- override protected def seqTree(binder: Symbol): Tree =
- if (firstIndexingBinder == 0) CODE.REF(binder)
- else super.seqTree(binder)
-
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
- override protected def subPatRefs(binder: Symbol): List[Tree] =
- if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
- else super.subPatRefs(binder)
-
- protected def spliceApply(binder: Symbol): Tree = {
- object splice extends Transformer {
- override def transform(t: Tree) = t match {
- case Apply(x, List(Ident(nme.SELECTOR_DUMMY))) =>
- treeCopy.Apply(t, x, List(CODE.REF(binder)))
- case _ => super.transform(t)
- }
- }
- splice.transform(extractorCallIncludingDummy)
- }
-
- // what's the extractor's result type in the monad?
- // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
- protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
- else matchMonadResult(resultType)
- }
-
- protected lazy val rawSubPatTypes =
- if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(nbSubPats == 1) List(resultInMonad)
- else getProductArgs(resultInMonad) match {
- case Nil => List(resultInMonad)
- case x => x
- }
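- // Illustration (not from the original source): for `def unapply(x: X): Option[(A, B)]` with two
- // subpatterns, resultInMonad is (A, B) and rawSubPatTypes is List(A, B); for
- // `def unapply(x: X): Boolean`, resultInMonad is Unit and rawSubPatTypes is Nil; for
- // `def unapplySeq(x: X): Option[Seq[A]]`, resultInMonad is Seq[A] and rawSubPatTypes is List(Seq[A]).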
-
- override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
- }
-
- /** A conservative approximation of which patterns do not discern anything.
- * They are discarded during the translation.
- */
- object WildcardPattern {
- def unapply(pat: Tree): Boolean = pat match {
- case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
- case Star(WildcardPattern()) => true
- case x: Ident => treeInfo.isVarPattern(x)
- case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
- case EmptyTree => true
- case _ => false
- }
- }
-
- object Bound {
- def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
- case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
- Some((t.symbol, p))
- case _ => None
- }
- }
- }
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// substitution
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait TypedSubstitution extends MatchMonadInterface {
- object Substitution {
- def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
- // requires sameLength(from, to)
- def apply(from: List[Symbol], to: List[Tree]) =
- if (from nonEmpty) new Substitution(from, to) else EmptySubstitution
- }
-
- class Substitution(val from: List[Symbol], val to: List[Tree]) {
- // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
- // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
- def apply(tree: Tree): Tree = {
- // according to -Ystatistics 10% of translateMatch's time is spent in this method...
- // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
- if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
- else (new Transformer {
- @inline private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
- if (origTp == null || origTp == NoType) to
- // important: only type when actually substing and when original tree was typed
- // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
- else typer.typed(to, EXPRmode, WildcardType)
-
- override def transform(tree: Tree): Tree = {
- def subst(from: List[Symbol], to: List[Tree]): Tree =
- if (from.isEmpty) tree
- else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate, tree.tpe)
- else subst(from.tail, to.tail)
-
- tree match {
- case Ident(_) => subst(from, to)
- case _ => super.transform(tree)
- }
- }
- }).transform(tree)
- }
-
-
- // the substitution that chains `other` before `this` substitution
- // forall t: Tree. this(other(t)) == (this >> other)(t)
- def >>(other: Substitution): Substitution = {
- val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
- new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
- }
- override def toString = (from zip to) mkString("Substitution(", ", ", ")")
- }
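- // Illustration (not from the original source; x, y, a, b are hypothetical symbols/trees):
- //   val s1 = Substitution(List(x), List(a)); val s2 = Substitution(List(y), List(b))
- // then (s1 >> s2)(t) == s1(s2(t)): s2 is chained before s1, and s1 is also applied to s2's
- // replacement trees, so references introduced by the earlier substitution stay up to date.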
-
- object EmptySubstitution extends Substitution(Nil, Nil) {
- override def apply(tree: Tree): Tree = tree
- override def >>(other: Substitution): Substitution = other
- }
- }
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// the making of the trees
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait TreeMakers extends TypedSubstitution { self: CodegenCore =>
- def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) =
- (cases, Nil)
-
- def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] =
- None
-
- // for catch (no need to customize match failure)
- def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
- None
-
- abstract class TreeMaker {
- /** captures the scope and the value of the bindings in patterns
- * it matters *when* the substitution happens (we can't accumulate the substitutions and apply them all at once after the full matcher has been constructed)
- */
- def substitution: Substitution =
- if (currSub eq null) localSubstitution
- else currSub
-
- protected def localSubstitution: Substitution
-
- private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
- if (currSub ne null) {
- println("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
- Thread.dumpStack()
- }
- else currSub = outerSubst >> substitution
- }
- private[this] var currSub: Substitution = null
-
- // build Tree that chains `next` after the current extractor
- def chainBefore(next: Tree)(casegen: Casegen): Tree
- }
-
- trait NoNewBinders extends TreeMaker {
- protected val localSubstitution: Substitution = EmptySubstitution
- }
-
- case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
- def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
- }
-
- case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
- def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
- atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
- }
-
- case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
- val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
- def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
- }
-
- abstract class FunTreeMaker extends TreeMaker {
- val nextBinder: Symbol
- }
-
- abstract class CondTreeMaker extends FunTreeMaker {
- val pos: Position
- val prevBinder: Symbol
- val nextBinderTp: Type
- val cond: Tree
- val res: Tree
-
- lazy val nextBinder = freshSym(pos, nextBinderTp)
- lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree =
- atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
- }
-
- /**
- * Make a TreeMaker that will result in an extractor call specified by `extractor`
- * the next TreeMaker (here, we don't know which it'll be) is chained after this one by flatMap'ing
- * a function with binder `nextBinder` over our extractor's result
- * the function's body is determined by the next TreeMaker
- * in this function's body, and all the subsequent ones, references to the symbols in `from` will be replaced by the corresponding tree in `to`
- */
- case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean) extends FunTreeMaker {
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- val condAndNext = extraCond map (casegen.ifThenElseZero(_, next)) getOrElse next
- atPos(extractor.pos)(
- if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, substitution(condAndNext))
- else casegen.flatMap(extractor, nextBinder, substitution(condAndNext))
- )
- }
-
- override def toString = "X"+(extractor, nextBinder)
- }
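- // Illustration (not from the original source; X is a hypothetical extractor): for `case X(a) => body`
- // with `def unapply(x: T): Option[A]`, chainBefore wraps the continuation roughly as
- //   X.unapply(prevBinder).flatMap(nextBinder => next)
- // while a Boolean `def unapply(x: T): Boolean` goes through casegen.flatMapCond, i.e. the extractor
- // result is used as a guard (with a unit value) before running `next`.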
-
- // TODO: allow user-defined unapplyProduct
- case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: Substitution) extends TreeMaker { import CODE._
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- val nullCheck = REF(prevBinder) OBJ_NE NULL
- val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
- casegen.ifThenElseZero(cond, substitution(next))
- }
-
- override def toString = "P"+(prevBinder, extraCond getOrElse "", localSubstitution)
- }
-
- // tack an outer test onto `cond` if binder.info and expectedType warrant it
- def maybeWithOuterCheck(binder: Symbol, expectedTp: Type)(cond: Tree): Tree = { import CODE._
- if ( !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
- && needsOuterTest(expectedTp, binder.info, matchOwner)) {
- val expectedPrefix = expectedTp.prefix match {
- case ThisType(clazz) => THIS(clazz)
- case pre => REF(pre.prefix, pre.termSymbol)
- }
-
- // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
- // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
- val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
- val outerCheck = (Select(codegen._asInstanceOf(binder, expectedTp), outer)) OBJ_EQ expectedPrefix
-
- // first check cond, since that should ensure we're not selecting outer on null
- codegen.and(cond, outerCheck)
- }
- else
- cond
- }
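- // Illustration (not from the original source): for a pattern `case _: p.C` where C is an inner class
- // whose outer instance matters, the plain type test is strengthened roughly to
- //   binder.isInstanceOf[p.C] && binder.asInstanceOf[p.C].<outer> eq p
- // where the synthetic <outer> selection is rewritten to the real outer accessor by ExplicitOuter.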
-
- // TODO: also need to test when erasing pt loses crucial information (and if we can recover it using a manifest)
- def needsTypeTest(tp: Type, pt: Type) = !(tp <:< pt)
- private def typeTest(binder: Symbol, pt: Type) = maybeWithOuterCheck(binder, pt)(codegen._isInstanceOf(binder, pt))
-
- // need to substitute since binder may be used outside of the next extractor call (say, in the body of the case)
- case class TypeTestTreeMaker(prevBinder: Symbol, nextBinderTp: Type, pos: Position) extends CondTreeMaker {
- val cond = typeTest(prevBinder, nextBinderTp)
- val res = codegen._asInstanceOf(prevBinder, nextBinderTp)
- override def toString = "TT"+(prevBinder, nextBinderTp)
- }
-
- // implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
- // TODO: normalize construction, which yields a combination of a EqualityTestTreeMaker (when necessary) and a TypeTestTreeMaker
- case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends CondTreeMaker {
- val nextBinderTp = glb(List(patBinder.info.widen, pt))
-
- /** Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
- - A reference to a class C, p.C, or T#C.
- This type pattern matches any non-null instance of the given class.
- Note that the prefix of the class, if it is given, is relevant for determining class instances.
- For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
- The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
-
- - A singleton type p.type.
- This type pattern matches only the value denoted by the path p
- (that is, a pattern match involves a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
- // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
-
- - A compound type pattern T1 with ... with Tn where each Ti is a type pattern.
- This type pattern matches all values that are matched by each of the type patterns Ti.
-
- - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
- This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
- The bounds or alias type of these type variables are determined as described in (§8.3).
-
- - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
- This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
- **/
-
- // generate the tree for the run-time test that follows from the fact that
- // a `scrut` of known type `scrutTp` is expected to have type `expectedTp`
- // uses maybeWithOuterCheck to check the type's prefix
- private def typeAndEqualityTest(patBinder: Symbol, pt: Type): Tree = { import CODE._
- // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
- // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
- def genEqualsAndInstanceOf(sym: Symbol): Tree
- = codegen._equals(REF(sym), patBinder) AND codegen._isInstanceOf(patBinder, pt.widen)
-
- def isRefTp(tp: Type) = tp <:< AnyRefClass.tpe
-
- val patBinderTp = patBinder.info.widen
- def isMatchUnlessNull = isRefTp(pt) && !needsTypeTest(patBinderTp, pt)
-
- // TODO: [SPEC] type test for Array
- // TODO: use manifests to improve tests (for erased types we can do better when we have a manifest)
- pt match {
- case SingleType(_, sym) /*this implies sym.isStable*/ => genEqualsAndInstanceOf(sym) // TODO: [SPEC] the spec requires `eq` instead of `==` here
- case ThisType(sym) if sym.isModule => genEqualsAndInstanceOf(sym) // must use == to support e.g. List() == Nil
- case ThisType(sym) => REF(patBinder) OBJ_EQ This(sym)
- case ConstantType(Constant(null)) if isRefTp(patBinderTp) => REF(patBinder) OBJ_EQ NULL
- case ConstantType(const) => codegen._equals(Literal(const), patBinder)
- case _ if isMatchUnlessNull => maybeWithOuterCheck(patBinder, pt)(REF(patBinder) OBJ_NE NULL)
- case _ => typeTest(patBinder, pt)
- }
- }
-
- val cond = typeAndEqualityTest(patBinder, pt)
- val res = codegen._asInstanceOf(patBinder, nextBinderTp)
-
- // TODO: remove this
- def isStraightTypeTest = cond match { case TypeApply(_, _) => cond.symbol == Any_isInstanceOf case _ => false }
-
- override def toString = "TET"+(patBinder, pt)
- }
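- // Illustration (not from the original source): representative outcomes of typeAndEqualityTest for a
- // few shapes of the expected type pt:
- //   pt = String (ordinary class type)          ~>  patBinder.isInstanceOf[String] (plus outer check if needed)
- //   pt = q.type (singleton type)               ~>  q == patBinder && patBinder.isInstanceOf[<widened q.type>]
- //   pt = ConstantType(null), ref-typed binder  ~>  patBinder eq null
- //   pt = ConstantType(42)                      ~>  42 == patBinder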
-
- // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
- case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, pos: Position) extends CondTreeMaker {
- val nextBinderTp = prevBinder.info.widen
-
- // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
- // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
- val cond = codegen._equals(patTree, prevBinder)
- val res = CODE.REF(prevBinder)
- override def toString = "ET"+(prevBinder, patTree)
- }
-
- case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
- // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
-
- override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
- super.incorporateOuterSubstitution(outerSubst)
- altss = altss map (alts => propagateSubstitution(alts, substitution))
- }
-
- def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
- atPos(pos){
- // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
- // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
- val combinedAlts = altss map (altTreeMakers =>
- ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE_typed)))(casegen))
- )
-
- val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => FALSE_typed))
- codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
- }
- }
- }
-
- case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
- def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(substitution(guardTree), next)
- override def toString = "G("+ guardTree +")"
- }
-
- // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
- // requires propagateSubstitution(treeMakers) has been called
- def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree =
- treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
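- // Illustration (not from the original source): for makers [tm1, tm2, body] the foldRight yields
- //   tm1.chainBefore(tm2.chainBefore(body.chainBefore(EmptyTree)(casegen))(casegen))(casegen)
- // so each tree maker wraps the code produced by the makers to its right.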
-
-
- def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
-
- // a foldLeft to accumulate the localSubstitution left-to-right
- // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution
- def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
- var accumSubst: Substitution = initial
- treeMakers foreach { maker =>
- maker incorporateOuterSubstitution accumSubst
- accumSubst = maker.substitution
- }
- removeSubstOnly(treeMakers)
- }
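- // Illustration (not from the original source): given makers [SubstOnly(a -> b), typeTest, body],
- // the SubstOnly maker's renaming is folded into the outer substitutions of typeTest and body, and
- // the returned list is [typeTest, body] -- the renaming survives, the maker itself is dropped.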
-
- // calls propagateSubstitution on the treemakers
- def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = {
- // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
- val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution))
- combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
- }
-
- def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
- fixerUpper(owner, scrut.pos){
- val ptDefined = if (isFullyDefined(pt)) pt else NoType
- def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
-
- emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride).getOrElse{
- if (casesNoSubstOnly nonEmpty) {
- // before optimizing, check casesNoSubstOnly for presence of a default case,
- // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
- // exhaustivity and reachability must be checked before optimization as well
- // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
- // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
- // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
- val synthCatchAll =
- if (casesNoSubstOnly.nonEmpty && {
- val nonTrivLast = casesNoSubstOnly.last
- nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
- }) None
- else matchFailGen
-
- val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt)
-
- val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
-
- if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
- } else {
- codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
- }
- }
- }
-
- // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
- // TODO: assign more fine-grained positions
- // fixes symbol nesting, assigns positions
- protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
- currentOwner = origOwner
-
- override def traverse(t: Tree) {
- if (t != EmptyTree && t.pos == NoPosition) {
- t.setPos(pos)
- }
- t match {
- case Function(_, _) if t.symbol == NoSymbol =>
- t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
- // println("new symbol for "+ (t, t.symbol.ownerChain))
- case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
- // println("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
- t.symbol.owner = currentOwner
- case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
- // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
- if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree??
- assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner, d.symbol.lazyAccessor)
- d.symbol.lazyAccessor.owner = currentOwner
- }
- if(d.symbol.moduleClass ne NoSymbol)
- d.symbol.moduleClass.owner = currentOwner
-
- d.symbol.owner = currentOwner
- // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
- // println("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
- case _ =>
- }
- super.traverse(t)
- }
-
- // override def apply
- // println("before fixerupper: "+ xTree)
- // currentRun.trackerFactory.snapshot()
- // println("after fixerupper")
- // currentRun.trackerFactory.snapshot()
- }
- }
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// generate actual trees
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait CodegenCore extends MatchMonadInterface {
- private var ctr = 0
- def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)}
-
- // assert(owner ne null); assert(owner ne NoSymbol)
- def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
- NoSymbol.newTermSymbol(freshName(prefix), pos) setInfo /*repackExistential*/(tp)
-
- // codegen relevant to the structure of the translation (how extractors are combined)
- trait AbsCodegen {
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree
-
- // local / context-free
- def _asInstanceOf(b: Symbol, tp: Type): Tree
- def _equals(checker: Tree, binder: Symbol): Tree
- def _isInstanceOf(b: Symbol, tp: Type): Tree
- def and(a: Tree, b: Tree): Tree
- def drop(tgt: Tree)(n: Int): Tree
- def index(tgt: Tree)(i: Int): Tree
- def mkZero(tp: Type): Tree
- def tupleSel(binder: Symbol)(i: Int): Tree
- }
-
- // structure
- trait Casegen extends AbsCodegen { import CODE._
- def one(res: Tree): Tree
-
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
- def flatMapGuard(cond: Tree, next: Tree): Tree
- def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
- protected def zero: Tree
- }
-
- def codegen: AbsCodegen
-
- def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt))
-
- abstract class CommonCodegen extends AbsCodegen { import CODE._
- def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
- def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree)
- def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
- def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
- def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
- def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
- def and(a: Tree, b: Tree): Tree = a AND b
-
- // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise, as neither tp nor pt need contain an abstract type; we're just casting wildly)
- def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = /*repackExistential*/(tp)
- if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tpX)) t //{ println("warning: emitted redundant asInstanceOf: "+(t, t.tpe, tp)); t } //.setType(tpX)
- else gen.mkAsInstanceOf(t, tpX, true, false)
- }
-
- def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), /*repackExistential*/(tp), true, false)
- // { val tpX = /*repackExistential*/(tp)
- // if (typesConform(b.info, tpX)) { println("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
- // else gen.mkIsInstanceOf(REF(b), tpX, true, false)
- // }
-
- def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = /*repackExistential*/(tp)
- if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX)
- else gen.mkAsInstanceOf(REF(b), tpX, true, false)
- }
-
- // duplicated out of frustration with cast generation
- def mkZero(tp: Type): Tree = {
- tp.typeSymbol match {
- case UnitClass => Literal(Constant())
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
- }
- }
- }
- }
-
- trait PureMatchMonadInterface extends MatchMonadInterface {
- val matchStrategy: Tree
-
- def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
- def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.head
- protected def matchMonadSym = oneSig.finalResultType.typeSymbol
-
- import CODE._
- def _match(n: Name): SelectStart = matchStrategy DOT n
-
- private lazy val oneSig: Type =
- typer.typed(_match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
- }
-
- trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
- def codegen: AbsCodegen = pureCodegen
-
- object pureCodegen extends CommonCodegen with Casegen { import CODE._
- //// methods in MatchingStrategy (the monad companion) -- used directly in translation
- // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
- // TODO: consider catchAll, or virtualized matching will break in exception handlers
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
- _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse))
-
- // __match.one(`res`)
- def one(res: Tree): Tree = (_match(vpmName.one)) (res)
- // __match.zero
- protected def zero: Tree = _match(vpmName.zero)
- // __match.guard(`c`, `then`)
- def guard(c: Tree, then: Tree): Tree = _match(vpmName.guard) APPLY (c, then)
-
- //// methods in the monad instance -- used directly in translation
- // `prev`.flatMap(`b` => `next`)
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
- // `thisCase`.orElse(`elseCase`)
- def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
- // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
- // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
- def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
- }
- }
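- // Illustration (not from the original source; Positive is a hypothetical extractor and the binder
- // names are made up): under the pure codegen, `in match { case Positive(x) if p(x) => x }` is
- // translated roughly into calls on the user-supplied __match strategy:
- //   __match.runOrElse(in)(in1 =>
- //     Positive.unapply(in1).flatMap(x => __match.guard(p(x), ()).flatMap(u => __match.one(x))))
- // with match failure handled by runOrElse's fallback (a MatchError by default).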
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// OPTIMIZATIONS
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// decisions, decisions
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- trait TreeMakerApproximation extends TreeMakers { self: CodegenCore =>
- object Test {
- var currId = 0
- }
- case class Test(cond: Cond, treeMaker: TreeMaker) {
- // def <:<(other: Test) = cond <:< other.cond
- // def andThen_: (prev: List[Test]): List[Test] =
- // prev.filterNot(this <:< _) :+ this
-
- private val reusedBy = new collection.mutable.HashSet[Test]
- var reuses: Option[Test] = None
- def registerReuseBy(later: Test): Unit = {
- assert(later.reuses.isEmpty, later.reuses)
- reusedBy += later
- later.reuses = Some(this)
- }
-
- val id = { Test.currId += 1; Test.currId}
- override def toString =
- if (cond eq Top) "T"
- else if(cond eq Havoc) "!?"
- else "T"+ id + (if(reusedBy nonEmpty) "!["+ treeMaker +"]" else (if(reuses.isEmpty) "["+ treeMaker +"]" else " cf. T"+reuses.get.id))
- }
-
- object Cond {
- // def refines(self: Cond, other: Cond): Boolean = (self, other) match {
- // case (Bottom, _) => true
- // case (Havoc , _) => true
- // case (_ , Top) => true
- // case (_ , _) => false
- // }
- var currId = 0
- }
-
- abstract class Cond {
- // def testedPath: Tree
- // def <:<(other: Cond) = Cond.refines(this, other)
-
- val id = { Cond.currId += 1; Cond.currId}
- }
-
- // does not contribute any knowledge
- case object Top extends Cond
-
- // takes away knowledge. e.g., a user-defined guard
- case object Havoc extends Cond
-
- // we know everything! everything!
- // this either means the case is unreachable,
- // or that it is statically known to be picked -- at this point in the decision tree --> no point in emitting further alternatives
- // case object Bottom extends Cond
-
-
- object EqualityCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond]
- def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs))
- }
- class EqualityCond(testedPath: Tree, rhs: Tree) extends Cond {
- // def negation = TopCond // inequality doesn't teach us anything
- // do simplification when we know enough about the tree statically:
- // - collapse equal trees
- // - accumulate tests when (in)equality not known statically
- // - become bottom when we statically know this can never match
-
- override def toString = testedPath +" == "+ rhs +"#"+ id
- }
-
- object TypeCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond]
- def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
- }
- class TypeCond(testedPath: Tree, pt: Type) extends Cond {
- // def negation = TopCond // inequality doesn't teach us anything
- // do simplification when we know enough about the tree statically:
- // - collapse equal trees
- // - accumulate tests when (in)equality not known statically
- // - become bottom when we statically know this can never match
- override def toString = testedPath +" <: "+ pt +"#"+ id
- }
-
- object TypeAndEqualityCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeAndEqualityCond]
- def apply(testedPath: Tree, pt: Type): TypeAndEqualityCond = uniques getOrElseUpdate((testedPath, pt), new TypeAndEqualityCond(testedPath, pt))
- }
- class TypeAndEqualityCond(testedPath: Tree, pt: Type) extends Cond {
- // def negation = TopCond // inequality doesn't teach us anything
- // do simplification when we know enough about the tree statically:
- // - collapse equal trees
- // - accumulate tests when (in)equality not known statically
- // - become bottom when we statically know this can never match
- override def toString = testedPath +" (<: && ==) "+ pt +"#"+ id
- }
-
- def approximateMatch(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] = {
- // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
- val pointsToBound = collection.mutable.HashSet(root)
-
- // the substitution that renames variables to variables in pointsToBound
- var normalize: Substitution = EmptySubstitution
-
- // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
- // TODO check:
- // pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty
- var accumSubst: Substitution = EmptySubstitution
-
- val trees = new collection.mutable.HashSet[Tree]
-
- def approximateTreeMaker(tm: TreeMaker): Test = {
- val subst = tm.substitution
-
- // find part of substitution that replaces bound symbols by new symbols, and reverse that part
- // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
- val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {case (f, t) =>
- t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
- }
- val (boundFrom, boundTo) = boundSubst.unzip
- normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
- // println("normalize: "+ normalize)
-
- val (unboundFrom, unboundTo) = unboundSubst unzip
- val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
- pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
- // println("pointsToBound: "+ pointsToBound)
-
- accumSubst >>= okSubst
- // println("accumSubst: "+ accumSubst)
-
- // TODO: improve, e.g., for constants
- def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
- case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
- case _ => false
- })
-
- // hashconsing trees (modulo value-equality)
- def unique(t: Tree): Tree =
- trees find (a => a.equalsStructure0(t)(sameValue)) match {
- case Some(orig) => orig // println("unique: "+ (t eq orig, orig));
- case _ => trees += t; t
- }
-
- def uniqueTp(tp: Type): Type = tp match {
- // typerefs etc are already hashconsed
- case _ : UniqueType => tp
- case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
- case _ => tp
- }
-
- def binderToUniqueTree(b: Symbol) = unique(accumSubst(normalize(CODE.REF(b))))
-
- Test(tm match {
- case ProductExtractorTreeMaker(pb, None, subst) => Top // TODO: NotNullTest(prevBinder)
- case tm@TypeTestTreeMaker(prevBinder, nextBinderTp, _) => TypeCond(binderToUniqueTree(prevBinder), uniqueTp(nextBinderTp))
- case tm@TypeAndEqualityTestTreeMaker(_, patBinder, pt, _) => TypeAndEqualityCond(binderToUniqueTree(patBinder), uniqueTp(pt))
- case tm@EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
- case ExtractorTreeMaker(_, _, _, _)
- | GuardTreeMaker(_)
- | ProductExtractorTreeMaker(_, Some(_), _) => Havoc
- case AlternativesTreeMaker(_, _, _) => Havoc // TODO: can do better here
- case SubstOnlyTreeMaker(_, _) => Top
- case BodyTreeMaker(_, _) => Havoc
- }, tm)
- }
-
- cases.map { _ map approximateTreeMaker }
- }
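- // Illustration (not from the original source): a case like `case Foo(x) if g(x) => ...` (Foo a case
- // class) is typically approximated as [TypeCond(scrut, Foo), Top, Havoc, Havoc]: the type test
- // contributes knowledge, the product extractor none, and the guard and body invalidate what we know.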
- }
-
-////
- trait CommonSubconditionElimination extends TreeMakerApproximation { self: OptimizedCodegen =>
- /** a flow-sensitive, generalised, common sub-expression elimination
- * reuse knowledge from performed tests
- * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality)
- * when a sub-expression is shared, it is stored in a mutable variable
- * the variable is floated up so that its scope includes all of the program that shares it
- * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree)
- *
- * intended to be generalised to exhaustivity/reachability checking
- */
- def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
- val testss = approximateMatch(prevBinder, cases)
-
- // interpret:
- val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]]
- val tested = new collection.mutable.HashSet[Cond]
- testss foreach { tests =>
- tested.clear()
- tests dropWhile { test =>
- val cond = test.cond
- if ((cond eq Havoc) || (cond eq Top)) (cond eq Top) // stop when we encounter a havoc, skip top
- else {
- tested += cond
-
- // is there an earlier test that checks our condition and whose dependencies are implied by ours?
- dependencies find { case (priorTest, deps) =>
- ((priorTest.cond eq cond) || (deps contains cond)) && (deps subsetOf tested)
- } foreach { case (priorTest, deps) =>
- // if so, note the dependency in both tests
- priorTest registerReuseBy test
- }
-
- dependencies(test) = tested.toSet // copies
- true
- }
- }
- }
-
- // find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
- // then, collapse these contiguous sequences of reusing tests
- // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
- // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
- val reused = new collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
- var okToCall = false
- val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
-
- val res = testss map { tests =>
- var currDeps = Set[Cond]()
- val (sharedPrefix, suffix) = tests span { test =>
- (test.cond eq Top) || (for(
- reusedTest <- test.reuses;
- nextDeps <- dependencies.get(reusedTest);
- diff <- (nextDeps -- currDeps).headOption;
- _ <- Some(currDeps = nextDeps))
- yield diff).nonEmpty
- }
-
- val collapsedTreeMakers = if (sharedPrefix.nonEmpty) { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
- for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
- case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
- case _ =>
- }
-
- // println("sharedPrefix: "+ sharedPrefix)
- for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption;
- lastReused <- lastShared.reuses)
- yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
- } else None
-
- collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above)
- }
- okToCall = true // TODO: remove (debugging)
-
- res mapConserve (_ mapConserve reusedOrOrig)
- }
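- // Illustration (not from the original source): given
- //   scrut match { case Foo(x) => ...; case Foo(y) if g(y) => ... }
- // both cases begin with the same type test on scrut, so doCSE lets the second case reuse the first:
- // the test's outcome and the cast binder are hoisted into mutable locals (ReusedCondTreeMaker) and
- // the second case reads them via ReusingCondTreeMaker instead of re-testing.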
-
- object ReusedCondTreeMaker {
- def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
- }
- class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, pos: Position) extends TreeMaker { import CODE._
- lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
- lazy val treesToHoist: List[Tree] = {
- nextBinder setFlag MUTABLE
- List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
- }
-
- // TODO: finer-grained duplication
- def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
- atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
- }
-
- case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
- lazy val dropped_priors = sharedPrefix map (t => (toReused(t.treeMaker), t.reuses map (test => toReused(test.treeMaker))))
- lazy val localSubstitution = {
- val (from, to) = dropped_priors.collect {
- case (dropped: CondTreeMaker, Some(prior: ReusedCondTreeMaker)) =>
- (dropped.nextBinder, REF(prior.nextBinder))
- }.unzip
- val oldSubs = dropped_priors.collect {
- case (dropped: TreeMaker, _) =>
- dropped.substitution
- }
- oldSubs.foldLeft(Substitution(from, to))(_ >> _)
- }
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: ReusedCondTreeMaker)) => ctm}.get.storedCond)
-
- // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
- casegen.ifThenElseZero(cond, substitution(next).duplicate)
- }
- }
- }
-
-
- //// DCE
- trait DeadCodeElimination extends TreeMakers { self: CodegenCore =>
- // TODO: non-trivial dead-code elimination
- // e.g., the following match should compile to a simple instanceof:
- // case class Ident(name: String)
- // for (Ident(name) <- ts) println(name)
- def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
- // do minimal DCE
- cases
- }
- }
-
- //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
- trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface { self: CodegenCore =>
- abstract class SwitchMaker {
- abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] }
- val SwitchableTreeMaker: SwitchableTreeMakerExtractor
-
- def alternativesSupported: Boolean
-
- def isDefault(x: CaseDef): Boolean
- def defaultSym: Symbol
- def defaultBody: Tree
- def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef
-
- private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
- if (xs exists (_.isEmpty)) None else Some(xs.flatten)
-
- // empty list ==> failure
- def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] = {
- val caseDefs = cases map { case (scrutSym, makers) =>
- makers match {
- // default case
- case (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(defaultCase(scrutSym, btm.substitution(body)))
- // constant (or typetest for typeSwitch)
- case SwitchableTreeMaker(pattern) :: (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(CaseDef(pattern, EmptyTree, btm.substitution(body)))
- // alternatives
- case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil if alternativesSupported =>
- val casePatterns = altss map {
- case SwitchableTreeMaker(pattern) :: Nil =>
- Some(pattern)
- case _ =>
- None
- }
-
- sequence(casePatterns) map { patterns =>
- val substedBody = btm.substitution(body)
- CaseDef(Alternative(patterns), EmptyTree, substedBody)
- }
- case _ => //println("can't emit switch for "+ makers)
- None //failure (can't translate pattern to a switch)
- }
- }
-
- (for(
- caseDefs <- sequence(caseDefs)) yield
- if (caseDefs exists isDefault) caseDefs
- else {
- caseDefs :+ defaultCase()
- }
- ) getOrElse Nil
- }
- }
-
- class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree]) extends SwitchMaker {
- val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
- val alternativesSupported = true
-
- object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat match {
- case Literal(const@Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) =>
- Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
- case _ => None
- }}
-
- object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
- def unapply(x: TreeMaker): Option[Tree] = x match {
- case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const)
- case _ => None
- }
- }
-
- def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
- case _ => false
- }
-
- def defaultSym: Symbol = scrutSym
- def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
- def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- DEFAULT ==> body
- }}
- }
-
- override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] = { import CODE._
- val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride)
- // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
- if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
- val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
- if (caseDefsWithDefault isEmpty) None
- else {
- // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
- val scrutToInt: Tree =
- if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
- else (REF(scrutSym) DOT (nme.toInt))
- Some(BLOCK(
- VAL(scrutSym) === scrut,
- Match(gen.mkSynthSwitchSelector(scrutToInt), caseDefsWithDefault) // add switch annotation
- ))
- }
- } else None
- }
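- // Illustration (not from the original source): for an Int scrutinee,
- //   in match { case 1 => a; case 2 | 3 => b }
- // is emitted roughly as
- //   { val x1 = in; x1 match { case 1 => a; case 2 | 3 => b; case _ => throw new MatchError(x1) } }
- // where x1 is a fresh name and the selector carries the synthetic switch annotation so the back-end
- // can emit a tableswitch/lookupswitch.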
-
- // for the catch-cases in a try/catch
- private object typeSwitchMaker extends SwitchMaker {
- def switchableTpe(tp: Type) = true
- val alternativesSupported = false // TODO: needs either back-end support or flattening of alternatives during typer
-
- // TODO: there are more treemaker-sequences that can be handled by type tests
- // analyze the result of approximateTreeMaker rather than the TreeMaker itself
- object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
- def unapply(x: TreeMaker): Option[Tree] = x match {
- case tm@TypeTestTreeMaker(_, _, _) =>
- Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(tm.nextBinderTp)) /* not used by back-end */)) // -- TODO: use this if binder does not occur in the body
- case tm@TypeAndEqualityTestTreeMaker(_, patBinder, pt, _) if tm.isStraightTypeTest =>
- Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(tm.nextBinderTp)) /* not used by back-end */))
- case _ =>
- None
- }
- }
-
- def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
- case _ => false
- }
-
- lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
- def defaultBody: Tree = Throw(CODE.REF(defaultSym))
- def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) ==> body
- }}
- }
-
- // TODO: drop null checks
- override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = {
- val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt)
- if (caseDefsWithDefault isEmpty) None
- else Some(caseDefsWithDefault)
- }
- }
-
- trait OptimizedMatchMonadInterface extends MatchMonadInterface {
- override def inMatchMonad(tp: Type): Type = optionType(tp)
- override def pureType(tp: Type): Type = tp
- override protected def matchMonadSym = OptionClass
- }
-
- trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
- override def codegen: AbsCodegen = optimizedCodegen
-
- // trait AbsOptimizedCodegen extends AbsCodegen {
- // def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree
- // }
- // def optimizedCodegen: AbsOptimizedCodegen
-
- // when we know we're targeting Option, do some inlining the optimizer won't do
- // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
- // this is a special instance of the advanced inlining optimization that takes a method call on
- // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
- object optimizedCodegen extends CommonCodegen { import CODE._
-
- /** Inline runOrElse and get rid of Option allocations
- *
- * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
- * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
- * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
- */
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
- val matchEnd = NoSymbol.newLabel(freshName("matchEnd"), NoPosition) setFlag SYNTH_CASE
- val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, SYNTHETIC) setInfo restpe
- matchEnd setInfo MethodType(List(matchRes), restpe)
-
- def newCaseSym = NoSymbol.newLabel(freshName("case"), NoPosition) setInfo MethodType(Nil, restpe) setFlag SYNTH_CASE
- var nextCase = newCaseSym
- def caseDef(mkCase: Casegen => Tree): Tree = {
- val currCase = nextCase
- nextCase = newCaseSym
- val casegen = new OptimizedCasegen(matchEnd, nextCase, restpe)
- LabelDef(currCase, Nil, mkCase(casegen))
- }
-
- def catchAll = matchFailGen map { matchFailGen =>
- val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
- LabelDef(nextCase, Nil, matchEnd APPLY (_asInstanceOf(matchFailGen(scrutRef), restpe))) // need to jump to matchEnd with result generated by matchFailGen (could be `FALSE` for isDefinedAt)
- } toList
- // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
- // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
-
- // the generated block is taken apart in TailCalls under the following assumptions
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
-
- // scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
- Block(
- scrutDef ++ (cases map caseDef) ++ catchAll,
- LabelDef(matchEnd, List(matchRes), REF(matchRes))
- )
- }
-
- class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol, restpe: Type) extends CommonCodegen with Casegen {
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
- optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
-
- // only used to wrap the RHS of a body
- // res: T
- // returns MatchMonad[T]
- def one(res: Tree): Tree = matchEnd APPLY (_asInstanceOf(res, restpe)) // need cast for GADT magic
- protected def zero: Tree = nextCase APPLY ()
-
- // prev: MatchMonad[T]
- // b: T
- // next: MatchMonad[U]
- // returns MatchMonad[U]
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
- val tp = inMatchMonad(b.tpe)
- val prevSym = freshSym(prev.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
-
- BLOCK(
- VAL(prevSym) === prev,
- // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
- ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
- )
- }
-
- // cond: Boolean
- // res: T
- // nextBinder: T
- // next == MatchMonad[U]
- // returns MatchMonad[U]
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- ifThenElseZero(cond, BLOCK(
- VAL(nextBinder) === res,
- next
- ))
-
- // guardTree: Boolean
- // next: MatchMonad[T]
- // returns MatchMonad[T]
- def flatMapGuard(guardTree: Tree, next: Tree): Tree =
- ifThenElseZero(guardTree, next)
-
- def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- ifThenElseZero(cond, BLOCK(
- condSym === TRUE_typed,
- nextBinder === res,
- next
- ))
- }
-
- }
- }
-
-
- trait MatchOptimizations extends CommonSubconditionElimination
- with DeadCodeElimination
- with SwitchEmission
- with OptimizedCodegen { self: TreeMakers =>
- override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = {
- val optCases = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
- val toHoist = (
- for (treeMakers <- optCases)
- yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist}
- ).flatten.flatten.toList
- (optCases, toHoist)
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
new file mode 100644
index 0000000000..48985213d1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -0,0 +1,3169 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2012 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc
+package typechecker
+
+import symtab._
+import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC}
+import language.postfixOps
+import scala.tools.nsc.transform.TypingTransformers
+import scala.tools.nsc.transform.Transform
+import scala.collection.mutable.HashSet
+import scala.collection.mutable.HashMap
+import scala.tools.nsc.util.Statistics
+
+/** Translate pattern matching.
+ *
+ * Either into optimized if/then/else's,
+ * or virtualized as method calls (these methods form a zero-plus monad), similar in spirit to how for-comprehensions are compiled.
+ *
+ * For each case, express all patterns as extractor calls, guards as 0-ary extractors, and sequence them using `flatMap`
+ * (lifting the body of the case into the monad using `one`).
+ *
+ * Cases are combined into a pattern match using the `orElse` combinator (the implicit failure case is expressed using the monad's `zero`).
+ *
+ * TODO:
+ * - use TypeTags for type testing
+ * - DCE (on irrefutable patterns)
+ * - update spec and double check it's implemented correctly (see TODO's)
+ *
+ * (longer-term) TODO:
+ * - user-defined unapplyProd
+ * - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
+ * - recover exhaustivity and unreachability checking using a variation on the type-safe builder pattern
+ */
+trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL { // self: Analyzer =>
+ import Statistics._
+
+ val global: Global // need to repeat here because otherwise last mixin defines global as
+ // SymbolTable. If we had DOT this would not be an issue
+ import global._ // the global environment
+ import definitions._ // standard classes and methods
+
+ val phaseName: String = "patmat"
+
+ def patmatDebug(msg: String) = println(msg)
+
+ def newTransformer(unit: CompilationUnit): Transformer =
+ if (opt.virtPatmat) new MatchTransformer(unit)
+ else noopTransformer
+
+ // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
+ private lazy val MarkerCPSAdaptPlus = definitions.getClassIfDefined("scala.util.continuations.cpsPlus")
+ private lazy val MarkerCPSAdaptMinus = definitions.getClassIfDefined("scala.util.continuations.cpsMinus")
+ private lazy val MarkerCPSSynth = definitions.getClassIfDefined("scala.util.continuations.cpsSynth")
+ private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
+ private lazy val MarkerCPSTypes = definitions.getClassIfDefined("scala.util.continuations.cpsParam")
+ private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
+ private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
+
+ class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ override def transform(tree: Tree): Tree = tree match {
+ case Match(sel, cases) =>
+ val origTp = tree.tpe
+ // setType origTp intended for CPS -- TODO: is it necessary?
+ localTyper.typed(translator.translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]]))) setType origTp
+ case Try(block, catches, finalizer) =>
+ treeCopy.Try(tree, transform(block), translator.translateTry(transformTrees(catches).asInstanceOf[List[CaseDef]], tree.tpe, tree.pos), transform(finalizer))
+ case _ => super.transform(tree)
+ }
+
+ def translator: MatchTranslation with CodegenCore = {
+ new OptimizingMatchTranslator(localTyper)
+ }
+ }
+
+ import definitions._
+ import analyzer._ //Typer
+
+ val SYNTH_CASE = Flags.CASE | SYNTHETIC
+
+ case class DefaultOverrideMatchAttachment(default: Tree)
+
+ object vpmName {
+ val one = newTermName("one")
+ val drop = newTermName("drop")
+ val flatMap = newTermName("flatMap")
+ val get = newTermName("get")
+ val guard = newTermName("guard")
+ val isEmpty = newTermName("isEmpty")
+ val orElse = newTermName("orElse")
+ val outer = newTermName("<outer>")
+ val runOrElse = newTermName("runOrElse")
+ val zero = newTermName("zero")
+ val _match = newTermName("__match") // don't call the val __match, since that will trigger virtual pattern matching...
+
+ def counted(str: String, i: Int) = newTermName(str+i)
+ }
+
+ class PureMatchTranslator(val typer: Typer, val matchStrategy: Tree) extends MatchTranslation with TreeMakers with PureCodegen
+ class OptimizingMatchTranslator(val typer: Typer) extends MatchTranslation with TreeMakers with MatchOptimizations
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// talking to userland
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ /** Interface with user-defined match monad?
+ * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+
+ type Matcher[P[_], M[+_], A] = {
+ def flatMap[B](f: P[A] => M[B]): M[B]
+ def orElse[B >: A](alternative: => M[B]): M[B]
+ }
+
+ abstract class MatchStrategy[P[_], M[+_]] {
+ // runs the matcher on the given input
+ def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U]
+
+ def zero: M[Nothing]
+ def one[T](x: P[T]): M[T]
+ def guard[T](cond: P[Boolean], then: => P[T]): M[T]
+ def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt
+ }
+
+ * P and M are derived from `one`'s signature (`def one[T](x: P[T]): M[T]`)
+
+
+ * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
+
+ object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
+ def zero = None
+ def one[T](x: T) = Some(x)
+ // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
+ def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
+ def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
+ def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty
+ }
+
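+ * For example, under the default Option-based strategy above, a match like
+ * `x match { case Some(y) if y > 0 => y; case _ => 0 }` corresponds -- roughly; the shapes below are an
+ * illustrative sketch, not the exact trees the translator emits -- to:
+
+    __match.runOrElse(x)(x1 =>
+      Some.unapply(x1).flatMap(y =>
+        __match.guard(y > 0, y).flatMap(y1 => __match.one(y1))
+      ).orElse(__match.one(0)))
+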
+ */
+ trait MatchMonadInterface {
+ val typer: Typer
+ val matchOwner = typer.context.owner
+
+ def inMatchMonad(tp: Type): Type
+ def pureType(tp: Type): Type
+ final def matchMonadResult(tp: Type): Type =
+ tp.baseType(matchMonadSym).typeArgs match {
+ case arg :: Nil => arg
+ case _ => ErrorType
+ }
+
+ protected def matchMonadSym: Symbol
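+
+ // For illustration (a sketch, not part of the interface): with the Option-based instantiation used by the
+ // optimizing translator -- inMatchMonad(tp) = Option[tp], pureType(tp) = tp, matchMonadSym = OptionClass --
+ // matchMonadResult(Option[Int]) yields Int, and ErrorType for a type that is not an Option at all.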
+ }
+
+ trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore =>
+ import typer.{typed, context, silent, reallyExists}
+ // import typer.infer.containsUnchecked
+
+ /** Implement a pattern match by turning its cases (including the implicit failure case)
+ * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
+ *
+ * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape
+ * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))`
+ *
+ * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
+ * thus, you must typecheck the result (and that will in turn translate nested matches)
+ * this could probably be optimized... (but note that the matchStrategy must be solved for each nested pattern match)
+ */
+ def translateMatch(match_ : Match): Tree = {
+ val Match(selector, cases) = match_
+
+ // we don't transform after uncurry
+ // (that would require more sophistication when generating trees,
+ // and the only place that emits Matches after typers is for exception handling anyway)
+ if(phase.id >= currentRun.uncurryPhase.id) debugwarn("running translateMatch at "+ phase +" on "+ selector +" match "+ cases)
+ // patmatDebug("translating "+ cases.mkString("{", "\n", "}"))
+
+ def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
+ case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg)
+ case _ => tp
+ }
+
+ val start = startTimer(patmatNanos)
+
+ val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
+
+ val origPt = match_.tpe
+ // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+ // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
+ // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
+ val ptUnCPS =
+ if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
+ removeCPSAdaptAnnotations(origPt)
+ else origPt
+
+ // we've packed the type for each case in typedMatch so that if all cases have the same existential case, we get a clean lub
+ // here, we should open up the existential again
+ // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
+ // TODO: fix skolemizeExistential (it should preserve annotations, right?)
+ val pt = repeatedToSeq(ptUnCPS.skolemizeExistential(context.owner, context.tree) withAnnotations ptUnCPS.annotations)
+
+ // the alternative to attaching the default case override would be to simply
+ // append the default to the list of cases and suppress the unreachable case error that may arise (once we detect that...)
+ val matchFailGenOverride = match_ firstAttachment {case DefaultOverrideMatchAttachment(default) => ((scrut: Tree) => default)}
+
+ val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag SYNTH_CASE
+ // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
+ val combined = combineCases(selector, selectorSym, cases map translateCase(selectorSym, pt), pt, matchOwner, matchFailGenOverride)
+
+ stopTimer(patmatNanos, start)
+ combined
+ }
+
+ // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
+ // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
+ // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
+ // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
+ // unlike translateMatch, we type our result before returning it
+ def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
+ // if they're already simple enough to be handled by the back-end, we're done
+ if (caseDefs forall treeInfo.isCatchCase) caseDefs
+ else {
+ val swatches = { // switch-catches
+ val bindersAndCases = caseDefs map { caseDef =>
+ // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
+ // if we fail to emit a fine-grained switch, we have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
+ val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
+ }
+
+ for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
+ if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
+ cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
+ }
+
+ val catches = if (swatches.nonEmpty) swatches else {
+ val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
+
+ val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+
+ List(
+ atPos(pos) {
+ CaseDef(
+ Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing up (i.e., fixerUpper)?
+ EmptyTree,
+ combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
+ )
+ })
+ }
+
+ typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+ }
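+
+ // A sketch of the fallback shape built above (when no type-based switch could be emitted); names are illustrative only:
+ //   try { ... } catch {
+ //     case ex =>   // ex is a fresh symbol of type Throwable
+ //       <the full pattern match on ex, whose default case rethrows: throw ex>
+ //   }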
+
+
+
+ /** The translation of `pat if guard => body` has two aspects:
+ * 1) the substitution due to the variables bound by patterns
+ * 2) the combination of the extractor calls using `flatMap`.
+ *
+ * 2) is easy -- it looks like: `translatePattern_1.flatMap(translatePattern_2....flatMap(translatePattern_N.flatMap(translateGuard.flatMap((x_i) => success(Xbody(x_i)))))...)`
+ * this must be a right-leaning tree, as can be seen intuitively by considering the scope of bound variables:
+ * variables bound by pat_1 must be visible from the function inside the left-most flatMap right up to Xbody all the way on the right
+ * 1) is tricky because translatePattern_i determines the shape of translatePattern_i+1:
+ * zoom in on `translatePattern_1.flatMap(translatePattern_2)` for example -- it actually looks more like:
+ * `translatePattern_1(x_scrut).flatMap((x_1) => {y_i -> x_1._i}translatePattern_2)`
+ *
+ * `x_1` references the result (inside the monad) of the extractor corresponding to `pat_1`,
+ * this result holds the values for the constructor arguments, which translatePattern_1 has extracted
+ * from the object pointed to by `x_scrut`. The `y_i` are the symbols bound by `pat_1` (in order)
+ * in the scope of the remainder of the pattern, and they must thus be replaced by:
+ * - (for 1-ary unapply) x_1
+ * - (for n-ary unapply, n > 1) selection of the i'th tuple component of `x_1`
+ * - (for unapplySeq) x_1.apply(i)
+ *
+ * (these replacements are performed by the substitutions carried by the treemakers)
+ *
+ * Thus, the result type of `translatePattern_i`'s extractor must conform to `M[(T_1,..., T_n)]`.
+ *
+ * Operationally, phase 1) is a foldLeft, since we must consider the depth-first-flattening of
+ * the transformed patterns from left to right. For every pattern ast node, it produces a transformed ast and
+ * a function that will take care of binding and substitution of the next ast (to the right).
+ *
+ */
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
+ translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
+ }
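+
+ // A sketch (names illustrative): for `case Foo(a, b) if g => body`, translateCase yields a list along the lines of
+ //   List(<tree maker for the Foo extractor, binding a and b>, GuardTreeMaker(g), BodyTreeMaker(body, pt))
+ // which is then chained together by the code generator, as described in the comment above.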
+
+ def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
+ // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+ type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
+ @inline def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
+ @inline def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
+
+ val pos = patTree.pos
+
+ def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
+ if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
+ // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
+
+ // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
+ // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+ // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
+ (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) => b setInfo tp } // patmatDebug("changing "+ b +" : "+ b.info +" -> "+ tp);
+
+ // patmatDebug("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
+ // example check: List[Int] <:< ::[Int]
+ // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
+ val (typeTestTreeMaker, patBinderOrCasted) =
+ if (needsTypeTest(patBinder.info.widen, extractor.paramType)) {
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
+ (List(treeMaker), treeMaker.nextBinder)
+ } else (Nil, patBinder)
+
+ withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, pos), extractor.subBindersAndPatterns: _*)
+ }
+
+
+ object MaybeBoundTyped {
+ /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
+ * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
+ * The returned type is the one inferred by inferTypedPattern (`owntype`)
+ *
+ * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that pattern's result)
+ */
+ def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+ // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
+ case Bound(subpatBinder, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
+ case Bind(_, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
+ case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
+ case _ => None
+ }
+ }
+
+ val (treeMakers, subpats) = patTree match {
+ // skip wildcard trees -- no point in checking them
+ case WildcardPattern() => noFurtherSubPats()
+ case UnApply(unfun, args) =>
+ // TODO: check unargs == args
+ // patmatDebug("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
+ translateExtractorPattern(ExtractorCall(unfun, args))
+
+ /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+ The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+ If the case class is monomorphic, then it must conform to the expected type of the pattern,
+ and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
+
+ If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
+ The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
+
+ The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
+ A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
+ **/
+ case Apply(fun, args) =>
+ ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
+ ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
+ noFurtherSubPats()
+ }
+
+ /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+ This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ **/
+ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+ case MaybeBoundTyped(subPatBinder, pt) =>
+ // a typed pattern never has any subtrees
+ noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, glb(List(patBinder.info.widen, pt)).normalize)(pos))
+
+ /** A pattern binder x@p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ **/
+ case Bound(subpatBinder, p) =>
+ // replace subpatBinder by patBinder (as if the Bind was not there)
+ withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
+ // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
+ (patBinder, p)
+ )
+
+ /** 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
+
+ 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+ The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ **/
+ case Literal(Constant(_)) | Ident(_) | Select(_, _) =>
+ noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
+
+ case Alternative(alts) =>
+ noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
+
+ /* TODO: Paul says about a future version: I think this should work, and I always intended to implement it if I can get away with it.
+ case class Foo(x: Int, y: String)
+ case class Bar(z: Int)
+
+ def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
+ */
+
+ case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
+ // patmatDebug("WARNING: Bind tree with unbound symbol "+ patTree)
+ noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
+
+ // case Star(_) | ArrayValue | This => error("stone age pattern relics encountered!")
+
+ case _ =>
+ error("unsupported pattern: "+ patTree +"(a "+ patTree.getClass +")")
+ noFurtherSubPats()
+ }
+
+ treeMakers ++ subpats.flatMap { case (binder, pat) =>
+ translatePattern(binder, pat) // recurse on subpatterns
+ }
+ }
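+
+ // A sketch: for the pattern `Foo(1, x)` (with Foo a case class), the first step yields the product-extractor
+ // tree maker, together with (binder, subpattern) pairs for `1` and `x`; the recursive calls then add an
+ // EqualityTestTreeMaker for the literal `1`, while the variable pattern `x` is a wildcard and adds nothing further.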
+
+ def translateGuard(guard: Tree): List[TreeMaker] =
+ if (guard == EmptyTree) Nil
+ else List(GuardTreeMaker(guard))
+
+ // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, we must not hard-wire lifting into the monad (which is now done by codegen.one),
+ // so that the user can generate failure when needed -- use an implicit conversion to lift into the monad on demand?
+ // to enable this, we probably need to move away from Option to a monad specific to pattern-matching,
+ // so that we can return Options from a match without ambiguity as to whether this indicates failure in the monad or just some result in the monad
+ // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference
+ // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account
+ def translateBody(body: Tree, matchPt: Type): TreeMaker =
+ BodyTreeMaker(body, matchPt)
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ object ExtractorCall {
+ def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
+
+ def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
+
+ // THE PRINCIPLED SLOW PATH -- NOT USED
+ // generate a call to the (synthetically generated) extractor of a case class
+ // NOTE: it's an apply, not a select, since in general an extractor call may have multiple argument lists (including an implicit one)
+ // that we need to preserve, so we supply the scrutinee as Ident(nme.SELECTOR_DUMMY),
+ // and replace that dummy by a reference to the actual binder in translateExtractorPattern
+ def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = {
+ // TODO: can we rework the typer so we don't have to do all this twice?
+ // undo rewrite performed in (5) of adapt
+ val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
+ val origSym = orig.symbol
+ val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe)
+
+ if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) {
+ None
+ } else {
+ // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy
+ // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe)
+ // can't always infer type arguments (pos/t602):
+ /* case class Span[K <: Ordered[K]](low: Option[K]) {
+ override def equals(x: Any): Boolean = x match {
+ case Span((low0 @ _)) if low0 equals low => true
+ }
+ }*/
+ // so... leave undetermined type params floating around if we have to
+ // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala)
+ // (see also run/virtpatmat_alts.scala)
+ val savedUndets = context.undetparams
+ val extractorCall = try {
+ context.undetparams = Nil
+ silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
+ case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError()
+ case _ =>
+ // this fails to resolve overloading properly...
+ // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway
+
+ // patmatDebug("funtpe after = "+ fun.tpe.finalResultType)
+ // patmatDebug("orig: "+(orig, orig.tpe))
+ val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg,
+ // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K])
+ // patmatDebug("tgt = "+ (tgt, tgt.tpe))
+ val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType)
+ // patmatDebug("oper: "+ (oper, oper.tpe))
+ Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway
+ }
+ } finally context.undetparams = savedUndets
+
+ Some(this(extractorCall, args)) // TODO: simplify spliceApply?
+ }
+ }
+ }
+
+ abstract class ExtractorCall(val args: List[Tree]) {
+ val nbSubPats = args.length
+
+ // everything okay, captain?
+ def isTyped : Boolean
+
+ def isSeq: Boolean
+ lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
+
+ // to which type should the previous binder be cast?
+ def paramType : Type
+
+ // binder has been casted to paramType if necessary
+ def treeMaker(binder: Symbol, pos: Position): TreeMaker
+
+ // `subPatBinders` are the variables bound by this pattern in the following patterns
+ // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
+ lazy val subPatBinders = args map {
+ case Bound(b, p) => b
+ case p => freshSym(p.pos, prefix = "p")
+ }
+
+ lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
+ case (b, Bound(_, p)) => (b, p)
+ case bp => bp
+ }
+
+ def subPatTypes: List[Type] =
+ if(isSeq) {
+ val TypeRef(pre, SeqClass, args) = seqTp
+ // do repeated-parameter expansion to match up with the expected number of arguments (in this case, subpatterns)
+ formalTypes(rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args), nbSubPats)
+ } else rawSubPatTypes
+
+ protected def rawSubPatTypes: List[Type]
+
+ protected def seqTp = rawSubPatTypes.last baseType SeqClass
+ protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
+ protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
+ protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
+ protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
+ protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
+ protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require isSeq
+ protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
+ val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
+ val nbIndexingIndices = indexingIndices.length
+
+ // this error-condition has already been checked by checkStarPatOK:
+ // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+ // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ (((1 to firstIndexingBinder) map tupleSel(binder)) ++
+ // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ (indexingIndices map codegen.index(seqTree(binder))) ++
+ // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
+ (if(!lastIsStar) Nil else List(
+ if(nbIndexingIndices == 0) seqTree(binder)
+ else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ }
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ protected def subPatRefs(binder: Symbol): List[Tree] =
+ if (nbSubPats == 0) Nil
+ else if (isSeq) subPatRefsSeq(binder)
+ else ((1 to nbSubPats) map tupleSel(binder)).toList
+
+ protected def lengthGuard(binder: Symbol): Option[Tree] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ checkedLength map { expectedLength => import CODE._
+ // `binder.lengthCompare(expectedLength)`
+ def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
+
+ // the comparison to perform
+ // when the last subpattern is a wildcard-star the expectedLength is but a lower bound
+ // (otherwise equality is required)
+ def compareOp: (Tree, Tree) => Tree =
+ if (lastIsStar) _ INT_>= _
+ else _ INT_== _
+
+ // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
+ (seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
+ }
+
+ def checkedLength: Option[Int] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ if (!isSeq || (expectedLength < minLenToCheck)) None
+ else Some(expectedLength)
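+
+ // A worked example of the Seq handling above (illustrative, assuming an unapplySeq whose result is the Seq
+ // referenced by `binder`): for `case List(a, b, rest @ _*)`, expectedLength = 2, the subpattern references
+ // are roughly binder(0), binder(1) and binder.drop(2), and the length guard is
+ // `binder != null && binder.lengthCompare(2) >= 0` (>= because of the trailing wildcard-star, == otherwise).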
+
+ }
+
+ // TODO: to be called when there's a def unapplyProd(x: T): U
+ // U must have N members _1,..., _N -- the _i are type checked; call their types Ti.
+ //
+ // for now this is only used for case classes -- pretending there's an unapplyProd that's the identity (and we don't call it)
+ class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
+ /*override def equals(x$1: Any): Boolean = ...
+ val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
+ */
+ // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
+ // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
+ // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
+ // patmatDebug("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
+ // patmatDebug("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
+ private def constructorTp = fun.tpe
+
+ def isTyped = fun.isTyped
+
+ // to which type should the previous binder be cast?
+ def paramType = constructorTp.finalResultType
+
+ def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
+ protected def rawSubPatTypes = constructorTp.paramTypes
+
+ // binder has type paramType
+ def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
+ // checks binder ne null before chaining to the next extractor
+ ProductExtractorTreeMaker(binder, lengthGuard(binder), Substitution(subPatBinders, subPatRefs(binder)))
+ }
+
+ // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
+ override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+ // caseFieldAccessors is messed up after typers (reversed, names mangled for non-public fields)
+ // TODO: figure out why...
+ val accessors = binder.caseFieldAccessors
+ // luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them
+ // (need to undo name-mangling, including the sneaky trailing whitespace)
+ val constrParamAccessors = binder.constrParamAccessors
+
+ def indexInCPA(acc: Symbol) =
+ constrParamAccessors indexWhere { orig =>
+ // patmatDebug("compare: "+ (orig, acc, orig.name, acc.name, (acc.name == orig.name), (acc.name startsWith (orig.name append "$"))))
+ val origName = orig.name.toString.trim
+ val accName = acc.name.toString.trim
+ (accName == origName) || (accName startsWith (origName + "$"))
+ }
+
+ // patmatDebug("caseFieldAccessors: "+ (accessors, binder.caseFieldAccessors map indexInCPA))
+ // patmatDebug("constrParamAccessors: "+ constrParamAccessors)
+
+ val accessorsSorted = accessors sortBy indexInCPA
+ if (accessorsSorted isDefinedAt (i-1)) REF(binder) DOT accessorsSorted(i-1)
+ else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
+ }
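+
+ // A sketch (illustrative): for `case class Foo(x: Int, y: String)` and the pattern `Foo(a, b)`,
+ // tupleSel yields `binder.x` and `binder.y` (the case accessors), and the tree maker built above
+ // essentially checks `binder ne null` and substitutes references to `a`/`b` by those selections.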
+
+ override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
+ }
+
+ class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
+
+ def tpe = extractorCall.tpe
+ def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
+ def paramType = tpe.paramTypes.head
+ def resultType = tpe.finalResultType
+ def isSeq = extractorCall.symbol.name == nme.unapplySeq
+
+ def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
+ // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+ val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
+ val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder, Substitution(subPatBinders, subPatRefs(binder)))(resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted)
+ }
+
+ override protected def seqTree(binder: Symbol): Tree =
+ if (firstIndexingBinder == 0) CODE.REF(binder)
+ else super.seqTree(binder)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ override protected def subPatRefs(binder: Symbol): List[Tree] =
+ if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+ else super.subPatRefs(binder)
+
+ protected def spliceApply(binder: Symbol): Tree = {
+ object splice extends Transformer {
+ override def transform(t: Tree) = t match {
+ case Apply(x, List(Ident(nme.SELECTOR_DUMMY))) =>
+ treeCopy.Apply(t, x, List(CODE.REF(binder)))
+ case _ => super.transform(t)
+ }
+ }
+ splice.transform(extractorCallIncludingDummy)
+ }
+
+ // what's the extractor's result type in the monad?
+ // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
+ protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
+ if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
+ else matchMonadResult(resultType)
+ }
+
+ protected lazy val rawSubPatTypes =
+ if (resultInMonad.typeSymbol eq UnitClass) Nil
+ else if(nbSubPats == 1) List(resultInMonad)
+ else getProductArgs(resultInMonad) match {
+ case Nil => List(resultInMonad)
+ case x => x
+ }
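+
+ // For illustration (assuming the Option match monad and a matching number of subpatterns):
+ //   def unapply(x: X): Boolean           => resultInMonad = Unit,     rawSubPatTypes = Nil
+ //   def unapply(x: X): Option[T]         => resultInMonad = T,        rawSubPatTypes = List(T)
+ //   def unapply(x: X): Option[(T1, T2)]  => resultInMonad = (T1, T2), rawSubPatTypes = List(T1, T2)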
+
+ override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
+ }
+
+ /** A conservative approximation of which patterns do not discern anything.
+ * They are discarded during the translation.
+ */
+ object WildcardPattern {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Star(WildcardPattern()) => true
+ case x: Ident => treeInfo.isVarPattern(x)
+ case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
+ case EmptyTree => true
+ case _ => false
+ }
+ }
+
+ object Bound {
+ def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
+ case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
+ Some((t.symbol, p))
+ case _ => None
+ }
+ }
+ }
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// substitution
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TypedSubstitution extends MatchMonadInterface {
+ object Substitution {
+ def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
+ // requires sameLength(from, to)
+ def apply(from: List[Symbol], to: List[Tree]) =
+ if (from nonEmpty) new Substitution(from, to) else EmptySubstitution
+ }
+
+ class Substitution(val from: List[Symbol], val to: List[Tree]) {
+ // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
+ // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
+ def apply(tree: Tree): Tree = {
+ // according to -Ystatistics 10% of translateMatch's time is spent in this method...
+ // since about half of the typedSubst calls end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
+ if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
+ else (new Transformer {
+ @inline private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
+ if (origTp == null || origTp == NoType) to
+ // important: only type when actually substing and when original tree was typed
+ // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
+ else typer.typed(to, EXPRmode, WildcardType)
+
+ override def transform(tree: Tree): Tree = {
+ def subst(from: List[Symbol], to: List[Tree]): Tree =
+ if (from.isEmpty) tree
+ else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate, tree.tpe)
+ else subst(from.tail, to.tail)
+
+ tree match {
+ case Ident(_) => subst(from, to)
+ case _ => super.transform(tree)
+ }
+ }
+ }).transform(tree)
+ }
+
+
+ // the substitution that chains `other` before `this` substitution
+ // forall t: Tree. this(other(t)) == (this >> other)(t)
+ def >>(other: Substitution): Substitution = {
+ val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
+ new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
+ }
+ override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")")
+ }
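+
+ // A small worked example of `>>` (illustrative): with s1 = Substitution(x, REF(y)) and s2 = Substitution(a, REF(x)),
+ // `s1 >> s2` maps both `a` and `x` to `y`, so that s1(s2(t)) == (s1 >> s2)(t) as stated above.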
+
+ object EmptySubstitution extends Substitution(Nil, Nil) {
+ override def apply(tree: Tree): Tree = tree
+ override def >>(other: Substitution): Substitution = other
+ }
+ }
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// the making of the trees
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TreeMakers extends TypedSubstitution { self: CodegenCore =>
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) =
+ (cases, Nil)
+
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] =
+ None
+
+ // for catch (no need to customize match failure)
+ def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
+ None
+
+ abstract class TreeMaker {
+ def pos: Position
+
+ /** captures the scope and the value of the bindings in patterns
+ * it matters *when* the substitution happens (we can't accumulate the substitutions and apply them all at once after the full matcher has been constructed)
+ */
+ def substitution: Substitution =
+ if (currSub eq null) localSubstitution
+ else currSub
+
+ protected def localSubstitution: Substitution
+
+ private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ if (currSub ne null) {
+ // patmatDebug("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+ Thread.dumpStack()
+ }
+ else currSub = outerSubst >> substitution
+ }
+ private[this] var currSub: Substitution = null
+
+ // build Tree that chains `next` after the current extractor
+ def chainBefore(next: Tree)(casegen: Casegen): Tree
+ }
+
+ trait NoNewBinders extends TreeMaker {
+ protected val localSubstitution: Substitution = EmptySubstitution
+ }
+
+ case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
+ def pos = tree.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
+ }
+
+ case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
+ def pos = body.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
+ atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
+ override def toString = "B"+(body, matchPt)
+ }
+
+ case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
+ val pos = NoPosition
+
+ val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
+ override def toString = "S"+ localSubstitution
+ }
+
+ abstract class FunTreeMaker extends TreeMaker {
+ val nextBinder: Symbol
+ def pos = nextBinder.pos
+ }
+
+ abstract class CondTreeMaker extends FunTreeMaker {
+ val prevBinder: Symbol
+ val nextBinderTp: Type
+ val cond: Tree
+ val res: Tree
+
+ lazy val nextBinder = freshSym(pos, nextBinderTp)
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree =
+ atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
+ }
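+
+ // A sketch of what the optimizing Casegen produces for flatMapCond (illustrative):
+ //   if (cond) { val nextBinder = res; <next> } else zero
+ // where `zero` transfers control to the next case.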
+
+ /**
+ * Make a TreeMaker that will result in an extractor call specified by `extractor`.
+ * The next TreeMaker (here, we don't know which it'll be) is chained after this one by flatMap'ing
+ * a function with binder `nextBinder` over our extractor's result;
+ * the function's body is determined by the next TreeMaker.
+ * In this function's body, and in all the subsequent ones, references to the symbols in `from` will be replaced by the corresponding tree in `to`.
+ */
+ case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean, val checkedLength: Option[Int], val prevBinder: Symbol) extends FunTreeMaker {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val condAndNext = extraCond map (casegen.ifThenElseZero(_, next)) getOrElse next
+ atPos(extractor.pos)(
+ if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, substitution(condAndNext))
+ else casegen.flatMap(extractor, nextBinder, substitution(condAndNext))
+ )
+ }
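+
+ // A sketch of the resulting shape under the optimizing Casegen (illustrative):
+ //   val o = <extractor call>; if (!o.isEmpty) <next, with subpattern references selecting on o.get> else zero
+ // and, for a Boolean extractor, roughly: if (<extractor call>) <next> else zero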
+
+ override def toString = "X"+(extractor, nextBinder.name)
+ }
+
+ // TODO: allow user-defined unapplyProduct
+ case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: Substitution) extends FunTreeMaker { import CODE._
+ val nextBinder = prevBinder // just passing through
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val nullCheck = REF(prevBinder) OBJ_NE NULL
+ val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
+ casegen.ifThenElseZero(cond, substitution(next))
+ }
+
+ override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
+ }
+
+ // typetag-based tests are inserted by the type checker
+ def needsTypeTest(tp: Type, pt: Type): Boolean = !(tp <:< pt)
+
+ object TypeTestTreeMaker {
+ // factored out so that we can consistently generate other representations of the tree that implements the test
+ // (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
+ trait TypeTestCondStrategy {
+ type Result
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result
+ // TODO: can probably always widen
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result
+ def nonNullTest(testedBinder: Symbol): Result
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result
+ def eqTest(pat: Tree, testedBinder: Symbol): Result
+ def and(a: Result, b: Result): Result
+ }
+
+ object treeCondStrategy extends TypeTestCondStrategy { import CODE._
+ type Result = Tree
+
+ def and(a: Result, b: Result): Result = a AND b
+ def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
+ def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
+ def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
+ def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
+ val expectedOuter = expectedTp.prefix match {
+ case ThisType(clazz) => THIS(clazz)
+ case pre => REF(pre.prefix, pre.termSymbol)
+ }
+
+ // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+ // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
+ val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
+
+ (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
+ }
+ }
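+
+ // Schematic examples of the trees produced by treeCondStrategy (illustrative):
+ //   typeTest(b, String)   ~> b.isInstanceOf[String]
+ //   nonNullTest(b)        ~> b ne null
+ //   eqTest(p, b)          ~> b eq p
+ //   outerTest(b, p.C)     ~> b.asInstanceOf[p.C].<outer> eq p   (the <outer> call is rewritten by ExplicitOuter)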
+
+ object pureTypeTestChecker extends TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = true
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = false
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false
+ def and(a: Result, b: Result): Result = false // we never `and` two type tests, so a conjunction must include at least one non-type-test, i.e. a false
+ }
+ }
+
+ /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
+ *
+ * Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
+ - A reference to a class C, p.C, or T#C.
+ This type pattern matches any non-null instance of the given class.
+ Note that the prefix of the class, if it is given, is relevant for determining class instances.
+ For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
+ The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
+
+ - A singleton type p.type.
+ This type pattern matches only the value denoted by the path p
+ (that is, a pattern match involved a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
+ // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
+
+ - A compound type pattern T1 with ... with Tn where each Ti is a type pattern.
+ This type pattern matches all values that are matched by each of the type patterns Ti.
+
+ - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
+ This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
+ The bounds or alias type of these type variables are determined as described in (§8.3).
+
+ - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
+ This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
+ **/
+ case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
+ import TypeTestTreeMaker._
+ // patmatDebug("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+
+ lazy val outerTestNeeded = (
+ !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
+ && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
+
+ // the logic to generate the run-time test that follows from the fact that
+ // a `prevBinder` is expected to have type `expectedTp`
+ // the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees
+ // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
+ // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
+ def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
+ import cs._
+
+ def default =
+ // do type test first to ensure we won't select outer on null
+ if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
+ else typeTest(testedBinder, expectedTp)
+
+ // propagate expected type
+ @inline def expTp(t: Tree): t.type = t setType expectedTp
+
+ // true when called to type-test the argument to an extractor
+ // don't do any fancy equality checking, just test the type
+ if (extractorArgTypeTest) default
+ else expectedTp match {
+ // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
+ // this implies sym.isStable
+ case SingleType(_, sym) => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ // must use == to support e.g. List() == Nil
+ case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
+ => eqTest(expTp(CODE.NULL), testedBinder)
+ case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
+ case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
+
+ // TODO: verify that we don't need to special-case Array
+ // I think it's okay:
+ // - the isInstanceOf test includes a test for the element type
+ // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
+ case _ if (expectedTp <:< AnyRefClass.tpe) && !needsTypeTest(testedBinder.info.widen, expectedTp) =>
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+
+ case _ => default
+ }
+ }
+
+ val cond = renderCondition(treeCondStrategy)
+ val res = codegen._asInstanceOf(testedBinder, nextBinderTp)
+
+ // is this purely a type test, i.e. no outer check, no equality tests (used in switch emission)
+ def isPureTypeTest = renderCondition(pureTypeTestChecker)
+
+ override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
+ }
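+
+ // illustrative sketch of renderCondition with treeCondStrategy, for a tested binder x1:
+ //   expectedTp = String                        ~> x1.isInstanceOf[String] (plus the outer test, if needed)
+ //   expectedTp = ConstantType(Constant(1))     ~> 1 == x1
+ //   expectedTp = ConstantType(Constant(null))  ~> x1 eq null (when x1's type conforms to AnyRef)
+ //   expectedTp = q.type (a SingleType)         ~> q == x1 && x1.isInstanceOf[<q.type widened>]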
+
+ // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
+ case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, override val pos: Position) extends CondTreeMaker {
+ val nextBinderTp = prevBinder.info.widen
+
+ // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
+ // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
+ val cond = codegen._equals(patTree, prevBinder)
+ val res = CODE.REF(prevBinder)
+ override def toString = "ET"+(prevBinder.name, patTree)
+ }
+
+ case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
+ // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
+
+ override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ super.incorporateOuterSubstitution(outerSubst)
+ altss = altss map (alts => propagateSubstitution(alts, substitution))
+ }
+
+ def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
+ atPos(pos){
+ // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
+ // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
+ val combinedAlts = altss map (altTreeMakers =>
+ ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE_typed)))(casegen))
+ )
+
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => FALSE_typed))
+ codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
+ }
+ }
+ }
+
+ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
+ val pos = guardTree.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(substitution(guardTree), next)
+ override def toString = "G("+ guardTree +")"
+ }
+
+ // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
+ // requires propagateSubstitution(treeMakers) has been called
+ def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree =
+ treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
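+ // e.g., for treeMakers List(tm1, tm2, body), this builds
+ //   tm1.chainBefore(tm2.chainBefore(body.chainBefore(EmptyTree)(casegen))(casegen))(casegen)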
+
+
+ def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
+
+ // a foldLeft to accumulate the localSubstitution left-to-right
+ // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution
+ def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
+ var accumSubst: Substitution = initial
+ treeMakers foreach { maker =>
+ maker incorporateOuterSubstitution accumSubst
+ accumSubst = maker.substitution
+ }
+ removeSubstOnly(treeMakers)
+ }
+
+ // calls propagateSubstitution on the treemakers
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = {
+ // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution))
+ combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
+ }
+
+ def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
+ fixerUpper(owner, scrut.pos){
+ val ptDefined = if (isFullyDefined(pt)) pt else NoType
+ def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
+ // patmatDebug("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
+
+ def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation SwitchClass
+ def isUncheckedAnnotation(tpe: Type) = tpe hasAnnotation UncheckedClass
+
+ val (unchecked, requireSwitch) =
+ if (settings.XnoPatmatAnalysis.value) (true, false)
+ else scrut match {
+ case Typed(_, tpt) =>
+ (isUncheckedAnnotation(tpt.tpe),
+ // matches with two or fewer cases need not apply for switchiness (if-then-else will do)
+ isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0)
+ case _ =>
+ (false, false)
+ }
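+
+ // e.g., a match on a scrutinee annotated with the standard @switch annotation, such as
+ //   (n: @switch) match { case 1 => ...; case 2 => ...; case 3 => ... }
+ // has more than two cases, so requireSwitch is true and a warning is issued below if no switch can be emitted;
+ // a two-case @switch match is compiled to if-then-else without complaint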
+
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride).getOrElse{
+ if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
+
+ if (casesNoSubstOnly nonEmpty) {
+ // before optimizing, check casesNoSubstOnly for presence of a default case,
+ // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ // exhaustivity and reachability must be checked before optimization as well
+ // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
+ // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
+ // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
+ val synthCatchAll =
+ if (casesNoSubstOnly.nonEmpty && {
+ val nonTrivLast = casesNoSubstOnly.last
+ nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ }) None
+ else matchFailGen
+
+ val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt, unchecked)
+
+ val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
+
+ if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
+ } else {
+ codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
+ }
+ }
+ }
+
+ // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
+ // TODO: assign more fine-grained positions
+ // fixes symbol nesting, assigns positions
+ protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
+ currentOwner = origOwner
+
+ override def traverse(t: Tree) {
+ if (t != EmptyTree && t.pos == NoPosition) {
+ t.setPos(pos)
+ }
+ t match {
+ case Function(_, _) if t.symbol == NoSymbol =>
+ t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
+ // patmatDebug("new symbol for "+ (t, t.symbol.ownerChain))
+ case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
+ // patmatDebug("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ t.symbol.owner = currentOwner
+ case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
+ // patmatDebug("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
+ if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree??
+ assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner, d.symbol.lazyAccessor)
+ d.symbol.lazyAccessor.owner = currentOwner
+ }
+ if(d.symbol.moduleClass ne NoSymbol)
+ d.symbol.moduleClass.owner = currentOwner
+
+ d.symbol.owner = currentOwner
+ // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
+ // patmatDebug("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ case _ =>
+ }
+ super.traverse(t)
+ }
+
+ // override def apply
+ // patmatDebug("before fixerupper: "+ xTree)
+ // currentRun.trackerFactory.snapshot()
+ // patmatDebug("after fixerupper")
+ // currentRun.trackerFactory.snapshot()
+ }
+ }
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// generate actual trees
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait CodegenCore extends MatchMonadInterface {
+ private var ctr = 0
+ def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)}
+
+ // assert(owner ne null); assert(owner ne NoSymbol)
+ def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
+ NoSymbol.newTermSymbol(freshName(prefix), pos) setInfo tp
+
+ // codegen relevant to the structure of the translation (how extractors are combined)
+ trait AbsCodegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree
+
+ // local / context-free
+ def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def _equals(checker: Tree, binder: Symbol): Tree
+ def _isInstanceOf(b: Symbol, tp: Type): Tree
+ def and(a: Tree, b: Tree): Tree
+ def drop(tgt: Tree)(n: Int): Tree
+ def index(tgt: Tree)(i: Int): Tree
+ def mkZero(tp: Type): Tree
+ def tupleSel(binder: Symbol)(i: Int): Tree
+ }
+
+ // structure
+ trait Casegen extends AbsCodegen { import CODE._
+ def one(res: Tree): Tree
+
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
+ def flatMapGuard(cond: Tree, next: Tree): Tree
+ def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
+ protected def zero: Tree
+ }
+
+ def codegen: AbsCodegen
+
+ def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt))
+
+ abstract class CommonCodegen extends AbsCodegen { import CODE._
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+ def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
+ def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def and(a: Tree, b: Tree): Tree = a AND b
+
+ // drop annotations generated by CPS plugin etc, since its annotation checker rejects T @cps[U] <: Any
+ // let's assume for now annotations don't affect casts, drop them there, and bring them back using the outer Typed tree
+ private def mkCast(t: Tree, tp: Type) =
+ Typed(gen.mkAsInstanceOf(t, tp.withoutAnnotations, true, false), TypeTree() setType tp)
+
+ // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise, as neither tp nor pt need contain an abstract type; we're just casting wildly)
+ def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tp)) t else mkCast(t, tp)
+ def _asInstanceOf(b: Symbol, tp: Type): Tree = if (typesConform(b.info, tp)) REF(b) else mkCast(REF(b), tp)
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
+ // if (typesConform(b.info, tpX)) { patmatDebug("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
+
+ // duplicated out of frustration with cast generation
+ def mkZero(tp: Type): Tree = {
+ tp.typeSymbol match {
+ case UnitClass => Literal(Constant())
+ case BooleanClass => Literal(Constant(false))
+ case FloatClass => Literal(Constant(0.0f))
+ case DoubleClass => Literal(Constant(0.0d))
+ case ByteClass => Literal(Constant(0.toByte))
+ case ShortClass => Literal(Constant(0.toShort))
+ case IntClass => Literal(Constant(0))
+ case LongClass => Literal(Constant(0L))
+ case CharClass => Literal(Constant(0.toChar))
+ case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ }
+ }
+ }
+ }
+
+ trait PureMatchMonadInterface extends MatchMonadInterface {
+ val matchStrategy: Tree
+
+ def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
+ def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
+ protected def matchMonadSym = oneSig.finalResultType.typeSymbol
+
+ import CODE._
+ def _match(n: Name): SelectStart = matchStrategy DOT n
+
+ private lazy val oneSig: Type =
+ typer.typed(_match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
+ }
+
+ trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
+ def codegen: AbsCodegen = pureCodegen
+
+ object pureCodegen extends CommonCodegen with Casegen { import CODE._
+ //// methods in MatchingStrategy (the monad companion) -- used directly in translation
+ // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
+ // TODO: consider catchAll, or virtualized matching will break in exception handlers
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse))
+
+ // __match.one(`res`)
+ def one(res: Tree): Tree = (_match(vpmName.one)) (res)
+ // __match.zero
+ protected def zero: Tree = _match(vpmName.zero)
+ // __match.guard(`c`, `then`)
+ def guard(c: Tree, then: Tree): Tree = _match(vpmName.guard) APPLY (c, then)
+
+ //// methods in the monad instance -- used directly in translation
+ // `prev`.flatMap(`b` => `next`)
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
+ // `thisCase`.orElse(`elseCase`)
+ def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
+ // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
+ // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
+ }
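+
+ // illustrative sketch, putting the schematic comments above together for a hypothetical single-case match:
+ //   x match { case s: String => s.length }
+ // translates (roughly) to
+ //   __match.runOrElse(x)(x1 =>
+ //     __match.guard(x1.isInstanceOf[String], x1.asInstanceOf[String]).flatMap(s => __match.one(s.length)))
+ // with additional cases chained via orElse by typedOrElse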
+ }
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// OPTIMIZATIONS
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// decisions, decisions
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ trait TreeMakerApproximation extends TreeMakers with Prettification{ self: CodegenCore =>
+ object Test {
+ var currId = 0
+ }
+ case class Test(cond: Cond, treeMaker: TreeMaker) {
+ // def <:<(other: Test) = cond <:< other.cond
+ // def andThen_: (prev: List[Test]): List[Test] =
+ // prev.filterNot(this <:< _) :+ this
+
+ // private val reusedBy = new collection.mutable.HashSet[Test]
+ var reuses: Option[Test] = None
+ def registerReuseBy(later: Test): Unit = {
+ assert(later.reuses.isEmpty, later.reuses)
+ // reusedBy += later
+ later.reuses = Some(this)
+ }
+
+ val id = { Test.currId += 1; Test.currId}
+ override def toString =
+ "T"+ id + "C("+ cond +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
+ }
+
+ object Cond {
+ // def refines(self: Cond, other: Cond): Boolean = (self, other) match {
+ // case (Bottom, _) => true
+ // case (Havoc , _) => true
+ // case (_ , Top) => true
+ // case (_ , _) => false
+ // }
+ var currId = 0
+ }
+
+ abstract class Cond {
+ // def testedPath: Tree
+ // def <:<(other: Cond) = Cond.refines(this, other)
+
+ val id = { Cond.currId += 1; Cond.currId}
+ }
+
+ // does not contribute any knowledge
+ case object Top extends Cond {override def toString = "T"}
+
+
+ // takes away knowledge. e.g., a user-defined guard
+ case object Havoc extends Cond {override def toString = "_|_"}
+
+ // we know everything! everything!
+ // this either means the case is unreachable,
+ // or that it is statically known to be picked -- at this point in the decision tree --> no point in emitting further alternatives
+ // case object Bottom extends Cond
+
+
+ case class AndCond(a: Cond, b: Cond) extends Cond {override def toString = a +"/\\"+ b}
+ case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
+
+ object EqualityCond {
+ private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond]
+ def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs))
+ def unapply(c: EqualityCond) = Some(c.testedPath, c.rhs)
+ }
+ class EqualityCond(val testedPath: Tree, val rhs: Tree) extends Cond {
+ // def negation = TopCond // inequality doesn't teach us anything
+ // do simplification when we know enough about the tree statically:
+ // - collapse equal trees
+ // - accumulate tests when (in)equality not known statically
+ // - become bottom when we statically know this can never match
+
+ override def toString = testedPath +" == "+ rhs +"#"+ id
+ }
+
+ object NonNullCond {
+ private val uniques = new collection.mutable.HashMap[Tree, NonNullCond]
+ def apply(testedPath: Tree): NonNullCond = uniques getOrElseUpdate(testedPath, new NonNullCond(testedPath))
+ def unapply(c: NonNullCond) = Some(c.testedPath)
+ }
+ class NonNullCond(val testedPath: Tree) extends Cond {
+ override def toString = testedPath +" ne null " +"#"+ id
+ }
+
+ object TypeCond {
+ private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond]
+ def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
+ def unapply(c: TypeCond) = Some(c.testedPath, c.pt)
+ }
+ class TypeCond(val testedPath: Tree, val pt: Type) extends Cond {
+ // def negation = TopCond // inequality doesn't teach us anything
+ // do simplification when we know enough about the tree statically:
+ // - collapse equal trees
+ // - accumulate tests when (in)equality not known statically
+ // - become bottom when we statically know this can never match
+ override def toString = testedPath +" : "+ pt +"#"+ id
+ }
+
+// class OuterEqCond(val testedPath: Tree, val expectedType: Type) extends Cond {
+// val expectedOuter = expectedTp.prefix match {
+// case ThisType(clazz) => THIS(clazz)
+// case pre => REF(pre.prefix, pre.termSymbol)
+// }
+//
+// // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+// // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
+// val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
+//
+// (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
+// }
+
+
+ // returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
+ abstract class TreeMakersToConds(val root: Symbol) {
+ def discard() = {
+ pointsToBound.clear()
+ trees.clear()
+ normalize = EmptySubstitution
+ accumSubst = EmptySubstitution
+ }
+ // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
+ private val pointsToBound = collection.mutable.HashSet(root)
+ private val trees = collection.mutable.HashSet.empty[Tree]
+
+ // the substitution that renames variables to variables in pointsToBound
+ private var normalize: Substitution = EmptySubstitution
+
+ // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
+ // in the end, instead of having x1, x1.hd, x2, x2.hd, ... flying around,
+ // we want something like x1, x1.hd, x1.hd.tl, x1.hd.tl.hd, so that we can easily recognize when
+ // we're testing the same variable
+ // TODO check:
+ // pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty
+ private var accumSubst: Substitution = EmptySubstitution
+
+ private def updateSubstitution(subst: Substitution) = {
+ // find part of substitution that replaces bound symbols by new symbols, and reverse that part
+ // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
+ val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
+ case (f, t) =>
+ t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+ }
+ val (boundFrom, boundTo) = boundSubst.unzip
+ val (unboundFrom, unboundTo) = unboundSubst.unzip
+
+ // reverse substitution that would otherwise replace a variable we already encountered by a new variable
+ // NOTE: this forgets the more precise type we have for these later variables, but that's probably okay
+ normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
+ // patmatDebug("normalize: "+ normalize)
+
+ val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
+ pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
+ // patmatDebug("pointsToBound: "+ pointsToBound)
+
+ accumSubst >>= okSubst
+ // patmatDebug("accumSubst: "+ accumSubst)
+ }
+
+
+ // TODO: improve, e.g., for constants
+ def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
+ case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
+ case _ => false
+ })
+
+ // hashconsing trees (modulo value-equality)
+ def unique(t: Tree, tpOverride: Type = NoType): Tree =
+ trees find (a => a.equalsStructure0(t)(sameValue)) match {
+ case Some(orig) => orig // patmatDebug("unique: "+ (t eq orig, orig));
+ case _ =>
+ trees += t
+ if (tpOverride != NoType) t setType tpOverride
+ else t
+ }
+
+ def uniqueTp(tp: Type): Type = tp match {
+ // typerefs etc are already hashconsed
+ case _ : UniqueType => tp
+ case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
+ case _ => tp
+ }
+
+ // produce the unique tree used to refer to this binder
+ // the type of the binder passed to the first invocation
+ // determines the type of the tree that'll be returned for that binder from then on
+ final def binderToUniqueTree(b: Symbol) =
+ unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
+
+ @inline def /\(conds: Iterable[Cond]) = if (conds.isEmpty) Top else conds.reduceLeft(AndCond(_, _))
+ @inline def \/(conds: Iterable[Cond]) = if (conds.isEmpty) Havoc else conds.reduceLeft(OrCond(_, _))
+
+ // note that the sequencing of operations is important: must visit in same order as match execution
+ // binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
+ final protected def treeMakerToCond(tm: TreeMaker, condMaker: CondMaker): Cond = {
+ updateSubstitution(tm.substitution)
+ condMaker(tm)(treeMakerToCond(_, condMaker))
+ }
+
+ final protected def treeMakerToCondNoSubst(tm: TreeMaker, condMaker: CondMaker): Cond =
+ condMaker(tm)(treeMakerToCondNoSubst(_, condMaker))
+
+ type CondMaker = TreeMaker => (TreeMaker => Cond) => Cond
+ final def makeCond(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = {
+ tm match {
+ case ttm@TypeTestTreeMaker(prevBinder, testedBinder, pt, _) =>
+ object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
+ type Result = Cond
+ def and(a: Result, b: Result) = AndCond(a, b)
+ def outerTest(testedBinder: Symbol, expectedTp: Type) = Top // TODO OuterEqCond(testedBinder, expectedType)
+ def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T)
+ val p = binderToUniqueTree(b); AndCond(NonNullCond(p), TypeCond(p, uniqueTp(pt)))
+ }
+ def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder))
+ def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat))
+ def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ }
+ ttm.renderCondition(condStrategy)
+ case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
+ case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map recurse)))
+ case ProductExtractorTreeMaker(testedBinder, None, subst) => NonNullCond(binderToUniqueTree(testedBinder))
+ case ExtractorTreeMaker(_, _, _, _)
+ | GuardTreeMaker(_)
+ | ProductExtractorTreeMaker(_, Some(_), _)
+ | BodyTreeMaker(_, _) => Havoc
+ case SubstOnlyTreeMaker(_, _) => Top
+ }
+ }
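+
+ // illustrative sketch: for `x match { case s: String if p(s) => ... }` the tree makers of the case
+ // are approximated (via makeCond) roughly as
+ //   NonNullCond(x1) /\ TypeCond(x1, String)   -- from the TypeTestTreeMaker
+ //   Havoc                                     -- from the GuardTreeMaker (a guard takes away all knowledge)
+ //   Havoc                                     -- from the BodyTreeMaker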
+
+ final def approximateMatch(cases: List[List[TreeMaker]], condMaker: CondMaker = makeCond): List[List[Test]] = cases.map { _ map (tm => Test(treeMakerToCond(tm, condMaker), tm)) }
+
+ final def approximateMatchAgain(cases: List[List[TreeMaker]], condMaker: CondMaker = makeCond): List[List[Test]] = cases.map { _ map (tm => Test(treeMakerToCondNoSubst(tm, condMaker), tm)) }
+ }
+
+ def approximateMatch(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] = {
+ object approximator extends TreeMakersToConds(root)
+ approximator.approximateMatch(cases)
+ }
+
+ def showTreeMakers(cases: List[List[TreeMaker]]) = {
+ patmatDebug("treeMakers:")
+ patmatDebug(alignAcrossRows(cases, ">>"))
+ }
+
+ def showTests(testss: List[List[Test]]) = {
+ patmatDebug("tests: ")
+ patmatDebug(alignAcrossRows(testss, "&"))
+ }
+ }
+
+ trait Prettification {
+ private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max
+
+ def alignedColumns(cols: Seq[AnyRef]): Seq[String] = {
+ def toString(x: AnyRef) = if (x eq null) "" else x.toString
+ if (cols.isEmpty || cols.tails.isEmpty) cols map toString
+ else {
+ val (colStrs, colLens) = cols map {c => val s = toString(c); (s, s.length)} unzip
+ val maxLen = max(colLens)
+ val avgLen = colLens.sum/colLens.length
+ val goalLen = maxLen min avgLen*2
+ def pad(s: String) = {
+ val toAdd = ((goalLen - s.length) max 0) + 2
+ (" " * (toAdd/2)) + s + (" " * (toAdd/2 + (toAdd%2)))
+ }
+ cols map (x => pad(toString(x)))
+ }
+ }
+ def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = {
+ val maxLen = max(xss map (_.length))
+ val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null))
+ padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep)
+ }
+ }
+
+ // http://www.cis.upenn.edu/~cis510/tcl/chap3.pdf
+ // http://users.encs.concordia.ca/~ta_ahmed/ms_thesis.pdf
+ trait Logic extends Prettification {
+ class Prop
+ case class Eq(p: Var, q: Const) extends Prop
+
+ type Const <: AbsConst
+
+ trait AbsConst {
+ def implies(other: Const): Boolean
+ def excludes(other: Const): Boolean
+ }
+
+ type Var <: AbsVar
+
+ trait AbsVar {
+ // indicate we may later require a prop for V = C
+ def registerEquality(c: Const): Unit
+
+ // indicates null is part of the domain
+ def considerNull: Unit
+
+ // compute the domain and return it (call considerNull first!)
+ def domainSyms: Option[Set[Sym]]
+
+ // for this var, call it V, turn V = C into the equivalent proposition in boolean logic
+ // registerEquality(c) must have been called prior to this call
+ // in fact, all equalities relevant to this variable must have been registered
+ def propForEqualsTo(c: Const): Prop
+
+ // populated by registerEquality
+ // once equalitySyms has been called, must not call registerEquality anymore
+ def equalitySyms: List[Sym]
+ }
+
+ // would be nice to statically check whether a prop is equational or pure,
+ // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop)
+ case class And(a: Prop, b: Prop) extends Prop
+ case class Or(a: Prop, b: Prop) extends Prop
+ case class Not(a: Prop) extends Prop
+
+ case object True extends Prop
+ case object False extends Prop
+
+ // symbols are propositions
+ case class Sym(val variable: Var, val const: Const) extends Prop {
+ private[this] val id = nextSymId
+ override def toString = variable +"="+ const +"#"+ id
+ }
+ private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+
+
+ @inline def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
+ @inline def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
+
+
+ trait PropTraverser {
+ def apply(x: Prop): Unit = x match {
+ case And(a, b) => apply(a); apply(b)
+ case Or(a, b) => apply(a); apply(b)
+ case Not(a) => apply(a)
+ case Eq(a, b) => applyVar(a); applyConst(b)
+ case _ =>
+ }
+ def applyVar(x: Var): Unit = {}
+ def applyConst(x: Const): Unit = {}
+ }
+
+ def gatherVariables(p: Prop): Set[Var] = {
+ val vars = new HashSet[Var]()
+ (new PropTraverser {
+ override def applyVar(v: Var) = vars += v
+ })(p)
+ vars.toSet
+ }
+
+ trait PropMap {
+ def apply(x: Prop): Prop = x match { // TODO: mapConserve
+ case And(a, b) => And(apply(a), apply(b))
+ case Or(a, b) => Or(apply(a), apply(b))
+ case Not(a) => Not(apply(a))
+ case p => p
+ }
+ }
+
+ // convert finite domain propositional logic with subtyping to pure boolean propositional logic
+ // a type test or a value equality test is modelled as a variable being equal to some constant
+ // a variable V may be assigned multiple constants, as long as they do not contradict each other
+ // according to subtyping, e.g., V = ConstantType(1) and V = Int are valid assignments
+ // we rewrite V = C to a fresh boolean symbol, and model what we know about the variable's domain
+ // in a prelude (the equality axioms)
+ // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiable types in its domain
+ // 2. for each variable V in props, and each constant C it is compared to,
+ // compute which assignments imply each other (as in the example above: V = 1 implies V = Int)
+ // and which assignments are mutually exclusive (V = String implies -(V = Int))
+ //
+ // note that this is a conservative approximation: V = Constant(A) and V = Constant(B)
+ // are considered mutually exclusive (and thus both cases are considered reachable in {case A => case B =>}),
+ // even though A may be equal to B (and thus the second case is not "dynamically reachable")
+ //
+ // TODO: for V1 representing x1 and V2 standing for x1.head, encode that
+ // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
+ def removeVarEq(props: List[Prop], considerNull: Boolean = false): (Prop, List[Prop]) = {
+ val start = startTimer(patmatAnaVarEq)
+
+ val vars = new collection.mutable.HashSet[Var]
+
+ object gatherEqualities extends PropTraverser {
+ override def apply(p: Prop) = p match {
+ case Eq(v, c) =>
+ vars += v
+ v.registerEquality(c)
+ case _ => super.apply(p)
+ }
+ }
+
+ object rewriteEqualsToProp extends PropMap {
+ override def apply(p: Prop) = p match {
+ case Eq(v, c) => v.propForEqualsTo(c)
+ case _ => super.apply(p)
+ }
+ }
+
+ props foreach gatherEqualities.apply
+ if (considerNull) vars foreach (_.considerNull)
+
+ val pure = props map rewriteEqualsToProp.apply
+
+ var eqAxioms: Prop = True
+ @inline def addAxiom(p: Prop) = eqAxioms = And(eqAxioms, p)
+
+ case class ExcludedPair(a: Const, b: Const) {
+ override def equals(o: Any) = o match {
+ case ExcludedPair(aa, bb) => (a == aa && b == bb) || (a == bb && b == aa)
+ case _ => false
+ }
+ // make ExcludedPair(a, b).hashCode == ExcludedPair(b, a).hashCode
+ override def hashCode = a.hashCode ^ b.hashCode
+ }
+
+ // patmatDebug("vars: "+ vars)
+ vars.foreach { v =>
+ val excludedPair = new collection.mutable.HashSet[ExcludedPair]
+
+ // if v.domainSyms.isEmpty, we must consider the domain to be infinite
+ // otherwise, since the domain fully partitions the type of the value,
+ // exactly one of the types (and whatever it implies, imposed separately) must be chosen
+ // consider X ::= A | B | C, and A => B
+ // coverage is formulated as: A \/ B \/ C and the implications are
+ v.domainSyms foreach { dsyms => addAxiom(\/(dsyms)) }
+
+ val syms = v.equalitySyms
+ // patmatDebug("eqSyms "+(v, syms))
+ syms foreach { sym =>
+ // if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
+ // (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
+ val todo = syms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
+ val (excluded, notExcluded) = todo partition (b => sym.const.excludes(b.const))
+ val implied = notExcluded filter (b => sym.const.implies(b.const))
+ // patmatDebug("implications: "+ (sym.const, excluded, implied, syms))
+
+ // when this symbol is true, what must hold...
+ implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
+
+ // ... and what must not?
+ excluded foreach {excludedSym =>
+ excludedPair += ExcludedPair(sym.const, excludedSym.const)
+ addAxiom(Or(Not(sym), Not(excludedSym)))
+ }
+ }
+ }
+
+ // patmatDebug("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
+ // patmatDebug("pure:\n"+ cnfString(eqFreePropToSolvable(pure)))
+
+ stopTimer(patmatAnaVarEq, start)
+
+ (eqAxioms, pure)
+ }
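+
+ // illustrative sketch: if a variable V (a scrutinee of type Any, say) is compared to the constants
+ // 1, Int and String, the loop above emits (writing V=C for the corresponding Sym):
+ //   -(V=1)   \/ (V=Int)      -- V = 1 implies V = Int
+ //   -(V=1)   \/ -(V=String)  -- V = 1 excludes V = String
+ //   -(V=Int) \/ -(V=String)  -- V = Int excludes V = String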
+
+
+ type Formula
+ def andFormula(a: Formula, b: Formula): Formula
+
+ class CNFBudgetExceeded extends RuntimeException("CNF budget exceeded")
+
+ // may throw a CNFBudgetExceeded
+ def propToSolvable(p: Prop) = {
+ val (eqAxioms, pure :: Nil) = removeVarEq(List(p), considerNull = false)
+ eqFreePropToSolvable(And(eqAxioms, pure))
+ }
+
+ def eqFreePropToSolvable(p: Prop): Formula
+ def cnfString(f: Formula): String
+
+ type Model = Map[Sym, Boolean]
+ val EmptyModel: Model
+ val NoModel: Model
+
+ def findModelFor(f: Formula): Model
+ def findAllModelsFor(f: Formula): List[Model]
+ }
+
+ trait CNF extends Logic {
+ // CNF: a formula is a conjunction of clauses
+ type Formula = Array[Clause]
+ def formula(c: Clause*): Formula = c.toArray
+ def andFormula(a: Formula, b: Formula): Formula = a ++ b
+
+ // a clause is a disjunction of distinct literals
+ type Clause = Set[Lit]
+ def clause(l: Lit*): Clause = l.toSet
+ @inline private def merge(a: Clause, b: Clause) = a ++ b
+
+ type Lit
+ def Lit(sym: Sym, pos: Boolean = true): Lit
+
+ // throws a CNFBudgetExceeded when the prop results in a CNF that's too big
+ def eqFreePropToSolvable(p: Prop): Formula = {
+ // TODO: for now, reusing the normalization from DPLL
+ def negationNormalForm(p: Prop): Prop = p match {
+ case And(a, b) => And(negationNormalForm(a), negationNormalForm(b))
+ case Or(a, b) => Or(negationNormalForm(a), negationNormalForm(b))
+ case Not(And(a, b)) => negationNormalForm(Or(Not(a), Not(b)))
+ case Not(Or(a, b)) => negationNormalForm(And(Not(a), Not(b)))
+ case Not(Not(p)) => negationNormalForm(p)
+ case Not(True) => False
+ case Not(False) => True
+ case True
+ | False
+ | (_ : Sym)
+ | Not(_ : Sym) => p
+ }
+
+ val TrueF = formula()
+ val FalseF = formula(clause())
+ def lit(s: Sym) = formula(clause(Lit(s)))
+ def negLit(s: Sym) = formula(clause(Lit(s, false)))
+
+ def conjunctiveNormalForm(p: Prop, budget: Int = 256): Formula = {
+ def distribute(a: Formula, b: Formula, budget: Int): Formula =
+ if (budget <= 0) throw new CNFBudgetExceeded
+ else
+ (a, b) match {
+ // true \/ _ = true
+ // _ \/ true = true
+ case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
+ // lit \/ lit
+ case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
+ // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
+ // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
+ case (cs, ds) =>
+ val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
+ big flatMap (c => distribute(formula(c), small, budget - (big.size*small.size)))
+ }
+
+ if (budget <= 0) throw new CNFBudgetExceeded
+
+ p match {
+ case True => TrueF
+ case False => FalseF
+ case s: Sym => lit(s)
+ case Not(s: Sym) => negLit(s)
+ case And(a, b) =>
+ val cnfA = conjunctiveNormalForm(a, budget - 1)
+ val cnfB = conjunctiveNormalForm(b, budget - cnfA.size)
+ cnfA ++ cnfB
+ case Or(a, b) =>
+ val cnfA = conjunctiveNormalForm(a)
+ val cnfB = conjunctiveNormalForm(b)
+ distribute(cnfA, cnfB, budget - (cnfA.size + cnfB.size))
+ }
+ }
+
+ val start = startTimer(patmatCNF)
+ val res = conjunctiveNormalForm(negationNormalForm(p))
+ stopTimer(patmatCNF, start)
+ patmatCNFSizes(res.size) += 1
+
+// patmatDebug("cnf for\n"+ p +"\nis:\n"+cnfString(res))
+ res
+ }
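+
+ // illustrative sketch: for Syms a, b, c,
+ //   Or(Not(And(a, b)), c) normalizes to Or(Or(Not(a), Not(b)), c) and yields the single clause {-a, -b, c},
+ //   whereas Or(And(a, b), c) distributes into the two clauses {a, c} and {b, c}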
+
+ }
+
+ trait DPLLSolver extends CNF {
+ // a literal is a (possibly negated) variable
+ def Lit(sym: Sym, pos: Boolean = true) = new Lit(sym, pos)
+ class Lit(val sym: Sym, val pos: Boolean) {
+ override def toString = if (!pos) "-"+ sym.toString else sym.toString
+ override def equals(o: Any) = o match {
+ case o: Lit => (o.sym == sym) && (o.pos == pos)
+ case _ => false
+ }
+ override def hashCode = sym.hashCode + pos.hashCode
+
+ def unary_- = Lit(sym, !pos)
+ }
+
+ def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
+
+ // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
+ val EmptyModel = Map.empty[Sym, Boolean]
+ val NoModel: Model = null
+
+ // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
+ def findAllModelsFor(f: Formula): List[Model] = {
+ val vars: Set[Sym] = f.flatMap(_ collect {case l: Lit => l.sym}).toSet
+ // patmatDebug("vars "+ vars)
+ // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True)
+ def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*)
+
+ def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
+ if (recursionDepthAllowed == 0) models
+ else {
+ // patmatDebug("solving\n"+ cnfString(f))
+ val model = findModelFor(f)
+ // if we found a solution, conjunct the formula with the model's negation and recurse
+ if (model ne NoModel) {
+ val unassigned = (vars -- model.keySet).toList
+ // patmatDebug("unassigned "+ unassigned +" in "+ model)
+ def force(lit: Lit) = {
+ val model = withLit(findModelFor(dropUnit(f, lit)), lit)
+ if (model ne NoModel) List(model)
+ else Nil
+ }
+ val forced = unassigned flatMap { s =>
+ force(Lit(s, true)) ++ force(Lit(s, false))
+ }
+ // patmatDebug("forced "+ forced)
+ val negated = negateModel(model)
+ findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
+ }
+ else models
+ }
+
+ findAllModels(f, Nil)
+ }
+
+ @inline private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
+ @inline private def dropUnit(f: Formula, unitLit: Lit) = {
+ val negated = -unitLit
+ // drop entire clauses that are trivially true
+ // (i.e., disjunctions that contain the literal we're making true in the returned model),
+ // and simplify clauses by dropping the negation of the literal we're making true
+ // (since False \/ X == X)
+ f.filterNot(_.contains(unitLit)).map(_ - negated)
+ }
+
+ def findModelFor(f: Formula): Model = {
+ @inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
+
+ // patmatDebug("dpll\n"+ cnfString(f))
+
+ val start = startTimer(patmatAnaDPLL)
+
+ val satisfiableWithModel: Model =
+ if (f isEmpty) EmptyModel
+ else if(f exists (_.isEmpty)) NoModel
+ else f.find(_.size == 1) match {
+ case Some(unitClause) =>
+ val unitLit = unitClause.head
+ // patmatDebug("unit: "+ unitLit)
+ withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
+ case _ =>
+ // partition symbols according to whether they appear in positive and/or negative literals
+ val pos = new HashSet[Sym]()
+ val neg = new HashSet[Sym]()
+ f.foreach{_.foreach{ lit =>
+ if (lit.pos) pos += lit.sym else neg += lit.sym
+ }}
+ // appearing in both positive and negative
+ val impures = pos intersect neg
+ // appearing only in either positive/negative positions
+ val pures = (pos ++ neg) -- impures
+
+ if (pures nonEmpty) {
+ val pureSym = pures.head
+ // turn it back into a literal
+ // (since equality on literals is in terms of equality
+ // of the underlying symbol and its positivity, simply construct a new Lit)
+ val pureLit = Lit(pureSym, pos(pureSym))
+ // patmatDebug("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures)
+ val simplified = f.filterNot(_.contains(pureLit))
+ withLit(findModelFor(simplified), pureLit)
+ } else {
+ val split = f.head.head
+ // patmatDebug("split: "+ split)
+ orElse(findModelFor(f :+ clause(split)), findModelFor(f :+ clause(-split)))
+ }
+ }
+
+ stopTimer(patmatAnaDPLL, start)
+
+ satisfiableWithModel
+ }
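+
+ // illustrative trace: for f = {A, B} /\ {-A} (clauses written as literal sets),
+ //   {-A} is a unit clause, so dropUnit removes the clauses containing -A and strips A from the rest, leaving {B};
+ //   {B} is again a unit clause, leaving the empty formula, i.e. EmptyModel;
+ //   unwinding through withLit yields the model A -> false, B -> true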
+ }
+
+
+ /**
+ * Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types)
+ *
+ */
+ trait SymbolicMatchAnalysis extends TreeMakerApproximation with Logic { self: CodegenCore =>
+ def prepareNewAnalysis() = { Var.resetUniques(); Const.resetUniques() }
+
+ object Var {
+ private var _nextId = 0
+ def nextId = {_nextId += 1; _nextId}
+
+ def resetUniques() = {_nextId = 0; uniques.clear()}
+ private val uniques = new collection.mutable.HashMap[Tree, Var]
+ def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
+ }
+ class Var(val path: Tree, fullTp: Type, checked: Boolean = true) extends AbsVar {
+ private[this] val id: Int = Var.nextId
+
+ // private[this] var canModify: Option[Array[StackTraceElement]] = None
+ @inline private[this] def ensureCanModify = {} //if (canModify.nonEmpty) patmatDebug("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n"))
+
+ @inline private[this] def observed = {} //canModify = Some(Thread.currentThread.getStackTrace)
+
+ // don't access until all potential equalities have been registered using registerEquality
+ private[this] val symForEqualsTo = new collection.mutable.HashMap[Const, Sym]
+
+ // when looking at the domain, we only care about types we can check at run time
+ val domainTp: Type = checkableType(fullTp)
+
+ private[this] var _considerNull = false
+ def considerNull: Unit = { ensureCanModify; if (NullTp <:< domainTp) _considerNull = true }
+
+ // case None => domain is unknown,
+ // case Some(List(tps: _*)) => domain is exactly tps
+ // we enumerate the subtypes of the full type, as that allows us to filter out more types statically;
+ // once we go to run-time checks (on Const's), we convert them to checkable types
+ // TODO: there seems to be a bug for singleton domains (the variable does not show up in the model)
+ lazy val domain: Option[Set[Const]] =
+ if (!checked) None
+ else {
+ val subConsts = enumerateSubtypes(fullTp).map{ tps =>
+ tps.toSet[Type].map{ tp =>
+ val domainC = TypeConst(tp)
+ registerEquality(domainC)
+ domainC
+ }
+ }
+
+ val allConsts =
+ if (! _considerNull) subConsts
+ else {
+ registerEquality(NullConst)
+ subConsts map (_ + NullConst)
+ }
+
+ observed; allConsts
+ }
+
+ // NOTE: call considerNull (if needed) before accessing this; calling it afterwards results in inconsistencies
+ lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
+
+
+ // populate equalitySyms
+ // don't care about the result, but want only one fresh symbol per distinct constant c
+ def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
+
+ // don't access until all potential equalities have been registered using registerEquality
+ lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
+
+ // return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
+ // (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
+ def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
+
+
+ // don't call until all equalities have been registered and considerNull has been called (if needed)
+ def describe = toString + ": " + fullTp + domain.map(_.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys)).getOrElse(symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...")) + " // = " + path
+ override def toString = "V"+ id
+ }
+
+
+ // all our variables range over types
+ // a literal constant becomes ConstantType(Constant(v)) when the type allows it (roughly, anyval + string + null)
+ // equality between variables: SingleType(x) (note that pattern variables cannot relate to each other -- it's always patternVar == nonPatternVar)
+ object Const {
+ def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear()} // patmatDebug("RESET")
+
+ private var _nextTypeId = 0
+ def nextTypeId = {_nextTypeId += 1; _nextTypeId}
+
+ private var _nextValueId = 0
+ def nextValueId = {_nextValueId += 1; _nextValueId}
+
+ private val uniques = new collection.mutable.HashMap[Type, Const]
+ private[SymbolicMatchAnalysis] def unique(tp: Type, mkFresh: => Const): Const =
+ uniques.get(tp).getOrElse(
+ uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
+ case Some((_, c)) => c
+ case _ =>
+ val fresh = mkFresh
+ uniques(tp) = fresh
+ fresh
+ })
+ }
+
+ sealed abstract class Const extends AbsConst {
+ protected def tp: Type
+ protected def wideTp: Type
+
+ def isAny = wideTp.typeSymbol == AnyClass
+
+ final def implies(other: Const): Boolean = {
+ val r = (this, other) match {
+ case (_: ValueConst, _: ValueConst) => this == other // hashconsed
+ case (_: ValueConst, _: TypeConst) => tp <:< other.tp
+ case (_: TypeConst, _) => tp <:< other.tp
+ case _ => false
+ }
+ // if(r) patmatDebug("implies : "+(this, other))
+ // else patmatDebug("NOT implies: "+(this, other))
+ r
+ }
+
+ // does V = C preclude V having value `other`? V = null is an exclusive assignment,
+ // but V = 1 does not preclude V = Int, or V = Any
+ final def excludes(other: Const): Boolean = {
+ val r = (this, other) match {
+ case (_, NullConst) => true
+ case (NullConst, _) => true
+ // this causes false negative for unreachability, but that's ok:
+ // example: val X = 1; val Y = 1; (2: Int) match { case X => case Y => /* considered reachable */ }
+ case (_: ValueConst, _: ValueConst) => this != other
+ case (_: ValueConst, _: TypeConst) => !((tp <:< other.tp) || (other.tp <:< wideTp))
+ case (_: TypeConst, _: ValueConst) => !((other.tp <:< tp) || (tp <:< other.wideTp))
+ case (_: TypeConst, _: TypeConst) => !((tp <:< other.tp) || (other.tp <:< tp))
+ case _ => false
+ }
+ // if(r) patmatDebug("excludes : "+(this, other))
+ // else patmatDebug("NOT excludes: "+(this, other))
+ r
+ }
+
+ // note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
+ // the equals inherited from AnyRef does just this
+ }
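+
+ // illustrative sketch (writing V = C for an assignment): V = 1 implies V = Int and V = Any;
+ // V = String and V = Int exclude each other, as do two distinct value constants such as 1 and 2;
+ // V = null excludes any other assignment, but V = 1 does not exclude V = Int or V = Any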
+
+
+ object TypeConst {
+ def apply(tp: Type) = {
+ if (tp =:= NullTp) NullConst
+ else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
+ else Const.unique(tp, new TypeConst(tp))
+ }
+ def unapply(c: TypeConst): Some[Type] = Some(c.tp)
+ }
+
+ // corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet)
+ sealed class TypeConst(val tp: Type) extends Const {
+ assert(!(tp =:= NullTp))
+ private[this] val id: Int = Const.nextTypeId
+
+ val wideTp = tp.widen
+
+ override def toString = tp.toString //+"#"+ id
+ }
+
+ // p is a unique type or a constant value
+ object ValueConst {
+ def fromType(tp: Type) = {
+ assert(tp.isInstanceOf[SingletonType])
+ val toString = tp match {
+ case ConstantType(c) => c.escapedStringValue
+ case _ => tp.toString
+ }
+ Const.unique(tp, new ValueConst(tp, tp.widen, toString))
+ }
+ def apply(p: Tree) = {
+ val tp = p.tpe.normalize
+ if (tp =:= NullTp) NullConst
+ else {
+ val wideTp = {
+ if (p.hasSymbol && p.symbol.isStable) tp.asSeenFrom(tp.prefix, p.symbol.owner).widen
+ else tp.widen
+ }
+
+ val narrowTp =
+ if (tp.isInstanceOf[SingletonType]) tp
+ else p match {
+ case Literal(c) =>
+ if (c.tpe.typeSymbol == UnitClass) c.tpe
+ else ConstantType(c)
+ case p if p.symbol.isStable =>
+ singleType(tp.prefix, p.symbol)
+ case x =>
+ // TODO: better type
+ x.tpe.narrow
+ }
+
+ val toString =
+ if (p.hasSymbol && p.symbol.isStable) p.symbol.name.toString // tp.toString
+ else p.toString //+"#"+ id
+
+ Const.unique(narrowTp, new ValueConst(narrowTp, checkableType(wideTp), toString)) // must make wide type checkable so that it is comparable to types from TypeConst
+ }
+ }
+ }
+ sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
+ // patmatDebug("VC"+(tp, wideTp, toString))
+ assert(!(tp =:= NullTp))
+ private[this] val id: Int = Const.nextValueId
+ }
+
+ lazy val NullTp = ConstantType(Constant(null))
+ case object NullConst extends Const {
+ protected def tp = NullTp
+ protected def wideTp = NullTp
+
+ def isValue = true
+ override def toString = "null"
+ }
+
+
+ // turns a case (represented as a list of abstract tests)
+ // into a proposition that is satisfiable if the case may match
+ def symbolicCase(tests: List[Test], modelNull: Boolean = false): Prop = {
+ def symbolic(t: Cond): Prop = t match {
+ case AndCond(a, b) => And(symbolic(a), symbolic(b))
+ case OrCond(a, b) => Or(symbolic(a), symbolic(b))
+ case Top => True
+ case Havoc => False
+ case TypeCond(p, pt) => Eq(Var(p), TypeConst(checkableType(pt)))
+ case EqualityCond(p, q) => Eq(Var(p), ValueConst(q))
+ case NonNullCond(p) => if (!modelNull) True else Not(Eq(Var(p), NullConst))
+ }
+
+ val testsBeforeBody = tests.takeWhile(t => !t.treeMaker.isInstanceOf[BodyTreeMaker])
+ /\(testsBeforeBody.map(t => symbolic(t.cond)))
+ }
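+
+ // e.g., a case approximated as NonNullCond(x1) /\ TypeCond(x1, String) (followed by its body) becomes,
+ // with modelNull = true, roughly And(Not(Eq(Var(x1), NullConst)), Eq(Var(x1), TypeConst(String)));
+ // with modelNull = false the non-null test contributes True and effectively drops out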
+
+ // TODO: model dependencies between variables: if V1 corresponds to (x: List[_]) and V2 is (x.hd), V2 cannot be assigned when V1 = null or V1 = Nil
+ // right now hackily implement this by pruning counter-examples
+ // unreachability would also benefit from a more faithful representation
+
+ // reachability (dead code)
+
+ // computes the first 0-based case index that is unreachable (if any)
+ // a case is unreachable if it implies its preceding cases
+ // call C the formula that is satisfiable if the considered case matches
+ // call P the formula that is satisfiable if the cases preceding it match
+ // the case is reachable if there is a model for -P /\ C,
+ // thus, the case is unreachable if there is no model for -(-P /\ C),
+ // or, equivalently, P \/ -C, or C => P
+ def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = {
+ // customize TreeMakersToConds (which turns a tree of tree makers into a more abstract DAG of tests)
+ // when approximating the current case (which we hope is reachable), be optimistic about the unknowns
+ object reachabilityApproximation extends TreeMakersToConds(prevBinder) {
+ def makeCondOptimistic(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = tm match {
+ // for unreachability, let's assume a guard always matches (unless we statically determined otherwise)
+ // otherwise, a guarded case would be considered unreachable
+ case GuardTreeMaker(guard) =>
+ guard.tpe match {
+ case ConstantType(Constant(false)) => Havoc // not the best name; however, symbolically, 'Havoc' becomes 'False'
+ case _ => Top
+ }
+ // similar to a guard, user-defined extractors should not cause us to freak out
+ // if we're not 100% sure it does not match (i.e., its result type is None or Constant(false) -- TODO),
+ // let's stay optimistic and assume it does
+ case ExtractorTreeMaker(_, _, _, _)
+ | ProductExtractorTreeMaker(_, Some(_), _) => Top
+ // TODO: consider length-checks
+ case _ =>
+ makeCond(tm)(recurse)
+ }
+
+ // be pessimistic when approximating the prefix of the current case
+ // we hope the prefix fails so that we might get to the current case, which we hope is not dead
+ def makeCondPessimistic(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = makeCond(tm)(recurse)
+ }
+
+ val start = startTimer(patmatAnaReach)
+
+ // use the same approximator so we share variables,
+ // but need different conditions depending on whether we're conservatively looking for failure or success
+ val testCasesOk = reachabilityApproximation.approximateMatch(cases, reachabilityApproximation.makeCondOptimistic)
+ val testCasesFail = reachabilityApproximation.approximateMatchAgain(cases, reachabilityApproximation.makeCondPessimistic)
+
+ reachabilityApproximation.discard()
+ prepareNewAnalysis()
+
+ val propsCasesOk = testCasesOk map (t => symbolicCase(t, modelNull = true))
+ val propsCasesFail = testCasesFail map (t => Not(symbolicCase(t, modelNull = true)))
+ val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, considerNull = true)
+ val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, considerNull = true)
+
+ try {
+ // most of the time eqAxiomsFail == eqAxiomsOk, but the different approximations might cause different variables to disappear in general
+ val eqAxiomsCNF =
+ if (eqAxiomsFail == eqAxiomsOk) eqFreePropToSolvable(eqAxiomsFail)
+ else eqFreePropToSolvable(And(eqAxiomsFail, eqAxiomsOk))
+
+ var prefix = eqAxiomsCNF
+ var prefixRest = symbolicCasesFail
+ var current = symbolicCasesOk
+ var reachable = true
+ var caseIndex = 0
+
+ // patmatDebug("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables) map (_.describe) mkString ("\n")))
+ // patmatDebug("equality axioms:\n"+ cnfString(eqAxiomsCNF))
+
+ // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
+ // termination: prefixRest.length decreases by 1
+ while (prefixRest.nonEmpty && reachable) {
+ val prefHead = prefixRest.head
+ caseIndex += 1
+ prefixRest = prefixRest.tail
+ if (prefixRest.isEmpty) reachable = true
+ else {
+ prefix = andFormula(eqFreePropToSolvable(prefHead), prefix)
+ current = current.tail
+ val model = findModelFor(andFormula(eqFreePropToSolvable(current.head), prefix))
+
+ // patmatDebug("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
+ // if (ok) patmatDebug("reached: "+ modelString(model))
+
+ reachable = model ne NoModel
+ }
+ }
+
+ stopTimer(patmatAnaReach, start)
+
+ if (reachable) None else Some(caseIndex)
+ } catch {
+ case e : CNFBudgetExceeded =>
+// debugWarn(util.Position.formatMessage(prevBinder.pos, "Cannot check match for reachability", false))
+// e.printStackTrace()
+ None // CNF budget exceeded
+ }
+ }
+
+ // exhaustivity
+
+ // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
+ // TODO: domain of feasibly enumerable built-in types (enums, char?)
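+ // for example (a sketch over a hypothetical hierarchy):
+ //   sealed trait T; final class A extends T; final class B extends T
+ // enumerating the subtypes of T should yield List(A, B), whereas an unsealed class yields None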
+ def enumerateSubtypes(tp: Type): Option[List[Type]] =
+ tp.typeSymbol match {
+ case BooleanClass =>
+ // patmatDebug("enum bool "+ tp)
+ Some(List(ConstantType(Constant(true)), ConstantType(Constant(false))))
+ // TODO case _ if tp.isTupleType => // recurse into component types
+ case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
+ // patmatDebug("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+ None
+ case sym =>
+ val subclasses = (
+ sym.sealedDescendants.toList sortBy (_.sealedSortName)
+ // symbols which are both sealed and abstract need not be covered themselves, because
+ // all of their children must be covered, and they cannot be instantiated in any other way.
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
+ // patmatDebug("subclasses "+ (sym, subclasses))
+
+ val tpApprox = typer.infer.approximateAbstracts(tp)
+ val pre = tpApprox.prefix
+ // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
+ val validSubTypes = (subclasses flatMap {sym =>
+ // have to filter out children which cannot match: see ticket #3683 for an example
+ // compare to the fully known type `tp` (modulo abstract types),
+ // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
+ // however, we must approximate abstract types in the subtype as well (hence subTpApprox below)
+ val subTp = appliedType(pre.memberType(sym), sym.typeParams.map(_ => WildcardType))
+ val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
+ // patmatDebug("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
+ if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
+ else None
+ })
+ // patmatDebug("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
+ Some(validSubTypes)
+ }
+
+ // approximate a type to the static type that is fully checkable at run time,
+ // hiding statically known but dynamically uncheckable information using existential quantification
+ // TODO: this is subject to the availability of TypeTags (since an abstract type with a type tag is checkable at run time)
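+ // for example (a rough sketch): a scrutinee of static type List[Int] is approximated to List[_],
+ // since the Int type argument cannot be checked at run time (absent a type tag)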
+ def checkableType(tp: Type): Type = {
+ // TODO: this is extremely rough...
+ object toCheckable extends TypeMap {
+ def apply(tp: Type) = tp match {
+ case TypeRef(pre, sym, a :: as) if sym ne ArrayClass =>
+ // replace type args by existentials, since they can't be checked
+ // TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
+ // TODO: don't reuse sym.typeParams, they have bounds (and those must not be considered)
+ newExistentialType(sym.typeParams, sym.tpe).asSeenFrom(pre, sym.owner)
+ case _ => mapOver(tp)
+ }
+ }
+ val res = toCheckable(tp)
+ // patmatDebug("checkable "+(tp, res))
+ res
+ }
+
+ // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
+ // we consider tuple types with at least one component of a checkable type as a checkable type
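+ // for example (with SomeUnsealedTrait a hypothetical unsealed trait): (Boolean, SomeUnsealedTrait)
+ // is still considered checkable, because its Boolean component is checkable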
+ def uncheckableType(tp: Type): Boolean = {
+ @inline def tupleComponents(tp: Type) = tp.normalize.typeArgs
+ val checkable = (
+ (isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
+ || enumerateSubtypes(tp).nonEmpty)
+ // if (!checkable) patmatDebug("deemed uncheckable: "+ tp)
+ !checkable
+ }
+
+ def exhaustive(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[String] = if (uncheckableType(prevBinder.info)) Nil else {
+ // customize TreeMakersToConds (which turns a tree of tree makers into a more abstract DAG of tests)
+ // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`,
+ //   otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed not exhaustive
+ // - back off (to avoid spuriously warning that a match is not exhaustive) when:
+ //    - there are guards
+ //    - there are extractor calls (that we can't secretly/soundly rewrite)
+ val start = startTimer(patmatAnaExhaust)
+ var backoff = false
+ object exhaustivityApproximation extends TreeMakersToConds(prevBinder) {
+ def makeCondExhaustivity(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = tm match {
+ case p @ ExtractorTreeMaker(extractor, Some(lenCheck), testedBinder, _) =>
+ p.checkedLength match {
+ // pattern: `List()` (interpret as `Nil`)
+ // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil
+ case Some(0) if testedBinder.tpe.typeSymbol == ListClass => // extractor.symbol.owner == SeqFactory
+ EqualityCond(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
+ case _ =>
+ backoff = true
+ makeCond(tm)(recurse)
+ }
+ case ExtractorTreeMaker(_, _, _, _) =>
+// patmatDebug("backing off due to "+ tm)
+ backoff = true
+ makeCond(tm)(recurse)
+ case GuardTreeMaker(guard) =>
+ guard.tpe match {
+ case ConstantType(Constant(true)) => Top
+ case ConstantType(Constant(false)) => Havoc
+ case _ =>
+// patmatDebug("can't statically interpret guard: "+(guard, guard.tpe))
+ backoff = true
+ Havoc
+ }
+ case _ =>
+ makeCond(tm)(recurse)
+ }
+ }
+
+ val tests = exhaustivityApproximation.approximateMatch(cases, exhaustivityApproximation.makeCondExhaustivity)
+
+ if (backoff) Nil else {
+ val prevBinderTree = exhaustivityApproximation.binderToUniqueTree(prevBinder)
+
+ exhaustivityApproximation.discard()
+ prepareNewAnalysis()
+
+ val symbolicCases = tests map (symbolicCase(_, modelNull = false))
+
+
+ // TODO: null tests generate too much noise, so disabled them -- is there any way to bring them back?
+ // assuming we're matching on a non-null scrutinee (prevBinder), when does the match fail?
+ // val nonNullScrutineeCond =
+ // assume non-null for all the components of the tuple we're matching on (if we're matching on a tuple)
+ // if (isTupleType(prevBinder.tpe))
+ // prevBinder.tpe.typeArgs.mapWithIndex{case (_, i) => NonNullCond(codegen.tupleSel(prevBinderTree)(i))}.reduceLeft(AndCond)
+ // else
+ // NonNullCond(prevBinderTree)
+ // val matchFails = And(symbolic(nonNullScrutineeCond), Not(symbolicCases reduceLeft (Or(_, _))))
+
+ // when does the match fail?
+ val matchFails = Not(\/(symbolicCases))
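+ // for example (a sketch over a hypothetical hierarchy): given
+ //   sealed trait T; case class A() extends T; case class B() extends T
+ // and a match covering only A(), a model of matchFails assigns the scrutinee to B,
+ // which is later rendered as the counter-example B()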
+
+ // debug output:
+ // patmatDebug("analysing:")
+ // showTreeMakers(cases)
+ // showTests(tests)
+ //
+ // val vars = gatherVariables(matchFails)
+ // patmatDebug("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
+ //
+ // patmatDebug("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails)))
+
+ try {
+ // find the models (under which the match fails)
+ val matchFailModels = findAllModelsFor(propToSolvable(matchFails))
+
+ val scrutVar = Var(prevBinderTree)
+ val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar))
+
+ val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
+
+ stopTimer(patmatAnaExhaust, start)
+ pruned
+ } catch {
+ case e : CNFBudgetExceeded =>
+ // patmatDebug(util.Position.formatMessage(prevBinder.pos, "Cannot check match for exhaustivity", false))
+ // e.printStackTrace()
+ Nil // CNF budget exceeded
+ }
+ }
+ }
+
+ object CounterExample {
+ def prune(examples: List[CounterExample]): List[CounterExample] = {
+ val distinct = examples.filterNot(_ == NoExample).toSet
+ distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList
+ }
+ }
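+ // for example (a sketch, with A a hypothetical case class): if both `_` and `A(_)` survive as
+ // counter-examples, `A(_)` is covered by the wildcard and therefore pruned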
+
+ // a way to construct a value that will make the match fail (a constructor invocation, a constant, an object of some type)
+ class CounterExample {
+ protected[SymbolicMatchAnalysis] def flattenConsArgs: List[CounterExample] = Nil
+ def coveredBy(other: CounterExample): Boolean = this == other || other == WildcardExample
+ }
+ case class ValueExample(c: ValueConst) extends CounterExample { override def toString = c.toString }
+ case class TypeExample(c: Const) extends CounterExample { override def toString = "(_ : "+ c +")" }
+ case class NegativeExample(nonTrivialNonEqualTo: List[Const]) extends CounterExample {
+ // require(nonTrivialNonEqualTo.nonEmpty, nonTrivialNonEqualTo)
+ override def toString = {
+ val negation =
+ if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head.toString
+ else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("in (", ", ", ")")
+ "<not "+ negation +">"
+ }
+ }
+ case class ListExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ protected[SymbolicMatchAnalysis] override def flattenConsArgs: List[CounterExample] = ctorArgs match {
+ case hd :: tl :: Nil => hd :: tl.flattenConsArgs
+ case _ => Nil
+ }
+ protected[SymbolicMatchAnalysis] lazy val elems = flattenConsArgs
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case other@ListExample(_) =>
+ this == other || ((elems.length == other.elems.length) && (elems zip other.elems).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+
+ override def toString = elems.mkString("List(", ", ", ")")
+ }
+ case class TupleExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = ctorArgs.mkString("(", ", ", ")")
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case TupleExample(otherArgs) =>
+ this == other || ((ctorArgs.length == otherArgs.length) && (ctorArgs zip otherArgs).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+ }
+ case class ConstructorExample(cls: Symbol, ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = cls.decodedName + (if (cls.isModuleClass) "" else ctorArgs.mkString("(", ", ", ")"))
+ }
+
+ case object WildcardExample extends CounterExample { override def toString = "_" }
+ case object NoExample extends CounterExample { override def toString = "??" }
+
+ @inline def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
+ model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
+ val (trues, falses) = xs.partition(_._2)
+ (trues map (_._1.const), falses map (_._1.const))
+ // should never be more than one value in trues...
+ }
+
+ def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) =
+ varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) =>
+ val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")"
+ v +"(="+ v.path +": "+ v.domainTp +") "+ assignment
+ }.mkString("\n")
+
+ def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model))
+
+ // return constructor call when the model is a true counter example
+ // (the variables don't take into account type information derived from other variables,
+ // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
+ // since we didn't realize the tail of the outer cons was a Nil)
+ def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = {
+ // x1 = ...
+ // x1.hd = ...
+ // x1.tl = ...
+ // x1.hd.hd = ...
+ // ...
+ val varAssignment = modelToVarAssignment(model)
+
+ // patmatDebug("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
+
+ // chop a path into a list of symbols
+ def chop(path: Tree): List[Symbol] = path match {
+ case Ident(_) => List(path.symbol)
+ case Select(pre, name) => chop(pre) :+ path.symbol
+ case _ => // patmatDebug("don't know how to chop "+ path)
+ Nil
+ }
+
+ // turn the variable assignments into a tree
+ // the root is the scrutinee (x1), edges are labelled by the fields that are assigned
+ // a node is a variable example (which is later turned into a counter example)
+ object VariableAssignment {
+ private def findVar(path: List[Symbol]) = path match {
+ case List(root) if root == scrutVar.path.symbol => Some(scrutVar)
+ case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
+ }
+
+ private val uniques = new collection.mutable.HashMap[Var, VariableAssignment]
+ private def unique(variable: Var): VariableAssignment =
+ uniques.getOrElseUpdate(variable, {
+ val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
+ VariableAssignment(variable, eqTo.toList, neqTo.toList, HashMap.empty)
+ })
+
+ def apply(variable: Var): VariableAssignment = {
+ val path = chop(variable.path)
+ val pre = path.init
+ val field = path.last
+
+ val newCtor = unique(variable)
+
+ if (pre.isEmpty) newCtor
+ else {
+ findVar(pre) foreach { preVar =>
+ val outerCtor = this(preVar)
+ outerCtor.fields(field) = newCtor
+ }
+ newCtor
+ }
+ }
+ }
+
+ // node in the tree that describes how to construct a counter-example
+ case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: collection.mutable.Map[Symbol, VariableAssignment]) {
+ // need to prune since the model now incorporates all super types of a constant (needed for reachability)
+ private lazy val prunedEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && (better implies subsumed)))
+ private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.domainTp }).typeSymbol.primaryConstructor
+ private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
+ private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
+ private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
+
+
+ def allFieldAssignmentsLegal: Boolean =
+ (fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal)
+
+ private lazy val nonTrivialNonEqualTo = notEqualTo.filterNot{c => c.isAny }
+
+ // NoExample if the constructor call is ill-typed
+ // (thus statically impossible -- can we incorporate this into the formula?)
+ // beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy
+ def toCounterExample(beBrief: Boolean = false): CounterExample =
+ if (!allFieldAssignmentsLegal) NoExample
+ else {
+ // patmatDebug("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+ val res = prunedEqualTo match {
+ // a definite assignment to a value
+ case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
+
+ // constructor call
+ // or we did not gather any information about equality but we have information about the fields
+ // --> typical example is when the scrutinee is a tuple and all the cases first unwrap that tuple and only then test something interesting
+ case _ if cls != NoSymbol &&
+ ( prunedEqualTo.nonEmpty
+ || (fields.nonEmpty && !isPrimitiveValueClass(cls) && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
+
+ @inline def args(brevity: Boolean = beBrief) = {
+ // figure out the constructor arguments from the field assignment
+ val argLen = (caseFieldAccs.length min ctorParams.length)
+
+ (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList
+ }
+
+ cls match {
+ case ConsClass => ListExample(args())
+ case _ if isTupleSymbol(cls) => TupleExample(args(true))
+ case _ => ConstructorExample(cls, args())
+ }
+
+ // a definite assignment to a type
+ case List(eq) if fields.isEmpty => TypeExample(eq)
+
+ // negative information
+ case Nil if nonTrivialNonEqualTo.nonEmpty =>
+ // negation tends to get pretty verbose
+ if (beBrief) WildcardExample else NegativeExample(nonTrivialNonEqualTo)
+
+ // not a valid counter-example, possibly since we have a definite type but there was a field mismatch
+ // TODO: improve reasoning -- in the meantime, a false negative is better than an annoying false positive
+ case _ => NoExample
+ }
+ // patmatDebug("described as: "+ res)
+ res
+ }
+
+ override def toString = toCounterExample().toString
+ }
+
+ // slurp in information from other variables
+ varAssignment.keys.foreach{ v => if (v != scrutVar) VariableAssignment(v) }
+
+ // this is the variable we want a counter example for
+ VariableAssignment(scrutVar).toCounterExample()
+ }
+ }
+
+////
+ trait CommonSubconditionElimination extends TreeMakerApproximation { self: OptimizedCodegen =>
+ /** A flow-sensitive, generalised common sub-expression elimination:
+ * reuse knowledge from tests that have already been performed.
+ * The only sub-expressions we consider are the conditions and results of the three tests (type, type & equality, equality).
+ * When a sub-expression is shared, it is stored in a mutable variable;
+ * the variable is floated up so that its scope includes all of the program that shares it.
+ * We generalize sharing to implication: b reuses a if a => b and priors(a) => priors(b) (the priors of a sub-expression form the path through the decision tree).
+ */
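+ // for example (a schematic sketch): in
+ //   x match { case s: String if s.isEmpty => 1 case s: String => 2 }
+ // the second case's type test can reuse the outcome of the first one,
+ // which is stored in a hoisted mutable variable (see ReusedCondTreeMaker below)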
+ def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+ val testss = approximateMatch(prevBinder, cases)
+
+ // interpret:
+ val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]]
+ val tested = new collection.mutable.HashSet[Cond]
+
+ def storeDependencies(test: Test) = {
+ val cond = test.cond
+
+ def simplify(c: Cond): Set[Cond] = c match {
+ case AndCond(a, b) => simplify(a) ++ simplify(b)
+ case OrCond(_, _) => Set(Havoc) // TODO: supremum?
+ case NonNullCond(_) => Set(Top) // not worth remembering
+ case _ => Set(c)
+ }
+ val conds = simplify(cond)
+
+ if (conds(Havoc)) false // stop when we encounter a havoc
+ else {
+ val nonTrivial = conds filterNot (_ == Top)
+ if (nonTrivial nonEmpty) {
+ tested ++= nonTrivial
+
+ // is there an earlier test that checks our condition and whose dependencies are implied by ours?
+ dependencies find {
+ case (priorTest, deps) =>
+ ((simplify(priorTest.cond) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly
+ (nonTrivial subsetOf deps) // or if it depends on a superset of our conditions
+ ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested
+ } foreach {
+ case (priorTest, _) =>
+ // if so, note the dependency in both tests
+ priorTest registerReuseBy test
+ }
+
+ dependencies(test) = tested.toSet // copies
+ }
+ true
+ }
+ }
+
+
+ testss foreach { tests =>
+ tested.clear()
+ tests dropWhile storeDependencies
+ }
+ // patmatDebug("dependencies: "+ dependencies)
+
+ // find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
+ // then, collapse these contiguous sequences of reusing tests
+ // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
+ // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
+ val reused = new collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
+ var okToCall = false
+ val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
+
+ // maybe collapse: replace shared prefix of tree makers by a ReusingCondTreeMaker
+ // once this has been computed, we'll know which tree makers are reused,
+ // and we'll replace those by the ReusedCondTreeMakers we've constructed (and stored in the reused map)
+ val collapsed = testss map { tests =>
+ // map tests to the equivalent list of treemakers, replacing shared prefixes by a reusing treemaker
+ // if there's no sharing, simply map to the tree makers corresponding to the tests
+ var currDeps = Set[Cond]()
+ val (sharedPrefix, suffix) = tests span { test =>
+ (test.cond eq Top) || (for(
+ reusedTest <- test.reuses;
+ nextDeps <- dependencies.get(reusedTest);
+ diff <- (nextDeps -- currDeps).headOption;
+ _ <- Some(currDeps = nextDeps))
+ yield diff).nonEmpty
+ }
+
+ val collapsedTreeMakers =
+ if (sharedPrefix.isEmpty) None
+ else { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
+ for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
+ case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
+ case _ =>
+ }
+
+ // patmatDebug("sharedPrefix: "+ sharedPrefix)
+ // if the shared prefix contains interesting conditions (!= Top)
+ // and the last of such interesting shared conditions reuses another treemaker's test
+ // replace the whole sharedPrefix by a ReusingCondTreeMaker
+ for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption;
+ lastReused <- lastShared.reuses)
+ yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
+ }
+
+ collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above)
+ }
+ okToCall = true // TODO: remove (debugging)
+
+ // replace original treemakers that are reused (as determined when computing collapsed),
+ // by ReusedCondTreeMakers
+ val reusedMakers = collapsed mapConserve (_ mapConserve reusedOrOrig)
+// patmatDebug("after CSE:")
+// showTreeMakers(reusedMakers)
+ reusedMakers
+ }
+
+ object ReusedCondTreeMaker {
+ def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
+ }
+ class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+ lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
+ lazy val treesToHoist: List[Tree] = {
+ nextBinder setFlag MUTABLE
+ List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
+ }
+
+ // TODO: finer-grained duplication
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
+ atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
+
+ override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
+ }
+
+ case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
+ val pos = sharedPrefix.last.treeMaker.pos
+
+ lazy val localSubstitution = {
+ // replace binder of each dropped treemaker by corresponding binder bound by the most recent reused treemaker
+ var mostRecentReusedMaker: ReusedCondTreeMaker = null
+ def mapToStored(droppedBinder: Symbol) = if (mostRecentReusedMaker eq null) Nil else List((droppedBinder, REF(mostRecentReusedMaker.nextBinder)))
+ val (from, to) = sharedPrefix.flatMap { dropped =>
+ dropped.reuses.map(test => toReused(test.treeMaker)).foreach {
+ case reusedMaker: ReusedCondTreeMaker =>
+ mostRecentReusedMaker = reusedMaker
+ case _ =>
+ }
+
+ // TODO: have super-trait for retrieving the variable that's operated on by a tree maker
+ // and thus assumed in scope, either because it binds it or because it refers to it
+ dropped.treeMaker match {
+ case dropped: FunTreeMaker =>
+ mapToStored(dropped.nextBinder)
+ case _ => Nil
+ }
+ }.unzip
+ val rerouteToReusedBinders = Substitution(from, to)
+
+ val collapsedDroppedSubst = sharedPrefix map (t => (toReused(t.treeMaker).substitution))
+
+ collapsedDroppedSubst.foldLeft(rerouteToReusedBinders)(_ >> _)
+ }
+
+ lazy val lastReusedTreeMaker = sharedPrefix.reverse.flatMap(tm => tm.reuses map (test => toReused(test.treeMaker))).collectFirst{case x: ReusedCondTreeMaker => x}.head
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift,
+ // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
+ casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
+ }
+ override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
+ }
+ }
+
+
+ //// DCE
+ trait DeadCodeElimination extends TreeMakers { self: CodegenCore =>
+ // TODO: non-trivial dead-code elimination
+ // e.g., the following match should compile to a simple instanceof:
+ // case class Ident(name: String)
+ // for (Ident(name) <- ts) println(name)
+ def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+ // do minimal DCE
+ cases
+ }
+ }
+
+ //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
+ trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface { self: CodegenCore =>
+ abstract class SwitchMaker {
+ abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] }
+ val SwitchableTreeMaker: SwitchableTreeMakerExtractor
+
+ def alternativesSupported: Boolean
+
+ def isDefault(x: CaseDef): Boolean
+ def defaultSym: Symbol
+ def defaultBody: Tree
+ def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef
+
+ private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
+ if (xs exists (_.isEmpty)) None else Some(xs.flatten)
+
+ // empty list ==> failure
+ def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] = {
+ val caseDefs = cases map { case (scrutSym, makers) =>
+ makers match {
+ // default case
+ case (btm@BodyTreeMaker(body, _)) :: Nil =>
+ Some(defaultCase(scrutSym, btm.substitution(body)))
+ // constant (or typetest for typeSwitch)
+ case SwitchableTreeMaker(pattern) :: (btm@BodyTreeMaker(body, _)) :: Nil =>
+ Some(CaseDef(pattern, EmptyTree, btm.substitution(body)))
+ // alternatives
+ case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil if alternativesSupported =>
+ val casePatterns = altss map {
+ case SwitchableTreeMaker(pattern) :: Nil =>
+ Some(pattern)
+ case _ =>
+ None
+ }
+
+ sequence(casePatterns) map { patterns =>
+ val substedBody = btm.substitution(body)
+ CaseDef(Alternative(patterns), EmptyTree, substedBody)
+ }
+ case _ => // patmatDebug("can't emit switch for "+ makers)
+ None //failure (can't translate pattern to a switch)
+ }
+ }
+
+ (for(
+ caseDefs <- sequence(caseDefs)) yield
+ if (caseDefs exists isDefault) caseDefs
+ else {
+ caseDefs :+ defaultCase()
+ }
+ ) getOrElse Nil
+ }
+ }
+
+ class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree]) extends SwitchMaker {
+ val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ val alternativesSupported = true
+
+ object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat match {
+ case Literal(const@Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) =>
+ Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
+ case _ => None
+ }}
+
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const)
+ case _ => None
+ }
+ }
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ def defaultSym: Symbol = scrutSym
+ def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
+ def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ DEFAULT ==> body
+ }}
+ }
+
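+ // for example (a rough sketch of the emitted shape): a match like
+ //   (c: Char) match { case 'a' => 1 case 'b' => 2 }
+ // becomes { val x1 = c; x1.toInt match { case 97 => 1 case 98 => 2 case _ => throw new MatchError(x1) } },
+ // which the back-end can compile to a switch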
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride)
+ // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
+ if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
+ val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
+ if (caseDefsWithDefault isEmpty) None // not worth emitting a switch.
+ else {
+ // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
+ val scrutToInt: Tree =
+ if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ else (REF(scrutSym) DOT (nme.toInt))
+ Some(BLOCK(
+ VAL(scrutSym) === scrut,
+ Match(scrutToInt, caseDefsWithDefault) // a switch
+ ))
+ }
+ } else None
+ }
+
+ // for the catch-cases in a try/catch
+ private object typeSwitchMaker extends SwitchMaker {
+ def switchableTpe(tp: Type) = true
+ val alternativesSupported = false // TODO: needs either back-end support or flattening of alternatives during typer
+
+ // TODO: there are more treemaker-sequences that can be handled by type tests
+ // analyze the result of approximateTreeMaker rather than the TreeMaker itself
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case tm@TypeTestTreeMaker(_, _, pt, _) if tm.isPureTypeTest => // -- TODO: use this if binder does not occur in the body
+ Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(pt)) /* not used by back-end */))
+ case _ =>
+ None
+ }
+ }
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+ def defaultBody: Tree = Throw(CODE.REF(defaultSym))
+ def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) ==> body
+ }}
+ }
+
+ // TODO: drop null checks
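+ // for example (a rough sketch): the handler cases of
+ //   try { ... } catch { case e: IOException => ... }
+ // become type-test cases on a fresh Throwable binder, with a synthetic default case that simply rethrows it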
+ override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = {
+ val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt)
+ if (caseDefsWithDefault isEmpty) None
+ else Some(caseDefsWithDefault)
+ }
+ }
+
+ trait OptimizedMatchMonadInterface extends MatchMonadInterface {
+ override def inMatchMonad(tp: Type): Type = optionType(tp)
+ override def pureType(tp: Type): Type = tp
+ override protected def matchMonadSym = OptionClass
+ }
+
+ trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+ override def codegen: AbsCodegen = optimizedCodegen
+
+ // trait AbsOptimizedCodegen extends AbsCodegen {
+ // def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree
+ // }
+ // def optimizedCodegen: AbsOptimizedCodegen
+
+ // when we know we're targeting Option, do some inlining the optimizer won't do
+ // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
+ // this is a special instance of the advanced inlining optimization that takes a method call on
+ // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
+ object optimizedCodegen extends CommonCodegen { import CODE._
+
+ /** Inline runOrElse and get rid of Option allocations
+ *
+ * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
+ * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
+ * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
+ */
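+ // schematically (a sketch, eliding details), the generated tree has the shape:
+ //   { val x1 = scrut
+ //     case1(){ ... matchEnd(res1) or case2() }
+ //     case2(){ ... matchEnd(res2) or case3() }
+ //     ...
+ //     matchEnd(x: restpe){ x }
+ //   }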
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
+ val matchEnd = NoSymbol.newLabel(freshName("matchEnd"), NoPosition) setFlag SYNTH_CASE
+ val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, SYNTHETIC) setInfo restpe.withoutAnnotations //
+ matchEnd setInfo MethodType(List(matchRes), restpe)
+
+ def newCaseSym = NoSymbol.newLabel(freshName("case"), NoPosition) setInfo MethodType(Nil, restpe) setFlag SYNTH_CASE
+ var nextCase = newCaseSym
+ def caseDef(mkCase: Casegen => Tree): Tree = {
+ val currCase = nextCase
+ nextCase = newCaseSym
+ val casegen = new OptimizedCasegen(matchEnd, nextCase, restpe)
+ LabelDef(currCase, Nil, mkCase(casegen))
+ }
+
+ def catchAll = matchFailGen map { matchFailGen =>
+ val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+ // must jump to matchEnd, use result generated by matchFailGen (could be `FALSE` for isDefinedAt)
+ LabelDef(nextCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
+ // don't cast the arg to matchEnd when using PartialFun synth in uncurry, since it won't detect the throw (see gen.withDefaultCase)
+ // the cast is necessary when using typedMatchAnonFun-style PartialFun synth:
+ // (_asInstanceOf(matchFailGen(scrutRef), restpe))
+ } toList
+ // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
+ // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
+
+ // the generated block is taken apart in TailCalls under the following assumption:
+ // once we encounter a case, the remainder of the block consists solely of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+
+ // scrutSym == NoSymbol when generating an alternatives matcher
+ val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+ Block(
+ scrutDef ++ (cases map caseDef) ++ catchAll,
+ LabelDef(matchEnd, List(matchRes), REF(matchRes))
+ )
+ }
+
+ class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol, restpe: Type) extends CommonCodegen with Casegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
+
+ // only used to wrap the RHS of a body
+ // res: T
+ // returns MatchMonad[T]
+ def one(res: Tree): Tree = matchEnd APPLY (_asInstanceOf(res, restpe)) // need cast for GADT magic
+ protected def zero: Tree = nextCase APPLY ()
+
+ // prev: MatchMonad[T]
+ // b: T
+ // next: MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
+ val tp = inMatchMonad(b.tpe)
+ val prevSym = freshSym(prev.pos, tp, "o")
+ val isEmpty = tp member vpmName.isEmpty
+ val get = tp member vpmName.get
+
+ BLOCK(
+ VAL(prevSym) === prev,
+ // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
+ ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ )
+ }
+
+ // cond: Boolean
+ // res: T
+ // nextBinder: T
+ // next == MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ ifThenElseZero(cond, BLOCK(
+ VAL(nextBinder) === res,
+ next
+ ))
+
+ // guardTree: Boolean
+ // next: MatchMonad[T]
+ // returns MatchMonad[T]
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree =
+ ifThenElseZero(guardTree, next)
+
+ def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ ifThenElseZero(cond, BLOCK(
+ condSym === TRUE_typed,
+ nextBinder === res,
+ next
+ ))
+ }
+
+ }
+ }
+
+
+ trait MatchOptimizations extends CommonSubconditionElimination
+ with DeadCodeElimination
+ with SwitchEmission
+ with OptimizedCodegen
+ with SymbolicMatchAnalysis
+ with DPLLSolver { self: TreeMakers =>
+ override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) = {
+ if (!unchecked) {
+ unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
+ typer.context.unit.warning(cases(caseIndex).last.pos, "unreachable code")
+ }
+ }
+ val counterExamples = if (unchecked) Nil else exhaustive(prevBinder, cases, pt)
+ if (counterExamples.nonEmpty) {
+ val ceString =
+ if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
+ else "inputs: " + counterExamples.mkString(", ")
+
+ typer.context.unit.warning(prevBinder.pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ }
+
+ val optCases = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
+ val toHoist = (
+ for (treeMakers <- optCases)
+ yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist}
+ ).flatten.flatten.toList
+ (optCases, toHoist)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index ad727d4082..6d9c9c4ce8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -10,6 +10,7 @@ import symtab.Flags._
import collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
+import language.postfixOps
/** <p>
* Post-attribution checking and transformation.
@@ -358,7 +359,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
/** Is the intersection between given two lists of overridden symbols empty?
*/
def intersectionIsEmpty(syms1: List[Symbol], syms2: List[Symbol]) =
- !(syms1 exists (syms2 contains))
+ !(syms1 exists (syms2 contains _))
if (typesOnly) checkOverrideTypes()
else {
@@ -385,8 +386,9 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
overrideError("cannot be used here - classes can only override abstract types");
} else if (other.isEffectivelyFinal) { // (1.2)
overrideError("cannot override final member");
- // synthetic exclusion needed for (at least) default getters.
- } else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) {
+ } else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) { // (*)
+ // (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
+ // the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
if (isNeitherInClass && !(other.owner isSubClass member.owner))
emitOverrideError(
clazz + " inherits conflicting members:\n "
@@ -805,9 +807,9 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
for (i <- 0 until seenTypes.length) {
val baseClass = clazz.info.baseTypeSeq(i).typeSymbol
seenTypes(i) match {
- case List() =>
+ case Nil =>
println("??? base "+baseClass+" not found in basetypes of "+clazz)
- case List(_) =>
+ case _ :: Nil =>
;// OK
case tp1 :: tp2 :: _ =>
unit.error(clazz.pos, "illegal inheritance;\n " + clazz +
@@ -1057,7 +1059,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(args.head.tpe.widen)
/** Symbols which limit the warnings we can issue since they may be value types */
- val isMaybeValue = Set(AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
+ val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
// Whether def equals(other: Any) has known behavior: it is the default
// inherited from java.lang.Object, or it is a synthetically generated
@@ -1065,12 +1067,10 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
// equals.
def isUsingWarnableEquals = {
val m = receiver.info.member(nme.equals_)
- def n = actual.info.member(nme.equals_)
- ( (m == Object_equals)
- || (m == Any_equals)
- || (m.isSynthetic && m.owner.isCase && !n.owner.isCase)
- )
+ ((m == Object_equals) || (m == Any_equals) || isMethodCaseEquals(m))
}
+ def isMethodCaseEquals(m: Symbol) = m.isSynthetic && m.owner.isCase
+ def isCaseEquals = isMethodCaseEquals(receiver.info.member(nme.equals_))
// Whether this == or != is one of those defined in Any/AnyRef or an overload from elsewhere.
def isUsingDefaultScalaOp = {
val s = fn.symbol
@@ -1084,8 +1084,16 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
- def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || (s isSubClass ScalaNumberClass)
- def isSpecial(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || (s isSubClass ScalaNumberClass) || isMaybeValue(s)
+ def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
+ def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
+ // test is behind a platform guard
+ def isJavaNumber(s: Symbol) = !forMSIL && (s isSubClass JavaNumberClass)
+ // includes java.lang.Number if appropriate [SI-5779]
+ def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s)
+ def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
+ // used to short-circuit unrelatedTypes check if both sides are special
+ def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
+ // unused
def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
val nullCount = onSyms(_ filter (_ == NullClass) size)
@@ -1093,9 +1101,11 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
unit.warning(pos, "comparing "+what+" using `"+name.decode+"' will always yield " + msg)
}
-
def nonSensible(pre: String, alwaysEqual: Boolean) =
nonSensibleWarning(pre+"values of types "+typesString, alwaysEqual)
+ def nonSensiblyEq() = nonSensible("", true)
+ def nonSensiblyNeq() = nonSensible("", false)
+ def nonSensiblyNew() = nonSensibleWarning("a fresh object", false)
def unrelatedTypes() = {
val msg = if (name == nme.EQ || name == nme.eq)
@@ -1103,52 +1113,73 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
unit.warning(pos, typesString + " are unrelated: they will most likely " + msg)
}
- if (nullCount == 2)
- nonSensible("", true) // null == null
+ if (nullCount == 2) // null == null
+ nonSensiblyEq()
else if (nullCount == 1) {
if (onSyms(_ exists isPrimitiveValueClass)) // null == 5
- nonSensible("", false)
+ nonSensiblyNeq()
else if (onTrees( _ exists isNew)) // null == new AnyRef
- nonSensibleWarning("a fresh object", false)
+ nonSensiblyNew()
}
else if (isBoolean(receiver)) {
if (!isBoolean(actual) && !isMaybeValue(actual)) // true == 5
- nonSensible("", false)
+ nonSensiblyNeq()
}
else if (isUnit(receiver)) {
if (isUnit(actual)) // () == ()
- nonSensible("", true)
+ nonSensiblyEq()
else if (!isUnit(actual) && !isMaybeValue(actual)) // () == "abc"
- nonSensible("", false)
+ nonSensiblyNeq()
}
else if (isNumeric(receiver)) {
if (!isNumeric(actual) && !forMSIL)
if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual)) // 5 == "abc"
- nonSensible("", false)
+ nonSensiblyNeq()
}
- else if (isWarnable) {
+ else if (isWarnable && !isCaseEquals) {
if (isNew(qual)) // new X == y
- nonSensibleWarning("a fresh object", false)
+ nonSensiblyNew()
else if (isNew(args.head) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y
- nonSensibleWarning("a fresh object", false)
+ nonSensiblyNew()
else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual)) { // object X, Y; X == Y
if (isEitherNullable)
nonSensible("non-null ", false)
else
- nonSensible("", false)
+ nonSensiblyNeq()
}
}
// possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
- if (actual isSubClass receiver) ()
- else if (receiver isSubClass actual) ()
- // warn only if they have no common supertype below Object
- else {
+ // better to have lubbed and lost
+ def warnIfLubless(): Unit = {
val common = global.lub(List(actual.tpe, receiver.tpe))
if (ObjectClass.tpe <:< common)
unrelatedTypes()
}
+ def eitherSubclasses = (actual isSubClass receiver) || (receiver isSubClass actual)
+ // warn if actual has a case parent that is not the same as receiver's;
+ // if actual is not a case, then warn if no common supertype, as below
+ if (isCaseEquals) {
+ def thisCase = receiver.info.member(nme.equals_).owner
+ actual.info.baseClasses.find(_.isCase) match {
+ case Some(p) if p != thisCase => nonSensible("case class ", false)
+ case None =>
+ // stronger message on (Some(1) == None)
+ //if (receiver.isCase && receiver.isEffectivelyFinal && !(receiver isSubClass actual)) nonSensiblyNeq()
+ //else
+ // if a class, it must be super to thisCase (and receiver) since not <: thisCase
+ if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
+ else if (!eitherSubclasses) warnIfLubless()
+ case _ =>
+ }
+ }
+ else if (actual isSubClass receiver) ()
+ else if (receiver isSubClass actual) ()
+ // warn only if they have no common supertype below Object
+ else {
+ warnIfLubless()
+ }
}
case _ =>
}
@@ -1256,6 +1287,15 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
+ case t if treeInfo.isSelfConstrCall(t) =>
+ assert(index == 0, index)
+ val t = transform(tree)
+ if (currentLevel.maxindex > 0) {
+ // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see SI-4717
+ debuglog("refsym = " + currentLevel.refsym)
+ unit.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation")
+ }
+ List(t)
case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
case ValDef(_, _, _, _) =>
val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check
@@ -1436,6 +1476,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn =>
unit.warning(tree.pos, "Invalid implicitNotFound message for %s%s:\n%s".format(sym, sym.locationString, warn))
}
+
case tpt@TypeTree() =>
if(tpt.original != null) {
tpt.original foreach {
@@ -1455,8 +1496,23 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
private def transformCaseApply(tree: Tree, ifNot: => Unit) = {
val sym = tree.symbol
-
- if (sym.isSourceMethod && sym.isCase && sym.name == nme.apply)
+
+ def isClassTypeAccessible(tree: Tree): Boolean = tree match {
+ case TypeApply(fun, targs) =>
+ isClassTypeAccessible(fun)
+ case Select(module, apply) =>
+ // Fixes SI-5626. Classes in refinement types cannot be constructed with `new`. In this case,
+ // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
+ module.symbol.companionClass.isClass
+ }
+
+ val doTransform =
+ sym.isSourceMethod &&
+ sym.isCase &&
+ sym.name == nme.apply &&
+ isClassTypeAccessible(tree)
+
+ if (doTransform)
toConstructor(tree.pos, tree.tpe)
else {
ifNot
@@ -1654,9 +1710,10 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
inPattern = false
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case LabelDef(_, _, _) if gen.hasSynthCaseSymbol(result) =>
+ val old = inPattern
inPattern = true
val res = deriveLabelDef(result)(transform)
- inPattern = false
+ inPattern = old
res
case _ =>
super.transform(result)
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
new file mode 100644
index 0000000000..329a247106
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -0,0 +1,10 @@
+package scala.tools.nsc
+package typechecker
+
+import scala.reflect.makro.runtime.{Context => MacroContext}
+
+trait StdAttachments {
+ self: Analyzer =>
+
+ case class MacroAttachment(delayed: Boolean, typerContext: Context, macroContext: Option[MacroContext])
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 43cbea83ff..d327d9c397 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -134,7 +134,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// otherwise lead to either a compiler crash or runtime failure.
private lazy val isDisallowed = {
import definitions._
- Set(Any_isInstanceOf, Object_isInstanceOf, Any_asInstanceOf, Object_asInstanceOf, Object_==, Object_!=, Object_##)
+ Set[Symbol](Any_isInstanceOf, Object_isInstanceOf, Any_asInstanceOf, Object_asInstanceOf, Object_==, Object_!=, Object_##)
}
override def transform(tree: Tree): Tree = {
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index da87d38ab0..6faa9a3cb7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -50,7 +50,7 @@ trait SyntheticMethods extends ast.TreeDSL {
import synthesizer._
if (clazz0 == AnyValClass || isPrimitiveValueClass(clazz0)) return {
- if (clazz0.info member nme.getClass_ isDeferred) {
+ if ((clazz0.info member nme.getClass_).isDeferred) {
// XXX dummy implementation for now
val getClassMethod = createMethod(nme.getClass_, getClassReturnType(clazz.tpe))(_ => NULL)
deriveTemplate(templ)(_ :+ getClassMethod)
@@ -74,7 +74,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// Option[Int] { def productIterator: Iterator[String] }
//
// appearing legitimately, but this breaks invariant places
- // like Manifests and Arrays which are not robust and infer things
+ // like Tags and Arrays which are not robust and infer things
// which they shouldn't.
val accessorLub = (
if (opt.experimental) {
@@ -89,6 +89,11 @@ trait SyntheticMethods extends ast.TreeDSL {
def forwardToRuntime(method: Symbol): Tree =
forwardMethod(method, getMember(ScalaRunTimeModule, method.name prepend "_"))(mkThis :: _)
+ def callStaticsMethod(name: String)(args: Tree*): Tree = {
+ val method = termMember(RuntimeStaticsModule, name)
+ Apply(gen.mkAttributedRef(method), args.toList)
+ }
+
// Any member, including private
def hasConcreteImpl(name: Name) =
clazz.info.member(name).alternatives exists (m => !m.isDeferred && !m.isSynthetic)
@@ -222,13 +227,47 @@ trait SyntheticMethods extends ast.TreeDSL {
)
}
+ def hashcodeImplementation(sym: Symbol): Tree = {
+ sym.tpe.finalResultType.typeSymbol match {
+ case UnitClass | NullClass => Literal(Constant(0))
+ case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
+ case IntClass | ShortClass | ByteClass | CharClass => Ident(sym)
+ case LongClass => callStaticsMethod("longHash")(Ident(sym))
+ case DoubleClass => callStaticsMethod("doubleHash")(Ident(sym))
+ case FloatClass => callStaticsMethod("floatHash")(Ident(sym))
+ case _ => callStaticsMethod("anyHash")(Ident(sym))
+ }
+ }
+
+ def specializedHashcode = {
+ createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
+ val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntClass.tpe
+ val valdef = ValDef(accumulator, Literal(Constant(0xcafebabe)))
+ val mixes = accessors map (acc =>
+ Assign(
+ Ident(accumulator),
+ callStaticsMethod("mix")(Ident(accumulator), hashcodeImplementation(acc))
+ )
+ )
+ val finish = callStaticsMethod("finalizeHash")(Ident(accumulator), Literal(Constant(arity)))
+
+ Block(valdef :: mixes, finish)
+ }
+ }
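+ // a rough sketch of what specializedHashcode generates for a hypothetical `case class C(x: Int, y: Double)`:
+ //   var acc = 0xcafebabe
+ //   acc = Statics.mix(acc, x)
+ //   acc = Statics.mix(acc, Statics.doubleHash(y))
+ //   Statics.finalizeHash(acc, 2)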
+ def chooseHashcode = {
+ if (accessors exists (x => isPrimitiveValueType(x.tpe.finalResultType)))
+ specializedHashcode
+ else
+ forwardToRuntime(Object_hashCode)
+ }
+
def valueClassMethods = List(
Any_hashCode -> (() => hashCodeDerivedValueClassMethod),
Any_equals -> (() => equalsDerivedValueClassMethod)
)
def caseClassMethods = productMethods ++ productNMethods ++ Seq(
- Object_hashCode -> (() => forwardToRuntime(Object_hashCode)),
+ Object_hashCode -> (() => chooseHashcode),
Object_toString -> (() => forwardToRuntime(Object_toString)),
Object_equals -> (() => equalsCaseClassMethod)
)
@@ -299,11 +338,12 @@ trait SyntheticMethods extends ast.TreeDSL {
newAcc resetFlag (ACCESSOR | PARAMACCESSOR)
ddef.rhs.duplicate
}
+ // TODO: shouldn't the next line be: `original resetFlag CASEACCESSOR`?
ddef.symbol resetFlag CASEACCESSOR
lb += logResult("case accessor new")(newAcc)
}
- lb ++= templ.body ++= synthesize() toList
+ (lb ++= templ.body ++= synthesize()).toList
}
if (phase.id > currentRun.typerPhase.id) templ
diff --git a/src/compiler/scala/tools/nsc/typechecker/Taggings.scala b/src/compiler/scala/tools/nsc/typechecker/Taggings.scala
new file mode 100644
index 0000000000..fb0d6fb3c5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/Taggings.scala
@@ -0,0 +1,71 @@
+package scala.tools.nsc
+package typechecker
+
+trait Taggings {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ trait Tagging {
+ self: Typer =>
+
+ private def resolveTag(taggedTp: Type, pos: Position) = beforeTyper {
+ inferImplicit(
+ EmptyTree,
+ taggedTp,
+ /*reportAmbiguous =*/ true,
+ /*isView =*/ false,
+ /*context =*/ context,
+ /*saveAmbiguousDivergent =*/ true,
+ /*pos =*/ pos
+ ).tree
+ }
+
+ /** Finds in scope or materializes an ArrayTag.
+ * Should be used instead of ClassTag or ClassManifest every time the compiler needs to create an array.
+ *
+ * @param tp Type we need an ArrayTag for, e.g. resolveArrayTag(IntClass.tpe, pos) will look for ArrayTag[Int].
+ * @param pos Position for error reporting. Please provide a meaningful value.
+ *
+ * @return Tree that represents a `scala.reflect.ArrayTag` for `tp` if everything is okay.
+ * EmptyTree if the result contains unresolved (i.e. not spliced) type parameters and abstract type members.
+ */
+ def resolveArrayTag(tp: Type, pos: Position): Tree = {
+ val taggedTp = appliedType(ArrayTagClass.typeConstructor, List(tp))
+ resolveTag(taggedTp, pos)
+ }
+
+ /** Finds in scope or materializes an ErasureTag (if `concrete` is false) or a ClassTag (if `concrete` is true).
+ * Should be used instead of ClassTag or ClassManifest every time the compiler needs to persist an erasure.
+ *
+ * @param tp Type we need an ErasureTag for, e.g. resolveErasureTag(IntClass.tpe, pos, true) will look for ClassTag[Int].
+ * @param pos Position for error reporting. Please provide a meaningful value.
+ * @param concrete If true then the result must not contain unresolved (i.e. not spliced) type parameters and abstract type members.
+ * If false then the function will always succeed (abstract types will be erased to their upper bounds).
+ *
+ * @return Tree that represents a `scala.reflect.ErasureTag` for `tp` if everything is okay.
+ * EmptyTree if `concrete` is true and the result contains unresolved (i.e. not spliced) type parameters and abstract type members.
+ */
+ def resolveErasureTag(tp: Type, pos: Position, concrete: Boolean): Tree = {
+ val taggedTp = appliedType(if (concrete) ClassTagClass.typeConstructor else ErasureTagClass.typeConstructor, List(tp))
+ resolveTag(taggedTp, pos)
+ }
+
+ /** Finds in scope or materializes a TypeTag (if `concrete` is false) or a ConcreteTypeTag (if `concrete` is true).
+ *
+ * @param pre Prefix that represents a universe this type tag will be bound to.
+ * @param tp The type we're looking for a TypeTag for, e.g. resolveTypeTag(reflectMirrorPrefix, IntClass.tpe, pos, false) will look for scala.reflect.mirror.TypeTag[Int].
+ * @param pos Position for error reporting. Please provide a meaningful value.
+ * @param concrete If true then the result must not contain unresolved (i.e. not spliced) type parameters and abstract type members.
+ * If false then the function will always succeed (abstract types will be reified as free types).
+ *
+ * @return Tree that represents a `scala.reflect.TypeTag` for `tp` if everything is okay.
+ * EmptyTree if `concrete` is true and the result contains unresolved (i.e. not spliced) type parameters and abstract type members.
+ */
+ def resolveTypeTag(pre: Type, tp: Type, pos: Position, concrete: Boolean): Tree = {
+ val taggedTp = appliedType(singleType(pre, pre member (if (concrete) ConcreteTypeTagClass else TypeTagClass).name), List(tp))
+ resolveTag(taggedTp, pos)
+ }
+ }
+}
\ No newline at end of file
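resolveTag above is just an implicit search (inferImplicit) for the requested tag type at the given position. At the user level the same mechanism shows up through context bounds; a minimal sketch using scala.reflect.ClassTag, the stable relative of the ArrayTag/ErasureTag/TypeTag names used here, assuming a tag is in scope or can be materialized:

import scala.reflect.ClassTag

object TagDemo {
  // Resolving a tag and using it to create a correctly-typed array,
  // which is what resolveArrayTag enables inside the compiler.
  def fill[T: ClassTag](n: Int, elem: T): Array[T] = {
    val tag = implicitly[ClassTag[T]] // analogous to resolveTag's inferImplicit search
    val arr = tag.newArray(n)         // for T = Int this is a primitive int[] underneath
    var i = 0
    while (i < n) { arr(i) = elem; i += 1 }
    arr
  }

  def main(args: Array[String]): Unit =
    println(fill(3, 42).mkString(","))  // 42,42,42
}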
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 8895905ca7..fde760c752 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -14,20 +14,6 @@ import util.returning
abstract class TreeCheckers extends Analyzer {
import global._
- private val everything = ListBuffer[(Phase, Map[Tree, (Symbol, Type)])]()
- private val currentTrees = mutable.Map[Tree, (Symbol, Type)]()
-
- if (settings.debug.value) {
- sys addShutdownHook {
- for ((ph, map) <- everything.toList) {
- println("\n>>>> " + ph + "\n")
- for ((tree, (sym, tpe)) <- map.toList.sortBy(_._1.summaryString)) {
- println("%20s %20s %s".format(sym, tpe, ("" + tree) take 50))
- }
- }
- }
- }
-
private def classstr(x: AnyRef) = x.getClass.getName split """\\.|\\$""" last;
private def typestr(x: Type) = " (tpe = " + x + ")"
private def treestr(t: Tree) = t + " [" + classstr(t) + "]" + typestr(t.tpe)
@@ -106,16 +92,11 @@ abstract class TreeCheckers extends Analyzer {
if (maps.isEmpty || maps.last._1 != ph)
maps += ((ph, new PhaseMap))
- currentTrees.clear()
traverse(unit.body)
- everything += ((ph, currentTrees.toMap))
-
reportChanges()
}
override def traverse(tree: Tree): Unit = {
val sym = tree.symbol
- currentTrees(tree) = ((sym, tree.tpe))
-
if (sym != null && sym != NoSymbol) {
record(sym, tree)
tree match {
@@ -146,11 +127,20 @@ abstract class TreeCheckers extends Analyzer {
def assertFn(cond: Boolean, msg: => Any) =
if (!cond) errorFn(msg)
+ private def wrap[T](msg: => Any)(body: => Unit) {
+ try body
+ catch { case x =>
+ Console.println("Caught " + x)
+ Console.println(msg)
+ x.printStackTrace
+ }
+ }
+
def checkTrees() {
if (settings.verbose.value)
Console.println("[consistency check at the beginning of phase " + phase + "]")
- currentRun.units foreach check
+ currentRun.units foreach (x => wrap(x)(check(x)))
}
def printingTypings[T](body: => T): T = {
@@ -219,11 +209,11 @@ abstract class TreeCheckers extends Analyzer {
tree.tpe = null
saved
})
- super.typed(tree, mode, pt) match {
+ wrap(tree)(super.typed(tree, mode, pt) match {
case _: Literal => ()
case x if x ne tree => treesDiffer(tree, x)
case _ => ()
- }
+ })
case _ => ()
}
@@ -290,7 +280,12 @@ abstract class TreeCheckers extends Analyzer {
if (sym.owner != currentOwner) {
val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse fail("DefTree can't find owner: ")
if (sym.owner != expected)
- fail("Expected owner %s (out of %s), found %s: ".format(expected, currentOwner.ownerChain, sym.owner))
+ fail("""|
+ | currentOwner chain: %s
+ | symbol chain: %s""".stripMargin.format(
+ currentOwner.ownerChain take 3 mkString " -> ",
+ sym.ownerChain mkString " -> ")
+ )
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 2b7c8e8304..b0f6e44e88 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -26,11 +26,12 @@ import util.Statistics._
* @author Martin Odersky
* @version 1.0
*/
-trait Typers extends Modes with Adaptations with PatMatVirtualiser {
+trait Typers extends Modes with Adaptations with Taggings {
self: Analyzer =>
import global._
import definitions._
+ import patmat.DefaultOverrideMatchAttachment
final def forArgMode(fun: Tree, mode: Int) =
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
@@ -83,7 +84,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
private def isPastTyper = phase.id > currentRun.typerPhase.id
- abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with TyperContextErrors {
+ // To enable decent error messages when the typer crashes.
+ // TODO - this only catches trees which go through def typed,
+ // but there are all kinds of back ways - typedClassDef, etc. etc.
+ // Funnel everything through one doorway.
+ var lastTreeToTyper: Tree = EmptyTree
+
+ // when true:
+ // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
+ // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
+ // this is disabled by: -Xoldpatmat, scaladoc or interactive compilation
+ @inline private def newPatternMatching = opt.virtPatmat && !forScaladoc && !forInteractive // && (phase.id < currentRun.uncurryPhase.id)
+
+ abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tagging with TyperContextErrors {
import context0.unit
import typeDebug.{ ptTree, ptBlock, ptLine }
import TyperErrorGen._
@@ -101,6 +114,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case MethodType(params, _) =>
val argResultsBuff = new ListBuffer[SearchResult]()
val argBuff = new ListBuffer[Tree]()
+ // paramFailed cannot be initialized with params.exists(_.tpe.isError) because that would
+ // hide some valid errors for params preceding the erroneous one.
var paramFailed = false
def mkPositionalArg(argTree: Tree, paramName: Name) = argTree
@@ -116,7 +131,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
for(ar <- argResultsBuff)
paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
- val res = if (paramFailed) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context)
+ val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context)
argResultsBuff += res
if (res != SearchFailure) {
@@ -455,13 +470,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* of a this or super with prefix <code>qual</code>.
* packageOk is equal false when qualifying class symbol
*/
- def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean = false): Option[Symbol] =
+ def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean) =
context.enclClass.owner.ownerChain.find(o => qual.isEmpty || o.isClass && o.name == qual) match {
- case Some(c) if packageOK || !c.isPackageClass =>
- Some(c)
- case _ =>
- QualifyingClassError(tree, qual)
- None
+ case Some(c) if packageOK || !c.isPackageClass => c
+ case _ => QualifyingClassError(tree, qual) ; NoSymbol
}
/** The typer for an expression, depending on where we are. If we are before a superclass
@@ -512,7 +524,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
res
}
-
/** The typer for a label definition. If this is part of a template we
* first have to enter the label definition.
*/
@@ -675,7 +686,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.params.isEmpty &&
(isStableContext(tree, mode, pt) || sym.isModule))
- tree.setType(MethodType(List(), singleType(pre, sym)))
+ tree.setType(MethodType(List(), singleType(pre, sym))) // TODO: should this be a NullaryMethodType?
else tree
}
@@ -734,6 +745,58 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
+ /** Check whether feature given by `featureTrait` is enabled.
+ * If it is not, issue an error or a warning depending on whether the feature is required.
+ * @param construct A string expression that is substituted for "#" in the feature description string
+ * @param immediate When set, feature check is run immediately, otherwise it is run
+ * at the end of the typechecking run for the enclosing unit. This
+ * is done to avoid potential cyclic reference errors by implicits
+ * that are forced too early.
+ * @return if feature check is run immediately: true if feature is enabled, false otherwise
+ * if feature check is delayed or suppressed because we are past typer: true
+ */
+ def checkFeature(pos: Position, featureTrait: Symbol, construct: => String = "", immediate: Boolean = false): Boolean =
+ if (isPastTyper) true
+ else {
+ val nestedOwners =
+ featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse
+ val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name
+ def action(): Boolean = {
+ def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, true, false, context) != SearchFailure
+ def hasOption = settings.language.value contains featureName
+ val OK = hasImport || hasOption
+ if (!OK) {
+ val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) =
+ featureTrait getAnnotation LanguageFeatureAnnot
+ val req = if (required) "needs to" else "should"
+ var raw = featureDesc + " " + req + " be enabled\n" +
+ "by making the implicit value language." + featureName + " visible."
+ if (!(currentRun.reportedFeature contains featureTrait))
+ raw += "\nThis can be achieved by adding the import clause 'import language." + featureName + "'\n" +
+ "or by setting the compiler option -language:" + featureName + ".\n" +
+ "See the Scala docs for value scala.language." + featureName + " for a discussion\n" +
+ "why the feature " + req + " be explicitly enabled."
+ currentRun.reportedFeature += featureTrait
+ val msg = raw replace ("#", construct)
+ if (required) unit.error(pos, msg)
+ else currentRun.featureWarnings.warn(pos, msg)
+ }
+ OK
+ }
+ if (immediate) {
+ action()
+ } else {
+ unit.toCheck += action
+ true
+ }
+ }
+
+ def checkExistentialsFeature(pos: Position, tpe: Type, prefix: String) = tpe match {
+ case extp: ExistentialType if !extp.isRepresentableWithWildcards =>
+ checkFeature(pos, ExistentialsFeature, prefix+" "+tpe)
+ case _ =>
+ }
+
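checkFeature above backs the feature checks wired in further down (implicit conversion definitions in typedDefDef, abstract higher-kinded types in typedTypeDef): a feature counts as enabled if the corresponding implicit value from the language object is in scope or the matching -language: option is set; otherwise the message built in action() is reported. A user-level sketch of satisfying the check, assuming the 2.10-style language imports:

// Without one of these, checkFeature reports that implicit conversions need to/should
// be enabled by making the implicit value language.implicitConversions visible.
import scala.language.implicitConversions  // or compile with -language:implicitConversions

object FeatureDemo {
  // The shape the typedDefDef check looks for: an implicit, non-synthetic method
  // whose first parameter list is a single non-implicit parameter.
  implicit def intToString(i: Int): String = i.toString

  def main(args: Array[String]): Unit =
    println((12345: String).length)  // uses the conversion; prints 5
}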
/** Perform the following adaptations of expression, pattern or type `tree` wrt to
* given mode `mode` and given prototype `pt`:
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
@@ -773,17 +836,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = {
def adaptToImplicitMethod(mt: MethodType): Tree = {
- if (context.undetparams nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
+ if (context.undetparams.nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
// dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
// needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt,
// approximate types that depend on arguments since dependency on implicit argument is like dependency on type parameter
mt.approximate,
- // if we are looking for a manifest, instantiate type to Nothing anyway,
- // as we would get ambiguity errors otherwise. Example
- // Looking for a manifest of Nil: This has many potential types,
- // so we need to instantiate to minimal type List[Nothing].
- keepNothings = false, // retract Nothing's that indicate failure, ambiguities in manifests are dealt with in manifestOfType
+ keepNothings = false,
useWeaklyCompatible = true) // #3808
}
@@ -869,7 +928,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
KindArityMismatchError(tree, pt)
} else tree match { // (6)
case TypeTree() => tree
- case _ => TypeTree(tree.tpe) setOriginal (tree) setPos (tree.pos)
+ case _ => TypeTree(tree.tpe) setOriginal tree
}
}
@@ -901,9 +960,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* see test/files/../t5189*.scala
*/
def adaptConstrPattern(): Tree = { // (5)
- val extractor = tree.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe)))
+ def isExtractor(sym: Symbol) = reallyExists(unapplyMember(sym.tpe))
+ val extractor = tree.symbol filter isExtractor
if (extractor != NoSymbol) {
tree setSymbol extractor
+ tree.tpe match {
+ case OverloadedType(pre, alts) => tree.tpe = overloadedType(pre, alts filter isExtractor)
+ case _ =>
+ }
val unapply = unapplyMember(extractor.tpe)
val clazz = unapplyParameterType(unapply)
@@ -1010,7 +1074,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val tparams1 = cloneSymbols(tparams)
val tree1 = if (tree.isType) tree
else TypeApply(tree, tparams1 map (tparam =>
- TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos //@M/tcpolyinfer: changed tparam.tpe to tparam.tpeHK
+ TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
context.undetparams ++= tparams1
notifyUndetparamsAdded(tparams1)
adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
@@ -1031,10 +1095,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (tree.isType)
adaptType()
- else if (context.macrosEnabled && // when macros are enabled
- inExprModeButNot(mode, FUNmode) && !tree.isDef && // and typechecking application
- tree.symbol != null && tree.symbol.isTermMacro) // of a term macro
- macroExpand(this, tree, pt)
+ else if (
+ inExprModeButNot(mode, FUNmode) && !tree.isDef && // typechecking application
+ tree.symbol != null && tree.symbol.isTermMacro) // of a macro
+ macroExpand(this, tree, mode, pt)
else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
adaptConstrPattern()
else if (inAllModes(mode, EXPRmode | FUNmode) &&
@@ -1171,7 +1235,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
&& !qtpe.typeSymbol.isBottomClass
&& qtpe != WildcardType
&& !qual.isInstanceOf[ApplyImplicitView] // don't chain views
- && context.implicitsEnabled
+ && (context.implicitsEnabled || context.enrichmentEnabled)
// Elaborating `context.implicitsEnabled`:
// don't try to adapt a top-level type that's the subject of an implicit search
// this happens because, if isView, typedImplicit tries to apply the "current" implicit value to
@@ -1564,10 +1628,16 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val clazz = mdef.symbol.moduleClass
val typedMods = typedModifiers(mdef.mods)
assert(clazz != NoSymbol, mdef)
+ val noSerializable = (
+ (linkedClass eq NoSymbol)
+ || linkedClass.isErroneous
+ || !linkedClass.isSerializable
+ || clazz.isSerializable
+ )
val impl1 = typerReportAnyContextErrors(context.make(mdef.impl, clazz, newScope)) {
_.typedTemplate(mdef.impl, {
parentTypes(mdef.impl) ++ (
- if (linkedClass == NoSymbol || !linkedClass.isSerializable || clazz.isSerializable) Nil
+ if (noSerializable) Nil
else {
clazz.makeSerializable()
List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
@@ -1941,6 +2011,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (meth.isStructuralRefinementMember)
checkMethodStructuralCompatible(meth)
+ if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
+ case List(param) :: _ if !param.isImplicit =>
+ checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
+ case _ =>
+ }
+
treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType
}
@@ -1972,6 +2048,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case TypeBounds(lo1, hi1) if (!(lo1 <:< hi1)) => LowerBoundError(tdef, lo1, hi1)
case _ => ()
}
+
+ if (tdef.symbol.isDeferred && tdef.symbol.info.isHigherKinded)
+ checkFeature(tdef.pos, HigherKindsFeature)
+
treeCopy.TypeDef(tdef, typedMods, tdef.name, tparams1, rhs1) setType NoType
}
@@ -2115,7 +2195,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
var body1: Tree = typed(cdef.body, pt)
val contextWithTypeBounds = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
- if (contextWithTypeBounds.savedTypeBounds nonEmpty) {
+ if (contextWithTypeBounds.savedTypeBounds.nonEmpty) {
body1.tpe = contextWithTypeBounds restoreTypeBounds body1.tpe
// insert a cast if something typechecked under the GADT constraints,
@@ -2151,88 +2231,94 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
- def prepareTranslateMatch(selector0: Tree, cases: List[CaseDef], mode: Int, resTp: Type) = {
- val (selector, doTranslation) = selector0 match {
- case Annotated(Ident(nme.synthSwitch), selector) => (selector, false)
- case s => (s, true)
- }
- val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
- val selectorTp = packCaptured(selector1.tpe.widen)
+ def ptOrLub(tps: List[Type], pt: Type ) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
+ def ptOrLubPacked(trees: List[Tree], pt: Type) = if (isFullyDefined(pt)) (pt, false) else weakLub(trees map (c => packedType(c, context.owner).deconst))
+
+ // takes untyped sub-trees of a match and type checks them
+ def typedMatch(selector: Tree, cases: List[CaseDef], mode: Int, pt: Type, tree: Tree = EmptyTree): Match = {
+ val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
+ val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector)
+ val casesTyped = typedCases(cases, selectorTp, pt)
- val casesTyped = typedCases(cases, selectorTp, resTp)
- val caseTypes = casesTyped map (c => packedType(c, context.owner).deconst)
- val (ownType, needAdapt) = if (isFullyDefined(resTp)) (resTp, false) else weakLub(caseTypes)
+ val (resTp, needAdapt) =
+ if (opt.virtPatmat) ptOrLubPacked(casesTyped, pt)
+ else ptOrLub(casesTyped map (_.tpe), pt)
- val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, ownType))
+ val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp))
- (selector1, selectorTp, casesAdapted, ownType, doTranslation)
+ treeCopy.Match(tree, selector1, casesAdapted) setType resTp
}
- def translateMatch(selector1: Tree, selectorTp: Type, casesAdapted: List[CaseDef], ownType: Type, doTranslation: Boolean, matchFailGen: Option[Tree => Tree] = None) = {
- def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
- case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg)
- case _ => tp
- }
+ // match has been typed -- virtualize it if we're feeling experimental
+ // (virtualized matches are expanded during type checking so they have the full context available)
+ // otherwise, do nothing: matches are translated during phase `patmat` (unless -Xoldpatmat)
+ def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = {
+ import patmat.{vpmName, PureMatchTranslator, OptimizingMatchTranslator}
+
+ // TODO: add fallback __match sentinel to predef
+ val matchStrategy: Tree =
+ if (!(newPatternMatching && opt.experimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
+ else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
+ case SilentResultValue(ms) => ms
+ case _ => null
+ }
- if (!doTranslation) { // a switch
- Match(selector1, casesAdapted) setType ownType // setType of the Match to avoid recursing endlessly
- } else {
- val scrutType = repeatedToSeq(elimAnonymousClass(selectorTp))
- // we've packed the type for each case in prepareTranslateMatch so that if all cases have the same existential case, we get a clean lub
- // here, we should open up the existential again
- // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
- MatchTranslator(this).translateMatch(selector1, casesAdapted, repeatedToSeq(ownType.skolemizeExistential(context.owner, context.tree)), scrutType, matchFailGen)
- }
+ if (matchStrategy ne null) // virtualize
+ typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt)
+ else
+ match_ // will be translated in phase `patmat`
}
- def typedMatchAnonFun(tree: Tree, cases: List[CaseDef], mode: Int, pt0: Type, selOverride: Option[(List[ValDef], Tree)] = None) = {
- val pt = deskolemizeGADTSkolems(pt0)
- val targs = pt.normalize.typeArgs
- val arity = if (isFunctionType(pt)) targs.length - 1 else 1 // TODO pt should always be a (Partial)Function, right?
- val ptRes = if (targs.isEmpty) WildcardType else targs.last // may not be fully defined
+ // synthesize and type check a PartialFunction implementation based on a match specified by `cases`
+ // Match(EmptyTree, cases) ==> new PartialFunction { def apply<OrElse>(params) = `translateMatch('`(param1,...,paramN)` match { cases }')` }
+ // for fresh params, the selector of the match we'll translate simply gathers those in a tuple
+ // NOTE: restricted to PartialFunction -- leave Function trees if the expected type does not demand a partial function
+ class MatchFunTyper(tree: Tree, cases: List[CaseDef], mode: Int, pt0: Type) {
+ // TODO: remove FunctionN support -- this is currently designed so that it can emit FunctionN and PartialFunction subclasses
+ // however, we should leave Function nodes until Uncurry so phases after typer can still detect normal Function trees
+ // we need to synthesize PartialFunction impls, though, to avoid nastiness in Uncurry in transforming & duplicating generated pattern matcher trees
+ // TODO: remove PartialFunction support from UnCurry
+ private val pt = deskolemizeGADTSkolems(pt0)
+ private val targs = pt.normalize.typeArgs
+ private val arity = if (isFunctionType(pt)) targs.length - 1 else 1 // TODO pt should always be a (Partial)Function, right?
+ private val ptRes = if (targs.isEmpty) WildcardType else targs.last // may not be fully defined
+
+ private val isPartial = pt.typeSymbol == PartialFunctionClass
+ assert(isPartial)
- val isPartial = pt.typeSymbol == PartialFunctionClass
- val anonClass = context.owner.newAnonymousFunctionClass(tree.pos)
- val funThis = This(anonClass)
- val serialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
+ private val anonClass = context.owner.newAnonymousFunctionClass(tree.pos)
+ private val funThis = This(anonClass)
- anonClass addAnnotation serialVersionUIDAnnotation
+ anonClass addAnnotation AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
def deriveFormals =
- selOverride match {
- case None if targs.isEmpty => Nil
- case None => targs.init // is there anything we can do if targs.isEmpty??
- case Some((vparams, _)) =>
- vparams map {p => if(p.tpt.tpe == null) typedType(p.tpt).tpe else p.tpt.tpe}
- }
+ if (targs.isEmpty) Nil
+ else targs.init
- def mkParams(methodSym: Symbol, formals: List[Type] = deriveFormals) = {
- selOverride match {
- case None if targs.isEmpty => MissingParameterTypeAnonMatchError(tree, pt); (Nil, EmptyTree)
- case None =>
- val ps = methodSym newSyntheticValueParams formals // is there anything we can do if targs.isEmpty??
- val ids = ps map (p => Ident(p.name))
- val sel = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
- (ps, sel)
- case Some((vparams, sel)) =>
- val newParamSyms = (vparams, formals).zipped map {(p, tp) =>
- methodSym.newValueParameter(p.name, p.pos.focus, SYNTHETIC) setInfo tp
- }
+ def mkParams(methodSym: Symbol, formals: List[Type] = deriveFormals) =
+ if (formals.isEmpty) { MissingParameterTypeAnonMatchError(tree, pt); Nil }
+ else methodSym newSyntheticValueParams formals
- (newParamSyms, sel.duplicate)
+ def mkSel(params: List[Symbol]) =
+ if (params.isEmpty) EmptyTree
+ else {
+ val ids = params map (p => Ident(p.name))
+ atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
}
- }
import CODE._
// need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
- val casesTrue = if (isPartial) cases map (c => deriveCaseDef(c)(x => TRUE_typed).duplicate) else Nil
+ val casesTrue = if (isPartial) cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE_typed)).duplicate) else Nil
+ // println("casesTrue "+ casesTrue)
+ def parentsPartial(targs: List[Type]) = addSerializable(appliedType(AbstractPartialFunctionClass.typeConstructor, targs))
def applyMethod = {
// rig the show so we can get started typing the method body -- later we'll correct the infos...
- anonClass setInfo ClassInfoType(List(ObjectClass.tpe, pt, SerializableClass.tpe), newScope, anonClass)
- val methodSym = anonClass.newMethod(nme.apply, tree.pos, FINAL)
- val (paramSyms, selector) = mkParams(methodSym)
+ anonClass setInfo ClassInfoType(addSerializable(ObjectClass.tpe, pt), newScope, anonClass)
+ val methodSym = anonClass.newMethod(nme.apply, tree.pos, if(isPartial) (FINAL | OVERRIDE) else FINAL)
+ val paramSyms = mkParams(methodSym)
+ val selector = mkSel(paramSyms)
if (selector eq EmptyTree) EmptyTree
else {
@@ -2241,15 +2327,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
paramSyms foreach (methodBodyTyper.context.scope enter _)
- val (selector1, selectorTp, casesAdapted, resTp, doTranslation) = methodBodyTyper.prepareTranslateMatch(selector, cases, mode, ptRes)
+ val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), cases, mode, ptRes)
+ val resTp = match_.tpe
val methFormals = paramSyms map (_.tpe)
- val parents = List(abstractFunctionType(methFormals, resTp), SerializableClass.tpe)
-
+ val parents = (
+ if (isPartial) parentsPartial(List(methFormals.head, resTp))
+ else addSerializable(abstractFunctionType(methFormals, resTp))
+ )
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
methodSym setInfoAndEnter MethodType(paramSyms, resTp)
- DefDef(methodSym, methodBodyTyper.translateMatch(selector1, selectorTp, casesAdapted, resTp, doTranslation))
+ DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, resTp))
}
}
@@ -2257,15 +2346,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def applyOrElseMethodDef = {
// rig the show so we can get started typing the method body -- later we'll correct the infos...
// targs were type arguments for PartialFunction, so we know they will work for AbstractPartialFunction as well
- def parents(targs: List[Type]) = List(appliedType(AbstractPartialFunctionClass.typeConstructor, targs), SerializableClass.tpe)
-
- anonClass setInfo ClassInfoType(parents(targs), newScope, anonClass)
+ anonClass setInfo ClassInfoType(parentsPartial(targs), newScope, anonClass)
val methodSym = anonClass.newMethod(nme.applyOrElse, tree.pos, FINAL | OVERRIDE)
// create the parameter that corresponds to the function's parameter
- val List(argTp) = deriveFormals
- val A1 = methodSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argTp)
- val (List(x), selector) = mkParams(methodSym, List(A1.tpe))
+ val List(argTp) = deriveFormals
+ val A1 = methodSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argTp)
+ val paramSyms@List(x) = mkParams(methodSym, List(A1.tpe))
+ val selector = mkSel(paramSyms)
if (selector eq EmptyTree) EmptyTree
else {
@@ -2279,40 +2367,64 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
paramSyms foreach (methodBodyTyper.context.scope enter _)
- val (selector1, selectorTp, casesAdapted, resTp, doTranslation) = methodBodyTyper.prepareTranslateMatch(selector, cases, mode, ptRes)
+ val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), cases, mode, ptRes)
+ val resTp = match_.tpe
- anonClass setInfo ClassInfoType(parents(List(argTp, resTp)), newScope, anonClass)
+ anonClass setInfo ClassInfoType(parentsPartial(List(argTp, resTp)), newScope, anonClass)
B1 setInfo TypeBounds.lower(resTp)
anonClass.info.decls enter methodSym // methodSym's info need not change (B1's bound has been updated instead)
- // use applyOrElse's first parameter since the scrut's type has been widened
- def doDefault(scrut_ignored: Tree) = REF(default) APPLY (REF(x))
+ match_ setType B1.tpe
- val body = methodBodyTyper.translateMatch(selector1, selectorTp, casesAdapted, B1.tpe, doTranslation, Some(doDefault))
+ // the default uses applyOrElse's first parameter since the scrut's type has been widened
+ val body = methodBodyTyper.virtualizedMatch(match_ withAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x))), mode, B1.tpe)
DefDef(methodSym, body)
}
}
def isDefinedAtMethod = {
- val methodSym = anonClass.newMethod(nme.isDefinedAt, tree.pos, FINAL)
- val (paramSyms, selector) = mkParams(methodSym)
+ val methodSym = anonClass.newMethod(nme.isDefinedAt, tree.pos.makeTransparent, FINAL)
+ val paramSyms = mkParams(methodSym)
+ val selector = mkSel(paramSyms)
+
if (selector eq EmptyTree) EmptyTree
else {
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
paramSyms foreach (methodBodyTyper.context.scope enter _)
methodSym setInfoAndEnter MethodType(paramSyms, BooleanClass.tpe)
- val (selector1, selectorTp, casesAdapted, resTp, doTranslation) = methodBodyTyper.prepareTranslateMatch(selector, casesTrue, mode, BooleanClass.tpe)
- val body = methodBodyTyper.translateMatch(selector1, selectorTp, casesAdapted, resTp, doTranslation, Some(scrutinee => FALSE_typed))
+ val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), casesTrue, mode, BooleanClass.tpe)
+ val body = methodBodyTyper.virtualizedMatch(match_ withAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe)
DefDef(methodSym, body)
}
}
- val members = if (!isPartial) List(applyMethod) else List(applyOrElseMethodDef, isDefinedAtMethod)
- if (members.head eq EmptyTree) setError(tree)
- else typed(Block(List(ClassDef(anonClass, NoMods, List(List()), List(List()), members, tree.pos)), New(anonClass.tpe)), mode, pt)
+ lazy val members = if (isPartial) {
+ // somehow @cps annotations upset the typer when looking at applyOrElse's signature, but not apply's
+ // TODO: figure out the details (T @cps[U] is not a subtype of Any, but then why does it work for the apply method?)
+ if (targs forall (_ <:< AnyClass.tpe)) List(applyOrElseMethodDef, isDefinedAtMethod)
+ else List(applyMethod, isDefinedAtMethod)
+ } else List(applyMethod)
+
+ def translated =
+ if (members.head eq EmptyTree) setError(tree)
+ else typed(atPos(tree.pos)(Block(List(ClassDef(anonClass, NoMods, List(List()), List(List()), members, tree.pos.focus)), atPos(tree.pos.focus)(New(anonClass.tpe)))), mode, pt)
+ }
+
+ // Function(params, Match(sel, cases)) ==> new <Partial>Function { def apply<OrElse>(params) = `translateMatch('sel match { cases }')` }
+ class MatchFunTyperBetaReduced(fun: Function, sel: Tree, cases: List[CaseDef], mode: Int, pt: Type) extends MatchFunTyper(fun, cases, mode, pt) {
+ override def deriveFormals =
+ fun.vparams map { p => if(p.tpt.tpe == null) typedType(p.tpt).tpe else p.tpt.tpe }
+
+ // the only difference from the super class is that we must preserve the names of the parameters
+ override def mkParams(methodSym: Symbol, formals: List[Type] = deriveFormals) =
+ (fun.vparams, formals).zipped map { (p, tp) =>
+ methodSym.newValueParameter(p.name, p.pos.focus, SYNTHETIC) setInfo tp
+ }
+
+ override def mkSel(params: List[Symbol]) = sel.duplicate
}
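MatchFunTyper above turns a bare { case ... } block into an anonymous PartialFunction subclass whose method bodies are the typed match (apply/applyOrElse, plus isDefinedAt built from casesTrue). Ignoring the AbstractPartialFunction/applyOrElse and attachment machinery, the expansion is roughly the hand-written class below (names and the exact parent are illustrative):

object PartialFunctionDemo {
  // What the user writes when the expected type is PartialFunction[Any, Int]:
  val pf: PartialFunction[Any, Int] = { case s: String => s.length }

  // Roughly what gets synthesized for it:
  val pfExpanded: PartialFunction[Any, Int] = new PartialFunction[Any, Int] {
    def apply(x: Any): Int = x match { case s: String => s.length }
    def isDefinedAt(x: Any): Boolean = x match {
      case s: String => true  // casesTrue: same patterns, bodies replaced by TRUE_typed
      case _         => false // the DefaultOverrideMatchAttachment(FALSE_typed) default
    }
  }

  def main(args: Array[String]): Unit = {
    println(pf.isDefinedAt("abc"))  // true
    println(pfExpanded.lift(42))    // None
  }
}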
/**
@@ -2366,11 +2478,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
fun.body match {
- case Match(sel, cases) if opt.virtPatmat =>
+ // a later phase indicates scaladoc is calling (where trees are in a messed-up state)
+ // -- so fall back to old patmat, which is more forgiving
+ case Match(sel, cases) if (sel ne EmptyTree) && newPatternMatching && (pt.typeSymbol == PartialFunctionClass) =>
// go to outer context -- must discard the context that was created for the Function since we're discarding the function
// thus, its symbol, which serves as the current context.owner, is not the right owner
// you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
- newTyper(context.outer).typedMatchAnonFun(fun, cases, mode, pt, Some((fun.vparams, sel)))
+ val outerTyper = newTyper(context.outer)
+ (new outerTyper.MatchFunTyperBetaReduced(fun, sel, cases, mode, pt)).translated
case _ =>
val vparamSyms = fun.vparams map { vparam =>
enterSym(context, vparam)
@@ -2395,10 +2510,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
namer.enterSyms(stats)
// need to delay rest of typedRefinement to avoid cyclic reference errors
unit.toCheck += { () =>
- // go to next outer context which is not silent, see #3614
- var c = context
- while (c.bufferErrors) c = c.outer
- val stats1 = newTyper(c).typedStats(stats, NoSymbol)
+ val stats1 = typedStats(stats, NoSymbol)
for (stat <- stats1 if stat.isDef) {
val member = stat.symbol
if (!(context.owner.ancestors forall
@@ -2411,7 +2523,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def typedImport(imp : Import) : Import = (transformed remove imp) match {
case Some(imp1: Import) => imp1
- case None => log("unhandled import: "+imp+" in "+unit); imp
+ case _ => log("unhandled import: "+imp+" in "+unit); imp
}
private def isWarnablePureExpression(tree: Tree) = tree match {
case EmptyTree | Literal(Constant(())) => false
@@ -2437,9 +2549,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
else
stat match {
case imp @ Import(_, _) =>
- context = context.makeNewImport(imp)
imp.symbol.initialize
- typedImport(imp)
+ if (!imp.symbol.isError) {
+ context = context.makeNewImport(imp)
+ typedImport(imp)
+ } else EmptyTree
case _ =>
if (localTarget && !includesTargetPos(stat)) {
// skip typechecking of statements in a sequence where some other statement includes
@@ -2817,7 +2931,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
(args exists isNamed) || // uses a named argument
isNamedApplyBlock(fun)) { // fun was transformed to a named apply block =>
// integrate this application into the block
- tryNamesDefaults
+ if (dyna.isApplyDynamicNamed(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt)
+ else tryNamesDefaults
} else {
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
@@ -2877,7 +2992,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns
val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
- if (targ == WildcardType) tparam.tpe else targ) //@M TODO: should probably be .tpeHK
+ if (targ == WildcardType) tparam.tpeHK else targ)
var remainingParams = paramTypes
def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
val lenientPt = formal.instantiateTypeParams(tparams, lenientTargs)
@@ -3030,7 +3145,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (annInfo.atp.isErroneous) { hasError = true; None }
else Some(NestedAnnotArg(annInfo))
- // use of Array.apply[T: ClassManifest](xs: T*): Array[T]
+ // use of Array.apply[T: ArrayTag](xs: T*): Array[T]
// and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
case Apply(fun, args) =>
val typedFun = typed(fun, forFunMode(mode), WildcardType)
@@ -3126,13 +3241,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (hasError) annotationError
- else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args)).setPos(ann.pos)
+ else AnnotationInfo(annType, List(), nvPairs map {p => (p._1.asInstanceOf[Name], p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos)) // [Eugene+] why do we need this cast?
}
} else if (requireJava) {
reportAnnotationError(NestedAnnotationError(ann, annType))
} else {
val typedAnn = if (selfsym == NoSymbol) {
- typed(ann, mode, annClass.tpe)
+ // local dummy fixes SI-5544
+ val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos)))
+ localTyper.typed(ann, mode, annClass.tpe)
} else {
// Since a selfsym is supplied, the annotation should have
// an extra "self" identifier in scope for type checking.
@@ -3218,14 +3335,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case tp => tp
}
- (hidden map { s =>
- // Hanging onto lower bound in case anything interesting
- // happens with it.
- (s, s.existentialBound match {
- case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
- case _ => hiBound(s)
- })
- }).toMap
+ // Hanging onto lower bound in case anything interesting
+ // happens with it.
+ mapFrom(hidden)(s => s.existentialBound match {
+ case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
+ case _ => hiBound(s)
+ })
}
/** Given a set `rawSyms` of term- and type-symbols, and a type
@@ -3249,7 +3364,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val typeParams: List[Symbol] = rawSyms map { sym =>
val name = sym.name match {
case x: TypeName => x
- case x => nme.singletonName(x)
+ case x => tpnme.singletonName(x)
}
val bound = allBounds(sym)
val sowner = if (isRawParameter(sym)) context.owner else sym.owner
@@ -3438,7 +3553,108 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case ErrorType =>
setError(treeCopy.TypeApply(tree, fun, args))
case _ =>
- TypedApplyDoesNotTakeTpeParametersError(tree, fun)
+ fun match {
+ // drop the application for an applyDynamic or selectDynamic call since it has been pushed down
+ case treeInfo.DynamicApplication(_, _) => fun
+ case _ => TypedApplyDoesNotTakeTpeParametersError(tree, fun)
+ }
+ }
+
+ object dyna {
+ import treeInfo.{isApplyDynamicName, DynamicUpdate, DynamicApplicationNamed}
+
+ def acceptsApplyDynamic(tp: Type) = tp.typeSymbol isNonBottomSubClass DynamicClass
+
+ /** Returns `Some(t)` if `name` can be selected dynamically on `qual`, `None` if not.
+ * `t` specifies the type to be passed to the applyDynamic/selectDynamic call (unless it is NoType)
+ * NOTE: currently either returns None or Some(NoType) (scala-virtualized extends this to Some(t) for selections on staged Structs)
+ */
+ def acceptsApplyDynamicWithType(qual: Tree, name: Name): Option[Type] =
+ // don't selectDynamic selectDynamic, do select dynamic at unknown type,
+ // in scala-virtualized, we may return a Some(tp) where tp ne NoType
+ if (!isApplyDynamicName(name) && acceptsApplyDynamic(qual.tpe.widen)) Some(NoType)
+ else None
+
+ def isDynamicallyUpdatable(tree: Tree) = tree match {
+ case DynamicUpdate(qual, name) =>
+ // if the qualifier is a Dynamic, that's all we need to know
+ acceptsApplyDynamic(qual.tpe)
+ case _ => false
+ }
+
+ def isApplyDynamicNamed(fun: Tree): Boolean = fun match {
+ case DynamicApplicationNamed(qual, _) if acceptsApplyDynamic(qual.tpe.widen) => true
+ case _ => false
+ // look deeper?
+ // val methPart = treeInfo.methPart(fun)
+ // println("methPart of "+ fun +" is "+ methPart)
+ // if (methPart ne fun) isApplyDynamicNamed(methPart)
+ // else false
+ }
+
+ def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def argToBinding(arg: Tree): Tree = arg match {
+ case AssignOrNamedArg(Ident(name), rhs) => gen.mkTuple(List(CODE.LIT(name.toString), rhs))
+ case _ => gen.mkTuple(List(CODE.LIT(""), arg))
+ }
+ typed(treeCopy.Apply(orig, fun, args map argToBinding), mode, pt)
+ }
+
+ /** Translate selection that does not typecheck according to the normal rules into a selectDynamic/applyDynamic.
+ *
+ * foo.method("blah") ~~> foo.applyDynamic("method")("blah")
+ * foo.method(x = "blah") ~~> foo.applyDynamicNamed("method")(("x", "blah"))
+ * foo.varia = 10 ~~> foo.updateDynamic("varia")(10)
+ * foo.field ~~> foo.selectDynamic("field")
+ * foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13)
+ *
+ * what if we want foo.field == foo.selectDynamic("field") == 1, but `foo.field = 10` == `foo.selectDynamic("field").update(10)` == ()
+ * what would the signature for selectDynamic be? (hint: it needs to depend on whether an update call is coming or not)
+ *
+ * need to distinguish selectDynamic and applyDynamic somehow: the former must return the selected value, the latter must accept an apply or an update
+ * - could have only selectDynamic and pass it a boolean whether more is to come,
+ * so that it can either return the bare value or something that can handle the apply/update
+ * HOWEVER that makes it hard to return unrelated values for the two cases
+ * --> selectDynamic's return type is now dependent on the boolean flag whether more is to come
+ * - simplest solution: have two method calls
+ *
+ */
+ def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] =
+ acceptsApplyDynamicWithType(qual, name) map { tp =>
+ // tp eq NoType => can call xxxDynamic, but not passing any type args (unless specified explicitly by the user)
+ // in scala-virtualized, when not NoType, tp is passed as type argument (for selection on a staged Struct)
+
+ // strip off type application -- we're not doing much with outer, so don't bother preserving cxTree's attributes etc
+ val (outer, explicitTargs) = cxTree match {
+ case TypeApply(fun, targs) => (fun, targs)
+ case Apply(TypeApply(fun, targs), args) => (Apply(fun, args), targs)
+ case t => (t, Nil)
+ }
+
+ @inline def hasNamedArg(as: List[Tree]) = as collectFirst {case AssignOrNamedArg(lhs, rhs) =>} nonEmpty
+
+ // note: context.tree includes at most one Apply node
+ // thus, we can't use it to detect we're going to receive named args in expressions such as:
+ // qual.sel(a)(a2, arg2 = "a2")
+ val oper = outer match {
+ case Apply(`tree`, as) =>
+ val oper =
+ if (hasNamedArg(as)) nme.applyDynamicNamed
+ else nme.applyDynamic
+ // not supported: foo.bar(a1,..., an: _*)
+ if (treeInfo.isWildcardStarArgList(as)) {
+ DynamicVarArgUnsupported(tree, oper)
+ return Some(setError(tree))
+ } else oper
+ case Assign(`tree`, _) => nme.updateDynamic
+ case _ => nme.selectDynamic
+ }
+
+ val dynSel = Select(qual, oper)
+ val tappSel = if (explicitTargs nonEmpty) TypeApply(dynSel, explicitTargs) else dynSel
+
+ atPos(qual.pos)(Apply(tappSel, List(Literal(Constant(name.decode)))))
+ }
}
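dyna.mkInvoke implements the rewrites listed in its comment. A minimal sketch of a Dynamic receiver that the rewritten calls land on; selectDynamic/applyDynamic/applyDynamicNamed/updateDynamic are the real entry points, while the Bag class itself is made up for illustration:

import scala.language.dynamics

class Bag extends Dynamic {
  private val fields = scala.collection.mutable.Map.empty[String, Any]

  def selectDynamic(name: String): Any = fields(name)                        // bag.answer
  def updateDynamic(name: String)(value: Any): Unit = fields(name) = value   // bag.answer = 42
  def applyDynamic(name: String)(args: Any*): String =                       // bag.greet("world")
    name + "(" + args.mkString(", ") + ")"
  def applyDynamicNamed(name: String)(args: (String, Any)*): String =        // bag.greet(who = "world")
    name + "(" + args.map { case (k, v) => k + " = " + v }.mkString(", ") + ")"
}

object DynamicDemo {
  def main(args: Array[String]): Unit = {
    val bag = new Bag
    bag.answer = 42                    // ~~> bag.updateDynamic("answer")(42)
    println(bag.answer)                // ~~> bag.selectDynamic("answer")
    println(bag.greet("world"))        // ~~> bag.applyDynamic("greet")("world")
    println(bag.greet(who = "world"))  // ~~> bag.applyDynamicNamed("greet")(("who", "world"))
  }
}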
@inline final def deindentTyping() = context.typingIndentLevel -= 2
@@ -3456,8 +3672,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def isPatternMode = inPatternMode(mode)
//Console.println("typed1("+tree.getClass()+","+Integer.toHexString(mode)+","+pt+")")
- def ptOrLub(tps: List[Type]) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
-
//@M! get the type of the qualifier in a Select tree, otherwise: NoType
def prefixType(fun: Tree): Type = fun match {
case Select(qualifier, _) => qualifier.tpe
@@ -3483,7 +3697,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
else
tree
original setType ann.tpe
- original setPos tree.pos.focus
TypeTree(tpe) setOriginal original setPos tree.pos.focus
}
@@ -3542,7 +3755,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
ann.tpe = arg1.tpe.withAnnotation(annotInfo)
}
val atype = ann.tpe
- Typed(arg1, resultingTypeTree(atype)) setPos tree.pos.focus setType atype
+ Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype
}
}
@@ -3612,10 +3825,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case _ =>
}
}
+// if (varsym.isVariable ||
+// // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?!
+// (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) {
if (varsym.isVariable || varsym.isValue && phase.erasedTypes) {
val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe)
treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe
}
+ else if(dyna.isDynamicallyUpdatable(lhs1)) {
+ val rhs1 = typed(rhs, EXPRmode | BYVALmode, WildcardType)
+ typed1(Apply(lhs1, List(rhs1)), mode, pt)
+ }
else fail()
}
@@ -3630,14 +3850,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
lazy val thenTp = packedType(thenp1, context.owner)
lazy val elseTp = packedType(elsep1, context.owner)
+ // println("typedIf: "+(thenp1.tpe, elsep1.tpe, ptOrLub(List(thenp1.tpe, elsep1.tpe)),"\n", thenTp, elseTp, thenTp =:= elseTp))
val (owntype, needAdapt) =
// in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
// in the special (though common) case where the types are equal, it pays to pack before comparing
// especially virtpatmat needs more aggressive unification of skolemized types
// this breaks src/library/scala/collection/immutable/TrieIterator.scala
- if (opt.virtPatmat && !isPastTyper && thenTp =:= elseTp) (thenp1.tpe, false) // use unpacked type
+ if ( opt.virtPatmat && !isPastTyper
+ && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
+ && thenTp =:= elseTp
+ ) (thenp1.tpe, false) // use unpacked type
// TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
- else ptOrLub(List(thenp1.tpe, elsep1.tpe))
+ else ptOrLub(List(thenp1.tpe, elsep1.tpe), pt)
if (needAdapt) { //isNumericValueType(owntype)) {
thenp1 = adapt(thenp1, mode, owntype)
@@ -3647,33 +3871,26 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
- def typedMatch(tree: Tree, selector: Tree, cases: List[CaseDef]): Tree = {
- if (opt.virtPatmat && !isPastTyper) {
- if (selector ne EmptyTree) {
- val (selector1, selectorTp, casesAdapted, ownType, doTranslation) = prepareTranslateMatch(selector, cases, mode, pt)
- typed(translateMatch(selector1, selectorTp, casesAdapted, ownType, doTranslation), mode, pt)
- } else typedMatchAnonFun(tree, cases, mode, pt)
- } else if (selector == EmptyTree) {
- val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
- val params = for (i <- List.range(0, arity)) yield
- atPos(tree.pos.focusStart) {
- ValDef(Modifiers(PARAM | SYNTHETIC),
- unit.freshTermName("x" + i + "$"), TypeTree(), EmptyTree)
- }
- val ids = for (p <- params) yield Ident(p.name)
- val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
- val body = treeCopy.Match(tree, selector1, cases)
- typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
- } else {
- val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
- var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt)
- val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
- if (needAdapt) {
- cases1 = cases1 map (adaptCase(_, mode, owntype))
+ // under -Xexperimental (and not -Xoldpatmat), and when there's a suitable __match in scope, virtualize the pattern match
+ // otherwise, type the Match and leave it until phase `patmat` (immediately after typer)
+ // empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it
+ def typedVirtualizedMatch(tree: Tree, selector: Tree, cases: List[CaseDef]): Tree =
+ if (selector == EmptyTree) {
+ if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass)) (new MatchFunTyper(tree, cases, mode, pt)).translated
+ else {
+ val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
+ val params = for (i <- List.range(0, arity)) yield
+ atPos(tree.pos.focusStart) {
+ ValDef(Modifiers(PARAM | SYNTHETIC),
+ unit.freshTermName("x" + i + "$"), TypeTree(), EmptyTree)
+ }
+ val ids = for (p <- params) yield Ident(p.name)
+ val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
+ val body = treeCopy.Match(tree, selector1, cases)
+ typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
}
- treeCopy.Match(tree, selector1, cases1) setType owntype
- }
- }
+ } else
+ virtualizedMatch(typedMatch(selector, cases, mode, pt, tree), mode, pt)
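typedVirtualizedMatch handles the empty-selector case in two ways, as described in the comment above: it synthesizes a PartialFunction implementation when the expected type asks for one, and otherwise manufactures fresh parameters and wraps the Match in a Function. At the user level that is why a bare case block can be written wherever either a FunctionN or a PartialFunction is expected; a small illustration of both paths:

object AnonCaseDemo {
  def main(args: Array[String]): Unit = {
    // Expected type Int => Int: the block becomes Function(List(x$1), x$1 match { cases }),
    // as in the else-branch above.
    val double: Int => Int = { case n => n * 2 }
    println(List(1, 2, 3).map(double))        // List(2, 4, 6)

    // Expected type PartialFunction: the MatchFunTyper path synthesizes the implementation.
    val evens: PartialFunction[Int, Int] = { case n if n % 2 == 0 => n }
    println(List(1, 2, 3, 4).collect(evens))  // List(2, 4)
  }
}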
def typedReturn(expr: Tree) = {
val enclMethod = context.enclMethod
@@ -3732,6 +3949,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val sym = tp.typeSymbol.initialize
if (sym.isAbstractType || sym.hasAbstractFlag)
IsAbstractError(tree, sym)
+ else if (isPrimitiveValueClass(sym)) {
+ NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR)
+ setError(tpt)
+ }
else if (!( tp == sym.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable
// sym.thisSym.tpe == tp.typeOfThis (except for objects)
|| narrowRhs(tp) <:< tp.typeOfThis
@@ -3937,7 +4158,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = {
- val prefix = name.subName(0, name.length - nme.EQL.length)
+ val prefix = name stripSuffix nme.EQL
def mkAssign(vble: Tree): Tree =
Assign(
vble,
@@ -3946,22 +4167,20 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
) setPos tree.pos
def mkUpdate(table: Tree, indices: List[Tree]) = {
- gen.evalOnceAll(table :: indices, context.owner, context.unit) { ts =>
- val tab = ts.head
- val is = ts.tail
- Apply(
- Select(tab(), nme.update) setPos table.pos,
- ((is map (i => i())) ::: List(
- Apply(
- Select(
- Apply(
- Select(tab(), nme.apply) setPos table.pos,
- is map (i => i())) setPos qual.pos,
- prefix) setPos fun.pos,
- args) setPos tree.pos)
- )
- ) setPos tree.pos
- }
+ gen.evalOnceAll(table :: indices, context.owner, context.unit) {
+ case tab :: is =>
+ def mkCall(name: Name, extraArgs: Tree*) = (
+ Apply(
+ Select(tab(), name) setPos table.pos,
+ is.map(i => i()) ++ extraArgs
+ ) setPos tree.pos
+ )
+ mkCall(
+ nme.update,
+ Apply(Select(mkCall(nme.apply), prefix) setPos fun.pos, args) setPos tree.pos
+ )
+ case _ => EmptyTree
+ }
}
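The rewritten mkUpdate above implements the usual desugaring of compound assignment through apply/update: for a qualifier with indices, `t(i) op= v` becomes (with the table and the indices evaluated once via evalOnceAll) `t.update(i, t.apply(i) op v)`. A user-level sketch of the equivalence with a hypothetical container defining apply and update:

object OpAssignDemo {
  class IntTable(size: Int) {
    private val data = new Array[Int](size)
    def apply(i: Int): Int = data(i)
    def update(i: Int, v: Int): Unit = data(i) = v
  }

  def main(args: Array[String]): Unit = {
    val t = new IntTable(4)
    t(2) += 5                    // desugars (roughly) to: t.update(2, t.apply(2) + 5)
    println(t(2))                // 5

    t.update(2, t.apply(2) + 5)  // the hand-written form of the same rewrite
    println(t(2))                // 10
  }
}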
val tree1 = qual match {
@@ -4027,15 +4246,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
}
- def typedThis(qual: Name) = {
- val qualifyingClassSym = if (tree.symbol != NoSymbol) Some(tree.symbol) else qualifyingClass(tree, qual)
- qualifyingClassSym match {
- case Some(clazz) =>
- tree setSymbol clazz setType clazz.thisType.underlying
- if (isStableContext(tree, mode, pt)) tree setType clazz.thisType
- tree
- case None => tree
- }
+ def typedThis(qual: Name) = tree.symbol orElse qualifyingClass(tree, qual, packageOK = false) match {
+ case NoSymbol => tree
+ case clazz =>
+ tree setSymbol clazz setType clazz.thisType.underlying
+ if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
}
/** Attribute a selection where <code>tree</code> is <code>qual.name</code>.
@@ -4046,31 +4261,22 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* @return ...
*/
def typedSelect(qual: Tree, name: Name): Tree = {
- val sym =
- if (tree.symbol != NoSymbol) {
- if (phase.erasedTypes && qual.isInstanceOf[Super])
- qual.tpe = tree.symbol.owner.tpe
- if (false && settings.debug.value) { // todo: replace by settings.check.value?
- val alts = qual.tpe.member(tree.symbol.name).alternatives
- if (!(alts exists (alt =>
- alt == tree.symbol || alt.isTerm && (alt.tpe matches tree.symbol.tpe))))
- assert(false, "symbol "+tree.symbol+tree.symbol.locationString+" not in "+alts+" of "+qual.tpe+
- "\n members = "+qual.tpe.members+
- "\n type history = "+qual.tpe.termSymbol.infosString+
- "\n phase = "+phase)
- }
- tree.symbol
- } else {
- member(qual, name)
+ def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map (typed1(_, mode, pt))
+
+ val sym = tree.symbol orElse member(qual, name) orElse {
+ // symbol not found? --> try to convert implicitly to a type that does have the required
+ // member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an
+ // xml member to StringContext, which in turn has an unapply[Seq] method)
+ if (name != nme.CONSTRUCTOR && inExprModeOr(mode, PATTERNmode)) {
+ val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, true, true)
+ if (qual1 ne qual)
+ return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
- if (sym == NoSymbol && name != nme.CONSTRUCTOR && (mode & EXPRmode) != 0) {
- val qual1 =
- if (member(qual, name) != NoSymbol) qual
- else adaptToMemberWithArgs(tree, qual, name, mode, true, true)
-
- if (qual1 ne qual)
- return typed(treeCopy.Select(tree, qual1, name), mode, pt)
+ NoSymbol
}
+ if (phase.erasedTypes && qual.isInstanceOf[Super] && tree.symbol != NoSymbol)
+ qual.tpe = tree.symbol.owner.tpe
+
if (!reallyExists(sym)) {
if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
@@ -4078,26 +4284,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
// try to expand according to Dynamic rules.
+ asDynamicCall foreach (x => return x)
- if (settings.Xexperimental.value && (qual.tpe.widen.typeSymbol isNonBottomSubClass DynamicClass)) {
- var dynInvoke = Apply(Select(qual, nme.applyDynamic), List(Literal(Constant(name.decode))))
- context.tree match {
- case Apply(tree1, args) if tree1 eq tree =>
- ;
- case _ =>
- dynInvoke = Apply(dynInvoke, List())
- }
- return typed1(util.trace("dynatype: ")(dynInvoke), mode, pt)
- }
-
- if (settings.debug.value) {
- log(
- "qual = "+qual+":"+qual.tpe+
- "\nSymbol="+qual.tpe.termSymbol+"\nsymbol-info = "+qual.tpe.termSymbol.info+
- "\nscope-id = "+qual.tpe.termSymbol.info.decls.hashCode()+"\nmembers = "+qual.tpe.members+
- "\nname = "+name+"\nfound = "+sym+"\nowner = "+context.enclClass.owner
- )
- }
+ debuglog(
+ "qual = "+qual+":"+qual.tpe+
+ "\nSymbol="+qual.tpe.termSymbol+"\nsymbol-info = "+qual.tpe.termSymbol.info+
+ "\nscope-id = "+qual.tpe.termSymbol.info.decls.hashCode()+"\nmembers = "+qual.tpe.members+
+ "\nname = "+name+"\nfound = "+sym+"\nowner = "+context.enclClass.owner
+ )
def makeInteractiveErrorTree = {
val tree1 = tree match {
@@ -4129,7 +4323,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (err.kind != ErrorKinds.Access) {
context issue err
return setError(tree)
- } else (tree1, Some(err))
+ }
+ else (tree1, Some(err))
case SilentResultValue(treeAndPre) =>
(stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None)
}
@@ -4146,7 +4341,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
result match {
// could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
- case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs nonEmpty => // TODO: somehow the new qual is not checked in refchecks
+ case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs.nonEmpty => // TODO: somehow the new qual is not checked in refchecks
treeCopy.SelectFromTypeTree(
result,
(TypeTreeWithDeferredRefCheck(){ () => val tp = qual.tpe; val sym = tp.typeSymbolDirect
@@ -4159,10 +4354,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, false, false)
if (!qual1.isErrorTyped && (qual1 ne qual))
typed(Select(qual1, name) setPos tree.pos, mode, pt)
- else {
- issue(accessibleError.get)
- setError(tree)
- }
+ else
+ // before failing due to access, try a dynamic call.
+ asDynamicCall getOrElse {
+ issue(accessibleError.get)
+ setError(tree)
+ }
case _ =>
result
}
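
As context for the asDynamicCall path introduced above, the sketch below (hypothetical user code, not part of this patch) shows the language-level rewriting that dyna.mkInvoke ultimately produces for a receiver whose type extends scala.Dynamic:

import scala.language.dynamics

class Settings extends Dynamic {
  private val map = scala.collection.mutable.Map.empty[String, Any]
  def selectDynamic(name: String): Any               = map(name)
  def updateDynamic(name: String)(value: Any): Unit  = map(name) = value
  def applyDynamic(name: String)(args: Any*): String = name + args.mkString("(", ", ", ")")
}

object DynaDemo {
  val s = new Settings
  s.verbose = true      // rewritten to s.updateDynamic("verbose")(true)
  val v = s.verbose     // rewritten to s.selectDynamic("verbose")
  val r = s.log("hi")   // rewritten to s.applyDynamic("log")("hi")
}
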
@@ -4225,10 +4422,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
var cx = startingIdentContext
while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { // cx.scope eq null arises during FixInvalidSyms in Duplicators
- // !!! Shouldn't the argument to compileSourceFor be cx, not context?
- // I can't tell because those methods do nothing in the standard compiler,
- // presumably they are overridden in the IDE.
- currentRun.compileSourceFor(context.asInstanceOf[analyzer.Context], name)
pre = cx.enclClass.prefix
defEntry = cx.scope.lookupEntry(name)
if ((defEntry ne null) && qualifies(defEntry.sym)) {
@@ -4466,7 +4659,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
typedValDef(vdef)
case ddef @ DefDef(_, _, _, _, _, _) =>
- newTyper(context.makeNewScope(tree, sym)).typedDefDef(ddef)
+ // flag default getters for constructors. An actual flag would be nice. See SI-5543.
+ //val flag = ddef.mods.hasDefaultFlag && ddef.mods.hasFlag(PRESUPER)
+ val flag = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+ nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
+ newTyper(context.makeNewScope(tree, sym)).constrTyperIf(flag).typedDefDef(ddef)
case tdef @ TypeDef(_, _, _, _) =>
typedTypeDef(tdef)
@@ -4537,7 +4734,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
typedIf(cond, thenp, elsep)
case tree @ Match(selector, cases) =>
- typedMatch(tree, selector, cases)
+ typedVirtualizedMatch(tree, selector, cases)
case Return(expr) =>
typedReturn(expr)
@@ -4547,16 +4744,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
var catches1 = typedCases(catches, ThrowableClass.tpe, pt)
val finalizer1 = if (finalizer.isEmpty) finalizer
else typed(finalizer, UnitClass.tpe)
- val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
+ val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)), pt)
if (needAdapt) {
block1 = adapt(block1, mode, owntype)
catches1 = catches1 map (adaptCase(_, mode, owntype))
}
- if(!isPastTyper && opt.virtPatmat) {
- catches1 = (MatchTranslator(this)).translateTry(catches1, owntype, tree.pos)
- }
-
treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
case Throw(expr) =>
@@ -4657,12 +4850,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// [Eugene] no more MaxArrayDims. ClassTags are flexible enough to allow creation of arrays of arbitrary dimensionality (w.r.t JVM restrictions)
val Some((level, componentType)) = erasure.GenericArray.unapply(tpt.tpe)
val tagType = List.iterate(componentType, level)(tpe => appliedType(ArrayClass.asType, List(tpe))).last
- val newArrayApp = atPos(tree.pos) {
- val tag = resolveClassTag(tree, tagType)
- if (tag.isEmpty) MissingClassTagError(tree, tagType)
+ val newArrayApp = atPos(tree.pos) {
+ val tag = resolveArrayTag(tagType, tree.pos)
+ if (tag.isEmpty) MissingArrayTagError(tree, tagType)
else new ApplyToImplicitArgs(Select(tag, nme.newArray), args)
- }
- typed(newArrayApp, mode, pt)
+ }
+ typed(newArrayApp, mode, pt)
case tree1 =>
tree1
}
@@ -4709,6 +4902,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
else
typedSelect(qual1, name)
+ if (tree.isInstanceOf[PostfixSelect])
+ checkFeature(tree.pos, PostfixOpsFeature, name.decode)
+ if (tree1.symbol != null && tree1.symbol.isOnlyRefinementMember)
+ checkFeature(tree1.pos, ReflectiveCallsFeature, tree1.symbol.toString)
+
if (qual1.symbol == RootPackage) treeCopy.Ident(tree1, name)
else tree1
@@ -4754,9 +4952,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
case etpt @ ExistentialTypeTree(_, _) =>
- typerWithLocalContext(context.makeNewScope(tree, context.owner)){
+ val tree1 = typerWithLocalContext(context.makeNewScope(tree, context.owner)){
_.typedExistentialTypeTree(etpt, mode)
}
+ checkExistentialsFeature(tree1.pos, tree1.tpe, "the existential type")
+ tree1
case dc@TypeTreeWithDeferredRefCheck() => dc // TODO: should we re-type the wrapped tree? then we need to change TypeTreeWithDeferredRefCheck's representation to include the wrapped tree explicitly (instead of in its closure)
case tpt @ TypeTree() =>
@@ -4782,6 +4982,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* @return ...
*/
def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ lastTreeToTyper = tree
indentTyping()
var alreadyTyped = false
@@ -4806,6 +5007,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
ptLine("typing %s: pt = %s".format(ptTree(tree), pt),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
+ "enrichmentEnabled" -> context.enrichmentEnabled,
+ "mode" -> modeString(mode),
"silent" -> context.bufferErrors,
"context.owner" -> context.owner
)
@@ -4909,7 +5112,22 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// We disable implicits because otherwise some constructs will
// type check which should not. The pattern matcher does not
// perform implicit conversions in an attempt to consummate a match.
- context.withImplicitsDisabled(typed(tree, PATTERNmode, pt))
+
+ // on the one hand,
+ // "abc" match { case Seq('a', 'b', 'c') => true }
+ // should be ruled out statically, otherwise this is a runtime
+ // error both because there is an implicit from String to Seq
+ // (even though such implicits are not used by the matcher) and
+ // because the typer is fine with concluding that "abc" might
+ // be of type "String with Seq[T]" and thus eligible for a call
+ // to unapplySeq.
+
+ // on the other hand, we want to be able to use implicits to add members retroactively (e.g., add xml to StringContext)
+
+ // as a compromise, context.enrichmentEnabled tells adaptToMember to go ahead and enrich,
+ // but arbitrary conversions (in adapt) are disabled
+ // TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
+ context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))
}
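
The comment block above describes a compromise: views are still not applied in patterns, but member enrichment is. A minimal sketch of both halves, assuming this patch's enrichment support and a hypothetical `tag` extractor (neither the implicit class nor the method exists in the standard library):

object EnrichmentInPatterns {
  // (a) still rejected: the String => Seq[Char] view is not applied in patterns
  //       "abc" match { case Seq('a', 'b', 'c') => true }   // type error
  // (b) accepted: an extractor added to StringContext via an implicit class
  implicit class TagContext(sc: StringContext) {
    object tag { def unapply(s: String): Boolean = s == sc.parts.mkString }
  }
  def isHello(s: String): Boolean = s match {
    case tag"hello" => true   // typed as StringContext("hello").tag.unapply(s)
    case _          => false
  }
}
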
/** Types a (fully parameterized) type tree */
@@ -4966,7 +5184,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
assert(!context.owner.isTermMacro, context.owner)
val tree1 = typed(tree, pt)
transformed(tree) = tree1
- packedType(tree1, context.owner)
+ val tpe = packedType(tree1, context.owner)
+ checkExistentialsFeature(tree.pos, tpe, "inferred existential type")
+ tpe
}
def computeMacroDefType(tree: Tree, pt: Type): Type = {
@@ -5001,36 +5221,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case None => typed(tree, mode, pt)
}
- // `tree` is only necessary here for its position
- // but that's invaluable for error reporting, so I decided to include it into this method's contract
- // before passing EmptyTree, please, consider passing something meaningful first
- def resolveClassTag(tree: Tree, tp: Type): Tree = beforeTyper {
- inferImplicit(
- EmptyTree,
- appliedType(ClassTagClass.typeConstructor, List(tp)),
- /*reportAmbiguous =*/ true,
- /*isView =*/ false,
- /*context =*/ context,
- /*saveAmbiguousDivergent =*/ true,
- /*pos =*/ tree.pos
- ).tree
- }
-
- // `tree` is only necessary here for its position
- // but that's invaluable for error reporting, so I decided to include it into this method's contract
- // before passing EmptyTree, please, consider passing something meaningful first
- def resolveTypeTag(tree: Tree, pre: Type, tp: Type, full: Boolean): Tree = beforeTyper {
- inferImplicit(
- EmptyTree,
- appliedType(singleType(pre, pre member (if (full) ConcreteTypeTagClass else TypeTagClass).name), List(tp)),
- /*reportAmbiguous =*/ true,
- /*isView =*/ false,
- /*context =*/ context,
- /*saveAmbiguousDivergent =*/ true,
- /*pos =*/ tree.pos
- ).tree
- }
-
/*
def convertToTypeTree(tree: Tree): Tree = tree match {
case TypeTree() => tree
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 1f79d8212d..d75e2705c3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -200,7 +200,7 @@ trait Unapplies extends ast.TreeDSL
def caseClassCopyMeth(cdef: ClassDef): Option[DefDef] = {
def isDisallowed(vd: ValDef) = isRepeatedParamType(vd.tpt) || isByNameParamType(vd.tpt)
val cparamss = constrParamss(cdef)
- val flat = cparamss flatten
+ val flat = cparamss.flatten
if (cdef.symbol.hasAbstractFlag || (flat exists isDisallowed)) None
else {
@@ -210,13 +210,25 @@ trait Unapplies extends ast.TreeDSL
// and re-added in ``finishWith'' in the namer.
def paramWithDefault(vd: ValDef) =
treeCopy.ValDef(vd, vd.mods | DEFAULTPARAM, vd.name, atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt), toIdent(vd))
-
- val paramss = mmap(cparamss)(paramWithDefault)
- val classTpe = classType(cdef, tparams)
+
+ val (copyParamss, funParamss) = cparamss match {
+ case Nil => (Nil, Nil)
+ case ps :: pss =>
+ (List(ps.map(paramWithDefault)), mmap(pss)(p => copyUntyped[ValDef](p).copy(rhs = EmptyTree)))
+ }
+
+ val classTpe = classType(cdef, tparams)
+ val bodyTpe = funParamss.foldRight(classTpe)((params, restp) => gen.scalaFunctionConstr(params.map(_.tpt), restp))
+
+ val argss = copyParamss match {
+ case Nil => Nil
+ case ps :: _ => mmap(ps :: funParamss)(toIdent)
+ }
+ val body = funParamss.foldRight(New(classTpe, argss): Tree)(Function)
Some(atPos(cdef.pos.focus)(
- DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, paramss, classTpe,
- New(classTpe, mmap(paramss)(toIdent)))
+ DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, copyParamss, bodyTpe,
+ body)
))
}
}
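
A rough illustration of the shape this hunk generates for `copy` when a case class has several parameter lists: defaults are kept only on the first list, and the remaining lists are curried into a function result. The example below is a hypothetical, hand-written approximation, not the synthesized tree itself:

case class Rect(w: Int, h: Int)(val label: String)

object CurriedCopyShape {
  // roughly the shape Rect#copy now takes:
  def copyLike(r: Rect)(w: Int = r.w, h: Int = r.h): String => Rect =
    (label: String) => new Rect(w, h)(label)
}
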
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 11d7db5180..472b5180b4 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -9,7 +9,7 @@ package util
import java.net.URL
import scala.collection.{ mutable, immutable }
-import io.{ File, Directory, Path, Jar, AbstractFile, ClassAndJarInfo }
+import io.{ File, Directory, Path, Jar, AbstractFile }
import scala.tools.util.StringOps.splitWhere
import Jar.isJarOrZip
import File.pathSeparator
@@ -23,16 +23,6 @@ import java.net.MalformedURLException
* @author Stepan Koltsov
*/
object ClassPath {
- def scalaLibrary = locate[Option[_]]
- def scalaCompiler = locate[Global]
-
- def infoFor[T](value: T) = info(value.getClass)
- def info[T](clazz: Class[T]) = new ClassAndJarInfo()(ClassManifest[T](clazz))
- def info[T: ClassManifest] = new ClassAndJarInfo[T]
- def locate[T: ClassManifest] = info[T] rootClasspath
- def locateJar[T: ClassManifest] = info[T].rootPossibles find (x => isJarOrZip(x)) map (x => File(x))
- def locateDir[T: ClassManifest] = info[T].rootPossibles find (_.isDirectory) map (_.toDirectory)
-
/** Expand single path entry */
private def expandS(pattern: String): List[String] = {
val wildSuffix = File.separator + "*"
@@ -54,26 +44,6 @@ object ClassPath {
else List(pattern)
}
- /** Return duplicated classpath entries as
- * (name, list of origins)
- * in the order they occur on the path.
- */
- // def findDuplicates(cp: ClassPath[_]) = {
- // def toFullName(x: (String, _, cp.AnyClassRep)) = x._1 + "." + x._3.name
- // def toOriginString(x: ClassPath[_]) = x.origin getOrElse x.name
- //
- // /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
- // val flattened = (
- // for ((pkgName, pkg) <- cp.allPackagesWithNames ; clazz <- pkg.classes) yield
- // (pkgName, pkg, clazz)
- // )
- // val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
- //
- // /** Extract results. */
- // for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
- // (name, dups map { case (_, cp, _) => toOriginString(cp) })
- // }
-
/** Split classpath using platform-dependent path separator */
def split(path: String): List[String] = (path split pathSeparator).toList filterNot (_ == "") distinct
@@ -240,26 +210,6 @@ abstract class ClassPath[T] {
def packages: IndexedSeq[ClassPath[T]]
def sourcepaths: IndexedSeq[AbstractFile]
- /** Information which entails walking the tree. This is probably only
- * necessary for tracking down problems - it's normally not used.
- */
- // def allPackages: List[ClassPath[T]] = packages ::: (packages flatMap (_.allPackages))
- // def allPackageNames: List[String] = {
- // def subpackages(prefix: String, cp: ClassPath[T]): List[String] = (
- // (cp.packages map (prefix + _.name)) :::
- // (cp.packages flatMap (x => subpackages(prefix + x.name + ".", x)))
- // )
- // subpackages("", this)
- // }
- // def allPackagesWithNames: List[(String, ClassPath[T])] = {
- // val root = packages map (p => p.name -> p)
- // val subs =
- // for ((prefix, p) <- root ; (k, v) <- p.allPackagesWithNames) yield
- // (prefix + "." + k, v)
- //
- // root ::: subs
- // }
-
/**
* Represents classes which can be loaded with a ClassfileLoader/MsilFileLoader
* and / or a SourcefileLoader.
@@ -362,6 +312,13 @@ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[Ab
override def toString() = "directory classpath: "+ origin.getOrElse("?")
}
+class DeltaClassPath[T](original: MergedClassPath[T], subst: Map[ClassPath[T], ClassPath[T]])
+extends MergedClassPath[T](original.entries map (e => subst getOrElse (e, e)), original.context) {
+ // not sure we should require that here. Commented out for now.
+ // require(subst.keySet subsetOf original.entries.toSet)
+ // We might add specialized operations for computing classes/packages here. Not sure it's worth it.

+}
+
/**
* A classpath unifying multiple class- and sourcepath entries.
*/
@@ -373,7 +330,7 @@ extends ClassPath[T] {
this(entries.toIndexedSeq, context)
def name = entries.head.name
- def asURLs = entries flatMap (_.asURLs) toList
+ def asURLs = (entries flatMap (_.asURLs)).toList
lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
@@ -431,41 +388,10 @@ extends ClassPath[T] {
}
new MergedClassPath[T](newEntries, context)
}
- //
- // override def allPackages: List[ClassPath[T]] = entries flatMap (_.allPackages)
- // override def allPackageNames = entries flatMap (_.allPackageNames)
- // override def allPackagesWithNames = entries flatMap (_.allPackagesWithNames)
- //
- // def duplicatedClasses = {
- // def toFullName(x: (String, _, AnyClassRep)) = x._1 + "." + x._3.name
- //
- // /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
- // val flattened = (
- // for ((pkgName, pkg) <- allPackagesWithNames ; clazz <- pkg.classes) yield
- // (pkgName, pkg, clazz)
- // )
- // val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
- //
- // /** Using original name list as reference point, return duplicated entries as
- // * (name, list of origins)
- // * in the order they occur on the path.
- // */
- // for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
- // (name, dups map {
- // case (_, cp, _) if cp.origin.isDefined => cp.origin.get
- // case (_, cp, _) => cp.asURLs.mkString
- // })
- // }
- //
def show() {
println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
asClasspathString split ':' foreach (x => println(" " + x))
}
- // def showDuplicates() =
- // ClassPath findDuplicates this foreach {
- // case (name, xs) => println(xs.mkString(name + ":\n ", "\n ", "\n"))
- // }
- //
override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
}
diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
index 00fe49d36a..78bfd5e908 100644
--- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
+++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
@@ -20,11 +20,10 @@ import scala.collection.mutable.ListBuffer
*/
trait ParserUtil extends Parsers {
- class ParserPlus[+T](underlying: Parser[T]) {
+ protected implicit class ParserPlus[+T](underlying: Parser[T]) {
def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b }
def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a }
}
- protected implicit def parser2parserPlus[T](p: Parser[T]): ParserPlus[T] = new ParserPlus(p)
}
case class CommandLine(
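
For reference, the `implicit class` form that replaces the explicit conversion above is shorthand for a class plus an implicit factory method of the same name; a minimal sketch with a made-up enrichment:

object ImplicitClassSketch {
  implicit class IntOps(val self: Int) { def squared: Int = self * self }
  // expands to roughly:
  //   class IntOps(val self: Int) { def squared: Int = self * self }
  //   implicit def IntOps(self: Int): IntOps = new IntOps(self)
  val nine = 3.squared
}
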
diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala
index 667b7d15a6..7c093b7866 100644
--- a/src/compiler/scala/tools/nsc/util/Exceptional.scala
+++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala
@@ -3,140 +3,10 @@ package util
import java.util.concurrent.ExecutionException
import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
-import io.{ Sources, Fileish }
import scala.tools.util.StringOps._
-
-/** A simple throwable wrapper so it looks more like a parade of
- * glittering frame-shaped beauties than the other thing.
- */
-class Exceptional(val ex: Throwable)(implicit prefs: ScalaPrefs) {
- val formatter = prefs.exceptionFormatter(ex)
- val unwrapped = Exceptional.unwrap(ex)
- val table = formatter.newTable(unwrapped)
- def rawTrace() = unwrapped.printStackTrace()
- def isScanDone = prefs.codeSources.isDone()
-
- /** Block until the scanning is complete. */
- def force(): this.type = {
- prefs.codeSources.force()
- this
- }
-
- /** Stack frame contexts are only shown as long as this is true. */
- def spanFn(frame: JavaStackFrame): Boolean = true
-
- /** The result of this will be printed before a context trace. */
- def contextPrelude: String =
- if (isScanDone || prefs.codeSources.isEmpty) ""
- else "/* Still scanning source path: there may be more momentarily. */\n"
-
- /** Frames with surrounding context. */
- private def contextFrames = toList takeWhile spanFn
- def contextHead(): String = contextElems.headOption getOrElse ""
- def contextElems() = contextFrames map formatter.inContext
- def context(): String = context(length)
- def context(num: Int): String = contextPrelude + ojoinOr(contextFrames take num map formatter.inContext, "\n", "No stack trace.")
-
- /** Exceptional doesn't extend Seq because it turns out to be super
- * annoying in the repl: tab-completion sees all the Seq methods.
- */
- def length = toList.length
- def toList = table.toList
- def iterator = table.iterator
- def apply(index: Int) = table(index)
-
- def causes = Exceptional.causes(ex)
- def summary = unwrapped.toString + "\n at " + apply(0).shortNameString
-
- private def println(msg: Any) = {
- Console println msg
- Console.flush()
- }
-
- def show(): Unit = println(context())
- def show(num: Int): Unit = println(context(num))
- def showCauses() = println((ex :: causes).mkString("", "\n caused by -> ", ""))
- def showTable() = println(table)
- def showSummary() = println(summary)
-
- override def toString = summary
-}
-
+import language.implicitConversions
object Exceptional {
- type Catcher[+T] = PartialFunction[Throwable, T]
-
- /** Creates an exception handler which will only ever catch the given
- * number of exceptions (if the given pf is defined there) and after
- * that will disable itself.
- */
- def expiringHandler[T](numCatches: Int)(pf: Catcher[T]): Catcher[T] = {
- var remaining = numCatches;
- { case ex: Throwable if remaining > 0 && pf.isDefinedAt(ex) =>
- remaining -= 1
- pf(ex)
- }
- }
-
- /** The Throwable => Exceptional implicit plus the associated factory. */
- implicit def throwableToExceptional(ex: Throwable)(implicit prefs: ScalaPrefs): Exceptional = apply(ex)(prefs)
- def apply(ex: Throwable)(implicit prefs: ScalaPrefs) = new Exceptional(ex)(prefs)
-
- /** Some handy functions. */
- def stack() = JavaStackFrame frames ((new Throwable).getStackTrace dropWhile isLocal)
- def showme() = apply(new Throwable).show()
- def showstack() = apply(new Throwable).showTable()
-
- /** A frame formatter with more refined aesthetics than the default.
- * Come, let us be civilized.
- */
- object ScalaFormat extends TableDef[JavaStackFrame] {
- >> ("file" -> (_.fileName)) >+ ":"
- << ("line" -> (_.line))
- >> ("class" -> (_.shortestName)) >+ "."
- << ("method" -> (_.methodName))
- }
-
- trait Calibrated {
- def newTable(ex: Throwable): TableDef[JavaStackFrame]#Table
- def inContext(frame: JavaStackFrame): String
- }
- trait Formatter extends (Throwable => Calibrated) {
- def apply(ex: Throwable): Calibrated
- }
- object Formatter {
- def apply(implicit prefs: ScalaPrefs): Formatter = new Formatter {
- def apply(ex: Throwable) = new Calibrated {
- def newTable(ex: Throwable) = new ScalaFormat.Table(JavaStackFrame frames ex)
- def inContext(frame: JavaStackFrame) = new FrameContext(frame, prefs.codeSources) toString
- }
- }
- }
-
- /** Java stack traces have the interesting property of using only the name
- * of the file, no paths. This makes it a bit of a gamble to try to associate
- * a stack frame with a specific file. Witness the heuristic.
- */
- def locateSources(sources: Sources, frame: JavaStackFrame): List[Fileish] = {
- // if only one has a matching path, that's fairly sure to be the one
- val matches = sources(frame.fileName) filter (_.pkgFromPath endsWith frame.pkgName)
- if (matches.isEmpty || matches.tail.isEmpty)
- return matches
-
- // otherwise we'll drink them in and look for a package name
- matches filter (_.pkgFromSource endsWith frame.pkgName)
- }
-
- /** Right now this punts if more than one match and it accepts the first at random.
- */
- def locateSource(sources: Sources, frame: JavaStackFrame): Option[Fileish] =
- locateSources(sources, frame).headOption
-
- def isLocal(ste: StackTraceElement) = ste.getClassName startsWith this.getClass.getName
- def causes(x: Throwable): List[Throwable] = x.getCause match {
- case null => Nil
- case ex => x :: causes(ex)
- }
def unwrap(x: Throwable): Throwable = x match {
case _: InvocationTargetException |
_: ExceptionInInitializerError |
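
Only Exceptional.unwrap survives the deletion above. A self-contained sketch of the unwrapping idea, simplified to two wrapper types (the real method matches the full set shown in the remaining hunk):

object UnwrapSketch {
  import java.lang.reflect.InvocationTargetException
  import java.util.concurrent.ExecutionException

  // peel reflective/executor wrapper exceptions until the underlying cause is reached
  def unwrap(t: Throwable): Throwable = t match {
    case e @ (_: InvocationTargetException | _: ExecutionException) if e.getCause != null =>
      unwrap(e.getCause)
    case other => other
  }
}
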
diff --git a/src/compiler/scala/tools/nsc/util/FlagsUtil.scala b/src/compiler/scala/tools/nsc/util/FlagsUtil.scala
deleted file mode 100644
index b615775468..0000000000
--- a/src/compiler/scala/tools/nsc/util/FlagsUtil.scala
+++ /dev/null
@@ -1,233 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-// Overloading invariants: these are "pseudoinvariants" because many of the
-// methods do not exist on Modifiers, only on Symbol, not to mention it is only
-// speculative that they are mutually exclusive: but is here to provide a
-// starting point for further refinement.
-//
-// 16: BYNAMEPARAM CAPTURED COVARIANT
-// x.isParameter ==> BYNAMEPARAM
-// x.isMutable ==> CAPTURED
-// x.isType ==> COVARIANT
-//
-// 17: CONTRAVARIANT INCONSTRUCTOR LABEL
-// x.isType ==> CONTRAVARIANT
-// x.isClass ==> INCONSTRUCTOR
-// x.isMethod ==> LABEL
-//
-// 25: DEFAULTPARAM TRAIT
-// x.isParameter ==> DEFAULTPARAM
-// x.isClass ==> TRAIT
-//
-// 35: EXISTENTIAL MIXEDIN
-// x.isType ==> EXISTENTIAL
-// x.isTerm ==> MIXEDIN
-//
-// 37: IMPLCLASS PRESUPER
-// x.isClass ==> IMPLCLASS
-// x.isTerm ==> PRESUPER
-
-import scala.collection.{ mutable, immutable }
-import symtab.Flags.ExplicitFlags
-
-class TransFlagManager[T <: Global](val global: T) {
- import global._
- import definitions._
-
- private var trackerStack: List[FlagTracker] = Nil
- private def trackerString = trackerStack.mkString(" ")
-
- class FlagTracker(val name: String) {
- private val mask = symtab.Flags.TRANS_FLAG
- private val seen = new mutable.HashSet[Symbol]
-
- private def doWeOwnFlag = trackerStack.headOption exists (_ eq this)
- private def isOK = trackerStack.isEmpty || (trackerStack.head eq this)
-
- def apply(sym: Symbol) = {
- if (!isOK)
- log("Testing " + sym.name + " for " + name + " flag, but not at top of stack: " + trackerString)
-
- sym hasFlag mask
- }
- def set(sym: Symbol) = {
- if (!isOK)
- log("Tried to set " + name + " but not at top of stack: " + trackerString)
-
- seen += sym
- sym setFlag mask
- }
- def reset(sym: Symbol) = {
- if (!isOK)
- log("Tried to clear " + name + " but not at top of stack: " + trackerString)
-
- seen -= sym
- sym resetFlag mask
- }
- def clear() {
- if (!doWeOwnFlag && seen.nonEmpty)
- log("Clearing " + seen.size + " " + name + " flags even though the stack is: " + trackerString)
-
- seen foreach (_ resetFlag mask)
- seen.clear()
- }
- }
-
- def forceClear() = {
- if (trackerStack.nonEmpty) {
- log("Warning: force clearing the stack at " + phase + ": " + trackerString)
- trackerStack foreach (_.clear())
- trackerStack = Nil
- }
- }
-
- def claimTransFlag(label: String): FlagTracker = {
- if (trackerStack.isEmpty || trackerStack.head.name != label)
- trackerStack ::= new FlagTracker(label)
-
- trackerStack.head
- }
- def releaseTransFlag(label: String): Boolean = {
- trackerStack.isEmpty || {
- if (trackerStack.head.name == label) {
- trackerStack.head.clear()
- trackerStack = trackerStack.tail
- true
- }
- else {
- log("Warning: trying to release " + label + " flag but the stack is: " + trackerStack.mkString(" "))
- false
- }
- }
- }
- def holdingTransFlag[U](label: String)(f: FlagTracker => U): U = {
- try {
- val t = claimTransFlag(label)
- f(t)
- }
- finally {
- releaseTransFlag(label)
- }
- }
-}
-
-
-/** Some functions for generating comments and methods involving flags,
- * with the output determined by reflection so we can have a little more
- * assurance that documentation and debugging output match up with reality.
- *
- * For the compiler, the output can be generated with:
- * scala scala.tools.nsc.util.FlagsUtilCompiler
- */
-class FlagsUtil(flagsObject: AnyRef) {
- /** Override to tweak flag strings before output. */
- def addFlagMetadata(name: String) = name
-
- /** Runs the generative methods in this class. */
- def reflectiveAnalyzeFlags() = {
- mkFlagsTable()
- println("")
- mkFlagToStringMethod()
- }
- /** A list of the flag names found at each bit position.
- */
- def reflectiveFlagNames: List[List[String]] = {
- val pairs = flagMethods map { m =>
- m.getName -> ((m invoke flagsObject) match {
- case x: java.lang.Integer => x.intValue: Long
- case x: java.lang.Long => x.longValue
- })
- }
- (0 to 63).toList map { idx =>
- pairs collect { case (name, value) if value == (1L << idx) => name }
- }
- }
- /** Prints a comment table identifying all the flags (as seen
- * via reflection) and at what bit each is located.
- */
- def mkFlagsTable() = {
- val markedFlagNames = reflectiveFlagNames map (_ map addFlagMetadata)
-
- val widths = 0 to 2 map { column =>
- markedFlagNames collect { case xs if xs.length > column =>
- xs(column).length
- } max
- }
- val fmt = "// %2d: " + (widths map ("%" + _ + "s") mkString " ")
- def padded(xs: List[String]) = xs match {
- case Nil => List("", "", "")
- case x :: Nil => List(x, "", "")
- case x1 :: x2 :: Nil => List(x1, x2, "")
- case _ => xs take 3
- }
- println("// Generated by mkFlagsTable() at " + now + "\n//")
- // prints the grid showing which flags are at each index
- for ((names, idx) <- markedFlagNames.zipWithIndex) {
- println(fmt.format(idx :: padded(names) : _*))
- }
- }
- /** Prints an implementation of flagToString based on the reflectively
- * determined contents of the flags class.
- */
- def mkFlagToStringMethod() = {
- def key(xs: List[String], flag: Long) = xs match {
- case Nil => "%19s".format("0x" + "%x".format(flag) + "L")
- case x :: _ =>
- if (x.head.isLower) "`" + x + "`"
- else x
- }
- def value(xs: List[String], flag: Long) = "\"" + (xs match {
- case Nil => ""
- case x :: Nil if (flag & ExplicitFlags) != 0 => x.toLowerCase
- case xs => xs.map(_.toLowerCase).mkString("<", "/", ">")
- }) + "\""
- val pairs: List[(String, String)] = reflectiveFlagNames.zipWithIndex map {
- case (xs, idx) => (key(xs, 1L << idx), value(xs, 1L << idx))
- }
- val keyWidth = pairs map (_._1.length) max
- val bodyWidth = pairs map (_._2.length) max
- val fmt = " case %" + keyWidth + "s => %-" + bodyWidth + "s // (1L << %d)"
-
- println("// Generated by mkFlagToStringMethod() at " + now)
- println("@annotation.switch override def flagToString(flag: Long): String = flag match {")
- for (((key, body), idx) <- pairs.zipWithIndex) {
- print(fmt.format(key, body, idx))
- println("")
- }
- println(" case _ => \"\"")
- println("}")
- }
-
- def isFlagName(s: String) = s stripPrefix "late" stripPrefix "not" forall (x => x.isUpper || x == '_')
- def flagMethods = flagsObject.getClass.getMethods.toList filter (x => isFlagName(x.getName)) sortBy (_.getName)
- private def now = new java.util.Date toString
-}
-
-object FlagsUtil {
- import reflect.internal.ModifierFlags
-
- trait MarkModifiers extends FlagsUtil {
- lazy val isModifiersFlag = classOf[ModifierFlags].getMethods map (_.getName) filter isFlagName toSet
- override def addFlagMetadata(name: String) = {
- if (isModifiersFlag(name)) name + "/M"
- else name
- }
- }
-}
-
-/** Convenience standalone programs.
- */
-object FlagsUtilCompiler extends FlagsUtil(symtab.Flags) with FlagsUtil.MarkModifiers {
- def main(args: Array[String]): Unit = reflectiveAnalyzeFlags()
-}
-
-object FlagsUtilLibrary extends FlagsUtil(reflect.internal.Flags) with FlagsUtil.MarkModifiers {
- def main(args: Array[String]): Unit = reflectiveAnalyzeFlags()
-}
-
diff --git a/src/compiler/scala/tools/nsc/util/Indenter.scala b/src/compiler/scala/tools/nsc/util/Indenter.scala
deleted file mode 100644
index f9ddc4a194..0000000000
--- a/src/compiler/scala/tools/nsc/util/Indenter.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-package scala.tools.nsc
-package util
-
-import java.io.PrintStream
-
-class Indenter(var stringFn: Any => String) {
- def this() = this("" + _)
- def out: PrintStream = System.out
-
- var indentSpaces = 2
- var isSorted = false
- var openString = ""
- var closeString = ""
-
- def braces: this.type = {
- openString = " {"
- closeString = "}"
- this
- }
- def sorted: this.type = { isSorted = true ; this }
- def stringify(fn: Any => String): this.type = {
- stringFn = fn
- this
- }
-
- def atStartOfLine = startOfLine
- private var indentLevel = 0
- private var startOfLine = true
- def indent: this.type = { indentLevel += 1 ; this }
- def undent: this.type = { indentLevel -= 1 ; this }
- def currentIndent = " " * indentLevel * indentSpaces
- def printIndent() = {
- out.print(currentIndent)
- startOfLine = true
- }
-
- // Execute the given body indented one level further.
- def >>[T](body: => T): T = {
- indentLevel += 1
- try body
- finally indentLevel -= 1
- }
-
- def openIndent(token: Any) {
- print(token + "\n")
- indent
- printIndent()
- }
- def closeIndent(token: Any) {
- print("\n")
- undent
- printIndent()
- print(token)
- }
- def finishLine(token: Any) {
- print(token)
- printIndent()
- }
- def nextIndent(endOfLine: Any) = finishLine(endOfLine)
-
- def block(label: String)(body: => Unit) {
- if (label != "" || openString != "")
- pp(label + openString)
-
- this >> body
-
- if (closeString != "")
- pp(closeString)
- }
- def print(x: Any) = {
- out print stringFn(x)
- out.flush()
- startOfLine = false
- }
- def pps(xs: TraversableOnce[Any]) {
- if (isSorted) xs.toSeq.sortBy("" + _) foreach pp
- else xs foreach pp
- }
- def pp(x: Any) {
- printIndent()
- out println stringFn(x)
- out.flush()
- startOfLine = false
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/JavaStackFrame.scala b/src/compiler/scala/tools/nsc/util/JavaStackFrame.scala
deleted file mode 100644
index d25698ed38..0000000000
--- a/src/compiler/scala/tools/nsc/util/JavaStackFrame.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import io.{ Fileish, Sources }
-import Exceptional._
-
-class FrameContext(frame: JavaStackFrame, codeSources: Sources) {
- val sourceFile = locateSource(codeSources, frame)
- import frame._
-
- def windowWidth = 3
- def windowSize = windowWidth * 2 + 1
-
- lazy val context = sourceFile collect {
- case f if line > 0 =>
- val start = math.max(0, line - windowWidth)
- f.lines().toList.slice(start, start + windowSize)
- } getOrElse Nil
-
- protected def fallbackContext = "%s (%s:%s)".format(tag, fileName, line)
-
- private def linestr(index: Int) = {
- val current = line - windowWidth + index
- val marker = if (current == line) "*" else " "
- marker + current
- }
- private def contextLines = context.zipWithIndex map {
- case (l, i) => linestr(i) + ": " + l + "\n"
- }
- override def toString =
- if (context.isEmpty) fallbackContext
- else contextLines.mkString(tag + "\n", "", "")
-}
-
-object FrameContext {
- def apply(elem: StackTraceElement): FrameContext = apply(new JavaStackFrame(elem))
- def apply(frame: JavaStackFrame): FrameContext = new FrameContext(frame, Sources())
-}
-
-class JavaStackFrame(val elem: StackTraceElement) {
- def className: String = elem.getClassName()
- def methodName: String = elem.getMethodName()
- def fileName: String = elem.getFileName()
- def line: Int = elem.getLineNumber()
-
- private def segs = className takeWhile (ch => ch != '$' && ch != '(') split '.' toList ;
- lazy val pkgName = segs.init mkString "."
- lazy val shortName = segs.last
- lazy val shortestName = if (fileName startsWith (shortName + ".")) "<--" else shortName
-
- private def standardString(which: String) =
- "%s.%s(%s:%s)".format(which, methodName, fileName, line)
-
- def locationString = fileName + ":" + line
- def javaString = standardString(className)
- def shortNameString = standardString(shortName)
- def tag = "[%s.%s]".format(shortName, methodName)
-
- override def toString = shortNameString
-}
-
-object JavaStackFrame {
- def apply(elem: StackTraceElement) = new JavaStackFrame(elem)
- def frames(xs: Array[StackTraceElement]): Array[JavaStackFrame] = xs map (x => new JavaStackFrame(x))
- def frames(t: Throwable): Array[JavaStackFrame] = frames(Exceptional.unwrap(t).getStackTrace)
-}
diff --git a/src/compiler/scala/tools/nsc/util/Position.scala b/src/compiler/scala/tools/nsc/util/Position.scala
index 573f7bc7b2..208cd5703a 100644
--- a/src/compiler/scala/tools/nsc/util/Position.scala
+++ b/src/compiler/scala/tools/nsc/util/Position.scala
@@ -40,7 +40,15 @@ trait Position extends scala.reflect.api.Position with scala.reflect.api.Attachm
/** A bit weird method that is necessary to safely update positions without destroying custom attachments */
// necessary for conformance with Attachment
- def withPos(pos: scala.reflect.api.Position) = pos
+ def withPos(newPos: scala.reflect.api.Position): scala.reflect.api.Attachment = newPos
+
+ /** Exposes itself as payload of Attachment */
+ // necessary for conformance with Attachment
+ def payload: Position = this
+
+ /** A bit weird method that is necessary to safely update positions without destroying custom attachments */
+ // necessary for conformance with Attachment
+ def withPayload(newPos: Any): scala.reflect.api.Attachment = newPos.asInstanceOf[Position]
/** Java file corresponding to the source file of this position.
*/
diff --git a/src/compiler/scala/tools/nsc/util/ProxyReport.scala b/src/compiler/scala/tools/nsc/util/ProxyReport.scala
deleted file mode 100644
index 4fc86c3a32..0000000000
--- a/src/compiler/scala/tools/nsc/util/ProxyReport.scala
+++ /dev/null
@@ -1,146 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import scala.collection.{ mutable, immutable, generic }
-
-/** A class for analyzing forwarding/proxy relationships.
- */
-trait ProxyReport {
- val global: Global
- import global._
- import definitions._
-
- private object classes {
- def isIgnorable(sym: Symbol) = sym :: sym.allOverriddenSymbols exists { s =>
- ObjectClass isSubClass s.owner
- }
- def nonPrivateMethods(sym: Symbol) = {
- val methods = sym.initialize.tpe.nonPrivateMembers filter { x =>
- x.isMethod && !x.isConstructor && !x.isPrivate && !isIgnorable(x)
- }
- methods foreach (m => m.initialize.info.paramss.flatten foreach (_.initialize))
- methods
- }
- lazy val GlobalClass = getRequiredClass(classOf[Global].getName)
- lazy val GenericClass = getRequiredModule("scala.collection.generic").moduleClass
- lazy val CollectionClass = getRequiredModule("scala.collection").moduleClass
-
- def getType(name: String) = getMember(GlobalClass, newTypeName(name))
- def getColl(name: String) = getMember(CollectionClass, newTypeName(name))
- def getGeneric(name: String) = getMember(GenericClass, newTypeName(name))
-
- // the following operations + those in RewrappingTypeProxy are all operations
- // in class Type that are overridden in some subclass
- // Important to keep this up-to-date when new operations are added!
- def TypeClass = getType("Type")
- def SimpleTypeProxy = getType("SimpleTypeProxy")
- def RewrappingTypeProxy = getType("RewrappingTypeProxy")
-
- def TraversableForwarder = getGeneric("TraversableForwarder")
- def IterableForwarder = getGeneric("IterableForwarder")
- def SeqForwarder = getGeneric("SeqForwarder")
- def TraversableLike = getColl("TraversableLike")
- def TraversableProxy = getColl("TraversableProxyLike")
- def IterableLike = getColl("IterableLike")
- def IterableProxy = getColl("IterableProxyLike")
- def MapLike = getColl("MapLike")
- def MapProxy = getColl("MapProxyLike")
- def SeqLike = getColl("SeqLike")
- def SeqProxy = getColl("SeqProxyLike")
- def SetLike = getColl("SetLike")
- def SetProxy = getColl("SetProxyLike")
- }
- import classes._
-
- val wrappedHeader = """
-/** With respect to %s, %s wraps:
- */
-trait Wrapped {
- """.trim
- val unwrappedHeader = """
-/** With respect to %s, %s does NOT wrap:
- */
-trait Unwrapped {
- """.trim
-
- def wrapReport(underlying: Symbol, proxy: Symbol) = {
- val underlyingMs = nonPrivateMethods(underlying)
- val proxyMs = nonPrivateMethods(proxy) filterNot (_.owner == underlying)
- val (wrapped, unwrapped) = underlyingMs partition (m =>
- proxyMs exists (p =>
- (p.name == m.name) && {
- val self = proxy.thisType
- val memberTp = self.memberType(p)
- val parentTp = self.memberType(m)
-
- refChecks.overridesTypeInPrefix(memberTp, parentTp, self)
- // || {
- // // if (p.paramss.flatten.length == m.paramss.flatten.length)
- // // println("names equal, overridesType false:\n " + ((p, m, memberTp, parentTp, self)) + "\n")
- //
- // false
- // }
- }
- )
- )
-
- def show(xs: List[Symbol], template: String) = {
- val lines = xs.map(_.initialize.defString).sorted.map(" " + _ + "\n")
- lines.mkString(template.format(underlying, proxy) + "\n", "", "}")
- }
-
- show(wrapped, wrappedHeader) + "\n\n" + show(unwrapped, unwrappedHeader)
- }
-
- lazy val wrappers = List(
- TypeClass -> SimpleTypeProxy,
- TypeClass -> RewrappingTypeProxy,
- TraversableClass -> TraversableForwarder,
- IterableClass -> IterableForwarder,
- SeqClass -> SeqForwarder,
- TraversableLike -> TraversableProxy,
- IterableLike -> IterableProxy,
- MapLike -> MapProxy,
- SetLike -> SetProxy,
- SeqLike -> SeqProxy
- )
-
- def generate(dir: io.Directory) = {
- /** A proxy for a type (identified by field `underlying`) that forwards most
- * operations to it (for exceptions, see WrappingProxy, which forwards even more operations).
- * every operation that is overridden for some kind of types should be forwarded.
- */
- for ((clazz, proxy) <- wrappers) {
- val text = wrapReport(clazz, proxy)
- val file = dir / (proxy.fullName + ".scala") toFile;
-
- file writeAll text
- println("Created " + file)
- }
- }
-}
-
-object ProxyReportRunner {
- class ProxyGlobal(s: Settings) extends Global(s) {
- object proxyReport extends {
- val global: ProxyGlobal.this.type = ProxyGlobal.this
- } with util.ProxyReport
- }
-
- def main(args: Array[String]): Unit = {
- if (args.isEmpty)
- return println("Give an output directory as argument.")
-
- val dir = io.Directory(args(0)).createDirectory()
- val s = new Settings()
- s.processArguments(args.toList.tail, true)
- val g = new ProxyGlobal(s)
- val run = new g.Run()
- g.afterTyper(g.proxyReport.generate(dir))
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/RegexCache.scala b/src/compiler/scala/tools/nsc/util/RegexCache.scala
deleted file mode 100644
index 896c1a536f..0000000000
--- a/src/compiler/scala/tools/nsc/util/RegexCache.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Lex Spoon
- */
-
-package scala.tools.nsc
-package util
-import java.util.regex.Pattern
-import scala.collection.mutable
-
-object RegexCache {
- /** Maps patterns to compiled regexes */
- private val regexMap = mutable.Map.empty[String, Pattern]
-
- /** Lists the regexes that have been recorded in order */
- private val regexList = new mutable.Queue[String]
-
- private val regexesToCache = 1000
-
- /** Compile a regex and add it to the cache */
- private def compileAndAdd(regex: String): Pattern = {
- val pattern = Pattern.compile(regex)
-
- regexMap += (regex -> pattern)
- regexList += regex
-
- if (regexMap.size > regexesToCache)
- regexMap -= regexList.dequeue()
-
- pattern
- }
-
-
- /** Compile a regex, caching */
- def apply(regex: String): Pattern =
- regexMap.get(regex) match {
- case Some(pattern) => pattern
- case None => compileAndAdd(regex)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
index fda713c5c6..4c7920d6b3 100644
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
@@ -14,7 +14,7 @@ import java.net.URL
import scala.reflect.ReflectionUtils.unwrapHandler
import ScalaClassLoader._
import scala.util.control.Exception.{ catching }
-// import Exceptional.unwrap
+import language.implicitConversions
trait HasClassPath {
def classPathURLs: Seq[URL]
@@ -24,14 +24,6 @@ trait HasClassPath {
* of java reflection.
*/
trait ScalaClassLoader extends JClassLoader {
- /** Override to see classloader activity traced */
- protected def trace: Boolean = false
- protected lazy val classLoaderUniqueId = "Cl#" + System.identityHashCode(this)
- protected def classLoaderLog(msg: => String) {
- if (trace)
- Console.err.println("[" + classLoaderUniqueId + "] " + msg)
- }
-
/** Executing an action with this classloader as context classloader */
def asContext[T](action: => T): T = {
val saved = contextLoader
@@ -53,20 +45,8 @@ trait ScalaClassLoader extends JClassLoader {
def create(path: String): AnyRef =
tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull
- override def findClass(name: String) = {
- val result = super.findClass(name)
- classLoaderLog("findClass(%s) = %s".format(name, result))
- result
- }
-
- override def loadClass(name: String, resolve: Boolean) = {
- val result = super.loadClass(name, resolve)
- classLoaderLog("loadClass(%s, %s) = %s".format(name, resolve, result))
- result
- }
-
- def constructorsOf[T <: AnyRef : Manifest]: List[Constructor[T]] =
- manifest[T].erasure.getConstructors.toList map (_.asInstanceOf[Constructor[T]])
+ def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] =
+ classTag[T].erasure.getConstructors.toList map (_.asInstanceOf[Constructor[T]])
/** The actual bytes for a class file, or an empty array if it can't be found. */
def classBytes(className: String): Array[Byte] = classAsStream(className) match {
@@ -98,7 +78,6 @@ trait ScalaClassLoader extends JClassLoader {
case null => Nil
case p => p.loaderChain
})
- override def toString = classLoaderUniqueId
}
/** Methods for obtaining various classloaders.
@@ -123,9 +102,9 @@ object ScalaClassLoader {
def bootLoader = apply(null)
def contextChain = loaderChain(contextLoader)
- def pathToErasure[T: ClassManifest] = pathToClass(classManifest[T].erasure)
- def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class"
- def locate[T: ClassManifest] = contextLoader getResource pathToErasure[T]
+ def pathToErasure[T: ClassTag] = pathToClass(classTag[T].erasure)
+ def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class"
+ def locate[T: ClassTag] = contextLoader getResource pathToErasure[T]
/** Tries to guess the classpath by type matching the context classloader
* and its parents, looking for any classloaders which will reveal their
@@ -171,7 +150,7 @@ object ScalaClassLoader {
classloaderURLs :+= url
super.addURL(url)
}
- def toLongString = urls.mkString("URLClassLoader(id=" + classLoaderUniqueId + "\n ", "\n ", "\n)\n")
+ def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n")
}
def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader =
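
The Manifest-to-ClassTag switch above follows the 2.10 reflection migration; a minimal sketch of the style (the member is spelled `erasure` in this patch and `runtimeClass` in later releases):

import scala.reflect.{ClassTag, classTag}

object ClassTagSketch {
  def runtimeClassOf[T: ClassTag]: Class[_] = classTag[T].runtimeClass
  val ok = runtimeClassOf[String] == classOf[String]   // true
}
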
diff --git a/src/compiler/scala/tools/nsc/util/ScalaPrefs.scala b/src/compiler/scala/tools/nsc/util/ScalaPrefs.scala
deleted file mode 100644
index 03e0f54606..0000000000
--- a/src/compiler/scala/tools/nsc/util/ScalaPrefs.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import io.Sources
-
-trait ScalaPrefs {
- def codeSources: Sources
- def exceptionFormatter: Exceptional.Formatter
-}
-
-trait LowPriorityScalaPrefs {
- implicit object DefaultScalaPrefs extends ScalaPrefs {
- def codeSources = Sources.defaultSources
- def exceptionFormatter = Exceptional.Formatter(this)
- }
-}
-
-object ScalaPrefs extends LowPriorityScalaPrefs {
- def apply(implicit prefs: ScalaPrefs): ScalaPrefs = prefs
-}
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index fc39a218f8..53ed96d0d3 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -11,8 +11,6 @@ import java.io.{File, FileInputStream, PrintStream}
import java.lang.Long.toHexString
import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
-
-import cmd.program.Simple
import scala.reflect.internal.{Flags, Names}
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import interpreter.ByteCode.scalaSigBytesForPath
@@ -119,21 +117,18 @@ object ShowPickled extends Names {
result.toInt
}
- def printFile(buf: PickleBuffer, out: PrintStream): Unit = printFile(buf, out, false)
- def printFile(buf: PickleBuffer, out: PrintStream, bare: Boolean) {
+ def printFile(buf: PickleBuffer, out: PrintStream) {
out.println("Version " + buf.readNat() + "." + buf.readNat())
val index = buf.createIndex
val entryList = makeEntryList(buf, index)
buf.readIndex = 0
- /** A print wrapper which discards everything if bare is true.
- */
- def p(s: String) = if (!bare) out print s
+ def p(s: String) = out print s
def printNameRef() {
val idx = buf.readNat()
val name = entryList nameAt idx
- val toPrint = if (bare) " " + name else " %s(%s)".format(idx, name)
+ val toPrint = " %s(%s)".format(idx, name)
out print toPrint
}
@@ -156,7 +151,7 @@ object ShowPickled extends Names {
val accessBoundary = (
for (idx <- privateWithin) yield {
val s = entryList nameAt idx
- if (bare) s else idx + "(" + s + ")"
+ idx + "(" + s + ")"
}
)
val flagString = {
@@ -283,31 +278,18 @@ object ShowPickled extends Names {
try Some(new PickleBuffer(data, 0, data.length))
catch { case _: Exception => None }
- def show(what: String, pickle: PickleBuffer, bare: Boolean) = {
+ def show(what: String, pickle: PickleBuffer) = {
Console.println(what)
val saved = pickle.readIndex
pickle.readIndex = 0
- printFile(pickle, Console.out, bare)
+ printFile(pickle, Console.out)
pickle.readIndex = saved
}
- private lazy val ShowPickledSpec =
- Simple(
- Simple.scalaProgramInfo("showPickled", "Usage: showPickled [--bare] <classname>"),
- List("--bare" -> "suppress numbers in output"),
- Nil,
- null
- )
-
- /** Option --bare suppresses numbers so the output can be diffed.
- */
def main(args: Array[String]) {
- val runner = ShowPickledSpec instance args
- import runner._
-
- residualArgs foreach { arg =>
+ args foreach { arg =>
(fromFile(arg) orElse fromName(arg)) match {
- case Some(pb) => show(arg + ":", pb, parsed isSet "--bare")
+ case Some(pb) => show(arg + ":", pb)
case _ => Console.println("Cannot read " + arg)
}
}
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
index d1cdd30dd8..53ab6654ee 100644
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ b/src/compiler/scala/tools/nsc/util/Statistics.scala
@@ -57,6 +57,17 @@ class Statistics extends scala.reflect.internal.util.Statistics {
val counter2: SubCounter = new SubCounter(subtypeCount)
val timer1: Timer = new Timer
val timer2: Timer = new Timer
+
+ val macroExpandCount = new Counter
+ val macroExpandNanos = new Timer
+
+ val patmatNanos = new Timer
+ val patmatAnaDPLL = new Timer
+ val patmatAnaVarEq = new Timer
+ val patmatCNF = new Timer
+ val patmatAnaExhaust = new Timer
+ val patmatAnaReach = new Timer
+ val patmatCNFSizes = new collection.mutable.HashMap[Int, Int] withDefaultValue 0
}
object Statistics extends Statistics
@@ -68,7 +79,7 @@ abstract class StatisticsInfo {
val global: Global
import global._
- var phasesShown = List("parser", "typer", "erasure", "cleanup")
+ var phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
def countNodes(tree: Tree, counts: ClassCounts) {
for (t <- tree) counts(t.getClass) += 1
@@ -80,10 +91,15 @@ abstract class StatisticsInfo {
def showRelTyper(timer: Timer) =
timer+showPercent(timer.nanos, typerNanos.nanos)
- def showCounts(counts: ClassCounts) =
+ def showRelPatmat(timer: Timer) =
+ timer+showPercent(timer.nanos, patmatNanos.nanos)
+
+ def showCounts[T](counts: scala.collection.mutable.Map[T, Int]) =
counts.toSeq.sortWith(_._2 > _._2).map {
- case (cls, cnt) =>
+ case (cls: Class[_], cnt) =>
cls.toString.substring(cls.toString.lastIndexOf("$") + 1)+": "+cnt
+ case (o, cnt) =>
+ o.toString +": "+cnt
}
def print(phase: Phase) = if (phasesShown contains phase.name) {
@@ -125,34 +141,36 @@ abstract class StatisticsInfo {
inform("ms type-flow-analysis: " + analysis.timer.millis)
if (phase.name == "typer") {
- inform("time spent typechecking : "+showRelTyper(typerNanos))
- inform("time classfilereading : "+showRelTyper(classReadNanos))
- inform("time spent in implicits : "+showRelTyper(implicitNanos))
- inform(" successful in scope : "+showRelTyper(inscopeSucceedNanos))
- inform(" failed in scope : "+showRelTyper(inscopeFailNanos))
- inform(" successful of type : "+showRelTyper(oftypeSucceedNanos))
- inform(" failed of type : "+showRelTyper(oftypeFailNanos))
- inform(" assembling parts : "+showRelTyper(subtypeETNanos))
- inform(" matchesPT : "+showRelTyper(matchesPtNanos))
- inform("implicit cache hits : "+showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
- inform("time spent in failed : "+showRelTyper(failedSilentNanos))
- inform(" failed apply : "+showRelTyper(failedApplyNanos))
- inform(" failed op= : "+showRelTyper(failedOpEqNanos))
- inform("time spent ref scanning : "+showRelTyper(isReferencedNanos))
- inform("micros by tree node : "+showCounts(microsByType))
- inform("#visits by tree node : "+showCounts(visitsByType))
+ inform("time spent typechecking : " + showRelTyper(typerNanos))
+ inform("time classfilereading : " + showRelTyper(classReadNanos))
+ inform("time spent in implicits : " + showRelTyper(implicitNanos))
+ inform(" successful in scope : " + showRelTyper(inscopeSucceedNanos))
+ inform(" failed in scope : " + showRelTyper(inscopeFailNanos))
+ inform(" successful of type : " + showRelTyper(oftypeSucceedNanos))
+ inform(" failed of type : " + showRelTyper(oftypeFailNanos))
+ inform(" assembling parts : " + showRelTyper(subtypeETNanos))
+ inform(" matchesPT : " + showRelTyper(matchesPtNanos))
+ inform("implicit cache hits : " + showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
+ inform("time spent in failed : " + showRelTyper(failedSilentNanos))
+ inform(" failed apply : " + showRelTyper(failedApplyNanos))
+ inform(" failed op= : " + showRelTyper(failedOpEqNanos))
+ inform("time spent ref scanning : " + showRelTyper(isReferencedNanos))
+ inform("micros by tree node : " + showCounts(microsByType))
+ inform("#visits by tree node : " + showCounts(visitsByType))
val average = new ClassCounts
for (c <- microsByType.keysIterator) average(c) = microsByType(c)/visitsByType(c)
- inform("avg micros by tree node : "+showCounts(average))
- inform("time spent in <:< : "+showRelTyper(subtypeNanos))
- inform("time spent in findmember : "+showRelTyper(findMemberNanos))
- inform("time spent in asSeenFrom : "+showRelTyper(asSeenFromNanos))
- inform("#implicit searches : " + implicitSearchCount)
+ inform("avg micros by tree node : " + showCounts(average))
+ inform("time spent in <:< : " + showRelTyper(subtypeNanos))
+ inform("time spent in findmember : " + showRelTyper(findMemberNanos))
+ inform("time spent in asSeenFrom : " + showRelTyper(asSeenFromNanos))
+ inform("#implicit searches : " + implicitSearchCount)
inform("#tried, plausible, matching, typed, found implicits: "+triedImplicits+", "+plausiblyCompatibleImplicits+", "+matchingImplicits+", "+typedImplicits+", "+foundImplicits)
- inform("#implicit improves tests : " + improvesCount)
- inform("#implicit improves cached: " + improvesCachedCount)
- inform("#implicit inscope hits : " + inscopeImplicitHits)
- inform("#implicit oftype hits : " + oftypeImplicitHits)
+ inform("#implicit improves tests : " + improvesCount)
+ inform("#implicit improves cached : " + improvesCachedCount)
+ inform("#implicit inscope hits : " + inscopeImplicitHits)
+ inform("#implicit oftype hits : " + oftypeImplicitHits)
+ inform("#macro expansions : " + macroExpandCount)
+ inform("#time spent in macroExpand : " + showRelTyper(macroExpandNanos))
}
if (ctr1 != null) inform("#ctr1 : " + ctr1)
@@ -164,6 +182,16 @@ abstract class StatisticsInfo {
if (timer1 != null) inform("#timer1 : " + timer1)
if (timer2 != null) inform("#timer2 : " + timer2)
//for (t <- uniques.iterator) println("unique: "+t)
+
+ if (phase.name == "patmat") {
+ inform("time spent in patmat : " + patmatNanos )
+ inform(" of which DPLL : " + showRelPatmat(patmatAnaDPLL ))
+ inform("of which in CNF conversion : " + showRelPatmat(patmatCNF ))
+ inform(" CNF size counts : " + showCounts(patmatCNFSizes ))
+ inform("of which variable equality : " + showRelPatmat(patmatAnaVarEq ))
+ inform(" of which in exhaustivity : " + showRelPatmat(patmatAnaExhaust))
+ inform("of which in unreachability : " + showRelPatmat(patmatAnaReach ))
+ }
}
}
}
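
Why showCounts was generalized above: the new patmatCNFSizes histogram is keyed by Int rather than Class[_]. A small sketch of the report it now supports, with illustrative values:

object CountReportSketch {
  import scala.collection.mutable
  val cnfSizes = mutable.HashMap(3 -> 12, 7 -> 2) withDefaultValue 0
  val report   = cnfSizes.toSeq.sortWith(_._2 > _._2).map { case (k, n) => k + ": " + n }
  // report == Seq("3: 12", "7: 2")
}
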
diff --git a/src/compiler/scala/tools/nsc/util/Tracer.scala b/src/compiler/scala/tools/nsc/util/Tracer.scala
deleted file mode 100644
index acbf60da5b..0000000000
--- a/src/compiler/scala/tools/nsc/util/Tracer.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import java.io.PrintStream
-import scala.runtime.ScalaRunTime
-
-class Tracer(enabled: () => Boolean) {
- def out: PrintStream = System.out
- def stringify(x: Any) = ScalaRunTime stringOf x
-
- // So can pass tuples, lists, whatever as arguments and don't
- // get a lot of extra parens or noisy prefixes.
- def stringifyArgs(x: Any) = {
- x match {
- case x: TraversableOnce[_] => x map stringify mkString ", "
- case x: Product => x.productIterator map stringify mkString ", "
- case _ => stringify(x)
- }
- }
-
- private val LBRACE = "{"
- private val RBRACE = "}"
- private var indentLevel = 0
- private def spaces = " " * (indentLevel * 2)
- private def pblock(result: Any) = {
- p(LBRACE + "\n")
- indented(p(spaces + stringify(result) + "\n"))
- p(spaces + RBRACE + "\n")
- }
- private def passign(name: String, args: String) =
- p(spaces + name + "(" + args + ") = ")
-
- private def indented[T](body: => T): T = {
- indentLevel += 1
- try body
- finally indentLevel -= 1
- }
- private def p(s: String) = {
- out.print(s)
- out.flush()
- }
-
- def apply[T](name: String, args: => Any)(body: => T): T = {
- val result = body
-
- if (enabled()) {
- passign(name, stringifyArgs(args))
- val resultToPrint = result match {
- case Some(x) => x
- case _ => result
- }
- // concise output optimization
- val isOneliner = resultToPrint match {
- case _: Boolean | _: None.type => true
- case s: String => s.length < 40
- case _ => false
- }
- if (isOneliner) p(stringify(resultToPrint) + "\n")
- else pblock(resultToPrint)
- }
-
- result
- }
-}
-
-object Tracer {
- def apply(enabled: => Boolean): Tracer = new Tracer(() => enabled)
-}
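
Editor's note: the removed Tracer wrapped a call site so that its name, arguments and result were printed with indentation. A sketch of how it could be used before its removal, reconstructed from the deleted class above rather than from any caller in this diff:

// Illustrative only: exercises the Tracer API from the file deleted above.
val trace = Tracer(enabled = true)

def isEmpty(s: String): Boolean =
  trace("isEmpty", s) {             // prints: isEmpty(foo) = false
    s.isEmpty
  }

isEmpty("foo")
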
diff --git a/src/compiler/scala/tools/nsc/util/WeakHashSet.scala b/src/compiler/scala/tools/nsc/util/WeakHashSet.scala
index 6a10422b00..5bbb766e21 100644
--- a/src/compiler/scala/tools/nsc/util/WeakHashSet.scala
+++ b/src/compiler/scala/tools/nsc/util/WeakHashSet.scala
@@ -4,6 +4,7 @@ import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.Builder
import scala.collection.mutable.SetBuilder
+import scala.collection.generic.Clearable
import scala.runtime.AbstractFunction1
/** A bare-bones implementation of a mutable `Set` that uses weak references
@@ -12,7 +13,7 @@ import scala.runtime.AbstractFunction1
* This implementation offers only add/remove/test operations,
* therefore it does not fulfill the contract of Scala collection sets.
*/
-class WeakHashSet[T <: AnyRef] extends AbstractFunction1[T, Boolean] {
+class WeakHashSet[T <: AnyRef] extends AbstractFunction1[T, Boolean] with Clearable {
private val underlying = mutable.HashSet[WeakReferenceWithEquals[T]]()
/** Add the given element to this set. */
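
Editor's note: mixing in Clearable lets a WeakHashSet be handed to any machinery that resets caches generically. A minimal sketch of that pattern with a hypothetical registry; it assumes the set also implements Clearable's clear(), since only the class header is visible in the hunk above:

import scala.collection.generic.Clearable
import scala.collection.mutable

// Hypothetical registry: anything Clearable can be flushed uniformly.
object CacheRegistry {
  private val caches = mutable.ListBuffer[Clearable]()
  def register(c: Clearable): Unit = caches += c
  def clearAll(): Unit = caches foreach (_.clear())
}
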
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index 2534e1192d..8c037cbda5 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -30,6 +30,10 @@ class WorkScheduler {
todo.dequeueAll(a => f(a).isDefined).map(a => f(a).get)
}
+ def dequeueAllInterrupts(f: InterruptReq => Unit): Unit = synchronized {
+ interruptReqs.dequeueAll { iq => f(iq); true }
+ }
+
/** Called from server: return optional exception posted by client
* Reset to no exception.
*/
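
Editor's note: the new dequeueAllInterrupts drains every pending interrupt request and hands each one to the supplied callback. A hedged usage sketch, where scheduler is some WorkScheduler instance:

// Drain every queued interrupt and run it before resuming normal work.
// The execute() call on InterruptReq is an assumption based on the
// interactive compiler's API; it is not shown in this diff.
scheduler.dequeueAllInterrupts(req => req.execute())
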
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 88e6c51e9f..1336cca3c5 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -9,6 +9,8 @@ import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter,
package object util {
+ implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+
// forwarder for old code that builds against 2.9 and 2.10
val Chars = scala.reflect.internal.Chars
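
Editor's note: the implicit above puts the postfixOps language feature in scope for the whole package, so postfix method calls compile without the 2.10 feature warning. In ordinary code the same effect comes from an import; a small sketch:

import scala.language.postfixOps

val words  = List("foo", "bar")
val joined = words mkString      // postfix call; compiles without a feature warning
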
diff --git a/src/compiler/scala/tools/reflect/Invoked.scala b/src/compiler/scala/tools/reflect/Invoked.scala
deleted file mode 100644
index 30c6201a0d..0000000000
--- a/src/compiler/scala/tools/reflect/Invoked.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.{ Method, Proxy }
-
-/** A class representing a single method call. It is primarily for use
- * in tandem with Mock. If the invocation did not target an InvocationHandler,
- * proxy will be null.
- */
-class Invoked private (val proxy: AnyRef, val m: Method, val args: List[AnyRef]) {
- def name = m.getName
- def arity = m.getParameterTypes.size
- def returnType = m.getReturnType
- def returns[T: Manifest] = returnType == manifest[T].erasure
-
- def invokeOn(target: AnyRef) = m.invoke(target, args: _*)
- def isObjectMethod = Set("toString", "equals", "hashCode") contains name
-
- override def toString = "Invoked: %s called with %s".format(
- m.getName,
- if (args.isEmpty) "no args" else "args '%s'".format(args mkString ", ")
- )
-}
-
-object Invoked {
- def apply(m: Method, args: Seq[Any]): Invoked = apply(null, m, args)
- def apply(proxy: AnyRef, m: Method, args: Seq[Any]): Invoked = {
- val fixedArgs = if (args == null) Nil else args.toList map (_.asInstanceOf[AnyRef])
- new Invoked(proxy, m, fixedArgs)
- }
- def unapply(x: Any) = x match {
- case x: Invoked => Some((x.proxy, x.m, x.args))
- case _ => None
- }
- object NameAndArgs {
- def unapply(x: Any) = x match {
- case x: Invoked => Some((x.name, x.args))
- case _ => None
- }
- }
- object NameAndArity {
- def unapply(x: Any) = x match {
- case x: Invoked => Some((x.name, x.arity))
- case _ => None
- }
- }
-}
diff --git a/src/compiler/scala/tools/reflect/Mock.scala b/src/compiler/scala/tools/reflect/Mock.scala
deleted file mode 100644
index 52c052b8a2..0000000000
--- a/src/compiler/scala/tools/reflect/Mock.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.{ Method, Proxy, InvocationHandler }
-
-/** A wrapper around java dynamic proxies to make it easy to pose
- * as an interface. See SignalManager for an example usage.
- */
-trait Mock extends (Invoked => AnyRef) {
- mock =>
-
- def interfaces: List[Class[_]]
- def classLoader: ClassLoader
- def apply(invoked: Invoked): AnyRef
-
- def newProxyInstance(handler: InvocationHandler): AnyRef =
- Proxy.newProxyInstance(classLoader, interfaces.toArray, handler)
- def newProxyInstance(): AnyRef =
- newProxyInstance(newInvocationHandler())
-
- def newInvocationHandler() = new InvocationHandler {
- def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]) =
- try { mock(Invoked(proxy, method, args)) }
- catch { case _: NoClassDefFoundError => sys.exit(1) }
- }
-}
-
-/** The methods in Mock create the actual proxy instance which can be used
- * in place of the associated interface(s).
- */
-object Mock {
- /** The default implementation calls the partial function if defined, and
- * routes Object methods to the proxy: otherwise it throws an exception.
- */
- def fromInterfaces(clazz: Class[_], clazzes: Class[_]*)(pf: PartialFunction[Invoked, AnyRef]): AnyRef = {
- val ints = clazz :: clazzes.toList
- require(ints forall (_.isInterface), "All class objects must represent interfaces")
-
- val mock = new Mock {
- val interfaces = ints
- def classLoader = clazz.getClassLoader
- def apply(invoked: Invoked) =
- if (pf.isDefinedAt(invoked)) pf(invoked)
- else if (invoked.isObjectMethod) invoked invokeOn this
- else throw new NoSuchMethodException("" + invoked)
- }
- mock.newProxyInstance()
- }
- /** Tries to implement all the class's interfaces.
- */
- def fromClass(clazz: Class[_])(pf: PartialFunction[Invoked, AnyRef]): AnyRef = allInterfaces(clazz) match {
- case Nil => sys.error(clazz + " implements no interfaces.")
- case x :: xs => fromInterfaces(x, xs: _*)(pf)
- }
-}
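
Editor's note: Invoked and Mock (both deleted here) were thin wrappers over JDK dynamic proxies. The underlying mechanism is plain java.lang.reflect.Proxy; a standalone sketch of posing as an interface, independent of the removed helpers:

import java.lang.reflect.{ InvocationHandler, Method, Proxy }

// Route every call on the proxy through a single InvocationHandler.
val handler = new InvocationHandler {
  def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = {
    println("invoked: " + method.getName)
    null                              // Runnable.run returns void
  }
}

val runnable = Proxy.newProxyInstance(
  getClass.getClassLoader,
  Array[Class[_]](classOf[Runnable]),
  handler
).asInstanceOf[Runnable]

runnable.run()                        // prints "invoked: run"
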
diff --git a/src/compiler/scala/tools/reflect/Shield.scala b/src/compiler/scala/tools/reflect/Shield.scala
deleted file mode 100644
index f9c7e54454..0000000000
--- a/src/compiler/scala/tools/reflect/Shield.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.Constructor
-import nsc.util.ScalaClassLoader
-
-/** A support class for simplifying the otherwise disbelief-inspiring
- * process of working with classes completely reflectively. This is
- * the case with e.g. sun.misc.Signal* due to environments which are
- * antagonistic to their use. See SignalManager for an example.
- *
- * The name "Shield" is a reference to shielding the JVM from knowledge
- * of what we're doing.
- */
-trait Shield {
- def className: String
- def classLoader: ScalaClassLoader
-
- // Override this if you are more ambitious about logging or throwing.
- def onError[T >: Null](msg: String): T = null
-
- /** This is handy because all reflective calls want back an AnyRef but
- * we will often be generating Units.
- */
- protected implicit def boxedUnit(x: Unit): AnyRef = scala.runtime.BoxedUnit.UNIT
-
- lazy val clazz: Class[_] = classLoader.tryToLoadClass(className) getOrElse onError("Failed to load " + className)
- lazy val methods = clazz.getMethods.toList
-
- def constructor(paramTypes: Class[_]*) = clazz.getConstructor(paramTypes: _*).asInstanceOf[Constructor[AnyRef]]
- def method(name: String, arity: Int) = uniqueMethod(name, arity)
- def field(name: String) = clazz getField name
-
- def matchingMethods(name: String, arity: Int) = methods filter (m => nameAndArity(m) == ((name, arity)))
- def uniqueMethod(name: String, arity: Int) = matchingMethods(name, arity) match {
- case List(x) => x
- case _ => onError("No unique match for " + name)
- }
-}
diff --git a/src/compiler/scala/tools/reflect/SigParser.scala b/src/compiler/scala/tools/reflect/SigParser.scala
deleted file mode 100644
index 5d85778570..0000000000
--- a/src/compiler/scala/tools/reflect/SigParser.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.{ GenericSignatureFormatError, Method }
-
-/** The usual reflection song and dance to avoid referencing
- * any sun.* classes.
- */
-class SigParser {
- val SunSignatureParser = "sun.reflect.generics.parser.SignatureParser"
- private lazy val makeMethod: Method =
- try Class.forName(SunSignatureParser) getMethod "make"
- catch { case t => null }
-
- def make() = makeMethod.invoke(null).asInstanceOf[SignatureParserInterface]
-
- private def wrap(op: => Any) =
- try { op ; true }
- catch { case _: GenericSignatureFormatError => false }
-
- def isParserAvailable = makeMethod != null
- def verifyClass(s: String) = isParserAvailable && wrap(make() parseClassSig s)
- def verifyMethod(s: String) = isParserAvailable && wrap(make() parseMethodSig s)
- def verifyType(s: String) = isParserAvailable && wrap(make() parseTypeSig s)
-
- type ClassSignature <: AnyRef
- type MethodTypeSignature <: AnyRef
- type TypeSignature <: AnyRef
-
- type SignatureParserInterface = {
- def isParserAvailable: Boolean
- def parseClassSig(s: String): ClassSignature
- def parseMethodSig(s: String): MethodTypeSignature
- def parseTypeSig(s: String): TypeSignature
- }
-}
-object SigParser extends SigParser { }
diff --git a/src/compiler/scala/tools/reflect/UniversalFn.scala b/src/compiler/scala/tools/reflect/UniversalFn.scala
deleted file mode 100644
index 9ccd580560..0000000000
--- a/src/compiler/scala/tools/reflect/UniversalFn.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.{ Method, InvocationTargetException }
-import java.{ lang => jl }
-
-/** For certain reflection tasks it is convenient to treat all methods
- * as having the same signature: (Seq[AnyRef]) => AnyRef
- *
- * That is the "universal signature" and UniversalFn exists to provide
- * it without abandoning the information we had before we needed it.
- * One place this is used: closures can pose as arbitrary interfaces,
- * and this is how we route the arguments from the actual method
- * invocation (which targets a proxy object) to the original closure.
- */
-class UniversalFn private (val closure: AnyRef, val method: Method) extends (Seq[AnyRef] => AnyRef) {
- universal =>
-
- /** Given an interface type argument, creates a proxy object of the
- * type of the interface which implements all its methods by routing
- * them to this universal function. Will throw an exception in the
- * face of any bad data.
- */
- def as[T: Manifest] : T = {
- val clazz = manifest[T].erasure
- require(clazz.isInterface, "Type argument must be an interface.")
-
- val interfaceMethods = clazz.getDeclaredMethods.toSet
- val proxy = Mock.fromInterfaces(clazz) {
- case Invoked(_, m, args) if interfaceMethods(m) => universal(args)
- }
- proxy.asInstanceOf[T]
- }
-
- def apply(xs: Seq[AnyRef]): AnyRef =
- try method.invoke(closure, xs: _*)
- catch { case x: InvocationTargetException => throw x.getCause() }
-}
-
-object UniversalFn {
- /** We use a private constructor so we can enforce some rules: we don't want
- * universal functions to stack up, and right now we will only allow objects
- * which appear to be closures (there's no reason not to eventually lift
- * this restriction, but it should be harder to shoot your foot first.)
- */
- def apply(closure: AnyRef): UniversalFn = closure match {
- case x: UniversalFn => x
- case _ =>
- val m = uniqueApply(closure) getOrElse {
- throw new IllegalArgumentException("Argument must have exactly one non-bridge apply method.")
- }
- new UniversalFn(closure, m)
- }
-}
diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala
deleted file mode 100644
index f5c836a4e9..0000000000
--- a/src/compiler/scala/tools/reflect/package.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-
-import java.lang.reflect.Method
-import java.{ lang => jl }
-
-package object reflect {
- def nameAndArity(m: Method) = (m.getName, m.getParameterTypes.size)
- def allInterfaces(cl: Class[_]): List[Class[_]] =
- if (cl == null) Nil
- else cl.getInterfaces.toList ++ allInterfaces(cl.getSuperclass) distinct
-
- def methodsNamed(target: AnyRef, name: String): List[Method] =
- target.getClass.getMethods filter (x => x.getName == name) toList
-
- /** If there is a single non-bridge apply method in the given instance,
- * return it: otherwise None.
- */
- def uniqueApply(target: AnyRef) = {
- methodsNamed(target, "apply") filterNot (_.isBridge) match {
- case List(x) => Some(x)
- case _ => None
- }
- }
-
- def zeroOfClass(clazz: Class[_]) = zeroOf(Manifest(ClassManifest(clazz).tpe))
- def zeroOf[T](implicit m: Manifest[T]): AnyRef = {
- if (m == manifest[Boolean] || m == manifest[jl.Boolean]) false: jl.Boolean
- else if (m == manifest[Unit] || m == manifest[jl.Void] || m == manifest[scala.runtime.BoxedUnit]) scala.runtime.BoxedUnit.UNIT
- else if (m == manifest[Char] || m == manifest[jl.Character]) 0.toChar: jl.Character
- else if (m == manifest[Byte] || m == manifest[jl.Byte]) 0.toByte: jl.Byte
- else if (m == manifest[Short] || m == manifest[jl.Short]) 0.toShort: jl.Short
- else if (m == manifest[Int] || m == manifest[jl.Integer]) 0: jl.Integer
- else if (m == manifest[Long] || m == manifest[jl.Long]) 0l: jl.Long
- else if (m == manifest[Float] || m == manifest[jl.Float]) 0f: jl.Float
- else if (m == manifest[Double] || m == manifest[jl.Double]) 0d: jl.Double
- else null
- }
-}
diff --git a/src/compiler/scala/tools/util/AbstractTimer.scala b/src/compiler/scala/tools/util/AbstractTimer.scala
deleted file mode 100644
index 210a1ee53c..0000000000
--- a/src/compiler/scala/tools/util/AbstractTimer.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.util
-
-import compat.Platform.currentTime
-import scala.collection.mutable
-
-/**
- * This abstract class implements the collection of timings. How the
- * collected timings are issued has to be implemented in subclasses.
- *
- * @author Philippe Altherr
- * @version 1.0
- */
-abstract class AbstractTimer {
-
- //########################################################################
- // Private Fields
-
- /** A stack for maintaining start times */
- private val starts = new mutable.Stack[Long]()
-
- //########################################################################
- // Public Methods
-
- /** Issues a timing information (duration in milliseconds). */
- def issue(message: String, duration: Long): Unit
-
- /** Starts a new timer. */
- def start() {
- starts push currentTime
- }
-
- /** Ends the current timer. */
- def stop(message: String) {
- val stop = currentTime
- issue(message, stop - starts.pop)
- }
-
- /** Drops the current timer. */
- def drop() {
- starts.pop
- }
-
- //########################################################################
-}
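
Editor's note: AbstractTimer kept a stack of start times and left reporting to subclasses. The same pattern as a self-contained sketch (ConsoleTimer is a hypothetical name, not a class from this repository):

import scala.collection.mutable

// Stack-based timer: start() pushes a timestamp, stop() pops it and reports.
class ConsoleTimer {
  private val starts = mutable.Stack[Long]()
  def start(): Unit = starts.push(System.currentTimeMillis)
  def stop(message: String): Unit =
    println(message + ": " + (System.currentTimeMillis - starts.pop()) + "ms")
}

val t = new ConsoleTimer
t.start()
Thread.sleep(50)
t.stop("sleep")   // prints something like "sleep: 50ms"
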
diff --git a/src/compiler/scala/tools/util/ClassPathSettings.scala b/src/compiler/scala/tools/util/ClassPathSettings.scala
deleted file mode 100644
index d202279ea1..0000000000
--- a/src/compiler/scala/tools/util/ClassPathSettings.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-trait ClassPathSettings {
- def javabootclasspath: String // -javabootclasspath
- def javaextdirs: String // -javaextdirs
- def bootclasspath: String // -bootclasspath
- def extdirs: String // -extdirs
- def classpath: String // -classpath
- def sourcepath: String // -sourcepath
-}
-
-// val debugLogger = {
-// val f = File("/tmp/path-resolve-log.txt")
-// if (f.exists) f.truncate()
-// else f.createFile()
-//
-// val res = f.bufferedWriter()
-// res write ("Started debug log: %s\n".format(new java.util.Date))
-// res
-// }
-// def log(msg: Any) = {
-// Console println msg
-// debugLogger.write(msg.toString + "\n")
-// debugLogger flush
-// }
-
diff --git a/src/compiler/scala/tools/util/EditDistance.scala b/src/compiler/scala/tools/util/EditDistance.scala
deleted file mode 100644
index 0af34020a8..0000000000
--- a/src/compiler/scala/tools/util/EditDistance.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-object EditDistance {
- import java.lang.Character.{ toLowerCase => lower }
-
- def similarString(name: String, allowed: TraversableOnce[String]): String = {
- val suggested = suggestions(name, allowed.toSeq, maxDistance = 1, maxSuggestions = 2)
- if (suggested.isEmpty) ""
- else suggested.mkString(" (similar: ", ", ", ")")
- }
-
- def suggestions(a: String, bs: Seq[String], maxDistance: Int, maxSuggestions: Int): Seq[String] = (
- bs map (b => (b, distance(a, b)))
- filter (_._2 <= maxDistance)
- sortBy (_._2)
- take (maxSuggestions)
- map (_._1)
- )
-
- def distance(a: String, b: String): Int = levenshtein(a, b, transpositions = true)
-
- def levenshtein(s: String, t: String, transpositions: Boolean): Int = {
- val n = s.length
- val m = t.length
- if (n == 0) return m
- if (m == 0) return n
-
- val d = Array.ofDim[Int](n + 1, m + 1)
- var i = 0
- val max = math.max(m, n)
- while (i <= max) {
- if (i <= n)
- d(i)(0) = i
- if (i <= m)
- d(0)(i) = i
- i += 1
- }
- i = 1
-
- while (i <= n) {
- val s_i = s(i - 1)
- var j = 1
- while (j <= m) {
- val t_j = t(j - 1)
- val cost = if (lower(s_i) == lower(t_j)) 0 else 1
-
- val c1 = d(i - 1)(j) + 1
- val c2 = d(i)(j - 1) + 1
- val c3 = d(i - 1)(j - 1) + cost
-
- d(i)(j) = c1 min c2 min c3
-
- if (transpositions) {
- if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1))
- d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost)
- }
- j += 1
- }
- i += 1
- }
-
- d(n)(m)
- }
-}
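
Editor's note: the deleted levenshtein is the standard dynamic-programming edit distance, optionally counting adjacent transpositions as a single edit. A compact, case-sensitive sketch of the same recurrence, without the transposition extension:

// Classic Levenshtein distance: d(i)(j) = cost of editing s.take(i) into t.take(j).
def levenshtein(s: String, t: String): Int = {
  val d = Array.tabulate(s.length + 1, t.length + 1) { (i, j) =>
    if (i == 0) j else if (j == 0) i else 0
  }
  for (i <- 1 to s.length; j <- 1 to t.length) {
    val cost = if (s(i - 1) == t(j - 1)) 0 else 1
    d(i)(j) = math.min(math.min(d(i - 1)(j) + 1, d(i)(j - 1) + 1), d(i - 1)(j - 1) + cost)
  }
  d(s.length)(t.length)
}

levenshtein("kitten", "sitting")   // == 3
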
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index 6d5988d1dd..42d8a910f0 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -10,7 +10,7 @@ import java.lang.reflect.{ GenericSignatureFormatError, Method, Constructor }
import java.lang.{ ClassLoader => JavaClassLoader }
import scala.tools.nsc.util.ScalaClassLoader
import java.io.{ InputStream, PrintWriter, ByteArrayInputStream, FileNotFoundException }
-import scala.tools.nsc.io.{ File, NullPrintStream }
+import scala.tools.nsc.io.File
import Javap._
trait Javap {
diff --git a/src/compiler/scala/tools/util/Profiling.scala b/src/compiler/scala/tools/util/Profiling.scala
deleted file mode 100644
index 44393eef20..0000000000
--- a/src/compiler/scala/tools/util/Profiling.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-/** This is a (very) minimal stub for profiling, the purpose
- * of which is making it possible to integrate profiling hooks in
- * the compiler without creating a dependency on any particular
- * profiler. You can specify a profiler class (which must be an
- * instance of this class) like so:
- *
- * // or -Yprofile:phase to profile individual phases
- * scalac -Yprofile-class your.profiler.Class -Yprofile:all <files>
- *
- */
-abstract class Profiling {
- def isActive: Boolean
- def startProfiling(): Unit
- def stopProfiling(): Unit
- def captureSnapshot(): Unit
-
- def allocationFreq: Option[Int] // record every Nth allocation
- def startRecordingAllocations(): Unit
- def stopRecordingAllocations(): Unit
-
- def profile[T](body: => T): T = profileCPU(body)
-
- def profileCPU[T](body: => T): T = {
- startProfiling()
- val result = body
- stopProfiling()
- captureSnapshot()
- result
- }
-
- def profileMem[T](body: => T): T = {
- startRecordingAllocations()
- val result = body
- stopRecordingAllocations()
- result
- }
-
- /** Advance the current object generation.
- *
- * Each object on the heap is associated to a generation number. Generations
- * start at 1, and are automatically advanced on each snapshot capture.
- */
- def advanceGeneration(desc: String = ""): Unit
-}
diff --git a/src/compiler/scala/tools/util/SignalManager.scala b/src/compiler/scala/tools/util/SignalManager.scala
deleted file mode 100644
index e93297386a..0000000000
--- a/src/compiler/scala/tools/util/SignalManager.scala
+++ /dev/null
@@ -1,275 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-import java.lang.reflect.{ Method, Constructor }
-import scala.tools.reflect._
-import scala.collection.{ mutable, immutable }
-import nsc.io.timer
-import nsc.util.{ ScalaClassLoader, Exceptional }
-import Exceptional.unwrap
-import scala.util.Random
-
-/** Signal handling code. 100% clean of any references to sun.misc:
- * it's all reflection and proxies and invocation handlers and lasers,
- * so even the choosiest runtimes will be cool with it.
- *
- * Sun/Oracle says sun.misc.* is unsupported and therefore so is all
- * of this. Simple examples:
- * {{{
- val manager = scala.tools.util.SignalManager // or you could make your own
- // Assignment clears any old handlers; += chains them.
- manager("HUP") = println("HUP 1!")
- manager("HUP") += println("HUP 2!")
- // Use raise() to raise a signal: this will print both lines
- manager("HUP").raise()
- // See a report on every signal's current handler
- manager.dump()
- * }}}
- */
-class SignalManager(classLoader: ScalaClassLoader) {
- def this() = this(ScalaClassLoader.appLoader)
- private val illegalArgHandler: PartialFunction[Throwable, Boolean] = {
- case x if unwrap(x).isInstanceOf[IllegalArgumentException] => false
- }
- private def fail(msg: String) = new SignalError(msg)
-
- object rSignalHandler extends Shield {
- val className = "sun.misc.SignalHandler"
- val classLoader = SignalManager.this.classLoader
-
- lazy val SIG_DFL = field("SIG_DFL") get null
- lazy val SIG_IGN = field("SIG_IGN") get null
-
- /** Create a new signal handler based on the function.
- */
- def apply(action: Invoked => Unit) = Mock.fromInterfaces(clazz) {
- case inv @ Invoked.NameAndArgs("handle", _ :: Nil) => action(inv)
- }
- def empty = rSignalHandler(_ => ())
- }
- import rSignalHandler.{ SIG_DFL, SIG_IGN }
-
- object rSignal extends Shield {
- val className = "sun.misc.Signal"
- val classLoader = SignalManager.this.classLoader
-
- lazy val handleMethod = method("handle", 2)
- lazy val raiseMethod = method("raise", 1)
- lazy val numberMethod = method("getNumber", 0)
-
- /** Create a new Signal with the given name.
- */
- def apply(name: String) = constructor(classOf[String]) newInstance name
- def handle(signal: AnyRef, current: AnyRef) = {
- if (signal == null || current == null) fail("Signals cannot be null")
- else handleMethod.invoke(null, signal, current)
- }
- def raise(signal: AnyRef) = {
- if (signal == null) fail("Signals cannot be null")
- else raiseMethod.invoke(null, signal)
- }
- def number(signal: AnyRef): Int = numberMethod.invoke(signal).asInstanceOf[Int]
-
- class WSignal(val name: String) {
- lazy val signal = rSignal apply name
- def number = rSignal number signal
- def raise() = rSignal raise signal
- def handle(handler: AnyRef) = rSignal.handle(signal, handler)
-
- def isError = false
- def setTo(body: => Unit) = register(name, false, body)
- def +=(body: => Unit) = register(name, true, body)
-
- /** It's hard to believe there's no way to get a signal's current
- * handler without replacing it, but if there is I couldn't find
- * it, so we have this swapping code.
- */
- def withCurrentHandler[T](f: AnyRef => T): T = {
- val swap = handle(rSignalHandler.empty)
-
- try f(swap)
- finally handle(swap)
- }
- def isDefault = try withCurrentHandler {
- case SIG_DFL => true
- case _ => false
- } catch illegalArgHandler
- def isIgnored = try withCurrentHandler {
- case SIG_IGN => true
- case _ => false
- } catch illegalArgHandler
- def isSetTo(ref: AnyRef) =
- try withCurrentHandler { _ eq ref }
- catch illegalArgHandler
-
- def handlerString() = withCurrentHandler {
- case SIG_DFL => "Default"
- case SIG_IGN => "Ignore"
- case x => "" + x
- }
-
- override def toString = "%10s %s".format("SIG" + name,
- try handlerString()
- catch { case x: Exception => "VM threw " + unwrap(x) }
- )
- override def equals(other: Any) = other match {
- case x: WSignal => name == x.name
- case _ => false
- }
- override def hashCode = name.##
- }
- }
- type WSignal = rSignal.WSignal
-
- /** Adds a handler for the named signal. If shouldChain is true,
- * the installed handler will call the previous handler after the
- * new one has executed. If false, the old handler is dropped.
- */
- private def register(name: String, shouldChain: Boolean, body: => Unit) = {
- val signal = rSignal(name)
- val current = rSignalHandler(_ => body)
- val prev = rSignal.handle(signal, current)
-
- if (shouldChain) {
- val chainer = rSignalHandler { inv =>
- val signal = inv.args.head
-
- inv invokeOn current
- prev match {
- case SIG_IGN | SIG_DFL => ()
- case _ => inv invokeOn prev
- }
- }
- rSignal.handle(signal, chainer)
- chainer
- }
- else current
- }
-
- /** Use apply and update to get and set handlers.
- */
- def apply(name: String): WSignal =
- try { new WSignal(name) }
- catch { case x: IllegalArgumentException => new SignalError(x.getMessage) }
-
- def update(name: String, body: => Unit): Unit = apply(name) setTo body
-
- class SignalError(message: String) extends WSignal("") {
- override def isError = true
- override def toString = message
- }
-
- def public(name: String, description: String)(body: => Unit): Unit = {
- try {
- val wsig = apply(name)
- if (wsig.isError)
- return
-
- wsig setTo body
- registerInfoHandler()
- addPublicHandler(wsig, description)
- }
- catch {
- case x: Exception => () // ignore failure
- }
- }
- /** Makes sure the info handler is registered if we see activity. */
- private def registerInfoHandler() = {
- val INFO = apply("INFO")
- if (publicHandlers.isEmpty && INFO.isDefault) {
- INFO setTo Console.println(info())
- addPublicHandler(INFO, "Print signal handler registry on console.")
- }
- }
- private def addPublicHandler(wsig: WSignal, description: String) = {
- if (publicHandlers contains wsig) ()
- else publicHandlers = publicHandlers.updated(wsig, description)
- }
- private var publicHandlers: Map[WSignal, String] = Map()
- def info(): String = {
- registerInfoHandler()
- val xs = publicHandlers.toList sortBy (_._1.name) map {
- case (wsig, descr) => " %2d %5s %s".format(wsig.number, wsig.name, descr)
- }
-
- xs.mkString("\nSignal handler registry:\n", "\n", "")
- }
-}
-
-object SignalManager extends SignalManager {
- private implicit def mkWSignal(name: String): WSignal = this(name)
- private lazy val signalNumberMap = all map (x => x.number -> x) toMap
-
- def all = List(
- HUP, INT, QUIT, ILL, TRAP, ABRT, EMT, FPE, // 1-8
- KILL, BUS, SEGV, SYS, PIPE, ALRM, TERM, URG, // 9-15
- STOP, TSTP, CONT, CHLD, TTIN, TTOU, IO, XCPU, // 16-23
- XFSZ, VTALRM, PROF, WINCH, INFO, USR1, USR2 // 24-31
- )
- /** Signals which are either inaccessible or which seem like
- * particularly bad choices when looking for an open one.
- */
- def reserved = Set(QUIT, TRAP, ABRT, KILL, BUS, SEGV, ALRM, STOP, INT)
- def unreserved = all filterNot reserved
- def defaultSignals() = unreserved filter (_.isDefault)
- def ignoredSignals() = unreserved filter (_.isIgnored)
- def findOpenSignal() = Random.shuffle(defaultSignals()).head
-
- def dump() = all foreach (x => println("%2s %s".format(x.number, x)))
-
- def apply(sigNumber: Int): WSignal = signalNumberMap(sigNumber)
-
- def HUP: WSignal = "HUP"
- def INT: WSignal = "INT"
- def QUIT: WSignal = "QUIT"
- def ILL: WSignal = "ILL"
- def TRAP: WSignal = "TRAP"
- def ABRT: WSignal = "ABRT"
- def EMT: WSignal = "EMT"
- def FPE: WSignal = "FPE"
- def KILL: WSignal = "KILL"
- def BUS: WSignal = "BUS"
- def SEGV: WSignal = "SEGV"
- def SYS: WSignal = "SYS"
- def PIPE: WSignal = "PIPE"
- def ALRM: WSignal = "ALRM"
- def TERM: WSignal = "TERM"
- def URG: WSignal = "URG"
- def STOP: WSignal = "STOP"
- def TSTP: WSignal = "TSTP"
- def CONT: WSignal = "CONT"
- def CHLD: WSignal = "CHLD"
- def TTIN: WSignal = "TTIN"
- def TTOU: WSignal = "TTOU"
- def IO: WSignal = "IO"
- def XCPU: WSignal = "XCPU"
- def XFSZ: WSignal = "XFSZ"
- def VTALRM: WSignal = "VTALRM"
- def PROF: WSignal = "PROF"
- def WINCH: WSignal = "WINCH"
- def INFO: WSignal = "INFO"
- def USR1: WSignal = "USR1"
- def USR2: WSignal = "USR2"
-
- /** Given a number of seconds, a signal, and a function: sets up a handler which upon
- * receiving the signal once, calls the function with argument true, and if the
- * signal is received again within the allowed time, calls it with argument false.
- * (Otherwise it calls it with true and starts the timer over again.)
- */
- def requireInterval(seconds: Int, wrapper: WSignal)(fn: Boolean => Unit) = {
- var received = false
- wrapper setTo {
- if (received) fn(false)
- else {
- received = true
- fn(true)
- timer(seconds)(received = false)
- }
- }
- }
-}
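
Editor's note: SignalManager went through reflection and proxies solely to avoid compile-time references to sun.misc. Calling the unsupported API directly looks like the sketch below; it is illustrative only and will not work on JVMs that hide or omit sun.misc.Signal:

import sun.misc.{ Signal, SignalHandler }

// Install a SIGHUP handler directly; Signal.handle returns the previous handler.
val previous = Signal.handle(new Signal("HUP"), new SignalHandler {
  def handle(sig: Signal): Unit = println("got " + sig)
})

Signal.raise(new Signal("HUP"))   // triggers the handler above
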
diff --git a/src/compiler/scala/tools/util/Signallable.scala b/src/compiler/scala/tools/util/Signallable.scala
deleted file mode 100644
index af98bfac83..0000000000
--- a/src/compiler/scala/tools/util/Signallable.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-import java.security.AccessControlException
-
-/** A class for things which are signallable.
- */
-abstract class Signallable[T] private (val signal: String, val description: String) {
- private var last: Option[T] = None
- private def lastString = last match {
- case Some(()) => ""
- case Some(x) => "" + x
- case _ => ""
- }
-
- /** The most recent result from the signal handler. */
- def lastResult: Option[T] = last
-
- /** Method to be executed when the associated signal is received. */
- def onSignal(): T
-
- // todo:
- // def unregister(): Boolean
-
- override def toString = " SIG(%s) => %s%s".format(
- signal, description, if (lastString == "") "" else " (" + lastString + ")"
- )
-}
-
-object Signallable {
- /** Same as the other apply, but an open signal is found for you.
- */
- def apply[T](description: String)(body: => T): Signallable[T] = wrap {
- apply(SignalManager.findOpenSignal().name, description)(body)
- }
-
- /** Given a signal name, a description, and a handler body, this
- * registers a signal handler and returns the Signallable instance.
- * The signal handler registry is thereafter available by calling
- * SignalManager.info(), or sending SIGINFO to the manager will
- * dump it to console.
- */
- def apply[T](signal: String, description: String)(body: => T): Signallable[T] = wrap {
- val result = create[T](signal, description, body)
- SignalManager.public(signal, description)(result.onSignal())
- result
- }
-
- private def wrap[T](body: => Signallable[T]): Signallable[T] =
- try body catch { case _: AccessControlException => null }
-
- private def create[T](signal: String, description: String, body: => T): Signallable[T] =
- new Signallable[T](signal, description) {
- def onSignal = {
- val result = body
- last = Some(result)
- result
- }
- }
-}
diff --git a/src/compiler/scala/tools/util/SocketConnection.scala b/src/compiler/scala/tools/util/SocketConnection.scala
deleted file mode 100644
index 6b56be569c..0000000000
--- a/src/compiler/scala/tools/util/SocketConnection.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.util
-
-import java.io.{PrintWriter, InputStreamReader, BufferedReader}
-import java.io.IOException
-import java.net.{Socket, InetAddress}
-import java.net.UnknownHostException
-
-/** This class implements the connection to the server.
- *
- * @author Martin Odersky
- * @version 1.0
- */
-class SocketConnection(hostname: String, port: Int) {
-
- def this(port: Int) = this(InetAddress.getLocalHost().getHostName(), port)
-
- private var socket: Socket = _
- var out: PrintWriter = _
- var in: BufferedReader = _
- var errorMessage: String = _
-
- def open(): Boolean = {
- try {
- socket = new Socket(hostname, port)
- out = new PrintWriter(socket.getOutputStream(), true)
- in = new BufferedReader(new InputStreamReader(socket.getInputStream()))
- true
- } catch {
- case e: UnknownHostException =>
- errorMessage = "Don't know about host: " + hostname + "."
- false
- case e: IOException =>
- errorMessage = "Couldn't get I/O for the connection to: " + hostname + "."
- false
- }
- }
-
- def close() {
- in.close()
- out.close()
- socket.close()
- }
-}
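
Editor's note: SocketConnection is a small wrapper around a client socket with line-oriented I/O. The essentials as a standalone sketch, with a made-up host and port:

import java.io.{ BufferedReader, InputStreamReader, PrintWriter }
import java.net.Socket

// Connect, send one line, read one line back, then clean up.
val socket = new Socket("localhost", 4444)        // assumed host/port
val out    = new PrintWriter(socket.getOutputStream, true)
val in     = new BufferedReader(new InputStreamReader(socket.getInputStream))

out.println("ping")
val reply = in.readLine()

in.close(); out.close(); socket.close()
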
diff --git a/src/compiler/scala/tools/util/StringOps.scala b/src/compiler/scala/tools/util/StringOps.scala
index 02eb364abe..725e0afb79 100644
--- a/src/compiler/scala/tools/util/StringOps.scala
+++ b/src/compiler/scala/tools/util/StringOps.scala
@@ -25,6 +25,16 @@ trait StringOps {
val ys = oempty(xs: _*)
if (ys.isEmpty) orElse else ys mkString sep
}
+ def trimTrailingSpace(s: String) = {
+ if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s
+ else {
+ var idx = s.length - 1
+ while (idx >= 0 && s.charAt(idx).isWhitespace)
+ idx -= 1
+
+ s.substring(0, idx + 1)
+ }
+ }
def decompose(str: String, sep: Char): List[String] = {
def ws(start: Int): List[String] =
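
Editor's note: trimTrailingSpace strips only trailing whitespace, leaving leading and interior whitespace intact. With the method above in scope (e.g. via the StringOps trait), its behaviour is easy to pin down:

trimTrailingSpace("  padded  \t")   // == "  padded"   (leading spaces kept)
trimTrailingSpace("clean")          // == "clean"      (no trailing whitespace: returned as-is)
trimTrailingSpace("   ")            // == ""
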
diff --git a/src/compiler/scala/tools/util/Which.scala b/src/compiler/scala/tools/util/Which.scala
deleted file mode 100644
index 1cafe156b5..0000000000
--- a/src/compiler/scala/tools/util/Which.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-import scala.tools.nsc._
-
-/** A tool for identifying which classfile is being used.
- * under the given conditions.
- */
-object Which {
- def main(args: Array[String]): Unit = {
- val settings = new Settings()
- val names = settings.processArguments(args.toList, true)._2
- val global = new Global(settings)
- val cp = global.classPath
-
- import cp._
-
- for (name <- names) {
- def fail() = println("Could not find: %s".format(name))
- (cp findClass name) match {
- case Some(classRep) => classRep.binary match {
- case Some(f) => println("%s is %s".format(name, f))
- case _ => fail
- }
- case _ => fail
- }
- }
- }
-}
-
-
-
-
diff --git a/src/compiler/scala/tools/util/color/Ansi.scala b/src/compiler/scala/tools/util/color/Ansi.scala
deleted file mode 100644
index 1ed43579bb..0000000000
--- a/src/compiler/scala/tools/util/color/Ansi.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.util
-package color
-
-import collection.mutable
-
-object Ansi {
- final val ESC = '\u001b' // <esc>
- final val LBR = '\u005b' // [
- final val CSI = new String(Array(ESC, LBR)) // control sequence introducer
- final val CSI_FINAL = "m" // control sequence final byte
-
- def colors = List(Black, Red, Green, Yellow, Blue, Magenta, Cyan, White)
- def effects = List(Reset, Bright, Faint, Italic, Underline, Blink, Inverse, Hidden, Strikethrough)
-
- // No, that's not the finale of "CSI: Crime Scene Investigation."
-
- def colorizerFor(codes: Seq[Int]): String => String =
- s => ansiCodeToString(codes) + s + ansiCodeToString(0)
-
- def ansiCodeToString(code: Int): String = CSI + code + CSI_FINAL
- def ansiCodeToString(codes: Seq[Int]): String = codes.mkString(CSI, ";", CSI_FINAL)
-}
-
-/** An ansi control sequence. The colorize function prepends
- * the control sequence to the given String and appends a
- * reset sequence.
- */
-class Ansi(atoms0: List[AnsiAtom]) {
- val atoms = atoms0 sortBy (x => (!x.isAttr, x.isInstanceOf[AnsiBackground]))
- val colorize = Ansi colorizerFor codes
-
- def codes = atoms map (_.code)
- def /(that: AnsiAtom) = new Ansi(atoms :+ that)
- // This looks redundant with / , but isn't - it is a way
- // to ensure that the argument will be a background color,
- // even if a foreground color is passed as an argument
- // (as it will be implicitly converted.)
- def on(that: AnsiBackground) = this / that
-
- // Convenience functions.
- def reset = this / Reset
- def bright = this / Bright
- def faint = this / Faint
- def italic = this / Italic
- def underline = this / Underline
- def blink = this / Blink
- def inverse = this / Inverse
- def hidden = this / Hidden
- def strikethrough = this / Strikethrough
-
- // adjectives first
- override def toString = atoms mkString " "
-}
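
Editor's note: underneath these wrappers is nothing more than ANSI CSI sequences, ESC [ codes m, terminated by a reset. A dependency-free sketch of the same colorization:

// "\u001b[" is the control sequence introducer (CSI); "m" selects graphic rendition.
def colorize(s: String, codes: Int*): String =
  codes.mkString("\u001b[", ";", "m") + s + "\u001b[0m"

println(colorize("error", 31))      // red foreground
println(colorize("note", 1, 32))    // bright green
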
diff --git a/src/compiler/scala/tools/util/color/AnsiAtom.scala b/src/compiler/scala/tools/util/color/AnsiAtom.scala
deleted file mode 100644
index 5d5490f6e9..0000000000
--- a/src/compiler/scala/tools/util/color/AnsiAtom.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.util
-package color
-
-case object Reset extends AnsiAttr(0)
-case object Bright extends AnsiAttr(1)
-case object Faint extends AnsiAttr(2)
-case object Italic extends AnsiAttr(3)
-case object Underline extends AnsiAttr(4)
-case object Blink extends AnsiAttr(5)
-case object Inverse extends AnsiAttr(7)
-case object Hidden extends AnsiAttr(8)
-case object Strikethrough extends AnsiAttr(9)
-
-case object Black extends AnsiForeground(30)
-case object Red extends AnsiForeground(31)
-case object Green extends AnsiForeground(32)
-case object Yellow extends AnsiForeground(33)
-case object Blue extends AnsiForeground(34)
-case object Magenta extends AnsiForeground(35)
-case object Cyan extends AnsiForeground(36)
-case object White extends AnsiForeground(37)
-case object Default extends AnsiForeground(39)
-
-/** One piece of an ansi control sequence. Either a color
- * (foreground or background) or an attribute (e.g. bright, underline.)
- * Control sequences are created from AnsiAtoms with the / operator.
- */
-trait AnsiAtom {
- def code: Int
- def isAttr: Boolean
-}
-sealed abstract class AnsiAttr(val code: Int) extends AnsiAtom {
- final def isAttr = true
-}
-sealed abstract class AnsiColor(val code: Int) extends AnsiAtom {
- final def isAttr = false
- def flip: AnsiColor
-}
-sealed abstract class AnsiForeground(code: Int) extends AnsiColor(code) {
- require(30 <= code && code <= 39, code)
- val flip: AnsiBackground = new AnsiBackground(this)
-}
-sealed class AnsiBackground(val flip: AnsiForeground) extends AnsiColor(flip.code + 10) {
- require(40 <= code && code <= 49, code)
- override def toString = "(on " + flip + " background)"
-}
diff --git a/src/compiler/scala/tools/util/color/CString.scala b/src/compiler/scala/tools/util/color/CString.scala
deleted file mode 100644
index fa57229f09..0000000000
--- a/src/compiler/scala/tools/util/color/CString.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package scala.tools.util
-package color
-
-/** A colorized String. It's difficult to achieve precise
- * formatting and selective string colorization simultaneously,
- * because all length-based calculations will break down in
- * the face of the ansi controls. It doesn't do much yet, but
- * this is here to eventually make that transparent.
- */
-final class CString(val uncolorized: String, val colorized: String) {
- def visibleLength = uncolorized.length
- def colorizedLength = colorized.length
- def show() = Console println colorized
- def bytes() = colorized map (_.toByte)
- def >() = show()
-
- def append(x: CString): CString = new CString(uncolorized + x.uncolorized, colorized + x.colorized)
- def +(other: CString): CString = this append other
-
- override def toString = colorized
-}
-
-class CStringOps(str: String) {
- /** String to String operation.
- * println("foo" in Red)
- * println("bar" in Magenta.bright)
- */
- def in(ansi: Ansi): String = ansi colorize str
-
- /** Gave in to one bit of punctuation, because everyone adds
- * strings with '+' and we need something which higher precedence
- * for it to be at all satisfying.
- *
- * "foo" %> Red + "bar" %> Magenta.bright
- */
- def %>(ansi: Ansi): CString = new CString(str, in(ansi))
-}
diff --git a/src/compiler/scala/tools/util/color/ColorNames.scala b/src/compiler/scala/tools/util/color/ColorNames.scala
deleted file mode 100644
index ff4b01a9df..0000000000
--- a/src/compiler/scala/tools/util/color/ColorNames.scala
+++ /dev/null
@@ -1,391 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.util.color
-
-/** Raw data adapted from perl's Term-ExtendedColor, which is published
- * under perl's Artistic license: http://dev.perl.org/licenses/artistic.html
- *
- * These aren't actually in use yet.
- */
-trait ColorNames {
- type ColorType
- def translateCode(ansiCode: String): ColorType
-
- private implicit def liftAnsiCode(code: String): ColorType = translateCode(code)
-
- // Possible alternative names or aliases, also from the perl:
- //
- // reset, clear, normal reset all attributes
- // bold, bright bold or bright, depending on implementation
- // faint decreased intensity (not widely supported)
- // italic, cursive italic or cursive
- // underline, underscore underline
- // blink slow blink
- // blink_ms rapid blink (only supported in MS DOS)
- // reverse, inverse, negative reverse video
- // conceal conceal, or hide (not widely supported)
-
- // Brightest to darkest color
- val red1: ColorType = "5;196"
- val red2: ColorType = "5;160"
- val red3: ColorType = "5;124"
- val red4: ColorType = "5;088"
- val red5: ColorType = "5;052"
-
- val green1: ColorType = "5;156"
- val green2: ColorType = "5;150"
- val green3: ColorType = "5;120"
- val green4: ColorType = "5;114"
- val green5: ColorType = "5;084"
- val green6: ColorType = "5;078"
- val green7: ColorType = "5;155"
- val green8: ColorType = "5;149"
- val green9: ColorType = "5;119"
- val green10: ColorType = "5;113"
- val green11: ColorType = "5;083"
- val green12: ColorType = "5;077"
- val green13: ColorType = "5;047"
- val green14: ColorType = "5;041"
- val green15: ColorType = "5;118"
- val green16: ColorType = "5;112"
- val green17: ColorType = "5;082"
- val green18: ColorType = "5;076"
- val green19: ColorType = "5;046"
- val green20: ColorType = "5;040"
- val green21: ColorType = "5;034"
- val green22: ColorType = "5;028"
- val green23: ColorType = "5;022"
- val green24: ColorType = "5;107"
- val green25: ColorType = "5;071"
- val green26: ColorType = "5;070"
- val green27: ColorType = "5;064"
- val green28: ColorType = "5;065"
-
- val blue1: ColorType = "5;075"
- val blue2: ColorType = "5;074"
- val blue3: ColorType = "5;073"
- val blue4: ColorType = "5;039"
- val blue5: ColorType = "5;038"
- val blue6: ColorType = "5;037"
- val blue7: ColorType = "5;033"
- val blue8: ColorType = "5;032"
- val blue9: ColorType = "5;031"
- val blue10: ColorType = "5;027"
- val blue11: ColorType = "5;026"
- val blue12: ColorType = "5;025"
- val blue13: ColorType = "5;021"
- val blue14: ColorType = "5;020"
- val blue15: ColorType = "5;019"
- val blue16: ColorType = "5;018"
- val blue17: ColorType = "5;017"
-
- val yellow1: ColorType = "5;228"
- val yellow2: ColorType = "5;222"
- val yellow3: ColorType = "5;192"
- val yellow4: ColorType = "5;186"
- val yellow5: ColorType = "5;227"
- val yellow6: ColorType = "5;221"
- val yellow7: ColorType = "5;191"
- val yellow8: ColorType = "5;185"
- val yellow9: ColorType = "5;226"
- val yellow10: ColorType = "5;220"
- val yellow11: ColorType = "5;190"
- val yellow12: ColorType = "5;184"
- val yellow13: ColorType = "5;214"
- val yellow14: ColorType = "5;178"
- val yellow15: ColorType = "5;208"
- val yellow16: ColorType = "5;172"
- val yellow17: ColorType = "5;202"
- val yellow18: ColorType = "5;166"
-
- val magenta1: ColorType = "5;219"
- val magenta2: ColorType = "5;183"
- val magenta3: ColorType = "5;218"
- val magenta4: ColorType = "5;182"
- val magenta5: ColorType = "5;217"
- val magenta6: ColorType = "5;181"
- val magenta7: ColorType = "5;213"
- val magenta8: ColorType = "5;177"
- val magenta9: ColorType = "5;212"
- val magenta10: ColorType = "5;176"
- val magenta11: ColorType = "5;211"
- val magenta12: ColorType = "5;175"
- val magenta13: ColorType = "5;207"
- val magenta14: ColorType = "5;171"
- val magenta15: ColorType = "5;205"
- val magenta16: ColorType = "5;169"
- val magenta17: ColorType = "5;201"
- val magenta18: ColorType = "5;165"
- val magenta19: ColorType = "5;200"
- val magenta20: ColorType = "5;164"
- val magenta21: ColorType = "5;199"
- val magenta22: ColorType = "5;163"
- val magenta23: ColorType = "5;198"
- val magenta24: ColorType = "5;162"
- val magenta25: ColorType = "5;197"
- val magenta26: ColorType = "5;161"
-
- val gray1: ColorType = "5;255"
- val gray2: ColorType = "5;254"
- val gray3: ColorType = "5;253"
- val gray4: ColorType = "5;252"
- val gray5: ColorType = "5;251"
- val gray6: ColorType = "5;250"
- val gray7: ColorType = "5;249"
- val gray8: ColorType = "5;248"
- val gray9: ColorType = "5;247"
- val gray10: ColorType = "5;246"
- val gray11: ColorType = "5;245"
- val gray12: ColorType = "5;244"
- val gray13: ColorType = "5;243"
- val gray14: ColorType = "5;242"
- val gray15: ColorType = "5;241"
- val gray16: ColorType = "5;240"
- val gray17: ColorType = "5;239"
- val gray18: ColorType = "5;238"
- val gray19: ColorType = "5;237"
- val gray20: ColorType = "5;236"
- val gray21: ColorType = "5;235"
- val gray22: ColorType = "5;234"
- val gray23: ColorType = "5;233"
- val gray24: ColorType = "5;232"
-
- val purple1: ColorType = "5;147"
- val purple2: ColorType = "5;146"
- val purple3: ColorType = "5;145"
- val purple4: ColorType = "5;141"
- val purple5: ColorType = "5;140"
- val purple6: ColorType = "5;139"
- val purple7: ColorType = "5;135"
- val purple8: ColorType = "5;134"
- val purple9: ColorType = "5;133"
- val purple10: ColorType = "5;129"
- val purple11: ColorType = "5;128"
- val purple12: ColorType = "5;127"
- val purple13: ColorType = "5;126"
- val purple14: ColorType = "5;125"
- val purple15: ColorType = "5;111"
- val purple16: ColorType = "5;110"
- val purple17: ColorType = "5;109"
- val purple18: ColorType = "5;105"
- val purple19: ColorType = "5;104"
- val purple20: ColorType = "5;103"
- val purple21: ColorType = "5;099"
- val purple22: ColorType = "5;098"
- val purple23: ColorType = "5;097"
- val purple24: ColorType = "5;096"
- val purple25: ColorType = "5;093"
- val purple26: ColorType = "5;092"
- val purple27: ColorType = "5;091"
- val purple28: ColorType = "5;090"
- val purple29: ColorType = "5;055"
- val purple30: ColorType = "5;054"
-
- val cyan1: ColorType = "5;159"
- val cyan2: ColorType = "5;158"
- val cyan3: ColorType = "5;157"
- val cyan4: ColorType = "5;153"
- val cyan5: ColorType = "5;152"
- val cyan6: ColorType = "5;151"
- val cyan7: ColorType = "5;123"
- val cyan8: ColorType = "5;122"
- val cyan9: ColorType = "5;121"
- val cyan10: ColorType = "5;117"
- val cyan11: ColorType = "5;116"
- val cyan12: ColorType = "5;115"
- val cyan13: ColorType = "5;087"
- val cyan14: ColorType = "5;086"
- val cyan15: ColorType = "5;085"
- val cyan16: ColorType = "5;081"
- val cyan17: ColorType = "5;080"
- val cyan18: ColorType = "5;079"
- val cyan19: ColorType = "5;051"
- val cyan20: ColorType = "5;050"
- val cyan21: ColorType = "5;049"
- val cyan22: ColorType = "5;045"
- val cyan23: ColorType = "5;044"
- val cyan24: ColorType = "5;043"
-
- val orange1: ColorType = "5;208"
- val orange2: ColorType = "5;172"
- val orange3: ColorType = "5;202"
- val orange4: ColorType = "5;166"
- val orange5: ColorType = "5;130"
-
- // Approximations of X11 color mappings
- // https://secure.wikimedia.org/wikipedia/en/wiki/X11%20colors
-
- val aquamarine1: ColorType = "5;086"
- val aquamarine3: ColorType = "5;079"
- val blueviolet: ColorType = "5;057"
- val cadetblue1: ColorType = "5;072"
- val cadetblue2: ColorType = "5;073"
- val chartreuse1: ColorType = "5;118"
- val chartreuse2: ColorType = "5;082"
- val chartreuse3: ColorType = "5;070"
- val chartreuse4: ColorType = "5;064"
- val cornflowerblue: ColorType = "5;069"
- val cornsilk1: ColorType = "5;230"
- val darkblue: ColorType = "5;018"
- val darkcyan: ColorType = "5;036"
- val darkgoldenrod: ColorType = "5;136"
- val darkgreen: ColorType = "5;022"
- val darkkhaki: ColorType = "5;143"
- val darkmagenta1: ColorType = "5;090"
- val darkmagenta2: ColorType = "5;091"
- val darkolivegreen1: ColorType = "5;191"
- val darkolivegreen2: ColorType = "5;155"
- val darkolivegreen3: ColorType = "5;107"
- val darkolivegreen4: ColorType = "5;113"
- val darkolivegreen5: ColorType = "5;149"
- val darkorange3: ColorType = "5;130"
- val darkorange4: ColorType = "5;166"
- val darkorange1: ColorType = "5;208"
- val darkred1: ColorType = "5;052"
- val darkred2: ColorType = "5;088"
- val darkseagreen1: ColorType = "5;158"
- val darkseagreen2: ColorType = "5;157"
- val darkseagreen3: ColorType = "5;150"
- val darkseagreen4: ColorType = "5;071"
- val darkslategray1: ColorType = "5;123"
- val darkslategray2: ColorType = "5;087"
- val darkslategray3: ColorType = "5;116"
- val darkturquoise: ColorType = "5;044"
- val darkviolet: ColorType = "5;128"
- val deeppink1: ColorType = "5;198"
- val deeppink2: ColorType = "5;197"
- val deeppink3: ColorType = "5;162"
- val deeppink4: ColorType = "5;125"
- val deepskyblue1: ColorType = "5;039"
- val deepskyblue2: ColorType = "5;038"
- val deepskyblue3: ColorType = "5;031"
- val deepskyblue4: ColorType = "5;023"
- val dodgerblue1: ColorType = "5;033"
- val dodgerblue2: ColorType = "5;027"
- val dodgerblue3: ColorType = "5;026"
- val gold1: ColorType = "5;220"
- val gold3: ColorType = "5;142"
- val greenyellow: ColorType = "5;154"
- val grey0: ColorType = "5;016"
- val grey100: ColorType = "5;231"
- val grey11: ColorType = "5;234"
- val grey15: ColorType = "5;235"
- val grey19: ColorType = "5;236"
- val grey23: ColorType = "5;237"
- val grey27: ColorType = "5;238"
- val grey30: ColorType = "5;239"
- val grey3: ColorType = "5;232"
- val grey35: ColorType = "5;240"
- val grey37: ColorType = "5;059"
- val grey39: ColorType = "5;241"
- val grey42: ColorType = "5;242"
- val grey46: ColorType = "5;243"
- val grey50: ColorType = "5;244"
- val grey53: ColorType = "5;102"
- val grey54: ColorType = "5;245"
- val grey58: ColorType = "5;246"
- val grey62: ColorType = "5;247"
- val grey63: ColorType = "5;139"
- val grey66: ColorType = "5;248"
- val grey69: ColorType = "5;145"
- val grey70: ColorType = "5;249"
- val grey74: ColorType = "5;250"
- val grey7: ColorType = "5;233"
- val grey78: ColorType = "5;251"
- val grey82: ColorType = "5;252"
- val grey84: ColorType = "5;188"
- val grey85: ColorType = "5;253"
- val grey89: ColorType = "5;254"
- val grey93: ColorType = "5;255"
- val honeydew2: ColorType = "5;194"
- val hotpink2: ColorType = "5;169"
- val hotpink3: ColorType = "5;132"
- val hotpink: ColorType = "5;205"
- val indianred1: ColorType = "5;203"
- val indianred: ColorType = "5;167"
- val khaki1: ColorType = "5;228"
- val khaki3: ColorType = "5;185"
- val lightcoral: ColorType = "5;210"
- val lightcyan1: ColorType = "5;195"
- val lightcyan3: ColorType = "5;152"
- val lightgoldenrod1: ColorType = "5;227"
- val lightgoldenrod2: ColorType = "5;186"
- val lightgoldenrod3: ColorType = "5;179"
- val lightgreen: ColorType = "5;119"
- val lightpink1: ColorType = "5;217"
- val lightpink3: ColorType = "5;174"
- val lightpink4: ColorType = "5;095"
- val lightsalmon1: ColorType = "5;216"
- val lightsalmon3: ColorType = "5;137"
- val lightseagreen: ColorType = "5;037"
- val lightskyblue1: ColorType = "5;153"
- val lightskyblue3: ColorType = "5;109"
- val lightslateblue: ColorType = "5;105"
- val lightslategrey: ColorType = "5;103"
- val lightsteelblue1: ColorType = "5;189"
- val lightsteelblue3: ColorType = "5;146"
- val lightsteelblue: ColorType = "5;147"
- val lightyellow3: ColorType = "5;187"
- val mediumorchid1: ColorType = "5;171"
- val mediumorchid3: ColorType = "5;133"
- val mediumorchid: ColorType = "5;134"
- val mediumpurple1: ColorType = "5;141"
- val mediumpurple2: ColorType = "5;135"
- val mediumpurple3: ColorType = "5;097"
- val mediumpurple4: ColorType = "5;060"
- val mediumpurple: ColorType = "5;104"
- val mediumspringgreen: ColorType = "5;049"
- val mediumturquoise: ColorType = "5;080"
- val mediumvioletred: ColorType = "5;126"
- val mistyrose1: ColorType = "5;224"
- val mistyrose3: ColorType = "5;181"
- val navajowhite1: ColorType = "5;223"
- val navajowhite3: ColorType = "5;144"
- val navyblue: ColorType = "5;017"
- val orangered1: ColorType = "5;202"
- val orchid1: ColorType = "5;213"
- val orchid2: ColorType = "5;212"
- val orchid: ColorType = "5;170"
- val palegreen1: ColorType = "5;121"
- val palegreen3: ColorType = "5;077"
- val paleturquoise1: ColorType = "5;159"
- val paleturquoise4: ColorType = "5;066"
- val palevioletred1: ColorType = "5;211"
- val pink1: ColorType = "5;218"
- val pink3: ColorType = "5;175"
- val plum1: ColorType = "5;219"
- val plum2: ColorType = "5;183"
- val plum3: ColorType = "5;176"
- val plum4: ColorType = "5;096"
- val purple: ColorType = "5;129"
- val rosybrown: ColorType = "5;138"
- val royalblue1: ColorType = "5;063"
- val salmon1: ColorType = "5;209"
- val sandybrown: ColorType = "5;215"
- val seagreen1: ColorType = "5;084"
- val seagreen2: ColorType = "5;083"
- val seagreen3: ColorType = "5;078"
- val skyblue1: ColorType = "5;117"
- val skyblue2: ColorType = "5;111"
- val skyblue3: ColorType = "5;074"
- val slateblue1: ColorType = "5;099"
- val slateblue3: ColorType = "5;061"
- val springgreen1: ColorType = "5;048"
- val springgreen2: ColorType = "5;042"
- val springgreen3: ColorType = "5;035"
- val springgreen4: ColorType = "5;029"
- val steelblue1: ColorType = "5;075"
- val steelblue3: ColorType = "5;068"
- val steelblue: ColorType = "5;067"
- val tan: ColorType = "5;180"
- val thistle1: ColorType = "5;225"
- val thistle3: ColorType = "5;182"
- val turquoise2: ColorType = "5;045"
- val turquoise4: ColorType = "5;030"
- val violet: ColorType = "5;177"
- val wheat1: ColorType = "5;229"
- val wheat4: ColorType = "5;101"
-}
diff --git a/src/compiler/scala/tools/util/color/package.scala b/src/compiler/scala/tools/util/color/package.scala
deleted file mode 100644
index 3b3e85751e..0000000000
--- a/src/compiler/scala/tools/util/color/package.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.util
-
-/**
- * Wrappers around ansi colors.
- *
- * @author Paul Phillips
- * @version 2.10
- */
-package object color {
- implicit def implicitLiftAnsiAtom(c: AnsiAtom): Ansi = new Ansi(List(c))
- implicit def implicitColorToBackground(c: AnsiColor): AnsiBackground = c match {
- case x: AnsiBackground => x
- case x: AnsiForeground => x.flip
- }
- implicit def implicitCStringOps(str: String): CStringOps = new CStringOps(str)
- implicit def implicitCString(str: String): CString = new CString(str, str)
-}
diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala
index 19c2bc4038..910ca60eb0 100644
--- a/src/continuations/library/scala/util/continuations/ControlContext.scala
+++ b/src/continuations/library/scala/util/continuations/ControlContext.scala
@@ -13,7 +13,7 @@ import annotation.{ Annotation, StaticAnnotation, TypeConstraint }
/** This annotation is used to mark a parameter as part of a continuation
* context.
*
- * The type `A @cps[B,C]` is desugared to `ControlContext[A,B,C]` at compile
+ * The type `A @cpsParam[B,C]` is desugared to `ControlContext[A,B,C]` at compile
* time.
*
* @tparam B The type of computation state after computation has executed, and
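
Aside: a minimal example of the corrected doc comment in action, using the standard shift/reset API from scala.util.continuations. Compiling it requires the continuations plugin (-P:continuations:enable); the object and method names are illustrative.

    // Sketch (illustrative, not from the patch): the annotated return type
    // below desugars to ControlContext[Int, Int, Int].
    import scala.util.continuations._

    object CpsParamDemo {
      def answer(): Int @cpsParam[Int, Int] =
        shift { (k: Int => Int) => k(21) * 2 }

      def main(args: Array[String]): Unit =
        println(reset(answer()))   // 42: the captured continuation is applied to 21, then doubled
    }
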
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index af0d768607..862b19d0a4 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -3,8 +3,9 @@
package scala.tools.selectivecps
import scala.tools.nsc.Global
+import scala.tools.nsc.typechecker.Modes
-abstract class CPSAnnotationChecker extends CPSUtils {
+abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val global: Global
import global._
import definitions._
@@ -94,8 +95,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
if (!cpsEnabled) return bounds
val anyAtCPS = newCpsParamsMarker(NothingClass.tpe, AnyClass.tpe)
-
- if (isFunctionType(tparams.head.owner.tpe) || tparams.head.owner == PartialFunctionClass) {
+ if (isFunctionType(tparams.head.owner.tpe) || isPartialFunctionType(tparams.head.owner.tpe)) {
vprintln("function bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
if (hasCpsParamTypes(targs.last))
bounds.reverse match {
@@ -178,59 +178,38 @@ abstract class CPSAnnotationChecker extends CPSUtils {
override def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
if (!cpsEnabled) return tree
- vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+ vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + modeString(mode) + " / " + pt)
- val annots1 = cpsParamAnnotation(tree.tpe)
- val annots2 = cpsParamAnnotation(pt)
+ val patMode = (mode & global.analyzer.PATTERNmode) != 0
+ val exprMode = (mode & global.analyzer.EXPRmode) != 0
+ val byValMode = (mode & global.analyzer.BYVALmode) != 0
- if ((mode & global.analyzer.PATTERNmode) != 0) {
- if (!annots1.isEmpty) {
- return tree modifyType removeAllCPSAnnotations
- }
- }
+ val annotsTree = cpsParamAnnotation(tree.tpe)
+ val annotsExpected = cpsParamAnnotation(pt)
-/*
+ // not sure I rephrased this comment correctly:
+ // replacing `patMode` in the condition below by `patMode || ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode))`
// doesn't work correctly -- still relying on addAnnotations to remove things from ValDef symbols
- if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) {
- if (!annots1.isEmpty) {
- println("removing annotation from " + tree + "/" + tree.tpe)
- val s = tree.setType(removeAllCPSAnnotations(tree.tpe))
- println(s)
- s
- }
- }
-*/
-
- if ((mode & global.analyzer.EXPRmode) != 0) {
- if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) { // shiftUnit
- // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
- // tree will look like having any possible annotation
- //println("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
- //val same = annots2 forall { case AnnotationInfo(atp: TypeRef, _, _) => atp.typeArgs(0) =:= atp.typeArgs(1) }
- // TBD: use same or not? see infer0.scala/infer1.scala
-
- // CAVEAT:
- // for monomorphic answer types we want to have @plus @cps (for better checking)
- // for answer type modification we want to have only @plus (because actual answer type may differ from pt)
-
- //val known = global.analyzer.isFullyDefined(pt)
-
- if (/*same &&*/ !hasPlusMarker(tree.tpe)) {
- //if (known)
- return tree modifyType (_ withAnnotations newPlusMarker() :: annots2) // needed for #1807
- //else
- // return tree.setType(tree.tpe.withAnnotations(adapt::Nil))
- }
- tree
- } else if (!annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) { // dropping annotation
- // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
- // tree will look like having no annotation
- if (!hasMinusMarker(tree.tpe)) {
- return tree modifyType addMinusMarker
- }
- }
- }
- tree
+ if (patMode && !annotsTree.isEmpty) tree modifyType removeAllCPSAnnotations
+ else if (exprMode && !byValMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) { // shiftUnit
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having any possible annotation
+ //println("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+
+ // CAVEAT:
+ // for monomorphic answer types we want to have @plus @cps (for better checking)
+ // for answer type modification we want to have only @plus (because actual answer type may differ from pt)
+
+ val res = tree modifyType (_ withAnnotations newPlusMarker() :: annotsExpected) // needed for #1807
+ vprintln("adapted annotations (not by val) of " + tree + " to " + res.tpe)
+ res
+ } else if (exprMode && byValMode && !hasMinusMarker(tree.tpe) && annotsTree.nonEmpty) { // dropping annotation
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having no annotation
+ val res = tree modifyType addMinusMarker
+ vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
+ res
+ } else tree
}
def updateAttributesFromChildren(tpe: Type, childAnnots: List[AnnotationInfo], byName: List[Tree]): Type = {
@@ -455,11 +434,10 @@ abstract class CPSAnnotationChecker extends CPSUtils {
transChildrenInOrder(tree, tpe, List(cond), List(thenp, elsep))
case Match(select, cases) =>
- // TODO: can there be cases that are not CaseDefs?? check collect vs map!
- transChildrenInOrder(tree, tpe, List(select), cases:::(cases collect { case CaseDef(_, _, body) => body }))
+ transChildrenInOrder(tree, tpe, List(select), cases:::(cases map { case CaseDef(_, _, body) => body }))
case Try(block, catches, finalizer) =>
- val tpe1 = transChildrenInOrder(tree, tpe, Nil, block::catches:::(catches collect { case CaseDef(_, _, body) => body }))
+ val tpe1 = transChildrenInOrder(tree, tpe, Nil, block::catches:::(catches map { case CaseDef(_, _, body) => body }))
val annots = cpsParamAnnotation(tpe1)
if (annots.nonEmpty) {
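
Aside: the adaptAnnotations rewrite above replaces hex-printed mode flags and nested ifs with named booleans derived from the typer's mode bitmask. A standalone sketch of that idiom; the flag values are invented placeholders, the real constants live in scala.tools.nsc.typechecker.Modes.

    // Sketch (illustrative, not from the patch): name each tested bit once,
    // then branch on the named booleans instead of raw mask expressions.
    object ModeFlagsDemo {
      val EXPRmode    = 0x001
      val PATTERNmode = 0x002
      val BYVALmode   = 0x100

      def describe(mode: Int): String = {
        val patMode   = (mode & PATTERNmode) != 0
        val exprMode  = (mode & EXPRmode) != 0
        val byValMode = (mode & BYVALmode) != 0
        if (patMode) "pattern position"
        else if (exprMode && byValMode) "expression, forced by-value"
        else if (exprMode) "expression"
        else "other"
      }

      def main(args: Array[String]): Unit =
        println(describe(EXPRmode | BYVALmode))  // expression, forced by-value
    }
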
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index a6737573ea..017c8d24fd 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -71,25 +71,46 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// { x => x match { case A => ... }} to
// { x => shiftUnit(x match { case A => ... })}
// which Uncurry cannot handle (see function6.scala)
+ // thus, we push down the shiftUnit to each of the case bodies
val ext = getExternalAnswerTypeAnn(body.tpe)
+ val pureBody = getAnswerTypeAnn(body.tpe).isEmpty
+
+ def transformPureMatch(tree: Tree, selector: Tree, cases: List[CaseDef]) = {
+ val caseVals = cases map { case cd @ CaseDef(pat, guard, body) =>
+ // if (!hasPlusMarker(body.tpe)) body.tpe = body.tpe withAnnotation newPlusMarker() // TODO: to avoid warning
+ val bodyVal = transExpr(body, None, ext) // ??? triggers "cps-transformed unexpectedly" warning in transTailValue
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
+ }
+ treeCopy.Match(tree, transform(selector), caseVals)
+ }
+
+ def transformPureVirtMatch(body: Block, selDef: ValDef, cases: List[Tree], matchEnd: Tree) = {
+ val stats = transform(selDef) :: (cases map (transExpr(_, None, ext)))
+ treeCopy.Block(body, stats, transExpr(matchEnd, None, ext))
+ }
val body1 = body match {
- case Match(selector, cases) if (ext.isDefined && getAnswerTypeAnn(body.tpe).isEmpty) =>
- val cases1 = for {
- cd @ CaseDef(pat, guard, caseBody) <- cases
- caseBody1 = transExpr(body, None, ext)
- } yield {
- treeCopy.CaseDef(cd, transform(pat), transform(guard), caseBody1)
- }
- treeCopy.Match(tree, transform(selector), cases1)
+ case Match(selector, cases) if ext.isDefined && pureBody =>
+ transformPureMatch(body, selector, cases)
+
+ // virtpatmat switch
+ case Block(List(selDef: ValDef), mat@Match(selector, cases)) if ext.isDefined && pureBody =>
+ treeCopy.Block(body, List(transform(selDef)), transformPureMatch(mat, selector, cases))
+
+ // virtpatmat
+ case b@Block(matchStats@((selDef: ValDef) :: cases), matchEnd) if ext.isDefined && pureBody && (matchStats forall gen.hasSynthCaseSymbol) =>
+ transformPureVirtMatch(b, selDef, cases, matchEnd)
+
+ // virtpatmat that stores the scrut separately -- TODO: can we eliminate this case??
+ case Block(List(selDef0: ValDef), mat@Block(matchStats@((selDef: ValDef) :: cases), matchEnd)) if ext.isDefined && pureBody && (matchStats forall gen.hasSynthCaseSymbol)=>
+ treeCopy.Block(body, List(transform(selDef0)), transformPureVirtMatch(mat, selDef, cases, matchEnd))
case _ =>
transExpr(body, None, ext)
}
- debuglog("result "+body1)
- debuglog("result is of type "+body1.tpe)
+ debuglog("anf result "+body1+"\nresult is of type "+body1.tpe)
treeCopy.Function(ff, transformValDefs(vparams), body1)
}
@@ -120,7 +141,6 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
transExpr(tree, None, None)
case _ =>
-
if (hasAnswerTypeAnn(tree.tpe)) {
if (!cpsAllowed)
unit.error(tree.pos, "cps code not allowed here / " + tree.getClass + " / " + tree)
@@ -170,63 +190,74 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
tree match {
case Block(stms, expr) =>
val (cpsA2, cpsR2) = (cpsA, linearize(cpsA, getAnswerTypeAnn(tree.tpe))) // tbd
-// val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
- val (a, b) = transBlock(stms, expr, cpsA2, cpsR2)
+ // val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
- val tree1 = (treeCopy.Block(tree, a, b)) // no updateSynthFlag here!!!
+ val (a, b) = transBlock(stms, expr, cpsA2, cpsR2)
+ val tree1 = (treeCopy.Block(tree, a, b)) // no updateSynthFlag here!!!
(Nil, tree1, cpsA)
- case If(cond, thenp, elsep) =>
- /* possible situations:
- cps before (cpsA)
- cps in condition (spc) <-- synth flag set if *only* here!
- cps in (one or both) branches */
- val (condStats, condVal, spc) = transInlineValue(cond, cpsA)
- val (cpsA2, cpsR2) = if (hasSynthMarker(tree.tpe))
- (spc, linearize(spc, getAnswerTypeAnn(tree.tpe))) else
- (None, getAnswerTypeAnn(tree.tpe)) // if no cps in condition, branches must conform to tree.tpe directly
- val thenVal = transExpr(thenp, cpsA2, cpsR2)
- val elseVal = transExpr(elsep, cpsA2, cpsR2)
-
- // check that then and else parts agree (not necessary any more, but left as sanity check)
- if (cpsR.isDefined) {
- if (elsep == EmptyTree)
- unit.error(tree.pos, "always need else part in cps code")
- }
- if (hasAnswerTypeAnn(thenVal.tpe) != hasAnswerTypeAnn(elseVal.tpe)) {
- unit.error(tree.pos, "then and else parts must both be cps code or neither of them")
- }
-
- (condStats, updateSynthFlag(treeCopy.If(tree, condVal, thenVal, elseVal)), spc)
+ case If(cond, thenp, elsep) =>
+ /* possible situations:
+ cps before (cpsA)
+ cps in condition (spc) <-- synth flag set if *only* here!
+ cps in (one or both) branches */
+ val (condStats, condVal, spc) = transInlineValue(cond, cpsA)
+ val (cpsA2, cpsR2) = if (hasSynthMarker(tree.tpe))
+ (spc, linearize(spc, getAnswerTypeAnn(tree.tpe))) else
+ (None, getAnswerTypeAnn(tree.tpe)) // if no cps in condition, branches must conform to tree.tpe directly
+ val thenVal = transExpr(thenp, cpsA2, cpsR2)
+ val elseVal = transExpr(elsep, cpsA2, cpsR2)
+
+ // check that then and else parts agree (not necessary any more, but left as sanity check)
+ if (cpsR.isDefined) {
+ if (elsep == EmptyTree)
+ unit.error(tree.pos, "always need else part in cps code")
+ }
+ if (hasAnswerTypeAnn(thenVal.tpe) != hasAnswerTypeAnn(elseVal.tpe)) {
+ unit.error(tree.pos, "then and else parts must both be cps code or neither of them")
+ }
- case Match(selector, cases) =>
+ (condStats, updateSynthFlag(treeCopy.If(tree, condVal, thenVal, elseVal)), spc)
- val (selStats, selVal, spc) = transInlineValue(selector, cpsA)
- val (cpsA2, cpsR2) = if (hasSynthMarker(tree.tpe))
- (spc, linearize(spc, getAnswerTypeAnn(tree.tpe))) else
- (None, getAnswerTypeAnn(tree.tpe))
+ case Match(selector, cases) =>
+ val (selStats, selVal, spc) = transInlineValue(selector, cpsA)
+ val (cpsA2, cpsR2) =
+ if (hasSynthMarker(tree.tpe)) (spc, linearize(spc, getAnswerTypeAnn(tree.tpe)))
+ else (None, getAnswerTypeAnn(tree.tpe))
- val caseVals = for {
- cd @ CaseDef(pat, guard, body) <- cases
- bodyVal = transExpr(body, cpsA2, cpsR2)
- } yield {
- treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
- }
+ val caseVals = cases map { case cd @ CaseDef(pat, guard, body) =>
+ val bodyVal = transExpr(body, cpsA2, cpsR2)
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
+ }
- (selStats, updateSynthFlag(treeCopy.Match(tree, selVal, caseVals)), spc)
+ (selStats, updateSynthFlag(treeCopy.Match(tree, selVal, caseVals)), spc)
+ // this is utterly broken: LabelDefs need to be considered together when transforming them to DefDefs:
+ // suppose a Block {L1; ... ; LN}
+ // this should become {D1def ; ... ; DNdef ; D1()}
+ // where D$idef = def L$i(..) = {L$i.body; L${i+1}(..)}
case ldef @ LabelDef(name, params, rhs) =>
+ // println("trans LABELDEF "+(name, params, tree.tpe, hasAnswerTypeAnn(tree.tpe)))
+ // TODO why does the labeldef's type have a cpsMinus annotation, whereas the rhs does not? (BYVALmode missing/too much somewhere?)
if (hasAnswerTypeAnn(tree.tpe)) {
- val sym = currentOwner.newMethod(name, tree.pos, Flags.SYNTHETIC) setInfo ldef.symbol.info
- val rhs1 = new TreeSymSubstituter(List(ldef.symbol), List(sym)).transform(rhs)
+ // currentOwner.newMethod(name, tree.pos, Flags.SYNTHETIC) setInfo ldef.symbol.info
+ val sym = ldef.symbol resetFlag Flags.LABEL
+ val rhs1 = rhs //new TreeSymSubstituter(List(ldef.symbol), List(sym)).transform(rhs)
val rhsVal = transExpr(rhs1, None, getAnswerTypeAnn(tree.tpe)) changeOwner (currentOwner -> sym)
val stm1 = localTyper.typed(DefDef(sym, rhsVal))
- val expr = localTyper.typed(Apply(Ident(sym), List()))
-
- (List(stm1), expr, cpsA)
+ // since virtpatmat does not rely on fall-through, don't call the labels it emits
+ // transBlock will take care of calling the first label
+ // calling each labeldef is wrong, since some labels may be jumped over
+ // we can get away with this for now since the only other labels we emit are for tailcalls/while loops,
+ // which do not have consecutive labeldefs (and thus fall-through is irrelevant)
+ if (gen.hasSynthCaseSymbol(ldef)) (List(stm1), localTyper.typed{Literal(Constant(()))}, cpsA)
+ else {
+ assert(params.isEmpty, "problem in ANF transforming label with non-empty params "+ ldef)
+ (List(stm1), localTyper.typed{Apply(Ident(sym), List())}, cpsA)
+ }
} else {
val rhsVal = transExpr(rhs, None, None)
(Nil, updateSynthFlag(treeCopy.LabelDef(tree, name, params, rhsVal)), cpsA)
@@ -324,7 +355,20 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
List(expr)
)
)
- return ((stms, call))
+ // This is today's sick/meaningless heuristic for spotting breakdown so
+ // we don't proceed until stack traces start draping themselves over everything.
+ // If there are wildcard types in the tree and B == Nothing, something went wrong.
+ // (I thought WildcardTypes would be enough, but nope. 'reset0 { 0 }' has them.)
+ //
+ // Code as simple as reset((_: String).length)
+ // will crash meaninglessly without this check. See SI-3718.
+ //
+ // TODO - obviously this should be done earlier, differently, or with
+ // a more skilled hand. Most likely, all three.
+ if ((b.typeSymbol eq NothingClass) && call.tpe.exists(_ eq WildcardType))
+ unit.error(tree.pos, "cannot cps-transform malformed (possibly in shift/reset placement) expression")
+ else
+ return ((stms, call))
}
catch {
case ex:TypeError =>
@@ -412,18 +456,32 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
def transBlock(stms: List[Tree], expr: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree) = {
- stms match {
- case Nil =>
- transTailValue(expr, cpsA, cpsR)
-
- case stm::rest =>
- var (rest2, expr2) = (rest, expr)
- val (headStms, headSpc) = transInlineStm(stm, cpsA)
- val (restStms, restExpr) = transBlock(rest2, expr2, headSpc, cpsR)
- (headStms:::restStms, restExpr)
- }
+ def rec(currStats: List[Tree], currAns: CPSInfo, accum: List[Tree]): (List[Tree], Tree) =
+ currStats match {
+ case Nil =>
+ val (anfStats, anfExpr) = transTailValue(expr, currAns, cpsR)
+ (accum ++ anfStats, anfExpr)
+
+ case stat :: rest =>
+ val (stats, nextAns) = transInlineStm(stat, currAns)
+ rec(rest, nextAns, accum ++ stats)
+ }
+
+ val (anfStats, anfExpr) = rec(stms, cpsA, List())
+ // println("\nanf-block:\n"+ ((stms :+ expr) mkString ("{", "\n", "}")) +"\nBECAME\n"+ ((anfStats :+ anfExpr) mkString ("{", "\n", "}")))
+ // println("synth case? "+ (anfStats map (t => (t, t.isDef, gen.hasSynthCaseSymbol(t)))))
+ // SUPER UGLY HACK: handle virtpatmat-style matches, whose labels have already been turned into DefDefs
+ if (anfStats.nonEmpty && (anfStats forall (t => !t.isDef || gen.hasSynthCaseSymbol(t)))) {
+ val (prologue, rest) = (anfStats :+ anfExpr) span (s => !s.isInstanceOf[DefDef]) // find first case
+ // println("rest: "+ rest)
+ // val (defs, calls) = rest partition (_.isInstanceOf[DefDef])
+ if (rest nonEmpty){
+ // the filter drops the ()'s emitted when transValue encountered a LabelDef
+ val stats = prologue ++ (rest filter (_.isInstanceOf[DefDef])).reverse // ++ calls
+ // println("REVERSED "+ (stats mkString ("{", "\n", "}")))
+ (stats, localTyper.typed{Apply(Ident(rest.head.symbol), List())}) // call first label to kick-start the match
+ } else (anfStats, anfExpr)
+ } else (anfStats, anfExpr)
}
-
-
}
}
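
Aside: the transBlock rewrite above replaces re-entrant recursion over the statement list with a local tail-recursive helper that threads the per-statement CPS info and accumulates results. A simplified sketch of that shape, with Option[String] standing in for CPSInfo and placeholder transforms:

    // Sketch (illustrative, not from the patch): accumulate transformed
    // statements while threading state, then finish with the tail value.
    import scala.annotation.tailrec

    object TransBlockShape {
      type CpsInfo = Option[String]

      def transStm(stm: String, info: CpsInfo): (List[String], CpsInfo) =
        (List(stm + "'"), info)                      // placeholder transform

      def transTail(expr: String, info: CpsInfo): (List[String], String) =
        (Nil, expr + "'")                            // placeholder transform

      def transBlock(stms: List[String], expr: String, info: CpsInfo): (List[String], String) = {
        @tailrec
        def rec(curr: List[String], currInfo: CpsInfo, accum: List[String]): (List[String], String) =
          curr match {
            case Nil =>
              val (tailStats, tailExpr) = transTail(expr, currInfo)
              (accum ++ tailStats, tailExpr)
            case stat :: rest =>
              val (stats, nextInfo) = transStm(stat, currInfo)
              rec(rest, nextInfo, accum ++ stats)
          }
        rec(stms, info, Nil)
      }

      def main(args: Array[String]): Unit =
        println(transBlock(List("a", "b"), "c", None))   // (List(a', b'),c')
    }
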
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index 6453671eac..dcb7cd601f 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -15,7 +15,7 @@ import scala.tools.nsc.ast._
* In methods marked @cps, CPS-transform assignments introduced by ANF-transform phase.
*/
abstract class SelectiveCPSTransform extends PluginComponent with
- InfoTransform with TypingTransformers with CPSUtils {
+ InfoTransform with TypingTransformers with CPSUtils with TreeDSL {
// inherits abstract value `global` and class `Phase` from Transform
import global._ // the global environment
@@ -65,6 +65,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
class CPSTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ private val patmatTransformer = patmat.newTransformer(unit)
override def transform(tree: Tree): Tree = {
if (!cpsEnabled) return tree
@@ -190,28 +191,29 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val targettp = transformCPSType(tree.tpe)
-// val expr2 = if (catches.nonEmpty) {
- val pos = catches.head.pos
- val argSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
- val rhs = Match(Ident(argSym), catches1)
- val fun = Function(List(ValDef(argSym)), rhs)
- val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
- val funDef = localTyper.typed(atPos(pos) { ValDef(funSym, fun) })
- val expr2 = localTyper.typed(atPos(pos) { Apply(Select(expr1, expr1.tpe.member(cpsNames.flatMapCatch)), List(Ident(funSym))) })
-
- argSym.owner = fun.symbol
- rhs.changeOwner(currentOwner -> fun.symbol)
-
- val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
- val catch2 = { localTyper.typedCases(List(
- CaseDef(Bind(exSym, Typed(Ident("_"), TypeTree(ThrowableClass.tpe))),
- Apply(Select(Ident(funSym), nme.isDefinedAt), List(Ident(exSym))),
- Apply(Ident(funSym), List(Ident(exSym))))
- ), ThrowableClass.tpe, targettp) }
-
- //typedCases(tree, catches, ThrowableClass.tpe, pt)
-
- localTyper.typed(Block(List(funDef), treeCopy.Try(tree, treeCopy.Block(block1, stms, expr2), catch2, finalizer1)))
+ val pos = catches.head.pos
+ val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
+ val funDef = localTyper.typed(atPos(pos) {
+ ValDef(funSym, Match(EmptyTree, catches1))
+ })
+ val expr2 = localTyper.typed(atPos(pos) {
+ Apply(Select(expr1, expr1.tpe.member(cpsNames.flatMapCatch)), List(Ident(funSym)))
+ })
+
+ val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
+
+ import CODE._
+ // generate a case that is supported directly by the back-end
+ val catchIfDefined = CaseDef(
+ Bind(exSym, Ident(nme.WILDCARD)),
+ EmptyTree,
+ IF ((REF(funSym) DOT nme.isDefinedAt)(REF(exSym))) THEN (REF(funSym) APPLY (REF(exSym))) ELSE Throw(REF(exSym))
+ )
+
+ val catch2 = localTyper.typedCases(List(catchIfDefined), ThrowableClass.tpe, targettp)
+ //typedCases(tree, catches, ThrowableClass.tpe, pt)
+
+ patmatTransformer.transform(localTyper.typed(Block(List(funDef), treeCopy.Try(tree, treeCopy.Block(block1, stms, expr2), catch2, finalizer1))))
/*
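
Aside: the rewritten try/catch handling packs the user's catch cases into a PartialFunction value and emits a single back-end-friendly case that tests isDefinedAt before applying it, rethrowing otherwise. A self-contained sketch of that runtime shape, independent of the compiler's TreeDSL:

    // Sketch (illustrative, not from the patch): one generated catch-all case
    // guarded by isDefinedAt, mirroring the catchIfDefined tree above.
    object FlatMapCatchShape {
      def runGuarded[A](body: => A)(handlers: PartialFunction[Throwable, A]): A =
        try body
        catch {
          case ex: Throwable =>
            if (handlers.isDefinedAt(ex)) handlers(ex) else throw ex
        }

      def main(args: Array[String]): Unit = {
        val out = runGuarded[Int](throw new IllegalArgumentException("boom")) {
          case _: IllegalArgumentException => -1
        }
        println(out)   // -1; an unmatched exception would be rethrown instead
      }
    }
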
diff --git a/src/dbc/scala/dbc/DataType.scala b/src/dbc/scala/dbc/DataType.scala
deleted file mode 100644
index 677621e0c0..0000000000
--- a/src/dbc/scala/dbc/DataType.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-/** An ISO-9075:2003 (SQL) data type. Mappings between SQL types and
- * database specific types should be provided by the database driver.
- */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class DataType {
-
- /** Tests whether this datatype is equivalent to another. Usually, two
- * types are defined as equivalent if they are equal. Two types can be
- * equivalent without being equal if values of those types will be
- * encoded in the same native Scala type.
- */
- def isEquivalent(datatype: DataType): Boolean;
-
- /** Tests whether this datatype is equivalent or a subtype of another
- * datatype. Type <code>A</code> is said to be subtype of type
- * <code>B</code> if any value of type <code>A</code> can be
- * represented as a value of type <code>B</code>.
- */
- def isSubtypeOf(datatype: DataType): Boolean;
-
- /** The native Scala type in which values of this SQL type will be
- * encoded.
- */
- type NativeType <: Any;
-
- /** The native Scala type in which values of this SQL type will be
- * encoded. This must point to the same type as <code>NativeType</code>.
- */
- def nativeTypeId: DataType.Id;
-
- /** Whether the value can take the null value, None when this property is
- * unknown.
- */
- def nullable: Option[Boolean] = None;
-
- /** The SQL name of the type */
- def sqlString: String = "UNDEFINED DATA TYPE"
-
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object DataType {
-
- type Id = Int;
-
- val OBJECT : Id = 10;
- val BOOLEAN : Id = 20;
- val BYTE : Id = 30;
- val SHORT : Id = 31;
- val INT : Id = 32;
- val LONG : Id = 33;
- val BIG_INTEGER: Id = 34;
- val BIG_DECIMAL: Id = 35;
- val FLOAT : Id = 40;
- val DOUBLE : Id = 41;
- val STRING : Id = 50;
-
-}
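
Aside: the deleted DataType contract defines equivalence as "encoded in the same native type" and subtyping as "every value of A is representable as a value of B". A toy sketch of that contract under an invented IntWidth stand-in (not a class from scala.dbc):

    // Sketch (illustrative, not from the patch): equivalence vs. subtyping
    // for integer widths, mirroring the documented contract.
    case class IntWidth(bits: Int) {
      def isEquivalent(other: IntWidth): Boolean = bits == other.bits
      def isSubtypeOf(other: IntWidth): Boolean  = bits <= other.bits
    }

    object DataTypeContractDemo {
      def main(args: Array[String]): Unit = {
        println(IntWidth(16) isSubtypeOf IntWidth(32))    // true: every short fits in an int
        println(IntWidth(16) isEquivalent IntWidth(32))   // false: different native encodings
      }
    }
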
diff --git a/src/dbc/scala/dbc/Database.scala b/src/dbc/scala/dbc/Database.scala
deleted file mode 100644
index 60e16367e8..0000000000
--- a/src/dbc/scala/dbc/Database.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-
-
-import java.sql._
-
-/** A link to a database. The <code>Database</code> abstract class must
- * be specialised for every different DBMS.
- *
- * @author Gilles Dubochet
- */
-@deprecated(DbcIsDeprecated, "2.9.0") case class Database(dbms: Vendor) {
-
- class Closed extends Exception {}
-
- /** A lock used for operations that need to be atomic for this database
- * instance. */
- private val lock: scala.concurrent.Lock = new scala.concurrent.Lock()
-
- /** The vendor of the DBMS that contains this database. */
- private val vendor: Vendor = dbms
-
- /** The Database connections available to use. */
- private var availableConnections: List[Connection] = Nil
-
- /** The connections that are currently in use. */
- private var usedConnections: List[Connection] = Nil
-
- /** Whether the database no longer accepts new connections. */
- private var closing: Boolean = false;
-
- /** Retrieves a connection from the available connection pool or creates
- * a new one.
- *
- * @return A connection that can be used to access the database.
- */
- private def getConnection: Connection = {
- if (closing) {
- throw new Closed;
- } else {
- availableConnections match {
- case Nil => {
- lock.acquire;
- val connection = vendor.getConnection;
- usedConnections = connection :: usedConnections;
- lock.release;
- connection
- }
- case connection :: cs => {
- lock.acquire;
- availableConnections = cs;
- usedConnections = connection :: usedConnections;
- lock.release;
- connection;
- }
- }
- }
- }
-
- /** Closes a connection to this database. A closed connection might
- * also return to the available connection pool if the latter is depleted.
- *
- * @param connection The connection that should be closed.
- */
- private def closeConnection(connection: Connection): Unit = {
- if (closing) {
- connection.close()
- } else {
- lock.acquire
- usedConnections = usedConnections.filterNot(e => (e.equals(connection)));
- if (availableConnections.length < vendor.retainedConnections)
- availableConnections = connection :: availableConnections
- else
- connection.close()
- lock.release
- }
- }
-
- /** ..
- */
- def close() {
- closing = true
- for (conn <- availableConnections) conn.close()
- }
-
- /** Executes a statement that returns a relation on this database.
- *
- * @param relationStatement The statement to execute.
- * @return The relation returned by the database for this statement.
- */
- def executeStatement(relationStatement: statement.Relation): result.Relation =
- executeStatement(relationStatement, false);
-
- /** Executes a statement that returns a relation on this database.
- *
- * @param relationStatement The statement to execute.
- * @param debug Whether debugging information should be printed on the console.
- * @return The relation returned by the database for this statement.
- */
- def executeStatement(relationStatement: statement.Relation,
- debug: Boolean): result.Relation =
- new scala.dbc.result.Relation {
- val statement = relationStatement
- if (debug) Console.println("## " + statement.sqlString)
- private val connection = getConnection
- val sqlResult = connection.createStatement().executeQuery(statement.sqlString)
- closeConnection(connection)
- statement.typeCheck(this)
- }
-
- /** Executes a statement that updates the state of the database.
- * @param statusStatement The statement to execute.
- * @return The status of the database after the statement has been executed. */
- def executeStatement(statusStatement: statement.Status): result.Status[Unit] =
- executeStatement(statusStatement, false);
-
- /** Executes a statement that updates the state of the database.
- *
- * @param statusStatement The statement to execute.
- * @param debug Whether debugging information should be printed on the console.
- * @return The status of the database after the statement has been executed.
- */
- def executeStatement(statusStatement: statement.Status,
- debug: Boolean): result.Status[Unit] =
- new scala.dbc.result.Status[Unit] {
- val statement = statusStatement;
- if (debug) Console.println("## " + statement.sqlString);
- def result = ()
- private val connection = getConnection;
- val jdbcStatement: java.sql.Statement = connection.createStatement();
- jdbcStatement.execute(statement.sqlString);
- val touchedCount = Some(jdbcStatement.getUpdateCount());
- closeConnection(connection);
- }
-
- /** Executes a list of statements or other operations inside a transaction.
- * Only statements are protected in a transaction, other Scala code is not.
- *
- * @param transactionStatement The transaction to execute as a closure.
- * @return The status of the database after the transaction has been executed.
- */
- def executeStatement[ResultType](transactionStatement: statement.Transaction[ResultType]): result.Status[ResultType] =
- executeStatement(transactionStatement, false);
-
- /** Executes a list of statements or other operations inside a transaction.
- * Only statements are protected in a transaction, other Scala code is not.
- *
- * @param transactionStatement The transaction to execute as a closure.
- * @param debug Whether debugging information should be printed on the console.
- * @return The status of the database after the transaction has been executed.
- */
- def executeStatement[ResultType](transactionStatement: statement.Transaction[ResultType], debug: Boolean): result.Status[ResultType] = {
- new scala.dbc.result.Status[ResultType] {
- val touchedCount = None
- val statement = transactionStatement
- private val connection = getConnection
- connection.setAutoCommit(false)
- val jdbcStatement: java.sql.Statement = connection.createStatement();
- if (debug) Console.println("## " + transactionStatement.sqlStartString);
- jdbcStatement.execute(transactionStatement.sqlStartString);
- val result: ResultType = try {
- val buffer = transactionStatement.transactionBody(Database.this);
- if (debug) Console.println("## " + transactionStatement.sqlCommitString);
- jdbcStatement.execute(transactionStatement.sqlCommitString);
- buffer
- } catch {
- case e: Throwable => {
- if (debug) Console.println("## " + transactionStatement.sqlAbortString);
- jdbcStatement.execute(transactionStatement.sqlAbortString);
- throw e
- }
- }
- connection.setAutoCommit(true)
- closeConnection(connection)
- }
- }
-
-}
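
Aside: the deleted Database class pooled JDBC connections behind a lock, handing out free ones and retaining a bounded number on release. A dependency-free sketch of that check-out/check-in pattern, using a plain object monitor and a factory function so it runs without a driver (SimplePool and its names are invented stand-ins):

    // Sketch (illustrative, not from the patch): acquire from a free list or
    // create, release back unless the retained quota is already met.
    class SimplePool[A](fresh: () => A, retained: Int) {
      private var available: List[A] = Nil
      private var inUse: List[A] = Nil

      def acquire(): A = synchronized {
        val conn = available match {
          case Nil     => fresh()
          case c :: cs => available = cs; c
        }
        inUse = conn :: inUse
        conn
      }

      def release(conn: A): Unit = synchronized {
        inUse = inUse.filterNot(_ == conn)
        if (available.length < retained) available = conn :: available
        // else: the real code closes the surplus connection here
      }
    }

    object SimplePoolDemo {
      def main(args: Array[String]): Unit = {
        var made = 0
        val pool = new SimplePool[String](() => { made += 1; "conn-" + made }, retained = 2)
        val c1 = pool.acquire(); val c2 = pool.acquire()
        pool.release(c1); pool.release(c2)
        println((made, pool.acquire()))   // (2,conn-2): released connections are reused
      }
    }
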
diff --git a/src/dbc/scala/dbc/Syntax.scala b/src/dbc/scala/dbc/Syntax.scala
deleted file mode 100644
index 85cd1c1a1d..0000000000
--- a/src/dbc/scala/dbc/Syntax.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-import java.math.{BigDecimal, BigInteger};
-
-
-/** This class ..
- *
- */
-@deprecated(DbcIsDeprecated, "2.9.0") object Syntax {
-
- import syntax.DataTypeUtil;
-
- /* Data types */
- def boolean = DataTypeUtil.boolean;
- def tinyint = DataTypeUtil.tinyint;
- def smallint = DataTypeUtil.smallint;
- def integer = DataTypeUtil.integer;
- def bigint = DataTypeUtil.bigint;
- def real = DataTypeUtil.real;
-
- def numeric(precision: Int) = DataTypeUtil.numeric(precision);
- def numeric(precision: Int, scale: Int) = DataTypeUtil.numeric(precision, scale);
-
- def doublePrecision = DataTypeUtil.doublePrecision;
- def character(length: Int) = DataTypeUtil.character(length);
- def characterVarying(length: Int) = DataTypeUtil.characterVarying(length);
- def characterLargeObject = DataTypeUtil.characterLargeObject;
-
- /* Statements */
- //def select
-
- /* Other stuff */
- def database (server: String, username: String, password: String): dbc.Database =
- syntax.Database.database(server, username, password);
-
-}
diff --git a/src/dbc/scala/dbc/Utilities.scala b/src/dbc/scala/dbc/Utilities.scala
deleted file mode 100644
index c37b1bb98d..0000000000
--- a/src/dbc/scala/dbc/Utilities.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-/** An object offering transformation methods (views) on various values.
- * This object's members must be visible in an expression to use value
- * auto-conversion.
- */
-@deprecated(DbcIsDeprecated, "2.9.0") object Utilities {
-
- implicit def constantToValue (obj: statement.expression.Constant): Value =
- obj.constantValue;
-
- implicit def valueToConstant (obj: Value): statement.expression.Constant =
- new statement.expression.Constant {
- val constantValue = obj;
- }
-
-}
diff --git a/src/dbc/scala/dbc/Value.scala b/src/dbc/scala/dbc/Value.scala
deleted file mode 100644
index a502f51cb5..0000000000
--- a/src/dbc/scala/dbc/Value.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-/** A SQL-99 value of any type. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Value {
-
- /** The SQL-99 type of the value. */
- val dataType: DataType;
-
- type NativeType = dataType.type#NativeType;
-
- val nativeValue: NativeType;
-
- /** A SQL-99 compliant string representation of the value. */
- def sqlString: String;
-
-}
diff --git a/src/dbc/scala/dbc/Vendor.scala b/src/dbc/scala/dbc/Vendor.scala
deleted file mode 100644
index 68f6102526..0000000000
--- a/src/dbc/scala/dbc/Vendor.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-import java.sql.{Connection, Driver};
-
-
-/** This class ..
- */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Vendor {
-
- def nativeDriverClass: Class[_];
- def uri: java.net.URI;
- def user: String;
- def pass: String;
- def nativeProperties: java.util.Properties = {
- val properties = new java.util.Properties();
- properties.setProperty("user", user);
- properties.setProperty("password", pass);
- properties
- }
-
- def retainedConnections: Int;
-
- def getConnection: Connection = {
- val driver = nativeDriverClass.newInstance().asInstanceOf[Driver];
- driver.connect(uri.toString(),nativeProperties)
- }
-
- def urlProtocolString: String;
-
-}
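
Aside: the deleted Vendor class obtained connections by instantiating the vendor's JDBC driver reflectively and calling Driver.connect with a Properties bag carrying "user" and "password". A hedged sketch of the same recipe; the driver class name and URL in the comment are placeholders, nothing here is specific to scala.dbc.

    // Sketch (illustrative, not from the patch): reflective driver + connect,
    // the same shape as Vendor.getConnection above.
    import java.sql.{Connection, Driver}
    import java.util.Properties

    object JdbcConnectDemo {
      def connect(driverClassName: String, url: String, user: String, pass: String): Connection = {
        val props = new Properties()
        props.setProperty("user", user)
        props.setProperty("password", pass)
        val driver = Class.forName(driverClassName).newInstance().asInstanceOf[Driver]
        driver.connect(url, props)
      }

      def main(args: Array[String]): Unit = {
        // needs a real driver on the classpath, e.g.
        // connect("org.postgresql.Driver", "jdbc:postgresql:test", "scott", "tiger")
        println("see connect() above")
      }
    }
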
diff --git a/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala b/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala
deleted file mode 100644
index 31752e18c7..0000000000
--- a/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store varying-precision
- * numbers.
- */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class ApproximateNumeric[Type] (
- override val nativeTypeId: DataType.Id
-) extends datatype.Numeric[Type](nativeTypeId) {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: ApproximateNumeric[_] =>
- (nativeTypeId == dt.nativeTypeId &&
- precisionRadix == dt.precisionRadix &&
- precision == dt.precision &&
- signed == dt.signed)
- case _ =>
- false
- }
-
- def isSubtypeOf (datatype:DataType) = datatype match {
- case dt:ApproximateNumeric[_] =>
- (nativeTypeId == dt.nativeTypeId &&
- precisionRadix == dt.precisionRadix &&
- precision <= dt.precision &&
- signed == dt.signed)
- case _ =>
- false
- }
-
- /** A SQL-99 compliant string representation of the type.
- * <h3>Compatibility notice</h3> This method assumes that a real
- * uses 32 bits and a double 64. This is not defined in the
- * standard but is usually the case.
- */
- override def sqlString: java.lang.String = Tuple2(precisionRadix,precision) match {
- case Tuple2(2,64) => "REAL"
- case Tuple2(2,128) => "DOUBLE PRECISION"
- case Tuple2(2,p) =>
- throw exception.UnsupportedFeature("SQL-99 does not support an approximate numeric type with a binary defined precision other than 16, 32 and 64 bits");
- case Tuple2(10,p) => "FLOAT (" + p.toString() + ")"
- case Tuple2(pr,_) =>
- throw exception.UnsupportedFeature("SQL-99 does not support the precision of an approximate numeric type to be defined in a radix other than 2 or 10");
- }
-
-}
diff --git a/src/dbc/scala/dbc/datatype/Boolean.scala b/src/dbc/scala/dbc/datatype/Boolean.scala
deleted file mode 100644
index 4a32ce98f1..0000000000
--- a/src/dbc/scala/dbc/datatype/Boolean.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** The SQL type for a truth value. */
-@deprecated(DbcIsDeprecated, "2.9.0") class Boolean extends DataType {
-
- def isEquivalent (datatype:DataType) = datatype match {
- case dt:Boolean => true
- case _ => false
- }
-
- def isSubtypeOf (datatype:DataType) = isEquivalent(datatype);
-
- type NativeType = scala.Boolean;
- val nativeTypeId = DataType.BOOLEAN;
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String = "BOOLEAN";
-
-}
diff --git a/src/dbc/scala/dbc/datatype/Character.scala b/src/dbc/scala/dbc/datatype/Character.scala
deleted file mode 100644
index 02dec06281..0000000000
--- a/src/dbc/scala/dbc/datatype/Character.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A SQL type for a string of characters of arbitrary length with
- * arbitrary character set.
- */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Character extends CharacterString {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: Character =>
- length == dt.length && encoding == dt.encoding
- case _ =>
- false
- }
-
- def isSubtypeOf(datatype: DataType) = datatype match {
- case dt: Character =>
- length >= dt.length && encoding == dt.encoding
- case _ =>
- false
- }
-
- /** The length of the string defined in characters. */
- def length: Int;
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String = "CHARACTER (" + length.toString() + ")";
-
-}
diff --git a/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala b/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala
deleted file mode 100644
index 2c7ef64d66..0000000000
--- a/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A SQL type for an unbounded length string of characters with arbitrary
- * character set. */
-@deprecated(DbcIsDeprecated, "2.9.0") class CharacterLargeObject extends CharacterString {
-
- def isEquivalent (datatype:DataType) = datatype match {
- case dt:CharacterLargeObject => {
- encoding == dt.encoding
- }
- case _ => false
- }
-
- def isSubtypeOf (datatype:DataType) = isEquivalent(datatype);
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String = "CHARACTER LARGE OBJECT";
-
-}
diff --git a/src/dbc/scala/dbc/datatype/CharacterString.scala b/src/dbc/scala/dbc/datatype/CharacterString.scala
deleted file mode 100644
index 54d6e0111f..0000000000
--- a/src/dbc/scala/dbc/datatype/CharacterString.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store strings of characters. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class CharacterString extends String {
-
- type NativeType = java.lang.String;
- val nativeTypeId = DataType.STRING;
-
- /** The name of the character set in which the string is encoded. */
- def encoding: Option[java.lang.String] = None;
-
-}
diff --git a/src/dbc/scala/dbc/datatype/CharacterVarying.scala b/src/dbc/scala/dbc/datatype/CharacterVarying.scala
deleted file mode 100644
index 9df487579f..0000000000
--- a/src/dbc/scala/dbc/datatype/CharacterVarying.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A SQL type for a varying length string of characters with arbitrary
- * maximal length and arbitrary character set.
- */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class CharacterVarying extends CharacterString {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: CharacterVarying =>
- length == dt.length && encoding == dt.encoding
- case _ =>
- false
- }
-
- def isSubtypeOf(datatype: DataType) = datatype match {
- case dt: CharacterVarying =>
- length >= dt.length && encoding == dt.encoding
- case _ =>
- false
- }
-
- /** The maximal length of the string defined in characters. */
- def length: Int;
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String =
- "CHARACTER VARYING (" + length.toString() + ")";
-
-}
diff --git a/src/dbc/scala/dbc/datatype/ExactNumeric.scala b/src/dbc/scala/dbc/datatype/ExactNumeric.scala
deleted file mode 100644
index a578846977..0000000000
--- a/src/dbc/scala/dbc/datatype/ExactNumeric.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store constant-precision
- * numbers.
- */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class ExactNumeric[Type](
- override val nativeTypeId: DataType.Id
-) extends datatype.Numeric[Type](nativeTypeId) {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: ExactNumeric[_] =>
- (nativeTypeId == dt.nativeTypeId &&
- precisionRadix == dt.precisionRadix &&
- precision == dt.precision &&
- scale == dt.scale &&
- signed == dt.signed)
- case _ =>
- false
- }
-
- def isSubtypeOf(datatype: DataType) = datatype match {
- case dt: ExactNumeric[_] =>
- (nativeTypeId == dt.nativeTypeId &&
- precisionRadix == dt.precisionRadix &&
- precision <= dt.precision &&
- scale <= dt.scale &&
- signed == dt.signed)
- case _ =>
- false
- }
-
- /** The number of digits used after the decimal point. */
- def scale: Int;
-
- /** A SQL-99 compliant string representation of the type.
- * <h3>Compatibility notice</h3> This method assumes that an integer
- * uses 32 bits, a small 16 and a big 64. This is not defined in the
- * standard but is usually the case.
- */
- override def sqlString: java.lang.String = Tuple3(precisionRadix,precision,scale) match {
- case Tuple3(2,16,0) => "SMALLINT"
- case Tuple3(2,32,0) => "INTEGER"
- case Tuple3(2,64,0) => "BIGINT"
- case Tuple3(2,java.lang.Integer.MAX_VALUE,0) => "BIGINT"
- case Tuple3(2,p,s) =>
- throw exception.UnsupportedFeature("SQL-99 does not support an exact numeric type with a binary defined precision other than 16, 32 and 64 bits");
- case Tuple3(10,p,0) => "NUMERIC (" + p.toString() + ")"
- case Tuple3(10,p,s) => "NUMERIC (" + p.toString() + ", " + s.toString() + ")"
- case Tuple3(pr,_,_) =>
- throw exception.UnsupportedFeature("SQL-99 does not support the precision of an exact numeric type to be defined in a radix other than 2 or 10");
- }
-
-}
diff --git a/src/dbc/scala/dbc/datatype/Factory.scala b/src/dbc/scala/dbc/datatype/Factory.scala
deleted file mode 100644
index bb9b3f5b61..0000000000
--- a/src/dbc/scala/dbc/datatype/Factory.scala
+++ /dev/null
@@ -1,250 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-import java.sql.Types._;
-import java.math.BigInteger;
-import java.math.BigDecimal;
-
-@deprecated(DbcIsDeprecated, "2.9.0") object Factory {
-
- final val java_lang_Integer_SIZE = 32;
- final val java_lang_Long_SIZE = 64;
-
- /** Returns a nullable property formatted as a boolean option */
- def isNullable (metadata:java.sql.ResultSetMetaData, index:Int): Option[scala.Boolean] =
- metadata.isNullable(index) match {
- case java.sql.ResultSetMetaData.columnNoNulls => Some(false);
- case java.sql.ResultSetMetaData.columnNullable => Some(true);
- case java.sql.ResultSetMetaData.columnNullableUnknown => None;
- }
-
- /** Returns the binary precision for an integer field. This should only be
- * used to find precision for integer numbers. It assumes that
- * bytes cannot be used partially (result % 8 = 0). */
- def bytePrecision (precision:Int, signed:scala.Boolean, safe:scala.Boolean): Int = {
- val decimalPrecision = precision + (if (safe) 1 else 0);
- Pair(signed,decimalPrecision) match {
- case Pair(_,0) => java.lang.Integer.MAX_VALUE // That's a bit of a hack.
- case Pair(_,dp) if (dp <= 3) => 8
- case Pair(_,dp) if (dp <= 5) => 16
- case Pair(true,dp) if (dp <= 7) => 24
- case Pair(false,dp) if (dp <= 8) => 24
- case Pair(_,dp) if (dp <= 10) => 32
- case Pair(true,dp) if (dp <= 12) => 40
- case Pair(false,dp) if (dp <= 13) => 40
- case Pair(_,dp) if (dp <= 15) => 48
- case Pair(_,dp) if (dp <= 17) => 56
- case Pair(true,dp) if (dp <= 19) => 64
- case Pair(false,dp) if (dp <= 20) => 64
- case Pair(_,dp) if (dp <= 22) => 72
- case Pair(true,dp) if (dp <= 24) => 80
- case Pair(false,dp) if (dp <= 25) => 80
- case Pair(_,dp) if (dp <= 27) => 88
- case Pair(_,dp) if (dp <= 29) => 96
- case Pair(_,dp) if (dp <= 32) => 104
- case Pair(_,dp) if (dp <= 34) => 112
- case Pair(true,dp) if (dp <= 36) => 120
- case Pair(false,dp) if (dp <= 37) => 120
- case Pair(_,dp) if (dp <= 39) => 128
- case _ => java.lang.Integer.MAX_VALUE
- }
- }
-
- def create (metadata:java.sql.ResultSetMetaData, index:Int): DataType = {
- metadata.getColumnType(index) match {
- /* Boolean data types. */
- case BOOLEAN => new datatype.Boolean {
- override val nullable = isNullable(metadata,index);
- }
- case BIT => new datatype.Boolean {
- override val nullable = isNullable(metadata,index);
- }
- /* Fixed precision numeric data types. */
- case DECIMAL => {
- Pair(bytePrecision(metadata.getPrecision(index),metadata.isSigned(index),true),metadata.getScale(index) == 0) match {
- case Pair(bp,true) if (bp <= java_lang_Integer_SIZE) =>
- new datatype.ExactNumeric[Int](DataType.INT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(bp,true) if (bp <= java_lang_Long_SIZE) =>
- new datatype.ExactNumeric[Long](DataType.LONG) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(_,true) =>
- new datatype.ExactNumeric[BigInteger](DataType.BIG_INTEGER) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(_,false) =>
- new datatype.ExactNumeric[BigDecimal](DataType.BIG_DECIMAL) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- }
- }
- case NUMERIC => {
- Pair(bytePrecision(metadata.getPrecision(index),metadata.isSigned(index),true),metadata.getScale(index) == 0) match {
- case Pair(bp,true) if (bp <= java_lang_Integer_SIZE) =>
- new datatype.ExactNumeric[Int](DataType.INT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(bp,true) if (bp <= java_lang_Long_SIZE) =>
- new datatype.ExactNumeric[Long](DataType.LONG) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(_,true) =>
- new datatype.ExactNumeric[BigInteger](DataType.BIG_INTEGER) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(_,false) =>
- new datatype.ExactNumeric[BigDecimal](DataType.BIG_DECIMAL) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- }
- }
- /* Fixed precision integer data types. */
- case BIGINT =>
- new datatype.ExactNumeric[Long](DataType.LONG) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 64;
- val signed = metadata.isSigned(index);
- val scale = 0;
- }
- case INTEGER =>
- new datatype.ExactNumeric[Int](DataType.INT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 32;
- val signed = metadata.isSigned(index);
- val scale = 0;
- }
- case SMALLINT =>
- new datatype.ExactNumeric[Short](DataType.SHORT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 16;
- val signed = metadata.isSigned(index);
- val scale = 0;
- }
- case TINYINT =>
- new datatype.ExactNumeric[Byte](DataType.BYTE) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 8;
- val signed = metadata.isSigned(index);
- val scale = 0;
- }
- /* Floating point numeric data types. */
- case REAL =>
- new datatype.ApproximateNumeric[Float](DataType.FLOAT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 64;
- val signed = metadata.isSigned(index);
- }
- case DOUBLE =>
- new datatype.ApproximateNumeric[Double](DataType.DOUBLE) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 128;
- val signed = metadata.isSigned(index);
- }
- case FLOAT =>
- new datatype.ApproximateNumeric[Double](DataType.DOUBLE) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 128;
- val signed = metadata.isSigned(index);
- }
- /* Character string data types. */
- case CHAR => new datatype.Character {
- override val nullable = isNullable(metadata,index);
- val length = metadata.getColumnDisplaySize(index);
- }
- case CLOB => new datatype.CharacterLargeObject {
- override val nullable = isNullable(metadata,index);
- }
- case LONGVARCHAR => {
- if (metadata.getColumnDisplaySize(index) >= 0)
- new datatype.CharacterVarying {
- override val nullable = isNullable(metadata,index);
- def length = metadata.getColumnDisplaySize(index);
- }
- else // A PostgreSQL Hack
- new datatype.CharacterLargeObject {
- override val nullable = isNullable(metadata,index);
- }
- }
- case VARCHAR => {
- if (metadata.getColumnDisplaySize(index) >= 0)
- new datatype.CharacterVarying {
- override val nullable = isNullable(metadata,index);
- def length = metadata.getColumnDisplaySize(index);
- }
- else // A PostgreSQL Hack
- new datatype.CharacterLargeObject {
- override val nullable = isNullable(metadata,index);
- }
- }
- /* Undefined cases. */
- case OTHER => new datatype.Unknown {
- override val nullable = isNullable(metadata, index);
- }
- /* Unsupported data types. */
- case REF | ARRAY | STRUCT =>
- sys.error ("I don't support composite data types yet.");
- case DATALINK | DISTINCT | JAVA_OBJECT | NULL =>
- sys.error ("I won't support strange data types.");
- /* Unsupported binary string data types. */
- case BINARY | BLOB | LONGVARBINARY | VARBINARY =>
- sys.error ("I don't support binary string data types yet.");
- /* Unsupported date and time data types. */
- case DATE | TIME | TIMESTAMP =>
- sys.error ("I don't support date and time data types yet.");
- /* Default case */
- case x => sys.error ("I don't know about this ("+metadata.getColumnTypeName(index)+") JDBC type.")
- }
- }
-}
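
Aside: the deleted Factory performs two mappings over JDBC metadata: the int-coded nullability of a column becomes an Option[Boolean], and the column's java.sql.Types code is dispatched on with a match. A small sketch of just those mappings, without consulting a real ResultSetMetaData (the describe categories are illustrative, not the removed datatype classes):

    // Sketch (illustrative, not from the patch): nullability and type-code
    // dispatch using the standard JDBC constants.
    import java.sql.{ResultSetMetaData, Types}

    object JdbcTypeMappingDemo {
      def nullability(code: Int): Option[Boolean] =
        if (code == ResultSetMetaData.columnNoNulls) Some(false)
        else if (code == ResultSetMetaData.columnNullable) Some(true)
        else None                                   // columnNullableUnknown

      def describe(sqlType: Int): String = sqlType match {
        case Types.BOOLEAN | Types.BIT      => "boolean"
        case Types.SMALLINT | Types.INTEGER => "exact numeric (fixed width)"
        case Types.NUMERIC | Types.DECIMAL  => "exact numeric (declared precision)"
        case Types.REAL | Types.DOUBLE      => "approximate numeric"
        case Types.CHAR | Types.VARCHAR     => "character string"
        case other                          => "unsupported (" + other + ")"
      }

      def main(args: Array[String]): Unit = {
        println(nullability(ResultSetMetaData.columnNullable)) // Some(true)
        println(describe(Types.NUMERIC))                       // exact numeric (declared precision)
      }
    }
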
diff --git a/src/dbc/scala/dbc/datatype/Numeric.scala b/src/dbc/scala/dbc/datatype/Numeric.scala
deleted file mode 100644
index c13f454dde..0000000000
--- a/src/dbc/scala/dbc/datatype/Numeric.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store numbers. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Numeric[Type](_nativeTypeId: DataType.Id) extends DataType {
-
- type NativeType = Type;
- val nativeTypeId = _nativeTypeId;
-
- /** The radix in which the precision (and scale when appliable) is defined.
- * ISO-9075 only allows 2 and 10 for this value.
- */
- def precisionRadix: Int;
-
- /** The number of significant digits for that number. */
- def precision: Int;
-
- /** Whether the number is signed or not. */
- def signed: scala.Boolean;
-
-}
diff --git a/src/dbc/scala/dbc/datatype/String.scala b/src/dbc/scala/dbc/datatype/String.scala
deleted file mode 100644
index 291504f777..0000000000
--- a/src/dbc/scala/dbc/datatype/String.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store strings of elements.
- */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class String extends DataType {
-
- /** The maximal possible length of the string defined in characters.
- * This is an implementation-specific value.
- */
- def maxLength: Option[Int] = None;
-
-}
diff --git a/src/dbc/scala/dbc/datatype/Unknown.scala b/src/dbc/scala/dbc/datatype/Unknown.scala
deleted file mode 100644
index 14a33c6be9..0000000000
--- a/src/dbc/scala/dbc/datatype/Unknown.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** The SQL type for a truth value. */
-@deprecated(DbcIsDeprecated, "2.9.0") class Unknown extends DataType {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: Unknown =>
- nativeTypeId == dt.nativeTypeId
- case _ =>
- false
- }
-
- def isSubtypeOf(datatype: DataType) = true;
-
- type NativeType = AnyRef;
- val nativeTypeId = DataType.OBJECT;
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String =
- sys.error("The 'UNKNOWN' data type cannot be represented.");
-
-}
diff --git a/src/dbc/scala/dbc/exception/IncompatibleSchema.scala b/src/dbc/scala/dbc/exception/IncompatibleSchema.scala
deleted file mode 100644
index c8d53bbf1a..0000000000
--- a/src/dbc/scala/dbc/exception/IncompatibleSchema.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package exception
-
-
-/** An exception thrown when a relation's schema does not match the expected schema. */
-@deprecated(DbcIsDeprecated, "2.9.0") case class IncompatibleSchema (
- expectedSchema: List[DataType],
- foundSchema: List[DataType]
-) extends Exception;
diff --git a/src/dbc/scala/dbc/package.scala b/src/dbc/scala/dbc/package.scala
deleted file mode 100644
index b1552e11d3..0000000000
--- a/src/dbc/scala/dbc/package.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package scala
-
-package object dbc {
- final val DbcIsDeprecated =
- "scala.dbc will be removed after version 2.9. Use an active sql library such as scalaquery instead."
-}
\ No newline at end of file
diff --git a/src/dbc/scala/dbc/result/Field.scala b/src/dbc/scala/dbc/result/Field.scala
deleted file mode 100644
index cd3309bb14..0000000000
--- a/src/dbc/scala/dbc/result/Field.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result
-
-
-import scala.dbc.datatype._
-import scala.dbc.value._
-
-/** An ISO-9075:2003 (SQL) table field. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Field {
-
- /** The content (value) of the field. The type of this value is undefined,
- /** The content (value) of the field. The type of this value is undefined;
- * function defined in the field object.
- */
- def content: Value
-
- final def value[Type <: Value]: Type =
- content.asInstanceOf[Type]
-
- final def exactNumericValue[NativeType] =
- content.asInstanceOf[dbc.value.ExactNumeric[NativeType]]
-
- final def approximateNumericValue[NativeType] =
- content.asInstanceOf[dbc.value.ApproximateNumeric[NativeType]]
-
- final def booleanValue =
- content.asInstanceOf[dbc.value.Boolean]
-
- final def characterValue =
- content.asInstanceOf[dbc.value.Character]
-
- final def characterLargeObjectValue =
- content.asInstanceOf[dbc.value.CharacterLargeObject]
-
- final def characterVaryingValue =
- content.asInstanceOf[dbc.value.CharacterVarying]
-
- final def unknownValue =
- content.asInstanceOf[dbc.value.Unknown]
-
- /** The tuple that contains this field. */
- def originatingTuple: Tuple
-
- /** The field metadata attached to this field. */
- def metadata: FieldMetadata
-
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object Field {
-
- implicit def fieldToValue (field: Field): Value = field.content
-
-}
diff --git a/src/dbc/scala/dbc/result/FieldMetadata.scala b/src/dbc/scala/dbc/result/FieldMetadata.scala
deleted file mode 100644
index 3c2de297d0..0000000000
--- a/src/dbc/scala/dbc/result/FieldMetadata.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result
-
-
-/** The class <code>FieldMetadata</code> provides information attached to
- * a field about its content and its relationship to the originating database.
- */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class FieldMetadata {
-
- /** The name of the field. */
- def name: String
-
- /** The index of the field in the tuple. */
- def index: Int
-
- /** The expected type of the field. This information is used for automatic
- * transformation of the field value into a usable type.
- */
- def datatype: DataType
-
- /** The name of the catalog in the database from which the field originates */
- def catalog: String
-
- /** The name of the schema in the database from which the field originates */
- def schema: String
-
- /** The name of the table in the database from which the field originates */
- def table: String
-
-}
diff --git a/src/dbc/scala/dbc/result/Relation.scala b/src/dbc/scala/dbc/result/Relation.scala
deleted file mode 100644
index 98d653d61e..0000000000
--- a/src/dbc/scala/dbc/result/Relation.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result
-
-
-/** An ISO-9075:2003 (SQL) table. This is equivalent to a relation in the
- * relational model. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Relation
-extends collection.AbstractIterable[Tuple]
- with Iterable[Tuple] {
-
- /** The statement that generated this relation. */
- def statement: scala.dbc.statement.Relation
-
- /** A JDBC result containing this relation. */
- protected def sqlResult: java.sql.ResultSet
-
- /** A JDBC metadata object attached to the relation. */
- protected def sqlMetadata: java.sql.ResultSetMetaData = sqlResult.getMetaData()
-
- /** Metadata about all fields in a tuple of the relation. */
- def metadata: List[FieldMetadata] =
- for (count <- List.range(1, sqlMetadata.getColumnCount()+1)) yield
- new FieldMetadata {
- val name: String = sqlMetadata.getColumnName(count)
- val index: Int = count
- val datatype: DataType = dbc.datatype.Factory.create(sqlMetadata,count)
- val catalog: String = sqlMetadata.getCatalogName(count)
- val schema: String = sqlMetadata.getSchemaName(count)
- val table: String = sqlMetadata.getTableName(count)
- }
-
- /** Metadata about the field at the given index. If there is no such
- * field, <code>None</code> is returned instead. */
- def metadataFor (index:Int): Option[FieldMetadata] = {
- val meta = metadata
- if (meta.length > index)
- Some(meta(index))
- else
- None
- }
-
- /** Metadata about the field with the given column name. If there is no
- * such field, <code>None</code> is returned instead. */
- def metadataFor (name:String): Option[FieldMetadata] =
- metadata.find(f=>(f.name==name));
-
- /** An iterator on the tuples of the relation.
- * <h3>Caution</h3> A Relation has only a single iterator, due to limitations
- * in the underlying DBMS. This means that if this method is called multiple times, all returned
- * iterators will share the same state. */
- def iterator: Iterator[Tuple] = new collection.AbstractIterator[Tuple] {
- protected val result: java.sql.ResultSet = Relation.this.sqlResult
- def hasNext: Boolean = resultNext
- private var resultNext = result.next()
- def next: Tuple = {
- if (resultNext) {
- val newTuple = new Tuple {
- val me = this
- val originatingRelation = Relation.this
- val fields: List[Field] = for (fieldMetadata <- metadata) yield
- new Field {
- val metadata = fieldMetadata
- val content = dbc.value.Factory.create(result,metadata.index,metadata.datatype)
- val originatingTuple = me
- }
- }
- resultNext = result.next()
- newTuple
- }
- else sys.error("next on empty iterator")
- }
- }
-
-}
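For context, the deleted result.Relation exposes column metadata through metadataFor. A minimal sketch of how it could be queried, assuming a Relation value obtained elsewhere by executing a statement against a Database (the column name "name" is illustrative only):

    import scala.dbc.result.Relation

    // Look up a column's metadata by name on an already-executed query result;
    // both metadataFor overloads return Option[FieldMetadata].
    def describe(rel: Relation): Unit =
      rel.metadataFor("name") match {
        case Some(meta) => println(meta.table + "." + meta.name + ": " + meta.datatype.sqlString)
        case None       => println("relation has no column 'name'")
      }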
diff --git a/src/dbc/scala/dbc/result/Status.scala b/src/dbc/scala/dbc/result/Status.scala
deleted file mode 100644
index d3152a58ab..0000000000
--- a/src/dbc/scala/dbc/result/Status.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result;
-
-
-import scala.dbc.datatype._;
-
-/** An object containing the status of a query */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Status[ResultType] {
-
- /** The statement that generated this status result. */
- def statement: scala.dbc.statement.Statement;
-
- /** The number of elements modified or added by this statement. */
- def touchedCount: Option[Int];
-
- def result: ResultType;
-
-}
diff --git a/src/dbc/scala/dbc/result/Tuple.scala b/src/dbc/scala/dbc/result/Tuple.scala
deleted file mode 100644
index 80ab5c22aa..0000000000
--- a/src/dbc/scala/dbc/result/Tuple.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result;
-
-
-/** An ISO-9075:2003 (SQL) table row. This is equivalent to a tuple in the relational model. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Tuple {
-
- /** All the fields contained in the tuple. */
- def fields: List[Field];
-
- /** The relation that contains the tuple. */
- def originatingRelation: Relation;
-
- /** The field at the given index. If there is no such field (that is, the index is out of bounds), an <code>IndexOutOfBoundsException</code> is thrown instead. */
- def apply (index:Int): Field =
- try {
- fields(index)
- } catch {
- case e =>
- throw new java.lang.IndexOutOfBoundsException("Field at index "+index+" does not exist in relation");
- }
-
- /** The field with the given column name. If there is no such field, an <code>IndexOutOfBoundsException</code> is thrown instead. */
- def apply (name:String): Field = {
- def findField (fields: List[Field], name:String): Field = fields match {
- case Nil => throw new java.lang.IndexOutOfBoundsException("Field '"+name+"' does not exist in relation")
- case field :: _ if (field.metadata.name == name) => field
- case field :: fields => findField (fields, name)
- }
- findField (fields, name);
- }
-}
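The deleted Tuple gives positional and by-name access to fields, while the relation's single-pass iterator produces the tuples. A short sketch of reading one column from every row, assuming the relation comes from an executed statement and that scala.dbc.Value exposes sqlString as it does elsewhere in this library:

    import scala.dbc.result.Relation

    // Walk the single-pass iterator and read the "name" field of every tuple;
    // tuple("name") throws IndexOutOfBoundsException if the column is missing.
    def printNames(rel: Relation): Unit =
      for (tuple <- rel)                         // Relation is an Iterable[Tuple]
        println(tuple("name").content.sqlString)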
diff --git a/src/dbc/scala/dbc/statement/DerivedColumn.scala b/src/dbc/scala/dbc/statement/DerivedColumn.scala
deleted file mode 100644
index ae05df986a..0000000000
--- a/src/dbc/scala/dbc/statement/DerivedColumn.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class DerivedColumn {
-
- /** The value for the column. This value can be of any type but must be
- * calculated from fields that appear in a relation that takes part
- * in the query.
- */
- def valueExpression: Expression
-
- /** A new name for this field. This name must be unique for the query in
- * which the column takes part.
- */
- def asClause: Option[String]
-
- /** A SQL-99 compliant string representation of the derived column
- * sub-statement. This only has a meaning inside a select statement.
- */
- def sqlString: String =
- valueExpression.sqlInnerString +
- (asClause match {
- case None => ""
- case Some(ac) => " AS " + ac
- })
-
-}
diff --git a/src/dbc/scala/dbc/statement/Expression.scala b/src/dbc/scala/dbc/statement/Expression.scala
deleted file mode 100644
index c2da91e9ef..0000000000
--- a/src/dbc/scala/dbc/statement/Expression.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** An expression that calculates some value from fields. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Expression extends Relation {
-
- def fieldTypes: List[DataType] = Nil
-
- /** A SQL-99 compliant string representation of the expression. */
- def sqlString: String = "SELECT " + sqlInnerString
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement.
- */
- def sqlInnerString: String
-
-}
diff --git a/src/dbc/scala/dbc/statement/Insert.scala b/src/dbc/scala/dbc/statement/Insert.scala
deleted file mode 100644
index 189ccec54f..0000000000
--- a/src/dbc/scala/dbc/statement/Insert.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-import scala.dbc.statement.expression._
-
-/** An insertion of values into a table. */
-@deprecated(DbcIsDeprecated, "2.9.0") case class Insert(insertionTarget: String, insertionData: InsertionData)
- extends Status {
-
- /** A SQL-99 compliant string representation of the select statement. */
- def sqlString: String =
- "INSERT INTO " + insertionTarget + " " + insertionData.sqlString
-
- /** The name of the table where the data should be added. */
- //def insertionTarget: String
-
- /** The data that will be added to the table. */
- //def insertionData: InsertionData
-
-}
diff --git a/src/dbc/scala/dbc/statement/InsertionData.scala b/src/dbc/scala/dbc/statement/InsertionData.scala
deleted file mode 100644
index e91ad7efe6..0000000000
--- a/src/dbc/scala/dbc/statement/InsertionData.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-import scala.dbc.statement.expression._
-
-/** Data to be inserted into a table in an <code>Insert</code>. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class InsertionData {
- def sqlString: String
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object InsertionData {
- /** Insertion of data resulting from a query on the database. */
- @deprecated(DbcIsDeprecated, "2.9.0") case class Subquery(query: Relation) extends InsertionData {
- def sqlString = query.sqlString
- }
- /** Insertion of data as explicitly defined values. */
- @deprecated(DbcIsDeprecated, "2.9.0") case class Constructor(
- columnNames: Option[List[String]],
- columnValues: List[Expression]
- ) extends InsertionData {
- def sqlString =
- (columnNames match {
- case None => ""
- case Some(cn) => cn.mkString(" (",", ",")")
- }) +
- " VALUES" +
- columnValues.map(e => e.sqlInnerString).mkString(" (",", ",")")
- }
-}
diff --git a/src/dbc/scala/dbc/statement/IsolationLevel.scala b/src/dbc/scala/dbc/statement/IsolationLevel.scala
deleted file mode 100644
index b31614c3dd..0000000000
--- a/src/dbc/scala/dbc/statement/IsolationLevel.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class IsolationLevel {
- def sqlString: String
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object IsolationLevel {
- case object ReadUncommitted extends IsolationLevel {
- def sqlString = "ISOLATION LEVEL READ UNCOMMITTED"
- }
- case object ReadCommitted extends IsolationLevel {
- def sqlString = "ISOLATION LEVEL READ COMMITTED"
- }
- case object RepeatableRead extends IsolationLevel {
- def sqlString = "ISOLATION LEVEL REPEATABLE READ"
- }
- case object Serializable extends IsolationLevel {
- def sqlString = "ISOLATION LEVEL SERIALIZABLE"
- }
-}
diff --git a/src/dbc/scala/dbc/statement/JoinType.scala b/src/dbc/scala/dbc/statement/JoinType.scala
deleted file mode 100644
index 698612b10d..0000000000
--- a/src/dbc/scala/dbc/statement/JoinType.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A join behaviour in a <code>Jointure</code>. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class JoinType {
- /** A SQL-99 string representation of the join behaviour. */
- def sqlString: String
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object JoinType {
-
- /** A join behaviour where a joined tuple is created only when a
- * corresponding tuple exists in both original relations.
- */
- case object Inner extends JoinType {
- val sqlString = "INNER JOIN"
- }
-
- /** A join behaviour family where a joined tuple is created even when a
- * tuple has no corresponding tuple in the other relation. The fields
- * populated by values of the other tuple will receive the NULL value.
- */
- abstract class Outer extends JoinType
-
- object Outer {
- /** An outer join behaviour where there will be at least one tuple for
- * every tuple in the left relation.
- */
- case object Left extends Outer {
- val sqlString = "LEFT OUTER JOIN"
- }
- /** An outer join behaviour where there will be at least one tuple for
- * every tuple in the right relation.
- */
- case object Right extends Outer {
- val sqlString = "RIGHT OUTER JOIN"
- }
- /** An outer join behaviour where there will be at least one tuple for
- * every tuple in both right and left relations.
- */
- case object Full extends Outer {
- val sqlString = "FULL OUTER JOIN"
- }
- }
-}
diff --git a/src/dbc/scala/dbc/statement/Jointure.scala b/src/dbc/scala/dbc/statement/Jointure.scala
deleted file mode 100644
index 74c871cc3e..0000000000
--- a/src/dbc/scala/dbc/statement/Jointure.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A jointure between two relations. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Jointure extends Relation {
-
- /** The relation on the left part of the join. */
- def leftRelation: Relation
-
- /** The relation on the right part of the join. */
- def rightRelation: Relation
-
- /** The type of the jointure. */
- def joinType: JoinType
-
- /** The condition on which the jointure needs to be done. */
- def joinCondition: Option[Expression]
-
- /** A SQL-99 compliant string representation of the relation statement. */
- def sqlString: String = "SELECT * FROM " + sqlInnerString
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside a query.
- */
- def sqlInnerString: String =
- leftRelation.sqlInnerString + " " +
- joinType.sqlString + " " +
- rightRelation.sqlInnerString +
- (joinCondition match {
- case Some(jc) => jc.sqlString
- case None => ""
- })
-
-}
diff --git a/src/dbc/scala/dbc/statement/Relation.scala b/src/dbc/scala/dbc/statement/Relation.scala
deleted file mode 100644
index 787707ee82..0000000000
--- a/src/dbc/scala/dbc/statement/Relation.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement;
-
-
-/** A statement that returns a relation. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Relation extends Statement {
-
- def isCompatibleType: (DataType,DataType)=>Boolean =
- ((dt,wdt)=>dt.isSubtypeOf(wdt));
-
- def typeCheck (relation: result.Relation): Unit = {
- val sameType: Boolean = (
- relation.metadata.length == fieldTypes.length &&
- (relation.metadata.zip(fieldTypes).forall({case Pair(field,expectedType) =>
- isCompatibleType(field.datatype, expectedType)}))
- );
- if (!sameType)
- throw new exception.IncompatibleSchema(fieldTypes,relation.metadata.map(field=>field.datatype));
- }
-
- def fieldTypes: List[DataType];
-
- def sqlTypeString: String =
- if (fieldTypes.isEmpty)
- "UNTYPED"
- else
- fieldTypes.map(dt=>dt.sqlString).mkString("RELATION (",", ",")");
-
- /** A SQL-99 compliant string representation of the statement. */
- def sqlString: String;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String;
-
- /** Executes the statement on the given database. */
- def execute (database: scala.dbc.Database): scala.dbc.result.Relation = {
- database.executeStatement(this);
- }
-
- def execute (database:scala.dbc.Database, debug:Boolean): scala.dbc.result.Relation = {
- database.executeStatement(this,debug);
- }
-
-}
diff --git a/src/dbc/scala/dbc/statement/Select.scala b/src/dbc/scala/dbc/statement/Select.scala
deleted file mode 100644
index a9ca0212ed..0000000000
--- a/src/dbc/scala/dbc/statement/Select.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A statement that when executed on a database will return a relation.
- * The returned relation will be a subset of a table in the database or
- * a jointure between such subsets. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Select extends Relation {
-
- /** Defines if duplicated tuples should be removed from the returned
- * relation. <h3>Compatibility notice</h3> Some DBMS (PostgreSQL) allow
- * uniqueness constraints on an arbitrary field instead of the entire
- * tuple. */
- def setQuantifier: Option[SetQuantifier]
-
- /** Defines the output fields that a tuple in the returned relation will
- * contain, and their content with respect to the tables in the
- * database. If the fields are not specified (that is the list is
- * empty), all possible input fields will be returned. <h3>Compatibility
- * notice</h3> SQL's qualified asterisk select sublist is not
- * available. */
- def selectList: List[DerivedColumn]
-
- /** Defines the relations from which the query will obtain its data.*/
- def fromClause: List[Relation]
-
- /** Defines condition that must be true in the returned relation's tuples.
- * This value expression must return a boolean or boolean-compatible
- * value. This condition is applied before any GROUP BY clause.
- */
- def whereClause: Option[Expression]
-
- /** Defines the grouping of the returned relation's tuples. One tuple is
- * returned for every group. The value of <code>selectList</code> must
- * use aggregate functions for calculation.
- */
- def groupByClause: Option[List[Expression]]
-
- /** Defines conditions that must be true in the returned relation's tuples.
- * The value expression must return a boolean and can only refer to fields
- * that are grouped or to any field from inside an aggregate function.
- */
- def havingClause: Option[Expression]
-
- /* def windowClause: Option[_]; */
-
- /** A SQL-99 compliant string representation of the select statement. */
- def sqlString: String = (
- "SELECT" +
- (setQuantifier match {
- case None => ""
- case Some(sq) => " " + sq.sqlString
- }) +
- (selectList match {
- case Nil => " *"
- case _ => (" " + selectList.tail.foldLeft(selectList.head.sqlString)
- ((name:String, dc:DerivedColumn) => name + ", " + dc.sqlString))
- }) +
- (fromClause match {
- case Nil => sys.error("Empty from clause is not allowed")
- case _ => (" FROM " + fromClause.tail.foldLeft(fromClause.head.sqlInnerString)
- ((name:String, rel:Relation) => name + ", " + rel.sqlInnerString))
- }) +
- (whereClause match {
- case None => ""
- case Some(expr) => " WHERE " + expr.sqlInnerString
- }) +
- (groupByClause match {
- case None => ""
- case Some(gbl) => gbl match {
- case Nil => sys.error("Empty group by clause is not allowed")
- case _ =>
- (" GROUP BY " +
- gbl.tail.foldLeft(gbl.head.sqlInnerString)
- ((name:String, gb) => name + ", " + gb.sqlInnerString))
- }
- }) +
- (havingClause match {
- case None => ""
- case Some(expr) => " HAVING " + expr.sqlString
- })
- );
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside a query.
- */
- def sqlInnerString: String = "("+sqlString+")"
-
-}
diff --git a/src/dbc/scala/dbc/statement/SetQuantifier.scala b/src/dbc/scala/dbc/statement/SetQuantifier.scala
deleted file mode 100644
index 77a4b79b8d..0000000000
--- a/src/dbc/scala/dbc/statement/SetQuantifier.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A set quantifier that defines the collection type of a relation. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class SetQuantifier {
- /** A SQL-99 compliant string representation of the set quantifier. */
- def sqlString: String
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object SetQuantifier {
-
- /** A set quantifier that defines a relation as being a bag. That means
- * that duplicates are allowed.
- */
- case object AllTuples extends SetQuantifier {
- /** A SQL-99 compliant string representation of the set quantifier. */
- def sqlString: String = "ALL"
- }
-
- /** A set quantifier that defines a relation as being a set. That means
- * that duplicates are not allowed and will be pruned.
- */
- case object DistinctTuples extends SetQuantifier {
- /** A SQL-99 compliant string representation of the set quantifier. */
- def sqlString: String = "DISTINCT"
- }
-}
diff --git a/src/dbc/scala/dbc/statement/Status.scala b/src/dbc/scala/dbc/statement/Status.scala
deleted file mode 100644
index 0ce64b978d..0000000000
--- a/src/dbc/scala/dbc/statement/Status.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-import scala.dbc.Database
-import scala.dbc.result
-
-/** A statement that changes the status of the database. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Status extends Statement {
-
- /** A SQL-99 compliant string representation of the statement. */
- def sqlString: String
-
- /** Executes the statement on the given database. */
- def execute(database: Database): result.Status[Unit] = {
- database.executeStatement(this)
- }
-
- def execute(database: Database, debug: Boolean): result.Status[Unit] = {
- database.executeStatement(this, debug)
- }
-
-}
diff --git a/src/dbc/scala/dbc/statement/Table.scala b/src/dbc/scala/dbc/statement/Table.scala
deleted file mode 100644
index e729f801a3..0000000000
--- a/src/dbc/scala/dbc/statement/Table.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A reference to a table in the database.
- * @author Gilles Dubochet
- * @version 1.0 */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Table extends Relation {
-
- /** The name of the table in the database. */
- def tableName: String
-
- /** The name that the table will be called in the enclosing statement. */
- def tableRename: Option[String]
-
- /** A SQL-99 compliant string representation of the relation statement. */
- def sqlString: String = "SELECT * FROM " + tableName
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside a query. */
- def sqlInnerString: String =
- tableName +
- (tableRename match {
- case None => ""
- case Some(rename) => " AS " + rename
- })
-
-}
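Taken together, Table, JoinType and Jointure compose their SQL purely as strings. A minimal sketch of an inner join between two hypothetical tables, instantiating the abstract classes deleted above (an empty fieldTypes list is rendered as an UNTYPED relation):

    import scala.dbc.statement._

    // Two table references with illustrative names.
    val person = new Table { val tableName = "person"; val tableRename = None; val fieldTypes = Nil }
    val city   = new Table { val tableName = "city";   val tableRename = None; val fieldTypes = Nil }

    val joined = new Jointure {
      val leftRelation  = person
      val rightRelation = city
      val joinType      = JoinType.Inner
      val joinCondition = None
      val fieldTypes    = Nil
    }

    // prints: SELECT * FROM person INNER JOIN city
    println(joined.sqlString)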
diff --git a/src/dbc/scala/dbc/statement/Transaction.scala b/src/dbc/scala/dbc/statement/Transaction.scala
deleted file mode 100644
index 1740dae3f9..0000000000
--- a/src/dbc/scala/dbc/statement/Transaction.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement;
-
-
-/** A statement that changes the status of the database. */
-@deprecated(DbcIsDeprecated, "2.9.0") case class Transaction [ResultType] (
- transactionBody: (scala.dbc.Database=>ResultType),
- accessMode: Option[AccessMode],
- isolationLevel: Option[IsolationLevel]
-) extends Statement {
-
- /** A SQL-99 compliant string representation of the statement. */
- def sqlStartString: String = (
- "START TRANSACTION" +
- (Pair(accessMode,isolationLevel) match {
- case Pair(None,None) => ""
- case Pair(Some(am),None) => " " + am.sqlString
- case Pair(None,Some(il)) => " " + il.sqlString
- case Pair(Some(am),Some(il)) => " " + am.sqlString + ", " + il.sqlString
- })
- );
-
- def sqlCommitString: String = {
- "COMMIT"
- }
-
- def sqlAbortString: String = {
- "ROLLBACK"
- }
-
- //def transactionBody: (()=>Unit);
-
- //def accessMode: Option[AccessMode];
-
- //def isolationLevel: Option[IsolationLevel];
-
- def execute (database: scala.dbc.Database): scala.dbc.result.Status[ResultType] = {
- database.executeStatement(this);
- }
-
- def execute (database: scala.dbc.Database, debug: Boolean): scala.dbc.result.Status[ResultType] = {
- database.executeStatement(this,debug);
- }
-
-}
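The deleted Transaction only frames the SQL; the body itself runs through Database.executeStatement. A sketch of the generated framing, assuming the rest of the deprecated library (Database, AccessMode) is still on the classpath:

    import scala.dbc.statement._

    // The body would receive the Database when executed; only the framing
    // strings are of interest here.
    val tx = Transaction[Unit](
      transactionBody = (_: scala.dbc.Database) => (),
      accessMode      = None,
      isolationLevel  = Some(IsolationLevel.Serializable)
    )

    println(tx.sqlStartString)  // START TRANSACTION ISOLATION LEVEL SERIALIZABLE
    println(tx.sqlCommitString) // COMMIT
    println(tx.sqlAbortString)  // ROLLBACK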
diff --git a/src/dbc/scala/dbc/statement/Update.scala b/src/dbc/scala/dbc/statement/Update.scala
deleted file mode 100644
index 836549a4be..0000000000
--- a/src/dbc/scala/dbc/statement/Update.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement;
-
-
-import scala.dbc.statement.expression._;
-
-/** An update of the state of a table. */
-@deprecated(DbcIsDeprecated, "2.9.0") case class Update (
- updateTarget: String,
- setClauses: List[SetClause],
- whereClause: Option[Expression]
-) extends Status {
-
-
- /** A SQL-99 compliant string representation of the select statement. */
- def sqlString: String = (
- "UPDATE " +
- updateTarget +
- " SET " + setClauses.map(sc=>sc.sqlString).mkString("",", ","") +
- (whereClause match {
- case None => ""
- case Some(expr) => " WHERE " + expr.sqlString
- })
- );
-
- /** The name of the table that should be updated. */
- //def updateTarget: String;
-
- /** The data that will be added to the table. */
- //def setClauses: List[SetClause];
-
- /** Defines condition that must be true in the tuples that will be updated.
- * This value expression must return a boolean or boolean-compatible
- * value. */
- //def whereClause: Option[scala.dbc.statement.expression.Expression];
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/Aggregate.scala b/src/dbc/scala/dbc/statement/expression/Aggregate.scala
deleted file mode 100644
index c42bffe20e..0000000000
--- a/src/dbc/scala/dbc/statement/expression/Aggregate.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Aggregate extends Expression {
-
- def aggregateName: String;
-
- def setFunction: SetFunction;
-
- def filterClause: Option[Expression];
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = (
- aggregateName +
- "(" + setFunction.sqlString + ")" +
- (filterClause match {
- case None => ""
- case Some(fc) => " FILTER (WHERE " + fc.sqlString + ")"
- })
- )
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala b/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala
deleted file mode 100644
index 32f016dbf6..0000000000
--- a/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class BinaryOperator extends Expression {
-
- /** The name of the operator. */
- def operator: String;
-
- /** The expression applied on the left of the operator. */
- def leftOperand: Expression;
-
- /** The expression applied on the right of the operator. */
- def rightOperand: Expression;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = {
- leftOperand.sqlInnerString + " " + operator + " " + rightOperand.sqlInnerString
- }
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/Constant.scala b/src/dbc/scala/dbc/statement/expression/Constant.scala
deleted file mode 100644
index 70ec7819dc..0000000000
--- a/src/dbc/scala/dbc/statement/expression/Constant.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Constant extends Expression {
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = constantValue.sqlString;
-
- /** The value of the constant. */
- def constantValue: Value;
-}
diff --git a/src/dbc/scala/dbc/statement/expression/Default.scala b/src/dbc/scala/dbc/statement/expression/Default.scala
deleted file mode 100644
index 78204d0172..0000000000
--- a/src/dbc/scala/dbc/statement/expression/Default.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-case object Default extends Expression {
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = "DEFAULT";
-
-}
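Combined with the Insert and InsertionData.Constructor statements deleted earlier in this diff, the Default expression is enough to render a complete INSERT. A small sketch with illustrative table and column names:

    import scala.dbc.statement._
    import scala.dbc.statement.expression.Default

    // Each listed column takes its DEFAULT value; Constructor renders the
    // optional column list followed by the VALUES clause.
    val insert = Insert(
      insertionTarget = "person",
      insertionData   = InsertionData.Constructor(
        columnNames  = Some(List("name", "age")),
        columnValues = List(Default, Default)
      )
    )

    // prints: INSERT INTO person  (name, age) VALUES (DEFAULT, DEFAULT)
    println(insert.sqlString)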
diff --git a/src/dbc/scala/dbc/statement/expression/Field.scala b/src/dbc/scala/dbc/statement/expression/Field.scala
deleted file mode 100644
index 9a90903a99..0000000000
--- a/src/dbc/scala/dbc/statement/expression/Field.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Field extends Expression {
-
- /** The name of the schema in the database where the field is located. */
- def schemaName: Option[String] = None;
-
- /** The name of the table in the database where the field is located. */
- def tableName: Option[String];
-
- /** The name of the field in the database. */
- def fieldName: String;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = (
- (schemaName match {
- case None => ""
- case Some(sn) => sn + "."
- }) +
- (tableName match {
- case None => ""
- case Some(tn) => tn + "."
- }) + fieldName
- )
-
-}
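A field reference renders its optional schema and table qualifiers in front of the column name. A brief sketch with illustrative names:

    import scala.dbc.statement.expression.Field

    // schemaName defaults to None and is overridden here; tableName and
    // fieldName are the abstract members.
    val qualified = new Field {
      override val schemaName = Some("public")
      val tableName           = Some("person")
      val fieldName           = "name"
    }

    println(qualified.sqlInnerString)  // public.person.name
    println(qualified.sqlString)       // SELECT public.person.name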
diff --git a/src/dbc/scala/dbc/statement/expression/FunctionCall.scala b/src/dbc/scala/dbc/statement/expression/FunctionCall.scala
deleted file mode 100644
index 962cf209b9..0000000000
--- a/src/dbc/scala/dbc/statement/expression/FunctionCall.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") case class FunctionCall (
- functionName: String,
- arguments: List[Expression]
-) extends Expression {
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = {
- functionName + "(" + arguments.map(e => e.sqlInnerString).mkString("",", ","") + ")"
- }
-
- /** The name of the function to call. */
- //def functionName: String;
-
- /** A list of all argument expressions to pass to the function, in order. */
- //def arguments: List[Expression];
-
-}
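With the argument rendering corrected above, a function call composes directly with a field reference, for instance:

    import scala.dbc.statement.expression._

    // An unqualified column wrapped in an aggregate-style call (illustrative names).
    val age = new Field { val tableName = Some("person"); val fieldName = "age" }

    // prints: max(person.age)
    println(FunctionCall("max", List(age)).sqlInnerString)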
diff --git a/src/dbc/scala/dbc/statement/expression/Select.scala b/src/dbc/scala/dbc/statement/expression/Select.scala
deleted file mode 100644
index 7a6a4a21c4..0000000000
--- a/src/dbc/scala/dbc/statement/expression/Select.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Select extends Expression {
-
- /** The actual select statement */
- def selectStatement: statement.Select;
-
- /** A SQL-99 compliant string representation of the expression. */
- override def sqlString: String = selectStatement.sqlString;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = "("+selectStatement.sqlString+")";
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/SetFunction.scala b/src/dbc/scala/dbc/statement/expression/SetFunction.scala
deleted file mode 100644
index 060b2236f2..0000000000
--- a/src/dbc/scala/dbc/statement/expression/SetFunction.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class SetFunction {
- /** A SQL-99 compliant string representation of the set quantifier. */
- def sqlString: String;
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object SetFunction {
- abstract class Asterisk extends SetFunction {
- def sqlString = "(*)";
- }
- abstract class General extends SetFunction {
- def setQuantifier: Option[SetQuantifier];
- def valueExpression: Expression;
- def sqlString = (
- "(" +
- (setQuantifier match {
- case None => ""
- case Some(sq) => sq.sqlString + " "
- }) +
- valueExpression.sqlString + ")"
- );
- }
- abstract class Binary extends SetFunction {
- def sqlString = sys.error("Binary set function is not supported yet.");
- }
-}
diff --git a/src/dbc/scala/dbc/statement/expression/TypeCast.scala b/src/dbc/scala/dbc/statement/expression/TypeCast.scala
deleted file mode 100644
index dbb8dc1b4d..0000000000
--- a/src/dbc/scala/dbc/statement/expression/TypeCast.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") case class TypeCast (
- expression: Expression,
- castType: DataType
-) extends Expression {
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = {
- "CAST (" + expression.sqlInnerString + " AS " + castType.sqlString + ")";
- }
-
- /** The expression that will be casted. */
- //def expression: Expression;
-
- /** The type to which to cast. */
- //def castType: scala.dbc.datatype.DataType;
-}
diff --git a/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala b/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala
deleted file mode 100644
index 4172c451fb..0000000000
--- a/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class UnaryOperator extends Expression {
-
- /** The name of the operator */
- def operator: String;
-
- /** Whether the operator comes before the operand or not. */
- def operatorIsLeft: Boolean;
-
- /** The operand applied to the operator. */
- def operand: Expression;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = operatorIsLeft match {
- case true => operator + " " + operand.sqlInnerString;
- case false => operand.sqlInnerString + " " + operator;
- }
-}
diff --git a/src/dbc/scala/dbc/syntax/DataTypeUtil.scala b/src/dbc/scala/dbc/syntax/DataTypeUtil.scala
deleted file mode 100644
index a0ebd1713e..0000000000
--- a/src/dbc/scala/dbc/syntax/DataTypeUtil.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package syntax;
-
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-
-@deprecated(DbcIsDeprecated, "2.9.0") object DataTypeUtil {
-
- final val java_lang_Integer_SIZE = 32;
- final val java_lang_Long_SIZE = 64;
-
- def boolean = new datatype.Boolean;
- def tinyint = new datatype.ExactNumeric[Byte](dbc.DataType.BYTE) {
- val precisionRadix = 2;
- val precision = 8;
- val signed = true;
- val scale = 0;
- }
- def smallint = new datatype.ExactNumeric[Short](dbc.DataType.SHORT) {
- val precisionRadix = 2;
- val precision = 16;
- val signed = true;
- val scale = 0;
- }
- def integer = new datatype.ExactNumeric[Int](dbc.DataType.INT) {
- val precisionRadix = 2;
- val precision = 32;
- val signed = true;
- val scale = 0;
- }
- def bigint = new datatype.ExactNumeric[Long](dbc.DataType.LONG) {
- val precisionRadix = 2;
- val precision = 64;
- val signed = true;
- val scale = 0;
- }
- def numeric (_precision:Int): DataType = numeric(_precision,0);
- def numeric (_precision:Int, _scale:Int): DataType =
- Pair(datatype.Factory.bytePrecision(_precision,true,true),_scale == 0) match {
- case Pair(bp,true) if (bp <= java_lang_Integer_SIZE) =>
- new datatype.ExactNumeric[Int](DataType.INT) {
- val precisionRadix = 10;
- val precision = _precision;
- val signed = true;
- val scale = 0;
- }
- case Pair(bp,true) if (bp <= java_lang_Long_SIZE) =>
- new datatype.ExactNumeric[Long](DataType.LONG) {
- val precisionRadix = 10;
- val precision = _precision;
- val signed = true;
- val scale = 0;
- }
- case Pair(_,true) =>
- new datatype.ExactNumeric[BigInteger](DataType.BIG_INTEGER) {
- val precisionRadix = 10;
- val precision = _precision;
- val signed = true;
- val scale = 0;
- }
- case Pair(_,false) =>
- new datatype.ExactNumeric[BigDecimal](DataType.BIG_DECIMAL) {
- val precisionRadix = 10;
- val precision = _precision;
- val signed = true;
- val scale = _scale;
- }
- }
- def real = new datatype.ApproximateNumeric[Float](DataType.FLOAT) {
- val precisionRadix = 2;
- val precision = 64;
- val signed = true;
- }
- def doublePrecision = new datatype.ApproximateNumeric[Double](DataType.DOUBLE) {
- val precisionRadix = 2;
- val precision = 128;
- val signed = true;
- }
- def character (_length: Int) = new datatype.Character {
- val length = _length;
- }
- def characterVarying (_length: Int) = new datatype.CharacterVarying {
- def length = _length;
- }
- def characterLargeObject = new datatype.CharacterLargeObject;
-
-}
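numeric picks a backing native type from the requested decimal precision and scale; the exact cut-offs come from datatype.Factory.bytePrecision, which is not part of this diff, so the comments below are indicative only:

    import scala.dbc.syntax.DataTypeUtil._

    val id    = numeric(5)      // narrow precision: backed by Int
    val big   = numeric(25)     // wider than 64 bits: backed by java.math.BigInteger
    val price = numeric(10, 2)  // non-zero scale: always backed by java.math.BigDecimal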
diff --git a/src/dbc/scala/dbc/syntax/Database.scala b/src/dbc/scala/dbc/syntax/Database.scala
deleted file mode 100644
index 4357fb7d4c..0000000000
--- a/src/dbc/scala/dbc/syntax/Database.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package syntax;
-
-
-import java.net.URI;
-
-@deprecated(DbcIsDeprecated, "2.9.0") object Database {
-
- def database (server:String, username:String, password:String): dbc.Database = {
- val uri = new URI(server);
- // Java 1.5 if (uri.toString().contains("postgres")) {
- if (uri.toString().indexOf("postgres") != -1) {
- new dbc.Database(new vendor.PostgreSQL {
- val uri = new URI(server);
- val user = username;
- val pass = password;
- })
- } else {
- throw new Exception("No DBMS vendor support could be found for the given URI");
- }
- }
-
-}
diff --git a/src/dbc/scala/dbc/syntax/Statement.scala b/src/dbc/scala/dbc/syntax/Statement.scala
deleted file mode 100644
index baccbfaa64..0000000000
--- a/src/dbc/scala/dbc/syntax/Statement.scala
+++ /dev/null
@@ -1,274 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package syntax;
-
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-
-import StatementExpression._;
-
-/*
-
-ASSUMPTIONS:
-
-IMPROVABLE:
-For type safety, all types must be defined. If one is missing, none is taken into account.
-It is possible to define the type or renaming of a field several times; in that case,
- only the last one is taken into account ("a" as "b" as "c" of boolean as "e" of integer
- is equivalent to "a" as "e" of integer).
-
-FIXED:
-
-*/
-
-@deprecated(DbcIsDeprecated, "2.9.0") object Statement {
-
- // SELECT ZYGOTE ...
-
- def select: SelectZygote = new SelectZygote {
- val setQuantifier = None;
- }
- def selectBag: SelectZygote = new SelectZygote {
- val setQuantifier = Some(statement.SetQuantifier.AllTuples);
- }
- def selectSet: SelectZygote = new SelectZygote {
- val setQuantifier = Some(statement.SetQuantifier.DistinctTuples);
- }
-
- abstract class SelectZygote {
- def setQuantifier: Option[statement.SetQuantifier];
- def fields (sdc:SelectDerivedColumns): SelectOf = new SelectOf {
- val setQuantifier = SelectZygote.this.setQuantifier;
- val selectList = sdc.selectList;
- val selectTypes = sdc.selectTypes;
- }
- }
-
- abstract class SelectDerivedField {
- def fieldValue: StatementField;
- def fieldRename: Option[String] = {val x = None; x}
- def fieldType: Option[dbc.DataType] = {val x = None; x}
- def as (rename:String): SelectDerivedField = new SelectDerivedField {
- val fieldValue = SelectDerivedField.this.fieldValue;
- override val fieldRename = Some(rename);
- override val fieldType = SelectDerivedField.this.fieldType;
- }
- def of (datatype:dbc.DataType): SelectDerivedField = new SelectDerivedField {
- val fieldValue = SelectDerivedField.this.fieldValue;
- override val fieldRename = SelectDerivedField.this.fieldRename;
- override val fieldType = Some(datatype);
- }
- }
-
- implicit def statementFieldToSelectDerivedField (fv:StatementField): SelectDerivedField = new SelectDerivedField {
- val fieldValue = fv;
- }
-
- implicit def stringToSelectDerivedField (fv:String): SelectDerivedField = new SelectDerivedField {
- val fieldValue: StatementField = StatementExpression.stringToStatementField(fv);
- }
-
- abstract class SelectDerivedColumns {
- def selectList: List[statement.DerivedColumn];
- def selectTypes: List[DataType];
- def and (sdc:SelectDerivedColumns): SelectDerivedColumns = new SelectDerivedColumns {
- val selectList = SelectDerivedColumns.this.selectList ::: sdc.selectList;
- val selectTypes =
- if (SelectDerivedColumns.this.selectTypes.isEmpty | sdc.selectTypes.isEmpty)
- Nil
- else
- SelectDerivedColumns.this.selectTypes ::: sdc.selectTypes;
- }
- }
-
- implicit def selectDerivedFieldToSelectDerivedColumns (sdf:SelectDerivedField): SelectDerivedColumns = new SelectDerivedColumns {
- val selectList = List(new statement.DerivedColumn {
- val valueExpression = sdf.fieldValue.toStatement;
- val asClause = sdf.fieldRename;
- });
- val selectTypes = if (sdf.fieldType.isEmpty) Nil else List(sdf.fieldType.get);
- }
-
- implicit def stringToSelectDerivedColumns (sdfs:String): SelectDerivedColumns = {
- val sdf: SelectDerivedField = sdfs;
- selectDerivedFieldToSelectDerivedColumns(sdf);
- }
-
- // SELECT OF ...
-
- abstract class SelectOf {
- def setQuantifier: Option[statement.SetQuantifier];
- def selectList: List[statement.DerivedColumn];
- def selectTypes: List[DataType];
- def from (sst:SelectSourceTables): SelectBeyond = new SelectBeyond {
- val setQuantifier = SelectOf.this.setQuantifier;
- val selectList = SelectOf.this.selectList;
- val selectTypes = SelectOf.this.selectTypes;
- val fromClause = sst.fromClause;
- val whereClause = None;
- val groupByClause = None;
- val havingClause = None;
- }
- }
-
- abstract class SelectSourceTable {
- def fromRelation: statement.Relation;
- def innerJoin (sst: SelectSourceTable): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Jointure {
- val leftRelation = SelectSourceTable.this.fromRelation;
- val rightRelation = sst.fromRelation;
- val joinType = statement.JoinType.Inner;
- val joinCondition = None;
- val fieldTypes = leftRelation.fieldTypes ::: rightRelation.fieldTypes;
- }
- }
- def leftOuterJoin (sst: SelectSourceTable): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Jointure {
- val leftRelation = SelectSourceTable.this.fromRelation;
- val rightRelation = sst.fromRelation;
- val joinType = statement.JoinType.Outer.Left;
- val joinCondition = None;
- val fieldTypes = leftRelation.fieldTypes ::: rightRelation.fieldTypes;
- }
- }
- def rightOuterJoin (sst: SelectSourceTable): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Jointure {
- val leftRelation = SelectSourceTable.this.fromRelation;
- val rightRelation = sst.fromRelation;
- val joinType = statement.JoinType.Outer.Right;
- val joinCondition = None;
- val fieldTypes = leftRelation.fieldTypes ::: rightRelation.fieldTypes;
- }
- }
- def fullOuterJoin (sst: SelectSourceTable): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Jointure {
- val leftRelation = SelectSourceTable.this.fromRelation;
- val rightRelation = sst.fromRelation;
- val joinType = statement.JoinType.Outer.Full;
- val joinCondition = None;
- val fieldTypes = leftRelation.fieldTypes ::: rightRelation.fieldTypes;
- }
- }
- }
-
- implicit def stringToSelectSourceTable (sct:String): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Table {
- val tableName = sct;
- val tableRename = None;
- val fieldTypes = Nil;
- }
- }
-
- implicit def selectToSelectSourceTable (sct:statement.Select): SelectSourceTable = new SelectSourceTable {
- val fromRelation = sct;
- }
-
- abstract class SelectSourceTables {
- def fromClause: List[statement.Relation];
- def join (sct:SelectSourceTable): SelectSourceTables = new SelectSourceTables {
- val fromClause = SelectSourceTables.this.fromClause ::: List(sct.fromRelation);
- }
- }
-
- implicit def stringToSelectSourceTables (sct:String): SelectSourceTables = new SelectSourceTables {
- val fromClause = List(new statement.Table {
- val tableName = sct;
- val tableRename = None;
- val fieldTypes = Nil;
- });
- }
-
- implicit def selectToSelectSourceTables (sct:statement.Select): SelectSourceTables = new SelectSourceTables {
- val fromClause = List(sct);
- }
-
- implicit def selectSourceTableToSelectSourceTables (sct:SelectSourceTable): SelectSourceTables = new SelectSourceTables {
- val fromClause = List(sct.fromRelation);
- }
-
- // SELECT BEYOND ...
-
- abstract class SelectBeyond {
- def setQuantifier: Option[statement.SetQuantifier];
- def selectList: List[statement.DerivedColumn];
- def selectTypes: List[DataType];
- def fromClause: List[statement.Relation];
- def whereClause: Option[statement.Expression];
- def groupByClause: Option[List[statement.Expression]];
- def havingClause: Option[statement.Expression];
- def where (se:StatementExpression): SelectBeyond = new SelectBeyond {
- val setQuantifier = SelectBeyond.this.setQuantifier;
- val selectList = SelectBeyond.this.selectList;
- val selectTypes = SelectBeyond.this.selectTypes;
- val fromClause = SelectBeyond.this.fromClause;
- val whereClause = Some(se.toStatement);
- val groupByClause = SelectBeyond.this.groupByClause;
- val havingClause = SelectBeyond.this.havingClause;
- }
- def groupBy (sgb:SelectGroupBy): SelectBeyond = new SelectBeyond {
- val setQuantifier = SelectBeyond.this.setQuantifier;
- val selectList = SelectBeyond.this.selectList;
- val selectTypes = SelectBeyond.this.selectTypes;
- val fromClause = SelectBeyond.this.fromClause;
- val whereClause = SelectBeyond.this.whereClause;
- val groupByClause = Some(sgb.groupByClause);
- val havingClause = SelectBeyond.this.havingClause;
- }
- def having (se:StatementExpression): SelectBeyond = new SelectBeyond {
- val setQuantifier = SelectBeyond.this.setQuantifier;
- val selectList = SelectBeyond.this.selectList;
- val selectTypes = SelectBeyond.this.selectTypes;
- val fromClause = SelectBeyond.this.fromClause;
- val whereClause = SelectBeyond.this.whereClause;
- val groupByClause = SelectBeyond.this.groupByClause;
- val havingClause = Some(se.toStatement);
- }
- }
-
- implicit def selectBeyondToStatementSelect (sb:SelectBeyond): statement.Select = new statement.Select {
- val setQuantifier = sb.setQuantifier;
- val selectList = sb.selectList;
- val fromClause = sb.fromClause;
- val whereClause = sb.whereClause;
- val groupByClause = sb.groupByClause;
- val havingClause = sb.havingClause;
- val fieldTypes = sb.selectTypes;
- }
-
- abstract class SelectGroupBy {
- def groupByClause: List[statement.Expression];
- def then (se:StatementExpression): SelectGroupBy = new SelectGroupBy {
- val groupByClause =
- SelectGroupBy.this.groupByClause ::: List(se.toStatement);
- }
- def then (se:String): SelectGroupBy = new SelectGroupBy {
- val groupByClause =
- SelectGroupBy.this.groupByClause ::: List(new statement.expression.Field {
- val tableName = None;
- val fieldName = se;
- });
- }
- }
-
- implicit def statementExpressionToSelectGroupBy (se:StatementExpression): SelectGroupBy = new SelectGroupBy {
- val groupByClause = List(se.toStatement);
- }
-
- implicit def stringToSelectGroupBy (se:String): SelectGroupBy = new SelectGroupBy {
- val groupByClause = List(new statement.expression.Field {
- val tableName = None;
- val fieldName = se;
- });
- }
-
-}
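The implicit conversions above give a small query DSL in which plain strings stand for columns and tables. A sketch of its use, assuming the usual string-to-column rendering provided by StatementExpression (deleted in the next file):

    import scala.dbc.syntax.Statement._

    // Strings are lifted to derived columns and source tables by the implicit
    // conversions above; the result widens to a statement.Select.
    val query: scala.dbc.statement.Select =
      select fields ("name" and "age") from "person"

    // prints (approximately): SELECT name, age FROM person
    println(query.sqlString)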
diff --git a/src/dbc/scala/dbc/syntax/StatementExpression.scala b/src/dbc/scala/dbc/syntax/StatementExpression.scala
deleted file mode 100644
index 65bb0947f7..0000000000
--- a/src/dbc/scala/dbc/syntax/StatementExpression.scala
+++ /dev/null
@@ -1,221 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package syntax;
-
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class StatementExpression {
-
- def toStatement: statement.Expression;
-
- def and (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "AND";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def or (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "OR";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def == (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "=";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def < (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "<";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def > (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = ">";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def <= (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "<=";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def >= (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = ">=";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def <> (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "<>";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def isNull: StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "IS NULL";
- val operatorIsLeft = false;
- val operand = StatementExpression.this.toStatement;
- }
- }
- def isNotNull: StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "IS NOT NULL";
- val operatorIsLeft = false;
- val operand = StatementExpression.this.toStatement;
- }
- }
- def + (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "+";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def - (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "-";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def * (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "*";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def / (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "/";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def % (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "%";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def ^ (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "^";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def not : StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "!";
- val operatorIsLeft = false;
- val operand = StatementExpression.this.toStatement;
- }
- }
- def || (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "||";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def like (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "LIKE";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def similar (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "SIMILAR";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def in (se:statement.Select): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "IN";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = new statement.expression.Select {
- val selectStatement = se;
- };
- }
- }
-
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object StatementExpression {
-
- def not (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "NOT";
- val operatorIsLeft = true;
- val operand = se.toStatement;
- }
- }
- def abs (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "@";
- val operatorIsLeft = true;
- val operand = se.toStatement;
- }
- }
- def exists (se:statement.Select): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "EXISTS";
- val operatorIsLeft = true;
- val operand = new statement.expression.Select {
- val selectStatement = se;
- };
- }
- }
-
- abstract class StatementField extends StatementExpression {
- def fieldName: String;
- def tableName: Option[String] = None;
- def in (tn:String): StatementField = new StatementField {
- val fieldName = StatementField.this.fieldName;
- override val tableName = Some(tn);
- }
- def toStatement: statement.expression.Field = new statement.expression.Field {
- override val schemaName = None;
- val tableName = StatementField.this.tableName;
- val fieldName = StatementField.this.fieldName;
- }
- }
-
- implicit def stringToStatementField (ef:String): StatementField = new StatementField {
- val fieldName = ef;
- }
-
-
-
-
-}
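For context (not part of the patch): the file removed above supplied an operator DSL over statement.Expression, driven by the implicit stringToStatementField view. A minimal sketch of how it was used, with hypothetical column and table names:

import scala.dbc.syntax.StatementExpression._

// "name"/"archived" are hypothetical columns of a hypothetical "customer" table;
// each string literal is lifted to a StatementField by the implicit view imported above.
val whereExpr =
  ("name" in "customer").isNotNull and ("archived" in "customer").isNull.not
// whereExpr.toStatement yields the statement.Expression tree consumed by the query syntax.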
diff --git a/src/dbc/scala/dbc/value/ApproximateNumeric.scala b/src/dbc/scala/dbc/value/ApproximateNumeric.scala
deleted file mode 100644
index fa47d8815b..0000000000
--- a/src/dbc/scala/dbc/value/ApproximateNumeric.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class ApproximateNumeric [Type] extends Value {
-
- val dataType: datatype.ApproximateNumeric[Type];
-
- def sqlString = nativeValue.toString();
-
- }
-
-@deprecated(DbcIsDeprecated, "2.9.0") object ApproximateNumeric {
-
- implicit def approximateNumericToFloar (obj:value.ApproximateNumeric[Float]): Float = obj.nativeValue;
- implicit def approximateNumericToDouble (obj:value.ApproximateNumeric[Double]): Double = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/Boolean.scala b/src/dbc/scala/dbc/value/Boolean.scala
deleted file mode 100644
index 5221ce2328..0000000000
--- a/src/dbc/scala/dbc/value/Boolean.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Boolean extends Value {
-
- val dataType: datatype.Boolean;
-
- def sqlString = if (nativeValue) "TRUE" else "FALSE";
-
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object Boolean {
-
- implicit def booleanToBoolean (obj:value.Boolean): scala.Boolean = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/Character.scala b/src/dbc/scala/dbc/value/Character.scala
deleted file mode 100644
index 4ff983c591..0000000000
--- a/src/dbc/scala/dbc/value/Character.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-/** A SQL-99 value of type character string. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Character extends Value {
-
- override val dataType: datatype.Character;
-
- /** An SQL-99 compliant string representation of the value. */
- def sqlString: String = {
- "'" + nativeValue + "'"
- }
-
-}
-
-/** An object offering transformation methods (views) on the value.
- * This object must be visible in an expression to use value auto-
- * conversion. */
-@deprecated(DbcIsDeprecated, "2.9.0") object Character {
-
- /** A character string value as a native string. */
- implicit def characterToString (obj:value.Character): String = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/CharacterLargeObject.scala b/src/dbc/scala/dbc/value/CharacterLargeObject.scala
deleted file mode 100644
index b9e81eb3af..0000000000
--- a/src/dbc/scala/dbc/value/CharacterLargeObject.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-/** A SQL-99 value of type character large object. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class CharacterLargeObject extends Value {
-
- override val dataType: datatype.CharacterLargeObject;
-
- /** An SQL-99 compliant string representation of the value. */
- def sqlString: String = {
- "'" + nativeValue + "'"
- }
-
-}
-
-/** An object offering transformation methods (views) on the value.
- * This object must be visible in an expression to use value auto-
- * conversion. */
-@deprecated(DbcIsDeprecated, "2.9.0") object CharacterLargeObject {
-
- /** A character large object value as a native string. */
- implicit def characterLargeObjectToString (obj:value.CharacterLargeObject): String = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/CharacterVarying.scala b/src/dbc/scala/dbc/value/CharacterVarying.scala
deleted file mode 100644
index 72e7d06362..0000000000
--- a/src/dbc/scala/dbc/value/CharacterVarying.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-/** A SQL-99 value of type character varying string. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class CharacterVarying extends Value {
-
- override val dataType: datatype.CharacterVarying;
-
- /** An SQL-99 compliant string representation of the value. */
- def sqlString: String = {
- "'" + nativeValue + "'"
- }
-
-}
-
-/** An object offering transformation methods (views) on the value.
- * This object must be visible in an expression to use value auto-
- * conversion. */
-@deprecated(DbcIsDeprecated, "2.9.0") object CharacterVarying {
-
- /** A character varying string value as a native string. */
- implicit def characterVaryingToString (obj:value.CharacterVarying): String = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/Conversion.scala b/src/dbc/scala/dbc/value/Conversion.scala
deleted file mode 100644
index c9297e37db..0000000000
--- a/src/dbc/scala/dbc/value/Conversion.scala
+++ /dev/null
@@ -1,156 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-import java.math._;
-
-@deprecated(DbcIsDeprecated, "2.9.0") object Conversion {
-
- class Illegal (msg:String) extends Exception(msg);
-
- implicit def view1 (value:Value): Byte = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to byte: "+value)
- }
- }
-
- implicit def view2 (value:Value): Short = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- v.nativeValue.toShort
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to short: "+value)
- }
- }
-
- implicit def view3 (value:Value): Int = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- v.nativeValue.toInt
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- v.nativeValue.toInt
- } else if (value.dataType.nativeTypeId == DataType.INT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Int]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to int: "+value)
- }
- }
-
- implicit def view4 (value:Value): Long = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- v.nativeValue.toLong
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- v.nativeValue.toLong
- } else if (value.dataType.nativeTypeId == DataType.INT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Int]];
- v.nativeValue.toLong
- } else if (value.dataType.nativeTypeId == DataType.LONG) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Long]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to long: "+value)
- }
- }
-
- implicit def view5 (value:Value): BigInteger = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- new BigInteger(v.nativeValue.toString(),10)
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- new BigInteger(v.nativeValue.toString(),10)
- } else if (value.dataType.nativeTypeId == DataType.INT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Int]];
- new BigInteger(v.nativeValue.toString(),10)
- } else if (value.dataType.nativeTypeId == DataType.LONG) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Long]];
- new BigInteger(v.nativeValue.toString(),10)
- } else if (value.dataType.nativeTypeId == DataType.BIG_INTEGER) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[BigInteger]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to big integer: "+value)
- }
- }
-
- implicit def view6 (value:Value): BigDecimal = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- new BigDecimal(v.nativeValue.toString())
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- new BigDecimal(v.nativeValue.toString())
- } else if (value.dataType.nativeTypeId == DataType.INT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Int]];
- new BigDecimal(v.nativeValue.toString())
- } else if (value.dataType.nativeTypeId == DataType.LONG) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Long]];
- new BigDecimal(v.nativeValue.toString())
- } else if (value.dataType.nativeTypeId == DataType.BIG_INTEGER) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[BigInteger]];
- new BigDecimal(v.nativeValue)
- } else if (value.dataType.nativeTypeId == DataType.BIG_DECIMAL) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[BigDecimal]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to big decimal: "+value)
- }
- }
-
- implicit def view7 (value:Value): Float = {
- if (value.dataType.nativeTypeId == DataType.FLOAT) {
- val v = value.asInstanceOf[dbc.value.ApproximateNumeric[Float]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to float: "+value)
- }
- }
-
- implicit def view8 (value:Value): Double = {
- if (value.dataType.nativeTypeId == DataType.FLOAT) {
- val v = value.asInstanceOf[dbc.value.ApproximateNumeric[Float]];
- v.nativeValue.toFloat
- } else if (value.dataType.nativeTypeId == DataType.DOUBLE) {
- val v = value.asInstanceOf[dbc.value.ApproximateNumeric[Double]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to double: "+value)
- }
- }
-
- implicit def view9 (value:Value): scala.Boolean = {
- if (value.dataType.nativeTypeId == DataType.BOOLEAN) {
- val v = value.asInstanceOf[dbc.value.Boolean];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to boolean: "+value)
- }
- }
-
- implicit def view10 (value:Value): String = value match {
- case v:dbc.value.Character => v.nativeValue;
- case v:dbc.value.CharacterLargeObject => v.nativeValue;
- case v:dbc.value.CharacterVarying => v.nativeValue;
- case _ => throw new Illegal("Cannot convert value to string")
- }
-
-}
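For context (not part of the patch): the implicit views deleted above were dbc's auto-conversion layer from Value to native types. A minimal sketch of their use, assuming a dbc Value obtained from a query result:

import scala.dbc.value.Conversion._

// Each view checks dataType.nativeTypeId and throws Conversion.Illegal on a mismatch.
def asInt(v: scala.dbc.Value): Int       = v   // applies view3
def asString(v: scala.dbc.Value): String = v   // applies view10 (Character / CharacterVarying / CharacterLargeObject)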
diff --git a/src/dbc/scala/dbc/value/ExactNumeric.scala b/src/dbc/scala/dbc/value/ExactNumeric.scala
deleted file mode 100644
index 7cd8b89a8c..0000000000
--- a/src/dbc/scala/dbc/value/ExactNumeric.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-import java.math.BigInteger;
-import java.math.BigDecimal;
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class ExactNumeric [Type] extends Value {
-
- val dataType: datatype.ExactNumeric[Type];
-
- def sqlString = nativeValue.toString();
-
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object ExactNumeric {
-
- implicit def exactNumericToByte (obj:value.ExactNumeric[Byte]): Byte = obj.nativeValue;
- implicit def exactNumericToShort (obj:value.ExactNumeric[Short]): Short = obj.nativeValue;
- implicit def exactNumericToInt (obj:value.ExactNumeric[Int]): Int = obj.nativeValue;
- implicit def exactNumericToLong (obj:value.ExactNumeric[Long]): Long = obj.nativeValue;
- implicit def exactNumericToBigInteger (obj:value.ExactNumeric[BigInteger]): BigInteger = obj.nativeValue;
- implicit def exactNumericToBigDecimal (obj:value.ExactNumeric[BigDecimal]): BigDecimal = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/Factory.scala b/src/dbc/scala/dbc/value/Factory.scala
deleted file mode 100644
index 2d6101f6de..0000000000
--- a/src/dbc/scala/dbc/value/Factory.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-import java.math.BigInteger;
-import java.math.BigDecimal;
-
-@deprecated(DbcIsDeprecated, "2.9.0") object Factory {
-
- def create (result: java.sql.ResultSet, index: Int, expectedDataType: DataType): Value = {
- expectedDataType.nativeTypeId match {
- case DataType.OBJECT =>
- new value.Unknown {
- val dataType = expectedDataType.asInstanceOf[datatype.Unknown];
- val nativeValue: AnyRef = result.getObject(index);
- }
- case DataType.STRING => {
- expectedDataType match {
- case t:datatype.Character =>
- new value.Character {
- val dataType = t;
- val nativeValue: String = result.getString(index);
- }
- case t:datatype.CharacterVarying =>
- new value.CharacterVarying {
- val dataType = t;
- val nativeValue: String = result.getString(index);
- }
- case t:datatype.CharacterLargeObject =>
- new value.CharacterLargeObject {
- val dataType = t;
- val nativeValue: String = result.getString(index);
- }
- }
- }
- case DataType.BOOLEAN =>
- new value.Boolean {
- val dataType = expectedDataType.asInstanceOf[datatype.Boolean];
- val nativeValue: scala.Boolean = result.getBoolean(index);
- }
- case DataType.FLOAT =>
- new value.ApproximateNumeric[Float] {
- val dataType = expectedDataType.asInstanceOf[datatype.ApproximateNumeric[Float]];
- val nativeValue: Float = result.getFloat(index);
- }
- case DataType.DOUBLE =>
- new value.ApproximateNumeric[Double] {
- val dataType = expectedDataType.asInstanceOf[datatype.ApproximateNumeric[Double]];
- val nativeValue: Double = result.getDouble(index);
- }
- case DataType.BYTE =>
- new value.ExactNumeric[Byte] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[Byte]];
- val nativeValue: Byte = result.getByte(index);
- }
- case DataType.SHORT =>
- new value.ExactNumeric[Short] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[Short]];
- val nativeValue: Short = result.getShort(index);
- }
- case DataType.INT =>
- new value.ExactNumeric[Int] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[Int]];
- val nativeValue: Int = result.getInt(index);
- }
- case DataType.LONG =>
- new value.ExactNumeric[Long] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[Long]];
- val nativeValue:Long = result.getLong(index);
- }
- case DataType.BIG_INTEGER =>
- new value.ExactNumeric[BigInteger] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[BigInteger]];
- val nativeValue: BigInteger = result.getBigDecimal(index).unscaledValue();
- }
- case DataType.BIG_DECIMAL =>
- new value.ExactNumeric[BigDecimal] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[BigDecimal]];
- val nativeValue: BigDecimal = result.getBigDecimal(index);
- }
-
- }
- }
-
-}
diff --git a/src/dbc/scala/dbc/value/Unknown.scala b/src/dbc/scala/dbc/value/Unknown.scala
deleted file mode 100644
index 89764a5831..0000000000
--- a/src/dbc/scala/dbc/value/Unknown.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Unknown extends Value {
-
- val dataType: datatype.Unknown;
-
- def sqlString = sys.error("An 'ANY' value cannot be represented.");
-
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object UnknownType {
-
- def view (obj:value.Unknown): AnyRef = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/vendor/PostgreSQL.scala b/src/dbc/scala/dbc/vendor/PostgreSQL.scala
deleted file mode 100644
index ac528d5f82..0000000000
--- a/src/dbc/scala/dbc/vendor/PostgreSQL.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package vendor;
-
-
-import compat.Platform
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class PostgreSQL extends Vendor {
-
- def uri:java.net.URI;
- def user:String;
- def pass:String;
-
- val retainedConnections = 5;
-
- val nativeDriverClass = Platform.getClassForName("org.postgresql.Driver");
-
- val urlProtocolString = "jdbc:postgresql:"
-
-}
diff --git a/src/detach/library/scala/remoting/Channel.scala b/src/detach/library/scala/remoting/Channel.scala
index 541e45a477..54b8fb100e 100644
--- a/src/detach/library/scala/remoting/Channel.scala
+++ b/src/detach/library/scala/remoting/Channel.scala
@@ -116,20 +116,20 @@ class Channel protected (socket: Socket) {
* the expected type.
*/
@throws(classOf[ChannelException])
- def receive[T](implicit expected: reflect.Manifest[T]): T = {
- val found = in.readObject().asInstanceOf[reflect.Manifest[_]]
+ def receive[T](implicit expected: reflect.ClassTag[T]): T = {
+ val found = in.readObject().asInstanceOf[reflect.ClassTag[_]]
info("receive: found="+found+", expected="+expected)
- import scala.reflect.Manifest
+ import scala.reflect.ClassTag
val x = found match {
- case Manifest.Unit => ()
- case Manifest.Boolean => in.readBoolean()
- case Manifest.Byte => in.readByte()
- case Manifest.Char => in.readChar()
- case Manifest.Short => in.readShort()
- case Manifest.Int => in.readInt()
- case Manifest.Long => in.readLong()
- case Manifest.Float => in.readFloat()
- case Manifest.Double => in.readDouble()
+ case ClassTag.Unit => ()
+ case ClassTag.Boolean => in.readBoolean()
+ case ClassTag.Byte => in.readByte()
+ case ClassTag.Char => in.readChar()
+ case ClassTag.Short => in.readShort()
+ case ClassTag.Int => in.readInt()
+ case ClassTag.Long => in.readLong()
+ case ClassTag.Float => in.readFloat()
+ case ClassTag.Double => in.readDouble()
case _ => in.readObject()
}
val res = if (found <:< expected)
@@ -144,12 +144,12 @@ class Channel protected (socket: Socket) {
/** <code>?</code> method may throw either an
* <code>ClassNotFoundException</code> or an <code>IOException</code>.
*/
- def ?[T](implicit m: reflect.Manifest[T]): T = receive[T](m)
+ def ?[T](implicit t: reflect.ClassTag[T]): T = receive[T](t)
/** <code>send</code> method may throw an <code>IOException</code>.
*/
- def send[T](x: T)(implicit m: reflect.Manifest[T]) {
- out writeObject m
+ def send[T](x: T)(implicit t: reflect.ClassTag[T]) {
+ out writeObject t
x match {
case x: Unit => // nop
case x: Boolean => out writeBoolean x
@@ -168,7 +168,7 @@ class Channel protected (socket: Socket) {
/** <code>!</code> method may throw an <code>IOException</code>.
*/
- def ![T](x: T)(implicit m: reflect.Manifest[T]) { send(x)(m) }
+ def ![T](x: T)(implicit m: reflect.ClassTag[T]) { send(x)(m) }
def close() {
try { socket.close() }
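For context (not part of the patch): after this change the Channel API is keyed on ClassTag instead of Manifest. A minimal usage sketch, assuming an already-connected scala.remoting.Channel whose peer answers a String request with an Int (the "count" protocol is hypothetical):

def askCount(ch: scala.remoting.Channel): Int = {
  ch ! "count"   // writes ClassTag[String], then the string itself
  ch.?[Int]      // reads the peer's ClassTag and value; throws ChannelException on a type mismatch
}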
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
index 13439e9019..65654be69b 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
@@ -1,5 +1,4 @@
/*
-
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
@@ -12,22 +11,18 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Random;
-//import java.util.concurrent.AbstractExecutorService;
+import java.util.concurrent.AbstractExecutorService;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
-//import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.RunnableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.AbstractQueuedSynchronizer;
import java.util.concurrent.locks.Condition;
-interface RunnableFuture<T> extends Runnable {
- //TR placeholder for java.util.concurrent.RunnableFuture
-}
-
/**
* An {@link ExecutorService} for running {@link ForkJoinTask}s.
* A {@code ForkJoinPool} provides the entry point for submissions
@@ -127,7 +122,7 @@ interface RunnableFuture<T> extends Runnable {
* @since 1.7
* @author Doug Lea
*/
-public class ForkJoinPool /*extends AbstractExecutorService*/ {
+public class ForkJoinPool extends AbstractExecutorService {
/*
* Implementation Overview
@@ -634,7 +629,7 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
final ForkJoinPool pool; // the containing pool (may be null)
final ForkJoinWorkerThread owner; // owning thread or null if shared
volatile Thread parker; // == owner during call to park; else null
- ForkJoinTask<?> currentJoin; // task being joined in awaitJoin
+ volatile ForkJoinTask<?> currentJoin; // task being joined in awaitJoin
ForkJoinTask<?> currentSteal; // current non-local task being executed
// Heuristic padding to ameliorate unfortunate memory placements
Object p00, p01, p02, p03, p04, p05, p06, p07;
@@ -726,12 +721,11 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* version of this method because it is never needed.)
*/
final ForkJoinTask<?> pop() {
- ForkJoinTask<?> t; int m;
- ForkJoinTask<?>[] a = array;
- if (a != null && (m = a.length - 1) >= 0) {
+ ForkJoinTask<?>[] a; ForkJoinTask<?> t; int m;
+ if ((a = array) != null && (m = a.length - 1) >= 0) {
for (int s; (s = top - 1) - base >= 0;) {
- int j = ((m & s) << ASHIFT) + ABASE;
- if ((t = (ForkJoinTask<?>)U.getObjectVolatile(a, j)) == null)
+ long j = ((m & s) << ASHIFT) + ABASE;
+ if ((t = (ForkJoinTask<?>)U.getObject(a, j)) == null)
break;
if (U.compareAndSwapObject(a, j, t, null)) {
top = s;
@@ -835,54 +829,6 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
}
/**
- * If present, removes from queue and executes the given task, or
- * any other cancelled task. Returns (true) immediately on any CAS
- * or consistency check failure so caller can retry.
- *
- * @return false if no progress can be made
- */
- final boolean tryRemoveAndExec(ForkJoinTask<?> task) {
- boolean removed = false, empty = true, progress = true;
- ForkJoinTask<?>[] a; int m, s, b, n;
- if ((a = array) != null && (m = a.length - 1) >= 0 &&
- (n = (s = top) - (b = base)) > 0) {
- for (ForkJoinTask<?> t;;) { // traverse from s to b
- int j = ((--s & m) << ASHIFT) + ABASE;
- t = (ForkJoinTask<?>)U.getObjectVolatile(a, j);
- if (t == null) // inconsistent length
- break;
- else if (t == task) {
- if (s + 1 == top) { // pop
- if (!U.compareAndSwapObject(a, j, task, null))
- break;
- top = s;
- removed = true;
- }
- else if (base == b) // replace with proxy
- removed = U.compareAndSwapObject(a, j, task,
- new EmptyTask());
- break;
- }
- else if (t.status >= 0)
- empty = false;
- else if (s + 1 == top) { // pop and throw away
- if (U.compareAndSwapObject(a, j, t, null))
- top = s;
- break;
- }
- if (--n == 0) {
- if (!empty && base == b)
- progress = false;
- break;
- }
- }
- }
- if (removed)
- task.doExec();
- return progress;
- }
-
- /**
* Initializes or doubles the capacity of array. Call either
* by owner or with lock held -- it is OK for base, but not
* top, to move while resizings are in progress.
@@ -944,69 +890,98 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
// Execution methods
/**
- * Removes and runs tasks until empty, using local mode
- * ordering. Normally called only after checking for apparent
- * non-emptiness.
+ * Pops and runs tasks until empty.
*/
- final void runLocalTasks() {
- // hoist checks from repeated pop/poll
- ForkJoinTask<?>[] a; int m;
- if ((a = array) != null && (m = a.length - 1) >= 0) {
- if (mode == 0) {
- for (int s; (s = top - 1) - base >= 0;) {
- int j = ((m & s) << ASHIFT) + ABASE;
- ForkJoinTask<?> t =
- (ForkJoinTask<?>)U.getObjectVolatile(a, j);
- if (t != null) {
- if (U.compareAndSwapObject(a, j, t, null)) {
- top = s;
- t.doExec();
- }
- }
- else
- break;
- }
+ private void popAndExecAll() {
+ // A bit faster than repeated pop calls
+ ForkJoinTask<?>[] a; int m, s; long j; ForkJoinTask<?> t;
+ while ((a = array) != null && (m = a.length - 1) >= 0 &&
+ (s = top - 1) - base >= 0 &&
+ (t = ((ForkJoinTask<?>)
+ U.getObject(a, j = ((m & s) << ASHIFT) + ABASE)))
+ != null) {
+ if (U.compareAndSwapObject(a, j, t, null)) {
+ top = s;
+ t.doExec();
}
- else {
- for (int b; (b = base) - top < 0;) {
- int j = ((m & b) << ASHIFT) + ABASE;
- ForkJoinTask<?> t =
- (ForkJoinTask<?>)U.getObjectVolatile(a, j);
- if (t != null) {
- if (base == b &&
- U.compareAndSwapObject(a, j, t, null)) {
- base = b + 1;
- t.doExec();
- }
- } else if (base == b) {
- if (b + 1 == top)
+ }
+ }
+
+ /**
+ * Polls and runs tasks until empty.
+ */
+ private void pollAndExecAll() {
+ for (ForkJoinTask<?> t; (t = poll()) != null;)
+ t.doExec();
+ }
+
+ /**
+ * If present, removes from queue and executes the given task, or
+ * any other cancelled task. Returns (true) immediately on any CAS
+ * or consistency check failure so caller can retry.
+ *
+ * @return 0 if no progress can be made, else positive
+ * (this unusual convention simplifies use with tryHelpStealer.)
+ */
+ final int tryRemoveAndExec(ForkJoinTask<?> task) {
+ int stat = 1;
+ boolean removed = false, empty = true;
+ ForkJoinTask<?>[] a; int m, s, b, n;
+ if ((a = array) != null && (m = a.length - 1) >= 0 &&
+ (n = (s = top) - (b = base)) > 0) {
+ for (ForkJoinTask<?> t;;) { // traverse from s to b
+ int j = ((--s & m) << ASHIFT) + ABASE;
+ t = (ForkJoinTask<?>)U.getObjectVolatile(a, j);
+ if (t == null) // inconsistent length
+ break;
+ else if (t == task) {
+ if (s + 1 == top) { // pop
+ if (!U.compareAndSwapObject(a, j, task, null))
break;
- Thread.yield(); // wait for lagging update
+ top = s;
+ removed = true;
}
+ else if (base == b) // replace with proxy
+ removed = U.compareAndSwapObject(a, j, task,
+ new EmptyTask());
+ break;
+ }
+ else if (t.status >= 0)
+ empty = false;
+ else if (s + 1 == top) { // pop and throw away
+ if (U.compareAndSwapObject(a, j, t, null))
+ top = s;
+ break;
+ }
+ if (--n == 0) {
+ if (!empty && base == b)
+ stat = 0;
+ break;
}
}
}
+ if (removed)
+ task.doExec();
+ return stat;
}
/**
* Executes a top-level task and any local tasks remaining
* after execution.
- *
- * @return true unless terminating
*/
- final boolean runTask(ForkJoinTask<?> t) {
- boolean alive = true;
+ final void runTask(ForkJoinTask<?> t) {
if (t != null) {
currentSteal = t;
t.doExec();
- if (top != base) // conservative guard
- runLocalTasks();
+ if (top != base) { // process remaining local tasks
+ if (mode == 0)
+ popAndExecAll();
+ else
+ pollAndExecAll();
+ }
++nsteals;
currentSteal = null;
}
- else if (runState < 0) // terminating
- alive = false;
- return alive;
}
/**
@@ -1072,7 +1047,6 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
ASHIFT = 31 - Integer.numberOfLeadingZeros(s);
}
}
-
/**
* Per-thread records for threads that submit to pools. Currently
* holds only pseudo-random seed / index that is used to choose
@@ -1165,7 +1139,7 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* traversal parameters at the expense of sometimes blocking when
* we could be helping.
*/
- private static final int MAX_HELP = 32;
+ private static final int MAX_HELP = 64;
/**
* Secondary time-based bound (in nanosecs) for helping attempts
@@ -1175,7 +1149,7 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* value should roughly approximate the time required to create
* and/or activate a worker thread.
*/
- private static final long COMPENSATION_DELAY = 100L * 1000L; // 0.1 millisec
+ private static final long COMPENSATION_DELAY = 1L << 18; // ~0.25 millisec
/**
* Increment for seed generators. See class ThreadLocal for
@@ -1326,22 +1300,28 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
*
* @param w the worker's queue
*/
+
final void registerWorker(WorkQueue w) {
Mutex lock = this.lock;
lock.lock();
try {
WorkQueue[] ws = workQueues;
if (w != null && ws != null) { // skip on shutdown/failure
- int rs, n;
- while ((n = ws.length) < // ensure can hold total
- (parallelism + (short)(ctl >>> TC_SHIFT) << 1))
- workQueues = ws = Arrays.copyOf(ws, n << 1);
- int m = n - 1;
+ int rs, n = ws.length, m = n - 1;
int s = nextSeed += SEED_INCREMENT; // rarely-colliding sequence
w.seed = (s == 0) ? 1 : s; // ensure non-zero seed
int r = (s << 1) | 1; // use odd-numbered indices
- while (ws[r &= m] != null) // step by approx half size
- r += ((n >>> 1) & SQMASK) + 2;
+ if (ws[r &= m] != null) { // collision
+ int probes = 0; // step by approx half size
+ int step = (n <= 4) ? 2 : ((n >>> 1) & SQMASK) + 2;
+ while (ws[r = (r + step) & m] != null) {
+ if (++probes >= n) {
+ workQueues = ws = Arrays.copyOf(ws, n <<= 1);
+ m = n - 1;
+ probes = 0;
+ }
+ }
+ }
w.eventCount = w.poolIndex = r; // establish before recording
ws[r] = w; // also update seq
runState = ((rs = runState) & SHUTDOWN) | ((rs + 2) & ~SHUTDOWN);
@@ -1488,7 +1468,6 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
}
}
-
// Scanning for tasks
/**
@@ -1496,7 +1475,7 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
*/
final void runWorker(WorkQueue w) {
w.growArray(false); // initialize queue array in this thread
- do {} while (w.runTask(scan(w)));
+ do { w.runTask(scan(w)); } while (w.runState >= 0);
}
/**
@@ -1559,8 +1538,7 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
q.base = b + 1; // specialization of pollAt
return t;
}
- else if ((t != null || b + 1 != q.top) &&
- (ec < 0 || j <= m)) {
+ else if (ec < 0 || j <= m) {
rs = 0; // mark scan as imcomplete
break; // caller can retry after release
}
@@ -1568,6 +1546,7 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
if (--j < 0)
break;
}
+
long c = ctl; int e = (int)c, a = (int)(c >> AC_SHIFT), nr, ns;
if (e < 0) // decode ctl on empty scan
w.runState = -1; // pool is terminating
@@ -1635,7 +1614,7 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
*/
private void idleAwaitWork(WorkQueue w, long currentCtl, long prevCtl) {
if (w.eventCount < 0 && !tryTerminate(false, false) &&
- (int)prevCtl != 0 && ctl == currentCtl) {
+ (int)prevCtl != 0 && !hasQueuedSubmissions() && ctl == currentCtl) {
Thread wt = Thread.currentThread();
Thread.yield(); // yield before block
while (ctl == currentCtl) {
@@ -1670,70 +1649,79 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* leaves hints in workers to speed up subsequent calls. The
* implementation is very branchy to cope with potential
* inconsistencies or loops encountering chains that are stale,
- * unknown, or so long that they are likely cyclic. All of these
- * cases are dealt with by just retrying by caller.
+ * unknown, or so long that they are likely cyclic.
*
* @param joiner the joining worker
* @param task the task to join
- * @return true if found or ran a task (and so is immediately retryable)
- */
- private boolean tryHelpStealer(WorkQueue joiner, ForkJoinTask<?> task) {
- WorkQueue[] ws;
- int m, depth = MAX_HELP; // remaining chain depth
- boolean progress = false;
- if ((ws = workQueues) != null && (m = ws.length - 1) > 0 &&
- task.status >= 0) {
- ForkJoinTask<?> subtask = task; // current target
- outer: for (WorkQueue j = joiner;;) {
- WorkQueue stealer = null; // find stealer of subtask
- WorkQueue v = ws[j.stealHint & m]; // try hint
- if (v != null && v.currentSteal == subtask)
- stealer = v;
- else { // scan
- for (int i = 1; i <= m; i += 2) {
- if ((v = ws[i]) != null && v.currentSteal == subtask &&
- v != joiner) {
- stealer = v;
- j.stealHint = i; // save hint
- break;
- }
+ * @return 0 if no progress can be made, negative if task
+ * known complete, else positive
+ */
+ private int tryHelpStealer(WorkQueue joiner, ForkJoinTask<?> task) {
+ int stat = 0, steps = 0; // bound to avoid cycles
+ if (joiner != null && task != null) { // hoist null checks
+ restart: for (;;) {
+ ForkJoinTask<?> subtask = task; // current target
+ for (WorkQueue j = joiner, v;;) { // v is stealer of subtask
+ WorkQueue[] ws; int m, s, h;
+ if ((s = task.status) < 0) {
+ stat = s;
+ break restart;
}
- if (stealer == null)
- break;
- }
-
- for (WorkQueue q = stealer;;) { // try to help stealer
- ForkJoinTask[] a; ForkJoinTask<?> t; int b;
- if (task.status < 0)
- break outer;
- if ((b = q.base) - q.top < 0 && (a = q.array) != null) {
- progress = true;
- int i = (((a.length - 1) & b) << ASHIFT) + ABASE;
- t = (ForkJoinTask<?>)U.getObjectVolatile(a, i);
- if (subtask.status < 0) // must recheck before taking
- break outer;
- if (t != null &&
- q.base == b &&
- U.compareAndSwapObject(a, i, t, null)) {
- q.base = b + 1;
- joiner.runSubtask(t);
+ if ((ws = workQueues) == null || (m = ws.length - 1) <= 0)
+ break restart; // shutting down
+ if ((v = ws[h = (j.stealHint | 1) & m]) == null ||
+ v.currentSteal != subtask) {
+ for (int origin = h;;) { // find stealer
+ if (((h = (h + 2) & m) & 15) == 1 &&
+ (subtask.status < 0 || j.currentJoin != subtask))
+ continue restart; // occasional staleness check
+ if ((v = ws[h]) != null &&
+ v.currentSteal == subtask) {
+ j.stealHint = h; // save hint
+ break;
+ }
+ if (h == origin)
+ break restart; // cannot find stealer
}
- else if (q.base == b)
- break outer; // possibly stalled
}
- else { // descend
- ForkJoinTask<?> next = stealer.currentJoin;
- if (--depth <= 0 || subtask.status < 0 ||
- next == null || next == subtask)
- break outer; // stale, dead-end, or cyclic
- subtask = next;
- j = stealer;
- break;
+ for (;;) { // help stealer or descend to its stealer
+ ForkJoinTask[] a; int b;
+ if (subtask.status < 0) // surround probes with
+ continue restart; // consistency checks
+ if ((b = v.base) - v.top < 0 && (a = v.array) != null) {
+ int i = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ ForkJoinTask<?> t =
+ (ForkJoinTask<?>)U.getObjectVolatile(a, i);
+ if (subtask.status < 0 || j.currentJoin != subtask ||
+ v.currentSteal != subtask)
+ continue restart; // stale
+ stat = 1; // apparent progress
+ if (t != null && v.base == b &&
+ U.compareAndSwapObject(a, i, t, null)) {
+ v.base = b + 1; // help stealer
+ joiner.runSubtask(t);
+ }
+ else if (v.base == b && ++steps == MAX_HELP)
+ break restart; // v apparently stalled
+ }
+ else { // empty -- try to descend
+ ForkJoinTask<?> next = v.currentJoin;
+ if (subtask.status < 0 || j.currentJoin != subtask ||
+ v.currentSteal != subtask)
+ continue restart; // stale
+ else if (next == null || ++steps == MAX_HELP)
+ break restart; // dead-end or maybe cyclic
+ else {
+ subtask = next;
+ j = v;
+ break;
+ }
+ }
}
}
}
}
- return progress;
+ return stat;
}
/**
@@ -1833,44 +1821,50 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* @return task status on exit
*/
final int awaitJoin(WorkQueue joiner, ForkJoinTask<?> task) {
- ForkJoinTask<?> prevJoin = joiner.currentJoin;
- joiner.currentJoin = task;
- long startTime = 0L;
- for (int k = 0, s; ; ++k) {
- if ((joiner.isEmpty() ? // try to help
- !tryHelpStealer(joiner, task) :
- !joiner.tryRemoveAndExec(task))) {
- if (k == 0) {
- startTime = System.nanoTime();
- tryPollForAndExec(joiner, task); // check uncommon case
- }
- else if ((k & (MAX_HELP - 1)) == 0 &&
- System.nanoTime() - startTime >= COMPENSATION_DELAY &&
- tryCompensate(task, null)) {
- if (task.trySetSignal() && task.status >= 0) {
- synchronized (task) {
- if (task.status >= 0) {
- try { // see ForkJoinTask
- task.wait(); // for explanation
- } catch (InterruptedException ie) {
+ int s;
+ if ((s = task.status) >= 0) {
+ ForkJoinTask<?> prevJoin = joiner.currentJoin;
+ joiner.currentJoin = task;
+ long startTime = 0L;
+ for (int k = 0;;) {
+ if ((s = (joiner.isEmpty() ? // try to help
+ tryHelpStealer(joiner, task) :
+ joiner.tryRemoveAndExec(task))) == 0 &&
+ (s = task.status) >= 0) {
+ if (k == 0) {
+ startTime = System.nanoTime();
+ tryPollForAndExec(joiner, task); // check uncommon case
+ }
+ else if ((k & (MAX_HELP - 1)) == 0 &&
+ System.nanoTime() - startTime >=
+ COMPENSATION_DELAY &&
+ tryCompensate(task, null)) {
+ if (task.trySetSignal()) {
+ synchronized (task) {
+ if (task.status >= 0) {
+ try { // see ForkJoinTask
+ task.wait(); // for explanation
+ } catch (InterruptedException ie) {
+ }
}
+ else
+ task.notifyAll();
}
- else
- task.notifyAll();
}
+ long c; // re-activate
+ do {} while (!U.compareAndSwapLong
+ (this, CTL, c = ctl, c + AC_UNIT));
}
- long c; // re-activate
- do {} while (!U.compareAndSwapLong
- (this, CTL, c = ctl, c + AC_UNIT));
}
+ if (s < 0 || (s = task.status) < 0) {
+ joiner.currentJoin = prevJoin;
+ break;
+ }
+ else if ((k++ & (MAX_HELP - 1)) == MAX_HELP >>> 1)
+ Thread.yield(); // for politeness
}
- if ((s = task.status) < 0) {
- joiner.currentJoin = prevJoin;
- return s;
- }
- else if ((k & (MAX_HELP - 1)) == MAX_HELP >>> 1)
- Thread.yield(); // for politeness
}
+ return s;
}
/**
@@ -1887,7 +1881,7 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
while ((s = task.status) >= 0 &&
(joiner.isEmpty() ?
tryHelpStealer(joiner, task) :
- joiner.tryRemoveAndExec(task)))
+ joiner.tryRemoveAndExec(task)) != 0)
;
return s;
}
@@ -1919,6 +1913,7 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
}
}
+
/**
* Runs tasks until {@code isQuiescent()}. We piggyback on
* active count ctl maintenance, but rather than blocking
@@ -1927,8 +1922,9 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
*/
final void helpQuiescePool(WorkQueue w) {
for (boolean active = true;;) {
- if (w.base - w.top < 0)
- w.runLocalTasks(); // exhaust local queue
+ ForkJoinTask<?> localTask; // exhaust local queue
+ while ((localTask = w.nextLocalTask()) != null)
+ localTask.doExec();
WorkQueue q = findNonEmptyStealQueue(w);
if (q != null) {
ForkJoinTask<?> t; int b;
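For context (not part of the patch): the hunk above re-enables "extends AbstractExecutorService", so the pool is usable through the plain ExecutorService interface again. A minimal sketch against the scala.concurrent.forkjoin fork (the no-arg constructor is assumed):

import java.util.concurrent.{Callable, ExecutorService}
import scala.concurrent.forkjoin.ForkJoinPool

object PoolDemo extends App {
  val pool: ExecutorService = new ForkJoinPool()                   // assumed default constructor
  val answer = pool.submit(new Callable[Int] { def call() = 21 * 2 })
  println(answer.get())                                            // 42
  pool.shutdown()
}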
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
index 5619bb0255..15c60118b3 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
@@ -16,7 +16,7 @@ import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
-//import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.RunnableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.ReentrantLock;
@@ -115,18 +115,19 @@ import java.lang.reflect.Constructor;
* <p>The ForkJoinTask class is not usually directly subclassed.
* Instead, you subclass one of the abstract classes that support a
* particular style of fork/join processing, typically {@link
- * RecursiveAction} for computations that do not return results, or
- * {@link RecursiveTask} for those that do. Normally, a concrete
- * ForkJoinTask subclass declares fields comprising its parameters,
- * established in a constructor, and then defines a {@code compute}
- * method that somehow uses the control methods supplied by this base
- * class. While these methods have {@code public} access (to allow
- * instances of different task subclasses to call each other's
- * methods), some of them may only be called from within other
- * ForkJoinTasks (as may be determined using method {@link
- * #inForkJoinPool}). Attempts to invoke them in other contexts
- * result in exceptions or errors, possibly including
- * {@code ClassCastException}.
+ * RecursiveAction} for most computations that do not return results,
+ * {@link RecursiveTask} for those that do, and {@link
+ * CountedCompleter} for those in which completed actions trigger
+ * other actions. Normally, a concrete ForkJoinTask subclass declares
+ * fields comprising its parameters, established in a constructor, and
+ * then defines a {@code compute} method that somehow uses the control
+ * methods supplied by this base class. While these methods have
+ * {@code public} access (to allow instances of different task
+ * subclasses to call each other's methods), some of them may only be
+ * called from within other ForkJoinTasks (as may be determined using
+ * method {@link #inForkJoinPool}). Attempts to invoke them in other
+ * contexts result in exceptions or errors, possibly including {@code
+ * ClassCastException}.
*
* <p>Method {@link #join} and its variants are appropriate for use
* only when completion dependencies are acyclic; that is, the
@@ -137,17 +138,17 @@ import java.lang.reflect.Constructor;
* {@link Phaser}, {@link #helpQuiesce}, and {@link #complete}) that
* may be of use in constructing custom subclasses for problems that
* are not statically structured as DAGs. To support such usages a
- * ForkJoinTask may be atomically <em>marked</em> using {@link
- * #markForkJoinTask} and checked for marking using {@link
- * #isMarkedForkJoinTask}. The ForkJoinTask implementation does not
- * use these {@code protected} methods or marks for any purpose, but
+ * ForkJoinTask may be atomically <em>tagged</em> with a {@code
+ * short} value using {@link #setForkJoinTaskTag} or {@link
+ * #compareAndSetForkJoinTaskTag} and checked using {@link
+ * #getForkJoinTaskTag}. The ForkJoinTask implementation does not
+ * use these {@code protected} methods or tags for any purpose, but
* they may be of use in the construction of specialized subclasses.
* For example, parallel graph traversals can use the supplied methods
* to avoid revisiting nodes/tasks that have already been processed.
- * Also, completion based designs can use them to record that one
- * subtask has completed. (Method names for marking are bulky in part
- * to encourage definition of methods that reflect their usage
- * patterns.)
+ * Also, completion based designs can use them to record that subtasks
+ * have completed. (Method names for tagging are bulky in part to
+ * encourage definition of methods that reflect their usage patterns.)
*
* <p>Most base support methods are {@code final}, to prevent
* overriding of implementations that are intrinsically tied to the
@@ -213,6 +214,10 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* thin-lock techniques, so use some odd coding idioms that tend
* to avoid them, mainly by arranging that every synchronized
* block performs a wait, notifyAll or both.
+ *
+ * These control bits occupy only (some of) the upper half (16
+ * bits) of status field. The lower bits are used for user-defined
+ * tags.
*/
/** The run status of this task */
@@ -221,13 +226,12 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
static final int NORMAL = 0xf0000000; // must be negative
static final int CANCELLED = 0xc0000000; // must be < NORMAL
static final int EXCEPTIONAL = 0x80000000; // must be < CANCELLED
- static final int SIGNAL = 0x00000001;
- static final int MARKED = 0x00000002;
+ static final int SIGNAL = 0x00010000; // must be >= 1 << 16
+ static final int SMASK = 0x0000ffff; // short bits for tags
/**
* Marks completion and wakes up threads waiting to join this
- * task. A specialization for NORMAL completion is in method
- * doExec.
+ * task.
*
* @param completion one of NORMAL, CANCELLED, EXCEPTIONAL
* @return completion status on exit
@@ -237,7 +241,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
if ((s = status) < 0)
return s;
if (U.compareAndSwapInt(this, STATUS, s, s | completion)) {
- if ((s & SIGNAL) != 0)
+ if ((s >>> 16) != 0)
synchronized (this) { notifyAll(); }
return completion;
}
@@ -259,26 +263,22 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
} catch (Throwable rex) {
return setExceptionalCompletion(rex);
}
- while ((s = status) >= 0 && completed) {
- if (U.compareAndSwapInt(this, STATUS, s, s | NORMAL)) {
- if ((s & SIGNAL) != 0)
- synchronized (this) { notifyAll(); }
- return NORMAL;
- }
- }
+ if (completed)
+ s = setCompletion(NORMAL);
}
return s;
}
/**
- * Tries to set SIGNAL status. Used by ForkJoinPool. Other
- * variants are directly incorporated into externalAwaitDone etc.
+ * Tries to set SIGNAL status unless already completed. Used by
+ * ForkJoinPool. Other variants are directly incorporated into
+ * externalAwaitDone etc.
*
* @return true if successful
*/
final boolean trySetSignal() {
- int s;
- return U.compareAndSwapInt(this, STATUS, s = status, s | SIGNAL);
+ int s = status;
+ return s >= 0 && U.compareAndSwapInt(this, STATUS, s, s | SIGNAL);
}
/**
@@ -328,7 +328,6 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
return s;
}
-
/**
* Implementation for join, get, quietlyJoin. Directly handles
* only cases of already-completed, external wait, and
@@ -417,25 +416,39 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* @return status on exit
*/
private int setExceptionalCompletion(Throwable ex) {
- int h = System.identityHashCode(this);
- final ReentrantLock lock = exceptionTableLock;
- lock.lock();
- try {
- expungeStaleExceptions();
- ExceptionNode[] t = exceptionTable;
- int i = h & (t.length - 1);
- for (ExceptionNode e = t[i]; ; e = e.next) {
- if (e == null) {
- t[i] = new ExceptionNode(this, ex, t[i]);
- break;
+ int s;
+ if ((s = status) >= 0) {
+ int h = System.identityHashCode(this);
+ final ReentrantLock lock = exceptionTableLock;
+ lock.lock();
+ try {
+ expungeStaleExceptions();
+ ExceptionNode[] t = exceptionTable;
+ int i = h & (t.length - 1);
+ for (ExceptionNode e = t[i]; ; e = e.next) {
+ if (e == null) {
+ t[i] = new ExceptionNode(this, ex, t[i]);
+ break;
+ }
+ if (e.get() == this) // already present
+ break;
}
- if (e.get() == this) // already present
- break;
+ } finally {
+ lock.unlock();
}
- } finally {
- lock.unlock();
+ s = setCompletion(EXCEPTIONAL);
}
- return setCompletion(EXCEPTIONAL);
+ ForkJoinTask<?> p = internalGetCompleter(); // propagate
+ if (p != null && p.status >= 0)
+ p.setExceptionalCompletion(ex);
+ return s;
+ }
+
+ /**
+ * Exception propagation support for tasks with completers.
+ */
+ ForkJoinTask<?> internalGetCompleter() {
+ return null;
}
/**
@@ -517,7 +530,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
Throwable ex;
if (e == null || (ex = e.ex) == null)
return null;
- if (e.thrower != Thread.currentThread().getId()) {
+ if (false && e.thrower != Thread.currentThread().getId()) {
Class<? extends Throwable> ec = ex.getClass();
try {
Constructor<?> noArgCtor = null;
@@ -907,6 +920,18 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
/**
+ * Completes this task normally without setting a value. The most
+ * recent value established by {@link #setRawResult} (or {@code
+ * null} by default) will be returned as the result of subsequent
+ * invocations of {@code join} and related operations.
+ *
+ * @since 1.8
+ */
+ public final void quietlyComplete() {
+ setCompletion(NORMAL);
+ }
+
+ /**
* Waits if necessary for the computation to complete, and then
* retrieves its result.
*
@@ -1225,15 +1250,18 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
protected abstract void setRawResult(V value);
/**
- * Immediately performs the base action of this task. This method
- * is designed to support extensions, and should not in general be
- * called otherwise. The return value controls whether this task
- * is considered to be done normally. It may return false in
+ * Immediately performs the base action of this task and returns
+ * true if, upon return from this method, this task is guaranteed
+ * to have completed normally. This method may return false
+ * otherwise, to indicate that this task is not necessarily
+ * complete (or is not known to be complete), for example in
* asynchronous actions that require explicit invocations of
- * {@link #complete} to become joinable. It may also throw an
- * (unchecked) exception to indicate abnormal exit.
+ * completion methods. This method may also throw an (unchecked)
+ * exception to indicate abnormal exit. This method is designed to
+ * support extensions, and should not in general be called
+ * otherwise.
*
- * @return {@code true} if completed normally
+ * @return {@code true} if this task is known to have completed normally
*/
protected abstract boolean exec();
@@ -1302,44 +1330,53 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
return wt.pool.nextTaskFor(wt.workQueue);
}
- // Mark-bit operations
+ // tag operations
/**
- * Returns true if this task is marked.
+ * Returns the tag for this task.
*
- * @return true if this task is marked
+ * @return the tag for this task
* @since 1.8
*/
- public final boolean isMarkedForkJoinTask() {
- return (status & MARKED) != 0;
+ public final short getForkJoinTaskTag() {
+ return (short)status;
}
/**
- * Atomically sets the mark on this task.
+ * Atomically sets the tag value for this task.
*
- * @return true if this task was previously unmarked
+ * @param tag the tag value
+ * @return the previous value of the tag
* @since 1.8
*/
- public final boolean markForkJoinTask() {
+ public final short setForkJoinTaskTag(short tag) {
for (int s;;) {
- if (((s = status) & MARKED) != 0)
- return false;
- if (U.compareAndSwapInt(this, STATUS, s, s | MARKED))
- return true;
+ if (U.compareAndSwapInt(this, STATUS, s = status,
+ (s & ~SMASK) | (tag & SMASK)))
+ return (short)s;
}
}
/**
- * Atomically clears the mark on this task.
+ * Atomically conditionally sets the tag value for this task.
+ * Among other applications, tags can be used as visit markers
+ * in tasks operating on graphs, as in methods that check: {@code
+ * if (task.compareAndSetForkJoinTaskTag((short)0, (short)1))}
+ * before processing, otherwise exiting because the node has
+ * already been visited.
*
- * @return true if this task was previously marked
+ * @param e the expected tag value
+ * @param tag the new tag value
+ * @return true if successful; i.e., the current value was
+ * equal to e and is now tag.
* @since 1.8
*/
- public final boolean unmarkForkJoinTask() {
+ public final boolean compareAndSetForkJoinTaskTag(short e, short tag) {
for (int s;;) {
- if (((s = status) & MARKED) == 0)
+ if ((short)(s = status) != e)
return false;
- if (U.compareAndSwapInt(this, STATUS, s, s & ~MARKED))
+ if (U.compareAndSwapInt(this, STATUS, s,
+ (s & ~SMASK) | (tag & SMASK)))
return true;
}
}
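The visit-marker idiom mentioned in the compareAndSetForkJoinTaskTag documentation, sketched in Scala under the same assumptions as above; NodeTask and its wiring are hypothetical, and handling of overall completion is omitted.

    import scala.concurrent.forkjoin.RecursiveAction

    class NodeTask(var neighbours: Seq[NodeTask] = Nil) extends RecursiveAction {
      def compute(): Unit = {
        // ... per-node work goes here ...
        for (n <- neighbours)
          if (n.compareAndSetForkJoinTaskTag(0.toShort, 1.toShort))
            n.fork()                 // only the first visitor schedules a node; others skip it
      }
    }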
diff --git a/src/intellij/dbc.iml.SAMPLE b/src/intellij/dbc.iml.SAMPLE
deleted file mode 100644
index 5a6df4cfaa..0000000000
--- a/src/intellij/dbc.iml.SAMPLE
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=128M -d32 -server -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
- <component name="NewModuleRootManager" inherit-compiler-output="true">
- <exclude-output />
- <content url="file://$MODULE_DIR$/../dbc">
- <sourceFolder url="file://$MODULE_DIR$/../dbc" isTestSource="false" />
- </content>
- <orderEntry type="inheritedJdk" />
- <orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="library" />
- </component>
-</module>
-
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
index 93b6285cfb..43d6362708 100644
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -197,13 +197,13 @@
<modules>
<module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
<module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
- <module fileurl="file://$PROJECT_DIR$/dbc.iml" filepath="$PROJECT_DIR$/dbc.iml" />
<module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
<module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
<module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
<module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
+ <module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
</modules>
</component>
<component name="ProjectResources">
diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE
new file mode 100644
index 0000000000..e6729ae362
--- /dev/null
+++ b/src/intellij/test.iml.SAMPLE
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../../test" />
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="actors" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="continuations" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="swing" />
+ <orderEntry type="module" module-name="partest" />
+ </component>
+</module>
+
diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt
index 6145429f1e..da27a0084b 100644
--- a/src/library/rootdoc.txt
+++ b/src/library/rootdoc.txt
@@ -22,6 +22,6 @@ Many other packages exist. See the complete list on the left.
Identifiers in the scala package and the [[scala.Predef]] object are always in scope by default.
-Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, List is an alias for scala.collection.immutable.[[scala.collection.immutable.List]].
+Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, `List` is an alias for scala.collection.immutable.[[scala.collection.immutable.List]].
-Other aliases refer to classes providing by the underlying platform. For example, on the JVM, String is an alias for java.lang.String.
+Other aliases refer to classes provided by the underlying platform. For example, on the JVM, `String` is an alias for `java.lang.String`.
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 99c54ce58c..e7cf399fa4 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -12,17 +12,17 @@ import scala.collection.generic._
import scala.collection.{ mutable, immutable }
import mutable.{ ArrayBuilder, ArraySeq }
import compat.Platform.arraycopy
-import scala.reflect.ClassManifest
+import scala.reflect.ArrayTag
import scala.runtime.ScalaRunTime.{ array_apply, array_update }
/** Contains a fallback builder for arrays when the element type
- * does not have a class manifest. In that case a generic array is built.
+ * does not have a class tag. In that case a generic array is built.
*/
class FallbackArrayBuilding {
/** A builder factory that generates a generic array.
* Called instead of `Array.newBuilder` if the element type of an array
- * does not have a class manifest. Note that fallbackBuilder factory
+ * does not have a class tag. Note that fallbackBuilder factory
* needs an implicit parameter (otherwise it would not be dominated in
* implicit search by `Array.canBuildFrom`). We make sure that
* implicit search is always successful.
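In practice the two builder factories above mean that mapping over an Array yields another Array only when an ArrayTag for the element type is in scope; a small sketch against this patch's scala.reflect.ArrayTag (the method names here are illustrative):

    import scala.reflect.ArrayTag

    // ArrayTag available: the result is again an Array[T].
    def doubledAll[T: ArrayTag](xs: Array[T])(f: T => T): Array[T] = xs.map(f)

    // No evidence for T: the fallback factory produces a generic sequence (ArraySeq) instead.
    def firsts[T](xs: Array[(T, Int)]): Seq[T] = xs.map(_._1)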
@@ -48,16 +48,16 @@ class FallbackArrayBuilding {
* @version 1.0
*/
object Array extends FallbackArrayBuilding {
- implicit def canBuildFrom[T](implicit m: ClassManifest[T]): CanBuildFrom[Array[_], T, Array[T]] =
+ implicit def canBuildFrom[T](implicit t: ArrayTag[T]): CanBuildFrom[Array[_], T, Array[T]] =
new CanBuildFrom[Array[_], T, Array[T]] {
- def apply(from: Array[_]) = ArrayBuilder.make[T]()(m)
- def apply() = ArrayBuilder.make[T]()(m)
+ def apply(from: Array[_]) = ArrayBuilder.make[T]()(t)
+ def apply() = ArrayBuilder.make[T]()(t)
}
/**
* Returns a new [[scala.collection.mutable.ArrayBuilder]].
*/
- def newBuilder[T](implicit m: ClassManifest[T]): ArrayBuilder[T] = ArrayBuilder.make[T]()(m)
+ def newBuilder[T](implicit t: ArrayTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T]()(t)
private def slowcopy(src : AnyRef,
srcPos : Int,
@@ -98,14 +98,14 @@ object Array extends FallbackArrayBuilding {
}
/** Returns an array of length 0 */
- def empty[T: ClassManifest]: Array[T] = new Array[T](0)
+ def empty[T: ArrayTag]: Array[T] = new Array[T](0)
/** Creates an array with given elements.
*
* @param xs the elements to put in the array
* @return an array containing all elements from xs.
*/
- def apply[T: ClassManifest](xs: T*): Array[T] = {
+ def apply[T: ArrayTag](xs: T*): Array[T] = {
val array = new Array[T](xs.length)
var i = 0
for (x <- xs.iterator) { array(i) = x; i += 1 }
@@ -194,23 +194,23 @@ object Array extends FallbackArrayBuilding {
}
/** Creates array with given dimensions */
- def ofDim[T: ClassManifest](n1: Int): Array[T] =
+ def ofDim[T: ArrayTag](n1: Int): Array[T] =
new Array[T](n1)
/** Creates a 2-dimensional array */
- def ofDim[T: ClassManifest](n1: Int, n2: Int): Array[Array[T]] = {
+ def ofDim[T: ArrayTag](n1: Int, n2: Int): Array[Array[T]] = {
val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]])
for (i <- 0 until n1) arr(i) = new Array[T](n2)
arr
// tabulate(n1)(_ => ofDim[T](n2))
}
/** Creates a 3-dimensional array */
- def ofDim[T: ClassManifest](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
+ def ofDim[T: ArrayTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
tabulate(n1)(_ => ofDim[T](n2, n3))
/** Creates a 4-dimensional array */
- def ofDim[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
+ def ofDim[T: ArrayTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
tabulate(n1)(_ => ofDim[T](n2, n3, n4))
/** Creates a 5-dimensional array */
- def ofDim[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
+ def ofDim[T: ArrayTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5))
/** Concatenates all arrays into a single array.
@@ -218,7 +218,7 @@ object Array extends FallbackArrayBuilding {
* @param xss the given arrays
* @return the array created from concatenating `xss`
*/
- def concat[T: ClassManifest](xss: Array[T]*): Array[T] = {
+ def concat[T: ArrayTag](xss: Array[T]*): Array[T] = {
val b = newBuilder[T]
b.sizeHint(xss.map(_.size).sum)
for (xs <- xss) b ++= xs
@@ -239,7 +239,7 @@ object Array extends FallbackArrayBuilding {
* @return an Array of size n, where each element contains the result of computing
* `elem`.
*/
- def fill[T: ClassManifest](n: Int)(elem: => T): Array[T] = {
+ def fill[T: ArrayTag](n: Int)(elem: => T): Array[T] = {
val b = newBuilder[T]
b.sizeHint(n)
var i = 0
@@ -257,7 +257,7 @@ object Array extends FallbackArrayBuilding {
* @param n2 the number of elements in the 2nd dimension
* @param elem the element computation
*/
- def fill[T: ClassManifest](n1: Int, n2: Int)(elem: => T): Array[Array[T]] =
+ def fill[T: ArrayTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] =
tabulate(n1)(_ => fill(n2)(elem))
/** Returns a three-dimensional array that contains the results of some element
@@ -268,7 +268,7 @@ object Array extends FallbackArrayBuilding {
* @param n3 the number of elements in the 3nd dimension
* @param elem the element computation
*/
- def fill[T: ClassManifest](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] =
+ def fill[T: ArrayTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] =
tabulate(n1)(_ => fill(n2, n3)(elem))
/** Returns a four-dimensional array that contains the results of some element
@@ -280,7 +280,7 @@ object Array extends FallbackArrayBuilding {
* @param n4 the number of elements in the 4th dimension
* @param elem the element computation
*/
- def fill[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] =
+ def fill[T: ArrayTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] =
tabulate(n1)(_ => fill(n2, n3, n4)(elem))
/** Returns a five-dimensional array that contains the results of some element
@@ -293,7 +293,7 @@ object Array extends FallbackArrayBuilding {
* @param n5 the number of elements in the 5th dimension
* @param elem the element computation
*/
- def fill[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] =
+ def fill[T: ArrayTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem))
/** Returns an array containing values of a given function over a range of integer
@@ -303,7 +303,7 @@ object Array extends FallbackArrayBuilding {
* @param f The function computing element values
* @return A traversable consisting of elements `f(0),f(1), ..., f(n - 1)`
*/
- def tabulate[T: ClassManifest](n: Int)(f: Int => T): Array[T] = {
+ def tabulate[T: ArrayTag](n: Int)(f: Int => T): Array[T] = {
val b = newBuilder[T]
b.sizeHint(n)
var i = 0
@@ -321,7 +321,7 @@ object Array extends FallbackArrayBuilding {
* @param n2 the number of elements in the 2nd dimension
* @param f The function computing element values
*/
- def tabulate[T: ClassManifest](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] =
+ def tabulate[T: ArrayTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] =
tabulate(n1)(i1 => tabulate(n2)(f(i1, _)))
/** Returns a three-dimensional array containing values of a given function
@@ -332,7 +332,7 @@ object Array extends FallbackArrayBuilding {
* @param n3 the number of elements in the 3rd dimension
* @param f The function computing element values
*/
- def tabulate[T: ClassManifest](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] =
+ def tabulate[T: ArrayTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] =
tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _)))
/** Returns a four-dimensional array containing values of a given function
@@ -344,7 +344,7 @@ object Array extends FallbackArrayBuilding {
* @param n4 the number of elements in the 4th dimension
* @param f The function computing element values
*/
- def tabulate[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] =
+ def tabulate[T: ArrayTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _)))
/** Returns a five-dimensional array containing values of a given function
@@ -357,13 +357,13 @@ object Array extends FallbackArrayBuilding {
* @param n5 the number of elements in the 5th dimension
* @param f The function computing element values
*/
- def tabulate[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] =
+ def tabulate[T: ArrayTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _)))
/** Returns an array containing a sequence of increasing integers in a range.
*
- * @param from the start value of the array
- * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned)
+ * @param start the start value of the array
+ * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned)
* @return the array with values in range `start, start + 1, ..., end - 1`
* up to, but excluding, `end`.
*/
@@ -396,7 +396,7 @@ object Array extends FallbackArrayBuilding {
* @param f the function that is repeatedly applied
* @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...`
*/
- def iterate[T: ClassManifest](start: T, len: Int)(f: T => T): Array[T] = {
+ def iterate[T: ArrayTag](start: T, len: Int)(f: T => T): Array[T] = {
val b = newBuilder[T]
if (len > 0) {
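For reference, how a few of the factory methods documented above behave (results shown in comments):

    Array.fill(3)("x")                    // Array(x, x, x)
    Array.tabulate(4)(i => i * i)         // Array(0, 1, 4, 9)
    Array.range(1, 5)                     // Array(1, 2, 3, 4): `end` is exclusive
    Array.iterate(1, 4)(_ * 2)            // Array(1, 2, 4, 8)
    Array.ofDim[Int](2, 3)                // a 2 x 3 array of zeroes
    Array.concat(Array(1, 2), Array(3))   // Array(1, 2, 3)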
@@ -438,10 +438,8 @@ object Array extends FallbackArrayBuilding {
* Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above
* example code.
* Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to
- * `update(Int, T)`. For more information on these transformations, see the
- * [[http://www.scala-lang.org/docu/files/ScalaReference.pdf Scala Language Specification v2.8]], Sections
- * 6.6 and 6.15 respectively.
- *
+ * `update(Int, T)`.
+ *
* Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion
* to [[scala.collection.mutable.ArrayOps]] (shown on line 4 of the example above) and a conversion
* to [[scala.collection.mutable.WrappedArray]] (a subtype of [[scala.collections.Seq]]).
@@ -465,8 +463,22 @@ object Array extends FallbackArrayBuilding {
*
* @author Martin Odersky
* @version 1.0
- * @see [[http://www.scala-lang.org/docu/files/collections-api/collections_38.html#anchor "The Scala 2.8 Collections' API"]]
- * section on `Array` by Martin Odersky for more information.
+ * @see [[http://www.scala-lang.org/docu/files/ScalaReference.pdf Scala Language Specification]] for in-depth information on the transformations the Scala compiler makes on arrays (Sections 6.6 and 6.15, respectively).
+ * @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]], the Scala Improvement Document detailing arrays since Scala 2.8.
+ * @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information.
+ * @define coll array
+ * @define Coll `Array`
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define collectExample
+ * @define undefinedorder
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is either `Array[B]` if an `ArrayTag` is available for `B`, or `ArraySeq[B]` otherwise.
+ * @define zipthatinfo $thatinfo
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
+ * representation type `Repr` and the new element type `B`.
*/
final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable {
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index 5078e59d28..edb82b33fe 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -17,7 +17,7 @@ package scala
* There is an implicit conversion from [[scala.Boolean]] => [[scala.runtime.RichBoolean]]
* which provides useful non-primitive operations.
*/
-final class Boolean extends AnyVal {
+final class Boolean private extends AnyVal {
/**
* Negates a Boolean expression.
*
@@ -110,7 +110,7 @@ final class Boolean extends AnyVal {
override def getClass(): Class[Boolean] = sys.error("stub")
}
-object Boolean extends AnyValCompanion {
+object Boolean extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index f9c5f6003e..b5b3d88e3f 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -17,7 +17,7 @@ package scala
* There is an implicit conversion from [[scala.Byte]] => [[scala.runtime.RichByte]]
* which provides useful non-primitive operations.
*/
-final class Byte extends AnyVal {
+final class Byte private extends AnyVal {
def toByte: Byte = sys.error("stub")
def toShort: Short = sys.error("stub")
def toChar: Char = sys.error("stub")
@@ -27,7 +27,7 @@ final class Byte extends AnyVal {
def toDouble: Double = sys.error("stub")
/**
- * @return the bitwise negation of this value
+ * Returns the bitwise negation of this value.
* @example {{{
* ~5 == -6
* // in binary: ~00000101 ==
@@ -36,30 +36,30 @@ final class Byte extends AnyVal {
*/
def unary_~ : Int = sys.error("stub")
/**
- * @return this value, unmodified
+ * Returns this value, unmodified.
*/
def unary_+ : Int = sys.error("stub")
/**
- * @return the negation of this value
+ * Returns the negation of this value.
*/
def unary_- : Int = sys.error("stub")
def +(x: String): String = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Long): Int = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -70,7 +70,7 @@ final class Byte extends AnyVal {
*/
def >>>(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -81,7 +81,7 @@ final class Byte extends AnyVal {
*/
def >>>(x: Long): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -92,7 +92,7 @@ final class Byte extends AnyVal {
*/
def >>(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -104,181 +104,181 @@ final class Byte extends AnyVal {
def >>(x: Long): Int = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Double): Boolean = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -289,7 +289,7 @@ final class Byte extends AnyVal {
*/
def |(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -300,7 +300,7 @@ final class Byte extends AnyVal {
*/
def |(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -311,7 +311,7 @@ final class Byte extends AnyVal {
*/
def |(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -322,7 +322,7 @@ final class Byte extends AnyVal {
*/
def |(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -334,7 +334,7 @@ final class Byte extends AnyVal {
def |(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -345,7 +345,7 @@ final class Byte extends AnyVal {
*/
def &(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -356,7 +356,7 @@ final class Byte extends AnyVal {
*/
def &(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -367,7 +367,7 @@ final class Byte extends AnyVal {
*/
def &(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -378,7 +378,7 @@ final class Byte extends AnyVal {
*/
def &(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -390,7 +390,7 @@ final class Byte extends AnyVal {
def &(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -401,7 +401,7 @@ final class Byte extends AnyVal {
*/
def ^(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -412,7 +412,7 @@ final class Byte extends AnyVal {
*/
def ^(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -423,7 +423,7 @@ final class Byte extends AnyVal {
*/
def ^(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -434,7 +434,7 @@ final class Byte extends AnyVal {
*/
def ^(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -446,154 +446,154 @@ final class Byte extends AnyVal {
def ^(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Byte): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Short): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Char): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Int): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Float): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Double): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Byte): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Short): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Char): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Int): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Long): Long = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Float): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Double): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Byte): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Short): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Char): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Int): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Long): Long = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Float): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Double): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Byte): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Short): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Char): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Int): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Long): Long = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Float): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Double): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Byte): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Short): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Char): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Int): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Long): Long = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Float): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Double): Double = sys.error("stub")
override def getClass(): Class[Byte] = sys.error("stub")
}
-object Byte extends AnyValCompanion {
+object Byte extends AnyValCompanion {
/** The smallest value representable as a Byte.
*/
final val MinValue = java.lang.Byte.MIN_VALUE
@@ -622,5 +622,10 @@ object Byte extends AnyValCompanion {
/** The String representation of the scala.Byte companion object.
*/
override def toString = "object scala.Byte"
+ implicit def byte2short(x: Byte): Short = x.toShort
+ implicit def byte2int(x: Byte): Int = x.toInt
+ implicit def byte2long(x: Byte): Long = x.toLong
+ implicit def byte2float(x: Byte): Float = x.toFloat
+ implicit def byte2double(x: Byte): Double = x.toDouble
}
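The widening conversions added to the Byte companion above apply wherever a wider numeric type is expected, for example:

    val b: Byte = 3
    val i: Int = b                 // via byte2int
    val d: Double = b              // via byte2double
    def twice(n: Long): Long = n * 2
    twice(b)                       // byte2long widens the argument: 6L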
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index 3d459782cd..e0ac9a2550 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -17,7 +17,7 @@ package scala
* There is an implicit conversion from [[scala.Char]] => [[scala.runtime.RichChar]]
* which provides useful non-primitive operations.
*/
-final class Char extends AnyVal {
+final class Char private extends AnyVal {
def toByte: Byte = sys.error("stub")
def toShort: Short = sys.error("stub")
def toChar: Char = sys.error("stub")
@@ -27,7 +27,7 @@ final class Char extends AnyVal {
def toDouble: Double = sys.error("stub")
/**
- * @return the bitwise negation of this value
+ * Returns the bitwise negation of this value.
* @example {{{
* ~5 == -6
* // in binary: ~00000101 ==
@@ -36,30 +36,30 @@ final class Char extends AnyVal {
*/
def unary_~ : Int = sys.error("stub")
/**
- * @return this value, unmodified
+ * Returns this value, unmodified.
*/
def unary_+ : Int = sys.error("stub")
/**
- * @return the negation of this value
+ * Returns the negation of this value.
*/
def unary_- : Int = sys.error("stub")
def +(x: String): String = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Long): Int = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -70,7 +70,7 @@ final class Char extends AnyVal {
*/
def >>>(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -81,7 +81,7 @@ final class Char extends AnyVal {
*/
def >>>(x: Long): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -92,7 +92,7 @@ final class Char extends AnyVal {
*/
def >>(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -104,181 +104,181 @@ final class Char extends AnyVal {
def >>(x: Long): Int = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Double): Boolean = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -289,7 +289,7 @@ final class Char extends AnyVal {
*/
def |(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -300,7 +300,7 @@ final class Char extends AnyVal {
*/
def |(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -311,7 +311,7 @@ final class Char extends AnyVal {
*/
def |(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -322,7 +322,7 @@ final class Char extends AnyVal {
*/
def |(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -334,7 +334,7 @@ final class Char extends AnyVal {
def |(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -345,7 +345,7 @@ final class Char extends AnyVal {
*/
def &(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -356,7 +356,7 @@ final class Char extends AnyVal {
*/
def &(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -367,7 +367,7 @@ final class Char extends AnyVal {
*/
def &(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -378,7 +378,7 @@ final class Char extends AnyVal {
*/
def &(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -390,7 +390,7 @@ final class Char extends AnyVal {
def &(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -401,7 +401,7 @@ final class Char extends AnyVal {
*/
def ^(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -412,7 +412,7 @@ final class Char extends AnyVal {
*/
def ^(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -423,7 +423,7 @@ final class Char extends AnyVal {
*/
def ^(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -434,7 +434,7 @@ final class Char extends AnyVal {
*/
def ^(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -446,154 +446,154 @@ final class Char extends AnyVal {
def ^(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Byte): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Short): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Char): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Int): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Float): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Double): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Byte): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Short): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Char): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Int): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Long): Long = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Float): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Double): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Byte): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Short): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Char): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Int): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Long): Long = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Float): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Double): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Byte): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Short): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Char): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Int): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Long): Long = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Float): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Double): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Byte): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Short): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Char): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Int): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Long): Long = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Float): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Double): Double = sys.error("stub")
override def getClass(): Class[Char] = sys.error("stub")
}
-object Char extends AnyValCompanion {
+object Char extends AnyValCompanion {
/** The smallest value representable as a Char.
*/
final val MinValue = java.lang.Character.MIN_VALUE
@@ -622,5 +622,9 @@ object Char extends AnyValCompanion {
/** The String representation of the scala.Char companion object.
*/
override def toString = "object scala.Char"
+ implicit def char2int(x: Char): Int = x.toInt
+ implicit def char2long(x: Char): Long = x.toLong
+ implicit def char2float(x: Char): Float = x.toFloat
+ implicit def char2double(x: Char): Double = x.toDouble
}
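(Editorial note: the widening conversions added to the `Char` companion above can be exercised as follows; `CharWideningExample` is a hypothetical object name used only for illustration.)
{{{
object CharWideningExample extends App {
  val code: Int       = 'A'   // a Char where an Int is expected; 'A' becomes 65
  val asDouble: Double = 'A'  // a Char where a Double is expected; 'A' becomes 65.0
  println((code, asDouble))   // prints (65,65.0)
}
}}}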
diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala
index 5e62df6c71..50e6626700 100644
--- a/src/library/scala/Console.scala
+++ b/src/library/scala/Console.scala
@@ -186,7 +186,6 @@ object Console {
* }
* }}}
*
- * @param in the new input stream.
* @param thunk the code to execute with
* the new input stream active
*
diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala
index e898bca720..52a38ca6f7 100644
--- a/src/library/scala/DelayedInit.scala
+++ b/src/library/scala/DelayedInit.scala
@@ -8,15 +8,41 @@
package scala
-/** Classes and traits inheriting the `DelayedInit` marker trait
- * will have their initialization code rewritten as follows:
+/** Classes and objects (but note, not traits) inheriting the `DelayedInit`
+ * marker trait will have their initialization code rewritten as follows:
* `code` becomes `delayedInit(code)`.
*
* Initialization code comprises all statements and all value definitions
* that are executed during initialization.
*
+ * Example:
+ * {{{
+ * trait Helper extends DelayedInit {
+ * def delayedInit(body: => Unit) = {
+ * println("dummy text, printed before initialization of C")
+ * body // evaluates the initialization code of C
+ * }
+ * }
+ *
+ * class C extends Helper {
+ * println("this is the initialization code of C")
+ * }
+ *
+ * object Test extends App {
+ * val c = new C
+ * }
+ * }}}
+ *
+ * Should result in the following being printed:
+ * {{{
+ * dummy text, printed before initialization of C
+ * this is the initialization code of C
+ * }}}
+ *
+ * @see "Delayed Initialization" subsection of the Scala Language Specification (section 5.1)
+ *
* @author Martin Odersky
*/
trait DelayedInit {
def delayedInit(x: => Unit): Unit
-}
+}
\ No newline at end of file
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index 01414265c4..bb659b963a 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -17,7 +17,7 @@ package scala
* There is an implicit conversion from [[scala.Double]] => [[scala.runtime.RichDouble]]
* which provides useful non-primitive operations.
*/
-final class Double extends AnyVal {
+final class Double private extends AnyVal {
def toByte: Byte = sys.error("stub")
def toShort: Short = sys.error("stub")
def toChar: Char = sys.error("stub")
@@ -27,339 +27,339 @@ final class Double extends AnyVal {
def toDouble: Double = sys.error("stub")
/**
- * @return this value, unmodified
+ * Returns this value, unmodified.
*/
def unary_+ : Double = sys.error("stub")
/**
- * @return the negation of this value
+ * Returns the negation of this value.
*/
def unary_- : Double = sys.error("stub")
def +(x: String): String = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Double): Boolean = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Byte): Double = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Short): Double = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Char): Double = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Int): Double = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Long): Double = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Float): Double = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Double): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Byte): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Short): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Char): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Int): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Long): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Float): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Double): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Byte): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Short): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Char): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Int): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Long): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Float): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Double): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Byte): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Short): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Char): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Int): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Long): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Float): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Double): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Byte): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Short): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Char): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Int): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Long): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Float): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Double): Double = sys.error("stub")
override def getClass(): Class[Double] = sys.error("stub")
}
-object Double extends AnyValCompanion {
+object Double extends AnyValCompanion {
/** The smallest positive value greater than 0.0d which is
* representable as a Double.
*/
diff --git a/src/library/scala/Dynamic.scala b/src/library/scala/Dynamic.scala
index dcf7599742..faf834d310 100644
--- a/src/library/scala/Dynamic.scala
+++ b/src/library/scala/Dynamic.scala
@@ -9,12 +9,25 @@
package scala
/** A marker trait that enables dynamic invocations. Instances `x` of this
- * trait allow calls `x.meth(args)` for arbitrary method names `meth` and
- * argument lists `args`. If a call is not natively supported by `x`, it
- * is rewritten to `x.applyDynamic("meth")(args)`.
+ * trait allow method invocations `x.meth(args)` for arbitrary method
+ * names `meth` and argument lists `args` as well as field accesses
+ * `x.field` for arbitrary field names `field`.
*
- * As of scala 2.9, `scalac` must receive the `-Xexperimental` option for
- * `Dynamic` to receive this treatment.
+ * If a call is not natively supported by `x` (i.e. if type checking
+ * fails), it is rewritten according to the following rules:
+ *
+ * {{{
+ * foo.method("blah") ~~> foo.applyDynamic("method")("blah")
+ * foo.method(x = "blah") ~~> foo.applyDynamicNamed("method")(("x", "blah"))
+ * foo.method(x = 1, 2) ~~> foo.applyDynamicNamed("method")(("x", 1), ("", 2))
+ * foo.field ~~> foo.selectDynamic("field")
+ * foo.varia = 10 ~~> foo.updateDynamic("varia")(10)
+ * foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13)
+ * foo.arr(10) ~~> foo.applyDynamics("arr")(10)
+ * }}}
+ *
+ * As of Scala 2.10, defining direct or indirect subclasses of this trait
+ * is only possible if the language feature `dynamics` is enabled.
*/
trait Dynamic
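(Editorial note: a minimal sketch of the rewriting rules documented above, assuming Scala 2.10 with the `dynamics` language feature enabled; `Bag` and its members are hypothetical names, not part of this diff.)
{{{
import scala.language.dynamics
import scala.collection.mutable

// A string-keyed bag that accepts arbitrary field and method names.
class Bag extends Dynamic {
  private val data = mutable.Map.empty[String, Any]
  def selectDynamic(name: String): Any = data(name)                       // bag.field
  def updateDynamic(name: String)(value: Any): Unit = data(name) = value  // bag.field = v
  def applyDynamic(name: String)(args: Any*): Any = (name, args)          // bag.method(args)
}

object BagExample extends App {
  val bag = new Bag
  bag.colour = "red"           // rewritten to bag.updateDynamic("colour")("red")
  println(bag.colour)          // rewritten to bag.selectDynamic("colour")
  println(bag.greet("world"))  // rewritten to bag.applyDynamic("greet")("world")
}
}}}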
diff --git a/src/library/scala/Either.scala b/src/library/scala/Either.scala
index e454cdf5ec..b35d8a7c8a 100644
--- a/src/library/scala/Either.scala
+++ b/src/library/scala/Either.scala
@@ -10,6 +10,8 @@
package scala
+import language.implicitConversions
+
/** Represents a value of one of two possible types (a disjoint union.)
* Instances of Either are either an instance of [[scala.Left]] or [[scala.Right]].
*
@@ -201,12 +203,6 @@ final case class Right[+A, +B](b: B) extends Either[A, B] {
}
object Either {
- class MergeableEither[A](x: Either[A, A]) {
- def merge: A = x match {
- case Left(a) => a
- case Right(a) => a
- }
- }
/**
* Allows use of a ``merge`` method to extract values from Either instances
@@ -219,7 +215,14 @@ object Either {
* r.merge: Seq[Int] // Vector(1)
* }}}
*/
- implicit def either2mergeable[A](x: Either[A, A]): MergeableEither[A] = new MergeableEither(x)
+ implicit class MergeableEither[A](x: Either[A, A]) {
+ def merge: A = x match {
+ case Left(a) => a
+ case Right(a) => a
+ }
+ }
+ @deprecated("use MergeableEither instead", "2.10")
+ def either2mergeable[A](x: Either[A, A]): MergeableEither[A] = new MergeableEither(x)
/**
* Projects an `Either` into a `Left`.
@@ -258,7 +261,7 @@ object Either {
* case ex => Left(ex)
* }
*
- * // this will only be executed if interactWithDB returns a Some
+ * // this will only be executed if interactWithDB returns a Right
* val report =
* for (r <- interactWithDB(someQuery).right) yield generateReport(r)
* if (report.isRight)
@@ -294,7 +297,7 @@ object Either {
* Left(12).left.foreach(x => println(x)) // prints "12"
* Right(12).left.foreach(x => println(x)) // doesn't print
* }}}
- * @param e The side-effecting function to execute.
+ * @param f The side-effecting function to execute.
*/
def foreach[U](f: A => U) = e match {
case Left(a) => f(a)
@@ -355,7 +358,7 @@ object Either {
* Left(12).left.flatMap(x => Left("scala")) // Left("scala")
 * Right(12).left.flatMap(x => Left("scala")) // Right(12)
* }}}
- * @param The function to bind across `Left`.
+ * @param f The function to bind across `Left`.
*/
def flatMap[BB >: B, X](f: A => Either[X, BB]) = e match {
case Left(a) => f(a)
@@ -459,7 +462,7 @@ object Either {
* Right(12).right.foreach(x => println(x)) // prints "12"
* Left(12).right.foreach(x => println(x)) // doesn't print
* }}}
- * @param e The side-effecting function to execute.
+ * @param f The side-effecting function to execute.
*/
def foreach[U](f: B => U) = e match {
case Left(_) => {}
@@ -513,7 +516,7 @@ object Either {
/**
* Binds the given function across `Right`.
*
- * @param The function to bind across `Right`.
+ * @param f The function to bind across `Right`.
*/
def flatMap[AA >: A, Y](f: B => Either[AA, Y]) = e match {
case Left(a) => Left(a)
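(Editorial note: a quick check of the `MergeableEither` implicit class introduced above; it is picked up from the `Either` companion, so no import is needed. `MergeExample` is a hypothetical object name.)
{{{
object MergeExample extends App {
  val l: Either[String, String] = Left("flower")
  val r: Either[String, String] = Right("flower")
  println(l.merge) // flower, extracted from the Left
  println(r.merge) // flower, extracted from the Right
}
}}}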
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index dc67d32ba0..2b658ee4f7 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -70,10 +70,9 @@ abstract class Enumeration (initial: Int) extends Serializable {
/** The name of this enumeration.
*/
- override def toString = (
- (getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.' last)
- split Pattern.quote(NAME_JOIN_STRING) last
- )
+ override def toString =
+ ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split
+ Pattern.quote(NAME_JOIN_STRING)).last
/** The mapping from the integer used to identify values to the actual
* values. */
@@ -114,8 +113,8 @@ abstract class Enumeration (initial: Int) extends Serializable {
* enumeration, but no higher than 0. */
private var bottomId = if(initial < 0) initial else 0
- /** The highest integer amongst those used to identify values in this
- * enumeration. */
+ /** The one higher than the highest integer amongst those used to identify
+ * values in this enumeration. */
final def maxId = topId
/** The value of this enumeration with given id `x`
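(Editorial note: a small sketch of the clarified `maxId` wording; with three values whose ids default to 0, 1 and 2, `maxId` is 3, one higher than the highest id in use. `Color` is a hypothetical enumeration.)
{{{
object Color extends Enumeration {
  val Red, Green, Blue = Value  // ids 0, 1, 2 by default
}

object MaxIdExample extends App {
  println(Color.maxId) // 3: one higher than the highest id (2)
  println(Color(2))    // Blue: look up a value by its id
}
}}}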
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index ff5b3cb112..bb03581062 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -17,7 +17,7 @@ package scala
* There is an implicit conversion from [[scala.Float]] => [[scala.runtime.RichFloat]]
* which provides useful non-primitive operations.
*/
-final class Float extends AnyVal {
+final class Float private extends AnyVal {
def toByte: Byte = sys.error("stub")
def toShort: Short = sys.error("stub")
def toChar: Char = sys.error("stub")
@@ -27,339 +27,339 @@ final class Float extends AnyVal {
def toDouble: Double = sys.error("stub")
/**
- * @return this value, unmodified
+ * Returns this value, unmodified.
*/
def unary_+ : Float = sys.error("stub")
/**
- * @return the negation of this value
+ * Returns the negation of this value.
*/
def unary_- : Float = sys.error("stub")
def +(x: String): String = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Double): Boolean = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Byte): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Short): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Char): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Int): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Long): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Float): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Double): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Byte): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Short): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Char): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Int): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Long): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Float): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Double): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Byte): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Short): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Char): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Int): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Long): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Float): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Double): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Byte): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Short): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Char): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Int): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Long): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Float): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Double): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Byte): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Short): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Char): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Int): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Long): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Float): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Double): Double = sys.error("stub")
override def getClass(): Class[Float] = sys.error("stub")
}
-object Float extends AnyValCompanion {
+object Float extends AnyValCompanion {
/** The smallest positive value greater than 0.0f which is
* representable as a Float.
*/
@@ -401,5 +401,6 @@ object Float extends AnyValCompanion {
/** The String representation of the scala.Float companion object.
*/
override def toString = "object scala.Float"
+ implicit def float2double(x: Float): Double = x.toDouble
}
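(Editorial note: the new `float2double` widening on the `Float` companion parallels the `Char` conversions earlier in this commit; a one-line sketch with a hypothetical object name.)
{{{
object FloatWideningExample extends App {
  val d: Double = 1.5f  // a Float where a Double is expected; widens to 1.5
  println(d)
}
}}}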
diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala
index 9fa56a332f..270581a3aa 100644
--- a/src/library/scala/Function.scala
+++ b/src/library/scala/Function.scala
@@ -20,7 +20,6 @@ object Function {
* function `f,,1,, andThen ... andThen f,,n,,`.
*
* @param fs The given sequence of functions
- * @return ...
*/
def chain[a](fs: Seq[a => a]): a => a = { x => (x /: fs) ((x, f) => f(x)) }
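(Editorial note: a brief usage sketch of `chain`, which folds the input through each function left to right; names are hypothetical.)
{{{
object ChainExample extends App {
  val inc    = (x: Int) => x + 1
  val double = (x: Int) => x * 2
  val f      = Function.chain(Seq(inc, double)) // applies inc first, then double
  println(f(3)) // 8
}
}}}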
@@ -38,7 +37,7 @@ object Function {
* @param f a function `T => Option[R]`
* @return a partial function defined for those inputs where
* f returns `Some(_)` and undefined where `f` returns `None`.
- * @see [[scala.PartialFunction#lift]]
+ * @see [[scala.PartialFunction]], method `lift`.
*/
def unlift[T, R](f: T => Option[R]): PartialFunction[T, R] = PartialFunction.unlifted(f)
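(Editorial note: a sketch of `unlift` turning an `Option`-returning function into a partial function that is defined only where the function yields `Some`; names are hypothetical.)
{{{
object UnliftExample extends App {
  val headOption = (s: String) => if (s.nonEmpty) Some(s.head) else None
  val head: PartialFunction[String, Char] = Function.unlift(headOption)
  println(head.isDefinedAt("scala")) // true
  println(head.isDefinedAt(""))      // false
  println(head("scala"))             // s
}
}}}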
@@ -72,9 +71,6 @@ object Function {
*
* @note These functions are slotted for deprecation, but it is on
* hold pending superior type inference for tupling anonymous functions.
- *
- * @param f ...
- * @return ...
*/
// @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, b](f: (a1, a2) => b): Tuple2[a1, a2] => b = {
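(Editorial note: a sketch of `Function.tupled` in its two-argument form shown above, adapting a binary function to accept a single `Tuple2`; names are hypothetical.)
{{{
object TupledExample extends App {
  val add  = (a: Int, b: Int) => a + b
  val addT = Function.tupled(add)          // Tuple2[Int, Int] => Int
  println(addT((1, 2)))                    // 3
  println(List((1, 2), (3, 4)).map(addT))  // List(3, 7)
}
}}}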
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index dceed26439..3690a0e65b 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -6,13 +6,13 @@
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Tue Feb 14 16:49:03 PST 2012
+// genprod generated these sources at: Mon Apr 30 07:46:11 PDT 2012
package scala
/** A function of 0 parameters.
- *
+ *
* In the following example, the definition of javaVersion is a
* shorthand for the anonymous class definition anonfun0:
*
@@ -24,7 +24,7 @@ package scala
* def apply(): String = sys.props("java.version")
* }
* assert(javaVersion() == anonfun0())
- * }
+ * }
* }}}
*
* Note that `Function1` does not define a total function, as might
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 8995ef912b..f9b37fc6bd 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -11,7 +11,7 @@ package scala
/** A function of 1 parameter.
- *
+ *
* In the following example, the definition of succ is a
* shorthand for the anonymous class definition anonfun1:
*
@@ -22,7 +22,7 @@ package scala
* def apply(x: Int): Int = x + 1
* }
* assert(succ(0) == anonfun1(0))
- * }
+ * }
* }}}
*
* Note that `Function1` does not define a total function, as might
@@ -44,7 +44,7 @@ trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, s
* @param g a function A => T1
* @return a new function `f` such that `f(x) == apply(g(x))`
*/
- def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
+ @annotation.unspecialized def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
/** Composes two instances of Function1 in a new Function1, with this function applied first.
*
@@ -52,7 +52,7 @@ trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, s
* @param g a function R => A
* @return a new function `f` such that `f(x) == g(apply(x))`
*/
- def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
+ @annotation.unspecialized def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
override def toString() = "<function1>"
}
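(Editorial note: the `@annotation.unspecialized` annotations added above are a code-generation concern only; `compose` and `andThen` keep their documented behaviour, e.g. in this hypothetical sketch.)
{{{
object ComposeExample extends App {
  val inc    = (x: Int) => x + 1
  val double = (x: Int) => x * 2
  println((inc andThen double)(3)) // double(inc(3)) == 8
  println((inc compose double)(3)) // inc(double(3)) == 7
}
}}}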
diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala
index 9e107fc53d..f7e5d414f2 100644
--- a/src/library/scala/Function10.scala
+++ b/src/library/scala/Function10.scala
@@ -21,16 +21,17 @@ trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)).curried
}
-
/** Creates a tupled version of this function: instead of 10 arguments,
* it accepts a single [[scala.Tuple10]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)) == f(Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)`
*/
- def tupled: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] => R = {
+
+ @annotation.unspecialized def tupled: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] => R = {
case Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)
}
override def toString() = "<function10>"
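(Editorial note: the `curried`/`tupled` pattern changed here is identical at every arity, and the hunks below for Function11 through Function19 repeat it; a hypothetical sketch at arity 3 keeps the example readable.)
{{{
object CurriedExample extends App {
  val volume = (x: Int, y: Int, z: Int) => x * y * z
  val c = volume.curried  // Int => Int => Int => Int
  val t = volume.tupled   // Tuple3[Int, Int, Int] => Int
  println(c(2)(3)(4))     // 24
  println(t((2, 3, 4)))   // 24
}
}}}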
diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala
index 783a86ab5d..53742bf733 100644
--- a/src/library/scala/Function11.scala
+++ b/src/library/scala/Function11.scala
@@ -21,16 +21,17 @@ trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] ex
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)).curried
}
-
/** Creates a tupled version of this function: instead of 11 arguments,
* it accepts a single [[scala.Tuple11]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)) == f(Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)`
*/
- def tupled: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] => R = {
+
+ @annotation.unspecialized def tupled: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] => R = {
case Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)
}
override def toString() = "<function11>"
diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala
index 7f4dee6216..e349d9017d 100644
--- a/src/library/scala/Function12.scala
+++ b/src/library/scala/Function12.scala
@@ -21,16 +21,17 @@ trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)).curried
}
-
/** Creates a tupled version of this function: instead of 12 arguments,
* it accepts a single [[scala.Tuple12]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)) == f(Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)`
*/
- def tupled: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] => R = {
+
+ @annotation.unspecialized def tupled: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] => R = {
case Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)
}
override def toString() = "<function12>"
diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala
index 23853dde69..10ec64b87a 100644
--- a/src/library/scala/Function13.scala
+++ b/src/library/scala/Function13.scala
@@ -21,16 +21,17 @@ trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)).curried
}
-
/** Creates a tupled version of this function: instead of 13 arguments,
* it accepts a single [[scala.Tuple13]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)) == f(Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)`
*/
- def tupled: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] => R = {
+
+ @annotation.unspecialized def tupled: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] => R = {
case Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)
}
override def toString() = "<function13>"
diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala
index 372f1cfafb..82dd409223 100644
--- a/src/library/scala/Function14.scala
+++ b/src/library/scala/Function14.scala
@@ -21,16 +21,17 @@ trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)).curried
}
-
/** Creates a tupled version of this function: instead of 14 arguments,
* it accepts a single [[scala.Tuple14]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)) == f(Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)`
*/
- def tupled: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] => R = {
+
+ @annotation.unspecialized def tupled: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] => R = {
case Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
}
override def toString() = "<function14>"
diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala
index 47c7309695..be5fbeeca1 100644
--- a/src/library/scala/Function15.scala
+++ b/src/library/scala/Function15.scala
@@ -21,16 +21,17 @@ trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)).curried
}
-
/** Creates a tupled version of this function: instead of 15 arguments,
* it accepts a single [[scala.Tuple15]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)) == f(Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)`
*/
- def tupled: Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] => R = {
+
+ @annotation.unspecialized def tupled: Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] => R = {
case Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)
}
override def toString() = "<function15>"
diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala
index 8eea42de5b..7a185b369c 100644
--- a/src/library/scala/Function16.scala
+++ b/src/library/scala/Function16.scala
@@ -21,16 +21,17 @@ trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)).curried
}
-
/** Creates a tupled version of this function: instead of 16 arguments,
* it accepts a single [[scala.Tuple16]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)) == f(Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)`
*/
- def tupled: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] => R = {
+
+ @annotation.unspecialized def tupled: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] => R = {
case Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)
}
override def toString() = "<function16>"
diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala
index 2d93af34f2..94e0000802 100644
--- a/src/library/scala/Function17.scala
+++ b/src/library/scala/Function17.scala
@@ -21,16 +21,17 @@ trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)).curried
}
-
/** Creates a tupled version of this function: instead of 17 arguments,
* it accepts a single [[scala.Tuple17]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)) == f(Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)`
*/
- def tupled: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] => R = {
+
+ @annotation.unspecialized def tupled: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] => R = {
case Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)
}
override def toString() = "<function17>"
diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala
index ffca98c443..a3ee6776ba 100644
--- a/src/library/scala/Function18.scala
+++ b/src/library/scala/Function18.scala
@@ -21,16 +21,17 @@ trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)).curried
}
-
/** Creates a tupled version of this function: instead of 18 arguments,
* it accepts a single [[scala.Tuple18]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)) == f(Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)`
*/
- def tupled: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] => R = {
+
+ @annotation.unspecialized def tupled: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] => R = {
case Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)
}
override def toString() = "<function18>"
diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala
index f661ea7707..038dcbb778 100644
--- a/src/library/scala/Function19.scala
+++ b/src/library/scala/Function19.scala
@@ -21,16 +21,17 @@ trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)).curried
}
-
/** Creates a tupled version of this function: instead of 19 arguments,
* it accepts a single [[scala.Tuple19]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)) == f(Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)`
*/
- def tupled: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] => R = {
+
+ @annotation.unspecialized def tupled: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] => R = {
case Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)
}
override def toString() = "<function19>"
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index cacb96ef5d..0794a4048a 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -11,7 +11,7 @@ package scala
/** A function of 2 parameters.
- *
+ *
* In the following example, the definition of max is a
* shorthand for the anonymous class definition anonfun2:
*
@@ -23,7 +23,7 @@ package scala
* def apply(x: Int, y: Int): Int = if (x < y) y else x
* }
* assert(max(0, 1) == anonfun2(0, 1))
- * }
+ * }
* }}}
*
* Note that `Function1` does not define a total function, as might
@@ -40,16 +40,17 @@ trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @speciali
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2) == apply(x1, x2)`
- */ def curried: T1 => T2 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => R = {
(x1: T1) => (x2: T2) => apply(x1, x2)
}
-
/** Creates a tupled version of this function: instead of 2 arguments,
* it accepts a single [[scala.Tuple2]] argument.
*
* @return a function `f` such that `f((x1, x2)) == f(Tuple2(x1, x2)) == apply(x1, x2)`
*/
- def tupled: Tuple2[T1, T2] => R = {
+
+ @annotation.unspecialized def tupled: Tuple2[T1, T2] => R = {
case Tuple2(x1, x2) => apply(x1, x2)
}
override def toString() = "<function2>"
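Note on the Function2 hunk above: it marks `curried` and `tupled` with `@annotation.unspecialized`, so these forwarding methods are compiled once on the generic trait instead of being duplicated into every `@specialized` variant of Function2; their behaviour is unchanged and still matches the documented contracts `f(x1)(x2) == apply(x1, x2)` and `f((x1, x2)) == apply(x1, x2)`. A minimal sketch of that contract, assuming only the standard library (the object and value names below are invented for illustration):

// Illustration of the curried/tupled contract documented in the hunks above.
object CurriedTupledExample {
  // Same shape as the `max` example in the Function2 scaladoc.
  val max: (Int, Int) => Int = (x, y) => if (x < y) y else x

  def main(args: Array[String]): Unit = {
    val curriedMax: Int => Int => Int = max.curried     // f(x1)(x2) == apply(x1, x2)
    val tupledMax: ((Int, Int)) => Int = max.tupled     // f((x1, x2)) == apply(x1, x2)

    assert(curriedMax(0)(1) == max(0, 1))
    assert(tupledMax((0, 1)) == max(0, 1))
    println(curriedMax(3)(7))  // prints 7
  }
}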
diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala
index e4fb9f280c..727684d6d5 100644
--- a/src/library/scala/Function20.scala
+++ b/src/library/scala/Function20.scala
@@ -21,16 +21,17 @@ trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)).curried
}
-
/** Creates a tupled version of this function: instead of 20 arguments,
* it accepts a single [[scala.Tuple20]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)) == f(Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)`
*/
- def tupled: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] => R = {
+
+ @annotation.unspecialized def tupled: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] => R = {
case Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)
}
override def toString() = "<function20>"
diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala
index 9823386856..2441278be8 100644
--- a/src/library/scala/Function21.scala
+++ b/src/library/scala/Function21.scala
@@ -21,16 +21,17 @@ trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)).curried
}
-
/** Creates a tupled version of this function: instead of 21 arguments,
* it accepts a single [[scala.Tuple21]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == f(Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)`
*/
- def tupled: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] => R = {
+
+ @annotation.unspecialized def tupled: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] => R = {
case Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)
}
override def toString() = "<function21>"
diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala
index e708f7f49a..1f70b190a6 100644
--- a/src/library/scala/Function22.scala
+++ b/src/library/scala/Function22.scala
@@ -21,16 +21,17 @@ trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)(x22) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21, x22: T22) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)).curried
}
-
/** Creates a tupled version of this function: instead of 22 arguments,
* it accepts a single [[scala.Tuple22]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)) == f(Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)`
*/
- def tupled: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] => R = {
+
+ @annotation.unspecialized def tupled: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] => R = {
case Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)
}
override def toString() = "<function22>"
diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala
index 62a997c1b5..bbbde82056 100644
--- a/src/library/scala/Function3.scala
+++ b/src/library/scala/Function3.scala
@@ -21,16 +21,17 @@ trait Function3[-T1, -T2, -T3, +R] extends AnyRef { self =>
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3) == apply(x1, x2, x3)`
- */ def curried: T1 => T2 => T3 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => apply(x1, x2, x3)
}
-
/** Creates a tupled version of this function: instead of 3 arguments,
* it accepts a single [[scala.Tuple3]] argument.
*
* @return a function `f` such that `f((x1, x2, x3)) == f(Tuple3(x1, x2, x3)) == apply(x1, x2, x3)`
*/
- def tupled: Tuple3[T1, T2, T3] => R = {
+
+ @annotation.unspecialized def tupled: Tuple3[T1, T2, T3] => R = {
case Tuple3(x1, x2, x3) => apply(x1, x2, x3)
}
override def toString() = "<function3>"
diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala
index 86d2faeac8..f100860a97 100644
--- a/src/library/scala/Function4.scala
+++ b/src/library/scala/Function4.scala
@@ -21,16 +21,17 @@ trait Function4[-T1, -T2, -T3, -T4, +R] extends AnyRef { self =>
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4) == apply(x1, x2, x3, x4)`
- */ def curried: T1 => T2 => T3 => T4 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1, x2, x3, x4)
}
-
/** Creates a tupled version of this function: instead of 4 arguments,
* it accepts a single [[scala.Tuple4]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4)) == f(Tuple4(x1, x2, x3, x4)) == apply(x1, x2, x3, x4)`
*/
- def tupled: Tuple4[T1, T2, T3, T4] => R = {
+
+ @annotation.unspecialized def tupled: Tuple4[T1, T2, T3, T4] => R = {
case Tuple4(x1, x2, x3, x4) => apply(x1, x2, x3, x4)
}
override def toString() = "<function4>"
diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala
index bd9af77f12..cba9b6ce52 100644
--- a/src/library/scala/Function5.scala
+++ b/src/library/scala/Function5.scala
@@ -21,16 +21,17 @@ trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self =>
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5) == apply(x1, x2, x3, x4, x5)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1, x2, x3, x4, x5)).curried
}
-
/** Creates a tupled version of this function: instead of 5 arguments,
* it accepts a single [[scala.Tuple5]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5)) == f(Tuple5(x1, x2, x3, x4, x5)) == apply(x1, x2, x3, x4, x5)`
*/
- def tupled: Tuple5[T1, T2, T3, T4, T5] => R = {
+
+ @annotation.unspecialized def tupled: Tuple5[T1, T2, T3, T4, T5] => R = {
case Tuple5(x1, x2, x3, x4, x5) => apply(x1, x2, x3, x4, x5)
}
override def toString() = "<function5>"
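From Function5 upward, the `curried` bodies in these hunks all follow one pattern: fix the first argument, wrap the remaining arguments in an (N-1)-ary function, and reuse that function's own `curried` (the `((x2, ..., xN) => self.apply(x1, ..., xN)).curried` shape), whereas Function2 through Function4 spell out the nested lambdas directly. A hand-rolled sketch of the same delegation for a five-argument function, assuming hypothetical names (`sum5`, `curried5`):

// Hand-rolled version of the delegation pattern used by Function5.curried above:
// fix x1, then reuse the curried form of the remaining 4-argument function.
object CurriedByDelegation {
  def sum5(a: Int, b: Int, c: Int, d: Int, e: Int): Int = a + b + c + d + e

  val f5: (Int, Int, Int, Int, Int) => Int = sum5
  val curried5: Int => Int => Int => Int => Int => Int =
    (x1: Int) => ((x2: Int, x3: Int, x4: Int, x5: Int) => f5(x1, x2, x3, x4, x5)).curried

  def main(args: Array[String]): Unit = {
    assert(curried5(1)(2)(3)(4)(5) == f5(1, 2, 3, 4, 5))
    println(curried5(1)(2)(3)(4)(5))  // prints 15
  }
}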
diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala
index 4f601a468c..0b8addf7de 100644
--- a/src/library/scala/Function6.scala
+++ b/src/library/scala/Function6.scala
@@ -21,16 +21,17 @@ trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends AnyRef { self =>
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6) == apply(x1, x2, x3, x4, x5, x6)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6) => self.apply(x1, x2, x3, x4, x5, x6)).curried
}
-
/** Creates a tupled version of this function: instead of 6 arguments,
* it accepts a single [[scala.Tuple6]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6)) == f(Tuple6(x1, x2, x3, x4, x5, x6)) == apply(x1, x2, x3, x4, x5, x6)`
*/
- def tupled: Tuple6[T1, T2, T3, T4, T5, T6] => R = {
+
+ @annotation.unspecialized def tupled: Tuple6[T1, T2, T3, T4, T5, T6] => R = {
case Tuple6(x1, x2, x3, x4, x5, x6) => apply(x1, x2, x3, x4, x5, x6)
}
override def toString() = "<function6>"
diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala
index 6978b6545d..2098658fa9 100644
--- a/src/library/scala/Function7.scala
+++ b/src/library/scala/Function7.scala
@@ -21,16 +21,17 @@ trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends AnyRef { self =>
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7) == apply(x1, x2, x3, x4, x5, x6, x7)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1, x2, x3, x4, x5, x6, x7)).curried
}
-
/** Creates a tupled version of this function: instead of 7 arguments,
* it accepts a single [[scala.Tuple7]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7)) == f(Tuple7(x1, x2, x3, x4, x5, x6, x7)) == apply(x1, x2, x3, x4, x5, x6, x7)`
*/
- def tupled: Tuple7[T1, T2, T3, T4, T5, T6, T7] => R = {
+
+ @annotation.unspecialized def tupled: Tuple7[T1, T2, T3, T4, T5, T6, T7] => R = {
case Tuple7(x1, x2, x3, x4, x5, x6, x7) => apply(x1, x2, x3, x4, x5, x6, x7)
}
override def toString() = "<function7>"
diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala
index 903551d939..08a480dce5 100644
--- a/src/library/scala/Function8.scala
+++ b/src/library/scala/Function8.scala
@@ -21,16 +21,17 @@ trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends AnyRef { sel
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8) == apply(x1, x2, x3, x4, x5, x6, x7, x8)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8)).curried
}
-
/** Creates a tupled version of this function: instead of 8 arguments,
* it accepts a single [[scala.Tuple8]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8)) == f(Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)) == apply(x1, x2, x3, x4, x5, x6, x7, x8)`
*/
- def tupled: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8] => R = {
+
+ @annotation.unspecialized def tupled: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8] => R = {
case Tuple8(x1, x2, x3, x4, x5, x6, x7, x8) => apply(x1, x2, x3, x4, x5, x6, x7, x8)
}
override def toString() = "<function8>"
diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala
index 0c273ba929..2e35f7949c 100644
--- a/src/library/scala/Function9.scala
+++ b/src/library/scala/Function9.scala
@@ -21,16 +21,17 @@ trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends AnyRef
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)`
- */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
+ */
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)).curried
}
-
/** Creates a tupled version of this function: instead of 9 arguments,
* it accepts a single [[scala.Tuple9]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9)) == f(Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)`
*/
- def tupled: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9] => R = {
+
+ @annotation.unspecialized def tupled: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9] => R = {
case Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)
}
override def toString() = "<function9>"
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index 316bbced2d..d5d7ef011d 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -17,7 +17,7 @@ package scala
* There is an implicit conversion from [[scala.Int]] => [[scala.runtime.RichInt]]
* which provides useful non-primitive operations.
*/
-final class Int extends AnyVal {
+final class Int private extends AnyVal {
def toByte: Byte = sys.error("stub")
def toShort: Short = sys.error("stub")
def toChar: Char = sys.error("stub")
@@ -27,7 +27,7 @@ final class Int extends AnyVal {
def toDouble: Double = sys.error("stub")
/**
- * @return the bitwise negation of this value
+ * Returns the bitwise negation of this value.
* @example {{{
* ~5 == -6
* // in binary: ~00000101 ==
@@ -36,30 +36,30 @@ final class Int extends AnyVal {
*/
def unary_~ : Int = sys.error("stub")
/**
- * @return this value, unmodified
+ * Returns this value, unmodified.
*/
def unary_+ : Int = sys.error("stub")
/**
- * @return the negation of this value
+ * Returns the negation of this value.
*/
def unary_- : Int = sys.error("stub")
def +(x: String): String = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Long): Int = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -70,7 +70,7 @@ final class Int extends AnyVal {
*/
def >>>(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -81,7 +81,7 @@ final class Int extends AnyVal {
*/
def >>>(x: Long): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -92,7 +92,7 @@ final class Int extends AnyVal {
*/
def >>(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -104,181 +104,181 @@ final class Int extends AnyVal {
def >>(x: Long): Int = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Double): Boolean = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -289,7 +289,7 @@ final class Int extends AnyVal {
*/
def |(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -300,7 +300,7 @@ final class Int extends AnyVal {
*/
def |(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -311,7 +311,7 @@ final class Int extends AnyVal {
*/
def |(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -322,7 +322,7 @@ final class Int extends AnyVal {
*/
def |(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -334,7 +334,7 @@ final class Int extends AnyVal {
def |(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -345,7 +345,7 @@ final class Int extends AnyVal {
*/
def &(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -356,7 +356,7 @@ final class Int extends AnyVal {
*/
def &(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -367,7 +367,7 @@ final class Int extends AnyVal {
*/
def &(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -378,7 +378,7 @@ final class Int extends AnyVal {
*/
def &(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -390,7 +390,7 @@ final class Int extends AnyVal {
def &(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -401,7 +401,7 @@ final class Int extends AnyVal {
*/
def ^(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -412,7 +412,7 @@ final class Int extends AnyVal {
*/
def ^(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -423,7 +423,7 @@ final class Int extends AnyVal {
*/
def ^(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -434,7 +434,7 @@ final class Int extends AnyVal {
*/
def ^(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -446,154 +446,154 @@ final class Int extends AnyVal {
def ^(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Byte): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Short): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Char): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Int): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Float): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Double): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Byte): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Short): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Char): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Int): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Long): Long = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Float): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Double): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Byte): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Short): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Char): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Int): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Long): Long = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Float): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Double): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Byte): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Short): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Char): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Int): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Long): Long = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Float): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Double): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Byte): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Short): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Char): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Int): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Long): Long = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Float): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Double): Double = sys.error("stub")
override def getClass(): Class[Int] = sys.error("stub")
}
-object Int extends AnyValCompanion {
+object Int extends AnyValCompanion {
/** The smallest value representable as a Int.
*/
final val MinValue = java.lang.Integer.MIN_VALUE
@@ -622,5 +622,8 @@ object Int extends AnyValCompanion {
/** The String representation of the scala.Int companion object.
*/
override def toString = "object scala.Int"
+ implicit def int2long(x: Int): Long = x.toLong
+ implicit def int2float(x: Int): Float = x.toFloat
+ implicit def int2double(x: Int): Double = x.toDouble
}
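Besides the scaladoc rewording and the now-private constructor, the Int.scala hunk adds widening conversions `int2long`, `int2float` and `int2double` to `object Int`; because they live in the companion object, implicit search finds them without an import. A rough usage sketch (object and method names are invented for illustration, and on a given compiler the widening may equally be performed by the language's built-in numeric conversions rather than by these implicit methods):

// Sketch of the Int widening behaviour described by the new companion-object implicits.
object IntWideningExample {
  def takesLong(x: Long): Long = x * 2L
  def takesDouble(x: Double): Double = x / 2.0

  def main(args: Array[String]): Unit = {
    val i: Int = 21
    println(takesLong(i))    // 42   -- Int widened to Long (cf. int2long)
    println(takesDouble(i))  // 10.5 -- Int widened to Double (cf. int2double)
    val f: Float = i         // Int widened to Float (cf. int2float)
    println(f)
  }
}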
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index ce8618c22a..85131b4f54 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -17,7 +17,7 @@ package scala
* There is an implicit conversion from [[scala.Long]] => [[scala.runtime.RichLong]]
* which provides useful non-primitive operations.
*/
-final class Long extends AnyVal {
+final class Long private extends AnyVal {
def toByte: Byte = sys.error("stub")
def toShort: Short = sys.error("stub")
def toChar: Char = sys.error("stub")
@@ -27,7 +27,7 @@ final class Long extends AnyVal {
def toDouble: Double = sys.error("stub")
/**
- * @return the bitwise negation of this value
+ * Returns the bitwise negation of this value.
* @example {{{
* ~5 == -6
* // in binary: ~00000101 ==
@@ -36,30 +36,30 @@ final class Long extends AnyVal {
*/
def unary_~ : Long = sys.error("stub")
/**
- * @return this value, unmodified
+ * Returns this value, unmodified.
*/
def unary_+ : Long = sys.error("stub")
/**
- * @return the negation of this value
+ * Returns the negation of this value.
*/
def unary_- : Long = sys.error("stub")
def +(x: String): String = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Int): Long = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Long): Long = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -70,7 +70,7 @@ final class Long extends AnyVal {
*/
def >>>(x: Int): Long = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -81,7 +81,7 @@ final class Long extends AnyVal {
*/
def >>>(x: Long): Long = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -92,7 +92,7 @@ final class Long extends AnyVal {
*/
def >>(x: Int): Long = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -104,181 +104,181 @@ final class Long extends AnyVal {
def >>(x: Long): Long = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Double): Boolean = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -289,7 +289,7 @@ final class Long extends AnyVal {
*/
def |(x: Byte): Long = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -300,7 +300,7 @@ final class Long extends AnyVal {
*/
def |(x: Short): Long = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -311,7 +311,7 @@ final class Long extends AnyVal {
*/
def |(x: Char): Long = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -322,7 +322,7 @@ final class Long extends AnyVal {
*/
def |(x: Int): Long = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -334,7 +334,7 @@ final class Long extends AnyVal {
def |(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -345,7 +345,7 @@ final class Long extends AnyVal {
*/
def &(x: Byte): Long = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -356,7 +356,7 @@ final class Long extends AnyVal {
*/
def &(x: Short): Long = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -367,7 +367,7 @@ final class Long extends AnyVal {
*/
def &(x: Char): Long = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -378,7 +378,7 @@ final class Long extends AnyVal {
*/
def &(x: Int): Long = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -390,7 +390,7 @@ final class Long extends AnyVal {
def &(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -401,7 +401,7 @@ final class Long extends AnyVal {
*/
def ^(x: Byte): Long = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -412,7 +412,7 @@ final class Long extends AnyVal {
*/
def ^(x: Short): Long = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -423,7 +423,7 @@ final class Long extends AnyVal {
*/
def ^(x: Char): Long = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -434,7 +434,7 @@ final class Long extends AnyVal {
*/
def ^(x: Int): Long = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -446,154 +446,154 @@ final class Long extends AnyVal {
def ^(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Byte): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Short): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Char): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Int): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Float): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Double): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Byte): Long = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Short): Long = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Char): Long = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Int): Long = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Long): Long = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Float): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Double): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Byte): Long = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Short): Long = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Char): Long = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Int): Long = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Long): Long = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Float): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Double): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Byte): Long = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Short): Long = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Char): Long = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Int): Long = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Long): Long = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Float): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Double): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Byte): Long = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Short): Long = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Char): Long = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Int): Long = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Long): Long = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Float): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Double): Double = sys.error("stub")
override def getClass(): Class[Long] = sys.error("stub")
}
-object Long extends AnyValCompanion {
+object Long extends AnyValCompanion {
/** The smallest value representable as a Long.
*/
final val MinValue = java.lang.Long.MIN_VALUE
@@ -622,5 +622,7 @@ object Long extends AnyValCompanion {
/** The String representation of the scala.Long companion object.
*/
override def toString = "object scala.Long"
+ implicit def long2float(x: Long): Float = x.toFloat
+ implicit def long2double(x: Long): Double = x.toDouble
}
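
Note: the two implicit conversions added to `object Long` above pick up the role of the `long2float`/`long2double` widenings that this same patch deprecates in `Predef` (see the Predef hunk further down). Plain assignments such as `val f: Float = 42L` never needed them, since the language performs numeric widening on its own; they matter when an implicit view `Long => Float` or `Long => Double` is requested. A minimal hedged sketch, with made-up names:

    // The view bound asks for an implicit T => Double; for T = Long it is
    // now found on object Long instead of in Predef.
    def asDouble[T <% Double](x: T): Double = x
    val d = asDouble(42L)   // 42.0
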
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
index 447a3c3819..491cd417a3 100644
--- a/src/library/scala/LowPriorityImplicits.scala
+++ b/src/library/scala/LowPriorityImplicits.scala
@@ -12,6 +12,7 @@ import scala.collection.{ mutable, immutable, generic }
import mutable.WrappedArray
import immutable.WrappedString
import generic.CanBuildFrom
+import language.implicitConversions
/** The `LowPriorityImplicits` class provides implicit values that
* are valid in all Scala compilation units without explicit qualification,
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 2d87ccb261..c461b413d6 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -9,6 +9,9 @@
package scala
object Option {
+
+ import language.implicitConversions
+
/** An implicit conversion that converts an option to an iterable value
*/
implicit def option2Iterable[A](xo: Option[A]): Iterable[A] = xo.toList
@@ -79,6 +82,17 @@ object Option {
* @define option [[scala.Option]]
* @define p `p`
* @define f `f`
+ * @define coll option
+ * @define Coll `Option`
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define collectExample
+ * @define undefinedorder
+ * @define thatinfo the class of the returned collection. In the standard library configuration, `That` is `Iterable[B]`
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
+ * representation type `Repr` and the new element type `B`.
*/
sealed abstract class Option[+A] extends Product with Serializable {
self =>
@@ -132,7 +146,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
/** Returns the result of applying $f to this $option's
* value if the $option is nonempty. Otherwise, evaluates
- * expression $ifEmpty.
+ * expression `ifEmpty`.
*
* @note This is equivalent to `$option map f getOrElse ifEmpty`.
*
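
Note: `option2Iterable` itself is unchanged here; the `language.implicitConversions` import added above satisfies the new feature check for defining implicit conversions. For reference, this is the conversion that lets an `Option` stand in for a collection, e.g.:

    val middle: Option[String] = None
    val parts = List("Ada", "Lovelace") ++ middle              // List("Ada", "Lovelace")
    val initials = List(Some('A'), None, Some('L')).flatten    // List('A', 'L')
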
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 837ce96baa..c08462ac1b 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -14,6 +14,7 @@ import mutable.ArrayOps
import generic.CanBuildFrom
import annotation.{ elidable, implicitNotFound }
import annotation.elidable.ASSERTION
+import language.{implicitConversions, existentials}
/** The `Predef` object provides definitions that are accessible in all Scala
* compilation units without explicit qualification.
@@ -99,39 +100,36 @@ object Predef extends LowPriorityImplicits {
// def AnyRef = scala.AnyRef
// Manifest types, companions, and incantations for summoning
- @deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
type ClassManifest[T] = scala.reflect.ClassManifest[T]
- @deprecated("OptManifest is no longer supported and using it may lead to incorrect results, use `@scala.reflect.TypeTag` instead", "2.10.0")
type OptManifest[T] = scala.reflect.OptManifest[T]
- @deprecated("Use `@scala.reflect.ConcreteTypeTag` instead", "2.10.0")
type Manifest[T] = scala.reflect.Manifest[T]
- @deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
val ClassManifest = scala.reflect.ClassManifest
- // [Paul to Eugene] No lazy vals in Predef. Too expensive. Have to work harder on breaking initialization dependencies.
- @deprecated("Use `@scala.reflect.ConcreteTypeTag` instead", "2.10.0")
- lazy val Manifest = scala.reflect.Manifest // needs to be lazy, because requires scala.reflect.mirror instance
- @deprecated("NoManifest is no longer supported and using it may lead to incorrect results, use `@scala.reflect.TypeTag` instead", "2.10.0")
- lazy val NoManifest = scala.reflect.NoManifest // needs to be lazy, because requires scala.reflect.mirror instance
+ val Manifest = scala.reflect.Manifest
+ val NoManifest = scala.reflect.NoManifest
def manifest[T](implicit m: Manifest[T]) = m
def classManifest[T](implicit m: ClassManifest[T]) = m
def optManifest[T](implicit m: OptManifest[T]) = m
// Tag types and companions, and incantations for summoning
- type ClassTag[T] = scala.reflect.ClassTag[T]
- type TypeTag[T] = scala.reflect.TypeTag[T]
- type ConcreteTypeTag[T] = scala.reflect.ConcreteTypeTag[T]
- val ClassTag = scala.reflect.ClassTag // doesn't need to be lazy, because it's not a path-dependent type
+ type ArrayTag[T] = scala.reflect.ArrayTag[T]
+ type ErasureTag[T] = scala.reflect.ErasureTag[T]
+ type ClassTag[T] = scala.reflect.ClassTag[T]
+ type TypeTag[T] = scala.reflect.TypeTag[T]
+ type ConcreteTypeTag[T] = scala.reflect.ConcreteTypeTag[T]
+ val ClassTag = scala.reflect.ClassTag // doesn't need to be lazy, because it's not a path-dependent type
// [Paul to Eugene] No lazy vals in Predef. Too expensive. Have to work harder on breaking initialization dependencies.
- lazy val TypeTag = scala.reflect.TypeTag // needs to be lazy, because requires scala.reflect.mirror instance
- lazy val ConcreteTypeTag = scala.reflect.ConcreteTypeTag
+ lazy val TypeTag = scala.reflect.TypeTag // needs to be lazy, because requires scala.reflect.mirror instance
+ lazy val ConcreteTypeTag = scala.reflect.ConcreteTypeTag
// [Eugene to Martin] it's really tedious to type "implicitly[...]" all the time, so I'm reintroducing these shortcuts
- def classTag[T](implicit ctag: ClassTag[T]) = ctag
- def tag[T](implicit ttag: TypeTag[T]) = ttag
- def typeTag[T](implicit ttag: TypeTag[T]) = ttag
- def concreteTag[T](implicit cttag: ConcreteTypeTag[T]) = cttag
- def concreteTypeTag[T](implicit cttag: ConcreteTypeTag[T]) = cttag
+ def arrayTag[T](implicit atag: ArrayTag[T]) = atag
+ def erasureTag[T](implicit etag: ErasureTag[T]) = etag
+ def classTag[T](implicit ctag: ClassTag[T]) = ctag
+ def tag[T](implicit ttag: TypeTag[T]) = ttag
+ def typeTag[T](implicit ttag: TypeTag[T]) = ttag
+ def concreteTag[T](implicit cttag: ConcreteTypeTag[T]) = cttag
+ def concreteTypeTag[T](implicit cttag: ConcreteTypeTag[T]) = cttag
// Minor variations on identity functions
def identity[A](x: A): A = x // @see `conforms` for the implicit version
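
Note: the summoners above (`arrayTag`, `classTag`, `typeTag`, ...) are plain shorthands for `implicitly`. A conservative sketch using only `ClassTag` (the illustration names are made up):

    // classTag[T] is equivalent to implicitly[ClassTag[T]]
    def tagOf[T](implicit ctag: ClassTag[T]): ClassTag[T] = ctag   // the long way
    def tagOf2[T: ClassTag]: ClassTag[T] = classTag[T]             // via the new shortcut
    val t = tagOf2[String]
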
@@ -162,7 +160,7 @@ object Predef extends LowPriorityImplicits {
* is at least `ASSERTION`.
*
* @see elidable
- * @param p the expression to test
+ * @param assertion the expression to test
*/
@elidable(ASSERTION)
def assert(assertion: Boolean) {
@@ -175,8 +173,8 @@ object Predef extends LowPriorityImplicits {
* is at least `ASSERTION`.
*
* @see elidable
- * @param p the expression to test
- * @param msg a String to include in the failure message
+ * @param assertion the expression to test
+ * @param message a String to include in the failure message
*/
@elidable(ASSERTION) @inline
final def assert(assertion: Boolean, message: => Any) {
@@ -191,7 +189,7 @@ object Predef extends LowPriorityImplicits {
* will not be generated if `-Xelide-below` is at least `ASSERTION`.
*
* @see elidable
- * @param p the expression to test
+ * @param assumption the expression to test
*/
@elidable(ASSERTION)
def assume(assumption: Boolean) {
@@ -206,8 +204,8 @@ object Predef extends LowPriorityImplicits {
* will not be generated if `-Xelide-below` is at least `ASSERTION`.
*
* @see elidable
- * @param p the expression to test
- * @param msg a String to include in the failure message
+ * @param assumption the expression to test
+ * @param message a String to include in the failure message
*/
@elidable(ASSERTION) @inline
final def assume(assumption: Boolean, message: => Any) {
@@ -219,7 +217,7 @@ object Predef extends LowPriorityImplicits {
* This method is similar to `assert`, but blames the caller of the method
* for violating the condition.
*
- * @param p the expression to test
+ * @param requirement the expression to test
*/
def require(requirement: Boolean) {
if (!requirement)
@@ -230,8 +228,8 @@ object Predef extends LowPriorityImplicits {
* This method is similar to `assert`, but blames the caller of the method
* for violating the condition.
*
- * @param p the expression to test
- * @param msg a String to include in the failure message
+ * @param requirement the expression to test
+ * @param message a String to include in the failure message
*/
@inline final def require(requirement: Boolean, message: => Any) {
if (!requirement)
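
Note: the hunks above only rename the documented parameters; behaviour is unchanged. As a reminder of the split they describe: `assert`/`assume` are elidable, `require` is not and blames the caller. A small hedged sketch (names made up):

    def sqrt(x: Double): Double = {
      require(x >= 0, "negative argument: " + x)   // always checked; throws IllegalArgumentException
      assert(!x.isNaN)                             // can be elided via -Xelide-below
      math.sqrt(x)
    }
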
@@ -309,69 +307,63 @@ object Predef extends LowPriorityImplicits {
// views --------------------------------------------------------------
- implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
-
- implicit def zipped2ToTraversable[El1, El2](zz: Tuple2[_, _]#Zipped[_, El1, _, El2]): Traversable[(El1, El2)] =
- new collection.AbstractTraversable[(El1, El2)] {
- def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f)
- }
-
- implicit def zipped3ToTraversable[El1, El2, El3](zz: Tuple3[_, _, _]#Zipped[_, El1, _, El2, _, El3]): Traversable[(El1, El2, El3)] =
- new collection.AbstractTraversable[(El1, El2, El3)] {
- def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f)
- }
-
- implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = xs match {
- case x: Array[AnyRef] => refArrayOps[AnyRef](x).asInstanceOf[ArrayOps[T]]
- case x: Array[Int] => intArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Double] => doubleArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Long] => longArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Float] => floatArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Char] => charArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Byte] => byteArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Short] => shortArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Boolean] => booleanArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Unit] => unitArrayOps(x).asInstanceOf[ArrayOps[T]]
+ implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
+ implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x)
+ implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x)
+ implicit def seqToCharSequence(xs: collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs)
+ implicit def arrayToCharSequence(xs: Array[Char]): CharSequence = new runtime.ArrayCharSequence(xs, 0, xs.length)
+
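
Note: the wrappers added just above replace the hand-rolled `Traversable` conversions removed earlier in this hunk and the anonymous `CharSequence` instances removed in a later hunk; call sites look the same. A short sketch (assuming `Tuple2Zipped.Ops` exposes the usual `zipped` method):

    // pairwise operations without building an intermediate list of tuples
    val sums = (List(1, 2, 3), List(10, 20, 30)).zipped.map(_ + _)   // List(11, 22, 33)

    // Scala collections where a java.lang.CharSequence is expected
    val cs: CharSequence = Array('a', 'b', 'c')
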
+ implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
+ case x: Array[AnyRef] => refArrayOps[AnyRef](x)
+ case x: Array[Boolean] => booleanArrayOps(x)
+ case x: Array[Byte] => byteArrayOps(x)
+ case x: Array[Char] => charArrayOps(x)
+ case x: Array[Double] => doubleArrayOps(x)
+ case x: Array[Float] => floatArrayOps(x)
+ case x: Array[Int] => intArrayOps(x)
+ case x: Array[Long] => longArrayOps(x)
+ case x: Array[Short] => shortArrayOps(x)
+ case x: Array[Unit] => unitArrayOps(x)
case null => null
- }
+ }).asInstanceOf[ArrayOps[T]]
- implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs)
- implicit def intArrayOps(xs: Array[Int]): ArrayOps[Int] = new ArrayOps.ofInt(xs)
- implicit def doubleArrayOps(xs: Array[Double]): ArrayOps[Double] = new ArrayOps.ofDouble(xs)
- implicit def longArrayOps(xs: Array[Long]): ArrayOps[Long] = new ArrayOps.ofLong(xs)
- implicit def floatArrayOps(xs: Array[Float]): ArrayOps[Float] = new ArrayOps.ofFloat(xs)
- implicit def charArrayOps(xs: Array[Char]): ArrayOps[Char] = new ArrayOps.ofChar(xs)
- implicit def byteArrayOps(xs: Array[Byte]): ArrayOps[Byte] = new ArrayOps.ofByte(xs)
- implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs)
implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps[Boolean] = new ArrayOps.ofBoolean(xs)
- implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs)
+ implicit def byteArrayOps(xs: Array[Byte]): ArrayOps[Byte] = new ArrayOps.ofByte(xs)
+ implicit def charArrayOps(xs: Array[Char]): ArrayOps[Char] = new ArrayOps.ofChar(xs)
+ implicit def doubleArrayOps(xs: Array[Double]): ArrayOps[Double] = new ArrayOps.ofDouble(xs)
+ implicit def floatArrayOps(xs: Array[Float]): ArrayOps[Float] = new ArrayOps.ofFloat(xs)
+ implicit def intArrayOps(xs: Array[Int]): ArrayOps[Int] = new ArrayOps.ofInt(xs)
+ implicit def longArrayOps(xs: Array[Long]): ArrayOps[Long] = new ArrayOps.ofLong(xs)
+ implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs)
+ implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs)
+ implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs)
// Primitive Widenings --------------------------------------------------------------
- implicit def byte2short(x: Byte): Short = x.toShort
- implicit def byte2int(x: Byte): Int = x.toInt
- implicit def byte2long(x: Byte): Long = x.toLong
- implicit def byte2float(x: Byte): Float = x.toFloat
- implicit def byte2double(x: Byte): Double = x.toDouble
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2short(x: Byte): Short = x.toShort
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2int(x: Byte): Int = x.toInt
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2long(x: Byte): Long = x.toLong
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
- implicit def short2int(x: Short): Int = x.toInt
- implicit def short2long(x: Short): Long = x.toLong
- implicit def short2float(x: Short): Float = x.toFloat
- implicit def short2double(x: Short): Double = x.toDouble
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2int(x: Short): Int = x.toInt
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2long(x: Short): Long = x.toLong
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2float(x: Short): Float = x.toFloat
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2double(x: Short): Double = x.toDouble
- implicit def char2int(x: Char): Int = x.toInt
- implicit def char2long(x: Char): Long = x.toLong
- implicit def char2float(x: Char): Float = x.toFloat
- implicit def char2double(x: Char): Double = x.toDouble
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2int(x: Char): Int = x.toInt
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2long(x: Char): Long = x.toLong
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2float(x: Char): Float = x.toFloat
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2double(x: Char): Double = x.toDouble
- implicit def int2long(x: Int): Long = x.toLong
- implicit def int2float(x: Int): Float = x.toFloat
- implicit def int2double(x: Int): Double = x.toDouble
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2long(x: Int): Long = x.toLong
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2float(x: Int): Float = x.toFloat
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2double(x: Int): Double = x.toDouble
- implicit def long2float(x: Long): Float = x.toFloat
- implicit def long2double(x: Long): Double = x.toDouble
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def long2float(x: Long): Float = x.toFloat
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def long2double(x: Long): Double = x.toDouble
- implicit def float2double(x: Float): Double = x.toDouble
+ @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def float2double(x: Float): Double = x.toDouble
// "Autoboxing" and "Autounboxing" ---------------------------------------------------
@@ -408,29 +400,17 @@ object Predef extends LowPriorityImplicits {
// Strings and CharSequences --------------------------------------------------------------
- implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
@inline implicit def any2stringfmt(x: Any) = new runtime.StringFormat(x)
@inline implicit def augmentString(x: String): StringOps = new StringOps(x)
+ implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
implicit def unaugmentString(x: StringOps): String = x.repr
- implicit def stringCanBuildFrom: CanBuildFrom[String, Char, String] =
- new CanBuildFrom[String, Char, String] {
- def apply(from: String) = apply()
- def apply() = mutable.StringBuilder.newBuilder
- }
-
- implicit def seqToCharSequence(xs: collection.IndexedSeq[Char]): CharSequence = new CharSequence {
- def length: Int = xs.length
- def charAt(index: Int): Char = xs(index)
- def subSequence(start: Int, end: Int): CharSequence = seqToCharSequence(xs.slice(start, end))
- override def toString: String = xs.mkString("")
- }
+ @deprecated("Use StringCanBuildFrom", "2.10.0")
+ def stringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFrom
- implicit def arrayToCharSequence(xs: Array[Char]): CharSequence = new CharSequence {
- def length: Int = xs.length
- def charAt(index: Int): Char = xs(index)
- def subSequence(start: Int, end: Int): CharSequence = arrayToCharSequence(xs.slice(start, end))
- override def toString: String = xs.mkString("")
+ implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
+ def apply(from: String) = apply()
+ def apply() = mutable.StringBuilder.newBuilder
}
// Type Constraints --------------------------------------------------------------
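
Note: turning the builder factory into an implicit `val` gives it a stable name (`StringCanBuildFrom`) and evaluates it once; what it enables is unchanged. For reference, this is the instance that keeps string transformations inside `String`:

    // CanBuildFrom[String, Char, String] is selected as the builder factory,
    // so mapping over a String yields a String again:
    val shouted: String = "ping".map(_.toUpper)   // "PING"
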
@@ -470,22 +450,15 @@ object Predef extends LowPriorityImplicits {
implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A]
}
- // less useful due to #2781
- @deprecated("Use From => To instead", "2.9.0")
- sealed abstract class <%<[-From, +To] extends (From => To) with Serializable
- object <%< {
- implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
- }
-
/** A type for which there is always an implicit value.
- * @see fallbackCanBuildFrom in Array.scala
+ * @see [[scala.Array$]], method `fallbackCanBuildFrom`
*/
class DummyImplicit
object DummyImplicit {
/** An implicit value yielding a `DummyImplicit`.
- * @see fallbackCanBuildFrom in Array.scala
+ * @see [[scala.Array$]], method `fallbackCanBuildFrom`
*/
implicit def dummyImplicit: DummyImplicit = new DummyImplicit
}
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 1459ab9ea5..8c42c60d98 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -19,7 +19,7 @@ package scala
*/
trait Product extends Any with Equals {
/** The n^th^ element of this product, 0-based. In other words, for a
- * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),, where `0 < n < k`.
+ * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`.
*
* @param n the index of the element to return
* @throws `IndexOutOfBoundsException`
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index d268b35f60..edd095c5c6 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -28,7 +28,7 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Any with Product {
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
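
Note: the corrected wording here (and in the other ProductN traits below) reflects that `productElement` is zero-based while the `_N` accessors are one-based. A tiny worked example:

    val p = ("answer", 42)             // a Tuple2, i.e. a Product2[String, Int]
    val first  = p.productElement(0)   // "answer", the same element as p._1
    val second = p.productElement(1)   // 42, the same element as p._2
    // p.productElement(2) would throw an IndexOutOfBoundsException
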
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index cae9e5a664..8daefde699 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -28,7 +28,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Any w
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index 0647b28414..90b4e8013e 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -28,7 +28,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index a080aafa7a..d5997ea05a 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -28,7 +28,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index 425aebf3e7..db8e0f3722 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -28,7 +28,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index 3d7e4896ef..113c07e8c4 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -28,7 +28,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index 7bca7a2a1b..a6ad9c7594 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -28,7 +28,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index c5042cbc90..cbf47ece94 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -28,7 +28,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index b5651ec712..f56836bfd8 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -28,7 +28,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index 088a48ae32..5b86bcff65 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -28,7 +28,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index 4f4a98c6a0..ed4bf36c93 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -28,7 +28,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index eb67e5d46e..e27e54eff9 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -28,7 +28,7 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index 80f63f1bb4..47437a20af 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -28,7 +28,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index 7056844271..319d2725c0 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -28,7 +28,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index 05e95f92dd..6ab3737acd 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -28,7 +28,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index 91556bb962..1cfbd7956b 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -28,7 +28,7 @@ trait Product3[+T1, +T2, +T3] extends Any with Product {
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index 1f9070c155..843571fd60 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -28,7 +28,7 @@ trait Product4[+T1, +T2, +T3, +T4] extends Any with Product {
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index 52dd284f55..df73bba3dd 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -28,7 +28,7 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Any with Product {
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 9624bdbe3e..36906ca54e 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -28,7 +28,7 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Any with Product {
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index 36d4b149db..e7b2c13ad1 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -28,7 +28,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Any with Product {
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index 28c78f9c89..916e57ec39 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -28,7 +28,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Any with Product
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index d69c550abe..d5e72edc0b 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -28,7 +28,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Any with Pro
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala
index 189d75a4ae..04c69bc330 100644
--- a/src/library/scala/Responder.scala
+++ b/src/library/scala/Responder.scala
@@ -21,9 +21,6 @@ package scala
object Responder {
/** Creates a responder that answers continuations with the constant `x`.
- *
- * @param x ...
- * @return ...
*/
def constant[A](x: A) = new Responder[A] {
def respond(k: A => Unit) = k(x)
@@ -31,9 +28,6 @@ object Responder {
/** Executes `x` and returns `'''true'''`, useful as a syntactic
 * convenience in for comprehensions.
- *
- * @param x ...
- * @return ...
*/
def exec[A](x: => Unit): Boolean = { x; true }
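
Note: the removed `@param`/`@return` stubs said nothing the signatures don't already say. For context, a brief usage sketch of the two helpers documented above:

    // A responder that always answers 42; consume it by passing a continuation:
    Responder.constant(42).respond(println)        // prints 42

    // exec runs a side effect and yields true:
    val ok: Boolean = Responder.exec(println("step done"))
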
diff --git a/src/dbc/scala/dbc/statement/Statement.scala b/src/library/scala/ScalaObject.scala
index fc5374262d..7cd64becbe 100644
--- a/src/dbc/scala/dbc/statement/Statement.scala
+++ b/src/library/scala/ScalaObject.scala
@@ -6,11 +6,11 @@
** |/ **
\* */
+package scala
-
-package scala.dbc
-package statement
-
-
-/** An ISO-9075:2003 (SQL) statement. */
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class Statement
+/** Until Scala 2.10.0 this marker trait was added to
+ * Scala-compiled classes. Now it only exists for backward
+ * compatibility.
+ */
+@deprecated("ScalaObject will be removed", "2.10.0")
+trait ScalaObject
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 5664c3b44c..1060a9db16 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -17,7 +17,7 @@ package scala
* There is an implicit conversion from [[scala.Short]] => [[scala.runtime.RichShort]]
* which provides useful non-primitive operations.
*/
-final class Short extends AnyVal {
+final class Short private extends AnyVal {
def toByte: Byte = sys.error("stub")
def toShort: Short = sys.error("stub")
def toChar: Char = sys.error("stub")
@@ -27,7 +27,7 @@ final class Short extends AnyVal {
def toDouble: Double = sys.error("stub")
/**
- * @return the bitwise negation of this value
+ * Returns the bitwise negation of this value.
* @example {{{
* ~5 == -6
* // in binary: ~00000101 ==
@@ -36,30 +36,30 @@ final class Short extends AnyVal {
*/
def unary_~ : Int = sys.error("stub")
/**
- * @return this value, unmodified
+ * Returns this value, unmodified.
*/
def unary_+ : Int = sys.error("stub")
/**
- * @return the negation of this value
+ * Returns the negation of this value.
*/
def unary_- : Int = sys.error("stub")
def +(x: String): String = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted left by the specified number of bits,
* filling in the new right bits with zeroes.
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
*/
def <<(x: Long): Int = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -70,7 +70,7 @@ final class Short extends AnyVal {
*/
def >>>(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted right by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling the new left bits with zeroes.
* @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
* @example {{{
@@ -81,7 +81,7 @@ final class Short extends AnyVal {
*/
def >>>(x: Long): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -92,7 +92,7 @@ final class Short extends AnyVal {
*/
def >>(x: Int): Int = sys.error("stub")
/**
- * @return this value bit-shifted left by the specified number of bits,
+ * Returns this value bit-shifted right by the specified number of bits,
* filling in the right bits with the same value as the left-most bit of this.
* The effect of this is to retain the sign of the value.
* @example {{{
@@ -104,181 +104,181 @@ final class Short extends AnyVal {
def >>(x: Long): Int = sys.error("stub")
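
Note: since the `>>` documentation above is easy to confuse with `>>>`, here is the difference spelled out on a negative operand (the Short is widened to Int first, so both operators return Int):

    val s: Short = -8
    val arithmetic = s >> 2    // -2: the sign bit is copied into the vacated positions
    val logical    = s >>> 2   // 1073741822: the vacated positions are filled with zeroes
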
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is equal x, `false` otherwise
+ * Returns `true` if this value is equal to x, `false` otherwise.
*/
def ==(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is not equal to x, `false` otherwise
+ * Returns `true` if this value is not equal to x, `false` otherwise.
*/
def !=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than x, `false` otherwise
+ * Returns `true` if this value is less than x, `false` otherwise.
*/
def <(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is less than or equal to x, `false` otherwise
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
*/
def <=(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than x, `false` otherwise
+ * Returns `true` if this value is greater than x, `false` otherwise.
*/
def >(x: Double): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Byte): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Short): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Char): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Int): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Long): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Float): Boolean = sys.error("stub")
/**
- * @return `true` if this value is greater than or equal to x, `false` otherwise
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
*/
def >=(x: Double): Boolean = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -289,7 +289,7 @@ final class Short extends AnyVal {
*/
def |(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -300,7 +300,7 @@ final class Short extends AnyVal {
*/
def |(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -311,7 +311,7 @@ final class Short extends AnyVal {
*/
def |(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -322,7 +322,7 @@ final class Short extends AnyVal {
*/
def |(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise OR of this value and x
+ * Returns the bitwise OR of this value and `x`.
* @example {{{
* (0xf0 | 0xaa) == 0xfa
* // in binary: 11110000
@@ -334,7 +334,7 @@ final class Short extends AnyVal {
def |(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -345,7 +345,7 @@ final class Short extends AnyVal {
*/
def &(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -356,7 +356,7 @@ final class Short extends AnyVal {
*/
def &(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -367,7 +367,7 @@ final class Short extends AnyVal {
*/
def &(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -378,7 +378,7 @@ final class Short extends AnyVal {
*/
def &(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise AND of this value and x
+ * Returns the bitwise AND of this value and `x`.
* @example {{{
* (0xf0 & 0xaa) == 0xa0
* // in binary: 11110000
@@ -390,7 +390,7 @@ final class Short extends AnyVal {
def &(x: Long): Long = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -401,7 +401,7 @@ final class Short extends AnyVal {
*/
def ^(x: Byte): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -412,7 +412,7 @@ final class Short extends AnyVal {
*/
def ^(x: Short): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -423,7 +423,7 @@ final class Short extends AnyVal {
*/
def ^(x: Char): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -434,7 +434,7 @@ final class Short extends AnyVal {
*/
def ^(x: Int): Int = sys.error("stub")
/**
- * @return the bitwise XOR of this value and x
+ * Returns the bitwise XOR of this value and `x`.
* @example {{{
* (0xf0 ^ 0xaa) == 0x5a
* // in binary: 11110000
@@ -446,154 +446,154 @@ final class Short extends AnyVal {
def ^(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Byte): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Short): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Char): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Int): Int = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Long): Long = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Float): Float = sys.error("stub")
/**
- * @return the sum of this value and x
+ * Returns the sum of this value and `x`.
*/
def +(x: Double): Double = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Byte): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Short): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Char): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Int): Int = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Long): Long = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Float): Float = sys.error("stub")
/**
- * @return the difference of this value and x
+ * Returns the difference of this value and `x`.
*/
def -(x: Double): Double = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Byte): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Short): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Char): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Int): Int = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Long): Long = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Float): Float = sys.error("stub")
/**
- * @return the product of this value and x
+ * Returns the product of this value and `x`.
*/
def *(x: Double): Double = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Byte): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Short): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Char): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Int): Int = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Long): Long = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Float): Float = sys.error("stub")
/**
- * @return the quotient of this value and x
+ * Returns the quotient of this value and `x`.
*/
def /(x: Double): Double = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Byte): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Short): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Char): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Int): Int = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Long): Long = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Float): Float = sys.error("stub")
/**
- * @return the remainder of the division of this value by x
+ * Returns the remainder of the division of this value by `x`.
*/
def %(x: Double): Double = sys.error("stub")
override def getClass(): Class[Short] = sys.error("stub")
}
-object Short extends AnyValCompanion {
+object Short extends AnyValCompanion {
/** The smallest value representable as a Short.
*/
final val MinValue = java.lang.Short.MIN_VALUE
@@ -622,5 +622,9 @@ object Short extends AnyValCompanion {
/** The String representation of the scala.Short companion object.
*/
override def toString = "object scala.Short"
+ implicit def short2int(x: Short): Int = x.toInt
+ implicit def short2long(x: Short): Long = x.toLong
+ implicit def short2float(x: Short): Float = x.toFloat
+ implicit def short2double(x: Short): Double = x.toDouble
}
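A small sketch of the effect of the widenings added to the `Short` companion. The language's built-in numeric widening already covers simple value definitions; placing the implicits in the companion presumably also makes the conversions visible to ordinary implicit search. Names and results below are illustrative:

{{{
val s: Short = 7
val i: Int    = s   // widens to Int
val l: Long   = s   // widens to Long
val d: Double = s   // widens to Double
}}}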
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index be9e0c290a..f400f18dab 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -132,7 +132,7 @@ object StringContext {
* escape: `\\`, `\"`, `\'`
* octal: `\d` `\dd` `\ddd` where `d` is an octal digit between `0` and `7`.
*
- * @param A string that may contain escape sequences
+ * @param str A string that may contain escape sequences
* @return The string with all escape sequences expanded.
*/
def treatEscapes(str: String): String = {
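For reference, `treatEscapes` turns literal escape sequences found in `str` into the corresponding characters; a small usage sketch:

{{{
StringContext.treatEscapes("""a\tb""")    // "a" and "b" separated by a real tab
StringContext.treatEscapes("""line\n""")  // ends with an actual newline character
}}}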
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index b1befca4fa..5e77127080 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -9,9 +9,6 @@
package scala
-import scala.collection.{ TraversableLike => TLike, IterableLike => ILike }
-import scala.collection.generic.{ CanBuildFrom => CBF }
-
/** A tuple of 2 elements; the canonical representation of a [[scala.Product2]].
*
@@ -30,105 +27,4 @@ case class Tuple2[@specialized(Int, Long, Double, Char, Boolean, AnyRef) +T1, @s
*/
def swap: Tuple2[T2,T1] = Tuple2(_2, _1)
- @deprecated("Use `zipped` instead.", "2.9.0")
- def zip[Repr1, El1, El2, To](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => Iterable[El2],
- cbf1: CBF[Repr1, (El1, El2), To]): To = {
- zipped map ((x, y) => ((x, y)))
- }
-
- /** Wraps a tuple in a `Zipped`, which supports 2-ary generalisations of `map`, `flatMap`, `filter`, etc.
- * Note that there must be an implicit value to convert this tuple's types into a [[scala.collection.TraversableLike]]
- * or [[scala.collection.IterableLike]].
- * {{{
- * scala> val tuple = (List(1,2,3),List('a','b','c'))
- * tuple: (List[Int], List[Char]) = (List(1, 2, 3),List(a, b, c))
- *
- * scala> tuple.zipped map { (x,y) => x + ":" + y }
- * res6: List[java.lang.String] = List(1:a, 2:b, 3:c)
- * }}}
- *
- * @see Zipped
- * Note: will not terminate for infinite-sized collections.
- */
- def zipped[Repr1, El1, Repr2, El2](implicit w1: T1 => TLike[El1, Repr1], w2: T2 => ILike[El2, Repr2]): Zipped[Repr1, El1, Repr2, El2]
- = new Zipped[Repr1, El1, Repr2, El2](_1, _2)
-
- class Zipped[+Repr1, +El1, +Repr2, +El2](coll1: TLike[El1, Repr1], coll2: ILike[El2, Repr2]) { // coll2: ILike for filter
- def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- b.sizeHint(coll1)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- b += f(el1, elems2.next)
- else
- return b.result
- }
-
- b.result
- }
-
- def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- b ++= f(el1, elems2.next)
- else
- return b.result
- }
-
- b.result
- }
-
- def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext) {
- val el2 = elems2.next
- if (f(el1, el2)) {
- b1 += el1
- b2 += el2
- }
- }
- else return (b1.result, b2.result)
- }
-
- (b1.result, b2.result)
- }
-
- def exists(f: (El1, El2) => Boolean): Boolean = {
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext) {
- if (f(el1, elems2.next))
- return true
- }
- else return false
- }
- false
- }
-
- def forall(f: (El1, El2) => Boolean): Boolean =
- !exists((x, y) => !f(x, y))
-
- def foreach[U](f: (El1, El2) => U): Unit = {
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- f(el1, elems2.next)
- else
- return
- }
- }
- }
-
}
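With `zip`/`zipped` removed from `Tuple2` itself, the same element-wise combination can still be written against the plain collection API. One equivalent formulation, as a sketch rather than the library's designated replacement:

{{{
val xs = List(1, 2, 3)
val ys = List('a', 'b', 'c')
(xs zip ys) map { case (x, y) => x + ":" + y }   // List("1:a", "2:b", "3:c")
}}}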
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 0d5399308b..5ed13602e3 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -9,9 +9,6 @@
package scala
-import scala.collection.{ TraversableLike => TLike, IterableLike => ILike }
-import scala.collection.generic.{ CanBuildFrom => CBF }
-
/** A tuple of 3 elements; the canonical representation of a [[scala.Product3]].
*
@@ -25,121 +22,4 @@ case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")"
-
- @deprecated("Use `zipped` instead.", "2.9.0")
- def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => Iterable[El2],
- w3: T3 => Iterable[El3],
- cbf1: CBF[Repr1, (El1, El2, El3), To]): To = {
- zipped map ((x, y, z) => ((x, y, z)))
- }
-
- /** Wraps a tuple in a `Zipped`, which supports 3-ary generalisations of `map`, `flatMap`, `filter`, etc.
- * Note that there must be an implicit value to convert this tuple's types into a [[scala.collection.TraversableLike]]
- * or [[scala.collection.IterableLike]].
- * {{{
- * scala> val tuple = (List(1,2,3),List('a','b','c'),List("x","y","z"))
- * tuple: (List[Int], List[Char], List[java.lang.String]) = (List(1, 2, 3),List(a, b, c),List(x, y, z))
- *
- * scala> tuple.zipped map { (x,y,z) => x + ":" + y + ":" + z}
- * res8: List[java.lang.String] = List(1:a:x, 2:b:y, 3:c:z)
- * }}}
- *
- * @see Zipped
- * Note: will not terminate for infinite-sized collections.
- */
- def zipped[Repr1, El1, Repr2, El2, Repr3, El3](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => ILike[El2, Repr2],
- w3: T3 => ILike[El3, Repr3]): Zipped[Repr1, El1, Repr2, El2, Repr3, El3]
- = new Zipped[Repr1, El1, Repr2, El2, Repr3, El3](_1, _2, _3)
-
- class Zipped[+Repr1, +El1, +Repr2, +El2, +Repr3, +El3](coll1: TLike[El1, Repr1],
- coll2: ILike[El2, Repr2],
- coll3: ILike[El3, Repr3]) {
- def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- b += f(el1, elems2.next, elems3.next)
- else
- return b.result
- }
- b.result
- }
-
- def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- b ++= f(el1, elems2.next, elems3.next)
- else
- return b.result
- }
- b.result
- }
-
- def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
- implicit cbf1: CBF[Repr1, El1, To1],
- cbf2: CBF[Repr2, El2, To2],
- cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val b3 = cbf3(coll3.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
- def result = (b1.result, b2.result, b3.result)
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext) {
- val el2 = elems2.next
- val el3 = elems3.next
-
- if (f(el1, el2, el3)) {
- b1 += el1
- b2 += el2
- b3 += el3
- }
- }
- else return result
- }
-
- result
- }
-
- def exists(f: (El1, El2, El3) => Boolean): Boolean = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext) {
- if (f(el1, elems2.next, elems3.next))
- return true
- }
- else return false
- }
- false
- }
-
- def forall(f: (El1, El2, El3) => Boolean): Boolean =
- !exists((x, y, z) => !f(x, y, z))
-
- def foreach[U](f: (El1, El2, El3) => U): Unit = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- f(el1, elems2.next, elems3.next)
- else
- return
- }
- }
- }
-
}
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index f6ed0121ab..3da5c083d4 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -16,11 +16,11 @@ package scala
* runtime system. A method with return type `Unit` is analogous to a Java
* method which is declared `void`.
*/
-final class Unit extends AnyVal {
+final class Unit private extends AnyVal {
override def getClass(): Class[Unit] = sys.error("stub")
}
-object Unit extends AnyValCompanion {
+object Unit extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
diff --git a/src/library/scala/annotation/bridge.scala b/src/library/scala/annotation/bridge.scala
index 690370854e..a56129fb96 100644
--- a/src/library/scala/annotation/bridge.scala
+++ b/src/library/scala/annotation/bridge.scala
@@ -10,4 +10,5 @@ package scala.annotation
/** If this annotation is present on a method, it will be treated as a bridge method.
*/
+@deprecated("Reconsider whether using this annotation will accomplish anything", "2.10.0")
private[scala] class bridge extends annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index 880b645daa..18be2450f5 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -18,18 +18,22 @@ import java.util.logging.Level
* be omitted from generated code if the priority given the annotation
* is lower than that given on the command line.
*
+ * {{{
* @elidable(123) // annotation priority
* scalac -Xelide-below 456 // command line priority
- *
+ * }}}
+ *
* The method call will be replaced with an expression which depends on
* the type of the elided expression. In decreasing order of precedence:
*
+ * {{{
* Unit ()
* Boolean false
* T <: AnyVal 0
* T >: Null null
* T >: Nothing Predef.???
- *
+ * }}}
+ *
* Complete example:
{{{
import annotation._, elidable._
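  // A sketch of the mechanism described above (names below are illustrative):
  // a call site is elided when the annotation's priority is below the
  // -Xelide-below threshold passed to scalac.
  object Log {
    @elidable(INFO) def info(msg: String): Unit = println(msg)
  }
  // Compiled with e.g. `scalac -Xelide-below 900` (INFO is 800), a call such as
  // Log.info("starting") is replaced by the unit value ().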
diff --git a/src/dbc/scala/dbc/exception/UnsupportedFeature.scala b/src/library/scala/annotation/meta/languageFeature.scala
index dd6f904077..23acc01b51 100644
--- a/src/dbc/scala/dbc/exception/UnsupportedFeature.scala
+++ b/src/library/scala/annotation/meta/languageFeature.scala
@@ -1,16 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
+package scala.annotation.meta
-
-
-package scala.dbc
-package exception
-
-
-/** A type category for all SQL types that store constant-precision numbers. */
-@deprecated(DbcIsDeprecated, "2.9.0") case class UnsupportedFeature (msg: String) extends Exception;
+/**
+ * An annotation giving particulars for a language feature in object `scala.language`.
+ */
+final class languageFeature(feature: String, enableRequired: Boolean) extends annotation.StaticAnnotation
diff --git a/src/dbc/scala/dbc/statement/SetClause.scala b/src/library/scala/annotation/unspecialized.scala
index 3af509c026..28d9aa169c 100644
--- a/src/dbc/scala/dbc/statement/SetClause.scala
+++ b/src/library/scala/annotation/unspecialized.scala
@@ -1,21 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
+package scala.annotation
-
-package scala.dbc
-package statement
-
-
-import scala.dbc.statement.expression._
-
-/** Data to be inserted into a table in an <code>Insert</code>. */
-@deprecated(DbcIsDeprecated, "2.9.0") case class SetClause(name: String, expr: Expression) {
- val value: Pair[String,Expression] = (name, expr)
- def sqlString: String = value._1 + " = " + value._2.sqlInnerString
-}
+/** A method annotation which suppresses the creation of
+ * additional specialized forms based on enclosing specialized
+ * type parameters.
+ *
+ * @since 2.10
+ */
+class unspecialized extends annotation.StaticAnnotation
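A sketch of the intended use of the new annotation, with illustrative names: inside a `@specialized` class, marking a member `@unspecialized` suppresses the extra specialized variants for that member only.

{{{
import scala.annotation.unspecialized

class Buf[@specialized(Int, Double) T](data: Array[T]) {
  def head: T = data(0)                        // specialized copies are generated
  @unspecialized def size: Int = data.length   // but not for this method
}
}}}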
diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala
index 59b53faf7e..90e837b219 100644
--- a/src/library/scala/collection/BitSet.scala
+++ b/src/library/scala/collection/BitSet.scala
@@ -22,7 +22,7 @@ trait BitSet extends SortedSet[Int]
/** $factoryInfo
* @define coll bitset
- * @define Coll BitSet
+ * @define Coll `BitSet`
*/
object BitSet extends BitSetFactory[BitSet] {
val empty: BitSet = immutable.BitSet.empty
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index e4f9fd436a..00cd63eb70 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -30,7 +30,7 @@ import mutable.StringBuilder
* @version 2.8
* @since 2.8
* @define coll bitset
- * @define Coll BitSet
+ * @define Coll `BitSet`
*/
trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSetLike[Int, This] { self =>
@@ -137,7 +137,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
/** Computes the intersection between this bitset and another bitset by performing
* a bitwise "and".
- * @param that the bitset to intersect with.
+ * @param other the bitset to intersect with.
* @return a new bitset consisting of all elements that are both in this
* bitset and in the given bitset `other`.
*/
@@ -152,7 +152,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
/** Computes the difference of this bitset and another bitset by performing
* a bitwise "and-not".
*
- * @param that the set of bits to exclude.
+ * @param other the set of bits to exclude.
* @return a bitset containing those bits of this
* bitset that are not also contained in the given bitset `other`.
*/
@@ -167,7 +167,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
/** Computes the symmetric difference of this bitset and another bitset by performing
* a bitwise "exclusive-or".
*
- * @param that the other bitset to take part in the symmetric difference.
+ * @param other the other bitset to take part in the symmetric difference.
* @return a bitset containing those bits of this
* bitset or the other bitset that are not contained in both bitsets.
*/
@@ -184,7 +184,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
/** Tests whether this bitset is a subset of another bitset.
*
- * @param that the bitset to test.
+ * @param other the bitset to test.
* @return `true` if this bitset is a subset of `other`, i.e. if
* every bit of this set is also an element in `other`.
*/
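For reference, the bitwise set operations whose parameters are renamed here behave as follows on concrete bitsets:

{{{
import scala.collection.immutable.BitSet

val a = BitSet(1, 2, 3)
val b = BitSet(2, 3, 4)
a & b          // BitSet(2, 3) -- intersection, bitwise "and"
a &~ b         // BitSet(1)    -- difference, bitwise "and-not"
a ^ b          // BitSet(1, 4) -- symmetric difference, bitwise "exclusive-or"
a subsetOf b   // false
}}}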
diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala
index 3af535bdaa..d00414751a 100644
--- a/src/library/scala/collection/DefaultMap.scala
+++ b/src/library/scala/collection/DefaultMap.scala
@@ -41,7 +41,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
*/
override def - (key: A): Map[A, B] = {
val b = newBuilder
- b ++= this filter (key !=)
+ b ++= this filter (key != _)
b.result
}
}
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
index 8fa5981969..0fd9bac379 100644
--- a/src/library/scala/collection/GenIterableLike.scala
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -16,7 +16,7 @@ import generic.{ CanBuildFrom => CBF, _ }
* This trait contains abstract methods and methods that can be implemented
* directly in terms of other methods.
*
- * @define Coll GenIterable
+ * @define Coll `GenIterable`
* @define coll general iterable collection
*
* @author Martin Odersky
@@ -41,7 +41,7 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
/** Checks if the other iterable collection contains the same elements in the same order as this $coll.
*
* @param that the collection to compare with.
- * @tparam B the type of the elements of collection `that`.
+ * @tparam A1 the type of the elements of collection `that`.
* @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*
* @usecase def sameElements(that: GenIterable[A]): Boolean
@@ -87,13 +87,13 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* @tparam A1 the type of the first half of the returned pairs (this is always a supertype
* of the collection's element type `A`).
* @tparam That the class of the returned collection. Where possible, `That` is
- * the same class as the current collection class `Repr`, but this
- * depends on the element type `(A1, Int)` being admissible for that class,
- * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, Int), That]`.
- * is found.
- * @tparam bf an implicit value of class `CanBuildFrom` which determines the
- * result class `That` from the current representation type `Repr`
- * and the new element type `(A1, Int)`.
+ * the same class as the current collection class `Repr`, but this
+ * depends on the element type `(A1, Int)` being admissible for that class,
+ *                 which means that an implicit instance of type `CanBuildFrom[Repr, (A1, Int), That]`

+ * is found.
+ * @param bf an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `(A1, Int)`.
* @return A new collection of type `That` containing pairs consisting of all elements of this
* $coll paired with their index. Indices start at `0`.
*
@@ -141,7 +141,4 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
*/
def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CBF[Repr, (A1, B), That]): That
- def isEmpty = iterator.isEmpty
-
- def head = iterator.next
}
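For orientation, the operations documented in this trait behave as follows on an ordinary list:

{{{
val xs = List("a", "b", "c")
xs.zipWithIndex                        // List((a,0), (b,1), (c,2))
xs sameElements List("a", "b", "c")    // true: same elements, same order
xs sameElements List("c", "b", "a")    // false
}}}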
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 114169c849..4dd2a4fe37 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -11,7 +11,7 @@ package scala.collection
/** A trait for all maps upon which operations may be
* implemented in parallel.
*
- * @define Coll GenMap
+ * @define Coll `GenMap`
* @define coll general map
* @author Martin Odersky
* @author Aleksandar Prokopec
@@ -42,7 +42,6 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
* otherwise the result of the `default` computation.
* @usecase def getOrElse(key: A, default: => B): B
* @inheritdoc
- * @tparam B the result type of the default computation.
*/
def getOrElse[B1 >: B](key: A, default: => B1): B1
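The removed `@tparam` referred to a type parameter the usecase signature does not have; the behaviour of `getOrElse` is unchanged:

{{{
val m = Map("a" -> 1)
m.getOrElse("a", 0)   // 1
m.getOrElse("z", 0)   // 0: key absent, the default is evaluated
}}}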
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index 755abcd2bf..cfa0ca101e 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -9,7 +9,6 @@
package scala.collection
import generic._
-import annotation.bridge
/** A template trait for all sequences which may be traversed
* in parallel.
@@ -142,7 +141,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* $mayNotTerminateInf
*
*/
- def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem ==, from)
+ def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem == _, from)
/** Finds index of last occurrence of some value in this $coll.
*
@@ -157,7 +156,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* $willNotTerminateInf
*
*/
- def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem ==)
+ def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem == _)
/** Finds index of last occurrence of some value in this $coll before or at a given end index.
*
@@ -170,7 +169,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @usecase def lastIndexOf(elem: A, end: Int): Int
* @inheritdoc
*/
- def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem ==, end)
+ def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem == _, end)
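Spelling the predicate as `elem == _` avoids relying on postfix-style eta-expansion while leaving behaviour unchanged; for reference:

{{{
val xs = Seq(1, 2, 3, 2, 1)
xs.indexOf(2)         // 1
xs.indexOf(2, 2)      // 3  (search starts at index 2)
xs.lastIndexOf(2)     // 3
xs.lastIndexOf(2, 2)  // 1  (search ends at index 2)
}}}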
/** Finds index of last element satisfying some predicate.
*
@@ -228,9 +227,6 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*/
def startsWith[B](that: GenSeq[B]): Boolean = startsWith(that, 0)
- @bridge
- def startsWith[B](that: Seq[B]): Boolean = startsWith(that: GenSeq[B])
-
/** Tests whether this $coll contains the given sequence at a given index.
*
* '''Note''': If both the receiver object `this` and the argument
@@ -413,16 +409,10 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*/
def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = this ++ that
- @bridge
- def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
- union(that: GenSeq[B])(bf)
-
/** Computes the multiset difference between this $coll and another sequence.
*
* @param that the sequence of elements to remove
* @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* except some of occurrences of elements that also appear in `that`.
* If an element value `x` appears
@@ -446,8 +436,6 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* which also appear in `that`.
* If an element value `x` appears
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index f729f82bb4..219374abc6 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -8,7 +8,6 @@
package scala.collection
-import annotation.bridge
/** A template trait for sets which may possibly
* have their operations implemented in parallel.
@@ -51,9 +50,6 @@ extends GenIterableLike[A, Repr]
*/
def intersect(that: GenSet[A]): Repr = this filter that
- @bridge
- def intersect(that: Set[A]): Repr = intersect(that: GenSet[A])
-
/** Computes the intersection between this set and another set.
*
* '''Note:''' Same as `intersect`.
@@ -63,9 +59,6 @@ extends GenIterableLike[A, Repr]
*/
def &(that: GenSet[A]): Repr = this intersect that
- @bridge
- def &(that: Set[A]): Repr = &(that: GenSet[A])
-
/** Computes the union between this set and another set.
*
* @param that the set to form the union with.
@@ -83,9 +76,6 @@ extends GenIterableLike[A, Repr]
*/
def | (that: GenSet[A]): Repr = this union that
- @bridge
- def | (that: Set[A]): Repr = | (that: GenSet[A])
-
/** Computes the difference of this set and another set.
*
* @param that the set of elements to exclude.
@@ -103,9 +93,6 @@ extends GenIterableLike[A, Repr]
*/
def &~(that: GenSet[A]): Repr = this diff that
- @bridge
- def &~(that: Set[A]): Repr = &~(that: GenSet[A])
-
/** Tests whether this set is a subset of another set.
*
* @param that the set to test.
@@ -114,9 +101,6 @@ extends GenIterableLike[A, Repr]
*/
def subsetOf(that: GenSet[A]): Boolean = this forall that
- @bridge
- def subsetOf(that: Set[A]): Boolean = subsetOf(that: GenSet[A])
-
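With the `@bridge` forwarders gone, the alphabetic and symbolic set operations remain available through the `GenSet` overloads; their behaviour is unchanged:

{{{
val a = Set(1, 2, 3)
val b = Set(2, 3, 4)
a intersect b   // Set(2, 3)
a union b       // Set(1, 2, 3, 4)
a &~ b          // Set(1)
a subsetOf b    // false
}}}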
/** Compares this set with another object for equality.
*
* '''Note:''' This operation contains an unchecked cast: if `that`
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index fd03e0f446..eaec7a2a76 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -43,7 +43,7 @@ import annotation.migration
* @define traversableInfo
* This is a base trait of all kinds of Scala collections.
*
- * @define Coll GenTraversable
+ * @define Coll `GenTraversable`
* @define coll general collection
* @define collectExample
* @tparam A the collection element type.
@@ -59,12 +59,24 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
def size: Int
+ /** Selects the first element of this $coll.
+ * $orderDependent
+ * @return the first element of this $coll.
+ * @throws `NoSuchElementException` if the $coll is empty.
+ */
def head: A
-
+
+ /** Optionally selects the first element.
+ * $orderDependent
+ * @return the first element of this $coll if it is nonempty,
+ * `None` if it is empty.
+ */
+ def headOption: Option[A]
+
/** Tests whether this $coll can be repeatedly traversed.
* @return `true`
*/
- final def isTraversableAgain = true
+ def isTraversableAgain: Boolean
/** Selects all elements except the first.
* $orderDependent
@@ -72,11 +84,30 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* except the first one.
* @throws `UnsupportedOperationException` if the $coll is empty.
*/
- def tail: Repr = {
- if (isEmpty) throw new UnsupportedOperationException("empty.tail")
- drop(1)
- }
+ def tail: Repr
+ /** Selects the last element.
+ * $orderDependent
+ * @return The last element of this $coll.
+ * @throws NoSuchElementException If the $coll is empty.
+ */
+ def last: A
+
+ /** Optionally selects the last element.
+ * $orderDependent
+ * @return the last element of this $coll$ if it is nonempty,
+ * `None` if it is empty.
+ */
+ def lastOption: Option[A]
+
+ /** Selects all elements except the last.
+ * $orderDependent
+ * @return a $coll consisting of all elements of this $coll
+ * except the last one.
+ * @throws `UnsupportedOperationException` if the $coll is empty.
+ */
+ def init: Repr
+
/** Computes a prefix scan of the elements of the collection.
*
* Note: The neutral element `z` may be applied more than once.
@@ -91,7 +122,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
*/
def scan[B >: A, That](z: B)(op: (B, B) => B)(implicit cbf: CanBuildFrom[Repr, B, That]): That
- /** Produces a collection containing cummulative results of applying the
+ /** Produces a collection containing cumulative results of applying the
* operator going left to right.
*
* $willNotTerminateInf
@@ -106,8 +137,8 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
*/
def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- /** Produces a collection containing cummulative results of applying the operator going right to left.
- * The head of the collection is the last cummulative result.
+ /** Produces a collection containing cumulative results of applying the operator going right to left.
+ * The head of the collection is the last cumulative result.
* $willNotTerminateInf
* $orderDependent
*
@@ -258,7 +289,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
/** Selects all elements of this $coll which satisfy a predicate.
*
- * @param p the predicate used to test elements.
+ * @param pred the predicate used to test elements.
* @return a new $coll consisting of all elements of this $coll that satisfy the given
* predicate `p`. Their order may not be preserved.
*/
@@ -266,7 +297,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
/** Selects all elements of this $coll which do not satisfy a predicate.
*
- * @param p the predicate used to test elements.
+ * @param pred the predicate used to test elements.
* @return a new $coll consisting of all elements of this $coll that do not satisfy the given
* predicate `p`. Their order may not be preserved.
*/
@@ -274,11 +305,11 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
/** Partitions this $coll in two ${coll}s according to a predicate.
*
- * @param p the predicate on which to partition.
- * @return a pair of ${coll}s: the first $coll consists of all elements that
- * satisfy the predicate `p` and the second $coll consists of all elements
- * that don't. The relative order of the elements in the resulting ${coll}s
- * may not be preserved.
+ * @param pred the predicate on which to partition.
+ * @return a pair of ${coll}s: the first $coll consists of all elements that
+ * satisfy the predicate `p` and the second $coll consists of all elements
+ * that don't. The relative order of the elements in the resulting ${coll}s
+ * may not be preserved.
*/
def partition(pred: A => Boolean): (Repr, Repr)
@@ -323,8 +354,8 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* }}}
* $orderDependent
*
- * @param from the lowest index to include from this $coll.
- * @param until the lowest index to EXCLUDE from this $coll.
+ * @param unc_from the lowest index to include from this $coll.
+ * @param unc_until the lowest index to EXCLUDE from this $coll.
* @return a $coll containing the elements greater than or equal to
* index `from` extending up to (but not including) index `until`
* of this $coll.
@@ -344,7 +375,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
/** Takes longest prefix of elements that satisfy a predicate.
* $orderDependent
- * @param p The predicate used to test elements.
+ * @param pred The predicate used to test elements.
* @return the longest prefix of this $coll whose elements all satisfy
* the predicate `p`.
*/
@@ -357,7 +388,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* predicate `p` does not cause any side-effects.
* $orderDependent
*
- * @param p the test predicate
+ * @param pred the test predicate
* @return a pair consisting of the longest prefix of this $coll whose
* elements all satisfy `p`, and the rest of this $coll.
*/
@@ -365,7 +396,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
/** Drops longest prefix of elements that satisfy a predicate.
* $orderDependent
- * @param p The predicate used to test elements.
+ * @param pred The predicate used to test elements.
* @return the longest suffix of this $coll whose first element
* does not satisfy the predicate `p`.
*/
@@ -380,3 +411,12 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
def stringPrefix: String
}
+
+object GenTraversableLike {
+ /** Manufacture a conversion from collection representation type `Repr` to
+ * its corresponding `GenTraversableLike` given an implicitly available
+ * instance of `FromRepr[Repr]`.
+ * @see [[scala.collection.generic.FromRepr]]
+ */
+ implicit def fromRepr[Repr](implicit fr : FromRepr[Repr]) = fr.hasElem
+}
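The accessors newly declared (and documented) abstract above, `head`, `headOption`, `tail`, `last`, `lastOption` and `init`, are the familiar ones from the concrete collections; on a list, for example:

{{{
val xs = List(1, 2, 3)
xs.head         // 1
xs.headOption   // Some(1)
xs.tail         // List(2, 3)
xs.init         // List(1, 2)
xs.last         // 3
xs.lastOption   // Some(3)
List.empty[Int].headOption   // None (head itself would throw)
}}}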
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index a7ec7618b7..eab6b84ea8 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -14,7 +14,7 @@ package scala.collection
* Methods in this trait are either abstract or can be implemented in terms
* of other methods.
*
- * @define Coll GenTraversableOnce
+ * @define Coll `GenTraversableOnce`
* @define coll collection or iterator
* @define possiblyparinfo
* This trait may possibly have operations implemented in parallel.
@@ -312,7 +312,7 @@ trait GenTraversableOnce[+A] extends Any {
*
* @param num an implicit parameter defining a set of numeric operations
* which includes the `+` operator to be used in forming the sum.
- * @tparam B the result type of the `+` operator.
+ * @tparam A1 the result type of the `+` operator.
* @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
*
* @usecase def sum: A
@@ -330,7 +330,7 @@ trait GenTraversableOnce[+A] extends Any {
*
* @param num an implicit parameter defining a set of numeric operations
* which includes the `*` operator to be used in forming the product.
- * @tparam B the result type of the `*` operator.
+ * @tparam A1 the result type of the `*` operator.
* @return the product of all elements of this $coll with respect to the `*` operator in `num`.
*
* @usecase def product: A
@@ -345,8 +345,8 @@ trait GenTraversableOnce[+A] extends Any {
/** Finds the smallest element.
*
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
+ * @param ord An ordering to be used for comparing elements.
+ * @tparam A1 The type over which the ordering is defined.
* @return the smallest element of this $coll with respect to the ordering `cmp`.
*
* @usecase def min: A
@@ -358,8 +358,8 @@ trait GenTraversableOnce[+A] extends Any {
/** Finds the largest element.
*
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
+ * @param ord An ordering to be used for comparing elements.
+ * @tparam A1 The type over which the ordering is defined.
* @return the largest element of this $coll with respect to the ordering `cmp`.
*
* @usecase def max: A
@@ -382,9 +382,9 @@ trait GenTraversableOnce[+A] extends Any {
* $mayNotTerminateInf
* $orderDependent
*
- * @param p the predicate used to test elements.
- * @return an option value containing the first element in the $coll
- * that satisfies `p`, or `None` if none exists.
+ * @param pred the predicate used to test elements.
+ * @return an option value containing the first element in the $coll
+ * that satisfies `p`, or `None` if none exists.
*/
def find(pred: A => Boolean): Option[A]
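The renamed parameters and type parameters do not change behaviour; for orientation:

{{{
val xs = List(3, 1, 4, 1, 5)
xs.sum           // 14
xs.product       // 60
xs.min           // 1
xs.max           // 5
xs.find(_ > 3)   // Some(4): the first element satisfying the predicate
}}}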
@@ -459,9 +459,9 @@ trait GenTraversableOnce[+A] extends Any {
/** Converts this $coll to an array.
*
- * @tparam B the type of the elements of the array. A `ClassManifest` for
- * this type must be available.
- * @return an array containing all elements of this $coll.
+ * @tparam A1 the type of the elements of the array. An `ArrayTag` for
+ * this type must be available.
+ * @return an array containing all elements of this $coll.
*
* @usecase def toArray: Array[A]
* @inheritdoc
@@ -469,9 +469,9 @@ trait GenTraversableOnce[+A] extends Any {
* $willNotTerminateInf
*
* @return an array containing all elements of this $coll.
- * A `ClassManifest` must be available for the element type of this $coll.
+ * An `ArrayTag` must be available for the element type of this $coll.
*/
- def toArray[A1 >: A: ClassManifest]: Array[A1]
+ def toArray[A1 >: A: ArrayTag]: Array[A1]
/** Converts this $coll to a list.
* $willNotTerminateInf
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index 4a3586a375..56dd0bffff 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -26,7 +26,7 @@ trait IndexedSeq[+A] extends Seq[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll indexed sequence
- * @define Coll IndexedSeq
+ * @define Coll `IndexedSeq`
*/
object IndexedSeq extends SeqFactory[IndexedSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index d1f7d1cb36..11f481e425 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -26,7 +26,7 @@ import scala.annotation.tailrec
* access and length computation. They are defined in terms of abstract methods
* `apply` for indexing and `length`.
*
- * Indexed sequences do not add any new methods wrt `Seq`, but promise
+ * Indexed sequences do not add any new methods to `Seq`, but promise
* efficient implementations of random access patterns.
*
* @tparam A the element type of the $coll
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index b1752a5c67..f543c6f80f 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -40,7 +40,7 @@ trait Iterable[+A] extends Traversable[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll iterable collection
- * @define Coll Iterable
+ * @define Coll `Iterable`
*/
object Iterable extends TraversableFactory[Iterable] {
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index fb6d154952..2e9599058f 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -12,7 +12,6 @@ package scala.collection
import generic._
import immutable.{ List, Stream }
import annotation.unchecked.uncheckedVariance
-import annotation.bridge
/** A template trait for iterable collections of type `Iterable[A]`.
* $iterableInfo
@@ -134,7 +133,7 @@ self =>
it.next
i += 1
}
- b ++= it result
+ (b ++= it).result
}
override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = {
@@ -149,7 +148,7 @@ self =>
}
/** Partitions elements in fixed size ${coll}s.
- * @see Iterator#grouped
+ * @see [[scala.collection.Iterator]], method `grouped`
*
* @param size the number of elements per group
* @return An iterator producing ${coll}s of size `size`, except the
@@ -164,7 +163,18 @@ self =>
/** Groups elements in fixed size blocks by passing a "sliding window"
* over them (as opposed to partitioning them, as is done in grouped.)
- * @see Iterator#sliding
+ * @see [[scala.collection.Iterator]], method `sliding`
+ *
+ * @param size the number of elements per group
+ * @return An iterator producing ${coll}s of size `size`, except the
+ * last and the only element will be truncated if there are
+ * fewer elements than size.
+ */
+ def sliding(size: Int): Iterator[Repr] = sliding(size, 1)
+
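The single-argument `sliding` pulled out here defaults the step to 1; the difference from `grouped` in a nutshell:

{{{
val xs = List(1, 2, 3, 4, 5)
xs.grouped(2).toList      // List(List(1, 2), List(3, 4), List(5))
xs.sliding(2).toList      // List(List(1, 2), List(2, 3), List(3, 4), List(4, 5))
xs.sliding(2, 2).toList   // List(List(1, 2), List(3, 4), List(5)) -- same as grouped(2)
}}}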
+ /** Groups elements in fixed size blocks by passing a "sliding window"
+ * over them (as opposed to partitioning them, as is done in grouped.)
+ * @see [[scala.collection.Iterator]], method `sliding`
*
* @param size the number of elements per group
* @param step the distance between the first elements of successive
@@ -173,7 +183,6 @@ self =>
* last and the only element will be truncated if there are
* fewer elements than size.
*/
- def sliding(size: Int): Iterator[Repr] = sliding(size, 1)
def sliding(size: Int, step: Int): Iterator[Repr] =
for (xs <- iterator.sliding(size, step)) yield {
val b = newBuilder
@@ -239,10 +248,6 @@ self =>
b.result
}
- @bridge
- def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That =
- zip(that: GenIterable[B])(bf)
-
def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
val b = bf(repr)
val these = this.iterator
@@ -256,10 +261,6 @@ self =>
b.result
}
- @bridge
- def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That =
- zipAll(that: GenIterable[B], thisElem, thatElem)(bf)
-
def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
val b = bf(repr)
var i = 0
@@ -280,9 +281,6 @@ self =>
!these.hasNext && !those.hasNext
}
- @bridge
- def sameElements[B >: A](that: Iterable[B]): Boolean = sameElements(that: GenIterable[B])
-
override /*TraversableLike*/ def toStream: Stream[A] = iterator.toStream
/** Method called from equality methods, so that user-defined subclasses can
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index ce2daf08d4..c842475590 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -11,6 +11,7 @@ package scala.collection
import generic._
import TraversableView.NoBuilder
import immutable.Stream
+import language.implicitConversions
/** A template trait for non-strict views of iterable collections.
* $iterableViewInfo
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 65de60c8fe..b2bbc8d888 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -21,6 +21,15 @@ import immutable.Stream
*/
object Iterator {
+ /** With the advent of `TraversableOnce` and `Iterator`, it can be useful to have a builder which
+ * operates on `Iterator`s so they can be treated uniformly along with the collections.
+ * See `scala.util.Random.shuffle` for an example.
+ */
+ implicit def IteratorCanBuildFrom[A] = new TraversableOnce.BufferedCanBuildFrom[A, Iterator] {
+ def bufferToColl[B](coll: ArrayBuffer[B]) = coll.iterator
+ def traversableToColl[B](t: GenTraversable[B]) = t.toIterator
+ }
+
/** The iterator which produces no values. */
val empty: Iterator[Nothing] = new AbstractIterator[Nothing] {
def hasNext: Boolean = false
@@ -382,6 +391,24 @@ trait Iterator[+A] extends TraversableOnce[A] {
def next() = if (hasNext) { hdDefined = false; hd } else empty.next()
}
+
+ /** Tests whether every element of this iterator relates to the
+ * corresponding element of another collection by satisfying a test predicate.
+ *
+ * @param that the other collection
+ * @param p the test predicate, which relates elements from both collections
+ * @tparam B the type of the elements of `that`
+ * @return `true` if both collections have the same length and
+ * `p(x, y)` is `true` for all corresponding elements `x` of this iterator
+ * and `y` of `that`, otherwise `false`
+ */
+ def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = {
+ val that0 = that.toIterator
+ while (hasNext && that0.hasNext)
+ if (!p(next, that0.next)) return false
+
+ hasNext == that0.hasNext
+ }
/** Creates an iterator over all the elements of this iterator that
* satisfy the predicate `p`. The order of the elements
@@ -797,7 +824,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Creates a buffered iterator from this iterator.
*
- * @see BufferedIterator
+ * @see [[scala.collection.BufferedIterator]]
* @return a buffered iterator producing the same values as this iterator.
* @note Reuse: $consumesAndProducesIterator
*/
@@ -1052,11 +1079,12 @@ trait Iterator[+A] extends TraversableOnce[A] {
if (i < from) origElems.hasNext
else patchElems.hasNext || origElems.hasNext
def next(): B = {
+ // We have to do this *first* just in case from = 0.
+ if (i == from) origElems = origElems drop replaced
val result: B =
if (i < from || !patchElems.hasNext) origElems.next()
else patchElems.next()
i += 1
- if (i == from) origElems = origElems drop replaced
result
}
}
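Two of the `Iterator` changes above in action, as a sketch: the new `corresponds` and the `patch` fix for a zero `from` index (results shown as comments):

{{{
Iterator(1, 2, 3).corresponds(List(2, 4, 6))(_ * 2 == _)   // true
Iterator(1, 2).corresponds(List(1, 2, 3))(_ == _)          // false: different lengths

// With the fix, patching at index 0 really replaces the first element:
Iterator(1, 2, 3).patch(0, Iterator(9), 1).toList          // List(9, 2, 3)
}}}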
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index be143cf96b..21ed91f7f3 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -26,7 +26,7 @@ trait LinearSeq[+A] extends Seq[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll linear sequence
- * @define Coll LinearSeq
+ * @define Coll `LinearSeq`
*/
object LinearSeq extends SeqFactory[LinearSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 0c07d5bb74..42a56a9c5a 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -33,7 +33,7 @@ trait Map[A, +B] extends Iterable[(A, B)] with GenMap[A, B] with MapLike[A, B, M
}
/** $factoryInfo
- * @define Coll Map
+ * @define Coll `Map`
* @define coll map
*/
object Map extends MapFactory[Map] {
@@ -45,7 +45,7 @@ object Map extends MapFactory[Map] {
/** An abstract shell used by { mutable, immutable }.Map but not by collection.Map
* because of variance issues.
*/
- abstract class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends AbstractMap[A, B] with Map[A, B] {
+ abstract class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = underlying.size
def get(key: A) = underlying.get(key) // removed in 2.9: orElse Some(default(key))
def iterator = underlying.iterator
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 8f88e62791..b9b8f62574 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -165,7 +165,7 @@ self =>
/** The implementation class of the set returned by `keySet`.
*/
- protected class DefaultKeySet extends AbstractSet[A] with Set[A] {
+ protected class DefaultKeySet extends AbstractSet[A] with Set[A] with Serializable {
def contains(key : A) = self.contains(key)
def iterator = keysIterator
def + (elem: A): Set[A] = (Set[A]() ++ this + elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
@@ -200,7 +200,7 @@ self =>
/** The implementation class of the iterable returned by `values`.
*/
- protected class DefaultValuesIterable extends AbstractIterable[B] with Iterable[B] {
+ protected class DefaultValuesIterable extends AbstractIterable[B] with Iterable[B] with Serializable {
def iterator = valuesIterator
override def size = self.size
override def foreach[C](f: B => C) = self.valuesIterator foreach f
@@ -280,14 +280,14 @@ self =>
*
* @usecase def + (kvs: (A, B)*): Map[A, B]
* @inheritdoc
- * @param the key/value pairs
+ * @param kvs the key/value pairs
*/
def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] =
this + kv1 + kv2 ++ kvs
/** Adds all key/value pairs in a traversable collection to this map, returning a new map.
*
- * @param kvs the collection containing the added key/value pairs
+ * @param xs the collection containing the added key/value pairs
* @tparam B1 the type of the added values
* @return a new map with the given bindings added to this map
*
@@ -297,9 +297,6 @@ self =>
def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
((repr: Map[A, B1]) /: xs.seq) (_ + _)
- @bridge
- def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
-
/** Returns a new map with all key/value pairs for which the predicate
* `p` returns `true`.
*
diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala
index fd03a49af4..34705ee058 100644
--- a/src/library/scala/collection/Seq.scala
+++ b/src/library/scala/collection/Seq.scala
@@ -27,7 +27,7 @@ trait Seq[+A] extends PartialFunction[Int, A]
/** $factoryInfo
* The current default implementation of a $Coll is a `List`.
* @define coll sequence
- * @define Coll Seq
+ * @define Coll `Seq`
*/
object Seq extends SeqFactory[Seq] {
/** $genericCanBuildFromInfo */
diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala
index cb3cb27f18..de9ff93521 100644
--- a/src/library/scala/collection/SeqExtractors.scala
+++ b/src/library/scala/collection/SeqExtractors.scala
@@ -11,11 +11,13 @@ object +: {
/** An extractor used to init/last deconstruct sequences. */
object :+ {
/** Splits a sequence into init :+ tail.
- * @returns Some(init, tail) if sequence is non-empty.
- * None otherwise.
+ * @return Some(init, tail) if sequence is non-empty. None otherwise.
*/
def unapply[T,Coll <: SeqLike[T, Coll]](
t: Coll with SeqLike[T, Coll]): Option[(Coll, T)] =
if(t.isEmpty) None
else Some(t.init -> t.last)
}
+
+// Dummy to fool ant
+private abstract class SeqExtractors
\ No newline at end of file
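The `+:` and `:+` extractors defined in this file deconstruct a sequence from either end:

{{{
List(1, 2, 3) match {
  case head +: rest => (head, rest)    // (1, List(2, 3))
}
List(1, 2, 3) match {
  case front :+ last => (front, last)  // (List(1, 2), 3)
}
}}}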
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index fd1d42d7e9..c87726ef2c 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -12,7 +12,6 @@ import mutable.{ ListBuffer, ArraySeq }
import immutable.{ List, Range }
import generic._
import parallel.ParSeq
-import annotation.bridge
import scala.math.Ordering
/** A template trait for sequences of type `Seq[A]`
@@ -46,7 +45,7 @@ import scala.math.Ordering
* @version 1.0, 16/07/2003
* @since 2.8
*
- * @define Coll Seq
+ * @define Coll `Seq`
* @define coll sequence
* @define thatinfo the class of the returned collection. Where possible, `That` is
* the same class as the current collection class `Repr`, but this
@@ -185,7 +184,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
private[this] def init() = {
val m = mutable.HashMap[A, Int]()
- val (es, is) = thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2) unzip
+ val (es, is) = (thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip
(es.toBuffer, is.toArray)
}
@@ -240,7 +239,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
val m = mutable.HashMap[A, Int]()
// e => (e, weight(e))
- val (es, is) = thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2) unzip
+ val (es, is) = (thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip
val cs = new Array[Int](m.size)
is foreach (i => cs(i) += 1)
val ns = new Array[Int](cs.length)
@@ -296,9 +295,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
!j.hasNext
}
- @bridge
- def startsWith[B](that: Seq[B], offset: Int): Boolean = startsWith(that: GenSeq[B], offset)
-
def endsWith[B](that: GenSeq[B]): Boolean = {
val i = this.iterator.drop(length - that.length)
val j = that.iterator
@@ -309,10 +305,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
!j.hasNext
}
- @bridge
- def endsWith[B](that: Seq[B]): Boolean = endsWith(that: GenSeq[B])
-
-
/** Finds first index where this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
* @param that the sequence to test
@@ -321,9 +313,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
*/
def indexOfSlice[B >: A](that: GenSeq[B]): Int = indexOfSlice(that, 0)
- @bridge
- def indexOfSlice[B >: A](that: Seq[B]): Int = indexOfSlice(that: GenSeq[B])
-
/** Finds first index after or at a start index where this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
* @param that the sequence to test
@@ -354,9 +343,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
-1
}
- @bridge
- def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = indexOfSlice(that: GenSeq[B], from)
-
/** Finds last index where this $coll contains a given sequence as a slice.
* $willNotTerminateInf
* @param that the sequence to test
@@ -365,9 +351,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
*/
def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = lastIndexOfSlice(that, length)
- @bridge
- def lastIndexOfSlice[B >: A](that: Seq[B]): Int = lastIndexOfSlice(that: GenSeq[B])
-
/** Finds last index before or at a given end index where this $coll contains a given sequence as a slice.
* @param that the sequence to test
* @param end the end index
@@ -385,9 +368,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, false)
}
- @bridge
- def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = lastIndexOfSlice(that: GenSeq[B], end)
-
/** Tests whether this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
* @param that the sequence to test
@@ -396,15 +376,12 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
*/
def containsSlice[B](that: GenSeq[B]): Boolean = indexOfSlice(that) != -1
- @bridge
- def containsSlice[B](that: Seq[B]): Boolean = containsSlice(that: GenSeq[B])
-
/** Tests whether this $coll contains a given value as an element.
* $mayNotTerminateInf
*
* @param elem the element to test.
- * @return `true` if this $coll has an element that is
- * is equal (wrt `==`) to `elem`, `false` otherwise.
+ * @return `true` if this $coll has an element that is equal (as
+ * determined by `==`) to `elem`, `false` otherwise.
*/
def contains(elem: Any): Boolean = exists (_ == elem)
@@ -436,8 +413,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
*
* @param that the sequence of elements to remove
* @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* except some of occurrences of elements that also appear in `that`.
* If an element value `x` appears
@@ -463,15 +438,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b.result
}
- @bridge
- def diff[B >: A](that: Seq[B]): Repr = diff(that: GenSeq[B])
-
/** Computes the multiset intersection between this $coll and another sequence.
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* which also appear in `that`.
* If an element value `x` appears
@@ -499,9 +469,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b.result
}
- @bridge
- def intersect[B >: A](that: Seq[B]): Repr = intersect(that: GenSeq[B])
-
private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = {
val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 }
for (y <- sq.seq) occ(y) += 1
@@ -534,10 +501,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b.result
}
- @bridge
- def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That =
- this.patch(from, patch: GenSeq[B], replaced)(bf)
-
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
val (prefix, rest) = this.splitAt(index)
@@ -583,15 +546,11 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
!i.hasNext && !j.hasNext
}
- @bridge
- def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean =
- corresponds(that: GenSeq[B])(p)
-
/** Sorts this $coll according to a comparison function.
* $willNotTerminateInf
*
- * The sort is stable. That is, elements that are equal wrt `lt` appear in the
- * same order in the sorted sequence as in the original.
+ * The sort is stable. That is, elements that are equal (as determined by
+ * `lt`) appear in the same order in the sorted sequence as in the original.
*
* @param lt the comparison function which tests whether
* its first argument precedes its second argument in
@@ -607,7 +566,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
/** Sorts this $Coll according to the Ordering which results from transforming
* an implicitly given Ordering with a transformation function.
- * @see scala.math.Ordering
+ * @see [[scala.math.Ordering]]
* $willNotTerminateInf
* @param f the transformation function mapping elements
* to some other domain `B`.
@@ -629,10 +588,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
/** Sorts this $coll according to an Ordering.
*
- * The sort is stable. That is, elements that are equal wrt `lt` appear in the
- * same order in the sorted sequence as in the original.
+ * The sort is stable. That is, elements that are equal (as determined by
+ * `lt`) appear in the same order in the sorted sequence as in the original.
*
- * @see scala.math.Ordering
+ * @see [[scala.math.Ordering]]
*
* @param ord the ordering to be used to compare elements.
* @return a $coll consisting of the elements of this $coll
@@ -893,7 +852,7 @@ object SeqLike {
/** Finds a particular index at which one sequence occurs in another sequence.
* Like `indexOf`, but finds the latest occurrence rather than earliest.
*
- * @see SeqLike#indexOf
+ * @see [[scala.collection.SeqLike]], method `indexOf`
*/
def lastIndexOf[B](
source: Seq[B], sourceOffset: Int, sourceCount: Int,
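A minimal sketch (not part of the patch; values are illustrative) of the slice-search and stable-sort operations whose documentation is touched above:

  val words = Seq("pear", "fig", "plum", "kiwi")
  words.sortBy(_.length)                    // List(fig, pear, plum, kiwi): stable, "pear" stays before "plum"
  words.indexOfSlice(Seq("plum", "kiwi"))   // 2
  words.containsSlice(Seq("fig", "plum"))   // true
  words.contains("fig")                     // true (equality as determined by ==)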
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index ce32ba97c2..3783ef771f 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -36,7 +36,7 @@ trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A,
override def indexOf[B >: A](elem: B): Int = self.indexOf(elem)
override def indexOf[B >: A](elem: B, from: Int): Int = self.indexOf(elem, from)
override def lastIndexOf[B >: A](elem: B): Int = self.lastIndexOf(elem)
- override def lastIndexOf[B >: A](elem: B, end: Int): Int = self.lastIndexWhere(elem ==, end)
+ override def lastIndexOf[B >: A](elem: B, end: Int): Int = self.lastIndexWhere(elem == _, end)
override def lastIndexWhere(p: A => Boolean): Int = self.lastIndexWhere(p, length - 1)
override def lastIndexWhere(p: A => Boolean, end: Int): Int = self.lastIndexWhere(p)
override def reverse: Repr = self.reverse
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index a32cad08e5..f64045c9f6 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -11,7 +11,6 @@ package scala.collection
import generic._
import Seq.fill
import TraversableView.NoBuilder
-import annotation.bridge
/** A template trait for non-strict views of sequences.
* $seqViewInfo
@@ -132,13 +131,9 @@ trait SeqViewLike[+A,
override def diff[B >: A](that: GenSeq[B]): This =
newForced(thisSeq diff that).asInstanceOf[This]
- @bridge def diff[B >: A](that: Seq[B]): This = diff(that: GenSeq[B])
-
override def intersect[B >: A](that: GenSeq[B]): This =
newForced(thisSeq intersect that).asInstanceOf[This]
- @bridge def intersect[B >: A](that: Seq[B]): This = intersect(that: GenSeq[B])
-
override def sorted[B >: A](implicit ord: Ordering[B]): This =
newForced(thisSeq sorted ord).asInstanceOf[This]
diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala
index 4c67aad603..7424c9cb9a 100644
--- a/src/library/scala/collection/Set.scala
+++ b/src/library/scala/collection/Set.scala
@@ -35,7 +35,7 @@ trait Set[A] extends (A => Boolean)
* The current default implementation of a $Coll is one of `EmptySet`, `Set1`, `Set2`, `Set3`, `Set4` in
* class `immutable.Set` for sets of sizes up to 4, and a `immutable.HashSet` for sets of larger sizes.
* @define coll set
- * @define Coll Set
+ * @define Coll `Set`
*/
object Set extends SetFactory[Set] {
def newBuilder[A] = immutable.Set.newBuilder[A]
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 7293f3775c..04ec4af830 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -127,9 +127,6 @@ self =>
*/
def ++ (elems: GenTraversableOnce[A]): This = (repr /: elems.seq)(_ + _)
- @bridge
- def ++ (elems: TraversableOnce[A]): This = ++ (elems: GenTraversableOnce[A])
-
/** Creates a new set with a given element removed from this set.
*
* @param elem the element to be removed
@@ -152,9 +149,6 @@ self =>
*/
def union(that: GenSet[A]): This = this ++ that
- @bridge
- def union(that: Set[A]): This = union(that: GenSet[A])
-
/** Computes the difference of this set and another set.
*
* @param that the set of elements to exclude.
@@ -163,9 +157,6 @@ self =>
*/
def diff(that: GenSet[A]): This = this -- that
- @bridge
- def diff(that: Set[A]): This = diff(that: GenSet[A])
-
/** An iterator over all subsets of this set of the given size.
* If the requested size is impossible, an empty iterator is returned.
*
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index 3fba3dfa79..cd85ea4d2d 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -13,7 +13,6 @@ package scala.collection
import generic._
import mutable.{Builder, Buffer, ArrayBuffer, ListBuffer}
import scala.util.control.Breaks
-import annotation.bridge
/** A trait for traversable collections.
* All operations are guaranteed to be performed in a single-threaded manner.
@@ -28,12 +27,6 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
override def seq: Traversable[A] = this
- @bridge
- def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): Traversable[B] = super.flatten(asTraversable)
-
- @bridge
- def transpose[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): Traversable[Traversable[B]] = super.transpose(asTraversable)
-
/* The following methods are inherited from TraversableLike
*
override def isEmpty: Boolean
@@ -75,7 +68,7 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
override def copyToBuffer[B >: A](dest: Buffer[B])
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int)
override def copyToArray[B >: A](xs: Array[B], start: Int)
- override def toArray[B >: A : ClassManifest]: Array[B]
+ override def toArray[B >: A : ArrayTag]: Array[B]
override def toList: List[A]
override def toIterable: Iterable[A]
override def toSeq: Seq[A]
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 1f5beb5109..3716a318d9 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -84,6 +84,8 @@ trait TraversableLike[+A, +Repr] extends Any
*/
def repr: Repr = this.asInstanceOf[Repr]
+ final def isTraversableAgain: Boolean = true
+
/** The underlying collection seen as an instance of `$Coll`.
* By default this is implemented as the current collection object itself,
* but this can be overridden.
@@ -155,10 +157,6 @@ trait TraversableLike[+A, +Repr] extends Any
b.result
}
- @bridge
- def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
- ++(that: GenTraversableOnce[B])(bf)
-
/** As with `++`, returns a new collection containing the elements from the left operand followed by the
* elements from the right operand.
*
@@ -487,7 +485,7 @@ trait TraversableLike[+A, +Repr] extends Any
if (n <= 0) {
val b = newBuilder
b.sizeHint(this)
- b ++= thisCollection result
+ (b ++= thisCollection).result
}
else sliceWithKnownDelta(n, Int.MaxValue, -n)
@@ -775,6 +773,6 @@ trait TraversableLike[+A, +Repr] extends Any
// A helper for tails and inits.
private def iterateUntilEmpty(f: Traversable[A @uV] => Traversable[A @uV]): Iterator[Repr] = {
val it = Iterator.iterate(thisCollection)(f) takeWhile (x => !x.isEmpty)
- it ++ Iterator(Nil) map (newBuilder ++= _ result)
+ it ++ Iterator(Nil) map (x => (newBuilder ++= x).result)
}
}
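With isTraversableAgain now answered at the TraversableLike level, a small illustrative sketch of what it distinguishes:

  List(1, 2, 3).isTraversableAgain       // true: a strict collection can be traversed repeatedly
  Iterator(1, 2, 3).isTraversableAgain   // false: an Iterator can only be consumed once
  (1 to 3).view.isTraversableAgain       // true: views can be re-traversed as well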
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 62ea692b90..d42c037dee 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -8,15 +8,14 @@
package scala.collection
-import mutable.{ Buffer, ListBuffer, ArrayBuffer }
+import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
import annotation.unchecked.{ uncheckedVariance => uV }
+import language.{implicitConversions, higherKinds}
/** A template trait for collections which can be traversed either once only
* or one or more times.
* $traversableonceinfo
*
- * @tparam A the element type of the collection
- *
* @author Martin Odersky
* @author Paul Phillips
* @version 2.8
@@ -228,7 +227,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def copyToArray[B >: A](xs: Array[B]): Unit =
copyToArray(xs, 0, xs.length)
- def toArray[B >: A : ClassManifest]: Array[B] = {
+ def toArray[B >: A : ArrayTag]: Array[B] = {
if (isTraversableAgain) {
val result = new Array[B](size)
copyToArray(result, 0)
@@ -239,7 +238,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def toTraversable: Traversable[A]
- def toList: List[A] = new ListBuffer[A] ++= seq toList
+ def toList: List[A] = (new ListBuffer[A] ++= seq).toList
def toIterable: Iterable[A] = toStream
@@ -356,33 +355,52 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
}
-
object TraversableOnce {
- implicit def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
- implicit def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
+ @deprecated("use OnceCanBuildFrom instead")
+ def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
+ @deprecated("use MonadOps instead")
+ def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
+
implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity
implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]) =
new FlattenOps[A](travs map ev)
-
- /** With the advent of TraversableOnce, it can be useful to have a builder which
- * operates on Iterators so they can be treated uniformly along with the collections.
- * See scala.util.Random.shuffle for an example.
- */
- class OnceCanBuildFrom[A] extends generic.CanBuildFrom[TraversableOnce[A], A, TraversableOnce[A]] {
- def newIterator = new ArrayBuffer[A] mapResult (_.iterator)
+
+ /* Functionality reused in Iterator.CanBuildFrom */
+ private[collection] abstract class BufferedCanBuildFrom[A, Coll[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[Coll[_], A, Coll[A]] {
+ def bufferToColl[B](buff: ArrayBuffer[B]): Coll[B]
+ def traversableToColl[B](t: GenTraversable[B]): Coll[B]
+
+ def newIterator: Builder[A, Coll[A]] = new ArrayBuffer[A] mapResult bufferToColl
/** Creates a new builder on request of a collection.
* @param from the collection requesting the builder to be created.
* @return the result of invoking the `genericBuilder` method on `from`.
*/
- def apply(from: TraversableOnce[A]) = newIterator
+ def apply(from: Coll[_]): Builder[A, Coll[A]] = from match {
+ case xs: generic.GenericTraversableTemplate[_, _] => xs.genericBuilder.asInstanceOf[Builder[A, Traversable[A]]] mapResult {
+ case res => traversableToColl(res.asInstanceOf[GenTraversable[A]])
+ }
+ case _ => newIterator
+ }
/** Creates a new builder from scratch
* @return the result of invoking the `newBuilder` method of this factory.
*/
def apply() = newIterator
}
-
+
+ /** With the advent of `TraversableOnce`, it can be useful to have a builder which
+ * operates on `Iterator`s so they can be treated uniformly along with the collections.
+ * See `scala.util.Random.shuffle` or `scala.concurrent.Future.sequence` for an example.
+ */
+ class OnceCanBuildFrom[A] extends BufferedCanBuildFrom[A, TraversableOnce] {
+ def bufferToColl[B](buff: ArrayBuffer[B]) = buff.iterator
+ def traversableToColl[B](t: GenTraversable[B]) = t.seq
+ }
+
+ /** Evidence for building collections from `TraversableOnce` collections */
+ implicit def OnceCanBuildFrom[A] = new OnceCanBuildFrom[A]
+
class FlattenOps[A](travs: TraversableOnce[TraversableOnce[A]]) {
def flatten: Iterator[A] = new AbstractIterator[A] {
val its = travs.toIterator
@@ -394,7 +412,7 @@ object TraversableOnce {
class ForceImplicitAmbiguity
- class MonadOps[+A](trav: TraversableOnce[A]) {
+ implicit class MonadOps[+A](trav: TraversableOnce[A]) {
def map[B](f: A => B): TraversableOnce[B] = trav.toIterator map f
def flatMap[B](f: A => GenTraversableOnce[B]): TraversableOnce[B] = trav.toIterator flatMap f
def withFilter(p: A => Boolean) = trav.toIterator filter p
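A minimal sketch (assuming the 2.10-era API introduced above) of the implicit OnceCanBuildFrom and the MonadOps implicit class in use:

  import scala.util.Random
  // OnceCanBuildFrom supplies a CanBuildFrom for TraversableOnce, so e.g. shuffle accepts iterators:
  val shuffled: TraversableOnce[Int] = Random.shuffle(Iterator(1, 2, 3): TraversableOnce[Int])
  // MonadOps (now an implicit class) adds map/flatMap/withFilter, yielding Iterator-backed results:
  val doubled: TraversableOnce[Int] = (List(1, 2, 3): TraversableOnce[Int]).map(_ * 2)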
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index e7e797391e..20880e369d 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -73,7 +73,7 @@ trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversabl
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = self.copyToArray(xs, start, len)
override def copyToArray[B >: A](xs: Array[B], start: Int) = self.copyToArray(xs, start)
override def copyToArray[B >: A](xs: Array[B]) = self.copyToArray(xs)
- override def toArray[B >: A: ClassManifest]: Array[B] = self.toArray
+ override def toArray[B >: A: ArrayTag]: Array[B] = self.toArray
override def toList: List[A] = self.toList
override def toIterable: Iterable[A] = self.toIterable
override def toSeq: Seq[A] = self.toSeq
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index fbecad98fe..eb2091a5f3 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -12,13 +12,14 @@ import generic._
import mutable.{ Builder, ArrayBuffer }
import TraversableView.NoBuilder
import annotation.migration
+import language.implicitConversions
trait ViewMkString[+A] {
self: Traversable[A] =>
// It is necessary to use thisSeq rather than toSeq to avoid cycles in the
// eager evaluation of vals in transformed view subclasses, see #4558.
- protected[this] def thisSeq: Seq[A] = new ArrayBuffer[A] ++= self result
+ protected[this] def thisSeq: Seq[A] = (new ArrayBuffer[A] ++= self).result
// Have to overload all three to work around #4299. The overload
// is because mkString should force a view but toString should not.
@@ -27,8 +28,16 @@ trait ViewMkString[+A] {
override def mkString(start: String, sep: String, end: String): String = {
thisSeq.addString(new StringBuilder(), start, sep, end).toString
}
- override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder =
- b append start append "..." append end
+ override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
+ var first = true
+ b append start
+ for (x <- self) {
+ if (first) first = false else b append sep
+ b append x
+ }
+ b append end
+ b
+ }
}
/** A template trait for non-strict views of traversable collections.
@@ -40,17 +49,17 @@ trait ViewMkString[+A] {
* that takes a `View` as its `From` type parameter must yield the same view (or a generic
* superclass of it) as its result parameter. If that assumption is broken, cast errors might result.
*
- * @define viewInfo
+ * @define viewInfo
* A view is a lazy version of some collection. Collection transformers such as
* `map` or `filter` or `++` do not traverse any elements when applied on a view.
* Instead they create a new view which simply records that fact that the operation
* needs to be applied. The collection elements are accessed, and the view operations are applied,
* when a non-view result is needed, or when the `force` method is called on a view.
- * @define traversableViewInfo
+ * @define traversableViewInfo
* $viewInfo
*
* All views for traversable collections are defined by creating a new `foreach` method.
-
+ *
* @author Martin Odersky
* @version 2.8
* @since 2.8
@@ -153,6 +162,8 @@ trait TraversableViewLike[+A,
// if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
// else super.flatMap[B, That](f)(bf)
}
+ override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) =
+ newFlatMapped(asTraversable)
private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
/** Boilerplate method, to override in each subclass
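The new ViewMkString.addString walks the view's elements instead of appending a placeholder; a small illustrative sketch:

  val v = (1 to 3).view map (_ * 2)
  v.mkString(", ")                                 // "2, 4, 6" (mkString forced the view already)
  v.addString(new StringBuilder, "[", ", ", "]")   // now builds "[2, 4, 6]" rather than "[...]"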
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index 83445738d9..a724be42cc 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -19,7 +19,7 @@ package scala.collection.concurrent
* @tparam A the key type of the map
* @tparam B the value type of the map
*
- * @define Coll ConcurrentMap
+ * @define Coll `ConcurrentMap`
* @define coll concurrent map
* @define concurrentmapinfo
* This is a base trait for all Scala concurrent map implementations. It
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 2a908aebb1..f944d20bdb 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -14,6 +14,7 @@ package concurrent
import java.util.concurrent.atomic._
import collection.immutable.{ ListMap => ImmutableListMap }
import collection.parallel.mutable.ParTrieMap
+import util.hashing.Hashing
import generic._
import annotation.tailrec
import annotation.switch
@@ -80,6 +81,9 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
} else false
}
+ @inline
+ private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2)
+
@inline private def inode(cn: MainNode[K, V]) = {
val nin = new INode[K, V](gen)
nin.WRITE(cn)
@@ -117,7 +121,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
else false
}
case sn: SNode[K, V] =>
- if (sn.hc == hc && sn.k == k) GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)
+ if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)
else {
val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
@@ -164,7 +168,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
case sn: SNode[K, V] => cond match {
case null =>
- if (sn.hc == hc && sn.k == k) {
+ if (sn.hc == hc && equal(sn.k, k, ct)) {
if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
} else {
val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
@@ -173,7 +177,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
else null
}
case INode.KEY_ABSENT =>
- if (sn.hc == hc && sn.k == k) Some(sn.v)
+ if (sn.hc == hc && equal(sn.k, k, ct)) Some(sn.v)
else {
val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
@@ -181,11 +185,11 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
else null
}
case INode.KEY_PRESENT =>
- if (sn.hc == hc && sn.k == k) {
+ if (sn.hc == hc && equal(sn.k, k, ct)) {
if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
} else None
case otherv: V =>
- if (sn.hc == hc && sn.k == k && sn.v == otherv) {
+ if (sn.hc == hc && equal(sn.k, k, ct) && sn.v == otherv) {
if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
} else None
}
@@ -253,7 +257,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
else return RESTART // used to be throw RestartException
}
case sn: SNode[K, V] => // 2) singleton node
- if (sn.hc == hc && sn.k == k) sn.v.asInstanceOf[AnyRef]
+ if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef]
else null
}
}
@@ -296,7 +300,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
else null
}
case sn: SNode[K, V] =>
- if (sn.hc == hc && sn.k == k && (v == null || sn.v == v)) {
+ if (sn.hc == hc && equal(sn.k, k, ct) && (v == null || sn.v == v)) {
val ncn = cn.removedAt(pos, flag, gen).toContracted(lev)
if (GCAS(cn, ncn, ct)) Some(sn.v) else null
} else None
@@ -341,11 +345,11 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
case ln: LNode[K, V] =>
if (v == null) {
val optv = ln.get(k)
- val nn = ln.removed(k)
+ val nn = ln.removed(k, ct)
if (GCAS(ln, nn, ct)) optv else null
} else ln.get(k) match {
case optv @ Some(v0) if v0 == v =>
- val nn = ln.removed(k)
+ val nn = ln.removed(k, ct)
if (GCAS(ln, nn, ct)) optv else null
case _ => None
}
@@ -433,12 +437,12 @@ extends MainNode[K, V] {
def this(k: K, v: V) = this(ImmutableListMap(k -> v))
def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2))
def inserted(k: K, v: V) = new LNode(listmap + ((k, v)))
- def removed(k: K): MainNode[K, V] = {
+ def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = {
val updmap = listmap - k
if (updmap.size > 1) new LNode(updmap)
else {
val (k, v) = updmap.iterator.next
- new TNode(k, v, TrieMap.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
+ new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
}
}
def get(k: K) = listmap.get(k)
@@ -627,25 +631,34 @@ private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expected
* @since 2.10
*/
@SerialVersionUID(0L - 6402774413839597105L)
-final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef])
+final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K])
extends scala.collection.concurrent.Map[K, V]
with scala.collection.mutable.MapLike[K, V, TrieMap[K, V]]
with CustomParallelizable[(K, V), ParTrieMap[K, V]]
with Serializable
{
- import TrieMap.computeHash
-
+ private var hashingobj = if (hashf.isInstanceOf[Hashing.Default[_]]) new TrieMap.MangledHashing[K] else hashf
+ private var equalityobj = ef
private var rootupdater = rtupd
+ def hashing = hashingobj
+ def equality = equalityobj
@volatile var root = r
-
- def this() = this(
+
+ def this(hashf: Hashing[K], ef: Equiv[K]) = this(
INode.newRootNode,
- AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root")
+ AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"),
+ hashf,
+ ef
)
-
+
+ def this() = this(Hashing.default, Equiv.universal)
+
/* internal methods */
private def writeObject(out: java.io.ObjectOutputStream) {
+ out.writeObject(hashf)
+ out.writeObject(ef)
+
val it = iterator
while (it.hasNext) {
val (k, v) = it.next()
@@ -659,6 +672,9 @@ extends scala.collection.concurrent.Map[K, V]
root = INode.newRootNode
rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root")
+ hashingobj = in.readObject().asInstanceOf[Hashing[K]]
+ equalityobj = in.readObject().asInstanceOf[Equiv[K]]
+
var obj: AnyRef = null
do {
obj = in.readObject()
@@ -780,7 +796,7 @@ extends scala.collection.concurrent.Map[K, V]
@tailrec final def snapshot(): TrieMap[K, V] = {
val r = RDCSS_READ_ROOT()
val expmain = r.gcasRead(this)
- if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater, hashing, equality)
else snapshot()
}
@@ -799,7 +815,7 @@ extends scala.collection.concurrent.Map[K, V]
@tailrec final def readOnlySnapshot(): collection.Map[K, V] = {
val r = RDCSS_READ_ROOT()
val expmain = r.gcasRead(this)
- if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality)
else readOnlySnapshot()
}
@@ -807,7 +823,10 @@ extends scala.collection.concurrent.Map[K, V]
val r = RDCSS_READ_ROOT()
if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear()
}
-
+
+ @inline
+ def computeHash(k: K) = hashingobj.hashCode(k)
+
final def lookup(k: K): V = {
val hc = computeHash(k)
lookuphc(k, hc).asInstanceOf[V]
@@ -894,14 +913,15 @@ object TrieMap extends MutableMapFactory[TrieMap] {
implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), TrieMap[K, V]] = new MapCanBuildFrom[K, V]
def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V]
-
- @inline final def computeHash[K](k: K): Int = {
- var hcode = k.hashCode
- hcode = hcode * 0x9e3775cd
- hcode = java.lang.Integer.reverseBytes(hcode)
- hcode * 0x9e3775cd
+
+ class MangledHashing[K] extends Hashing[K] {
+ def hashCode(k: K) = {
+ var hcode = k.## * 0x9e3775cd
+ hcode = java.lang.Integer.reverseBytes(hcode)
+ hcode * 0x9e3775cd
+ }
}
-
+
}
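A minimal sketch (illustrative names; assumes the new (Hashing, Equiv) constructor shown above) of a TrieMap with custom hashing and equivalence:

  import scala.collection.concurrent.TrieMap
  import scala.util.hashing.Hashing

  // case-insensitive key handling, purely for illustration
  val ciHashing = new Hashing[String] { def hashCode(s: String) = s.toLowerCase.hashCode }
  val ciEquiv   = Equiv.fromFunction[String](_ equalsIgnoreCase _)
  val m = new TrieMap[String, Int](ciHashing, ciEquiv)
  m.put("Foo", 1)
  m.get("FOO")   // Some(1): lookup goes through equality.equiv and hashing.hashCode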
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index e05bfc41cd..bde13f2830 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -12,6 +12,8 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Decorators._
import WrapAsJava._
+import language.implicitConversions
+
/** A collection of decorators that allow to convert between
* Scala and Java collections using `asScala` and `asJava` methods.
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
index 722f0b9af9..539584b148 100644
--- a/src/library/scala/collection/convert/DecorateAsScala.scala
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -12,6 +12,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Decorators._
import WrapAsScala._
+import language.implicitConversions
trait DecorateAsScala {
/**
@@ -185,9 +186,9 @@ trait DecorateAsScala {
* any side-effects of using it via the Scala interface will be visible via
* the Java interface and vice versa.
*
- * @param m The `Dictionary` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map[String, String]` view of the argument.
+ * @param p The `Dictionary` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map[String, String]` view of the argument.
*/
implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] =
new AsScala(dictionaryAsScalaMap(p))
@@ -199,9 +200,9 @@ trait DecorateAsScala {
* any side-effects of using it via the Scala interface will be visible via
* the Java interface and vice versa.
*
- * @param m The `Properties` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map[String, String]` view of the argument.
+ * @param p The `Properties` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map[String, String]` view of the argument.
*/
implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
new AsScala(propertiesAsScalaMap(p))
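A small sketch of the asScala / asJava decorators documented above, via JavaConverters (which mixes in these traits); values are illustrative:

  import scala.collection.JavaConverters._

  val props = new java.util.Properties
  props.setProperty("lang", "scala")
  val asMap = props.asScala          // mutable.Map[String, String] view backed by props
  asMap("lang")                      // "scala"
  Seq(1, 2, 3).asJava                // java.util.List[Int] view of the Scala Seq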
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index cdec72b9fe..fcfe402a68 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -11,6 +11,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Wrappers._
+import language.implicitConversions
trait WrapAsJava {
/**
@@ -23,8 +24,8 @@ trait WrapAsJava {
* explicit call of `asIterator(java.util.Iterator)` then the original
* Java Iterator will be returned.
*
- * @param i The Iterator to be converted.
- * @return A Java Iterator view of the argument.
+ * @param it The Iterator to be converted.
+ * @return A Java Iterator view of the argument.
*/
implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
@@ -41,8 +42,8 @@ trait WrapAsJava {
* explicit call of `asIterator(java.util.Enumeration)` then the
* original Java Enumeration will be returned.
*
- * @param i The Iterator to be converted.
- * @return A Java Enumeration view of the argument.
+ * @param it The Iterator to be converted.
+ * @return A Java Enumeration view of the argument.
*/
implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
@@ -75,8 +76,8 @@ trait WrapAsJava {
* explicit call of `asSizedIterable(java.util.Collection)` then the original
* Java Collection will be returned.
*
- * @param i The SizedIterable to be converted.
- * @return A Java Collection view of the argument.
+ * @param it The SizedIterable to be converted.
+ * @return A Java Collection view of the argument.
*/
implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
@@ -111,8 +112,8 @@ trait WrapAsJava {
* explicit call of `asSeq(java.util.List)` then the original
* Java List will be returned.
*
- * @param b The Seq to be converted.
- * @return A Java List view of the argument.
+ * @param seq The Seq to be converted.
+ * @return A Java List view of the argument.
*/
implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
case JListWrapper(wrapped) => wrapped
@@ -129,8 +130,8 @@ trait WrapAsJava {
* explicit call of `asSeq(java.util.List)` then the original
* Java List will be returned.
*
- * @param b The Seq to be converted.
- * @return A Java List view of the argument.
+ * @param seq The Seq to be converted.
+ * @return A Java List view of the argument.
*/
implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index 56e13b2105..49f4d7cd99 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -11,6 +11,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Wrappers._
+import language.implicitConversions
trait WrapAsScala {
/**
@@ -24,8 +25,8 @@ trait WrapAsScala {
* explicit call of `asIterator(scala.collection.Iterator)` then the
* original Scala `Iterator` will be returned.
*
- * @param i The `Iterator` to be converted.
- * @return A Scala `Iterator` view of the argument.
+ * @param it The `Iterator` to be converted.
+ * @return A Scala `Iterator` view of the argument.
*/
implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
case IteratorWrapper(wrapped) => wrapped
@@ -186,8 +187,8 @@ trait WrapAsScala {
* `Dictionary` and any side-effects of using it via the Scala interface
* will be visible via the Java interface and vice versa.
*
- * @param m The Dictionary to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
+ * @param p The Dictionary to be converted.
+ * @return A Scala mutable Map[String, String] view of the argument.
*/
implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
case DictionaryWrapper(wrapped) => wrapped
@@ -201,8 +202,8 @@ trait WrapAsScala {
* `Properties` and any side-effects of using it via the Scala interface
* will be visible via the Java interface and vice versa.
*
- * @param m The Properties to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
+ * @param p The Properties to be converted.
+ * @return A Scala mutable Map[String, String] view of the argument.
*/
implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
case _ => new JPropertiesWrapper(p)
diff --git a/src/library/scala/collection/generic/ClassManifestTraversableFactory.scala b/src/library/scala/collection/generic/ArrayTagTraversableFactory.scala
index e54ce9cdbf..ddae0a4d64 100644
--- a/src/library/scala/collection/generic/ClassManifestTraversableFactory.scala
+++ b/src/library/scala/collection/generic/ArrayTagTraversableFactory.scala
@@ -9,21 +9,23 @@
package scala.collection
package generic
-/** A template for companion objects of `ClassManifestTraversable` and
+import language.higherKinds
+
+/** A template for companion objects of `ClassTagTraversable` and
* subclasses thereof.
*
* @define coll collection
- * @define Coll Traversable
+ * @define Coll `Traversable`
* @define genericCanBuildFromInfo
* The standard `CanBuildFrom` instance for $Coll objects.
* @author Aleksandar Prokopec
* @since 2.8
*/
-abstract class ClassManifestTraversableFactory[CC[X] <: Traversable[X] with GenericClassManifestTraversableTemplate[X, CC]]
- extends GenericClassManifestCompanion[CC] {
+abstract class ArrayTagTraversableFactory[CC[X] <: Traversable[X] with GenericArrayTagTraversableTemplate[X, CC]]
+ extends GenericArrayTagCompanion[CC] {
- class GenericCanBuildFrom[A](implicit manif: ClassManifest[A]) extends CanBuildFrom[CC[_], A, CC[A]] {
- def apply(from: CC[_]) = from.genericClassManifestBuilder[A]
+ class GenericCanBuildFrom[A](implicit tag: ArrayTag[A]) extends CanBuildFrom[CC[_], A, CC[A]] {
+ def apply(from: CC[_]) = from.genericArrayTagBuilder[A]
def apply = newBuilder[A]
}
}
diff --git a/src/library/scala/collection/generic/BitSetFactory.scala b/src/library/scala/collection/generic/BitSetFactory.scala
index 796b12b0ac..da80b3964b 100644
--- a/src/library/scala/collection/generic/BitSetFactory.scala
+++ b/src/library/scala/collection/generic/BitSetFactory.scala
@@ -15,7 +15,7 @@ import scala.collection._
import mutable.Builder
/** @define coll collection
- * @define Coll Traversable
+ * @define Coll `Traversable`
* @define factoryInfo
* This object provides a set of operations to create `$Coll` values.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/CanBuildFrom.scala b/src/library/scala/collection/generic/CanBuildFrom.scala
index 3a335f357a..f3eff03d89 100644
--- a/src/library/scala/collection/generic/CanBuildFrom.scala
+++ b/src/library/scala/collection/generic/CanBuildFrom.scala
@@ -20,7 +20,7 @@ import scala.annotation.implicitNotFound
* @tparam Elem the element type of the collection to be created.
* @tparam To the type of the collection to be created.
*
- * @see Builder
+ * @see [[scala.collection.mutable.Builder]]
* @author Martin Odersky
* @author Adriaan Moors
* @since 2.8
diff --git a/src/dbc/scala/dbc/statement/AccessMode.scala b/src/library/scala/collection/generic/Clearable.scala
index 885e0012f2..6c8d9558b0 100644
--- a/src/dbc/scala/dbc/statement/AccessMode.scala
+++ b/src/library/scala/collection/generic/Clearable.scala
@@ -6,21 +6,21 @@
** |/ **
\* */
+package scala.collection
+package generic
-
-package scala.dbc
-package statement
-
-
-@deprecated(DbcIsDeprecated, "2.9.0") abstract class AccessMode {
- def sqlString: String
-}
-
-@deprecated(DbcIsDeprecated, "2.9.0") object AccessMode {
- case object ReadOnly extends AccessMode {
- def sqlString = "READ ONLY"
- }
- case object ReadWrite extends AccessMode {
- def sqlString = "READ WRITE"
- }
+/** This trait forms part of collections that can be cleared
+ * with a clear() call.
+ *
+ * @author Paul Phillips
+ * @version 2.10
+ * @since 2.10
+ * @define coll clearable collection
+ * @define Coll `Clearable`
+ */
+trait Clearable {
+ /** Clears the $coll's contents. After this operation, the
+ * $coll is empty.
+ */
+ def clear(): Unit
}
diff --git a/src/library/scala/collection/generic/FromRepr.scala b/src/library/scala/collection/generic/FromRepr.scala
new file mode 100644
index 0000000000..c08761332c
--- /dev/null
+++ b/src/library/scala/collection/generic/FromRepr.scala
@@ -0,0 +1,56 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+/** Type class witnessing that a collection representation type `Repr` has
+ * elements of type `A` and has a conversion to `GenTraversableLike[A, Repr]`.
+ *
+ * This type enables simple enrichment of `GenTraversable`s with extension
+ * methods which can make full use of the mechanics of the Scala collections
+ * framework in their implementation.
+ *
+ * Example usage,
+ * {{{
+ * import scala.collection.generic.{ CanBuildFrom, FromRepr, HasElem }
+ *
+ * class FilterMapImpl[A, Repr](val r : Repr)(implicit hasElem : HasElem[Repr, A]) {
+ * def filterMap[B, That](f : A => Option[B])
+ * (implicit cbf : CanBuildFrom[Repr, B, That]) : That = r.flatMap(f(_).toSeq)
+ * }
+ *
+ * implicit def filterMap[Repr : FromRepr](r : Repr) = new FilterMapImpl(r)
+ *
+ * val l = List(1, 2, 3, 4, 5)
+ * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
+ * // == List(2, 4)
+ * }}}
+ *
+ * @author Miles Sabin
+ * @since 2.10
+ */
+trait FromRepr[Repr] {
+ type A
+ val hasElem: HasElem[Repr, A]
+}
+
+object FromRepr {
+ import language.higherKinds
+
+ implicit val stringFromRepr : FromRepr[String] { type A = Char } = new FromRepr[String] {
+ type A = Char
+ val hasElem = implicitly[HasElem[String, Char]]
+ }
+
+ implicit def genTraversableLikeFromRepr[C[_], A0]
+ (implicit hasElem0: HasElem[C[A0], A0]) : FromRepr[C[A0]] { type A = A0 } = new FromRepr[C[A0]] {
+ type A = A0
+ val hasElem = hasElem0
+ }
+}
diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala
index d6f6978ead..31fe4e100d 100644
--- a/src/library/scala/collection/generic/GenMapFactory.scala
+++ b/src/library/scala/collection/generic/GenMapFactory.scala
@@ -10,11 +10,12 @@ package scala.collection
package generic
import mutable.{Builder, MapBuilder}
+import language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
*
* @define coll map
- * @define Coll Map
+ * @define Coll `Map`
* @define factoryInfo
* This object provides a set of operations needed to create `$Coll` values.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/GenSeqFactory.scala b/src/library/scala/collection/generic/GenSeqFactory.scala
index ee6ecae3c2..19eeba9b1d 100644
--- a/src/library/scala/collection/generic/GenSeqFactory.scala
+++ b/src/library/scala/collection/generic/GenSeqFactory.scala
@@ -11,16 +11,11 @@
package scala.collection
package generic
-import annotation.bridge
+import language.higherKinds
/** A template for companion objects of Seq and subclasses thereof.
*
* @since 2.8
*/
abstract class GenSeqFactory[CC[X] <: GenSeq[X] with GenericTraversableTemplate[X, CC]]
-extends GenTraversableFactory[CC] {
-
- @bridge
- def unapplySeq[A](x: GenSeq[A]): Some[GenSeq[A]] = Some(x)
-
-}
+extends GenTraversableFactory[CC]
diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala
index d83f248aff..4f812b337c 100644
--- a/src/library/scala/collection/generic/GenSetFactory.scala
+++ b/src/library/scala/collection/generic/GenSetFactory.scala
@@ -12,11 +12,12 @@ package scala.collection
package generic
import mutable.Builder
+import language.higherKinds
/** A template for companion objects of `Set` and subclasses thereof.
*
* @define coll set
- * @define Coll Set
+ * @define Coll `Set`
* @define factoryInfo
* This object provides a set of operations needed to create `$Coll` values.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index 34cbe1a7f2..2aaf93de05 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -10,6 +10,8 @@
package scala.collection
package generic
+import language.higherKinds
+
/** A template for companion objects of `Traversable` and subclasses thereof.
* This class provides a set of operations to create `$Coll` objects.
* It is typically inherited by companion objects of subclasses of `Traversable`.
@@ -17,7 +19,7 @@ package generic
* @since 2.8
*
* @define coll collection
- * @define Coll Traversable
+ * @define Coll `Traversable`
* @define factoryInfo
* This object provides a set of operations to create `$Coll` values.
* @author Martin Odersky
@@ -71,7 +73,7 @@ abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTrav
val b = newBuilder[A]
// At present we're using IndexedSeq as a proxy for "has a cheap size method".
if (xss forall (_.isInstanceOf[IndexedSeq[_]]))
- b.sizeHint(xss map (_.size) sum)
+ b.sizeHint(xss.map(_.size).sum)
for (xs <- xss.seq) b ++= xs
b.result
@@ -199,8 +201,8 @@ abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTrav
/** Produces a $coll containing a sequence of increasing of integers.
*
- * @param from the first element of the $coll
- * @param end the end value of the $coll (the first value NOT contained)
+ * @param start the first element of the $coll
+ * @param end the end value of the $coll (the first value NOT contained)
* @return a $coll with values `start, start + 1, ..., end - 1`
*/
def range[T: Integral](start: T, end: T): CC[T] = range(start, end, implicitly[Integral[T]].one)
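The range factory documented above, illustrated briefly (values are arbitrary):

  List.range(0, 5)        // List(0, 1, 2, 3, 4)
  List.range(0, 10, 3)    // List(0, 3, 6, 9)
  Vector.range(1, 4)      // Vector(1, 2, 3)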
diff --git a/src/library/scala/collection/generic/GenericClassManifestCompanion.scala b/src/library/scala/collection/generic/GenericArrayTagCompanion.scala
index 546e82fb4a..959adbce6d 100644
--- a/src/library/scala/collection/generic/GenericClassManifestCompanion.scala
+++ b/src/library/scala/collection/generic/GenericArrayTagCompanion.scala
@@ -10,20 +10,21 @@ package scala.collection
package generic
import mutable.Builder
+import language.higherKinds
-/** This class represents companions of classes which require ClassManifests
+/** This class represents companions of classes which require ArrayTags
* for their element types.
*
* @author Aleksandar Prokopec
*/
-abstract class GenericClassManifestCompanion[+CC[X] <: Traversable[X]] {
+abstract class GenericArrayTagCompanion[+CC[X] <: Traversable[X]] {
type Coll = CC[_]
- def newBuilder[A](implicit ord: ClassManifest[A]): Builder[A, CC[A]]
+ def newBuilder[A](implicit ord: ArrayTag[A]): Builder[A, CC[A]]
- def empty[A: ClassManifest]: CC[A] = newBuilder[A].result
+ def empty[A: ArrayTag]: CC[A] = newBuilder[A].result
- def apply[A](elems: A*)(implicit ord: ClassManifest[A]): CC[A] = {
+ def apply[A](elems: A*)(implicit ord: ArrayTag[A]): CC[A] = {
val b = newBuilder[A]
b ++= elems
b.result
diff --git a/src/library/scala/collection/generic/GenericArrayTagTraversableTemplate.scala b/src/library/scala/collection/generic/GenericArrayTagTraversableTemplate.scala
new file mode 100644
index 0000000000..ac84683c59
--- /dev/null
+++ b/src/library/scala/collection/generic/GenericArrayTagTraversableTemplate.scala
@@ -0,0 +1,30 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+import mutable.Builder
+import annotation.unchecked.uncheckedVariance
+import language.higherKinds
+
+/** This trait represents collections classes which require array
+ * tags for their element types.
+ *
+ * @author Aleksandar Prokopec
+ * @since 2.8
+ */
+trait GenericArrayTagTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
+ implicit protected[this] val tag: ArrayTag[A]
+ def arrayTagCompanion: GenericArrayTagCompanion[CC]
+ def genericArrayTagBuilder[B](implicit tag: ArrayTag[B]): Builder[B, CC[B]] = arrayTagCompanion.newBuilder[B]
+ @deprecated("use arrayTagCompanion instead", "2.10.0")
+ def classManifestCompanion: GenericClassManifestCompanion[CC] = arrayTagCompanion
+ @deprecated("use genericArrayTagBuilder instead", "2.10.0")
+ def genericClassManifestBuilder[B](implicit manifest: ClassManifest[B]): Builder[B, CC[B]] = genericArrayTagBuilder[B](manifest)
+}
diff --git a/src/library/scala/collection/generic/GenericClassManifestTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassManifestTraversableTemplate.scala
deleted file mode 100644
index 12b5a495f0..0000000000
--- a/src/library/scala/collection/generic/GenericClassManifestTraversableTemplate.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2010-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package generic
-
-import mutable.Builder
-import annotation.unchecked.uncheckedVariance
-
-/** This trait represents collections classes which require class
- * manifests for their element types.
- *
- * @author Aleksandar Prokopec
- * @since 2.8
- */
-trait GenericClassManifestTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
- implicit protected[this] val manifest: ClassManifest[A]
- def classManifestCompanion: GenericClassManifestCompanion[CC]
- def genericClassManifestBuilder[B](implicit man: ClassManifest[B]): Builder[B, CC[B]] = classManifestCompanion.newBuilder[B]
-}
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index b36a1e297f..1844542315 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -10,16 +10,17 @@ package scala.collection
package generic
import mutable.Builder
+import language.higherKinds
/** A template class for companion objects of "regular" collection classes
* represent an unconstrained higher-kinded type. Typically
* such classes inherit from trait `GenericTraversableTemplate`.
* @tparam CC The type constructor representing the collection class.
- * @see GenericTraversableTemplate
+ * @see [[scala.collection.generic.GenericTraversableTemplate]]
* @author Martin Odersky
* @since 2.8
* @define coll collection
- * @define Coll CC
+ * @define Coll `CC`
*/
abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
/** The underlying collection type with unknown element type */
diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
index c3baa28147..290dc435c8 100644
--- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala
+++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
@@ -10,6 +10,7 @@ package scala.collection
package generic
import mutable.Builder
+import language.higherKinds
/** This class represents companions of classes which require the ordered trait
* for their element types.
diff --git a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
index 5cfc4666b3..6e04420315 100644
--- a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
@@ -13,8 +13,7 @@ package generic
import mutable.Builder
import annotation.unchecked.uncheckedVariance
-
-
+import language.higherKinds
/** This trait represents collections classes which require
* ordered element types.
diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala
index 40fcfa31d0..484da5c6d9 100644
--- a/src/library/scala/collection/generic/GenericParCompanion.scala
+++ b/src/library/scala/collection/generic/GenericParCompanion.scala
@@ -11,11 +11,12 @@ package scala.collection.generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
+import language.higherKinds
/** A template class for companion objects of parallel collection classes.
* They should be mixed in together with `GenericCompanion` type.
*
- * @define Coll ParIterable
+ * @define Coll `ParIterable`
* @tparam CC the type constructor representing the collection class
* @since 2.8
*/
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index 430dcb9e29..fc1c3f5eaa 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -14,6 +14,7 @@ import scala.collection.parallel.ParMap
import scala.collection.parallel.TaskSupport
import annotation.unchecked.uncheckedVariance
+import language.higherKinds
/** A template trait for collections having a companion.
*
diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala
index 41e8d6dd39..90063c1ca2 100644
--- a/src/library/scala/collection/generic/GenericSeqCompanion.scala
+++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala
@@ -10,15 +10,7 @@
package scala.collection
package generic
-import annotation.bridge
+import language.higherKinds
trait GenericSeqCompanion[CC[X] <: Traversable[X]]
- extends GenericCompanion[CC] {
-
- @bridge
- override def empty[A]: CC[A] = super.empty[A]
-
- @bridge
- override def apply[A](elems: A*): CC[A] = super.apply(elems: _*)
-
-}
+ extends GenericCompanion[CC]
\ No newline at end of file

diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index 6af6a36981..221bcfb379 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -8,7 +8,7 @@
package scala.collection
package generic
-
+import language.higherKinds
/**
* @since 2.8
*/
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 6586434924..7cb0e812d8 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -14,6 +14,7 @@ package generic
import mutable.Builder
import annotation.migration
import annotation.unchecked.uncheckedVariance
+import language.higherKinds
/** A template class for companion objects of ``regular`` collection classes
* that represent an unconstrained higher-kinded type.
@@ -72,8 +73,8 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Converts this $coll of pairs into two collections of the first and second
* half of each pair.
*
- * @param A1 the type of the first half of the element pairs
- * @param A2 the type of the second half of the element pairs
+ * @tparam A1 the type of the first half of the element pairs
+ * @tparam A2 the type of the second half of the element pairs
* @param asPair an implicit conversion which asserts that the element type
* of this $coll is a pair.
* @return a pair ${coll}s, containing the first, respectively second
@@ -93,9 +94,9 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Converts this $coll of triples into three collections of the first, second,
* and third element of each triple.
*
- * @param A1 the type of the first member of the element triples
- * @param A2 the type of the second member of the element triples
- * @param A3 the type of the third member of the element triples
+ * @tparam A1 the type of the first member of the element triples
+ * @tparam A2 the type of the second member of the element triples
+ * @tparam A3 the type of the third member of the element triples
* @param asTriple an implicit conversion which asserts that the element type
* of this $coll is a triple.
* @return a triple ${coll}s, containing the first, second, respectively
@@ -146,11 +147,6 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
b.result
}
- // cannot have a bridge, because it would have the same signature as the target method after erasure
- // @bridge
- // def flatten[B](implicit asTraversable: A => /*<:<!!!*/ TraversableOnce[B]): CC[B] =
- // flatten[B](asTraversable: A => GenTraversableOnce[B])
-
/** Transposes this $coll of traversable collections into
* a $coll of ${coll}s.
*
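A brief illustrative sketch of unzip and unzip3, whose type-parameter docs are corrected above:

  val (nums, chars) = List((1, 'a'), (2, 'b'), (3, 'c')).unzip        // (List(1, 2, 3), List(a, b, c))
  val (xs, ys, zs)  = List((1, "one", 1.0), (2, "two", 2.0)).unzip3   // (List(1, 2), List(one, two), List(1.0, 2.0))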
diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala
index f0a70c2b88..730cb18733 100644
--- a/src/library/scala/collection/generic/Growable.scala
+++ b/src/library/scala/collection/generic/Growable.scala
@@ -18,11 +18,11 @@ package generic
* @version 2.8
* @since 2.8
* @define coll growable collection
- * @define Coll Growable
+ * @define Coll `Growable`
* @define add add
* @define Add add
*/
-trait Growable[-A] {
+trait Growable[-A] extends Clearable {
/** ${Add}s a single element to this $coll.
*
@@ -42,7 +42,7 @@ trait Growable[-A] {
/** ${Add}s all elements produced by a TraversableOnce to this $coll.
*
- * @param iter the TraversableOnce producing the elements to $add.
+ * @param xs the TraversableOnce producing the elements to $add.
* @return the $coll itself.
*/
def ++=(xs: TraversableOnce[A]): this.type = { xs.seq foreach += ; this }
@@ -50,5 +50,5 @@ trait Growable[-A] {
/** Clears the $coll's contents. After this operation, the
* $coll is empty.
*/
- def clear()
+ def clear(): Unit
}
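Since Growable now extends Clearable, generic code can both grow and reset a collection through these interfaces alone; a minimal sketch (the helper names fill and reset are illustrative):

  import scala.collection.generic.{ Clearable, Growable }
  import scala.collection.mutable

  def fill(g: Growable[Int]): Unit = { g += 1; g ++= Seq(2, 3) }
  def reset(c: Clearable): Unit    = c.clear()

  val buf = mutable.ArrayBuffer[Int]()
  fill(buf)     // buf is now ArrayBuffer(1, 2, 3)
  reset(buf)    // buf is empty again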
diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala
index bdb657f320..d893188e92 100644
--- a/src/library/scala/collection/generic/ImmutableMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala
@@ -10,6 +10,8 @@
package scala.collection
package generic
+import language.higherKinds
+
/** A template for companion objects of `immutable.Map` and subclasses thereof.
* @author Martin Odersky
* @version 2.8
diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala
index e128be70a1..7bd5bf2ef8 100644
--- a/src/library/scala/collection/generic/ImmutableSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSetFactory.scala
@@ -10,6 +10,7 @@ package scala.collection
package generic
import mutable.{ Builder, SetBuilder }
+import language.higherKinds
abstract class ImmutableSetFactory[CC[X] <: immutable.Set[X] with SetLike[X, CC[X]]]
extends SetFactory[CC] {
diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
index 89e19eed87..f415a52b4d 100644
--- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
@@ -11,10 +11,12 @@
package scala.collection
package generic
+import language.higherKinds
+
/** A template for companion objects of `SortedMap` and subclasses thereof.
*
* @since 2.8
- * @define Coll SortedMap
+ * @define Coll `SortedMap`
* @define coll sorted map
* @define factoryInfo
* This object provides a set of operations needed to create sorted maps of type `$Coll`.
diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
index fe807d9fe6..1317bb4796 100644
--- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
@@ -11,11 +11,13 @@
package scala.collection
package generic
+import language.higherKinds
+
/** A template for companion objects of `SortedSet` and subclasses thereof.
*
* @since 2.8
- * @define Coll immutable.SortedSet
- * @define coll immutable sorted
+ * @define Coll `immutable.SortedSet`
+ * @define coll immutable sorted set
* @define factoryInfo
* This object provides a set of operations needed to create sorted sets of type `$Coll`.
* @author Martin Odersky
@@ -23,4 +25,4 @@ package generic
* @define sortedSetCanBuildFromInfo
* The standard `CanBuildFrom` instance for sorted sets
*/
-abstract class ImmutableSortedSetFactory[CC[A] <: immutable.SortedSet[A] with SortedSetLike[A, CC[A]]] extends SortedSetFactory[CC]
\ No newline at end of file
+abstract class ImmutableSortedSetFactory[CC[A] <: immutable.SortedSet[A] with SortedSetLike[A, CC[A]]] extends SortedSetFactory[CC]

diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index a60e3032c1..ce44ae9bf4 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -11,7 +11,7 @@ package generic
import mutable.{Builder, MapBuilder}
-import annotation.bridge
+import language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
*
@@ -35,6 +35,4 @@ abstract class MapFactory[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]] ex
def empty[A, B]: CC[A, B]
- @bridge
- override def apply[A, B](elems: (A, B)*): CC[A, B] = super.apply(elems: _*)
}
diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala
index 076e41c9f8..8b38b4ddd5 100644
--- a/src/library/scala/collection/generic/MutableMapFactory.scala
+++ b/src/library/scala/collection/generic/MutableMapFactory.scala
@@ -12,6 +12,7 @@ package scala.collection
package generic
import mutable.Builder
+import language.higherKinds
/** A template for companion objects of `mutable.Map` and subclasses thereof.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala
index 6130ef2042..f130489814 100644
--- a/src/library/scala/collection/generic/MutableSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSetFactory.scala
@@ -10,6 +10,7 @@ package scala.collection
package generic
import mutable.{ Builder, GrowingBuilder }
+import language.higherKinds
abstract class MutableSetFactory[CC[X] <: mutable.Set[X] with mutable.SetLike[X, CC[X]]]
extends SetFactory[CC] {
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
index cbbedc0231..0e90ed999c 100644
--- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -10,10 +10,11 @@ package scala.collection
package generic
import scala.collection.mutable.{ Builder, GrowingBuilder }
+import language.higherKinds
/**
- * @define Coll mutable.SortedSet
- * @define coll mutable sorted
+ * @define Coll `mutable.SortedSet`
+ * @define coll mutable sorted set
*
* @author Lucien Pereira
*
diff --git a/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
index 259e4123c4..92f166ae08 100644
--- a/src/library/scala/collection/generic/OrderedTraversableFactory.scala
+++ b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
@@ -10,9 +10,7 @@
package scala.collection
package generic
-
-
-
+import language.higherKinds
abstract class OrderedTraversableFactory[CC[X] <: Traversable[X] with GenericOrderedTraversableTemplate[X, CC]]
extends GenericOrderedCompanion[CC] {
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 558024d45c..41dca8fbe9 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -10,13 +10,14 @@ package scala.collection.generic
import scala.collection.parallel.ParIterable
import scala.collection.parallel.Combiner
+import language.higherKinds
/** A template class for companion objects of `ParIterable` and subclasses
* thereof. This class extends `TraversableFactory` and provides a set of
* operations to create `$Coll` objects.
*
* @define coll parallel collection
- * @define Coll ParIterable
+ * @define Coll `ParIterable`
* @since 2.8
*/
abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]]
diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala
index 2d89f79c13..5aedf67924 100644
--- a/src/library/scala/collection/generic/ParMapFactory.scala
+++ b/src/library/scala/collection/generic/ParMapFactory.scala
@@ -12,13 +12,14 @@ import scala.collection.parallel.ParMap
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
import scala.collection.mutable.Builder
+import language.higherKinds
/** A template class for companion objects of `ParMap` and subclasses thereof.
* This class extends `TraversableFactory` and provides a set of operations
* to create `$Coll` objects.
*
* @define coll parallel map
- * @define Coll ParMap
+ * @define Coll `ParMap`
* @author Aleksandar Prokopec
* @since 2.8
*/
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index c2cf971d73..30a36a734a 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -12,6 +12,7 @@ import collection.mutable.Builder
import collection.parallel.Combiner
import collection.parallel.ParSet
import collection.parallel.ParSetLike
+import language.higherKinds
/**
* @author Aleksandar Prokopec
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index 7bd92173ff..3f61de6ceb 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -10,6 +10,7 @@
package scala.collection
package generic
+import language.higherKinds
/** A template for companion objects of Seq and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index 348743a120..646e99dd1e 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -12,14 +12,7 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.bridge
+import language.higherKinds
abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
- extends GenSetFactory[CC] with GenericSeqCompanion[CC] {
-
- @bridge
- override def empty[A]: CC[A] = super.empty[A]
-
- @bridge
- override def apply[A](elems: A*): CC[A] = super.apply(elems: _*)
-}
+ extends GenSetFactory[CC] with GenericSeqCompanion[CC]
\ No newline at end of file
diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala
index 88c7ce3a3d..593cd0f58e 100644
--- a/src/library/scala/collection/generic/Shrinkable.scala
+++ b/src/library/scala/collection/generic/Shrinkable.scala
@@ -17,7 +17,7 @@ package generic
* @version 2.8
* @since 2.8
* @define coll shrinkable collection
- * @define Coll Shrinkable
+ * @define Coll `Shrinkable`
*/
trait Shrinkable[-A] {
@@ -43,7 +43,7 @@ trait Shrinkable[-A] {
/** Removes all elements produced by an iterator from this $coll.
*
- * @param iter the iterator producing the elements to remove.
+ * @param xs the iterator producing the elements to remove.
* @return the $coll itself
*/
def --=(xs: TraversableOnce[A]): this.type = { xs.seq foreach -= ; this }
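The renamed parameter matches the actual signature: `--=` folds `-=` over the whole `TraversableOnce`, removing one occurrence per element of `xs`. A small usage sketch:

    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer(1, 2, 3, 2)
    buf --= List(2, 3)        // removes one occurrence of each: ArrayBuffer(1, 2)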
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index ed9e11fd30..42eca72806 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -62,8 +62,6 @@ trait Sorted[K, +This <: Sorted[K, This]] {
* and an upper-bound.
*
* @param from The upper-bound (exclusive) of the ranged projection.
- * @param until ...
- * @return ...
*/
def range(from: K, until: K): This = rangeImpl(Some(from), Some(until))
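For reference, `range(from, until)` delegates to `rangeImpl(Some(from), Some(until))` and keeps the keys `k` with `from <= k < until`. A quick usage sketch:

    import scala.collection.immutable.TreeSet

    val ts = TreeSet(1, 2, 3, 4, 5)
    ts.range(2, 4)            // TreeSet(2, 3): lower bound inclusive, upper bound exclusive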
diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala
index 962a945037..f038c8b09b 100644
--- a/src/library/scala/collection/generic/SortedMapFactory.scala
+++ b/src/library/scala/collection/generic/SortedMapFactory.scala
@@ -12,6 +12,7 @@ package scala.collection
package generic
import mutable.{Builder, MapBuilder}
+import language.higherKinds
/** A template for companion objects of mutable.Map and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index 45340cf6c1..bb261803a9 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -12,6 +12,7 @@ package scala.collection
package generic
import mutable.{Builder, SetBuilder}
+import language.higherKinds
/** A template for companion objects of Set and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala
index 1ca9d706f0..aed4f4f7da 100644
--- a/src/library/scala/collection/generic/Subtractable.scala
+++ b/src/library/scala/collection/generic/Subtractable.scala
@@ -10,7 +10,6 @@
package scala.collection
package generic
-import annotation.bridge
/** This trait represents collection-like objects that can be reduced
* using a '+' operator. It defines variants of `-` and `--`
@@ -53,12 +52,9 @@ trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self =>
/** Creates a new $coll from this $coll by removing all elements of another
* collection.
*
- * @param elems the collection containing the removed elements.
+ * @param xs the collection containing the removed elements.
* @return a new $coll that contains all elements of the current $coll
* except one less occurrence of each of the elements of `elems`.
*/
def --(xs: GenTraversableOnce[A]): Repr = (repr /: xs.seq) (_ - _)
-
- @bridge
- def --(xs: TraversableOnce[A]): Repr = --(xs: GenTraversableOnce[A])
}
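After the bridge removal, `--` has the single `GenTraversableOnce` overload shown above; it folds `-` over the argument, so each element of `xs` removes at most one occurrence. In use:

    val s = Set(1, 2, 3, 4)
    s -- List(2, 4)           // Set(1, 3)

    val m = Map("a" -> 1, "b" -> 2)
    m -- List("b")            // Map(a -> 1)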
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index e71de1252c..254a6a224f 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import annotation.bridge
+import language.higherKinds
/** A template for companion objects of `Traversable` and subclasses thereof.
* This class provides a set of operations to create `$Coll` objects.
@@ -36,48 +36,5 @@ import annotation.bridge
* @see GenericCanBuildFrom
*/
trait TraversableFactory[CC[X] <: Traversable[X] with GenericTraversableTemplate[X, CC]]
- extends GenTraversableFactory[CC] with GenericSeqCompanion[CC] {
-
- @bridge
- override def concat[A](xss: Traversable[A]*): CC[A] = super.concat(xss: _*)
-
- @bridge
- override def fill[A](n: Int)(elem: => A): CC[A] = super.fill(n)(elem)
-
- @bridge
- override def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A]] = super.fill(n1, n2)(elem)
-
- @bridge
- override def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] = super.fill(n1, n2, n3)(elem)
-
- @bridge
- override def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] = super.fill(n1, n2, n3, n4)(elem)
-
- @bridge
- override def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] = super.fill(n1, n2, n3, n4, n5)(elem)
-
- @bridge
- override def tabulate[A](n: Int)(f: Int => A): CC[A] = super.tabulate(n)(f)
-
- @bridge
- override def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A]] = super.tabulate(n1, n2)(f)
-
- @bridge
- override def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] = super.tabulate(n1, n2, n3)(f)
-
- @bridge
- override def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] = super.tabulate(n1, n2, n3, n4)(f)
-
- @bridge
- override def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] = super.tabulate(n1, n2, n3, n4, n5)(f)
-
- @bridge
- override def range[T: Integral](start: T, end: T): CC[T] = super.range(start, end)
-
- @bridge
- override def range[T: Integral](start: T, end: T, step: T): CC[T] = super.range(start, end, step)
-
- @bridge
- override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = super.iterate(start, len)(f)
-}
+ extends GenTraversableFactory[CC] with GenericSeqCompanion[CC]
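With the `@bridge` forwarders gone, `TraversableFactory` simply mixes `GenTraversableFactory` and `GenericSeqCompanion`; `concat`, `fill`, `tabulate`, `range` and `iterate` are inherited unchanged, the forwarders having been there only for binary compatibility after these methods moved into the `Gen*` factory hierarchy. The inherited methods as used on concrete companions:

    val a = List.fill(3)("x")                  // List(x, x, x)
    val b = Vector.tabulate(2, 3)(_ * 3 + _)   // Vector(Vector(0, 1, 2), Vector(3, 4, 5))
    val c = List.range(0, 10, 3)               // List(0, 3, 6, 9)
    val d = Stream.iterate(1, 4)(_ * 2)        // Stream(1, 2, 4, 8) once forced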
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 3d723a1feb..3d5bc2704f 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -57,7 +57,7 @@ trait TraversableForwarder[+A] extends Traversable[A] {
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = underlying.copyToArray(xs, start, len)
override def copyToArray[B >: A](xs: Array[B], start: Int) = underlying.copyToArray(xs, start)
override def copyToArray[B >: A](xs: Array[B]) = underlying.copyToArray(xs)
- override def toArray[B >: A: ClassManifest]: Array[B] = underlying.toArray
+ override def toArray[B >: A: ArrayTag]: Array[B] = underlying.toArray
override def toList: List[A] = underlying.toList
override def toIterable: Iterable[A] = underlying.toIterable
override def toSeq: Seq[A] = underlying.toSeq
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index 0457fef227..e0351ebae6 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -3,4 +3,19 @@ import generic.CanBuildFrom
package object generic {
type CanBuild[-Elem, +To] = CanBuildFrom[Nothing, Elem, To]
+
+ /** The type of conversions from a collection representation type
+ * `Repr` to its corresponding GenTraversableLike.
+ * @see [[scala.collection.generic.FromRepr]]
+ */
+ type HasElem[Repr, A] = Repr => GenTraversableLike[A, Repr]
+
+ @deprecated("use ArrayTagTraversableFactory instead", "2.10.0")
+ type ClassManifestTraversableFactory[CC[X] <: Traversable[X] with GenericClassManifestTraversableTemplate[X, CC]] = ArrayTagTraversableFactory[CC]
+
+ @deprecated("use GenericArrayTagCompanion instead", "2.10.0")
+ type GenericClassManifestCompanion[+CC[X] <: Traversable[X]] = GenericArrayTagCompanion[CC]
+
+ @deprecated("use GenericArrayTagTraversableTemplate instead", "2.10.0")
+ type GenericClassManifestTraversableTemplate[+A, +CC[X] <: Traversable[X]] = GenericArrayTagTraversableTemplate[A, CC]
}
\ No newline at end of file
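The new `HasElem[Repr, A]` alias names the implicit view from a representation type to its `GenTraversableLike`, while the three deprecated aliases keep the old `ClassManifest`-flavoured names compiling against the renamed `ArrayTag` variants. A minimal sketch (hypothetical method) using `HasElem` as evidence:

    import scala.collection.GenTraversableLike
    import scala.collection.generic.HasElem

    // Any Repr with an implicit HasElem[Repr, A] can be treated as a
    // collection of A without naming the concrete collection type.
    def headOption[A, Repr](xs: Repr)(implicit hasElem: HasElem[Repr, A]): Option[A] = {
      val coll: GenTraversableLike[A, Repr] = hasElem(xs)
      if (coll.isEmpty) None else Some(coll.head)
    }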
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index 870d5534dc..1b676e2d2f 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -20,7 +20,7 @@ import mutable.{ Builder, SetBuilder }
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_bitsets "Scala's Collection Library overview"]]
* section on `Immutable BitSets` for more information.
*
- * @define Coll immutable.BitSet
+ * @define Coll `immutable.BitSet`
* @define coll immutable bitset
*/
@SerialVersionUID(1611436763290191562L)
@@ -63,7 +63,7 @@ abstract class BitSet extends scala.collection.AbstractSet[Int]
}
/** $factoryInfo
- * @define Coll immutable.BitSet
+ * @define Coll `immutable.BitSet`
* @define coll immutable bitset
*/
object BitSet extends BitSetFactory[BitSet] {
diff --git a/src/library/scala/collection/immutable/GenSeq.scala.disabled b/src/library/scala/collection/immutable/GenSeq.scala.disabled
index 5b59418b9f..b8bc420ec3 100644
--- a/src/library/scala/collection/immutable/GenSeq.scala.disabled
+++ b/src/library/scala/collection/immutable/GenSeq.scala.disabled
@@ -25,7 +25,7 @@ import mutable.Builder
*
* The class adds an `update` method to `collection.Seq`.
*
- * @define Coll mutable.Seq
+ * @define Coll `mutable.Seq`
* @define coll mutable sequence
*/
trait GenSeq[+A] extends GenIterable[A]
diff --git a/src/library/scala/collection/immutable/GenSet.scala.disabled b/src/library/scala/collection/immutable/GenSet.scala.disabled
index dc921b5245..828219580e 100644
--- a/src/library/scala/collection/immutable/GenSet.scala.disabled
+++ b/src/library/scala/collection/immutable/GenSet.scala.disabled
@@ -24,7 +24,7 @@ import mutable.Builder
*
* @since 1.0
* @author Matthias Zenger
- * @define Coll mutable.Set
+ * @define Coll `mutable.Set`
* @define coll mutable set
*/
trait GenSet[A] extends GenIterable[A]
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 6b11371bec..13a0febfee 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -27,7 +27,7 @@ import parallel.immutable.ParHashMap
* @since 2.3
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#hash_tries "Scala's Collection Library overview"]]
* section on `Hash Tries` for more information.
- * @define Coll immutable.HashMap
+ * @define Coll `immutable.HashMap`
* @define coll immutable hash map
* @define mayNotTerminateInf
* @define willNotTerminateInf
@@ -96,7 +96,7 @@ class HashMap[A, +B] extends AbstractMap[A, B]
}
/** $factoryInfo
- * @define Coll immutable.HashMap
+ * @define Coll `immutable.HashMap`
* @define coll immutable hash map
*
* @author Tiark Rompf
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 79d2fb71cc..b956a4d838 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -26,7 +26,7 @@ import collection.parallel.immutable.ParHashSet
* @author Tiark Rompf
* @version 2.8
* @since 2.3
- * @define Coll immutable.HashSet
+ * @define Coll `immutable.HashSet`
* @define coll immutable hash set
*/
@SerialVersionUID(2L)
@@ -85,12 +85,12 @@ class HashSet[A] extends AbstractSet[A]
}
/** $factoryInfo
- * @define Coll immutable.HashSet
+ * @define Coll `immutable.HashSet`
* @define coll immutable hash set
*
* @author Tiark Rompf
* @since 2.3
- * @define Coll immutable.HashSet
+ * @define Coll `immutable.HashSet`
* @define coll immutable hash set
* @define mayNotTerminateInf
* @define willNotTerminateInf
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index e3939001d8..b37edc4254 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -29,7 +29,7 @@ trait IndexedSeq[+A] extends Seq[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll indexed sequence
- * @define Coll IndexedSeq
+ * @define Coll `IndexedSeq`
*/
object IndexedSeq extends SeqFactory[IndexedSeq] {
class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable {
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index 3c9c0c2f24..039a57041c 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -36,7 +36,7 @@ import IntMapUtils._
/** A companion object for integer maps.
*
- * @define Coll IntMap
+ * @define Coll `IntMap`
* @define mapCanBuildFromInfo
* The standard `CanBuildFrom` instance for `$Coll` objects.
* The created value is an instance of class `MapCanBuildFrom`.
@@ -150,7 +150,7 @@ import IntMap._
* @tparam T type of the values associated with integer keys.
*
* @since 2.7
- * @define Coll immutable.IntMap
+ * @define Coll `immutable.IntMap`
* @define coll immutable integer map
* @define mayNotTerminateInf
* @define willNotTerminateInf
diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala
index d5fca2bdff..a1390ba189 100644
--- a/src/library/scala/collection/immutable/Iterable.scala
+++ b/src/library/scala/collection/immutable/Iterable.scala
@@ -18,7 +18,7 @@ import parallel.immutable.ParIterable
/** A base trait for iterable collections that are guaranteed immutable.
* $iterableInfo
*
- * @define Coll immutable.Iterable
+ * @define Coll `immutable.Iterable`
* @define coll immutable iterable collection
*/
trait Iterable[+A] extends Traversable[A]
@@ -34,7 +34,7 @@ trait Iterable[+A] extends Traversable[A]
}
/** $factoryInfo
- * @define Coll immutable.Iterable
+ * @define Coll `immutable.Iterable`
* @define coll immutable iterable collection
*/
object Iterable extends TraversableFactory[Iterable] {
diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala
index 536894c287..2d6986740a 100644
--- a/src/library/scala/collection/immutable/LinearSeq.scala
+++ b/src/library/scala/collection/immutable/LinearSeq.scala
@@ -29,7 +29,7 @@ trait LinearSeq[+A] extends Seq[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `List`.
* @define coll immutable linear sequence
- * @define Coll immutable.LinearSeq
+ * @define Coll `immutable.LinearSeq`
*/
object LinearSeq extends SeqFactory[LinearSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 1b75c10113..6fd8d143ee 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -141,7 +141,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
/** Builds a new list by applying a function to all elements of this list.
* Like `xs map f`, but returns `xs` unchanged if function
- * `f` maps all elements to themselves (wrt eq).
+ * `f` maps all elements to themselves (as determined by `eq`).
*
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
@@ -382,7 +382,7 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
/** $factoryInfo
* @define coll list
- * @define Coll List
+ * @define Coll `List`
*/
object List extends SeqFactory[List] {
@@ -511,7 +511,7 @@ object List extends SeqFactory[List] {
/** Transforms an Iterable of Eithers into a pair of lists.
*
- * @param xs the iterable of Eithers to separate
+ * @param es the iterable of Eithers to separate
* @return a pair of lists.
*/
@deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead", "2.8.0")
@@ -582,7 +582,7 @@ object List extends SeqFactory[List] {
/** Tests whether the given predicate `p` holds
* for all corresponding elements of the argument lists.
*
- * @param p function to apply to each pair of elements.
+ * @param f function to apply to each pair of elements.
* @return `(p(a<sub>0</sub>,b<sub>0</sub>) &amp;&amp;
* ... &amp;&amp; p(a<sub>n</sub>,b<sub>n</sub>))]`
* if the lists are `[a<sub>0</sub>, ..., a<sub>k</sub>]`;
@@ -604,7 +604,7 @@ object List extends SeqFactory[List] {
/** Tests whether the given predicate `p` holds
* for some corresponding elements of the argument lists.
*
- * @param p function to apply to each pair of elements.
+ * @param f function to apply to each pair of elements.
* @return `n != 0 &amp;&amp; (p(a<sub>0</sub>,b<sub>0</sub>) ||
* ... || p(a<sub>n</sub>,b<sub>n</sub>))]` if the lists are
* `[a<sub>0</sub>, ..., a<sub>k</sub>]`,
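The reworded `mapConserve` doc above refers to reference equality: if `f` returns every element unchanged (in the sense of `eq`), the original list instance is returned and no copy is allocated. For example:

    val xs = List("a", "b", "c")
    val same = xs.mapConserve(x => x)          // f is an identity, so...
    assert(same eq xs)                         // ...the very same list comes back

    val other = xs.mapConserve(_.toUpperCase)
    assert(!(other eq xs))                     // a changed element forces a new list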
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index e008fb86e3..091443f909 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -105,9 +105,6 @@ extends AbstractMap[A, B]
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] =
((repr: ListMap[A, B1]) /: xs.seq) (_ + _)
- @bridge def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): ListMap[A, B1] =
- ++(xs: GenTraversableOnce[(A, B1)])
-
/** This creates a new mapping without the given `key`.
* If the map does not contain a mapping for the given key, the
* method returns the same map.
@@ -155,7 +152,7 @@ extends AbstractMap[A, B]
* method throws an exception if there is no mapping from the given
* key to a value.
*
- * @param key the key
+ * @param k the key
* @return the value associated with the given key.
*/
override def apply(k: A): B1 = apply0(this, k)
@@ -165,7 +162,7 @@ extends AbstractMap[A, B]
/** Checks if this map maps `key` to a value and return the
* value if it exists.
*
- * @param key the key of the mapping of interest
+ * @param k the key of the mapping of interest
* @return the value of the mapping, if it exists
*/
override def get(k: A): Option[B1] = get0(this, k)
@@ -177,9 +174,6 @@ extends AbstractMap[A, B]
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
* it will be overridden by this function.
- *
- * @param k ...
- * @param v ...
*/
override def updated [B2 >: B1](k: A, v: B2): ListMap[A, B2] = {
val m = if (contains(k)) this - k else this
@@ -189,9 +183,6 @@ extends AbstractMap[A, B]
/** Creates a new mapping without the given `key`.
* If the map does not contain a mapping for the given key, the
* method returns the same map.
- *
- * @param k ...
- * @return ...
*/
override def - (k: A): ListMap[A, B1] = {
// This definition used to result in stack overflows
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 47e3245117..ce3abaacb7 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -33,7 +33,7 @@ object ListSet extends ImmutableSetFactory[ListSet] {
*/
class ListSetBuilder[Elem](initial: ListSet[Elem]) extends Builder[Elem, ListSet[Elem]] {
def this() = this(empty[Elem])
- protected val elems = new mutable.ListBuffer[Elem] ++= initial reverse
+ protected val elems = (new mutable.ListBuffer[Elem] ++= initial).reverse
protected val seen = new mutable.HashSet[Elem] ++= initial
def +=(x: Elem): this.type = {
@@ -100,9 +100,7 @@ class ListSet[A] extends AbstractSet[A]
*/
override def ++(xs: GenTraversableOnce[A]): ListSet[A] =
if (xs.isEmpty) this
- else new ListSet.ListSetBuilder(this) ++= xs.seq result
-
- @bridge def ++(xs: TraversableOnce[A]): ListSet[A] = ++(xs: GenTraversableOnce[A]): ListSet[A]
+ else (new ListSet.ListSetBuilder(this) ++= xs.seq).result
private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e)
private[ListSet] def unchecked_outer: ListSet[A] =
@@ -159,7 +157,7 @@ class ListSet[A] extends AbstractSet[A]
/** Checks if this set contains element `elem`.
*
- * @param elem the element to check for membership.
+ * @param e the element to check for membership.
* @return `'''true'''`, iff `elem` is contained in this set.
*/
override def contains(e: A) = containsInternal(this, e)
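The two builder expressions above are parenthesized so that `.reverse` and `.result` unambiguously apply to the built buffer, instead of relying on postfix operator syntax, which 2.10 puts behind `language.postfixOps`. The same pattern in isolation:

    import scala.collection.mutable.ListBuffer

    val initial = List(1, 2, 3)
    val elems = (new ListBuffer[Int] ++= initial).reverse   // ListBuffer(3, 2, 1)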
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 11b5d1e311..8a316f37de 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -36,7 +36,7 @@ import LongMapUtils._
/** A companion object for long maps.
*
- * @define Coll LongMap
+ * @define Coll `LongMap`
* @define mapCanBuildFromInfo
* The standard `CanBuildFrom` instance for `$Coll` objects.
* The created value is an instance of class `MapCanBuildFrom`.
@@ -147,7 +147,7 @@ import LongMap._;
* @tparam T type of the values associated with the long keys.
*
* @since 2.7
- * @define Coll immutable.LongMap
+ * @define Coll `immutable.LongMap`
* @define coll immutable long integer map
* @define mayNotTerminateInf
* @define willNotTerminateInf
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index bbefd983fd..e73da01ac4 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -66,7 +66,7 @@ trait Map[A, +B] extends Iterable[(A, B)]
}
/** $factoryInfo
- * @define Coll immutable.Map
+ * @define Coll `immutable.Map`
* @define coll immutable map
*/
object Map extends ImmutableMapFactory[Map] {
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 80da1ab010..6ae2d78188 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -11,7 +11,6 @@ package immutable
import generic._
import parallel.immutable.ParMap
-import annotation.bridge
/**
* A generic template for immutable maps from keys of type `A`
@@ -36,9 +35,9 @@ import annotation.bridge
* It is also good idea to override methods `foreach` and
* `size` for efficiency.
*
- * @param A the type of the keys contained in this collection.
- * @param B the type of the values associated with the keys.
- * @param This The type of the actual map implementation.
+ * @tparam A the type of the keys contained in this collection.
+ * @tparam B the type of the values associated with the keys.
+ * @tparam This The type of the actual map implementation.
*
* @author Martin Odersky
* @version 2.8
@@ -86,8 +85,6 @@ trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] =
((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _)
- @bridge def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): immutable.Map[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
-
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 0966fa035f..4c82d99c03 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -34,7 +34,7 @@ import generic._
*
* @author Paul Phillips
* @version 2.8
- * @define Coll NumericRange
+ * @define Coll `NumericRange`
* @define coll numeric range
* @define mayNotTerminateInf
* @define willNotTerminateInf
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 97c7c789f8..94953ce38b 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -25,7 +25,7 @@ object PagedSeq {
final val UndeterminedEnd = Int.MaxValue
/** Constructs a paged sequence from an iterator */
- def fromIterator[T: ClassManifest](source: Iterator[T]): PagedSeq[T] =
+ def fromIterator[T: ArrayTag](source: Iterator[T]): PagedSeq[T] =
new PagedSeq[T]((data: Array[T], start: Int, len: Int) => {
var i = 0
while (i < len && source.hasNext) {
@@ -36,7 +36,7 @@ object PagedSeq {
})
/** Constructs a paged sequence from an iterable */
- def fromIterable[T: ClassManifest](source: Iterable[T]): PagedSeq[T] =
+ def fromIterable[T: ArrayTag](source: Iterable[T]): PagedSeq[T] =
fromIterator(source.iterator)
/** Constructs a paged character sequence from a string iterator */
@@ -115,16 +115,16 @@ import PagedSeq._
* It returns the number of elements produced, or -1 if end of logical input stream was reached
* before reading any element.
*
- * @tparam T the type of the elements contained in this paged sequence, with a `ClassManifest` context bound.
+ * @tparam T the type of the elements contained in this paged sequence, with an `ArrayTag` context bound.
*
* @author Martin Odersky
* @since 2.7
- * @define Coll PagedSeq
+ * @define Coll `PagedSeq`
* @define coll paged sequence
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-class PagedSeq[T: ClassManifest] protected(
+class PagedSeq[T: ArrayTag] protected(
more: (Array[T], Int, Int) => Int,
first1: Page[T],
start: Int,
@@ -205,7 +205,7 @@ extends scala.collection.AbstractSeq[T]
/** Page containing up to PageSize characters of the input sequence.
*/
-private class Page[T: ClassManifest](val num: Int) {
+private class Page[T: ArrayTag](val num: Int) {
private final val PageSize = 4096
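`ArrayTag` is the 2.10 replacement for `ClassManifest` in these context bounds: it is exactly the evidence needed to instantiate an `Array[T]` at runtime. A minimal sketch of a hypothetical helper, assuming `ArrayTag` resolves as it does in the library sources above and still permits `new Array[T]` the way `ClassManifest` did:

    import scala.reflect.ArrayTag   // assumed location at this point in the 2.10 cycle

    // Fills a fixed-size page from an iterator; the ArrayTag bound supplies
    // the runtime information needed to create the Array[T].
    def fillPage[T: ArrayTag](source: Iterator[T], size: Int): Array[T] = {
      val data = new Array[T](size)
      var i = 0
      while (i < size && source.hasNext) { data(i) = source.next(); i += 1 }
      data
    }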
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index da04446281..e980dda847 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -30,7 +30,7 @@ import annotation.tailrec
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_queues "Scala's Collection Library overview"]]
* section on `Immutable Queues` for more information.
*
- * @define Coll immutable.Queue
+ * @define Coll `immutable.Queue`
* @define coll immutable queue
* @define mayNotTerminateInf
* @define willNotTerminateInf
@@ -131,7 +131,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
}
/** $factoryInfo
- * @define Coll immutable.Queue
+ * @define Coll `immutable.Queue`
* @define coll immutable queue
*/
object Queue extends SeqFactory[Queue] {
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index b72d83f896..033331b58b 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -10,7 +10,6 @@
package scala.collection.immutable
import scala.collection.parallel.immutable.ParRange
-import annotation.bridge
/** The `Range` class represents integer values in range
* ''[start;end)'' with non-zero step value `step`.
@@ -328,16 +327,16 @@ object Range {
*/
def apply(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
- /** Make an range from `start` to `end` inclusive with step value 1.
+ /** Make a range from `start` until `end` (exclusive) with step value 1.
*/
def apply(start: Int, end: Int): Range = new Range(start, end, 1)
- /** Make an inclusive range from start to end with given step value.
+ /** Make an inclusive range from `start` to `end` with given step value.
* @note step != 0
*/
@inline def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)
- /** Make an inclusive range from start to end with step value 1.
+ /** Make an inclusive range from `start` to `end` with step value 1.
*/
@inline def inclusive(start: Int, end: Int): Range.Inclusive = new Inclusive(start, end, 1)
@@ -398,10 +397,4 @@ object Range {
def apply(start: Int, end: Int, step: Int) = NumericRange(start, end, step)
def inclusive(start: Int, end: Int, step: Int) = NumericRange.inclusive(start, end, step)
}
-
- @deprecated("use Range instead", "2.9.0")
- trait ByOne extends Range {
-// @bridge override def foreach[@specialized(Unit) U](f: Int => U) =
-// super.foreach(f)
- }
}
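The corrected doc comments distinguish the two factories: `Range.apply` excludes `end`, `Range.inclusive` includes it. Concretely:

    Range(1, 4)              // 1, 2, 3      (end is exclusive)
    Range.inclusive(1, 4)    // 1, 2, 3, 4   (end is inclusive)
    Range(0, 10, 3)          // 0, 3, 6, 9   (explicit step)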
diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala
index 882ca12612..1104eb1b4f 100644
--- a/src/library/scala/collection/immutable/Seq.scala
+++ b/src/library/scala/collection/immutable/Seq.scala
@@ -19,7 +19,7 @@ import parallel.immutable.ParSeq
* that are guaranteed immutable.
*
* $seqInfo
- * @define Coll immutable.Seq
+ * @define Coll `immutable.Seq`
* @define coll immutable sequence
*/
trait Seq[+A] extends Iterable[A]
@@ -36,7 +36,7 @@ trait Seq[+A] extends Iterable[A]
}
/** $factoryInfo
- * @define Coll immutable.Seq
+ * @define Coll `immutable.Seq`
* @define coll immutable sequence
*/
object Seq extends SeqFactory[Seq] {
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index cd972d6c30..f783f2d562 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -21,7 +21,7 @@ import parallel.immutable.ParSet
* @since 1.0
* @author Matthias Zenger
* @author Martin Odersky
- * @define Coll immutable.Set
+ * @define Coll `immutable.Set`
* @define coll immutable set
*/
trait Set[A] extends Iterable[A]
@@ -38,7 +38,7 @@ trait Set[A] extends Iterable[A]
}
/** $factoryInfo
- * @define Coll immutable.Set
+ * @define Coll `immutable.Set`
* @define coll immutable set
*/
object Set extends ImmutableSetFactory[Set] {
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 902a0f8457..526f7a1ffe 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -14,7 +14,6 @@ package immutable
import generic._
import mutable.Builder
import annotation.unchecked.uncheckedVariance
-import annotation.bridge
/** A map whose keys are sorted.
*
@@ -77,8 +76,6 @@ trait SortedMap[A, +B] extends Map[A, B]
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
-
- @bridge def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): SortedMap[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala
index e1637ce78b..62fa4e0335 100644
--- a/src/library/scala/collection/immutable/SortedSet.scala
+++ b/src/library/scala/collection/immutable/SortedSet.scala
@@ -21,7 +21,7 @@ import mutable.Builder
* @author Martin Odersky
* @version 2.8
* @since 2.4
- * @define Coll immutable.SortedSet
+ * @define Coll `immutable.SortedSet`
* @define coll immutable sorted set
*/
trait SortedSet[A] extends Set[A] with scala.collection.SortedSet[A] with SortedSetLike[A, SortedSet[A]] {
@@ -30,7 +30,7 @@ trait SortedSet[A] extends Set[A] with scala.collection.SortedSet[A] with Sorted
}
/** $factoryInfo
- * @define Coll immutable.SortedSet
+ * @define Coll `immutable.SortedSet`
* @define coll immutable sorted set
*/
object SortedSet extends ImmutableSortedSetFactory[SortedSet] {
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index 50fc2795c0..473ac6b0b0 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -13,7 +13,7 @@ import generic._
import mutable.{ ArrayBuffer, Builder }
/** $factoryInfo
- * @define Coll immutable.Stack
+ * @define Coll `immutable.Stack`
* @define coll immutable stack
*/
object Stack extends SeqFactory[Stack] {
@@ -37,7 +37,7 @@ object Stack extends SeqFactory[Stack] {
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_stacks "Scala's Collection Library overview"]]
* section on `Immutable stacks` for more information.
*
- * @define Coll immutable.Stack
+ * @define Coll `immutable.Stack`
* @define coll immutable stack
* @define orderDependent
* @define orderDependentFold
@@ -84,7 +84,7 @@ class Stack[+A] protected (protected val elems: List[A])
* the stack. The last element returned by the traversable object
* will be on top of the new stack.
*
- * @param elems the iterator object.
+ * @param xs the iterator object.
* @return the stack with the new elements on top.
*/
def pushAll[B >: A](xs: TraversableOnce[B]): Stack[B] =
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 2eb2f8eb09..9f5f98ddf4 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -13,6 +13,7 @@ import generic._
import mutable.{Builder, StringBuilder, LazyBuilder, ListBuffer}
import scala.annotation.tailrec
import Stream.cons
+import language.implicitConversions
/** The class `Stream` implements lazy lists where elements
* are only evaluated when they are needed. Here is an example:
@@ -176,7 +177,7 @@ import Stream.cons
* section on `Streams` for more information.
* @define naturalsEx def naturalsFrom(i: Int): Stream[Int] = i #:: naturalsFrom(i + 1)
- * @define Coll Stream
+ * @define Coll `Stream`
* @define coll stream
* @define orderDependent
* @define orderDependentFold
@@ -715,8 +716,8 @@ self =>
/** A substream starting at index `from` and extending up to (but not including)
* index `until`. This returns a `Stream` that is lazily evaluated.
*
- * @param start The index of the first element of the returned subsequence
- * @param end The index of the element following the returned subsequence
+ * @param from The index of the first element of the returned subsequence
+ * @param until The index of the element following the returned subsequence
* @return A new string containing the elements requested from `start` until
* `end`.
*
@@ -804,9 +805,9 @@ self =>
these
}
- /** Builds a new stream from this stream in which any duplicates (wrt to ==)
- * have been removed. Among duplicate elements, only the first one is
- * retained in the resulting `Stream`.
+ /** Builds a new stream from this stream in which any duplicates (as
+ * determined by `==`) have been removed. Among duplicate elements, only the
+ * first one is retained in the resulting `Stream`.
*
* @return A new `Stream` representing the result of applying distinctness to
* the original `Stream`.
@@ -821,7 +822,7 @@ self =>
*/
override def distinct: Stream[A] =
if (isEmpty) this
- else cons(head, tail.filter(head !=).distinct)
+ else cons(head, tail.filter(head != _).distinct)
/** Returns a new sequence of given length containing the elements of this
* sequence followed by zero or more occurrences of given elements.
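The `distinct` fix replaces the bare `head !=` with the explicit function literal `head != _`, i.e. an anonymous predicate testing each tail element against `head`. In isolation:

    val head = 1
    val rest = List(1, 2, 1, 3)
    rest.filter(head != _)    // List(2, 3): keeps only elements different from head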
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index fc4e7bf0a8..e41b17a5e8 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -33,7 +33,7 @@ import StringLike._
* @tparam Repr The type of the actual collection inheriting `StringLike`.
*
* @since 2.8
- * @define Coll String
+ * @define Coll `String`
* @define coll string
* @define orderDependent
* @define orderDependentFold
@@ -60,7 +60,7 @@ self =>
val end = until min length
if (start >= end) newBuilder.result
- else newBuilder ++= toString.substring(start, end) result
+ else (newBuilder ++= toString.substring(start, end)).result
}
/** Return the current string concatenated `n` times.
@@ -239,7 +239,7 @@ self =>
else
throw new IllegalArgumentException("For input string: \"null\"")
- override def toArray[B >: Char : ClassManifest]: Array[B] =
+ override def toArray[B >: Char : ArrayTag]: Array[B] =
toString.toCharArray.asInstanceOf[Array[B]]
private def unwrapArg(arg: Any): AnyRef = arg match {
@@ -274,7 +274,7 @@ self =>
* `scala.BigDecimal`) are unwrapped to pass a type which `Formatter`
* understands.
*
- * @param locale an instance of `java.util.Locale`
+ * @param l an instance of `java.util.Locale`
* @param args the arguments used to instantiating the pattern.
* @throws `java.lang.IllegalArgumentException`
*/
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index 97609b4c4d..633821ecea 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -25,7 +25,7 @@ import mutable.StringBuilder
* @param repr the actual representation of this string operations object.
*
* @since 2.8
- * @define Coll StringOps
+ * @define Coll `StringOps`
* @define coll string
*/
final class StringOps(override val repr: String) extends AnyVal with StringLike[String] {
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 7830b38d69..59d3b4e029 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -30,7 +30,7 @@ trait Traversable[+A] extends scala.collection.Traversable[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll immutable traversable collection
- * @define Coll immutable.Traversable
+ * @define Coll `immutable.Traversable`
*/
object Traversable extends TraversableFactory[Traversable] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index dc4f79be35..4c1a5f2e03 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -14,7 +14,6 @@ package immutable
import generic._
import immutable.{RedBlackTree => RB}
import mutable.Builder
-import annotation.bridge
/** $factoryInfo
* @define Coll immutable.TreeMap
@@ -162,8 +161,6 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
override def ++[B1 >: B] (xs: GenTraversableOnce[(A, B1)]): TreeMap[A, B1] =
((repr: TreeMap[A, B1]) /: xs.seq) (_ + _)
- @bridge def ++[B1 >: B] (xs: TraversableOnce[(A, B1)]): TreeMap[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
-
/** A new TreeMap with the entry added is returned,
* assuming that key is <em>not</em> in the TreeMap.
*
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 1b3d72ceb7..882e828c5b 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -16,7 +16,7 @@ import immutable.{RedBlackTree => RB}
import mutable.{ Builder, SetBuilder }
/** $factoryInfo
- * @define Coll immutable.TreeSet
+ * @define Coll `immutable.TreeSet`
* @define coll immutable tree set
*/
object TreeSet extends ImmutableSortedSetFactory[TreeSet] {
@@ -40,7 +40,7 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] {
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#redblack_trees "Scala's Collection Library overview"]]
* section on `Red-Black Trees` for more information.
*
- * @define Coll immutable.TreeSet
+ * @define Coll `immutable.TreeSet`
* @define coll immutable tree set
* @define orderDependent
* @define orderDependentFold
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
index c77334b732..ead1a8c744 100644
--- a/src/library/scala/collection/immutable/TrieIterator.scala
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -75,7 +75,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
}
private[this] def iteratorWithSize(arr: Array[Iterable[T]]): (Iterator[T], Int) =
- (newIterator(arr), arr map (_.size) sum)
+ (newIterator(arr), arr.map(_.size).sum)
private[this] def arrayToIterators(arr: Array[Iterable[T]]): SplitIterators = {
val (fst, snd) = arr.splitAt(arr.length / 2)
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 55c31feec2..1395a8f52d 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -40,7 +40,7 @@ object Vector extends SeqFactory[Vector] {
*
* @tparam A the element type
*
- * @define Coll Vector
+ * @define Coll `Vector`
* @define coll vector
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `Vector[B]` because an implicit of type `CanBuildFrom[Vector, B, That]`
diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala
index de8aeea7e1..aa7e5b3c4a 100644
--- a/src/library/scala/collection/immutable/WrappedString.scala
+++ b/src/library/scala/collection/immutable/WrappedString.scala
@@ -25,7 +25,7 @@ import mutable.{Builder, StringBuilder}
* @param self a string contained within this wrapped string
*
* @since 2.8
- * @define Coll WrappedString
+ * @define Coll `WrappedString`
* @define coll wrapped string
*/
class WrappedString(val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] with StringLike[WrappedString] {
diff --git a/src/library/scala/collection/interfaces/IterableMethods.scala b/src/library/scala/collection/interfaces/IterableMethods.scala
deleted file mode 100644
index 2054922e59..0000000000
--- a/src/library/scala/collection/interfaces/IterableMethods.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-import mutable.Buffer
-import scala.reflect.ClassManifest
-import annotation.unchecked.uncheckedVariance
-
-/**
- * @since 2.8
- */
-trait IterableMethods[+A, +This <: IterableLike[A, This] with Iterable[A]] extends TraversableMethods[A, This] {
- self: Iterable[A] =>
-
- // abstract
- def iterator: Iterator[A]
-
- // concrete
- def dropRight(n: Int): Iterable[A]
- def grouped(size: Int): Iterator[Iterable[A]]
- def sameElements[B >: A](that: GenIterable[B]): Boolean
- def sliding(size: Int): Iterator[Iterable[A]]
- def sliding(size: Int, step: Int): Iterator[Iterable[A]]
- def takeRight(n: Int): Iterable[A]
- def zipAll[B, A1 >: A, That](that: GenIterable[B], e1: A1, e2: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That
- def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That
- def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That
-
- override def view: IterableView[A, This]
- override def view(from: Int, until: Int): IterableView[A, This]
-}
diff --git a/src/library/scala/collection/interfaces/MapMethods.scala b/src/library/scala/collection/interfaces/MapMethods.scala
deleted file mode 100644
index bc38ccdd2e..0000000000
--- a/src/library/scala/collection/interfaces/MapMethods.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-
-/**
- * @since 2.8
- */
-trait MapMethods[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
- extends IterableMethods[(A, B), This]
- with SubtractableMethods[A, This] {
- self: Map[A, B] =>
-
- // abstract
- def empty: This
- def get(key: A): Option[B]
- def iterator: Iterator[(A, B)]
- def + [B1 >: B] (kv: (A, B1)): Map[A, B1]
- def - (key: A): This
-
- // concrete
- def getOrElse[B1 >: B](key: A, default: => B1): B1
- def apply(key: A): B
- def contains(key: A): Boolean
- def isDefinedAt(key: A): Boolean
- def keys: Iterable[A]
- def keysIterator: Iterator[A]
- def keySet: Set[A]
- def values: Iterable[B]
- def valuesIterator: Iterator[B]
- def default(key: A): B
- def filterKeys(p: A => Boolean): Map[A, B]
- def mapValues[C](f: B => C): Map[A, C]
- def updated [B1 >: B](key: A, value: B1): Map[A, B1]
- def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1]
- def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1]
-}
diff --git a/src/library/scala/collection/interfaces/SeqMethods.scala b/src/library/scala/collection/interfaces/SeqMethods.scala
deleted file mode 100644
index 1f5b08d036..0000000000
--- a/src/library/scala/collection/interfaces/SeqMethods.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-import mutable.Buffer
-import scala.reflect.ClassManifest
-
-/**
- * @since 2.8
- */
-trait SeqMethods[+A, +This <: SeqLike[A, This] with Seq[A]] extends IterableMethods[A, This] {
- self: Seq[A] =>
-
- // abstract
- def apply(idx: Int): A
- def length: Int
-
- // concrete
- def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
- def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
- def combinations(n: Int): Iterator[This]
- def contains(elem: Any): Boolean
- def containsSlice[B](that: Seq[B]): Boolean
- def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean
- def diff[B >: A, That](that: Seq[B]): This
- def distinct: This
- def endsWith[B](that: Seq[B]): Boolean
- def indexOfSlice[B >: A](that: Seq[B]): Int
- def indexOfSlice[B >: A](that: Seq[B], fromIndex: Int): Int
- def indexOf[B >: A](elem: B): Int
- def indexOf[B >: A](elem: B, from: Int): Int
- def indexWhere(p: A => Boolean): Int
- def indexWhere(p: A => Boolean, from: Int): Int
- def indices: Range
- def intersect[B >: A, That](that: Seq[B]): This
- def isDefinedAt(x: Int): Boolean
- def lastIndexOfSlice[B >: A](that: Seq[B]): Int
- def lastIndexOfSlice[B >: A](that: Seq[B], fromIndex: Int): Int
- def lastIndexOf[B >: A](elem: B): Int
- def lastIndexOf[B >: A](elem: B, end: Int): Int
- def lastIndexWhere(p: A => Boolean): Int
- def lastIndexWhere(p: A => Boolean, end: Int): Int
- def lengthCompare(len: Int): Int
- def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
- def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That
- def permutations: Iterator[This]
- def prefixLength(p: A => Boolean): Int
- def reverse: This
- def reverseIterator: Iterator[A]
- def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That
- def segmentLength(p: A => Boolean, from: Int): Int
- def sortBy[B](f: A => B)(implicit ord: Ordering[B]): This
- def sortWith(lt: (A, A) => Boolean): This
- def sorted[B >: A](implicit ord: Ordering[B]): This
- def startsWith[B](that: Seq[B]): Boolean
- def startsWith[B](that: Seq[B], offset: Int): Boolean
- def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
-
- // refinements
- def view: SeqView[A, This]
- def view(from: Int, until: Int): SeqView[A, This]
-}
diff --git a/src/library/scala/collection/interfaces/SetMethods.scala b/src/library/scala/collection/interfaces/SetMethods.scala
deleted file mode 100644
index ffe141ed82..0000000000
--- a/src/library/scala/collection/interfaces/SetMethods.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-import mutable.Buffer
-import scala.reflect.ClassManifest
-import annotation.unchecked.uncheckedVariance
-
-/**
- * @since 2.8
- */
-trait SubtractableMethods[A, +This <: Subtractable[A, This]] {
- def -(elem: A): This
- def -(elem1: A, elem2: A, elems: A*): This
- def --(xs: TraversableOnce[A]): This
-}
-
-/**
- * @since 2.8
- */
-trait SetMethods[A, +This <: SetLike[A, This] with Set[A]]
- extends IterableMethods[A, This]
- with SubtractableMethods[A, This] {
- self: Set[A] =>
-
- // abstract
- def empty: This
- def contains(elem: A): Boolean
- def + (elem: A): This
- def - (elem: A): This
-
- // concrete
- def & (that: Set[A]): This
- def &~ (that: Set[A]): This
- def + (elem1: A, elem2: A, elems: A*): This
- def apply(elem: A): Boolean
- def diff(that: Set[A]): This
- def intersect(that: Set[A]): This
- def subsetOf(that: Set[A]): Boolean
- def subsets(len: Int): Iterator[This]
- def subsets: Iterator[This]
- def union(that: Set[A]): This
- def | (that: Set[A]): This
-}
diff --git a/src/library/scala/collection/interfaces/TraversableMethods.scala b/src/library/scala/collection/interfaces/TraversableMethods.scala
deleted file mode 100644
index 8aba39093d..0000000000
--- a/src/library/scala/collection/interfaces/TraversableMethods.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-
-/**
- * @since 2.8
- */
-trait TraversableMethods[+A, +This <: TraversableLike[A, This]] extends TraversableOnceMethods[A] {
- self: Traversable[A] =>
-
- // maps/iteration
- def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That
- def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That
- def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That
- def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[This, B, That]): That
-
- // new collections
- def ++:[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
-
- // element retrieval
- def head: A
- def headOption: Option[A]
- def last: A
- def lastOption: Option[A]
-
- // subcollections
- def drop(n: Int): Traversable[A]
- def dropWhile(p: A => Boolean): Traversable[A]
- def filter(p: A => Boolean): Traversable[A]
- def filterNot(p: A => Boolean): Traversable[A]
- def init: Traversable[A]
- def inits: Iterator[This]
- def slice(from: Int, until: Int): Traversable[A]
- def tail: Traversable[A]
- def tails: Iterator[This]
- def take(n: Int): Traversable[A]
- def takeWhile(p: A => Boolean): Traversable[A]
- def withFilter(p: A => Boolean): FilterMonadic[A, Traversable[A]]
-
- // subdivisions
- def groupBy[K](f: A => K): Map[K, Traversable[A]]
- def partition(p: A => Boolean): (Traversable[A], Traversable[A])
- def span(p: A => Boolean): (Traversable[A], Traversable[A])
- def splitAt(n: Int): (Traversable[A], Traversable[A])
-
- // info
- def stringPrefix: String
-
- // views
- def view: TraversableView[A, This]
- def view(from: Int, until: Int): TraversableView[A, This]
-}
diff --git a/src/library/scala/collection/interfaces/TraversableOnceMethods.scala b/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
deleted file mode 100644
index 471e977134..0000000000
--- a/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-trait TraversableOnceMethods[+A] {
- self: TraversableOnce[A] =>
-
- def foreach[U](f: A => U): Unit
- def size: Int
- protected[this] def reversed: TraversableOnce[A]
-
- // tests
- def hasDefiniteSize: Boolean
- def isEmpty: Boolean
- def isTraversableAgain: Boolean
- def nonEmpty: Boolean
-
- // applying a predicate
- def collectFirst[B](pf: PartialFunction[A, B]): Option[B]
- def count(p: A => Boolean): Int
- def exists(p: A => Boolean): Boolean
- def find(p: A => Boolean): Option[A]
- def forall(p: A => Boolean): Boolean
-
- // folds
- def /:[B](z: B)(op: (B, A) => B): B
- def :\[B](z: B)(op: (A, B) => B): B
- def foldLeft[B](z: B)(op: (B, A) => B): B
- def foldRight[B](z: B)(op: (A, B) => B): B
- def reduceLeftOption[B >: A](op: (B, A) => B): Option[B]
- def reduceLeft[B >: A](op: (B, A) => B): B
- def reduceRightOption[B >: A](op: (A, B) => B): Option[B]
- def reduceRight[B >: A](op: (A, B) => B): B
-
- // copies
- def copyToArray[B >: A](xs: Array[B]): Unit
- def copyToArray[B >: A](xs: Array[B], start: Int): Unit
- def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
- def copyToBuffer[B >: A](dest: mutable.Buffer[B]): Unit
-
- // conversions
- def toArray[B >: A : ClassManifest]: Array[B]
- def toBuffer[B >: A]: mutable.Buffer[B]
- def toIndexedSeq: immutable.IndexedSeq[A]
- def toIterable: Iterable[A]
- def toIterator: Iterator[A]
- def toList: List[A]
- def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U]
- def toSeq: Seq[A]
- def toSet[B >: A]: immutable.Set[B]
- def toStream: Stream[A]
- def toTraversable: Traversable[A]
-
- // type-constrained folds
- def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A
- def max[B >: A](implicit cmp: Ordering[B]): A
- def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A
- def min[B >: A](implicit cmp: Ordering[B]): A
- def product[B >: A](implicit num: Numeric[B]): B
- def sum[B >: A](implicit num: Numeric[B]): B
-
- // strings
- def mkString(start: String, sep: String, end: String): String
- def mkString(sep: String): String
- def mkString: String
-
- def addString(buf: StringBuilder, start: String, sep: String, end: String): StringBuilder
- def addString(buf: StringBuilder, sep: String): StringBuilder
- def addString(buf: StringBuilder): StringBuilder
-}
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index bfdc08536c..3034fc2bce 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -29,7 +29,7 @@ import parallel.mutable.ParArray
*
* @tparam A the type of this arraybuffer's elements.
*
- * @define Coll ArrayBuffer
+ * @define Coll `ArrayBuffer`
* @define coll arraybuffer
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]`
@@ -187,7 +187,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
*
* $factoryInfo
* @define coll array buffer
- * @define Coll ArrayBuffer
+ * @define Coll `ArrayBuffer`
*/
object ArrayBuffer extends SeqFactory[ArrayBuffer] {
/** $genericCanBuildFromInfo */
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index e396b0695e..293e85a97e 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -12,7 +12,8 @@ package scala.collection
package mutable
import generic._
-import scala.reflect.ClassManifest
+import scala.reflect.ArrayTag
+import scala.runtime.ScalaRunTime
/** A builder class for arrays.
*
@@ -30,12 +31,12 @@ object ArrayBuilder {
/** Creates a new arraybuilder of type `T`.
*
- * @tparam T type of the elements for the array builder, with a `ClassManifest` context bound.
+   *  @tparam T     type of the elements for the array builder, with an `ArrayTag` context bound.
* @return a new empty array builder.
*/
- def make[T: ClassManifest](): ArrayBuilder[T] = {
- val manifest = implicitly[ClassManifest[T]]
- val erasure = manifest.erasure
+ def make[T: ArrayTag](): ArrayBuilder[T] = {
+ val tag = implicitly[ArrayTag[T]]
+ val erasure = ScalaRunTime.arrayElementClass(tag)
erasure match {
case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]]
case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]]
@@ -46,15 +47,15 @@ object ArrayBuilder {
case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]]
case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]]
case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]]
- case _ => new ArrayBuilder.ofRef[T with AnyRef]()(manifest.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
+ case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ArrayTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
}
}
/** A class for array builders for arrays of reference types.
*
- * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassManifest` context bound.
+   *  @tparam T     type of elements for the array builder, subtype of `AnyRef` with an `ArrayTag` context bound.
*/
- class ofRef[T <: AnyRef : ClassManifest] extends ArrayBuilder[T] {
+ class ofRef[T <: AnyRef : ArrayTag] extends ArrayBuilder[T] {
private var elems: Array[T] = _
private var capacity: Int = 0
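
For reference, a usage sketch of the reworked `make`. The patch gives it an `ArrayTag` context bound; the sketch assumes the equivalent `ClassTag` bound from released Scala versions so that it compiles, and the helper name is illustrative.

    import scala.collection.mutable.ArrayBuilder
    import scala.reflect.ClassTag

    // ClassTag stands in for the patch's ArrayTag context bound.
    def tabulated[T: ClassTag](n: Int)(f: Int => T): Array[T] = {
      val b = ArrayBuilder.make[T]()   // dispatches on the erasure: ofInt, ofByte, ... or ofRef
      b.sizeHint(n)
      var i = 0
      while (i < n) { b += f(i); i += 1 }
      b.result
    }

    val squares: Array[Int] = tabulated(5)(i => i * i)   // Array(0, 1, 4, 9, 16)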
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index 23d36252d2..04601845c4 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -18,7 +18,7 @@ import generic._
* @tparam A type of the elements contained in the array like object.
* @tparam Repr the type of the actual collection containing the elements.
*
- * @define Coll ArrayLike
+ * @define Coll `ArrayLike`
* @version 2.8
* @since 2.8
*/
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 3e7b8071be..0807721f7d 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -12,7 +12,8 @@ package scala.collection
package mutable
import compat.Platform.arraycopy
-import scala.reflect.ClassManifest
+import scala.reflect.ArrayTag
+import scala.runtime.ScalaRunTime._
import parallel.mutable.ParArray
@@ -29,7 +30,7 @@ import parallel.mutable.ParArray
*
* @tparam T type of the elements contained in this array.
*
- * @define Coll ArrayOps
+ * @define Coll `ArrayOps`
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
@@ -37,10 +38,8 @@ import parallel.mutable.ParArray
*/
abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
- private def rowBuilder[U]: Builder[U, Array[U]] =
- Array.newBuilder(
- ClassManifest[U](
- repr.getClass.getComponentType.getComponentType))
+ private def elementClass: Class[_] =
+ arrayElementClass(repr.getClass)
override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) {
var l = math.min(len, repr.length)
@@ -48,11 +47,13 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
Array.copy(repr, 0, xs, start, l)
}
- override def toArray[U >: T : ClassManifest]: Array[U] =
- if (implicitly[ClassManifest[U]].erasure eq repr.getClass.getComponentType)
+ override def toArray[U >: T : ArrayTag]: Array[U] = {
+ val thatElementClass = arrayElementClass(implicitly[ArrayTag[U]])
+ if (elementClass eq thatElementClass)
repr.asInstanceOf[Array[U]]
else
super.toArray[U]
+ }
override def par = ParArray.handoff(repr)
@@ -60,12 +61,12 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
* into a single array.
*
* @tparam U Type of row elements.
- * @param asArray A function that converts elements of this array to rows - arrays of type `U`.
+ * @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U, To](implicit asTrav: T => collection.Traversable[U], m: ClassManifest[U]): Array[U] = {
+ def flatten[U, To](implicit asTrav: T => collection.Traversable[U], m: ArrayTag[U]): Array[U] = {
val b = Array.newBuilder[U]
- b.sizeHint(map{case is: collection.IndexedSeq[_] => is.size case _ => 0} sum)
+ b.sizeHint(map{case is: collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
b ++= asTrav(xs)
b.result
@@ -78,7 +79,8 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
   *  @return An array obtained by replacing elements of this array with the rows they represent.
*/
def transpose[U](implicit asArray: T => Array[U]): Array[Array[U]] = {
- val bs = asArray(head) map (_ => rowBuilder[U])
+ def mkRowBuilder() = Array.newBuilder(ClassTag[U](arrayElementClass(elementClass)))
+ val bs = asArray(head) map (_ => mkRowBuilder())
for (xs <- this) {
var i = 0
for (x <- asArray(xs)) {
@@ -86,9 +88,7 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
i += 1
}
}
- val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(
- ClassManifest[Array[U]](
- repr.getClass.getComponentType))
+ val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass))
for (b <- bs) bb += b.result
bb.result
}
@@ -109,8 +109,7 @@ object ArrayOps {
override protected[this] def thisCollection: WrappedArray[T] = new WrappedArray.ofRef[T](repr)
override protected[this] def toCollection(repr: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](repr)
- override protected[this] def newBuilder = new ArrayBuilder.ofRef[T]()(
- ClassManifest[T](repr.getClass.getComponentType))
+ override protected[this] def newBuilder = new ArrayBuilder.ofRef[T]()(ClassTag[T](arrayElementClass(repr.getClass)))
def length: Int = repr.length
def apply(index: Int): T = repr(index)
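
A sketch of the `toArray` fast path introduced here: when the requested element class equals the array's own element class, the receiver is returned as-is rather than copied. `ClassTag` stands in for the patch's `ArrayTag`, and the helper is hypothetical.

    import scala.reflect.ClassTag

    // Hypothetical standalone version of the same check.
    def toArrayFast[T, U >: T](xs: Array[T])(implicit tag: ClassTag[U]): Array[U] =
      if (tag.runtimeClass eq xs.getClass.getComponentType)
        xs.asInstanceOf[Array[U]]            // same element class: no copy
      else {
        val out = new Array[U](xs.length)    // needs the ClassTag to allocate
        Array.copy(xs, 0, out, 0, xs.length)
        out
      }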
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index cb86c416fe..d0eaee348b 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -27,7 +27,7 @@ import parallel.mutable.ParArray
* @tparam A type of the elements contained in this array sequence.
* @param length the length of the underlying array.
*
- * @define Coll ArraySeq
+ * @define Coll `ArraySeq`
* @define coll array sequence
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ArraySeq[B]` because an implicit of type `CanBuildFrom[ArraySeq, B, ArraySeq[B]]`
@@ -93,7 +93,7 @@ extends AbstractSeq[A]
/** $factoryInfo
* @define coll array sequence
- * @define Coll ArraySeq
+ * @define Coll `ArraySeq`
*/
object ArraySeq extends SeqFactory[ArraySeq] {
/** $genericCanBuildFromInfo */
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index f5287312b9..ed5f39f21b 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -15,13 +15,13 @@ import generic._
*
* $factoryInfo
* @define coll array stack
- * @define Coll ArrayStack
+ * @define Coll `ArrayStack`
*/
object ArrayStack extends SeqFactory[ArrayStack] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayStack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, ArrayStack[A]] = new ArrayStack[A]
def empty: ArrayStack[Nothing] = new ArrayStack()
- def apply[A: ClassManifest](elems: A*): ArrayStack[A] = {
+ def apply[A: ArrayTag](elems: A*): ArrayStack[A] = {
val els: Array[AnyRef] = elems.reverseMap(_.asInstanceOf[AnyRef])(breakOut)
if (els.length == 0) new ArrayStack()
else new ArrayStack[A](els, els.length)
@@ -51,7 +51,7 @@ object ArrayStack extends SeqFactory[ArrayStack] {
*
* @tparam T type of the elements contained in this array stack.
*
- * @define Coll ArrayStack
+ * @define Coll `ArrayStack`
* @define coll array stack
* @define orderDependent
* @define orderDependentFold
@@ -152,7 +152,7 @@ extends AbstractSeq[T]
/** Pushes all the provided elements in the traversable object onto the stack.
*
- * @param x The source of elements to push.
+ * @param xs The source of elements to push.
* @return A reference to this stack.
*/
override def ++=(xs: TraversableOnce[T]): this.type = { xs.seq foreach += ; this }
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index 6b9673dae6..58b45aa2a2 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -21,7 +21,7 @@ import BitSetLike.{LogWL, updateArray}
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_bitsets "Scala's Collection Library overview"]]
* section on `Mutable Bitsets` for more information.
*
- * @define Coll BitSet
+ * @define Coll `BitSet`
* @define coll bitset
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `BitSet[B]` because an implicit of type `CanBuildFrom[BitSet, B, BitSet]`
@@ -114,7 +114,7 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int]
/** $factoryInfo
* @define coll bitset
- * @define Coll BitSet
+ * @define Coll `BitSet`
*/
object BitSet extends BitSetFactory[BitSet] {
def empty: BitSet = new BitSet
diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala
index 7326d5ec5b..dd225cfab9 100644
--- a/src/library/scala/collection/mutable/Buffer.scala
+++ b/src/library/scala/collection/mutable/Buffer.scala
@@ -25,7 +25,7 @@ import generic._
*
* @tparam A type of the elements contained in this buffer.
*
- * @define Coll Buffer
+ * @define Coll `Buffer`
* @define coll buffer
*/
@cloneable
@@ -37,7 +37,7 @@ trait Buffer[A] extends Seq[A]
/** $factoryInfo
* @define coll buffer
- * @define Coll Buffer
+ * @define Coll `Buffer`
*/
object Buffer extends SeqFactory[Buffer] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Buffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 1dc2fc27d5..f82a596b32 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -223,9 +223,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
@migration("`++` creates a new buffer. Use `++=` to add an element from this buffer and return that buffer itself.", "2.8.0")
def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
- @bridge
- def ++(xs: TraversableOnce[A]): This = ++(xs: GenTraversableOnce[A])
-
/** Creates a new collection with all the elements of this collection except `elem`.
*
* @param elem the element to remove.
@@ -255,6 +252,4 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
@migration("`--` creates a new buffer. Use `--=` to remove an element from this buffer and return that buffer itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
-
- @bridge def --(xs: TraversableOnce[A]): This = --(xs: GenTraversableOnce[A])
}
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 6a6bdd0077..db3b039461 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -25,7 +25,7 @@ import script._
*
* @tparam A type of the elements the buffer proxy contains.
*
- * @define Coll BufferProxy
+ * @define Coll `BufferProxy`
* @define coll buffer proxy
*/
trait BufferProxy[A] extends Buffer[A] with Proxy {
@@ -77,7 +77,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
/** Prepend an element to this list.
*
- * @param elem the element to prepend.
+ * @param elems the elements to prepend.
*/
override def prepend(elems: A*) { self.prependAll(elems) }
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index 44cc1c8582..bbf4f5889d 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -100,6 +100,8 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
def +=(x: Elem): this.type = { self += x; this }
def clear() = self.clear()
override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this }
+ override def sizeHint(size: Int) = self.sizeHint(size)
+ override def sizeHintBounded(size: Int, boundColl: TraversableLike[_, _]) = self.sizeHintBounded(size, boundColl)
def result: NewTo = f(self.result)
}
}
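
Why the two forwarders matter, in a small usage sketch: `mapResult` wraps a builder, and without them a `sizeHint` given to the wrapper never reaches the underlying builder.

    import scala.collection.mutable.{ArrayBuffer, Builder}

    val underlying: Builder[Int, ArrayBuffer[Int]] = ArrayBuffer.newBuilder[Int]
    val wrapped: Builder[Int, List[Int]] = underlying.mapResult(_.toList)
    wrapped.sizeHint(1000)                 // now pre-sizes the wrapped ArrayBuffer builder
    (1 to 1000).foreach(wrapped += _)
    val xs: List[Int] = wrapped.result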
diff --git a/src/library/scala/collection/mutable/ConcurrentMap.scala b/src/library/scala/collection/mutable/ConcurrentMap.scala
index f2b44d6737..ad6b609862 100644
--- a/src/library/scala/collection/mutable/ConcurrentMap.scala
+++ b/src/library/scala/collection/mutable/ConcurrentMap.scala
@@ -20,7 +20,7 @@ package mutable
* @tparam A the key type of the map
* @tparam B the value type of the map
*
- * @define Coll ConcurrentMap
+ * @define Coll `ConcurrentMap`
* @define coll concurrent map
* @define concurrentmapinfo
* This is a base trait for all Scala concurrent map implementations. It
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 49378a4f4e..cba4e9725e 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -26,7 +26,7 @@ import generic._
*
* @tparam A the type of the elements contained in this double linked list.
*
- * @define Coll DoubleLinkedList
+ * @define Coll `DoubleLinkedList`
* @define coll double linked list
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `DoubleLinkedList[B]` because an implicit of type `CanBuildFrom[DoubleLinkedList, B, DoubleLinkedList[B]]`
@@ -67,7 +67,7 @@ class DoubleLinkedList[A]() extends AbstractSeq[A]
/** $factoryInfo
* @define coll double linked list
- * @define Coll DoubleLinkedList
+ * @define Coll `DoubleLinkedList`
*/
object DoubleLinkedList extends SeqFactory[DoubleLinkedList] {
/** $genericCanBuildFromInfo */
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index dfb70beeda..ebccacf976 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -52,7 +52,7 @@ import annotation.migration
* @tparam A type of the elements contained in the double linked list
* @tparam This the type of the actual linked list holding the elements
*
- * @define Coll DoubleLinkedList
+ * @define Coll `DoubleLinkedList`
* @define coll double linked list
*/
trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self =>
diff --git a/src/library/scala/collection/mutable/FlatArray.scala b/src/library/scala/collection/mutable/FlatArray.scala
deleted file mode 100644
index 3e43b66ecf..0000000000
--- a/src/library/scala/collection/mutable/FlatArray.scala
+++ /dev/null
@@ -1,157 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package mutable
-
-import scala.reflect.ClassManifest
-import generic.CanBuildFrom
-
-/**
- * A class representing `Array[T]`.
- *
- * @tparam T type of the elements in this wrapped array.
- *
- * @author Martin Odersky, Stephane Micheloud
- * @version 1.0
- * @since 2.8
- * @define Coll WrappedArray
- * @define coll wrapped array
- * @define orderDependent
- * @define orderDependentFold
- * @define mayNotTerminateInf
- * @define willNotTerminateInf
- */
-abstract sealed class FlatArray[T]
-extends AbstractSeq[T]
- with IndexedSeq[T]
- with IndexedSeqOptimized[T, FlatArray[T]]
-{
-
- override protected[this] def thisCollection: FlatArray[T] = this
- override protected[this] def toCollection(repr: FlatArray[T]): FlatArray[T] = repr
-
- /** The length of the array */
- def length: Int
-
- /** The element at given index */
- def apply(index: Int): T
-
- /** Update element at given index */
- def update(index: Int, elem: T): Unit
-
- override def stringPrefix = "FlatArray"
-
- override protected[this] def newBuilder: Builder[T, FlatArray[T]] = ??? // implemented in FlatArray.Impl
-
- /** Clones this object, including the underlying Array. */
- override def clone: FlatArray[T] = ??? // implemented in FlatArray.Impl
-}
-
-
-/** A companion object used to create instances of `WrappedArray`.
- */
-object FlatArray {
-
- def ofDim[Boxed, Unboxed](size:Int)
- (implicit boxings: BoxingConversions[Boxed, Unboxed],
- manifest: ClassManifest[Unboxed]): FlatArray[Boxed] = {
- val elems = Array.ofDim[Unboxed](size)
- new FlatArray.Impl(elems, boxings, manifest)
- }
-
- def empty[Boxed, Unboxed](implicit boxings: BoxingConversions[Boxed, Unboxed],
- elemManifest: ClassManifest[Unboxed]): FlatArray[Boxed] = apply()
-
- def apply[Boxed, Unboxed](elems: Boxed*)
- (implicit boxings: BoxingConversions[Boxed, Unboxed], elemManifest: ClassManifest[Unboxed]): FlatArray[Boxed] = {
- val b = newBuilder[Boxed, Unboxed]
- b.sizeHint(elems.length)
- b ++= elems
- b.result
- }
-
- def newBuilder[Boxed, Unboxed]
- (implicit boxings: BoxingConversions[Boxed, Unboxed], elemManifest: ClassManifest[Unboxed]): Builder[Boxed, FlatArray[Boxed]] =
- new Bldr[Boxed, Unboxed](boxings, elemManifest)
-
- implicit def canBuildFrom[Boxed, Unboxed](
- implicit
- boxings: BoxingConversions[Boxed, Unboxed],
- elemManifest: ClassManifest[Unboxed]): CanBuildFrom[FlatArray[_], Boxed, FlatArray[Boxed]] =
- new CanBuildFrom[FlatArray[_], Boxed, FlatArray[Boxed]] {
- def apply(from: FlatArray[_]): Builder[Boxed, FlatArray[Boxed]] =
- newBuilder[Boxed, Unboxed]
- def apply: Builder[Boxed, FlatArray[Boxed]] =
- newBuilder[Boxed, Unboxed]
- }
-
- private class Bldr[Boxed, Unboxed](boxings: BoxingConversions[Boxed, Unboxed], manifest: ClassManifest[Unboxed]) extends Builder[Boxed, FlatArray[Boxed]] {
-
- private var elems: Array[Unboxed] = _
- private var capacity: Int = 0
- private var size: Int = 0
-
- private def resize(size: Int) {
- val newelems = manifest.newArray(size)
- if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
- elems = newelems
- capacity = size
- }
-
- override def sizeHint(size: Int) {
- if (capacity < size) resize(size)
- }
-
- private def ensureSize(size: Int) {
- if (capacity < size) {
- var newsize = if (capacity == 0) 16 else capacity * 2
- while (newsize < size) newsize *= 2
- resize(newsize)
- }
- }
-
- def +=(elem: Boxed): this.type = {
- ensureSize(size + 1)
- elems(size) = boxings.unbox(elem)
- size += 1
- this
- }
-
- def clear() {
- size = 0
- }
-
- def result(): FlatArray[Boxed] = {
- if (capacity == 0 || capacity != size) resize(size)
- new FlatArray.Impl(elems, boxings, manifest)
- }
- }
-
- private class Impl[Boxed, Unboxed](
- elems: Array[Unboxed],
- boxings: BoxingConversions[Boxed, Unboxed],
- elemManifest: ClassManifest[Unboxed]) extends FlatArray[Boxed] {
-
- def length = elems.length
-
- def apply(idx: Int): Boxed = boxings.box(elems(idx))
-
- def update(idx: Int, elem: Boxed) = elems(idx) = boxings.unbox(elem)
-
- /** Creates new builder for this collection ==> move to subclasses
- */
- override protected[this] def newBuilder: Builder[Boxed, FlatArray[Boxed]] =
- new Bldr[Boxed, Unboxed](boxings, elemManifest)
-
- /** Clones this object, including the underlying Array. */
- override def clone: FlatArray[Boxed] = new Impl[Boxed, Unboxed](elems.clone(), boxings, elemManifest)
- }
-}
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index ee6d4d1d22..4070174902 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -50,6 +50,10 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+ /** The initial size of the hash table.
+ */
+ def initialSize: Int = 32
+
private def initialCapacity = capacity(initialSize)
protected def randomSeed = seedGenerator.get.nextInt()
@@ -361,10 +365,6 @@ private[collection] object FlatHashTable {
def defaultLoadFactor: Int = 450
final def loadFactorDenum = 1000
- /** The initial size of the hash table.
- */
- def initialSize: Int = 32
-
def sizeForThreshold(size: Int, _loadFactor: Int) = math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
def newThreshold(_loadFactor: Int, size: Int) = {
diff --git a/src/library/scala/collection/mutable/GenSeq.scala.disabled b/src/library/scala/collection/mutable/GenSeq.scala.disabled
index 85e4065183..53ec5acc34 100644
--- a/src/library/scala/collection/mutable/GenSeq.scala.disabled
+++ b/src/library/scala/collection/mutable/GenSeq.scala.disabled
@@ -24,7 +24,7 @@ import generic._
*
* The class adds an `update` method to `collection.Seq`.
*
- * @define Coll mutable.Seq
+ * @define Coll `mutable.Seq`
* @define coll mutable sequence
*/
trait GenSeq[A] extends GenIterable[A]
diff --git a/src/library/scala/collection/mutable/GenSet.scala.disabled b/src/library/scala/collection/mutable/GenSet.scala.disabled
index ac11e634e8..9080abaf38 100644
--- a/src/library/scala/collection/mutable/GenSet.scala.disabled
+++ b/src/library/scala/collection/mutable/GenSet.scala.disabled
@@ -24,7 +24,7 @@ import generic._
*
* @since 1.0
* @author Matthias Zenger
- * @define Coll mutable.Set
+ * @define Coll `mutable.Set`
* @define coll mutable set
*/
trait GenSet[A] extends GenIterable[A]
diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala
index 0b7385194e..df63177b87 100644
--- a/src/library/scala/collection/mutable/GrowingBuilder.scala
+++ b/src/library/scala/collection/mutable/GrowingBuilder.scala
@@ -18,7 +18,7 @@ import generic._
* @version 2.8
* @since 2.8
*
- * @define Coll GrowingBuilder
+ * @define Coll `GrowingBuilder`
* @define coll growing builder
*/
class GrowingBuilder[Elem, To <: Growable[Elem]](empty: To) extends Builder[Elem, To] {
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index 65a10f4ba9..bf640cdb90 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -21,7 +21,7 @@ import scala.collection.parallel.mutable.ParHashMap
* @tparam A the type of the keys contained in this hash map.
* @tparam B the type of the values assigned to keys in this hash map.
*
- * @define Coll mutable.HashMap
+ * @define Coll `mutable.HashMap`
* @define coll mutable hash map
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `HashMap[A, B]` if the elements contained in the resulting collection are
@@ -138,7 +138,7 @@ extends AbstractMap[A, B]
}
/** $factoryInfo
- * @define Coll mutable.HashMap
+ * @define Coll `mutable.HashMap`
* @define coll mutable hash map
*/
object HashMap extends MutableMapFactory[HashMap] {
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index 8ed6b925aa..e040d1e421 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -25,7 +25,7 @@ import collection.parallel.mutable.ParHashSet
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash_tables "Scala's Collection Library overview"]]
* section on `Hash Tables` for more information.
*
- * @define Coll mutable.HashSet
+ * @define Coll `mutable.HashSet`
* @define coll mutable hash set
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `HashSet[B]` because an implicit of type `CanBuildFrom[HashSet, B, HashSet[B]]`
@@ -98,7 +98,7 @@ extends AbstractSet[A]
}
/** $factoryInfo
- * @define Coll mutable.HashSet
+ * @define Coll `mutable.HashSet`
* @define coll mutable hash set
*/
object HashSet extends MutableSetFactory[HashSet] {
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 06b7d40bfc..c307e6dcab 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -56,7 +56,15 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
- protected def initialSize: Int = HashTable.initialSize
+ /** The initial size of the hash table.
+ */
+ protected def initialSize: Int = 16
+
+ /** The initial threshold.
+ */
+ private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity)
+
+ private def initialCapacity = capacity(initialSize)
private def lastPopulatedIndex = {
var idx = table.length - 1
@@ -187,7 +195,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/** Avoid iterator for a 2x faster traversal. */
- protected final def foreachEntry[C](f: Entry => C) {
+ protected def foreachEntry[C](f: Entry => C) {
val iterTable = table
var idx = lastPopulatedIndex
var es = iterTable(idx)
@@ -354,16 +362,6 @@ private[collection] object HashTable {
private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
private[collection] final def loadFactorDenum = 1000;
- /** The initial size of the hash table.
- */
- private[collection] final def initialSize: Int = 16
-
- /** The initial threshold.
- */
- private[collection] final def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity)
-
- private[collection] final def initialCapacity = capacity(initialSize)
-
private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt
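
Moving `initialSize` from the companion object onto the trait (and un-finalizing `foreachEntry`) lets subclasses tune table creation and traversal; `LinkedHashMap` uses the latter further down in this patch. A hypothetical subclass as a sketch:

    import scala.collection.mutable

    // Hypothetical: pre-size the table for a map expected to grow large.
    class PresizedHashMap[A, B] extends mutable.HashMap[A, B] {
      override protected def initialSize: Int = 1024   // trait default is 16
    }

    val m = new PresizedHashMap[String, Int]
    m += ("answer" -> 42)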
diff --git a/src/library/scala/collection/mutable/IndexedSeq.scala b/src/library/scala/collection/mutable/IndexedSeq.scala
index 0e2e06df84..686f90c9e8 100644
--- a/src/library/scala/collection/mutable/IndexedSeq.scala
+++ b/src/library/scala/collection/mutable/IndexedSeq.scala
@@ -29,7 +29,7 @@ trait IndexedSeq[A] extends Seq[A]
/** $factoryInfo
* The current default implementation of a $Coll is an `ArrayBuffer`.
* @define coll mutable indexed sequence
- * @define Coll mutable.IndexedSeq
+ * @define Coll `mutable.IndexedSeq`
*/
object IndexedSeq extends SeqFactory[IndexedSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala
index 0c1df17ead..2ff7ac8272 100644
--- a/src/library/scala/collection/mutable/IndexedSeqLike.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala
@@ -27,7 +27,7 @@ import generic._
* @tparam A the element type of the $coll
* @tparam Repr the type of the actual $coll containing the elements.
*
- * @define Coll IndexedSeq
+ * @define Coll `IndexedSeq`
* @define coll mutable indexed sequence
* @define indexedSeqInfo
* @author Martin Odersky
@@ -43,7 +43,7 @@ trait IndexedSeqLike[A, +Repr] extends scala.collection.IndexedSeqLike[A, Repr]
/** Replaces element at given index with a new value.
*
- * @param n the index of the element to replace.
+ * @param idx the index of the element to replace.
* @param elem the new value.
* @throws IndexOutOfBoundsException if the index is not valid.
*/
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index 593af92255..a0de2ec8ad 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -14,6 +14,7 @@ package mutable
import generic._
import TraversableView.NoBuilder
+import language.implicitConversions
/** A non-strict view of a mutable `IndexedSeq`.
* $viewInfo
diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala
index 54fe11f98c..3b5ee63ea3 100644
--- a/src/library/scala/collection/mutable/Iterable.scala
+++ b/src/library/scala/collection/mutable/Iterable.scala
@@ -29,7 +29,7 @@ trait Iterable[A] extends Traversable[A]
/** $factoryInfo
* The current default implementation of a $Coll is an `ArrayBuffer`.
* @define coll mutable iterable collection
- * @define Coll mutable.Iterable
+ * @define Coll `mutable.Iterable`
*/
object Iterable extends TraversableFactory[Iterable] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala
index 522ebfd277..443b458342 100644
--- a/src/library/scala/collection/mutable/LinearSeq.scala
+++ b/src/library/scala/collection/mutable/LinearSeq.scala
@@ -17,7 +17,7 @@ import generic._
* that can be mutated.
* $linearSeqInfo
*
- * @define Coll LinearSeq
+ * @define Coll `LinearSeq`
* @define coll linear sequence
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]]
* section on `Mutable Lists` for more information.
@@ -33,7 +33,7 @@ trait LinearSeq[A] extends Seq[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `MutableList`.
* @define coll mutable linear sequence
- * @define Coll mutable.LinearSeq
+ * @define Coll `mutable.LinearSeq`
*/
object LinearSeq extends SeqFactory[LinearSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index e4090637ec..4150cf9eba 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -14,7 +14,7 @@ package mutable
import generic._
/** $factoryInfo
- * @define Coll LinkedHashMap
+ * @define Coll `LinkedHashMap`
* @define coll linked hash map
*/
object LinkedHashMap extends MutableMapFactory[LinkedHashMap] {
@@ -28,7 +28,7 @@ object LinkedHashMap extends MutableMapFactory[LinkedHashMap] {
* @tparam A the type of the keys contained in this hash map.
* @tparam B the type of the values assigned to keys in this hash map.
*
- * @define Coll LinkedHashMap
+ * @define Coll `LinkedHashMap`
* @define coll linked hash map
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `LinkedHashMap[A, B]` if the elements contained in the resulting collection are
@@ -132,6 +132,14 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
+ protected override def foreachEntry[C](f: Entry => C) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur)
+ cur = cur.later
+ }
+ }
+
override def clear() {
clearTable()
firstEntry = null
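
The override exists because `LinkedHashMap` guarantees insertion order, which the generic bucket walk in `HashTable.foreachEntry` would not preserve. A small illustration of that contract (not part of the patch):

    import scala.collection.mutable.LinkedHashMap

    val m = LinkedHashMap.empty[String, Int]
    m += ("b" -> 2); m += ("a" -> 1); m += ("c" -> 3)
    assert(m.toList == List("b" -> 2, "a" -> 1, "c" -> 3))   // insertion order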
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index f6d4915fef..3f789f9fa2 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -24,7 +24,7 @@ import generic._
*
* @tparam A the type of the elements contained in this set.
*
- * @define Coll LinkedHashSet
+ * @define Coll `LinkedHashSet`
* @define coll linked hash set
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `LinkedHashSet[B]` because an implicit of type `CanBuildFrom[LinkedHashSet, B, LinkedHashSet[B]]`
@@ -82,12 +82,12 @@ class LinkedHashSet[A] extends AbstractSet[A]
private def readObject(in: java.io.ObjectInputStream) {
ordered = new ListBuffer[A]
- init(in, ordered += )
+ init(in, ordered += _)
}
}
/** $factoryInfo
- * @define Coll LinkedHashSet
+ * @define Coll `LinkedHashSet`
* @define coll linked hash set
*/
object LinkedHashSet extends MutableSetFactory[LinkedHashSet] {
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index 8510827697..335ddccf56 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -40,7 +40,7 @@ import generic._
*
* @constructor Creates an "empty" list, defined as a single node with no data element and next pointing to itself.
- * @define Coll LinkedList
+ * @define Coll `LinkedList`
* @define coll linked list
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `LinkedList[B]` because an implicit of type `CanBuildFrom[LinkedList, B, LinkedList[B]]`
@@ -109,7 +109,7 @@ class LinkedList[A]() extends AbstractSeq[A]
}
/** $factoryInfo
- * @define Coll LinkedList
+ * @define Coll `LinkedList`
* @define coll linked list
*/
object LinkedList extends SeqFactory[LinkedList] {
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index ebec31ca98..07a8501ca4 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -29,7 +29,7 @@ import annotation.tailrec
* @tparam A type of the elements contained in the linked list
* @tparam This the type of the actual linked list holding the elements
*
- * @define Coll LinkedList
+ * @define Coll `LinkedList`
* @define coll linked list
*
* @define singleLinkedListExample
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 96e73522b6..cd743999bc 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -27,7 +27,7 @@ import java.io._
*
* @tparam A the type of this list buffer's elements.
*
- * @define Coll ListBuffer
+ * @define Coll `ListBuffer`
* @define coll list buffer
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ListBuffer[B]` because an implicit of type `CanBuildFrom[ListBuffer, B, ListBuffer[B]]`
@@ -208,7 +208,7 @@ final class ListBuffer[A]
* one. Instead, it will insert a new element at index `n`.
*
* @param n the index where a new element will be inserted.
- * @param iter the iterable object providing all elements to insert.
+ * @param seq the iterable object providing all elements to insert.
* @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
*/
def insertAll(n: Int, seq: Traversable[A]) {
@@ -338,8 +338,8 @@ final class ListBuffer[A]
/** Remove a single element from this buffer. May take time linear in the
* buffer size.
*
- * @param x the element to remove.
- * @return this $coll.
+ * @param elem the element to remove.
+ * @return this $coll.
*/
override def -= (elem: A): this.type = {
if (exported) copy()
@@ -425,7 +425,7 @@ final class ListBuffer[A]
}
/** $factoryInfo
- * @define Coll ListBuffer
+ * @define Coll `ListBuffer`
* @define coll list buffer
*/
object ListBuffer extends SeqFactory[ListBuffer] {
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index d8d60d1c9a..61810c4ddf 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -18,7 +18,7 @@ import generic._
* @tparam A the type of the keys contained in this list map.
* @tparam B the type of the values assigned to keys in this list map.
*
- * @define Coll mutable.ListMap
+ * @define Coll `mutable.ListMap`
* @define coll mutable list map
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ListMap[A, B]` if the elements contained in the resulting collection are
@@ -60,7 +60,7 @@ extends AbstractMap[A, B]
}
/** $factoryInfo
- * @define Coll mutable.ListMap
+ * @define Coll `mutable.ListMap`
* @define coll mutable list map
*/
object ListMap extends MutableMapFactory[ListMap] {
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index 0d40a1c70d..207b3f3324 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -63,7 +63,7 @@ trait Map[A, B]
/** $factoryInfo
* The current default implementation of a $Coll is a `HashMap`.
* @define coll mutable map
- * @define Coll mutable.Map
+ * @define Coll `mutable.Map`
*/
object Map extends MutableMapFactory[Map] {
/** $canBuildFromInfo */
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index b08a4b7bc9..3046207533 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -18,6 +18,28 @@ import parallel.mutable.ParMap
* $mapNote
* $mapTags
* @since 2.8
+ *
+ * @define mapNote
+ * '''Implementation note:'''
+ * This trait provides most of the operations of a mutable `Map`
+ * independently of its representation. It is typically inherited by
+ * concrete implementations of maps.
+ *
+ * To implement a concrete mutable map, you need to provide
+ * implementations of the following methods:
+ * {{{
+ * def get(key: A): Option[B]
+ * def iterator: Iterator[(A, B)]
+ * def += (kv: (A, B)): This
+ * def -= (key: A): This
+ * }}}
+ * If you wish for methods like `take`, `drop`, and `filter` to also return the same kind of map,
+ * you should also override:
+ * {{{
+ * def empty: This
+ * }}}
+ * It is also a good idea to override the methods `foreach` and
+ * `size` for efficiency.
*/
trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
extends scala.collection.MapLike[A, B, This]
@@ -119,8 +141,6 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
clone().asInstanceOf[Map[A, B1]] ++= xs.seq
- @bridge def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
-
/** Removes a key from this map, returning the value associated previously
* with that key as an option.
* @param key the key to be removed
@@ -224,6 +244,4 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
*/
@migration("`--` creates a new map. Use `--=` to remove an element from this map and return that map itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
-
- @bridge def --(xs: TraversableOnce[A]): This = --(xs: GenTraversableOnce[A])
}
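
A minimal sketch matching the implementation note added above; the class name and the list-based backing store are illustrative only.

    import scala.collection.mutable

    // Implements the four required methods; empty keeps filter/take/drop
    // building this kind of map, per the note.
    class SimpleMap[A, B] extends mutable.Map[A, B] {
      private val entries = mutable.ListBuffer.empty[(A, B)]

      def get(key: A): Option[B] = entries.find(_._1 == key).map(_._2)
      def iterator: Iterator[(A, B)] = entries.iterator
      def += (kv: (A, B)): this.type = { this -= kv._1; entries += kv; this }
      def -= (key: A): this.type = {
        val i = entries.indexWhere(_._1 == key)
        if (i >= 0) entries.remove(i)
        this
      }
      override def empty: SimpleMap[A, B] = new SimpleMap[A, B]
    }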
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index 0f298c4a8a..d21624759d 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -19,7 +19,7 @@ package mutable
* `B` objects.
*
* @define coll multimap
- * @define Coll MultiMap
+ * @define Coll `MultiMap`
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index a619edf281..6b5079e402 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -70,4 +70,18 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
def undo() { throw new UnsupportedOperationException("cannot undo") }
})
}
+
+ abstract override def insertAll(n: Int, elems: collection.Traversable[A]) {
+ super.insertAll(n, elems)
+ var curr = n - 1
+ val msg = elems.foldLeft(new Script[A]() with Undoable {
+ def undo() { throw new UnsupportedOperationException("cannot undo") }
+ }) {
+ case (msg, elem) =>
+ curr += 1
+ msg += Include(Index(curr), elem)
+ }
+ publish(msg)
+ }
+
}
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 87e5c061fa..2634deb819 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -10,7 +10,7 @@ package scala.collection
package mutable
/**
- * @define Coll OpenHashMap
+ * @define Coll `OpenHashMap`
* @define coll open hash map
*
* @since 2.7
@@ -42,7 +42,7 @@ object OpenHashMap {
* @author David MacIver
* @since 2.7
*
- * @define Coll OpenHashMap
+ * @define Coll `OpenHashMap`
* @define coll open hash map
* @define mayNotTerminateInf
* @define willNotTerminateInf
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 381cb09e18..af55a01ed6 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -12,7 +12,6 @@ package scala.collection
package mutable
import generic._
-import annotation.bridge
/** This class implements priority queues using a heap.
* To prioritize elements of type A there must be an implicit
@@ -113,9 +112,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*/
def ++(xs: GenTraversableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs.seq }
- @bridge
- def ++(xs: TraversableOnce[A]): PriorityQueue[A] = ++ (xs: GenTraversableOnce[A])
-
/** Adds all elements to the queue.
*
* @param elems the elements to add.
@@ -165,10 +161,13 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*/
def clear(): Unit = { resarr.p_size0 = 1 }
- /** Returns an iterator which yields all the elements of the priority
- * queue in descending priority order.
+ /** Returns an iterator which yields all the elements.
*
- * @return an iterator over all elements sorted in descending order.
+ * Note: The order of elements returned is undefined.
+ * If you want to traverse the elements in priority queue
+ * order, use `clone().dequeueAll.iterator`.
+ *
+ * @return an iterator over all the elements.
*/
override def iterator: Iterator[A] = new AbstractIterator[A] {
private var i = 1
@@ -180,7 +179,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
}
}
-
/** Returns the reverse of this queue. The priority queue that gets
* returned will have an inversed ordering - if for some elements
* `x` and `y` the original queue's ordering
@@ -202,6 +200,13 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
revq
}
+ /** Returns an iterator which yields all the elements in the reverse of the
+ * order returned by the method `iterator`.
+ *
+ * Note: The order of elements returned is undefined.
+ *
+ * @return an iterator over all elements sorted in descending order.
+ */
def reverseIterator: Iterator[A] = new AbstractIterator[A] {
private var i = resarr.p_size0 - 1
def hasNext: Boolean = i >= 1
@@ -221,6 +226,8 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
throw new UnsupportedOperationException("unsuitable as hash key")
/** Returns a regular queue containing the same elements.
+ *
+ * Note: the order of elements is undefined.
*/
def toQueue: Queue[A] = new Queue[A] ++= this.iterator
@@ -229,6 +236,13 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return the string representation of this queue.
*/
override def toString() = toList.mkString("PriorityQueue(", ", ", ")")
+
+ /** Converts this $coll to a list.
+ *
+ * Note: the order of elements is undefined.
+ *
+ * @return a list containing all elements of this $coll.
+ */
override def toList = this.iterator.toList
/** This method clones the priority queue.
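
In line with the reworded docs: `iterator`, `toList`, and `toQueue` promise no particular order, so drain a clone to observe priority order.

    import scala.collection.mutable.PriorityQueue

    val pq = new PriorityQueue[Int]() ++= Seq(3, 1, 4, 1, 5)
    val descending = pq.clone().dequeueAll   // elements in descending priority order
    val unordered  = pq.toList               // order is undefined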
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 77b1ae21cb..605d37aec6 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -23,7 +23,7 @@ import generic._
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_queues "Scala's Collection Library overview"]]
* section on `Queues` for more information.
*
- * @define Coll mutable.Queue
+ * @define Coll `mutable.Queue`
* @define coll mutable queue
* @define orderDependent
* @define orderDependentFold
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala
index df86545a70..fb76fa609f 100644
--- a/src/library/scala/collection/mutable/QueueProxy.scala
+++ b/src/library/scala/collection/mutable/QueueProxy.scala
@@ -49,7 +49,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
/** Adds all elements provided by an iterator at the end of the queue. The
* elements are prepended in the order they are given out by the iterator.
*
- * @param iter an iterator
+ * @param it an iterator
*/
override def ++=(it: TraversableOnce[A]): this.type = {
self ++= it
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index 89b930e36f..ceed76cf88 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -21,7 +21,7 @@ import generic._
*
* The class adds an `update` method to `collection.Seq`.
*
- * @define Coll mutable.Seq
+ * @define Coll `mutable.Seq`
* @define coll mutable sequence
*/
trait Seq[A] extends Iterable[A]
@@ -36,7 +36,7 @@ trait Seq[A] extends Iterable[A]
/** $factoryInfo
* The current default implementation of a $Coll is an `ArrayBuffer`.
* @define coll mutable sequence
- * @define Coll mutable.Seq
+ * @define Coll `mutable.Seq`
*/
object Seq extends SeqFactory[Seq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
index 7194f15bc2..3a77558e94 100644
--- a/src/library/scala/collection/mutable/SeqLike.scala
+++ b/src/library/scala/collection/mutable/SeqLike.scala
@@ -28,8 +28,8 @@ trait SeqLike[A, +This <: SeqLike[A, This] with Seq[A]]
/** Replaces element at given index with a new value.
*
- * @param n the index of the element to replace.
- * @param lem the new value.
+ * @param idx the index of the element to replace.
+ * @param elem the new value.
* @throws IndexOutOfBoundsException if the index is not valid.
*/
def update(idx: Int, elem: A)
diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala
index 744768e8dd..33a99e9474 100644
--- a/src/library/scala/collection/mutable/Set.scala
+++ b/src/library/scala/collection/mutable/Set.scala
@@ -19,7 +19,7 @@ import generic._
*
* @since 1.0
* @author Matthias Zenger
- * @define Coll mutable.Set
+ * @define Coll `mutable.Set`
* @define coll mutable set
*/
trait Set[A] extends Iterable[A]
@@ -34,7 +34,7 @@ trait Set[A] extends Iterable[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `HashSet`.
* @define coll mutable set
- * @define Coll mutable.Set
+ * @define Coll `mutable.Set`
*/
object Set extends MutableSetFactory[Set] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 5e201d9959..37313c8ca3 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -170,8 +170,6 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
@migration("`++` creates a new set. Use `++=` to add elements to this set and return that set itself.", "2.8.0")
override def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
- @bridge def ++(xs: TraversableOnce[A]): This = ++(xs: GenTraversableOnce[A])
-
/** Creates a new set consisting of all the elements of this set except `elem`.
*
* @param elem the element to remove.
@@ -203,8 +201,6 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
@migration("`--` creates a new set. Use `--=` to remove elements from this set and return that set itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
- @bridge def --(xs: TraversableOnce[A]): This = --(xs: GenTraversableOnce[A])
-
/** Send a message to this scriptable object.
*
* @param cmd the message to send.
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index f41a51d3ef..78d12f3d64 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -14,7 +14,7 @@ import generic._
/**
* Base trait for mutable sorted set.
*
- * @define Coll mutable.SortedSet
+ * @define Coll `mutable.SortedSet`
* @define coll mutable sorted set
*
* @author Lucien Pereira
@@ -31,7 +31,7 @@ trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike
/**
* A template for mutable sorted set companion objects.
*
- * @define Coll mutable.SortedSet
+ * @define Coll `mutable.SortedSet`
* @define coll mutable sorted set
* @define factoryInfo
* This object provides a set of operations needed to create sorted sets of type mutable.SortedSet.
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index b70df05c55..042eac517a 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -20,7 +20,7 @@ import annotation.migration
*
* $factoryInfo
* @define coll mutable stack
- * @define Coll mutable.Stack
+ * @define Coll `mutable.Stack`
*/
object Stack extends SeqFactory[Stack] {
class StackBuilder[A] extends Builder[A, Stack[A]] {
@@ -46,7 +46,7 @@ object Stack extends SeqFactory[Stack] {
* @since 1
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#stacks "Scala's Collection Library overview"]]
* section on `Stacks` for more information.
- * @define Coll Stack
+ * @define Coll `Stack`
* @define coll stack
* @define orderDependent
* @define orderDependentFold
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 8884f03bbd..9eadfe4045 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -54,6 +54,11 @@ trait StackProxy[A] extends Stack[A] with Proxy {
this
}
+ override def push(elem: A): this.type = {
+ self.push(elem)
+ this
+ }
+
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
* element on the stack.
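The `push` override added above follows the usual proxy idiom: mutators are forwarded to the wrapped `self` and the proxy itself is returned so calls chain. A stripped-down sketch of that pattern under assumed names (LoggingStack is not a library class):

    import scala.collection.mutable.Stack

    // Hypothetical wrapper showing the forwarding idiom used by StackProxy.
    class LoggingStack[A](val self: Stack[A]) {
      def push(elem: A): this.type = {
        println("push(" + elem + ")") // behaviour layered over the wrapped stack
        self.push(elem)               // delegate the actual mutation
        this                          // return the proxy so s.push(1).push(2) works
      }
      def pop(): A = self.pop()
      def top: A  = self.top
    }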
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index d9ad58f054..08c881dbb8 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -404,7 +404,7 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
* @return the reversed StringBuilder
*/
@migration("`reverse` returns a new instance. Use `reverseContents` to update in place and return that StringBuilder itself.", "2.8.0")
- override def reverse: StringBuilder = new StringBuilder(new JavaStringBuilder(underlying) reverse)
+ override def reverse: StringBuilder = new StringBuilder(new JavaStringBuilder(underlying).reverse)
override def clone(): StringBuilder = new StringBuilder(new JavaStringBuilder(underlying))
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index 23552e9d52..1c34046e88 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -21,7 +21,7 @@ import script._
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 1
- * @define Coll SynchronizedBuffer
+ * @define Coll `SynchronizedBuffer`
* @define coll synchronized buffer
*/
trait SynchronizedBuffer[A] extends Buffer[A] {
@@ -61,7 +61,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
/** Appends a number of elements provided by a traversable object
* via its `foreach` method.
*
- * @param iter the iterable object.
+ * @param xs the iterable object.
*/
override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] {
super.++=(xs)
@@ -102,7 +102,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
/** Prepend an element to this list.
*
- * @param elem the element to prepend.
+ * @param elems the elements to prepend.
*/
override def prepend(elems: A*): Unit = prependAll(elems)
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 6e3ae13ada..037b8ec5f5 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -22,7 +22,7 @@ import annotation.migration
* @author Matthias Zenger, Martin Odersky
* @version 2.0, 31/12/2006
* @since 1
- * @define Coll SynchronizedMap
+ * @define Coll `SynchronizedMap`
* @define coll synchronized map
*/
trait SynchronizedMap[A, B] extends Map[A, B] {
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
index 159b8312b2..bc32537798 100644
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
@@ -20,7 +20,7 @@ package mutable
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
- * @define Coll SynchronizedPriorityQueue
+ * @define Coll `SynchronizedPriorityQueue`
* @define coll synchronized priority queue
*/
class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQueue[A] {
diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala
index 56f74a5b9b..9e00c5d6fd 100644
--- a/src/library/scala/collection/mutable/SynchronizedQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala
@@ -21,7 +21,7 @@ package mutable
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
- * @define Coll SynchronizedQueue
+ * @define Coll `SynchronizedQueue`
* @define coll synchronized queue
*/
class SynchronizedQueue[A] extends Queue[A] {
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index c945a859f3..c28764ff68 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -20,7 +20,7 @@ import script._
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 1
- * @define Coll SynchronizedSet
+ * @define Coll `SynchronizedSet`
* @define coll synchronized set
*/
trait SynchronizedSet[A] extends Set[A] {
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index a09ae21901..8363222295 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -21,7 +21,7 @@ package mutable
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
- * @define Coll SynchronizedStack
+ * @define Coll `SynchronizedStack`
* @define coll synchronized stack
*/
class SynchronizedStack[A] extends Stack[A] {
diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala
index 04b67c0bad..28241fdec9 100644
--- a/src/library/scala/collection/mutable/Traversable.scala
+++ b/src/library/scala/collection/mutable/Traversable.scala
@@ -29,7 +29,7 @@ trait Traversable[A] extends scala.collection.Traversable[A]
/** $factoryInfo
* The current default implementation of a $Coll is an `ArrayBuffer`.
* @define coll mutable traversable collection
- * @define Coll mutable.Traversable
+ * @define Coll `mutable.Traversable`
*/
object Traversable extends TraversableFactory[Traversable] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index 02ee811193..00675b9119 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -12,7 +12,7 @@ package mutable
import generic._
/**
- * @define Coll mutable.TreeSet
+ * @define Coll `mutable.TreeSet`
* @define coll mutable tree set
* @factoryInfo
* Companion object of TreeSet providing factory related utilities.
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 09e6088782..cd76c7de4e 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -36,16 +36,16 @@ import annotation.tailrec
* should still be avoided for such a purpose.
*
* @define coll unrolled buffer
- * @define Coll UnrolledBuffer
+ * @define Coll `UnrolledBuffer`
* @author Aleksandar Prokopec
*
*/
@SerialVersionUID(1L)
-class UnrolledBuffer[T](implicit val manifest: ClassManifest[T])
+class UnrolledBuffer[T](implicit val tag: ArrayTag[T])
extends collection.mutable.AbstractBuffer[T]
with collection.mutable.Buffer[T]
with collection.mutable.BufferLike[T, UnrolledBuffer[T]]
- with GenericClassManifestTraversableTemplate[T, UnrolledBuffer]
+ with GenericArrayTagTraversableTemplate[T, UnrolledBuffer]
with collection.mutable.Builder[T, UnrolledBuffer[T]]
with Serializable
{
@@ -67,7 +67,7 @@ extends collection.mutable.AbstractBuffer[T]
private[collection] def calcNextLength(sz: Int) = sz
- def classManifestCompanion = UnrolledBuffer
+ def arrayTagCompanion = UnrolledBuffer
   /** Concatenates the target unrolled buffer to this unrolled buffer.
*
@@ -183,11 +183,11 @@ extends collection.mutable.AbstractBuffer[T]
}
-object UnrolledBuffer extends ClassManifestTraversableFactory[UnrolledBuffer] {
+object UnrolledBuffer extends ArrayTagTraversableFactory[UnrolledBuffer] {
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[T](implicit m: ClassManifest[T]): CanBuildFrom[Coll, T, UnrolledBuffer[T]] =
+ implicit def canBuildFrom[T](implicit t: ArrayTag[T]): CanBuildFrom[Coll, T, UnrolledBuffer[T]] =
new GenericCanBuildFrom[T]
- def newBuilder[T](implicit m: ClassManifest[T]): Builder[T, UnrolledBuffer[T]] = new UnrolledBuffer[T]
+ def newBuilder[T](implicit t: ArrayTag[T]): Builder[T, UnrolledBuffer[T]] = new UnrolledBuffer[T]
val waterline = 50
val waterlineDelim = 100
@@ -195,7 +195,7 @@ object UnrolledBuffer extends ClassManifestTraversableFactory[UnrolledBuffer] {
/** Unrolled buffer node.
*/
- class Unrolled[T: ClassManifest] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) {
+ class Unrolled[T: ArrayTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) {
private[collection] def this() = this(0, new Array[T](unrolledlength), null, null)
private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b)
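The hunks above replace `ClassManifest` evidence with `ArrayTag` so the buffer can still allocate properly typed `Array[T]` chunks. ArrayTag was a 2.10 milestone abstraction later folded into `ClassTag`; a rough sketch of the same context-bound allocation pattern, written against `ClassTag` (the class below is illustrative, not the library type):

    import scala.reflect.ClassTag

    // Toy growable buffer: the context bound provides the runtime evidence
    // needed to create typed (and, for primitives, unboxed) arrays.
    class ChunkBuffer[T: ClassTag] {
      private var elems = new Array[T](8) // allocation needs the tag
      private var used  = 0

      def +=(x: T): this.type = {
        if (used == elems.length) {       // grow by doubling
          val bigger = new Array[T](elems.length * 2)
          Array.copy(elems, 0, bigger, 0, used)
          elems = bigger
        }
        elems(used) = x
        used += 1
        this
      }

      def toArray: Array[T] = {
        val out = new Array[T](used)
        Array.copy(elems, 0, out, 0, used)
        out
      }
    }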
diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala
index 4e09755acf..ec99197bb9 100644
--- a/src/library/scala/collection/mutable/WeakHashMap.scala
+++ b/src/library/scala/collection/mutable/WeakHashMap.scala
@@ -23,7 +23,7 @@ import convert.Wrappers._
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#weak_hash_maps "Scala's Collection Library overview"]]
* section on `Weak Hash Maps` for more information.
*
- * @define Coll WeakHashMap
+ * @define Coll `WeakHashMap`
* @define coll weak hash map
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `WeakHashMap[A, B]` if the elements contained in the resulting collection are
@@ -43,7 +43,7 @@ class WeakHashMap[A, B] extends JMapWrapper[A, B](new java.util.WeakHashMap)
}
/** $factoryInfo
- * @define Coll WeakHashMap
+ * @define Coll `WeakHashMap`
* @define coll weak hash map
*/
object WeakHashMap extends MutableMapFactory[WeakHashMap] {
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index fac4eb77bb..86317819a1 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -11,7 +11,8 @@
package scala.collection
package mutable
-import scala.reflect.ClassManifest
+import scala.reflect.ArrayTag
+import scala.runtime.ScalaRunTime._
import scala.collection.generic._
import scala.collection.parallel.mutable.ParArray
@@ -23,7 +24,7 @@ import scala.collection.parallel.mutable.ParArray
* @author Martin Odersky, Stephane Micheloud
* @version 1.0
* @since 2.8
- * @define Coll WrappedArray
+ * @define Coll `WrappedArray`
* @define coll wrapped array
* @define orderDependent
* @define orderDependentFold
@@ -40,8 +41,11 @@ extends AbstractSeq[T]
override protected[this] def thisCollection: WrappedArray[T] = this
override protected[this] def toCollection(repr: WrappedArray[T]): WrappedArray[T] = repr
- /** The manifest of the element type */
- def elemManifest: ClassManifest[T]
+ /** The tag of the element type */
+ def elemTag: ArrayTag[T]
+
+ @deprecated("use elemTag instead", "2.10.0")
+ def elemManifest: ClassManifest[T] = ClassManifest.fromClass[T](arrayElementClass(elemTag).asInstanceOf[Class[T]])
/** The length of the array */
def length: Int
@@ -57,11 +61,16 @@ extends AbstractSeq[T]
override def par = ParArray.handoff(array)
- override def toArray[U >: T : ClassManifest]: Array[U] =
- if (implicitly[ClassManifest[U]].erasure eq array.getClass.getComponentType)
+ private def elementClass: Class[_] =
+ arrayElementClass(repr.getClass)
+
+ override def toArray[U >: T : ArrayTag]: Array[U] = {
+ val thatElementClass = arrayElementClass(implicitly[ArrayTag[U]])
+ if (elementClass eq thatElementClass)
array.asInstanceOf[Array[U]]
else
super.toArray[U]
+ }
override def stringPrefix = "WrappedArray"
@@ -71,7 +80,7 @@ extends AbstractSeq[T]
/** Creates new builder for this collection ==> move to subclasses
*/
override protected[this] def newBuilder: Builder[T, WrappedArray[T]] =
- new WrappedArrayBuilder[T](elemManifest)
+ new WrappedArrayBuilder[T](elemTag)
}
@@ -101,7 +110,7 @@ object WrappedArray {
case x: Array[Unit] => new ofUnit(x)
}).asInstanceOf[WrappedArray[T]]
- implicit def canBuildFrom[T](implicit m: ClassManifest[T]): CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] =
+ implicit def canBuildFrom[T](implicit m: ArrayTag[T]): CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] =
new CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] {
def apply(from: WrappedArray[_]): Builder[T, WrappedArray[T]] =
ArrayBuilder.make[T]()(m) mapResult WrappedArray.make[T]
@@ -112,70 +121,70 @@ object WrappedArray {
def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer
final class ofRef[T <: AnyRef](val array: Array[T]) extends WrappedArray[T] with Serializable {
- lazy val elemManifest = ClassManifest[T](array.getClass.getComponentType)
+ lazy val elemTag = ClassTag[T](arrayElementClass(array.getClass))
def length: Int = array.length
def apply(index: Int): T = array(index).asInstanceOf[T]
def update(index: Int, elem: T) { array(index) = elem }
}
final class ofByte(val array: Array[Byte]) extends WrappedArray[Byte] with Serializable {
- def elemManifest = ClassManifest.Byte
+ def elemTag = ClassTag.Byte
def length: Int = array.length
def apply(index: Int): Byte = array(index)
def update(index: Int, elem: Byte) { array(index) = elem }
}
final class ofShort(val array: Array[Short]) extends WrappedArray[Short] with Serializable {
- def elemManifest = ClassManifest.Short
+ def elemTag = ClassTag.Short
def length: Int = array.length
def apply(index: Int): Short = array(index)
def update(index: Int, elem: Short) { array(index) = elem }
}
final class ofChar(val array: Array[Char]) extends WrappedArray[Char] with Serializable {
- def elemManifest = ClassManifest.Char
+ def elemTag = ClassTag.Char
def length: Int = array.length
def apply(index: Int): Char = array(index)
def update(index: Int, elem: Char) { array(index) = elem }
}
final class ofInt(val array: Array[Int]) extends WrappedArray[Int] with Serializable {
- def elemManifest = ClassManifest.Int
+ def elemTag = ClassTag.Int
def length: Int = array.length
def apply(index: Int): Int = array(index)
def update(index: Int, elem: Int) { array(index) = elem }
}
final class ofLong(val array: Array[Long]) extends WrappedArray[Long] with Serializable {
- def elemManifest = ClassManifest.Long
+ def elemTag = ClassTag.Long
def length: Int = array.length
def apply(index: Int): Long = array(index)
def update(index: Int, elem: Long) { array(index) = elem }
}
final class ofFloat(val array: Array[Float]) extends WrappedArray[Float] with Serializable {
- def elemManifest = ClassManifest.Float
+ def elemTag = ClassTag.Float
def length: Int = array.length
def apply(index: Int): Float = array(index)
def update(index: Int, elem: Float) { array(index) = elem }
}
final class ofDouble(val array: Array[Double]) extends WrappedArray[Double] with Serializable {
- def elemManifest = ClassManifest.Double
+ def elemTag = ClassTag.Double
def length: Int = array.length
def apply(index: Int): Double = array(index)
def update(index: Int, elem: Double) { array(index) = elem }
}
final class ofBoolean(val array: Array[Boolean]) extends WrappedArray[Boolean] with Serializable {
- def elemManifest = ClassManifest.Boolean
+ def elemTag = ClassTag.Boolean
def length: Int = array.length
def apply(index: Int): Boolean = array(index)
def update(index: Int, elem: Boolean) { array(index) = elem }
}
final class ofUnit(val array: Array[Unit]) extends WrappedArray[Unit] with Serializable {
- def elemManifest = ClassManifest.Unit
+ def elemTag = ClassTag.Unit
def length: Int = array.length
def apply(index: Int): Unit = array(index)
def update(index: Int, elem: Unit) { array(index) = elem }
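The rewritten `toArray` above short-circuits when the requested element class is exactly the wrapped array's element class, returning the backing array instead of copying. A simplified sketch of that check using `ClassTag` (which in later releases plays the role the diff gives to `ArrayTag`; the helper name is made up):

    import scala.reflect.ClassTag

    object ToArrayFastPath {
      // Return the backing array itself when its runtime element class already
      // matches the requested one; otherwise build a fresh, properly typed copy.
      def toArrayOf[U](backing: Array[U])(implicit tag: ClassTag[U]): Array[U] =
        if (backing.getClass.getComponentType eq tag.runtimeClass) backing
        else {
          val copy = new Array[U](backing.length)
          Array.copy(backing, 0, copy, 0, backing.length)
          copy
        }
    }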
diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
index fce65468e9..99a0b0ede3 100644
--- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
@@ -12,23 +12,27 @@ package scala.collection
package mutable
import generic._
-import scala.reflect.ClassManifest
+import scala.reflect.ArrayTag
+import scala.runtime.ScalaRunTime._
/** A builder class for arrays.
*
- * @tparam A type of elements that can be added to this builder.
- * @param manifest class manifest for objects of type `A`.
+ * @tparam A type of elements that can be added to this builder.
+ * @param tag class tag for objects of type `A`.
*
* @since 2.8
*/
-class WrappedArrayBuilder[A](manifest: ClassManifest[A]) extends Builder[A, WrappedArray[A]] {
+class WrappedArrayBuilder[A](tag: ArrayTag[A]) extends Builder[A, WrappedArray[A]] {
+
+ @deprecated("use tag instead", "2.10.0")
+ val manifest: ArrayTag[A] = tag
private var elems: WrappedArray[A] = _
private var capacity: Int = 0
private var size: Int = 0
private def mkArray(size: Int): WrappedArray[A] = {
- val erasure = manifest.erasure
+ val erasure = arrayElementClass(tag)
val newelems = erasure match {
case java.lang.Byte.TYPE => new WrappedArray.ofByte(new Array[Byte](size)).asInstanceOf[WrappedArray[A]]
case java.lang.Short.TYPE => new WrappedArray.ofShort(new Array[Short](size)).asInstanceOf[WrappedArray[A]]
@@ -39,7 +43,7 @@ class WrappedArrayBuilder[A](manifest: ClassManifest[A]) extends Builder[A, Wrap
case java.lang.Double.TYPE => new WrappedArray.ofDouble(new Array[Double](size)).asInstanceOf[WrappedArray[A]]
case java.lang.Boolean.TYPE => new WrappedArray.ofBoolean(new Array[Boolean](size)).asInstanceOf[WrappedArray[A]]
case java.lang.Void.TYPE => new WrappedArray.ofUnit(new Array[Unit](size)).asInstanceOf[WrappedArray[A]]
- case _ => new WrappedArray.ofRef[A with AnyRef](manifest.newArray(size).asInstanceOf[Array[A with AnyRef]]).asInstanceOf[WrappedArray[A]]
+ case _ => new WrappedArray.ofRef[A with AnyRef](tag.newArray(size).asInstanceOf[Array[A with AnyRef]]).asInstanceOf[WrappedArray[A]]
}
if (this.size > 0) Array.copy(elems.array, 0, newelems.array, 0, this.size)
newelems
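`mkArray` above picks a primitive-specialized `WrappedArray` by matching on the `java.lang.*.TYPE` constants carried by the evidence. A hedged sketch of the same dispatch, reduced to allocating a raw array from a `ClassTag` (the helper name is illustrative):

    import scala.reflect.ClassTag

    object PrimitiveArrayAlloc {
      // Allocate an array whose runtime representation matches the element class:
      // an unboxed primitive array where possible, otherwise whatever the tag builds.
      def alloc[A](size: Int)(implicit tag: ClassTag[A]): Array[A] =
        (tag.runtimeClass match {
          case java.lang.Integer.TYPE => new Array[Int](size)
          case java.lang.Double.TYPE  => new Array[Double](size)
          case java.lang.Boolean.TYPE => new Array[Boolean](size)
          case _                      => tag.newArray(size) // reference and other types
        }).asInstanceOf[Array[A]]
    }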
diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala
index 0dd4405cf7..ac5cb66942 100644
--- a/src/library/scala/collection/package.scala
+++ b/src/library/scala/collection/package.scala
@@ -14,7 +14,7 @@ package scala
* == Guide ==
*
* A detailed guide for the collections library is available
- * at [[http://www.scala-lang.org/docu/files/collections-api]].
+ * at [[http://docs.scala-lang.org/overviews/collections/introduction.html]].
*
* == Using Collections ==
*
@@ -45,7 +45,7 @@ package scala
*
* The most common way to create a collection is to use the companion objects as factories.
* Of these, the three most common
- * are [[scala.collection.immutable.Seq]], [[scala.collection.immutable.Set]], and [[scala.collection.immutable.Map]]. Their
+ * are [[scala.collection.Seq]], [[scala.collection.immutable.Set]], and [[scala.collection.immutable.Map]]. Their
* companion objects are all available
 * as type aliases in either the [[scala]] package or in `scala.Predef`, and can be used
* like so:
@@ -61,13 +61,13 @@ package scala
* }}}
*
* It is also typical to use the [[scala.collection.immutable]] collections over those
- * in [[scala.collection.mutable]]; The types aliased in the [[scala]] package and
+ * in [[scala.collection.mutable]]; The types aliased in
* the `scala.Predef` object are the immutable versions.
*
* Also note that the collections library was carefully designed to include several implementations of
* each of the three basic collection types. These implementations have specific performance
* characteristics which are described
- * in [[http://www.scala-lang.org/docu/files/collections-api the guide]].
+ * in [[http://docs.scala-lang.org/overviews/collections/performance-characteristics.html the guide]].
*
* === Converting between Java Collections ===
*
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index 0b5faf15ee..0bd6abaf78 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -24,7 +24,7 @@ import scala.collection.parallel.mutable.ParArray
* @author Aleksandar Prokopec
* @since 2.9
*
- * @define Coll ParIterable
+ * @define Coll `ParIterable`
* @define coll parallel iterable
*/
trait ParIterable[+T]
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 5551c04ce2..321f259f5d 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -29,6 +29,7 @@ import java.util.concurrent.atomic.AtomicBoolean
import annotation.unchecked.uncheckedVariance
import annotation.unchecked.uncheckedStable
+import language.implicitConversions
/** A template trait for parallel collections of type `ParIterable[T]`.
@@ -154,15 +155,15 @@ extends GenIterableLike[T, Repr]
with HasNewCombiner[T, Repr]
{
self: ParIterableLike[T, Repr, Sequential] =>
-
+
@transient
@volatile
private var _tasksupport = defaultTaskSupport
-
+
protected def initTaskSupport() {
_tasksupport = defaultTaskSupport
}
-
+
def tasksupport = {
val ts = _tasksupport
if (ts eq null) {
@@ -170,17 +171,37 @@ self: ParIterableLike[T, Repr, Sequential] =>
defaultTaskSupport
} else ts
}
-
+
def tasksupport_=(ts: TaskSupport) = _tasksupport = ts
-
+
def seq: Sequential
def repr: Repr = this.asInstanceOf[Repr]
+ final def isTraversableAgain = true
+
def hasDefiniteSize = true
+ def isEmpty = size == 0
+
def nonEmpty = size != 0
-
+
+ def head = iterator.next
+
+ def headOption = if (nonEmpty) Some(head) else None
+
+ def tail = drop(1)
+
+ def last = {
+ var lst = head
+ for (x <- this.seq) lst = x
+ lst
+ }
+
+ def lastOption = if (nonEmpty) Some(last) else None
+
+ def init = take(size - 1)
+
/** Creates a new parallel iterator used to traverse the elements of this parallel collection.
 * This iterator is more specific than the one returned by `iterator`, and augmented
* with additional accessor and transformer methods.
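The defaults introduced in the hunk above (`isEmpty`, `head`, `last`, `init`, ...) are expressed entirely through other collection operations, so every `ParIterableLike` subtype inherits consistent behaviour for free; `last` in particular is just a linear pass over the sequential view. A sketch of the same style of defaults on a tiny, hypothetical trait:

    // Illustrative only: default members defined via other abstract operations,
    // mirroring the additions to ParIterableLike.
    trait MiniColl[T] {
      def size: Int
      def iterator: Iterator[T]
      def seq: Iterable[T]
      def take(n: Int): MiniColl[T]
      def drop(n: Int): MiniColl[T]

      def isEmpty: Boolean      = size == 0
      def nonEmpty: Boolean     = !isEmpty
      def head: T               = iterator.next()
      def headOption: Option[T] = if (nonEmpty) Some(head) else None
      def tail: MiniColl[T]     = drop(1)
      def last: T               = { var lst = head; for (x <- seq) lst = x; lst }
      def lastOption: Option[T] = if (nonEmpty) Some(last) else None
      def init: MiniColl[T]     = take(size - 1)
    }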
@@ -239,7 +260,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
trait BuilderOps[Elem, To] {
trait Otherwise[Cmb] {
- def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]): Unit
+ def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]): Unit
}
def ifIs[Cmb](isbody: Cmb => Unit): Otherwise[Cmb]
@@ -281,8 +302,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new BuilderOps[Elem, To] {
def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] {
- def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]) {
- if (cb.getClass == m.erasure) isbody(cb.asInstanceOf[Cmb]) else notbody
+ def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]) {
+ if (cb.getClass == t.erasure) isbody(cb.asInstanceOf[Cmb]) else notbody
}
}
def isCombiner = cb.isInstanceOf[Combiner[_, _]]
@@ -470,8 +491,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
*
* $abortsignalling
*
- * @param p a predicate used to test elements
- * @return true if `p` holds for all elements, false otherwise
+ * @param pred a predicate used to test elements
+ * @return true if `pred` holds for all elements, false otherwise
*/
def forall(pred: T => Boolean): Boolean = {
tasksupport.executeAndWaitResult(new Forall(pred, splitter assign new DefaultSignalling with VolatileAbort))
@@ -481,8 +502,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
*
* $abortsignalling
*
- * @param p a predicate used to test elements
- * @return true if `p` holds for some element, false otherwise
+ * @param pred a predicate used to test elements
+ * @return true if `pred` holds for some element, false otherwise
*/
def exists(pred: T => Boolean): Boolean = {
tasksupport.executeAndWaitResult(new Exists(pred, splitter assign new DefaultSignalling with VolatileAbort))
@@ -496,8 +517,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
*
* $abortsignalling
*
- * @param p predicate used to test the elements
- * @return an option value with the element if such an element exists, or `None` otherwise
+ * @param pred predicate used to test the elements
+ * @return an option value with the element if such an element exists, or `None` otherwise
*/
def find(pred: T => Boolean): Option[T] = {
tasksupport.executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
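`forall`, `exists` and `find` above all install a signalling context with `VolatileAbort`, so worker tasks can stop as soon as the overall answer is known. From the caller's side they behave like their sequential counterparts; a small usage sketch against the 2.10-era parallel collections (the demo object name is made up):

    object ParPredicatesDemo extends App {
      val xs = (1 to 1000000).par

      // Each call may abort remaining work once the result is determined.
      println(xs.forall(_ > 0))            // true
      println(xs.exists(_ % 999983 == 0))  // true
      println(xs.find(_ > 999990))         // Some(n); which n is nondeterministic
    }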
@@ -664,7 +685,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @tparam That type of the resulting collection
* @param z neutral element for the operator `op`
* @param op the associative operator for the scan
- * @param cbf combiner factory which provides a combiner
+ * @param bf $bfinfo
* @return a collection containing the prefix scan of the elements in the original collection
*
* @usecase def scan(z: T)(op: (T, T) => T): $Coll[T]
@@ -753,7 +774,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
cntx.setIndexFlag(Int.MaxValue)
tasksupport.executeAndWaitResult(
new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult {
- _._2.resultWithTaskSupport
+ _._2.resultWithTaskSupport
}
)
}
@@ -801,7 +822,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def size = splitter.remaining
}
- override def toArray[U >: T: ClassManifest]: Array[U] = {
+ override def toArray[U >: T: ArrayTag]: Array[U] = {
val arr = new Array[U](size)
copyToArray(arr)
arr
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 536139c812..91eefc2aa5 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -18,6 +18,7 @@ import scala.collection.GenSeq
import scala.collection.generic.{ CanBuildFrom, SliceInterval }
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.immutable.ParRange
+import language.implicitConversions
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index b3c527da84..be5ab03ba7 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -162,7 +162,7 @@ self =>
*
* $abortsignalling
*
- * @tparam U the element type of `that` parallel sequence
+ * @tparam S the element type of `that` parallel sequence
* @param that the parallel sequence this sequence is being searched for
* @param offset the starting offset for the search
* @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise
@@ -287,9 +287,7 @@ self =>
/** Computes the multiset intersection between this $coll and another sequence.
*
* @param that the sequence of elements to intersect with.
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
+ * @tparam U the element type of `that` parallel sequence
* @return a new collection of type `That` which contains all elements of this $coll
* which also appear in `that`.
* If an element value `x` appears
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index c5910ff2c8..a67a4d8eb7 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -39,8 +39,7 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] {
/** Augments iterators with additional methods, mostly transformers,
* assuming they iterate an iterable collection.
*
- * @param T type of the elements iterated.
- * @param IterRepr iterator type.
+ * @tparam T type of the elements iterated.
*/
private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[T] {
@@ -377,7 +376,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
/** Parallel iterators allow splitting and provide a `remaining` method to
* obtain the number of elements remaining in the iterator.
*
- * @param T type of the elements iterated.
+ * @tparam T type of the elements iterated.
*/
trait IterableSplitter[+T]
extends AugmentedIterableIterator[T]
@@ -537,7 +536,7 @@ self =>
/** Parallel sequence iterators allow splitting into arbitrary subsets.
*
- * @param T type of the elements iterated.
+ * @tparam T type of the elements iterated.
*/
trait SeqSplitter[+T]
extends IterableSplitter[T]
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 4a581f219e..7a0116b3b3 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -541,7 +541,7 @@ trait ExecutionContextTasks extends Tasks {
// this part is a hack which allows switching
val driver: Tasks = executionContext match {
- case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executorService match {
+ case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executor match {
case fjp: ForkJoinPool => new ForkJoinTaskSupport(fjp)
case tpe: ThreadPoolExecutor => new ThreadPoolTaskSupport(tpe)
case _ => ???
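The match above peeks at the executor behind an `ExecutionContextImpl` and chooses a matching `Tasks` back end (fork/join vs. thread pool). User code can make the analogous choice explicitly by assigning a `TaskSupport` to a parallel collection; a sketch against the 2.10-era API:

    import scala.collection.parallel.ForkJoinTaskSupport
    import scala.concurrent.forkjoin.ForkJoinPool

    object TaskSupportDemo extends App {
      val xs = (1 to 100).par
      // Route this collection's operations through an explicit fork/join pool.
      xs.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
      println(xs.map(_ * 2).sum)
    }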
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index e630a9dbed..ad882390c8 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -39,7 +39,7 @@ import collection.parallel.Task
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]]
* section on Parallel Hash Tries for more information.
*
- * @define Coll immutable.ParHashMap
+ * @define Coll `immutable.ParHashMap`
* @define coll immutable parallel hash map
*/
@SerialVersionUID(1L)
@@ -140,7 +140,7 @@ self =>
/** $factoryInfo
- * @define Coll immutable.ParHashMap
+ * @define Coll `immutable.ParHashMap`
* @define coll immutable parallel hash map
*/
object ParHashMap extends ParMapFactory[ParHashMap] {
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 084637c5dc..d1899601d7 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -38,7 +38,7 @@ import collection.parallel.Task
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]]
* section on Parallel Hash Tries for more information.
*
- * @define Coll immutable.ParHashSet
+ * @define Coll `immutable.ParHashSet`
* @define coll immutable parallel hash set
*/
@SerialVersionUID(1L)
@@ -118,7 +118,7 @@ self =>
/** $factoryInfo
- * @define Coll immutable.ParHashSet
+ * @define Coll `immutable.ParHashSet`
* @define coll immutable parallel hash set
*/
object ParHashSet extends ParSetFactory[ParHashSet] {
diff --git a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled b/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
index fb411ec0ac..04bc8b8d29 100644
--- a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
+++ b/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
@@ -29,7 +29,7 @@ import scala.collection.parallel.ParIterableIterator
* @author Aleksandar Prokopec
* @since 2.9
*
- * @define Coll immutable.ParRange
+ * @define Coll `immutable.ParRange`
* @define coll immutable parallel range
*/
@SerialVersionUID(1L)
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 277fd5fdd3..9553704caa 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -28,7 +28,7 @@ import scala.collection.Iterator
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_range Scala's Parallel Collections Library overview]]
* section on `ParRange` for more information.
*
- * @define Coll immutable.ParRange
+ * @define Coll `immutable.ParRange`
* @define coll immutable parallel range
*/
@SerialVersionUID(1L)
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index bf3d3a5aa8..dde6533c82 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -24,7 +24,7 @@ import scala.collection.GenSeq
/** An immutable variant of `ParSeq`.
*
- * @define Coll mutable.ParSeq
+ * @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
trait ParSeq[+T]
@@ -40,7 +40,7 @@ extends collection/*.immutable*/.GenSeq[T]
/** $factoryInfo
- * @define Coll mutable.ParSeq
+ * @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
object ParSeq extends ParFactory[ParSeq] {
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index d64858ed10..40429280ac 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -16,7 +16,7 @@ import scala.collection.parallel.Combiner
/** An immutable variant of `ParSet`.
*
- * @define Coll mutable.ParSet
+ * @define Coll `mutable.ParSet`
* @define coll mutable parallel set
*/
trait ParSet[T]
@@ -38,7 +38,7 @@ self =>
}
/** $factoryInfo
- * @define Coll mutable.ParSet
+ * @define Coll `mutable.ParSet`
* @define coll mutable parallel set
*/
object ParSet extends ParSetFactory[ParSet] {
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 8baa84b77c..1ece663a1d 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -37,7 +37,7 @@ import immutable.VectorIterator
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_vector Scala's Parallel Collections Library overview]]
* section on `ParVector` for more information.
*
- * @define Coll immutable.ParVector
+ * @define Coll `immutable.ParVector`
* @define coll immutable parallel vector
*/
class ParVector[+T](private[this] val vector: Vector[T])
@@ -86,7 +86,7 @@ extends ParSeq[T]
/** $factoryInfo
- * @define Coll immutable.ParVector
+ * @define Coll `immutable.ParVector`
* @define coll immutable parallel vector
*/
object ParVector extends ParFactory[ParVector] {
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 8cc0b95997..29d84408db 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -49,7 +49,7 @@ import scala.collection.GenTraversableOnce
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_array Scala's Parallel Collections Library overview]]
* section on `ParArray` for more information.
*
- * @define Coll ParArray
+ * @define Coll `ParArray`
* @define coll parallel array
*
*/
@@ -676,7 +676,7 @@ self =>
private def readObject(in: java.io.ObjectInputStream) {
in.defaultReadObject
-
+
// get raw array from arrayseq
array = arrayseq.array.asInstanceOf[Array[Any]]
}
@@ -685,7 +685,7 @@ self =>
/** $factoryInfo
- * @define Coll mutable.ParArray
+ * @define Coll `mutable.ParArray`
* @define coll parallel array
*/
object ParArray extends ParFactory[ParArray] {
@@ -706,7 +706,7 @@ object ParArray extends ParFactory[ParArray] {
case _ => new ParArray[T](new ExposedArraySeq[T](runtime.ScalaRunTime.toObjectArray(arr), sz))
}
- def createFromCopy[T <: AnyRef : ClassManifest](arr: Array[T]): ParArray[T] = {
+ def createFromCopy[T <: AnyRef : ArrayTag](arr: Array[T]): ParArray[T] = {
val newarr = new Array[T](arr.length)
Array.copy(arr, 0, newarr, 0, arr.length)
handoff(newarr)
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index 35c748916c..d0c7f6050e 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -15,7 +15,7 @@ import collection.parallel.IterableSplitter
*
* @tparam T type of the elements in the $coll.
* @define coll table
- * @define Coll flat hash table
+ * @define Coll `ParFlatHashTable`
*
* @author Aleksandar Prokopec
*/
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 23b23d55a1..8d39d6e0de 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -26,9 +26,10 @@ import collection.parallel.Task
* `ParHashMap` is a parallel map which internally keeps elements within a hash table.
* It uses chaining to resolve collisions.
*
- * @tparam T type of the elements in the parallel hash map
+ * @tparam K type of the keys in the parallel hash map
+ * @tparam V type of the values in the parallel hash map
*
- * @define Coll ParHashMap
+ * @define Coll `ParHashMap`
* @define coll parallel hash map
*
* @author Aleksandar Prokopec
@@ -141,7 +142,7 @@ self =>
/** $factoryInfo
- * @define Coll mutable.ParHashMap
+ * @define Coll `mutable.ParHashMap`
* @define coll parallel hash map
*/
object ParHashMap extends ParMapFactory[ParHashMap] {
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 4e9a38c13f..783f8dce77 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -25,7 +25,7 @@ import collection.parallel.Task
*
* @tparam T type of the elements in the $coll.
*
- * @define Coll ParHashSet
+ * @define Coll `ParHashSet`
* @define coll parallel hash set
*
* @author Aleksandar Prokopec
@@ -104,7 +104,7 @@ extends ParSet[T]
/** $factoryInfo
- * @define Coll mutable.ParHashSet
+ * @define Coll `mutable.ParHashSet`
* @define coll parallel hash set
*/
object ParHashSet extends ParSetFactory[ParHashSet] {
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index a48ba48d56..f46b369494 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -26,7 +26,7 @@ import scala.collection.GenSeq
/** A mutable variant of `ParSeq`.
*
- * @define Coll mutable.ParSeq
+ * @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
trait ParSeq[T] extends collection/*.mutable*/.GenSeq[T] // was: collection.mutable.Seq[T]
@@ -47,7 +47,7 @@ self =>
/** $factoryInfo
- * @define Coll mutable.ParSeq
+ * @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
object ParSeq extends ParFactory[ParSeq] {
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 1d295fd5fe..6da4c8a7bc 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -21,7 +21,7 @@ import scala.collection.GenSet
/** A mutable variant of `ParSet`.
*
- * @define Coll mutable.ParSet
+ * @define Coll `mutable.ParSet`
* @define coll mutable parallel set
*
* @author Aleksandar Prokopec
@@ -41,7 +41,7 @@ self =>
/** $factoryInfo
- * @define Coll mutable.ParSet
+ * @define Coll `mutable.ParSet`
* @define coll mutable parallel set
*/
object ParSet extends ParSetFactory[ParSet] {
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index 410b542a68..43d40776bf 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -26,7 +26,7 @@ import scala.collection.parallel.Task
-private[mutable] class DoublingUnrolledBuffer[T](implicit m: ClassManifest[T]) extends UnrolledBuffer[T]()(m) {
+private[mutable] class DoublingUnrolledBuffer[T](implicit t: ArrayTag[T]) extends UnrolledBuffer[T]()(t) {
override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
protected override def newUnrolled = new Unrolled[T](0, new Array[T](4), null, this)
}
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 943e0208c7..e3124af12e 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -13,6 +13,7 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.mutable.ParArray
import scala.collection.mutable.UnrolledBuffer
import annotation.unchecked.uncheckedVariance
+import language.implicitConversions
/** Package object for parallel collections.
*/
diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala
index f6d6341151..7aeccd8d05 100644
--- a/src/library/scala/concurrent/Channel.scala
+++ b/src/library/scala/concurrent/Channel.scala
@@ -25,7 +25,6 @@ class Channel[A] {
private var nreaders = 0
/**
- * @param x ...
*/
def write(x: A) = synchronized {
lastWritten.elem = x
diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala
index d185ade8a4..330a2f0e25 100644
--- a/src/library/scala/concurrent/ConcurrentPackageObject.scala
+++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala
@@ -8,11 +8,10 @@
package scala.concurrent
-import java.util.concurrent.{ Executors, ExecutorService, ThreadFactory }
+import java.util.concurrent.{ Executors, Executor, ThreadFactory }
import scala.concurrent.forkjoin.{ ForkJoinPool, ForkJoinWorkerThread }
import scala.concurrent.util.Duration
-import ConcurrentPackageObject._
-
+import language.implicitConversions
/** This package object contains primitives for concurrent and parallel programming.
@@ -20,9 +19,9 @@ import ConcurrentPackageObject._
abstract class ConcurrentPackageObject {
/** A global execution environment for executing lightweight tasks.
*/
- lazy val defaultExecutionContext = new impl.ExecutionContextImpl(null)
+ lazy val defaultExecutionContext: ExecutionContext with Executor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
- private val currentExecutionContext = new ThreadLocal[ExecutionContext]
+ val currentExecutionContext = new ThreadLocal[ExecutionContext]
val handledFutureException: PartialFunction[Throwable, Throwable] = {
case t: Throwable if isFutureThrowable(t) => t
@@ -36,34 +35,29 @@ abstract class ConcurrentPackageObject {
case _ => true
}
- private[concurrent] def resolve[T](source: Either[Throwable, T]): Either[Throwable, T] = source match {
- case Left(t: scala.runtime.NonLocalReturnControl[_]) => Right(t.value.asInstanceOf[T])
- case Left(t: scala.util.control.ControlThrowable) => Left(new ExecutionException("Boxed ControlThrowable", t))
- case Left(t: InterruptedException) => Left(new ExecutionException("Boxed InterruptedException", t))
- case Left(e: Error) => Left(new ExecutionException("Boxed Error", e))
- case _ => source
- }
-
- private[concurrent] def resolver[T] =
- resolverFunction.asInstanceOf[PartialFunction[Throwable, Either[Throwable, T]]]
-
/* concurrency constructs */
+ /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
+ *
+ * The result becomes available once the asynchronous computation is completed.
+ *
+ * @tparam T the type of the result
+ * @param body the asynchronous computation
+ * @param execctx the execution context on which the future is run
+ * @return the `Future` holding the result of the computation
+ */
def future[T](body: =>T)(implicit execctx: ExecutionContext = defaultExecutionContext): Future[T] =
Future[T](body)
+ /** Creates a promise object which can be completed with a value.
+ *
+ * @tparam T the type of the value in the promise
+ * @param execctx the execution context on which the promise is created
+ * @return the newly created `Promise` object
+ */
def promise[T]()(implicit execctx: ExecutionContext = defaultExecutionContext): Promise[T] =
Promise[T]()
- /** Wraps a block of code into an awaitable object. */
- def body2awaitable[T](body: =>T) = new Awaitable[T] {
- def ready(atMost: Duration)(implicit permit: CanAwait) = {
- body
- this
- }
- def result(atMost: Duration)(implicit permit: CanAwait) = body
- }
-
/** Used to block on a piece of code which potentially blocks.
*
* @param body A piece of code which contains potentially blocking or long running calls.
@@ -74,7 +68,7 @@ abstract class ConcurrentPackageObject {
* - TimeoutException - in the case that the blockable object timed out
*/
def blocking[T](body: =>T): T =
- blocking(body2awaitable(body), Duration.fromNanos(0))
+ blocking(impl.Future.body2awaitable(body), Duration.Inf)
/** Blocks on an awaitable object.
*
@@ -85,26 +79,12 @@ abstract class ConcurrentPackageObject {
* - InterruptedException - in the case that a wait within the blockable object was interrupted
* - TimeoutException - in the case that the blockable object timed out
*/
- def blocking[T](awaitable: Awaitable[T], atMost: Duration): T =
+ def blocking[T](awaitable: Awaitable[T], atMost: Duration): T = {
currentExecutionContext.get match {
- case null => Await.result(awaitable, atMost)
+ case null => awaitable.result(atMost)(Await.canAwaitEvidence)
case ec => ec.internalBlockingCall(awaitable, atMost)
}
+ }
@inline implicit final def int2durationops(x: Int): DurationOps = new DurationOps(x)
}
-
-private[concurrent] object ConcurrentPackageObject {
- // TODO, docs, return type
- // Note that having this in the package object led to failures when
- // compiling a subset of sources; it seems that the wildcard is not
- // properly handled, and you get messages like "type _$1 defined twice".
- // This is consistent with other package object breakdowns.
- private val resolverFunction: PartialFunction[Throwable, Either[Throwable, _]] = {
- case t: scala.runtime.NonLocalReturnControl[_] => Right(t.value)
- case t: scala.util.control.ControlThrowable => Left(new ExecutionException("Boxed ControlThrowable", t))
- case t: InterruptedException => Left(new ExecutionException("Boxed InterruptedException", t))
- case e: Error => Left(new ExecutionException("Boxed Error", e))
- case t => Left(t)
- }
-}
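The documentation added above describes the package-level `future`, `promise` and `blocking` primitives of this milestone. A hedged usage sketch written against the shape the API settled on in released 2.10 (where the same operations live on the `Future` and `Promise` companions and the global execution context is implicit):

    import scala.concurrent.{ Await, Future, Promise }
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    object FuturePromiseDemo extends App {
      // An asynchronous computation; its result becomes available on completion.
      val f: Future[Int] = Future { 21 * 2 }

      // A promise is completed explicitly and hands out a read-only future.
      val p = Promise[String]()
      p.success("done")

      println(Await.result(f, 1.second))        // 42
      println(Await.result(p.future, 1.second)) // done
    }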
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index e1d4276396..436a17a33b 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -20,19 +20,22 @@ import collection._
trait ExecutionContext {
-
+
+ /** Runs a block of code on this execution context.
+ */
def execute(runnable: Runnable): Unit
-
- def execute[U](body: () => U): Unit
-
+
+ /** Used internally by the framework - blocks execution for at most `atMost` time while waiting
+ * for an `awaitable` object to become ready.
+ *
+ * Clients should use `scala.concurrent.blocking` instead.
+ */
def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T
+ /** Reports that an asynchronous computation failed.
+ */
def reportFailure(t: Throwable): Unit
-
- /* implementations follow */
-
- private implicit val executionContext = this
-
+
}
@@ -44,11 +47,19 @@ object ExecutionContext {
/** Creates an `ExecutionContext` from the given `ExecutorService`.
*/
- def fromExecutorService(e: ExecutorService): ExecutionContext with Executor = new impl.ExecutionContextImpl(e)
+ def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit = defaultReporter): ExecutionContext with ExecutorService =
+ impl.ExecutionContextImpl.fromExecutorService(e, reporter)
/** Creates an `ExecutionContext` from the given `Executor`.
*/
- def fromExecutor(e: Executor): ExecutionContext with Executor = new impl.ExecutionContextImpl(e)
+ def fromExecutor(e: Executor, reporter: Throwable => Unit = defaultReporter): ExecutionContext with Executor =
+ impl.ExecutionContextImpl.fromExecutor(e, reporter)
+
+ def defaultReporter: Throwable => Unit = {
+ // re-throwing `Error`s here causes an exception handling test to fail.
+ //case e: Error => throw e
+ case t => t.printStackTrace()
+ }
}
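`fromExecutor` and `fromExecutorService` now accept an optional failure reporter, falling back to `defaultReporter` (print the stack trace). A small sketch of wrapping a plain `java.util.concurrent` pool with a custom reporter, as in released 2.10 (the object name is made up):

    import java.util.concurrent.Executors
    import scala.concurrent.ExecutionContext

    object CustomEcDemo extends App {
      val pool = Executors.newFixedThreadPool(2)

      // The reporter replaces the default printStackTrace behaviour.
      val ec: ExecutionContext = ExecutionContext.fromExecutorService(
        pool,
        (t: Throwable) => System.err.println("task failed: " + t)
      )

      ec.execute(new Runnable { def run() = println("running on the pool") })
      pool.shutdown()
    }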
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 5bc9ad783f..c42393eee2 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -18,12 +18,14 @@ import java.{ lang => jl }
import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
import scala.concurrent.util.Duration
+import scala.concurrent.impl.NonFatal
import scala.Option
import scala.annotation.tailrec
import scala.collection.mutable.Stack
import scala.collection.mutable.Builder
import scala.collection.generic.CanBuildFrom
+import language.higherKinds
@@ -80,9 +82,29 @@ import scala.collection.generic.CanBuildFrom
* {{{
* f flatMap { (x: Int) => g map { (y: Int) => x + y } }
* }}}
+ *
+ * @define callbackInContext
+ * The provided callback always runs in the provided implicit
+ * `ExecutionContext`, though there is no guarantee that the
+ * `execute()` method on the `ExecutionContext` will be called once
+ * per callback or that `execute()` will be called in the current
+ * thread. That is, the implementation may run multiple callbacks
+ * in a batch within a single `execute()` and it may run
+ * `execute()` either immediately or asynchronously.
*/
trait Future[+T] extends Awaitable[T] {
+ // The executor within the lexical scope
+ // of the Future trait. Note that this will
+ // (modulo bugs) _never_ execute a callback
+ // other than those below in this same file.
+ // As a nice side benefit, having this implicit
+ // here forces an ambiguity in those methods
+ // that also have an executor parameter, which
+ // keeps us from accidentally forgetting to use
+ // the executor parameter.
+ private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
+
/* Callbacks */
/** When this future is completed successfully (i.e. with a value),
@@ -93,11 +115,12 @@ trait Future[+T] extends Awaitable[T] {
* this will either be applied immediately or be scheduled asynchronously.
*
* $multipleCallbacks
+ * $callbackInContext
*/
- def onSuccess[U](pf: PartialFunction[T, U]): this.type = onComplete {
- case Left(t) => // do nothing
- case Right(v) => if (pf isDefinedAt v) pf(v) else { /*do nothing*/ }
- }
+ def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete {
+ case Right(v) if pf isDefinedAt v => pf(v)
+ case _ =>
+ }(executor)
/** When this future is completed with a failure (i.e. with a throwable),
* apply the provided callback to the throwable.
@@ -110,48 +133,46 @@ trait Future[+T] extends Awaitable[T] {
* Will not be called in case that the future is completed with a value.
*
* $multipleCallbacks
+ * $callbackInContext
*/
- def onFailure[U](callback: PartialFunction[Throwable, U]): this.type = onComplete {
- case Left(t) => if (isFutureThrowable(t) && callback.isDefinedAt(t)) callback(t) else { /*do nothing*/ }
- case Right(v) => // do nothing
- }
+ def onFailure[U](callback: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
+ case Left(t) if (isFutureThrowable(t) && callback.isDefinedAt(t)) => callback(t)
+ case _ =>
+ }(executor)
- /** When this future is completed, either through an exception, a timeout, or a value,
+ /** When this future is completed, either through an exception, or a value,
* apply the provided function.
*
* If the future has already been completed,
* this will either be applied immediately or be scheduled asynchronously.
*
* $multipleCallbacks
+ * $callbackInContext
*/
- def onComplete[U](func: Either[Throwable, T] => U): this.type
+ def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit
/* Miscellaneous */
- /** Creates a new promise.
- */
- protected def newPromise[S]: Promise[S]
-
/** Returns whether the future has already been completed with
* a value or an exception.
- *
+ *
* $nonDeterministic
- *
+ *
* @return `true` if the future is already completed, `false` otherwise
*/
def isCompleted: Boolean
-
+
/** The value of this `Future`.
- *
+ *
* If the future is not completed the returned value will be `None`.
* If the future is completed the value will be `Some(Success(t))`
* if it contains a valid result, or `Some(Failure(error))` if it contains
* an exception.
*/
def value: Option[Either[Throwable, T]]
-
-
+
+
/* Projections */
/** Returns a failed projection of this future.
@@ -167,11 +188,11 @@ trait Future[+T] extends Awaitable[T] {
* and throws a corresponding exception if the original future fails.
*/
def failed: Future[Throwable] = {
- val p = newPromise[Throwable]
+ val p = Promise[Throwable]()
onComplete {
case Left(t) => p success t
- case Right(v) => p failure (new NoSuchElementException("Future.failed not completed with a throwable. Instead completed with: " + v))
+ case Right(v) => p failure (new NoSuchElementException("Future.failed not completed with a throwable."))
}
p.future
@@ -184,28 +205,60 @@ trait Future[+T] extends Awaitable[T] {
*
* Will not be called if the future fails.
*/
- def foreach[U](f: T => U): Unit = onComplete {
+ def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete {
case Right(r) => f(r)
- case Left(_) => // do nothing
+ case _ => // do nothing
+ }(executor)
+
+ /** Creates a new future by applying the 's' function to the successful result of
+ * this future, or the 'f' function to the failed result. If there is any non-fatal
+ * exception thrown when 's' or 'f' is applied, that exception will be propagated
+ * to the resulting future.
+ *
+ * @param s function that transforms a successful result of the receiver into a
+ * successful result of the returned future
+ * @param f function that transforms a failure of the receiver into a failure of
+ * the returned future
+ * @return a future that will be completed with the transformed value
+ */
+ def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = {
+ val p = Promise[S]()
+
+ onComplete {
+ case result =>
+ try {
+ result match {
+ case Left(t) => p failure f(t)
+ case Right(r) => p success s(r)
+ }
+ } catch {
+ case NonFatal(t) => p failure t
+ }
+ }(executor)
+
+ p.future
}
/** Creates a new future by applying a function to the successful result of
* this future. If this future is completed with an exception then the new
* future will also contain this exception.
*
- * $forComprehensionExample
+ * $forComprehensionExamples
*/
- def map[S](f: T => S): Future[S] = {
- val p = newPromise[S]
+ def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { // transform(f, identity)
+ val p = Promise[S]()
onComplete {
- case Left(t) => p failure t
- case Right(v) =>
- try p success f(v)
- catch {
- case t => p complete resolver(t)
+ case result =>
+ try {
+ result match {
+ case Right(r) => p success f(r)
+ case l: Left[_, _] => p complete l.asInstanceOf[Left[Throwable, S]]
+ }
+ } catch {
+ case NonFatal(t) => p failure t
}
- }
+ }(executor)
p.future
}
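
As an aside on the reworked combinators: `map` (like the other combinators in this patch) now takes an implicit `ExecutionContext`, and a non-fatal exception thrown by the mapping function fails the resulting future rather than escaping. A minimal sketch, assuming the default context is exposed as `ExecutionContext.defaultExecutionContext` as the comments elsewhere in this patch suggest:

{{{
import scala.concurrent.{ Future, ExecutionContext }

// Assumption: the default context is available as ExecutionContext.defaultExecutionContext.
implicit val ec: ExecutionContext = ExecutionContext.defaultExecutionContext

val ok: Future[Int]  = Future("42") map (_.toInt)   // completes with Right(42)
val bad: Future[Int] = Future("oops") map (_.toInt) // NumberFormatException is non-fatal,
                                                    // so the future completes with Left(...)
}}}
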
@@ -215,23 +268,23 @@ trait Future[+T] extends Awaitable[T] {
* If this future is completed with an exception then the new future will
* also contain this exception.
*
- * $forComprehensionExample
+ * $forComprehensionExamples
*/
- def flatMap[S](f: T => Future[S]): Future[S] = {
- val p = newPromise[S]
+ def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = {
+ val p = Promise[S]()
onComplete {
- case Left(t) => p failure t
+ case l: Left[_, _] => p complete l.asInstanceOf[Left[Throwable, S]]
case Right(v) =>
try {
- f(v) onComplete {
- case Left(t) => p failure t
+ f(v).onComplete({
+ case l: Left[_, _] => p complete l.asInstanceOf[Left[Throwable, S]]
case Right(v) => p success v
- }
+ })(internalExecutor)
} catch {
- case t: Throwable => p complete resolver(t)
+ case NonFatal(t) => p failure t
}
- }
+ }(executor)
p.future
}
@@ -241,7 +294,7 @@ trait Future[+T] extends Awaitable[T] {
* If the current future contains a value which satisfies the predicate, the new future will also hold that value.
* Otherwise, the resulting future will fail with a `NoSuchElementException`.
*
- * If the current future fails or times out, the resulting future also fails or times out, respectively.
+ * If the current future fails, then the resulting future also fails.
*
* Example:
* {{{
@@ -252,26 +305,26 @@ trait Future[+T] extends Awaitable[T] {
* await(h, 0) // throw a NoSuchElementException
* }}}
*/
- def filter(pred: T => Boolean): Future[T] = {
- val p = newPromise[T]
+ def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
+ val p = Promise[T]()
onComplete {
- case Left(t) => p failure t
+ case l: Left[_, _] => p complete l.asInstanceOf[Left[Throwable, T]]
case Right(v) =>
try {
if (pred(v)) p success v
else p failure new NoSuchElementException("Future.filter predicate is not satisfied by: " + v)
} catch {
- case t: Throwable => p complete resolver(t)
+ case NonFatal(t) => p failure t
}
- }
+ }(executor)
p.future
}
/** Used by for-comprehensions.
*/
- final def withFilter(p: T => Boolean): Future[T] = filter(p)
+ final def withFilter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = filter(p)(executor)
// final def withFilter(p: T => Boolean) = new FutureWithFilter[T](this, p)
// final class FutureWithFilter[+S](self: Future[S], p: S => Boolean) {
@@ -281,12 +334,12 @@ trait Future[+T] extends Awaitable[T] {
// def withFilter(q: S => Boolean): FutureWithFilter[S] = new FutureWithFilter[S](self, x => p(x) && q(x))
// }
- /** Creates a new future by mapping the value of the current future if the given partial function is defined at that value.
+ /** Creates a new future by mapping the value of the current future, if the given partial function is defined at that value.
*
* If the current future contains a value for which the partial function is defined, the new future will also hold that value.
* Otherwise, the resulting future will fail with a `NoSuchElementException`.
*
- * If the current future fails or times out, the resulting future also fails or times out, respectively.
+ * If the current future fails, then the resulting future also fails.
*
* Example:
* {{{
@@ -301,19 +354,19 @@ trait Future[+T] extends Awaitable[T] {
* await(h, 0) // throw a NoSuchElementException
* }}}
*/
- def collect[S](pf: PartialFunction[T, S]): Future[S] = {
- val p = newPromise[S]
+ def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = {
+ val p = Promise[S]()
onComplete {
- case Left(t) => p failure t
+ case l: Left[_, _] => p complete l.asInstanceOf[Left[Throwable, S]]
case Right(v) =>
try {
if (pf.isDefinedAt(v)) p success pf(v)
else p failure new NoSuchElementException("Future.collect partial function is not defined at: " + v)
} catch {
- case t: Throwable => p complete resolver(t)
+ case NonFatal(t) => p failure t
}
- }
+ }(executor)
p.future
}
@@ -330,15 +383,17 @@ trait Future[+T] extends Awaitable[T] {
* future (6 / 2) recover { case e: ArithmeticException ⇒ 0 } // result: 3
* }}}
*/
- def recover[U >: T](pf: PartialFunction[Throwable, U]): Future[U] = {
- val p = newPromise[U]
+ def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = {
+ val p = Promise[U]()
onComplete {
case Left(t) if pf isDefinedAt t =>
try { p success pf(t) }
- catch { case t: Throwable => p complete resolver(t) }
+ catch {
+ case NonFatal(t) => p failure t
+ }
case otherwise => p complete otherwise
- }
+ }(executor)
p.future
}
@@ -356,18 +411,18 @@ trait Future[+T] extends Awaitable[T] {
* future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
* }}}
*/
- def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]]): Future[U] = {
- val p = newPromise[U]
+ def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = {
+ val p = Promise[U]()
onComplete {
case Left(t) if pf isDefinedAt t =>
try {
p completeWith pf(t)
} catch {
- case t: Throwable => p complete resolver(t)
+ case NonFatal(t) => p failure t
}
case otherwise => p complete otherwise
- }
+ }(executor)
p.future
}
@@ -381,19 +436,19 @@ trait Future[+T] extends Awaitable[T] {
* with the throwable stored in `that`.
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
- val p = newPromise[(T, U)]
-
+ val p = Promise[(T, U)]()
+
this onComplete {
- case Left(t) => p failure t
- case Right(r) => that onSuccess {
- case r2 => p success ((r, r2))
- }
- }
-
- that onFailure {
- case f => p failure f
+ case l: Left[_, _] => p complete l.asInstanceOf[Left[Throwable, (T, U)]]
+ case Right(r) =>
+ that onSuccess {
+ case r2 => p success ((r, r2))
+ }
+ that onFailure {
+ case f => p failure f
+ }
}
-
+
p.future
}
@@ -412,33 +467,37 @@ trait Future[+T] extends Awaitable[T] {
* }}}
*/
def fallbackTo[U >: T](that: Future[U]): Future[U] = {
- val p = newPromise[U]
+ val p = Promise[U]()
onComplete {
case r @ Right(_) ⇒ p complete r
case _ ⇒ p completeWith that
}
p.future
}
-
+
/** Creates a new `Future[S]` which is completed with this `Future`'s result if
* that conforms to `S`'s erased type or a `ClassCastException` otherwise.
*/
- def mapTo[S](implicit m: Manifest[S]): Future[S] = {
- val p = newPromise[S]
-
+ def mapTo[S](implicit tag: ClassTag[S]): Future[S] = {
+ def boxedType(c: Class[_]): Class[_] = {
+ if (c.isPrimitive) Future.toBoxed(c) else c
+ }
+
+ val p = Promise[S]()
+
onComplete {
- case l: Left[Throwable, _] => p complete l.asInstanceOf[Either[Throwable, S]]
+ case l: Left[_, _] => p complete l.asInstanceOf[Left[Throwable, S]]
case Right(t) =>
p complete (try {
- Right(impl.Future.boxedType(m.erasure).cast(t).asInstanceOf[S])
+ Right(boxedType(tag.erasure).cast(t).asInstanceOf[S])
} catch {
case e: ClassCastException => Left(e)
})
}
-
+
p.future
}
-
+
/** Applies the side-effecting function to the result of this future, and returns
* a new future with the result of this future.
*
@@ -462,14 +521,12 @@ trait Future[+T] extends Awaitable[T] {
* }
* }}}
*/
- def andThen[U](pf: PartialFunction[Either[Throwable, T], U]): Future[T] = {
- val p = newPromise[T]
+ def andThen[U](pf: PartialFunction[Either[Throwable, T], U])(implicit executor: ExecutionContext): Future[T] = {
+ val p = Promise[T]()
onComplete {
- case r =>
- try if (pf isDefinedAt r) pf(r)
- finally p complete r
- }
+ case r => try if (pf isDefinedAt r) pf(r) finally p complete r
+ }(executor)
p.future
}
@@ -488,12 +545,8 @@ trait Future[+T] extends Awaitable[T] {
* }}}
*/
def either[U >: T](that: Future[U]): Future[U] = {
- val p = newPromise[U]
-
- val completePromise: PartialFunction[Either[Throwable, U], _] = {
- case Left(t) => p tryFailure t
- case Right(v) => p trySuccess v
- }
+ val p = Promise[U]()
+ val completePromise: PartialFunction[Either[Throwable, U], _] = { case result => p tryComplete result }
this onComplete completePromise
that onComplete completePromise
@@ -512,15 +565,38 @@ trait Future[+T] extends Awaitable[T] {
*/
object Future {
- def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = impl.Future(body)
+ import java.{ lang => jl }
+
+ private[concurrent] val toBoxed = Map[Class[_], Class[_]](
+ classOf[Boolean] -> classOf[jl.Boolean],
+ classOf[Byte] -> classOf[jl.Byte],
+ classOf[Char] -> classOf[jl.Character],
+ classOf[Short] -> classOf[jl.Short],
+ classOf[Int] -> classOf[jl.Integer],
+ classOf[Long] -> classOf[jl.Long],
+ classOf[Float] -> classOf[jl.Float],
+ classOf[Double] -> classOf[jl.Double],
+ classOf[Unit] -> classOf[scala.runtime.BoxedUnit]
+ )
+
+ /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
+ *
+ * The result becomes available once the asynchronous computation is completed.
+ *
+ * @tparam T the type of the result
+ * @param body the asychronous computation
+ * @param execctx the execution context on which the future is run
+ * @return the `Future` holding the result of the computation
+ */
+ def apply[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = impl.Future(body)
import scala.collection.mutable.Builder
import scala.collection.generic.CanBuildFrom
- /** Simple version of `Futures.traverse`. Transforms a `Traversable[Future[A]]` into a `Future[Traversable[A]]`.
+ /** Simple version of `Futures.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
* Useful for reducing many `Future`s into a single `Future`.
*/
- def sequence[A, M[_] <: Traversable[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
+ def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
in.foldLeft(Promise.successful(cbf(in)).future) {
(fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a)
} map (_.result)
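
The collection combinators in the companion object now accept any `TraversableOnce` of futures. A small sketch of `sequence`, under the same `defaultExecutionContext` assumption as above:

{{{
import scala.concurrent.{ Future, ExecutionContext }

implicit val ec: ExecutionContext = ExecutionContext.defaultExecutionContext // assumed default

val parts: List[Future[Int]] = List(Future(1), Future(2), Future(3))

// Completes with Right(List(1, 2, 3)) once all parts are done,
// or with the first failure if any of them fails.
val all: Future[List[Int]] = Future.sequence(parts)
}}}
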
@@ -528,7 +604,7 @@ object Future {
/** Returns a `Future` to the result of the first future in the list that is completed.
*/
- def firstCompletedOf[T](futures: Traversable[Future[T]])(implicit executor: ExecutionContext): Future[T] = {
+ def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
val completeFirst: Either[Throwable, T] => Unit = p tryComplete _
@@ -539,7 +615,8 @@ object Future {
/** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate.
*/
- def find[T](futures: Traversable[Future[T]])(predicate: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
+ def find[T](futurestravonce: TraversableOnce[Future[T]])(predicate: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
+ val futures = futurestravonce.toBuffer
if (futures.isEmpty) Promise.successful[Option[T]](None).future
else {
val result = Promise[Option[T]]()
@@ -550,8 +627,9 @@ object Future {
case _ =>
}
} finally {
- if (ref.decrementAndGet == 0)
+ if (ref.decrementAndGet == 0) {
result tryComplete Right(None)
+ }
}
futures.foreach(_ onComplete search)
@@ -564,45 +642,71 @@ object Future {
* The fold is performed on the thread where the last future is completed,
* the result will be the first failure of any of the futures, or any failure in the actual fold,
* or the result of the fold.
- *
+ *
* Example:
* {{{
* val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds)
* }}}
*/
- def fold[T, R](futures: Traversable[Future[T]])(zero: R)(foldFun: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+ def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(foldFun: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
if (futures.isEmpty) Promise.successful(zero).future
else sequence(futures).map(_.foldLeft(zero)(foldFun))
}
/** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first.
- *
+ *
* Example:
* {{{
* val result = Await.result(Futures.reduce(futures)(_ + _), 5 seconds)
* }}}
*/
- def reduce[T, R >: T](futures: Traversable[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+ def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
if (futures.isEmpty) Promise[R].failure(new NoSuchElementException("reduce attempted on empty collection")).future
else sequence(futures).map(_ reduceLeft op)
}
-
- /** Transforms a `Traversable[A]` into a `Future[Traversable[B]]` using the provided function `A => Future[B]`.
+
+ /** Transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]` using the provided function `A => Future[B]`.
* This is useful for performing a parallel map. For example, to apply a function to all items of a list
* in parallel:
- *
+ *
* {{{
* val myFutureList = Future.traverse(myList)(x => Future(myFunc(x)))
* }}}
*/
- def traverse[A, B, M[_] <: Traversable[_]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
+ def traverse[A, B, M[_] <: TraversableOnce[_]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
in.foldLeft(Promise.successful(cbf(in)).future) { (fr, a) =>
val fb = fn(a.asInstanceOf[A])
for (r <- fr; b <- fb) yield (r += b)
}.map(_.result)
-
-}
+ // This is used to run callbacks which are internal
+ // to scala.concurrent; our own callbacks are only
+ // ever used to eventually run another callback,
+ // and that other callback will have its own
+ // executor because all callbacks come with
+ // an executor. Our own callbacks never block
+ // and have no "expected" exceptions.
+ // As a result, this executor can do nothing;
+ // some other executor will always come after
+ // it (and sometimes one will be before it),
+ // and those will be performing the "real"
+ // dispatch to code outside scala.concurrent.
+ // Because this exists, ExecutionContext.defaultExecutionContext
+ // isn't instantiated by Future internals, so
+ // if some code for some reason wants to avoid
+ // ever starting up the default context, it can do so
+ // by just not ever using it itself. scala.concurrent
+ // doesn't need to create defaultExecutionContext as
+ // a side effect.
+ private[concurrent] object InternalCallbackExecutor extends ExecutionContext {
+ def execute(runnable: Runnable): Unit =
+ runnable.run()
+ def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T =
+ throw new IllegalStateException("bug in scala.concurrent, called blocking() from internal callback")
+ def reportFailure(t: Throwable): Unit =
+ throw new IllegalStateException("problem in scala.concurrent internal callback", t)
+ }
+}
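
To summarise the user-visible effect of the Future.scala changes: callback registration (`onComplete`, `onSuccess`, `onFailure`) now returns `Unit` and requires an `ExecutionContext`, while results are still reported as `Either[Throwable, T]`. A hedged usage sketch, again assuming `ExecutionContext.defaultExecutionContext`:

{{{
import scala.concurrent.{ Future, ExecutionContext }

implicit val ec: ExecutionContext = ExecutionContext.defaultExecutionContext // assumed default

val f = Future { 21 * 2 }

f onComplete {
  case Right(v) => println("computed " + v)  // success side of the Either
  case Left(t)  => println("failed: " + t)   // failure side
}

f onSuccess { case v => println("got " + v) }
}}}
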
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
index 75e6299ad9..9d6f8a7a88 100644
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ b/src/library/scala/concurrent/FutureTaskRunner.scala
@@ -8,6 +8,8 @@
package scala.concurrent
+import language.{implicitConversions, higherKinds}
+
/** The `FutureTaskRunner` trait is a base trait of task runners
* that provide some sort of future abstraction.
*
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index 127a0e0055..9b5e741549 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -9,6 +9,7 @@
package scala.concurrent
import java.util.concurrent.{ExecutorService, Executor}
+import language.implicitConversions
/** The `JavaConversions` object provides implicit conversions supporting
* interoperability between Scala and Java concurrency classes.
diff --git a/src/library/scala/concurrent/ManagedBlocker.scala b/src/library/scala/concurrent/ManagedBlocker.scala
index 0b6d82e76f..9c6f4d51d6 100644
--- a/src/library/scala/concurrent/ManagedBlocker.scala
+++ b/src/library/scala/concurrent/ManagedBlocker.scala
@@ -12,7 +12,6 @@ package scala.concurrent
*
* @author Philipp Haller
*/
-@deprecated("Not used.", "2.10.0")
trait ManagedBlocker {
/**
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index 8f2bce5d1a..578642966f 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -25,6 +25,11 @@ package scala.concurrent
*/
trait Promise[T] {
+ // used for internal callbacks defined in
+ // the lexical scope of this trait;
+ // _never_ for application callbacks.
+ private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
+
/** Future containing the value of this promise.
*/
def future: Future[T]
@@ -35,7 +40,8 @@ trait Promise[T] {
*
* $promiseCompletion
*/
- def complete(result: Either[Throwable, T]): this.type = if (tryComplete(result)) this else throwCompleted
+ def complete(result: Either[Throwable, T]): this.type =
+ if (tryComplete(result)) this else throw new IllegalStateException("Promise already completed.")
/** Tries to complete the promise with either a value or the exception.
*
@@ -50,19 +56,26 @@ trait Promise[T] {
* @return This promise
*/
final def completeWith(other: Future[T]): this.type = {
- other onComplete {
- this complete _
- }
+ other onComplete { this complete _ }
+ this
+ }
+
+ /** Attempts to complete this promise with the specified future, once that future is completed.
+ *
+ * @return This promise
+ */
+ final def tryCompleteWith(other: Future[T]): this.type = {
+ other onComplete { this tryComplete _ }
this
}
/** Completes the promise with a value.
*
- * @param value The value to complete the promise with.
+ * @param v The value to complete the promise with.
*
* $promiseCompletion
*/
- def success(v: T): this.type = if (trySuccess(v)) this else throwCompleted
+ def success(v: T): this.type = complete(Right(v))
/** Tries to complete the promise with a value.
*
@@ -80,7 +93,7 @@ trait Promise[T] {
*
* $promiseCompletion
*/
- def failure(t: Throwable): this.type = if (tryFailure(t)) this else throwCompleted
+ def failure(t: Throwable): this.type = complete(Left(t))
/** Tries to complete the promise with an exception.
*
@@ -89,35 +102,32 @@ trait Promise[T] {
* @return If the promise has already been completed returns `false`, or `true` otherwise.
*/
def tryFailure(t: Throwable): Boolean = tryComplete(Left(t))
-
- /** Wraps a `Throwable` in an `ExecutionException` if necessary. TODO replace with `resolver` from scala.concurrent
- *
- * $allowedThrowables
- */
- protected def wrap(t: Throwable): Throwable = t match {
- case t: Throwable if isFutureThrowable(t) => t
- case _ => new ExecutionException(t)
- }
-
- private def throwCompleted = throw new IllegalStateException("Promise already completed.")
-
}
object Promise {
- /** Creates a new promise.
+ /** Creates a promise object which can be completed with a value.
+ *
+ * @tparam T the type of the value in the promise
+ * @return the newly created `Promise` object
*/
- def apply[T]()(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.DefaultPromise[T]()
+ def apply[T](): Promise[T] = new impl.Promise.DefaultPromise[T]()
- /** Creates an already completed Promise with the specified exception
+ /** Creates an already completed Promise with the specified exception.
+ *
+ * @tparam T the type of the value in the promise
+ * @return the newly created `Promise` object
*/
- def failed[T](exception: Throwable)(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.KeptPromise[T](Left(exception))
+ def failed[T](exception: Throwable): Promise[T] = new impl.Promise.KeptPromise[T](Left(exception))
- /** Creates an already completed Promise with the specified result
+ /** Creates an already completed Promise with the specified result.
+ *
+ * @tparam T the type of the value in the promise
+ * @return the newly created `Promise` object
*/
- def successful[T](result: T)(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.KeptPromise[T](Right(result))
+ def successful[T](result: T): Promise[T] = new impl.Promise.KeptPromise[T](Right(result))
}
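
With the executor removed from the `Promise` factories and `tryCompleteWith` added, racing producers can be wired up without an implicit context at the promise itself. A sketch; `defaultExecutionContext` is assumed only for the `Future(...)` producers:

{{{
import scala.concurrent.{ Future, Promise, ExecutionContext }

implicit val ec: ExecutionContext = ExecutionContext.defaultExecutionContext // assumed default

val p = Promise[Int]()            // no executor parameter any more
val first: Future[Int] = p.future

p tryCompleteWith Future(1)       // whichever producer finishes first wins
p tryCompleteWith Future(2)       // the later completion is silently ignored

// p success 3                    // would now throw: "Promise already completed."
}}}
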
diff --git a/src/library/scala/concurrent/Scheduler.scala b/src/library/scala/concurrent/Scheduler.scala
index e2eb4d69fe..6645abcc4e 100644
--- a/src/library/scala/concurrent/Scheduler.scala
+++ b/src/library/scala/concurrent/Scheduler.scala
@@ -35,7 +35,7 @@ trait Scheduler {
/** Schedules a thunk for execution after a given delay.
*
* @param delay the duration after which the thunk should be executed
- * @param thunk the thunk that is scheduled for execution
+ * @param task the thunk that is scheduled for execution
* @return a `Cancellable` that may be used to cancel the execution
* of the thunk
*/
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 43f2ec57c0..5a6d95c2ed 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -55,19 +55,32 @@ class SyncVar[A] {
def take(): A = synchronized {
try get
- finally unset()
+ finally unsetVal()
}
- // TODO: this method should be private
- def set(x: A): Unit = synchronized {
- isDefined = true
- value = Some(x)
- notifyAll()
+ /** Waits for this SyncVar to become defined at least for
+ * `timeout` milliseconds (possibly more), and takes its
+ * value by first reading and then removing the value from
+ * the SyncVar.
+ *
+ * @param timeout the number of milliseconds to wait; 0 means wait forever
+ * @return `None` if variable is undefined after `timeout`, `Some(value)` otherwise
+ */
+ def take(timeout: Long): A = synchronized {
+ try get(timeout).get
+ finally unsetVal()
}
+ // TODO: this method should be private
+ // [Heather] the reason why: it doesn't take into consideration
+ // whether or not the SyncVar is already defined. So, set has been
+ // deprecated in order to eventually be able to make "setting" private
+ @deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0")
+ def set(x: A): Unit = setVal(x)
+
def put(x: A): Unit = synchronized {
while (isDefined) wait()
- set(x)
+ setVal(x)
}
def isSet: Boolean = synchronized {
@@ -75,10 +88,33 @@ class SyncVar[A] {
}
// TODO: this method should be private
+ // [Heather] the reason why: it doesn't take into consideration
+ // whether or not the SyncVar is already defined. So, unset has been
+ // deprecated in order to eventually be able to make "unsetting" private
+ @deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0")
def unset(): Unit = synchronized {
isDefined = false
value = None
notifyAll()
}
+
+ // `setVal` exists so as to retroactively deprecate `set` without
+ // deprecation warnings where we use `set` internally. The
+ // implementation of `set` was moved to `setVal` to achieve this
+ private def setVal(x: A): Unit = synchronized {
+ isDefined = true
+ value = Some(x)
+ notifyAll()
+ }
+
+ // `unsetVal` exists so as to retroactively deprecate `unset` without
+ // deprecation warnings where we use `unset` internally. The
+ // implementation of `unset` was moved to `unsetVal` to achieve this
+ private def unsetVal(): Unit = synchronized {
+ isDefined = false
+ value = None
+ notifyAll()
+ }
+
}
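
For SyncVar, the supported usage after this change is `put`/`take` (with an optional timeout); `set`/`unset` only remain for compatibility. A short sketch:

{{{
import scala.concurrent.SyncVar

val sv = new SyncVar[String]

sv.put("ready")               // blocks while a value is already present
val v: String = sv.take()     // reads and removes the value

// Timed variant added in this patch: waits up to 100 ms and then,
// per the implementation above, fails on the empty Option if nothing was put.
// val maybe: String = sv.take(100)
}}}
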
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index 500d79e07f..3180e9ce8a 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -8,6 +8,8 @@
package scala.concurrent
+import language.{higherKinds, implicitConversions}
+
/** The `TaskRunner` trait...
*
* @author Philipp Haller
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
index a3e0253634..fd6882348a 100644
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ b/src/library/scala/concurrent/ThreadPoolRunner.scala
@@ -9,6 +9,7 @@
package scala.concurrent
import java.util.concurrent.{ExecutorService, Callable, TimeUnit}
+import language.implicitConversions
/** The `ThreadPoolRunner` trait uses a `java.util.concurrent.ExecutorService`
* to run submitted tasks.
diff --git a/src/library/scala/concurrent/ThreadRunner.scala b/src/library/scala/concurrent/ThreadRunner.scala
index 28fcf57df8..76be94aa6b 100644
--- a/src/library/scala/concurrent/ThreadRunner.scala
+++ b/src/library/scala/concurrent/ThreadRunner.scala
@@ -9,6 +9,7 @@
package scala.concurrent
import java.lang.Thread
+import language.implicitConversions
/** The `ThreadRunner` trait...
*
diff --git a/src/library/scala/concurrent/impl/AbstractPromise.java b/src/library/scala/concurrent/impl/AbstractPromise.java
index 5280d67854..b1799c5fa9 100644
--- a/src/library/scala/concurrent/impl/AbstractPromise.java
+++ b/src/library/scala/concurrent/impl/AbstractPromise.java
@@ -9,13 +9,32 @@
package scala.concurrent.impl;
-
+import scala.concurrent.util.Unsafe;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
abstract class AbstractPromise {
- private volatile Object _ref = null;
+ private volatile Object _ref;
+
+ final static long _refoffset;
+
+ static {
+ try {
+ _refoffset = Unsafe.instance.objectFieldOffset(AbstractPromise.class.getDeclaredField("_ref"));
+ } catch (Throwable t) {
+ throw new ExceptionInInitializerError(t);
+ }
+ }
+
+ protected final boolean updateState(Object oldState, Object newState) {
+ return Unsafe.instance.compareAndSwapObject(this, _refoffset, oldState, newState);
+ }
+
+ protected final Object getState() {
+ return _ref;
+ }
+
protected final static AtomicReferenceFieldUpdater<AbstractPromise, Object> updater =
AtomicReferenceFieldUpdater.newUpdater(AbstractPromise.class, Object.class, "_ref");
-}
+} \ No newline at end of file
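
The new `updateState`/`getState` pair gives subclasses a single CAS-updated state slot. The real code goes through `scala.concurrent.util.Unsafe`; the sketch below shows the same contract with a plain `AtomicReference`, purely for illustration:

{{{
import java.util.concurrent.atomic.AtomicReference

// Illustrative only: AtomicReference instead of Unsafe, same state-transition contract.
abstract class PromiseStateSlot {
  private[this] val _ref = new AtomicReference[AnyRef](null)

  protected final def updateState(oldState: AnyRef, newState: AnyRef): Boolean =
    _ref.compareAndSet(oldState, newState)

  protected final def getState: AnyRef = _ref.get
}
}}}
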
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index c5062267dc..1083a93439 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -10,23 +10,26 @@ package scala.concurrent.impl
-import java.util.concurrent.{Callable, Executor, ExecutorService, Executors, ThreadFactory}
+import java.util.concurrent.{ Callable, Executor, ExecutorService, Executors, ThreadFactory, TimeUnit }
+import java.util.Collection
import scala.concurrent.forkjoin._
-import scala.concurrent.{ExecutionContext, resolver, Awaitable, body2awaitable}
-import scala.concurrent.util.{ Duration }
+import scala.concurrent.{ ExecutionContext, Awaitable }
+import scala.concurrent.util.Duration
-private[scala] class ExecutionContextImpl(es: AnyRef) extends ExecutionContext with Executor {
- import ExecutionContextImpl._
-
- val executorService: AnyRef = if (es eq null) getExecutorService else es
+private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: Throwable => Unit) extends ExecutionContext with Executor {
+
+ val executor: Executor = es match {
+ case null => createExecutorService
+ case some => some
+ }
// to ensure that the current execution context thread local is properly set
def executorsThreadFactory = new ThreadFactory {
def newThread(r: Runnable) = new Thread(new Runnable {
override def run() {
- currentExecutionContext.set(ExecutionContextImpl.this)
+ scala.concurrent.currentExecutionContext.set(ExecutionContextImpl.this)
r.run()
}
})
@@ -36,60 +39,81 @@ private[scala] class ExecutionContextImpl(es: AnyRef) extends ExecutionContext w
def forkJoinPoolThreadFactory = new ForkJoinPool.ForkJoinWorkerThreadFactory {
def newThread(fjp: ForkJoinPool) = new ForkJoinWorkerThread(fjp) {
override def onStart() {
- currentExecutionContext.set(ExecutionContextImpl.this)
+ scala.concurrent.currentExecutionContext.set(ExecutionContextImpl.this)
}
}
}
- def getExecutorService: AnyRef =
- if (scala.util.Properties.isJavaAtLeast("1.6")) {
- val vendor = scala.util.Properties.javaVmVendor
- if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple"))
- new ForkJoinPool(
- Runtime.getRuntime.availableProcessors(),
+ def createExecutorService: ExecutorService = try { new ForkJoinPool(
+ Runtime.getRuntime.availableProcessors(), //FIXME from config
forkJoinPoolThreadFactory,
- null,
- false)
- else
- Executors.newCachedThreadPool(executorsThreadFactory)
- } else Executors.newCachedThreadPool(executorsThreadFactory)
+ null, //FIXME we should have an UncaughtExceptionHandler, see what Akka does
+ true) //FIXME I really think this should be async...
+ } catch {
+ case NonFatal(t) =>
+ System.err.println("Failed to create ForkJoinPool for the default ExecutionContext, falling back to Executors.newCachedThreadPool")
+ t.printStackTrace(System.err)
+ Executors.newCachedThreadPool(executorsThreadFactory)
+ }
- def execute(runnable: Runnable): Unit = executorService match {
+ def execute(runnable: Runnable): Unit = executor match {
case fj: ForkJoinPool =>
- if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread]) {
- val fjtask = ForkJoinTask.adapt(runnable)
- fjtask.fork
- } else {
- fj.execute(runnable)
+ Thread.currentThread match {
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq fj =>
+ (runnable match {
+ case fjt: ForkJoinTask[_] => fjt
+ case _ => ForkJoinTask.adapt(runnable)
+ }).fork
+ case _ => fj.execute(runnable)
}
- case executor: Executor =>
- executor execute runnable
+ case generic => generic execute runnable
}
- def execute[U](body: () => U): Unit = execute(new Runnable {
- def run() = body()
- })
-
def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T = {
Future.releaseStack(this)
- awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence)
- }
-
- def reportFailure(t: Throwable) = t match {
- case e: Error => throw e // rethrow serious errors
- case t => t.printStackTrace()
+ executor match {
+ case fj: ForkJoinPool =>
+ var result: T = null.asInstanceOf[T]
+ ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
+ @volatile var isdone = false
+ def block(): Boolean = {
+ result = awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence) // FIXME what happens if there's an exception thrown here?
+ isdone = true
+ true
+ }
+ def isReleasable = isdone
+ })
+ result
+ case _ =>
+ awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence)
+ }
}
+ def reportFailure(t: Throwable) = reporter(t)
}
private[concurrent] object ExecutionContextImpl {
- private[concurrent] def currentExecutionContext: ThreadLocal[ExecutionContext] = new ThreadLocal[ExecutionContext] {
- override protected def initialValue = null
- }
-
+ def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
+ def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutorService =
+ new ExecutionContextImpl(es, reporter) with ExecutorService {
+ final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService]
+ override def execute(command: Runnable) = executor.execute(command)
+ override def shutdown() { asExecutorService.shutdown() }
+ override def shutdownNow() = asExecutorService.shutdownNow()
+ override def isShutdown = asExecutorService.isShutdown
+ override def isTerminated = asExecutorService.isTerminated
+ override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit)
+ override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable)
+ override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t)
+ override def submit(runnable: Runnable) = asExecutorService.submit(runnable)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit)
+ }
}
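
The interesting part of the rewritten `execute` is the ForkJoin fast path: if the caller is already a worker thread of the target pool, the runnable is forked locally instead of being resubmitted. A standalone sketch of just that decision (`DispatchSketch` is an illustrative name):

{{{
import scala.concurrent.forkjoin.{ ForkJoinPool, ForkJoinTask, ForkJoinWorkerThread }

object DispatchSketch {
  def dispatch(pool: ForkJoinPool, runnable: Runnable): Unit =
    Thread.currentThread match {
      case worker: ForkJoinWorkerThread if worker.getPool eq pool =>
        ForkJoinTask.adapt(runnable).fork()   // already inside the pool: fork locally
      case _ =>
        pool.execute(runnable)                // otherwise submit from the outside
    }
}
}}}
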
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 615ab061a5..47534e398b 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -10,36 +10,16 @@ package scala.concurrent.impl
-import scala.concurrent.{Awaitable, ExecutionContext}
+import scala.concurrent.util.Duration
+import scala.concurrent.{Awaitable, ExecutionContext, CanAwait}
import scala.collection.mutable.Stack
-private[concurrent] trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
-
- implicit def executor: ExecutionContext
-
- /** For use only within a Future.flow block or another compatible Delimited Continuations reset block.
- *
- * Returns the result of this Future without blocking, by suspending execution and storing it as a
- * continuation until the result is available.
- */
- //def apply(): T @cps[Future[Any]] = shift(this flatMap (_: T => Future[Any]))
- /** Tests whether this Future has been completed.
- */
- final def isCompleted: Boolean = value.isDefined
-
- /** The contained value of this Future. Before this Future is completed
- * the value will be None. After completion the value will be Some(Right(t))
- * if it contains a valid result, or Some(Left(error)) if it contains
- * an exception.
- */
- def value: Option[Either[Throwable, T]]
-
- def onComplete[U](func: Either[Throwable, T] => U): this.type
+private[concurrent] trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
}
-object Future {
+private[concurrent] object Future {
import java.{ lang => jl }
private val toBoxed = Map[Class[_], Class[_]](
@@ -54,26 +34,37 @@ object Future {
classOf[Unit] -> classOf[scala.runtime.BoxedUnit]
)
- def boxedType(c: Class[_]): Class[_] = {
- if (c.isPrimitive) toBoxed(c) else c
+ /** Wraps a block of code into an awaitable object. */
+ private[concurrent] def body2awaitable[T](body: =>T) = new Awaitable[T] {
+ def ready(atMost: Duration)(implicit permit: CanAwait) = {
+ body
+ this
+ }
+ def result(atMost: Duration)(implicit permit: CanAwait) = body
}
+ def boxedType(c: Class[_]): Class[_] = if (c.isPrimitive) toBoxed(c) else c
+
def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = {
val promise = new Promise.DefaultPromise[T]()
+
+ //TODO: use `dispatchFuture`?
executor.execute(new Runnable {
- def run = {
- promise complete {
- try {
- Right(body)
- } catch {
- case e => scala.concurrent.resolver(e)
- }
+ def run = promise complete {
+ try Right(body) catch {
+ case NonFatal(e) =>
+ // Commenting out reporting for now, since it produces too much output in the tests
+ //executor.reportFailure(e)
+ Left(e)
}
}
})
+
promise.future
}
+ private[impl] val throwableId: Throwable => Throwable = identity _
+
// an optimization for batching futures
// TODO we should replace this with a public queue,
// so that it can be stolen from
@@ -96,7 +87,7 @@ object Future {
private[impl] def dispatchFuture(executor: ExecutionContext, task: () => Unit, force: Boolean = false): Unit =
_taskStack.get match {
- case stack if (stack ne null) && !force => stack push task
+ case stack if (stack ne null) && !force => stack push task // FIXME we can't mix tasks aimed for different ExecutionContexts see: https://github.com/akka/akka/blob/v2.0.1/akka-actor/src/main/scala/akka/dispatch/Future.scala#L373
case _ => executor.execute(new Runnable {
def run() {
try {
@@ -104,13 +95,7 @@ object Future {
_taskStack set taskStack
while (taskStack.nonEmpty) {
val next = taskStack.pop()
- try {
- next.apply()
- } catch {
- case e =>
- // TODO catching all and continue isn't good for OOME
- executor.reportFailure(e)
- }
+ try next() catch { case NonFatal(e) => executor reportFailure e }
}
} finally {
_taskStack.remove()
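
`dispatchFuture` keeps a thread-local stack so callbacks scheduled while a batch is running are drained in the same `execute()` call. A simplified sketch of that batching idea; `BatchSketch` and `runBatched` are illustrative names, and the per-task error handling is omitted:

{{{
import scala.collection.mutable.Stack
import scala.concurrent.ExecutionContext

object BatchSketch {
  private val taskStack = new ThreadLocal[Stack[() => Unit]]()

  def runBatched(executor: ExecutionContext, task: () => Unit): Unit =
    taskStack.get match {
      case stack if stack ne null => stack push task     // already batching: just enqueue
      case _ => executor.execute(new Runnable {
        def run() {
          val stack = Stack[() => Unit](task)            // open a batch on this thread
          taskStack set stack
          try while (stack.nonEmpty) stack.pop().apply() // drain, including newly pushed tasks
          finally taskStack.remove()
        }
      })
    }
}
}}}
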
diff --git a/src/library/scala/concurrent/impl/NonFatal.scala b/src/library/scala/concurrent/impl/NonFatal.scala
new file mode 100644
index 0000000000..bc509e664c
--- /dev/null
+++ b/src/library/scala/concurrent/impl/NonFatal.scala
@@ -0,0 +1,37 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+package impl
+
+/**
+ * Extractor of non-fatal Throwables. Will not match fatal errors
+ * like VirtualMachineError (OutOfMemoryError)
+ * ThreadDeath, LinkageError and InterruptedException.
+ * StackOverflowError is matched, i.e. considered non-fatal.
+ *
+ * Usage to catch all harmless throwables:
+ * {{{
+ * try {
+ * // dangerous stuff
+ * } catch {
+ * case NonFatal(e) => log.error(e, "Something not that bad")
+ * }
+ * }}}
+ */
+private[concurrent] object NonFatal {
+
+ def unapply(t: Throwable): Option[Throwable] = t match {
+ case e: StackOverflowError ⇒ Some(e) // StackOverflowError ok even though it is a VirtualMachineError
+ // VirtualMachineError includes OutOfMemoryError and other fatal errors
+ case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError ⇒ None
+ case e ⇒ Some(e)
+ }
+
+}
+
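
The extractor is what lets the new impl code turn harmless exceptions into failed results while letting fatal errors propagate. A sketch of the pattern as it is used throughout this patch (the enclosing object name is illustrative; `NonFatal` itself is `private[concurrent]`, hence the package declaration):

{{{
package scala.concurrent
package impl

// Harmless exceptions become a Left; OutOfMemoryError, ThreadDeath, etc. keep unwinding.
object NonFatalUsageSketch {
  def lift[T](body: => T): Either[Throwable, T] =
    try Right(body) catch { case NonFatal(e) => Left(e) }
}
}}}
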
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index f7e073cb78..1d573ef818 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -11,8 +11,7 @@ package scala.concurrent.impl
import java.util.concurrent.TimeUnit.{ NANOSECONDS, MILLISECONDS }
-import java.util.concurrent.atomic.AtomicReferenceFieldUpdater
-import scala.concurrent.{Awaitable, ExecutionContext, resolve, resolver, blocking, CanAwait, TimeoutException}
+import scala.concurrent.{ Awaitable, ExecutionContext, blocking, CanAwait, TimeoutException, ExecutionException }
//import scala.util.continuations._
import scala.concurrent.util.Duration
import scala.util
@@ -22,90 +21,30 @@ import scala.annotation.tailrec
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
-
- def future = this
-
- def newPromise[S]: scala.concurrent.Promise[S] = new Promise.DefaultPromise()
-
- // TODO refine answer and return types here from Any to type parameters
- // then move this up in the hierarchy
- /*
- final def <<(value: T): Future[T] @cps[Future[Any]] = shift {
- cont: (Future[T] => Future[Any]) =>
- cont(complete(Right(value)))
- }
-
- final def <<(other: Future[T]): Future[T] @cps[Future[Any]] = shift {
- cont: (Future[T] => Future[Any]) =>
- val p = executor.promise[Any]
- val thisPromise = this
-
- thisPromise completeWith other
- thisPromise onComplete { v =>
- try {
- p completeWith cont(thisPromise)
- } catch {
- case e => p complete resolver(e)
- }
- }
-
- p.future
- }
- */
- // TODO finish this once we introduce something like dataflow streams
-
- /*
- final def <<(stream: PromiseStreamOut[T]): Future[T] @cps[Future[Any]] = shift { cont: (Future[T] => Future[Any]) =>
- val fr = executor.promise[Any]
- val f = stream.dequeue(this)
- f.onComplete { _ =>
- try {
- fr completeWith cont(f)
- } catch {
- case e =>
- fr failure e
- }
- }
- fr
- }
- */
-
+ def future: this.type = this
}
object Promise {
- def dur2long(dur: Duration): Long = if (dur.isFinite) dur.toNanos else Long.MaxValue
-
- def EmptyPending[T](): FState[T] = emptyPendingValue.asInstanceOf[FState[T]]
-
- /** Represents the internal state.
- *
- * [adriaan] it's unsound to make FState covariant (tryComplete won't type check)
- */
- sealed trait FState[T] { def value: Option[Either[Throwable, T]] }
-
- case class Pending[T](listeners: List[Either[Throwable, T] => Any] = Nil) extends FState[T] {
- def value: Option[Either[Throwable, T]] = None
+ private def resolveEither[T](source: Either[Throwable, T]): Either[Throwable, T] = source match {
+ case Left(t) => resolver(t)
+ case _ => source
}
-
- case class Success[T](value: Option[Either[Throwable, T]] = None) extends FState[T] {
- def result: T = value.get.right.get
- }
-
- case class Failure[T](value: Option[Either[Throwable, T]] = None) extends FState[T] {
- def exception: Throwable = value.get.left.get
+
+ private def resolver[T](throwable: Throwable): Either[Throwable, T] = throwable match {
+ case t: scala.runtime.NonLocalReturnControl[_] => Right(t.value.asInstanceOf[T])
+ case t: scala.util.control.ControlThrowable => Left(new ExecutionException("Boxed ControlThrowable", t))
+ case t: InterruptedException => Left(new ExecutionException("Boxed InterruptedException", t))
+ case e: Error => Left(new ExecutionException("Boxed Error", e))
+ case t => Left(t)
}
-
- private val emptyPendingValue = Pending[Nothing](Nil)
-
+
/** Default promise implementation.
*/
- class DefaultPromise[T](implicit val executor: ExecutionContext) extends AbstractPromise with Promise[T] {
- self =>
-
- updater.set(this, Promise.EmptyPending())
-
+ class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
+ updateState(null, Nil) // Start at "No callbacks" //FIXME switch to Unsafe instead of ARFU
+
protected final def tryAwait(atMost: Duration): Boolean = {
@tailrec
def awaitUnsafe(waitTimeNanos: Long): Boolean = {
@@ -115,7 +54,7 @@ object Promise {
val start = System.nanoTime()
try {
synchronized {
- while (value.isEmpty) wait(ms, ns)
+ if (!isCompleted) wait(ms, ns) // previously - this was a `while`, ending up in an infinite loop
}
} catch {
case e: InterruptedException =>
@@ -123,93 +62,76 @@ object Promise {
awaitUnsafe(waitTimeNanos - (System.nanoTime() - start))
} else
- value.isDefined
+ isCompleted
}
-
- blocking(concurrent.body2awaitable(awaitUnsafe(dur2long(atMost))), atMost)
+ //FIXME do not do this if there'll be no waiting
+ awaitUnsafe(if (atMost.isFinite) atMost.toNanos else Long.MaxValue)
}
+ @throws(classOf[TimeoutException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
- if (value.isDefined || tryAwait(atMost)) this
+ if (isCompleted || tryAwait(atMost)) this
else throw new TimeoutException("Futures timed out after [" + atMost.toMillis + "] milliseconds")
+ @throws(classOf[Exception])
def result(atMost: Duration)(implicit permit: CanAwait): T =
ready(atMost).value.get match {
case Left(e) => throw e
case Right(r) => r
}
- def value: Option[Either[Throwable, T]] = getState.value
-
- @inline
- private[this] final def updater = AbstractPromise.updater.asInstanceOf[AtomicReferenceFieldUpdater[AbstractPromise, FState[T]]]
-
- @inline
- protected final def updateState(oldState: FState[T], newState: FState[T]): Boolean = updater.compareAndSet(this, oldState, newState)
+ def value: Option[Either[Throwable, T]] = getState match {
+ case c: Either[_, _] => Some(c.asInstanceOf[Either[Throwable, T]])
+ case _ => None
+ }
- @inline
- protected final def getState: FState[T] = updater.get(this)
+ override def isCompleted(): Boolean = getState match { // Cheaper than boxing result into Option due to "def value"
+ case _: Either[_, _] => true
+ case _ => false
+ }
def tryComplete(value: Either[Throwable, T]): Boolean = {
- val callbacks: List[Either[Throwable, T] => Any] = {
- try {
- @tailrec
- def tryComplete(v: Either[Throwable, T]): List[Either[Throwable, T] => Any] = {
- getState match {
- case cur @ Pending(listeners) =>
- val newState =
- if (v.isLeft) Failure(Some(v.asInstanceOf[Left[Throwable, T]]))
- else Success(Some(v.asInstanceOf[Right[Throwable, T]]))
-
- if (updateState(cur, newState)) listeners
- else tryComplete(v)
- case _ => null
- }
+ val resolved = resolveEither(value)
+ (try {
+ @tailrec
+ def tryComplete(v: Either[Throwable, T]): List[Either[Throwable, T] => Unit] = {
+ getState match {
+ case raw: List[_] =>
+ val cur = raw.asInstanceOf[List[Either[Throwable, T] => Unit]]
+ if (updateState(cur, v)) cur else tryComplete(v)
+ case _ => null
}
- tryComplete(resolve(value))
- } finally {
- synchronized { notifyAll() } // notify any blockers from `tryAwait`
}
- }
-
- callbacks match {
+ tryComplete(resolved)
+ } finally {
+ synchronized { notifyAll() } //Notify any evil blockers
+ }) match {
case null => false
case cs if cs.isEmpty => true
- case cs =>
- Future.dispatchFuture(executor, {
- () => cs.foreach(f => notifyCompleted(f, value))
- })
- true
+ // this assumes that f(resolved) will go via dispatchFuture
+ // and notifyCompleted (see onComplete below)
+ case cs => cs.foreach(f => f(resolved)); true
}
}
- def onComplete[U](func: Either[Throwable, T] => U): this.type = {
- @tailrec // Returns whether the future has already been completed or not
- def tryAddCallback(): Boolean = {
- val cur = getState
- cur match {
- case _: Success[_] | _: Failure[_] => true
- case p: Pending[_] =>
- val pt = p.asInstanceOf[Pending[T]]
- if (updateState(pt, pt.copy(listeners = func :: pt.listeners))) false else tryAddCallback()
- }
- }
-
- if (tryAddCallback()) {
- val result = value.get
- Future.dispatchFuture(executor, {
- () => notifyCompleted(func, result)
- })
- }
+ def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
+ val bound: Either[Throwable, T] => Unit = (either: Either[Throwable, T]) =>
+ Future.dispatchFuture(executor, () => notifyCompleted(func, either))
- this
+ @tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed
+ def dispatchOrAddCallback(): Unit =
+ getState match {
+ case r: Either[_, _] => bound(r.asInstanceOf[Either[Throwable, T]])
+ case listeners: List[_] => if (updateState(listeners, bound :: listeners)) () else dispatchOrAddCallback()
+ }
+ dispatchOrAddCallback()
}
- private final def notifyCompleted(func: Either[Throwable, T] => Any, result: Either[Throwable, T]) {
+ private final def notifyCompleted(func: Either[Throwable, T] => Any, result: Either[Throwable, T])(implicit executor: ExecutionContext) {
try {
func(result)
} catch {
- case e => executor.reportFailure(e)
+ case NonFatal(e) => executor reportFailure e
}
}
}
@@ -218,18 +140,17 @@ object Promise {
*
* Useful in Future-composition when a value to contribute is already available.
*/
- final class KeptPromise[T](suppliedValue: Either[Throwable, T])(implicit val executor: ExecutionContext) extends Promise[T] {
+ final class KeptPromise[T](suppliedValue: Either[Throwable, T]) extends Promise[T] {
- val value = Some(resolve(suppliedValue))
+ val value = Some(resolveEither(suppliedValue))
+
+ override def isCompleted(): Boolean = true
def tryComplete(value: Either[Throwable, T]): Boolean = false
- def onComplete[U](func: Either[Throwable, T] => U): this.type = {
- val completedAs = value.get
- Future.dispatchFuture(executor, {
- () => func(completedAs)
- })
- this
+ def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
+ val completedAs = value.get // Avoid closing over "this"
+ Future.dispatchFuture(executor, () => func(completedAs))
}
def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
@@ -241,19 +162,3 @@ object Promise {
}
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
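
DefaultPromise now keeps a single state slot holding either the pending callback list or the final `Either` result. The sketch below restates that state machine with an `AtomicReference`; the real code uses the Unsafe-based `updateState`/`getState` above and dispatches callbacks through `Future.dispatchFuture`:

{{{
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec

// Illustrative only: List of callbacks while pending, Either[Throwable, T] once completed.
final class PromiseStateSketch[T] {
  private val state = new AtomicReference[AnyRef](Nil)    // "no callbacks yet"

  @tailrec final def tryComplete(result: Either[Throwable, T]): Boolean =
    state.get match {
      case callbacks: List[_] =>
        if (state.compareAndSet(callbacks, result)) {
          callbacks.asInstanceOf[List[Either[Throwable, T] => Unit]] foreach (_(result))
          true
        } else tryComplete(result)
      case _ => false                                      // already completed
    }

  @tailrec final def onComplete(callback: Either[Throwable, T] => Unit): Unit =
    state.get match {
      case done: Either[_, _] => callback(done.asInstanceOf[Either[Throwable, T]])
      case callbacks: List[_] =>
        if (!state.compareAndSet(callbacks, callback :: callbacks)) onComplete(callback)
    }
}
}}}
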
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index b06c6f3c63..e8921ef531 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -26,9 +26,15 @@ package concurrent {
object Await {
private[concurrent] implicit val canAwaitEvidence = new CanAwait {}
- def ready[T](awaitable: Awaitable[T], atMost: Duration): Awaitable[T] = awaitable.ready(atMost)
+ def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type = {
+ blocking(awaitable, atMost)
+ awaitable
+ }
+
+ def result[T](awaitable: Awaitable[T], atMost: Duration): T = {
+ blocking(awaitable, atMost)
+ }
- def result[T](awaitable: Awaitable[T], atMost: Duration): T = awaitable.result(atMost)
}
final class DurationOps private[concurrent] (x: Int) {
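
`Await.ready` and `Await.result` now go through `blocking`, so a blocked caller is visible to the execution context. A small usage sketch, assuming `ExecutionContext.defaultExecutionContext` and the `scala.concurrent.util.Duration` from this patch:

{{{
import java.util.concurrent.TimeUnit.SECONDS
import scala.concurrent.{ Await, Future, ExecutionContext }
import scala.concurrent.util.Duration

implicit val ec: ExecutionContext = ExecutionContext.defaultExecutionContext // assumed default

val f = Future { 21 * 2 }

Await.ready(f, Duration(5, SECONDS))                      // returns f once it is completed
val answer: Int = Await.result(f, Duration(5, SECONDS))   // 42, or rethrows the failure
}}}
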
diff --git a/src/library/scala/concurrent/util/Duration.scala b/src/library/scala/concurrent/util/Duration.scala
index 33d034da76..bab664727e 100644
--- a/src/library/scala/concurrent/util/Duration.scala
+++ b/src/library/scala/concurrent/util/Duration.scala
@@ -1,57 +1,17 @@
-/**
- * Copyright (C) 2009-2012 Typesafe Inc. <http://www.typesafe.com>
- */
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
package scala.concurrent.util
import java.util.concurrent.TimeUnit
import TimeUnit._
-import java.lang.{ Double ⇒ JDouble }
-
-object DurationImplicits {
- trait Classifier[C] {
- type R
- def convert(d: FiniteDuration): R
- }
-
- object span
- implicit object spanConvert extends Classifier[span.type] {
- type R = FiniteDuration
- def convert(d: FiniteDuration) = d
- }
-
- object fromNow
- implicit object fromNowConvert extends Classifier[fromNow.type] {
- type R = Deadline
- def convert(d: FiniteDuration) = Deadline.now + d
- }
-
- implicit def intToDurationInt(n: Int) = new DurationInt(n)
- implicit def longToDurationLong(n: Long) = new DurationLong(n)
- implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
-
- implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
- implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
- implicit def durationToPair(d: Duration) = (d.length, d.unit)
-
- /*
- * Avoid reflection based invocation by using non-duck type
- */
- class IntMult(i: Int) {
- def *(d: Duration) = d * i
- }
- implicit def intMult(i: Int) = new IntMult(i)
-
- class LongMult(l: Long) {
- def *(d: Duration) = d * l
- }
- implicit def longMult(l: Long) = new LongMult(l)
-
- class DoubleMult(f: Double) {
- def *(d: Duration) = d * f
- }
- implicit def doubleMult(f: Double) = new DoubleMult(f)
-}
+import java.lang.{ Double => JDouble }
+import language.implicitConversions
case class Deadline private (time: Duration) {
def +(other: Duration): Deadline = copy(time = time + other)
@@ -71,10 +31,7 @@ object Duration {
def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit)
def apply(length: Double, unit: TimeUnit): FiniteDuration = fromNanos(unit.toNanos(1) * length)
- def apply(length: Long, unit: String): FiniteDuration = {
- val (mult, timeUnit) = Duration.timeUnit(unit)
- new FiniteDuration(length * mult, timeUnit)
- }
+ def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit))
/**
* Construct a Duration by parsing a String. In case of a format error, a
@@ -117,11 +74,11 @@ object Duration {
def unapply(s: String): Option[Duration] = s match {
case RE(length, d, h, m, s, ms, mus, ns) ⇒
if (d ne null)
- Some(Duration(JDouble.parseDouble(length) * 86400, SECONDS))
+ Some(Duration(JDouble.parseDouble(length), DAYS))
else if (h ne null)
- Some(Duration(JDouble.parseDouble(length) * 3600, SECONDS))
+ Some(Duration(JDouble.parseDouble(length), HOURS))
else if (m ne null)
- Some(Duration(JDouble.parseDouble(length) * 60, SECONDS))
+ Some(Duration(JDouble.parseDouble(length), MINUTES))
else if (s ne null)
Some(Duration(JDouble.parseDouble(length), SECONDS))
else if (ms ne null)
@@ -142,11 +99,11 @@ object Duration {
def fromNanos(nanos: Long): FiniteDuration = {
if (nanos % 86400000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
- } else if (nanos % 1000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
- } else if (nanos % 1000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
+ Duration(nanos / 86400000000000L, DAYS)
+ } else if (nanos % 3600000000000L == 0) {
+ Duration(nanos / 3600000000000L, HOURS)
+ } else if (nanos % 60000000000L == 0) {
+ Duration(nanos / 60000000000L, MINUTES)
} else if (nanos % 1000000000L == 0) {
Duration(nanos / 1000000000L, SECONDS)
} else if (nanos % 1000000L == 0) {
@@ -161,14 +118,14 @@ object Duration {
/**
* Parse TimeUnit from string representation.
*/
- protected[util] def timeUnit(unit: String): (Long, TimeUnit) = unit.toLowerCase match {
- case "d" | "day" | "days" ⇒ (86400, SECONDS)
- case "h" | "hour" | "hours" ⇒ (3600, SECONDS)
- case "min" | "minute" | "minutes" ⇒ (60, SECONDS)
- case "s" | "sec" | "second" | "seconds" ⇒ (1, SECONDS)
- case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ (1, MILLISECONDS)
- case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ (1, MICROSECONDS)
- case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ (1, NANOSECONDS)
+ protected[util] def timeUnit(unit: String): TimeUnit = unit.toLowerCase match {
+ case "d" | "day" | "days" => DAYS
+ case "h" | "hour" | "hours" => HOURS
+ case "min" | "minute" | "minutes" => MINUTES
+ case "s" | "sec" | "second" | "seconds" => SECONDS
+ case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" => MILLISECONDS
+ case "µs" | "micro" | "micros" | "microsecond" | "microseconds" => MICROSECONDS
+ case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" => NANOSECONDS
}
val Zero: FiniteDuration = new FiniteDuration(0, NANOSECONDS)
@@ -185,26 +142,26 @@ object Duration {
}
trait Infinite {
- this: Duration ⇒
+ this: Duration =>
def +(other: Duration): Duration =
other match {
- case _: this.type ⇒ this
- case _: Infinite ⇒ throw new IllegalArgumentException("illegal addition of infinities")
- case _ ⇒ this
+ case _: this.type => this
+ case _: Infinite => throw new IllegalArgumentException("illegal addition of infinities")
+ case _ => this
}
def -(other: Duration): Duration =
other match {
- case _: this.type ⇒ throw new IllegalArgumentException("illegal subtraction of infinities")
- case _ ⇒ this
+ case _: this.type => throw new IllegalArgumentException("illegal subtraction of infinities")
+ case _ => this
}
def *(factor: Double): Duration = this
def /(factor: Double): Duration = this
def /(other: Duration): Double =
other match {
- case _: Infinite ⇒ throw new IllegalArgumentException("illegal division of infinities")
+ case _: Infinite => throw new IllegalArgumentException("illegal division of infinities")
// maybe questionable but pragmatic: Inf / 0 => Inf
- case x ⇒ Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
+ case x => Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
}
def finite_? = false
@@ -328,13 +285,9 @@ object FiniteDuration {
def compare(a: FiniteDuration, b: FiniteDuration) = a compare b
}
- def apply(length: Long, unit: TimeUnit) =
- new FiniteDuration(length, unit)
+ def apply(length: Long, unit: TimeUnit) = new FiniteDuration(length, unit)
- def apply(length: Long, unit: String) = {
- val (mult, timeUnit) = Duration.timeUnit(unit)
- new FiniteDuration(length * mult, timeUnit)
- }
+ def apply(length: Long, unit: String) = new FiniteDuration(length, Duration.timeUnit(unit))
}
@@ -348,17 +301,23 @@ class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
def toMinutes = unit.toMinutes(length)
def toHours = unit.toHours(length)
def toDays = unit.toDays(length)
- def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u)
+ def toUnit(u: TimeUnit) = toNanos.toDouble / NANOSECONDS.convert(1, u)
override def toString = this match {
- case Duration(1, SECONDS) ⇒ "1 second"
- case Duration(x, SECONDS) ⇒ x + " seconds"
- case Duration(1, MILLISECONDS) ⇒ "1 millisecond"
- case Duration(x, MILLISECONDS) ⇒ x + " milliseconds"
- case Duration(1, MICROSECONDS) ⇒ "1 microsecond"
- case Duration(x, MICROSECONDS) ⇒ x + " microseconds"
- case Duration(1, NANOSECONDS) ⇒ "1 nanosecond"
- case Duration(x, NANOSECONDS) ⇒ x + " nanoseconds"
+ case Duration(1, DAYS) => "1 day"
+ case Duration(x, DAYS) => x + " days"
+ case Duration(1, HOURS) => "1 hour"
+ case Duration(x, HOURS) => x + " hours"
+ case Duration(1, MINUTES) => "1 minute"
+ case Duration(x, MINUTES) => x + " minutes"
+ case Duration(1, SECONDS) => "1 second"
+ case Duration(x, SECONDS) => x + " seconds"
+ case Duration(1, MILLISECONDS) => "1 millisecond"
+ case Duration(x, MILLISECONDS) => x + " milliseconds"
+ case Duration(1, MICROSECONDS) => "1 microsecond"
+ case Duration(x, MICROSECONDS) => x + " microseconds"
+ case Duration(1, NANOSECONDS) => "1 nanosecond"
+ case Duration(x, NANOSECONDS) => x + " nanoseconds"
}
def compare(other: Duration) =
@@ -386,11 +345,11 @@ class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
}
}
- def *(factor: Double) = fromNanos(long2double(toNanos) * factor)
+ def *(factor: Double) = fromNanos(toNanos.toDouble * factor)
- def /(factor: Double) = fromNanos(long2double(toNanos) / factor)
+ def /(factor: Double) = fromNanos(toNanos.toDouble / factor)
- def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0
+ def /(other: Duration) = if (other.finite_?) toNanos.toDouble / other.toNanos else 0
def unary_- = Duration(-length, unit)
@@ -404,7 +363,7 @@ class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
}
class DurationInt(n: Int) {
- import DurationImplicits.Classifier
+ import duration.Classifier
def nanoseconds = Duration(n, NANOSECONDS)
def nanos = Duration(n, NANOSECONDS)
@@ -424,14 +383,14 @@ class DurationInt(n: Int) {
def seconds = Duration(n, SECONDS)
def second = Duration(n, SECONDS)
- def minutes = Duration(n * 60, SECONDS)
- def minute = Duration(n * 60, SECONDS)
+ def minutes = Duration(n, MINUTES)
+ def minute = Duration(n, MINUTES)
- def hours = Duration(n * 3600, SECONDS)
- def hour = Duration(n * 3600, SECONDS)
+ def hours = Duration(n, HOURS)
+ def hour = Duration(n, HOURS)
- def days = Duration(n * 86400, SECONDS)
- def day = Duration(n * 86400, SECONDS)
+ def days = Duration(n, DAYS)
+ def day = Duration(n, DAYS)
def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
@@ -451,18 +410,18 @@ class DurationInt(n: Int) {
def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
}
class DurationLong(n: Long) {
- import DurationImplicits.Classifier
+ import duration.Classifier
def nanoseconds = Duration(n, NANOSECONDS)
def nanos = Duration(n, NANOSECONDS)
@@ -482,14 +441,14 @@ class DurationLong(n: Long) {
def seconds = Duration(n, SECONDS)
def second = Duration(n, SECONDS)
- def minutes = Duration(n * 60, SECONDS)
- def minute = Duration(n * 60, SECONDS)
+ def minutes = Duration(n, MINUTES)
+ def minute = Duration(n, MINUTES)
- def hours = Duration(n * 3600, SECONDS)
- def hour = Duration(n * 3600, SECONDS)
+ def hours = Duration(n, HOURS)
+ def hour = Duration(n, HOURS)
- def days = Duration(n * 86400, SECONDS)
- def day = Duration(n * 86400, SECONDS)
+ def days = Duration(n, DAYS)
+ def day = Duration(n, DAYS)
def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
@@ -509,18 +468,18 @@ class DurationLong(n: Long) {
def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
}
class DurationDouble(d: Double) {
- import DurationImplicits.Classifier
+ import duration.Classifier
def nanoseconds = Duration(d, NANOSECONDS)
def nanos = Duration(d, NANOSECONDS)
@@ -540,14 +499,14 @@ class DurationDouble(d: Double) {
def seconds = Duration(d, SECONDS)
def second = Duration(d, SECONDS)
- def minutes = Duration(d * 60, SECONDS)
- def minute = Duration(d * 60, SECONDS)
+ def minutes = Duration(d, MINUTES)
+ def minute = Duration(d, MINUTES)
- def hours = Duration(d * 3600, SECONDS)
- def hour = Duration(d * 3600, SECONDS)
+ def hours = Duration(d, HOURS)
+ def hour = Duration(d, HOURS)
- def days = Duration(d * 86400, SECONDS)
- def day = Duration(d * 86400, SECONDS)
+ def days = Duration(d, DAYS)
+ def day = Duration(d, DAYS)
def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
@@ -567,12 +526,12 @@ class DurationDouble(d: Double) {
def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS))
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS))
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS))
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
}
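
The net effect of the hunks above: Duration.timeUnit now yields a TimeUnit directly instead of a (multiplier, SECONDS) pair, fromNanos reports the coarsest unit that divides the value evenly, and toString knows about DAYS/HOURS/MINUTES. A minimal sketch of the resulting behaviour, assuming the elided parts of Duration.scala are unchanged (DurationRefactorDemo is a made-up name):

    import scala.concurrent.util.{ Duration, FiniteDuration }

    object DurationRefactorDemo extends App {
      // string units map straight to a TimeUnit now, with no scaling into seconds
      val threeHours: FiniteDuration = Duration(3L, "hours")             // FiniteDuration(3, HOURS)
      // fromNanos picks the coarsest unit that divides the nanosecond count evenly
      val twoMinutes: FiniteDuration = Duration.fromNanos(120000000000L) // FiniteDuration(2, MINUTES)
      println(twoMinutes)                                                // "2 minutes", via the extended toString
    }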
diff --git a/src/library/scala/concurrent/util/Unsafe.java b/src/library/scala/concurrent/util/Unsafe.java
new file mode 100644
index 0000000000..0cd48758d5
--- /dev/null
+++ b/src/library/scala/concurrent/util/Unsafe.java
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.util;
+
+
+
+import java.lang.reflect.Field;
+
+
+
+public final class Unsafe {
+ public final static sun.misc.Unsafe instance;
+ static {
+ try {
+ sun.misc.Unsafe found = null;
+ for(Field field : sun.misc.Unsafe.class.getDeclaredFields()) {
+ if (field.getType() == sun.misc.Unsafe.class) {
+ field.setAccessible(true);
+ found = (sun.misc.Unsafe) field.get(null);
+ break;
+ }
+ }
+ if (found == null) throw new IllegalStateException("Can't find instance of sun.misc.Unsafe");
+ else instance = found;
+ } catch(Throwable t) {
+ throw new ExceptionInInitializerError(t);
+ }
+ }
+}
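
A hypothetical usage sketch (not part of the patch): the field scan above is needed because sun.misc.Unsafe.getUnsafe() rejects callers that are not loaded by the bootstrap class loader, so the singleton is read out of Unsafe's own private static field instead. Counter and UnsafeDemo below are made-up names; objectFieldOffset and compareAndSwapLong are standard sun.misc.Unsafe methods.

    import scala.concurrent.util.Unsafe

    final class Counter { @volatile var count: Long = 0L }

    object UnsafeDemo {
      private val countOffset =
        Unsafe.instance.objectFieldOffset(classOf[Counter].getDeclaredField("count"))

      // lock-free increment via compare-and-swap, the kind of use the concurrent library makes of Unsafe
      def increment(c: Counter): Long = {
        var old = c.count
        while (!Unsafe.instance.compareAndSwapLong(c, countOffset, old, old + 1))
          old = c.count
        old + 1
      }
    }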
diff --git a/src/library/scala/concurrent/util/duration/Classifier.scala b/src/library/scala/concurrent/util/duration/Classifier.scala
new file mode 100644
index 0000000000..10faf0a5ce
--- /dev/null
+++ b/src/library/scala/concurrent/util/duration/Classifier.scala
@@ -0,0 +1,9 @@
+package scala.concurrent.util.duration
+
+import scala.concurrent.util.{ FiniteDuration }
+
+trait Classifier[C] {
+ type R
+ def convert(d: FiniteDuration): R
+}
+
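To illustrate the pattern (not part of the patch): a Classifier turns the trailing argument of `5.seconds(...)` into an arbitrary result type, which is how span and fromNow in the package object further down work. asMillis is a made-up classifier, and toMillis on FiniteDuration is assumed to exist alongside the other to* conversions.

    import scala.concurrent.util.FiniteDuration
    import scala.concurrent.util.duration._

    object ClassifierDemo {
      object asMillis
      implicit object asMillisConvert extends Classifier[asMillis.type] {
        type R = Long
        def convert(d: FiniteDuration): Long = d.toMillis
      }
      val ms: Long = 5.seconds(asMillis)   // 5000L
    }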
diff --git a/src/library/scala/concurrent/util/duration/IntMult.scala b/src/library/scala/concurrent/util/duration/IntMult.scala
new file mode 100644
index 0000000000..94c58fb8c2
--- /dev/null
+++ b/src/library/scala/concurrent/util/duration/IntMult.scala
@@ -0,0 +1,18 @@
+package scala.concurrent.util.duration
+
+import scala.concurrent.util.{ Duration }
+
+/*
+ * Avoid reflection-based invocation by using concrete, non-duck-typed wrapper classes
+ */
+protected[duration] class IntMult(i: Int) {
+ def *(d: Duration) = d * i
+}
+
+protected[duration] class LongMult(i: Long) {
+ def *(d: Duration) = d * i
+}
+
+protected[duration] class DoubleMult(f: Double) {
+ def *(d: Duration) = d * f
+}
diff --git a/src/library/scala/concurrent/util/duration/package.scala b/src/library/scala/concurrent/util/duration/package.scala
new file mode 100644
index 0000000000..e3cf229c61
--- /dev/null
+++ b/src/library/scala/concurrent/util/duration/package.scala
@@ -0,0 +1,31 @@
+package scala.concurrent.util
+
+import java.util.concurrent.TimeUnit
+import language.implicitConversions
+
+package object duration {
+
+ object span
+ implicit object spanConvert extends Classifier[span.type] {
+ type R = FiniteDuration
+ def convert(d: FiniteDuration) = d
+ }
+
+ object fromNow
+ implicit object fromNowConvert extends Classifier[fromNow.type] {
+ type R = Deadline
+ def convert(d: FiniteDuration) = Deadline.now + d
+ }
+
+ implicit def intToDurationInt(n: Int) = new DurationInt(n)
+ implicit def longToDurationLong(n: Long) = new DurationLong(n)
+ implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
+
+ implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
+ implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
+ implicit def durationToPair(d: Duration) = (d.length, d.unit)
+
+ implicit def intMult(i: Int) = new IntMult(i)
+ implicit def longMult(l: Long) = new LongMult(l)
+ implicit def doubleMult(f: Double) = new DoubleMult(f)
+}
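
A minimal sketch (not part of the patch) of how the implicits above compose at a call site; DurationDslDemo is a made-up name, and the elided millis/micros variants of DurationInt are assumed to follow the same pattern as the methods shown in the diff.

    import java.util.concurrent.TimeUnit
    import scala.concurrent.util.{ Deadline, Duration, FiniteDuration }
    import scala.concurrent.util.duration._

    object DurationDslDemo extends App {
      val d: FiniteDuration  = 5.minutes               // intToDurationInt; now Duration(5, MINUTES), not (300, SECONDS)
      val twice: Duration    = 2 * d                   // intMult/IntMult, no structural-type reflection involved
      val deadline: Deadline = 5.seconds(fromNow)      // Classifier[fromNow.type] converts the duration to a Deadline
      val fromPair: Duration = (3, TimeUnit.SECONDS)   // pairIntToDuration
      println(twice)                                   // "10 minutes"
    }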
diff --git a/src/library/scala/io/BytePickle.scala b/src/library/scala/io/BytePickle.scala
index bec0cfb53f..3bb5ea9c2b 100644
--- a/src/library/scala/io/BytePickle.scala
+++ b/src/library/scala/io/BytePickle.scala
@@ -269,7 +269,7 @@ object BytePickle {
}
def string: SPU[String] = share(wrap(
- (a: Array[Byte]) => Codec fromUTF8 a mkString,
+ (a: Array[Byte]) => (Codec fromUTF8 a).mkString,
(s: String) => Codec toUTF8 s,
bytearray
))
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index d9cef0edb1..84cac88dcc 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -11,6 +11,7 @@ package scala.io
import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
import annotation.migration
+import language.implicitConversions
// Some notes about encodings for use in refining this implementation.
//
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index 3cee0ace79..319e021f04 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -100,8 +100,6 @@ object Source {
/** Create a `Source` from array of bytes, decoding
* the bytes according to codec.
*
- * @param bytes ...
- * @param enc ...
* @return the created `Source` instance.
*/
def fromBytes(bytes: Array[Byte])(implicit codec: Codec): Source =
@@ -188,7 +186,7 @@ abstract class Source extends Iterator[Char] {
var nerrors = 0
var nwarnings = 0
- private def lineNum(line: Int): String = getLines() drop (line - 1) take 1 mkString
+ private def lineNum(line: Int): String = (getLines() drop (line - 1) take 1).mkString
class LineIterator extends AbstractIterator[String] with Iterator[String] {
private[this] val sb = new StringBuilder
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
new file mode 100644
index 0000000000..df2eb0b910
--- /dev/null
+++ b/src/library/scala/language.scala
@@ -0,0 +1,124 @@
+package scala
+
+object language {
+
+ import languageFeature._
+
+ /** Where enabled, direct or indirect subclasses of trait scala.Dynamic can
+ * be defined. Unless dynamics is enabled, a definition of a class, trait,
+ * or object that has Dynamic as a base trait is rejected. Dynamic member
+ * selection of existing subclasses of trait Dynamic is unaffected;
+ * they can be used anywhere.
+ *
+ * _Why introduce the feature?_ To enable flexible DSLs and convenient interfacing
+ * with dynamic languages.
+ *
+ * _Why control it?_ Dynamic member selection can undermine static checkability
+ * of programs. Furthermore, dynamic member selection often relies on reflection,
+ * which is not available on all platforms.
+ */
+ implicit lazy val dynamics: dynamics = ???
+
+ /** Only where enabled, postfix operator notation `(expr op)` will be allowed.
+ *
+ * _Why keep the feature?_ Several DSLs written in Scala need the notation.
+ *
+ * _Why control it?_ Postfix operators interact poorly with semicolon inference.
+ * Most programmers avoid them for this reason.
+ */
+ implicit lazy val postfixOps: postfixOps = ???
+
+ /** Only where enabled, accesses to members of structural types that need
+ * reflection are supported. Reminder: A structural type is a type of the form
+ * `Parents { Decls }` where `Decls` contains declarations of new members that do
+ * not override any member in `Parents`. To access one of these members, a
+ * reflective call is needed.
+ *
+ * _Why keep the feature?_ Structural types provide great flexibility because
+ * they avoid the need to define inheritance hierarchies a priori. Besides,
+ * their definition falls out quite naturally from Scala’s concept of type refinement.
+ *
+ * _Why control it?_ Reflection is not available on all platforms. Popular tools
+ * such as ProGuard have problems dealing with it. Even where reflection is available,
+ * reflective dispatch can lead to surprising performance degradations.
+ */
+ implicit lazy val reflectiveCalls: reflectiveCalls = ???
+
+ /** Only where enabled, definitions of implicit conversions are allowed. An
+ * implicit conversion is an implicit value of unary function type `A => B`,
+ * or an implicit method that has in its first parameter section a single,
+ * non-implicit parameter. Examples:
+ *
+ * implicit def stringToInt(s: String): Int = s.length
+ * implicit val conv = (s: String) => s.length
+ * implicit def listToX(xs: List[T])(implicit f: T => X): X = …
+ *
+ * Implicit values of other types are not affected, and neither are implicit
+ * classes.
+ *
+ * _Why keep the feature?_ Implicit conversions are central to many aspects
+ * of Scala’s core libraries.
+ *
+ * _Why control it?_ Implicit conversions are known to cause many pitfalls
+ * if over-used. And there is a tendency to over-use them because they look
+ * very powerful and their effects seem to be easy to understand. Also, in
+ * most situations using implicit parameters leads to a better design than
+ * implicit conversions.
+ */
+ implicit lazy val implicitConversions: implicitConversions = ???
+
+ /** Only where this flag is enabled, higher-kinded types can be written.
+ *
+ * _Why keep the feature?_ Higher-kinded types enable the definition of very general
+ * abstractions such as functor, monad, or arrow. A significant set of advanced
+ * libraries relies on them. Higher-kinded types are also at the core of the
+ * scala-virtualized effort to produce high-performance parallel DSLs through staging.
+ *
+ * _Why control it?_ Higher kinded types in Scala lead to a Turing-complete
+ * type system, where compiler termination is no longer guaranteed. They tend
+ * to be useful mostly for type-level computation and for highly generic design
+ * patterns. The level of abstraction implied by these design patterns is often
+ * a barrier to understanding for newcomers to a Scala codebase. Some syntactic
+ * aspects of higher-kinded types are hard to understand for the uninitiated and
+ * type inference is less effective for them than for normal types. Because we are
+ * not completely happy with them yet, it is possible that some aspects of
+ * higher-kinded types will change in future versions of Scala. So an explicit
+ * enabling also serves as a warning that code involving higher-kinded types
+ * might have to be slightly revised in the future.
+ */
+ implicit lazy val higherKinds: higherKinds = ???
+
+ /** Only where enabled, existential types that cannot be expressed as wildcard
+ * types can be written and are allowed in inferred types of values or return
+ * types of methods. Existential types with wildcard type syntax such as `List[_]`,
+ * or `Map[String, _]` are not affected.
+ *
+ * _Why keep the feature?_ Existential types are needed to make sense of Java’s wildcard
+ * types and raw types and the erased types of run-time values.
+ *
+ * _Why control it?_ Having complex existential types in a code base usually makes
+ * application code very brittle, with a tendency to produce type errors with
+ * obscure error messages. Therefore, going overboard with existential types
+ * is generally perceived not to be a good idea. Also, complicated existential types
+ * might be no longer supported in a future simplification of the language.
+ */
+ implicit lazy val existentials: existentials = ???
+
+ object experimental {
+
+ import languageFeature.experimental._
+
+ /** Where enabled, macro definitions are allowed. Macro implementations and
+ * macro applications are unaffected; they can be used anywhere.
+ *
+ * _Why introduce the feature?_ Macros promise to make the language more regular,
+ * replacing ad-hoc language constructs with a general powerful abstraction
+ * capability that can express them. Macros are also a more disciplined and
+ * powerful replacement for compiler plugins.
+ *
+ * _Why control it?_ For their very power, macros can lead to code that is hard
+ * to debug and understand.
+ */
+ implicit lazy val macros: macros = ???
+ }
+}
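
A short sketch (not part of the patch) of how these members are meant to be used: importing one of them enables the feature in the importing scope and silences the corresponding feature warning (or error, for the features marked as required). LanguageImportDemo is a made-up name.

    object LanguageImportDemo {
      import language.implicitConversions
      import language.postfixOps

      // allowed without a feature warning because implicitConversions is imported above
      implicit def stringToLength(s: String): Int = s.length

      // postfix call, allowed because postfixOps is imported above
      val joined = List(1, 2, 3) mkString
    }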
diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala
new file mode 100644
index 0000000000..c990f714c1
--- /dev/null
+++ b/src/library/scala/languageFeature.scala
@@ -0,0 +1,30 @@
+package scala
+
+import annotation.meta
+
+object languageFeature {
+
+ @meta.languageFeature("extension of type scala.Dynamic", true)
+ sealed trait dynamics
+
+ @meta.languageFeature("postfix operator #", false)
+ sealed trait postfixOps
+
+ @meta.languageFeature("reflective access of structural type member #", false)
+ sealed trait reflectiveCalls
+
+ @meta.languageFeature("implicit conversion #", false)
+ sealed trait implicitConversions
+
+ @meta.languageFeature("higher-kinded type", false)
+ sealed trait higherKinds
+
+ @meta.languageFeature("#, which cannot be expressed by wildcards, ", false)
+ sealed trait existentials
+
+ object experimental {
+ @meta.languageFeature("macro definition", true)
+ sealed trait macros
+ }
+}
+
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index cb42b76b51..74daa510ca 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -12,6 +12,7 @@ package scala.math
import java.{ lang => jl }
import java.math.{ MathContext, BigDecimal => BigDec }
import scala.collection.immutable.NumericRange
+import language.implicitConversions
/**
@@ -292,7 +293,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns the absolute value of this BigDecimal
*/
- def abs: BigDecimal = this.bigDecimal abs
+ def abs: BigDecimal = this.bigDecimal.abs
/** Returns the sign of this BigDecimal, i.e.
* -1 if it is less than 0,
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index dbec30b2fe..af2ab04576 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -9,6 +9,7 @@
package scala.math
import java.math.BigInteger
+import language.implicitConversions
/**
* @author Martin Odersky
@@ -70,11 +71,7 @@ object BigInt {
new BigInt(new BigInteger(bitlength, certainty, rnd.self))
/** Constructs a randomly generated BigInt, uniformly distributed over the
- * range 0 to (2 ^ numBits - 1), inclusive.
- *
- * @param numbits ...
- * @param rnd ...
- * @return ...
+ * range `0` to `(2 ^ numBits - 1)`, inclusive.
*/
def apply(numbits: Int, rnd: scala.util.Random): BigInt =
new BigInt(new BigInteger(numbits, rnd.self))
@@ -86,10 +83,6 @@ object BigInt {
/** Translates the string representation of a `BigInt` in the
* specified `radix` into a BigInt.
- *
- * @param x ...
- * @param radix ...
- * @return ...
*/
def apply(x: String, radix: Int): BigInt =
new BigInt(new BigInteger(x, radix))
diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala
index bd8414a18d..a8ba0aa40c 100644
--- a/src/library/scala/math/Equiv.scala
+++ b/src/library/scala/math/Equiv.scala
@@ -29,7 +29,7 @@ import java.util.Comparator
* @since 2.7
*/
-trait Equiv[T] extends Any {
+trait Equiv[T] extends Any with Serializable {
/** Returns `true` iff `x` is equivalent to `y`.
*/
def equiv(x: T, y: T): Boolean
diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala
index de09b184e0..0686569c16 100644
--- a/src/library/scala/math/Fractional.scala
+++ b/src/library/scala/math/Fractional.scala
@@ -8,6 +8,8 @@
package scala.math
+import language.implicitConversions
+
/**
* @since 2.8
*/
diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala
index bb364a79b4..4b4de28228 100644
--- a/src/library/scala/math/Integral.scala
+++ b/src/library/scala/math/Integral.scala
@@ -10,6 +10,8 @@
package scala.math
+import language.implicitConversions
+
/**
* @since 2.8
*/
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index ff88e0ff4d..ee62706e49 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -8,6 +8,8 @@
package scala.math
+import language.implicitConversions
+
/**
* @since 2.8
*/
@@ -34,8 +36,8 @@ object Numeric {
def fromInt(x: Int): BigInt = BigInt(x)
def toInt(x: BigInt): Int = x.intValue
def toLong(x: BigInt): Long = x.longValue
- def toFloat(x: BigInt): Float = x.longValue.toFloat
- def toDouble(x: BigInt): Double = x.longValue.toDouble
+ def toFloat(x: BigInt): Float = x.floatValue
+ def toDouble(x: BigInt): Double = x.doubleValue
}
implicit object BigIntIsIntegral extends BigIntIsIntegral with Ordering.BigIntOrdering
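A sketch (not part of the patch) of why the toFloat/toDouble change above matters: routing through longValue keeps only the low 64 bits of a BigInt, while floatValue/doubleValue convert the full magnitude. NumericBigIntDemo is a made-up name.

    import scala.math.Numeric

    object NumericBigIntDemo extends App {
      val big = BigInt(Long.MaxValue) * 10
      println(big.longValue.toDouble)                     // -10.0: truncated to the low 64 bits first
      println(implicitly[Numeric[BigInt]].toDouble(big))  // ~9.22e19 with the corrected conversion
    }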
diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala
index b76030718f..80addea7f3 100644
--- a/src/library/scala/math/Ordered.scala
+++ b/src/library/scala/math/Ordered.scala
@@ -8,6 +8,8 @@
package scala.math
+import language.implicitConversions
+
/** A trait for data that have a single, natural ordering. See
* [[scala.math.Ordering]] before using this trait for
* more information about whether to use [[scala.math.Ordering]] instead.
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 8fc74a9d5d..ab685815a1 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -9,6 +9,7 @@
package scala.math
import java.util.Comparator
+import language.{implicitConversions, higherKinds}
/** Ordering is a trait whose instances each represent a strategy for sorting
* instances of a type.
diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala
index 0417461f85..71a7f8e5ed 100644
--- a/src/library/scala/math/package.scala
+++ b/src/library/scala/math/package.scala
@@ -115,21 +115,10 @@ package object math {
def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
- def signum(x: Double): Double =
- if (x == 0d) 0d
- else if (x < 0) -1.0
- else if (x > 0) 1.0
- else x // NaN
-
- def signum(x: Float): Float =
- if (x == 0f) 0f
- else if (x < 0) -1.0f
- else if (x > 0) 1.0f
- else x // NaN
-
- def signum(x: Long): Long = java.lang.Long.signum(x)
-
def signum(x: Int): Int = java.lang.Integer.signum(x)
+ def signum(x: Long): Long = java.lang.Long.signum(x)
+ def signum(x: Float): Float = java.lang.Math.signum(x)
+ def signum(x: Double): Double = java.lang.Math.signum(x)
// -----------------------------------------------------------------------
// root functions
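A sketch (not part of the patch) of the behavioural edge of the rewrite above: java.lang.Math.signum propagates NaN just like the removed hand-rolled code did, and additionally preserves the sign of negative zero. SignumDemo is a made-up name.

    object SignumDemo extends App {
      println(math.signum(-7.5))        // -1.0
      println(math.signum(Double.NaN))  // NaN
      println(math.signum(-0.0))        // -0.0 (the removed code returned 0.0 here)
    }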
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 1ef1911fd3..e3890d7a9d 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-import annotation.bridge
/**
* Core Scala types. They are always available without an explicit import.
@@ -80,15 +79,6 @@ package object scala {
type Range = scala.collection.immutable.Range
val Range = scala.collection.immutable.Range
- // Migrated from Predef
- @deprecated("Use Thread.currentThread instead", "2.9.0")
- def currentThread = java.lang.Thread.currentThread()
-
- // Moved back into Predef to avoid unnecessary indirection by
- // way of the scala package object within the standard library,
- // but bridged for compatibility.
- @bridge def $scope = scala.xml.TopScope
-
// Numeric types which were moved into scala.math.*
type BigDecimal = scala.math.BigDecimal
@@ -132,13 +122,4 @@ package object scala {
type unchecked = annotation.unchecked.unchecked
type volatile = annotation.volatile
*/
-
- @deprecated("use scala.annotation.Annotation instead", "2.9.0")
- type Annotation = scala.annotation.Annotation
- @deprecated("use scala.annotation.ClassfileAnnotation instead", "2.9.0")
- type ClassfileAnnotation = scala.annotation.ClassfileAnnotation
- @deprecated("use scala.annotation.StaticAnnotation instead", "2.9.0")
- type StaticAnnotation = scala.annotation.StaticAnnotation
- @deprecated("use scala.annotation.TypeConstraint instead", "2.9.0")
- type TypeConstraint = scala.annotation.TypeConstraint
}
diff --git a/src/library/scala/parallel/Future.scala b/src/library/scala/parallel/Future.scala
index f61d376093..8b71794756 100644
--- a/src/library/scala/parallel/Future.scala
+++ b/src/library/scala/parallel/Future.scala
@@ -12,6 +12,8 @@ package scala.parallel
/** A future is a function without parameters that will block the caller if
* the parallel computation associated with the function is not completed.
+ *
+ * @tparam R the type of the result
*
* @since 2.9
*/
@@ -23,7 +25,6 @@ trait Future[@specialized +R] extends (() => R) {
* '''Note:''' creating a circular dependency between futures by calling
* this method will result in a deadlock.
*
- * @tparam R the type of the result
* @return the result
* @throws the exception that was thrown during a parallel computation
*/
diff --git a/src/library/scala/reflect/ArrayTags.scala b/src/library/scala/reflect/ArrayTag.scala
index 8df7fe5f4e..ba0c075723 100644
--- a/src/library/scala/reflect/ArrayTags.scala
+++ b/src/library/scala/reflect/ArrayTag.scala
@@ -3,11 +3,17 @@ package scala.reflect
/** An `ArrayTag[T]` is a descriptor that is requested by the compiler every time
* when an array is instantiated, but the element type is unknown at compile time.
*
+ * Implicit in the contract of `ArrayTag[T]` is the fact that `T`
+ * cannot contain unresolved references to type parameters or abstract types.
+ *
* Scala library provides a standard implementation of this trait,
- * `ClassTag[T]` that explicitly carries the `java.lang.Class` erasure of type T.
+ * `ClassTag[T]` that explicitly carries the `java.lang.Class` erasure of type T
+ * and uses Java reflection to instantiate arrays.
*
* However other platforms (e.g. a Scala -> JS crosscompiler) may reimplement this trait as they see fit
* and then expose the implementation via an implicit macro.
+ *
+ * @see [[scala.reflect.api.TypeTags]]
*/
@annotation.implicitNotFound(msg = "No ArrayTag available for ${T}")
trait ArrayTag[T] {
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
new file mode 100644
index 0000000000..43e043fd40
--- /dev/null
+++ b/src/library/scala/reflect/ClassManifest.scala
@@ -0,0 +1,242 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.reflect
+
+import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
+import java.lang.{ Class => jClass }
+
+/** A `ClassManifest[T]` is an opaque descriptor for type `T`.
+ * It is used by the compiler to preserve information necessary
+ * for instantiating `Arrays` in those cases where the element type
+ * is unknown at compile time.
+ *
+ * The type-relation operators make an effort to present a more accurate
+ * picture than can be realized with erased types, but they should not be
+ * relied upon to give correct answers. In particular they are likely to
+ * be wrong when variance is involved or when a subtype has a different
+ * number of type arguments than a supertype.
+ */
+@deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
+trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with Serializable {
+ /** A class representing the type `U` to which `T` would be erased. Note
+ * that there is no subtyping relationship between `T` and `U`. */
+ def erasure: jClass[_]
+
+ private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = {
+ def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = {
+ left.nonEmpty && {
+ val next = left.head
+ val supers = next.getInterfaces.toSet ++ Option(next.getSuperclass)
+ supers(sup) || {
+ val xs = left ++ supers filterNot seen
+ loop(xs - next, seen + next)
+ }
+ }
+ }
+ loop(Set(sub), Set())
+ }
+
+ private def subargs(args1: List[OptManifest[_]], args2: List[OptManifest[_]]) = (args1 corresponds args2) {
+ // !!! [Martin] this is wrong, need to take variance into account
+ case (x: ClassManifest[_], y: ClassManifest[_]) => x <:< y
+ case (x, y) => (x eq NoManifest) && (y eq NoManifest)
+ }
+
+ /** Tests whether the type represented by this manifest is a subtype
+ * of the type represented by `that` manifest, subject to the limitations
+ * described in the header.
+ */
+ def <:<(that: ClassManifest[_]): Boolean = {
+ // All types which could conform to these types will override <:<.
+ def cannotMatch = {
+ import Manifest._
+ that.isInstanceOf[AnyValManifest[_]] || (that eq AnyVal) || (that eq Nothing) || (that eq Null)
+ }
+
+ // This is wrong, and I don't know how it can be made right
+ // without more development of Manifests, due to arity-defying
+ // relationships like:
+ //
+ // List[String] <: AnyRef
+ // Map[Int, Int] <: Iterable[(Int, Int)]
+ //
+ // Given the manifest for Map[A, B] how do I determine that a
+ // supertype has single type argument (A, B) ? I don't see how we
+ // can say whether X <:< Y when type arguments are involved except
+ // when the erasure is the same, even before considering variance.
+ !cannotMatch && {
+ // this part is wrong for not considering variance
+ if (this.erasure == that.erasure)
+ subargs(this.typeArguments, that.typeArguments)
+ // this part is wrong for punting unless the rhs has no type
+ // arguments, but it's better than a blindfolded pinata swing.
+ else
+ that.typeArguments.isEmpty && subtype(this.erasure, that.erasure)
+ }
+ }
+
+ /** Tests whether the type represented by this manifest is a supertype
+ * of the type represented by `that` manifest, subject to the limitations
+ * described in the header.
+ */
+ def >:>(that: ClassManifest[_]): Boolean =
+ that <:< this
+
+ override def canEqual(other: Any) = other match {
+ case _: ClassManifest[_] => true
+ case _ => false
+ }
+
+ /** Tests whether the type represented by this manifest is equal to
+ * the type represented by `that` manifest, subject to the limitations
+ * described in the header.
+ */
+ override def equals(that: Any): Boolean = that match {
+ case m: ClassManifest[_] => (m canEqual this) && (this.erasure == m.erasure)
+ case _ => false
+ }
+ override def hashCode = this.erasure.##
+
+ protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] =
+ java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]]
+
+ def arrayManifest: ClassManifest[Array[T]] =
+ ClassManifest.classType[Array[T]](arrayClass[T](erasure), this)
+
+ override def newArray(len: Int): Array[T] =
+ java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
+
+ def newArray2(len: Int): Array[Array[T]] =
+ java.lang.reflect.Array.newInstance(arrayClass[T](erasure), len)
+ .asInstanceOf[Array[Array[T]]]
+
+ def newArray3(len: Int): Array[Array[Array[T]]] =
+ java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](erasure)), len)
+ .asInstanceOf[Array[Array[Array[T]]]]
+
+ def newArray4(len: Int): Array[Array[Array[Array[T]]]] =
+ java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure))), len)
+ .asInstanceOf[Array[Array[Array[Array[T]]]]]
+
+ def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] =
+ java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure)))), len)
+ .asInstanceOf[Array[Array[Array[Array[Array[T]]]]]]
+
+ def newWrappedArray(len: Int): WrappedArray[T] =
+ // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
+ new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]]
+
+ def newArrayBuilder(): ArrayBuilder[T] =
+ // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
+ new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
+
+ def typeArguments: List[OptManifest[_]] = List()
+
+ protected def argString =
+ if (typeArguments.nonEmpty) typeArguments.mkString("[", ", ", "]")
+ else if (erasure.isArray) "["+ClassManifest.fromClass(erasure.getComponentType)+"]"
+ else ""
+}
+
+/** The object `ClassManifest` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ */
+@deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
+object ClassManifest {
+ val Byte = Manifest.Byte
+ val Short = Manifest.Short
+ val Char = Manifest.Char
+ val Int = Manifest.Int
+ val Long = Manifest.Long
+ val Float = Manifest.Float
+ val Double = Manifest.Double
+ val Boolean = Manifest.Boolean
+ val Unit = Manifest.Unit
+ val Any = Manifest.Any
+ val Object = Manifest.Object
+ val AnyVal = Manifest.AnyVal
+ val Nothing = Manifest.Nothing
+ val Null = Manifest.Null
+
+ def fromClass[T](clazz: jClass[T]): ClassManifest[T] = clazz match {
+ case java.lang.Byte.TYPE => Byte.asInstanceOf[ClassManifest[T]]
+ case java.lang.Short.TYPE => Short.asInstanceOf[ClassManifest[T]]
+ case java.lang.Character.TYPE => Char.asInstanceOf[ClassManifest[T]]
+ case java.lang.Integer.TYPE => Int.asInstanceOf[ClassManifest[T]]
+ case java.lang.Long.TYPE => Long.asInstanceOf[ClassManifest[T]]
+ case java.lang.Float.TYPE => Float.asInstanceOf[ClassManifest[T]]
+ case java.lang.Double.TYPE => Double.asInstanceOf[ClassManifest[T]]
+ case java.lang.Boolean.TYPE => Boolean.asInstanceOf[ClassManifest[T]]
+ case java.lang.Void.TYPE => Unit.asInstanceOf[ClassManifest[T]]
+ case _ => classType[T with AnyRef](clazz).asInstanceOf[ClassManifest[T]]
+ }
+
+ def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = Manifest.singleType(value)
+
+ /** ClassManifest for the class type `clazz`, where `clazz` is
+ * a top-level or static class.
+ * @note This no-prefix, no-arguments case is separate because it's called
+ * from ScalaRunTime.boxArray itself. If we
+ * pass varargs as arrays into this, we get an infinitely recursive call
+ * to boxArray. (Besides, having a separate case is more efficient)
+ */
+ def classType[T <: AnyRef](clazz: jClass[_]): ClassManifest[T] =
+ new ClassTypeManifest[T](None, clazz, Nil)
+
+ /** ClassManifest for the class type `clazz[args]`, where `clazz` is
+ * a top-level or static class and `args` are its type arguments */
+ def classType[T <: AnyRef](clazz: jClass[_], arg1: OptManifest[_], args: OptManifest[_]*): ClassManifest[T] =
+ new ClassTypeManifest[T](None, clazz, arg1 :: args.toList)
+
+ /** ClassManifest for the class type `clazz[args]`, where `clazz` is
+ * a class with non-package prefix type `prefix` and type arguments `args`.
+ */
+ def classType[T <: AnyRef](prefix: OptManifest[_], clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] =
+ new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
+
+ def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match {
+ case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]]
+ case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest
+ }
+
+ /** ClassManifest for the abstract type `prefix # name`. `clazz` is not
+ * strictly necessary as it could be obtained by reflection. It was
+ * added so that erasure can be calculated without reflection. */
+ def abstractType[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] =
+ new ClassManifest[T] {
+ def erasure = clazz
+ override val typeArguments = args.toList
+ override def toString = prefix.toString+"#"+name+argString
+ }
+
+ /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not
+ * strictly necessary as it could be obtained by reflection. It was
+ * added so that erasure can be calculated without reflection.
+ * todo: remove after next bootstrap
+ */
+ def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
+ new ClassManifest[T] {
+ def erasure = upperbound.erasure
+ override val typeArguments = args.toList
+ override def toString = prefix.toString+"#"+name+argString
+ }
+}
+
+/** Manifest for the class type `clazz[args]`, where `clazz` is
+ * a top-level or static class */
+private class ClassTypeManifest[T <: AnyRef](
+ prefix: Option[OptManifest[_]],
+ val erasure: jClass[_],
+ override val typeArguments: List[OptManifest[_]]) extends ClassManifest[T]
+{
+ override def toString =
+ (if (prefix.isEmpty) "" else prefix.get.toString+"#") +
+ (if (erasure.isArray) "Array" else erasure.getName) +
+ argString
+} \ No newline at end of file
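
A deliberately small sketch (not part of the patch), since the type is deprecated: the factories above still let legacy code obtain manifests and build arrays. ClassManifestDemo is a made-up name.

    import scala.reflect.ClassManifest

    object ClassManifestDemo extends App {
      val cm  = ClassManifest.fromClass(classOf[String])  // ClassManifest[String], with a deprecation warning
      val arr = cm.newArray(3)                            // Array[String] of length 3, built reflectively
      println(cm.arrayManifest)                           // manifest for Array[String]
    }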
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
new file mode 100644
index 0000000000..e485691747
--- /dev/null
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -0,0 +1,96 @@
+package scala.reflect
+
+import java.lang.{ Class => jClass }
+import scala.reflect.{ mirror => rm }
+import language.{implicitConversions, existentials}
+import scala.runtime.ScalaRunTime.arrayClass
+
+/** A `ClassTag[T]` wraps a Java class, which can be accessed via the `erasure` method.
+ *
+ * This is useful in itself, but also enables a very important use case:
+ * with this knowledge, a ClassTag can instantiate `Arrays`
+ * in those cases where the element type is unknown at compile time.
+ * Hence, ClassTag[T] conforms to the ArrayTag[T] trait.
+ *
+ * If an implicit value of type u.ClassTag[T] is required, the compiler will make one up on demand.
+ * The implicitly created value contains in its erasure field the Java class that is the result of erasing type T.
+ * In that value, any occurrences of type parameters or abstract types U which come themselves with a ClassTag
+ * or a reflect.mirror.ConcreteTypeTag are represented by the type referenced by that tag.
+ * If the type T contains unresolved references to type parameters or abstract types, a static error results.
+ *
+ * A ConcreteTypeTag member of the reflect.mirror object is convertible to a ClassTag via an implicit conversion
+ * (this is not possible to do in all reflection universes because an operation that converts a type to a Java class might not be available).
+ *
+ * @see [[scala.reflect.api.TypeTags]]
+ */
+@annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
+trait ClassTag[T] extends ArrayTag[T] with ErasureTag[T] with Equals with Serializable {
+ // please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder`
+ // class tags, and all tags in general, should be as minimalistic as possible
+
+ /** Produces a `ClassTag` that knows how to build `Array[Array[T]]` */
+ def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(erasure))
+
+ /** Produces a new array with element type `T` and length `len` */
+ def newArray(len: Int): Array[T] =
+ erasure match {
+ case java.lang.Byte.TYPE => new Array[Byte](len).asInstanceOf[Array[T]]
+ case java.lang.Short.TYPE => new Array[Short](len).asInstanceOf[Array[T]]
+ case java.lang.Character.TYPE => new Array[Char](len).asInstanceOf[Array[T]]
+ case java.lang.Integer.TYPE => new Array[Int](len).asInstanceOf[Array[T]]
+ case java.lang.Long.TYPE => new Array[Long](len).asInstanceOf[Array[T]]
+ case java.lang.Float.TYPE => new Array[Float](len).asInstanceOf[Array[T]]
+ case java.lang.Double.TYPE => new Array[Double](len).asInstanceOf[Array[T]]
+ case java.lang.Boolean.TYPE => new Array[Boolean](len).asInstanceOf[Array[T]]
+ case java.lang.Void.TYPE => new Array[Unit](len).asInstanceOf[Array[T]]
+ case _ => java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
+ }
+
+ /** case class accessories */
+ override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
+ override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.erasure == x.asInstanceOf[ClassTag[_]].erasure
+ override def hashCode = scala.runtime.ScalaRunTime.hash(erasure)
+ override def toString = "ClassTag[" + erasure + "]"
+}
+
+object ClassTag {
+ private val NothingTYPE = classOf[scala.runtime.Nothing$]
+ private val NullTYPE = classOf[scala.runtime.Null$]
+ private val ObjectTYPE = classOf[java.lang.Object]
+ private val StringTYPE = classOf[java.lang.String]
+
+ val Byte : ClassTag[scala.Byte] = new ClassTag[scala.Byte]{ def erasure = java.lang.Byte.TYPE; private def readResolve() = ClassTag.Byte }
+ val Short : ClassTag[scala.Short] = new ClassTag[scala.Short]{ def erasure = java.lang.Short.TYPE; private def readResolve() = ClassTag.Short }
+ val Char : ClassTag[scala.Char] = new ClassTag[scala.Char]{ def erasure = java.lang.Character.TYPE; private def readResolve() = ClassTag.Char }
+ val Int : ClassTag[scala.Int] = new ClassTag[scala.Int]{ def erasure = java.lang.Integer.TYPE; private def readResolve() = ClassTag.Int }
+ val Long : ClassTag[scala.Long] = new ClassTag[scala.Long]{ def erasure = java.lang.Long.TYPE; private def readResolve() = ClassTag.Long }
+ val Float : ClassTag[scala.Float] = new ClassTag[scala.Float]{ def erasure = java.lang.Float.TYPE; private def readResolve() = ClassTag.Float }
+ val Double : ClassTag[scala.Double] = new ClassTag[scala.Double]{ def erasure = java.lang.Double.TYPE; private def readResolve() = ClassTag.Double }
+ val Boolean : ClassTag[scala.Boolean] = new ClassTag[scala.Boolean]{ def erasure = java.lang.Boolean.TYPE; private def readResolve() = ClassTag.Boolean }
+ val Unit : ClassTag[scala.Unit] = new ClassTag[scala.Unit]{ def erasure = java.lang.Void.TYPE; private def readResolve() = ClassTag.Unit }
+ val Any : ClassTag[scala.Any] = new ClassTag[scala.Any]{ def erasure = ObjectTYPE; private def readResolve() = ClassTag.Any }
+ val Object : ClassTag[java.lang.Object] = new ClassTag[java.lang.Object]{ def erasure = ObjectTYPE; private def readResolve() = ClassTag.Object }
+ val AnyVal : ClassTag[scala.AnyVal] = new ClassTag[scala.AnyVal]{ def erasure = ObjectTYPE; private def readResolve() = ClassTag.AnyVal }
+ val AnyRef : ClassTag[scala.AnyRef] = new ClassTag[scala.AnyRef]{ def erasure = ObjectTYPE; private def readResolve() = ClassTag.AnyRef }
+ val Nothing : ClassTag[scala.Nothing] = new ClassTag[scala.Nothing]{ def erasure = NothingTYPE; private def readResolve() = ClassTag.Nothing }
+ val Null : ClassTag[scala.Null] = new ClassTag[scala.Null]{ def erasure = NullTYPE; private def readResolve() = ClassTag.Null }
+ val String : ClassTag[java.lang.String] = new ClassTag[java.lang.String]{ def erasure = StringTYPE; private def readResolve() = ClassTag.String }
+
+ def apply[T](erasure1: jClass[_]): ClassTag[T] =
+ erasure1 match {
+ case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]]
+ case java.lang.Short.TYPE => ClassTag.Short.asInstanceOf[ClassTag[T]]
+ case java.lang.Character.TYPE => ClassTag.Char.asInstanceOf[ClassTag[T]]
+ case java.lang.Integer.TYPE => ClassTag.Int.asInstanceOf[ClassTag[T]]
+ case java.lang.Long.TYPE => ClassTag.Long.asInstanceOf[ClassTag[T]]
+ case java.lang.Float.TYPE => ClassTag.Float.asInstanceOf[ClassTag[T]]
+ case java.lang.Double.TYPE => ClassTag.Double.asInstanceOf[ClassTag[T]]
+ case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]]
+ case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]]
+ case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]]
+ case StringTYPE => ClassTag.String.asInstanceOf[ClassTag[T]]
+ case _ => new ClassTag[T]{ def erasure = erasure1 }
+ }
+
+ def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.erasure)
+} \ No newline at end of file
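
A sketch (not part of the patch) of the intended minimal use: a context bound pulls in a ClassTag so generic code can build a properly typed array, relying on the newArray defined above and on the compiler materialising tags on demand as described in the doc comment. ClassTagDemo and fill are made-up names.

    import scala.reflect.ClassTag

    object ClassTagDemo extends App {
      def fill[T: ClassTag](n: Int, x: T): Array[T] = {
        val arr = implicitly[ClassTag[T]].newArray(n)
        var i = 0
        while (i < n) { arr(i) = x; i += 1 }
        arr
      }
      println(fill(3, "x").toList)   // List(x, x, x)
    }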
diff --git a/src/library/scala/reflect/ClassTags.scala b/src/library/scala/reflect/ClassTags.scala
deleted file mode 100644
index cde6da5539..0000000000
--- a/src/library/scala/reflect/ClassTags.scala
+++ /dev/null
@@ -1,167 +0,0 @@
-package scala.reflect
-
-import java.lang.{ Class => jClass }
-import scala.reflect.{ mirror => rm }
-
-/** A `ClassTag[T]` wraps a Java class, which can be accessed via the `erasure` method.
- *
- * This is useful in itself, but also enables very important use case.
- * Having this knowledge ClassTag can instantiate `Arrays`
- * in those cases where the element type is unknown at compile time.
- * Hence, ClassTag[T] conforms to the ArrayTag[T] trait.
- *
- * If an implicit value of type u.ClassTag[T] is required, the compiler will make one up on demand.
- * The implicitly created value contains in its erasure field the Java class that is the result of erasing type T.
- * In that value, any occurrences of type parameters or abstract types U which come themselves with a ClassTag
- * or a reflect.mirror.ConcreteTypeTag are represented by the type referenced by that tag.
- * If the type T contains unresolved references to type parameters or abstract types, a static error results.
- *
- * A ConcreteTypeTag member of the reflect.mirror object is convertible to a ClassTag via an implicit conversion
- * (this is not possible to do in all reflection universes because an operation that converts a type to a Java class might not be available). */
-// please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder`
-// class tags, and all tags in general, should be as minimalistic as possible
-@annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
-abstract case class ClassTag[T](erasure: jClass[_]) extends ArrayTag[T] {
- // quick and dirty fix to a deadlock in Predef:
- // http://groups.google.com/group/scala-internals/browse_thread/thread/977de028a4e75d6f
- // todo. fix that in a sane way
- // assert(erasure != null)
-
- /** A Scala reflection type representing T.
- * For ClassTags this representation is lossy (in their case tpe is retrospectively constructed from erasure).
- * For TypeTags and ConcreteTypeTags the representation is almost precise, because they use reification
- * (information is lost only when T refers to non-locatable symbols, which are then reified as free variables). */
- def tpe: rm.Type = rm.classToType(erasure)
-
- /** A Scala reflection symbol representing T. */
- def symbol: rm.Symbol = rm.classToSymbol(erasure)
-
- /** Produces a `ClassTag` that knows how to build `Array[Array[T]]` */
- def wrap: ClassTag[Array[T]] = {
- val arrayClazz = java.lang.reflect.Array.newInstance(erasure, 0).getClass.asInstanceOf[jClass[Array[T]]]
- ClassTag[Array[T]](arrayClazz)
- }
-
- /** Produces a new array with element type `T` and length `len` */
- def newArray(len: Int): Array[T] =
- erasure match {
- case java.lang.Byte.TYPE => new Array[Byte](len).asInstanceOf[Array[T]]
- case java.lang.Short.TYPE => new Array[Short](len).asInstanceOf[Array[T]]
- case java.lang.Character.TYPE => new Array[Char](len).asInstanceOf[Array[T]]
- case java.lang.Integer.TYPE => new Array[Int](len).asInstanceOf[Array[T]]
- case java.lang.Long.TYPE => new Array[Long](len).asInstanceOf[Array[T]]
- case java.lang.Float.TYPE => new Array[Float](len).asInstanceOf[Array[T]]
- case java.lang.Double.TYPE => new Array[Double](len).asInstanceOf[Array[T]]
- case java.lang.Boolean.TYPE => new Array[Boolean](len).asInstanceOf[Array[T]]
- case java.lang.Void.TYPE => new Array[Unit](len).asInstanceOf[Array[T]]
- case _ => java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
- }
-}
-
-object ClassTag {
- private val ObjectTYPE = classOf[java.lang.Object]
- private val StringTYPE = classOf[java.lang.String]
-
- val Byte : ClassTag[scala.Byte] = new ClassTag[scala.Byte](java.lang.Byte.TYPE) { private def readResolve() = ClassTag.Byte }
- val Short : ClassTag[scala.Short] = new ClassTag[scala.Short](java.lang.Short.TYPE) { private def readResolve() = ClassTag.Short }
- val Char : ClassTag[scala.Char] = new ClassTag[scala.Char](java.lang.Character.TYPE) { private def readResolve() = ClassTag.Char }
- val Int : ClassTag[scala.Int] = new ClassTag[scala.Int](java.lang.Integer.TYPE) { private def readResolve() = ClassTag.Int }
- val Long : ClassTag[scala.Long] = new ClassTag[scala.Long](java.lang.Long.TYPE) { private def readResolve() = ClassTag.Long }
- val Float : ClassTag[scala.Float] = new ClassTag[scala.Float](java.lang.Float.TYPE) { private def readResolve() = ClassTag.Float }
- val Double : ClassTag[scala.Double] = new ClassTag[scala.Double](java.lang.Double.TYPE) { private def readResolve() = ClassTag.Double }
- val Boolean : ClassTag[scala.Boolean] = new ClassTag[scala.Boolean](java.lang.Boolean.TYPE) { private def readResolve() = ClassTag.Boolean }
- val Unit : ClassTag[scala.Unit] = new ClassTag[scala.Unit](java.lang.Void.TYPE) { private def readResolve() = ClassTag.Unit }
- val Any : ClassTag[scala.Any] = new ClassTag[scala.Any](ObjectTYPE) { private def readResolve() = ClassTag.Any }
- val Object : ClassTag[java.lang.Object] = new ClassTag[java.lang.Object](ObjectTYPE) { private def readResolve() = ClassTag.Object }
- val AnyVal : ClassTag[scala.AnyVal] = new ClassTag[scala.AnyVal](ObjectTYPE) { private def readResolve() = ClassTag.AnyVal }
- val AnyRef : ClassTag[scala.AnyRef] = new ClassTag[scala.AnyRef](ObjectTYPE) { private def readResolve() = ClassTag.AnyRef }
- val Nothing : ClassTag[scala.Nothing] = new ClassTag[scala.Nothing](ObjectTYPE) { private def readResolve() = ClassTag.Nothing }
- val Null : ClassTag[scala.Null] = new ClassTag[scala.Null](ObjectTYPE) { private def readResolve() = ClassTag.Null }
- val String : ClassTag[java.lang.String] = new ClassTag[java.lang.String](StringTYPE) { private def readResolve() = ClassTag.String }
-
- def apply[T](clazz: jClass[_]): ClassTag[T] =
- clazz match {
- case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]]
- case java.lang.Short.TYPE => ClassTag.Short.asInstanceOf[ClassTag[T]]
- case java.lang.Character.TYPE => ClassTag.Char.asInstanceOf[ClassTag[T]]
- case java.lang.Integer.TYPE => ClassTag.Int.asInstanceOf[ClassTag[T]]
- case java.lang.Long.TYPE => ClassTag.Long.asInstanceOf[ClassTag[T]]
- case java.lang.Float.TYPE => ClassTag.Float.asInstanceOf[ClassTag[T]]
- case java.lang.Double.TYPE => ClassTag.Double.asInstanceOf[ClassTag[T]]
- case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]]
- case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]]
- case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]]
- case StringTYPE => ClassTag.String.asInstanceOf[ClassTag[T]]
- case _ => new ClassTag[T](clazz) {}
- }
-
- def apply[T](tpe: rm.Type): ClassTag[T] =
- tpe match {
- case rm.ByteTpe => ClassTag.Byte.asInstanceOf[ClassTag[T]]
- case rm.ShortTpe => ClassTag.Short.asInstanceOf[ClassTag[T]]
- case rm.CharTpe => ClassTag.Char.asInstanceOf[ClassTag[T]]
- case rm.IntTpe => ClassTag.Int.asInstanceOf[ClassTag[T]]
- case rm.LongTpe => ClassTag.Long.asInstanceOf[ClassTag[T]]
- case rm.FloatTpe => ClassTag.Float.asInstanceOf[ClassTag[T]]
- case rm.DoubleTpe => ClassTag.Double.asInstanceOf[ClassTag[T]]
- case rm.BooleanTpe => ClassTag.Boolean.asInstanceOf[ClassTag[T]]
- case rm.UnitTpe => ClassTag.Unit.asInstanceOf[ClassTag[T]]
- case rm.AnyTpe => ClassTag.Any.asInstanceOf[ClassTag[T]]
- case rm.ObjectTpe => ClassTag.Object.asInstanceOf[ClassTag[T]]
- case rm.AnyValTpe => ClassTag.AnyVal.asInstanceOf[ClassTag[T]]
- case rm.AnyRefTpe => ClassTag.AnyRef.asInstanceOf[ClassTag[T]]
- case rm.NothingTpe => ClassTag.Nothing.asInstanceOf[ClassTag[T]]
- case rm.NullTpe => ClassTag.Null.asInstanceOf[ClassTag[T]]
- case rm.StringTpe => ClassTag.String.asInstanceOf[ClassTag[T]]
- case _ => apply[T](rm.typeToClass(tpe.erasure))
- }
-
- implicit def toDeprecatedClassManifestApis[T](ctag: ClassTag[T]): DeprecatedClassManifestApis[T] = new DeprecatedClassManifestApis[T](ctag)
-}
-
-// this class should not be used directly in client code
-class DeprecatedClassManifestApis[T](ctag: ClassTag[T]) {
- import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
-
- @deprecated("Use `tpe` to analyze the underlying type", "2.10.0")
- def <:<(that: ClassManifest[_]): Boolean = ctag.tpe <:< that.tpe
-
- @deprecated("Use `tpe` to analyze the underlying type", "2.10.0")
- def >:>(that: ClassManifest[_]): Boolean = that <:< ctag
-
- @deprecated("Use `wrap` instead", "2.10.0")
- def arrayManifest: ClassManifest[Array[T]] = ctag.wrap
-
- @deprecated("Use a combination of `wrap` and `newArray` instead", "2.10.0")
- def newArray2(len: Int): Array[Array[T]] = ctag.wrap.newArray(len)
-
- @deprecated("Use a combination of `wrap` and `newArray` instead", "2.10.0")
- def newArray3(len: Int): Array[Array[Array[T]]] = ctag.wrap.wrap.newArray(len)
-
- @deprecated("Use a combination of `wrap` and `newArray` instead", "2.10.0")
- def newArray4(len: Int): Array[Array[Array[Array[T]]]] = ctag.wrap.wrap.wrap.newArray(len)
-
- @deprecated("Use a combination of `wrap` and `newArray` instead", "2.10.0")
- def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] = ctag.wrap.wrap.wrap.wrap.newArray(len)
-
- @deprecated("Use `@scala.collection.mutable.WrappedArray` object instead", "2.10.0")
- def newWrappedArray(len: Int): WrappedArray[T] =
- ctag.erasure match {
- case java.lang.Byte.TYPE => new WrappedArray.ofByte(new Array[Byte](len)).asInstanceOf[WrappedArray[T]]
- case java.lang.Short.TYPE => new WrappedArray.ofShort(new Array[Short](len)).asInstanceOf[WrappedArray[T]]
- case java.lang.Character.TYPE => new WrappedArray.ofChar(new Array[Char](len)).asInstanceOf[WrappedArray[T]]
- case java.lang.Integer.TYPE => new WrappedArray.ofInt(new Array[Int](len)).asInstanceOf[WrappedArray[T]]
- case java.lang.Long.TYPE => new WrappedArray.ofLong(new Array[Long](len)).asInstanceOf[WrappedArray[T]]
- case java.lang.Float.TYPE => new WrappedArray.ofFloat(new Array[Float](len)).asInstanceOf[WrappedArray[T]]
- case java.lang.Double.TYPE => new WrappedArray.ofDouble(new Array[Double](len)).asInstanceOf[WrappedArray[T]]
- case java.lang.Boolean.TYPE => new WrappedArray.ofBoolean(new Array[Boolean](len)).asInstanceOf[WrappedArray[T]]
- case java.lang.Void.TYPE => new WrappedArray.ofUnit(new Array[Unit](len)).asInstanceOf[WrappedArray[T]]
- case _ => new WrappedArray.ofRef[T with AnyRef](ctag.newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]]
- }
-
- @deprecated("Use `@scala.collection.mutable.ArrayBuilder` object instead", "2.10.0")
- def newArrayBuilder(): ArrayBuilder[T] = ArrayBuilder.make[T]()(ctag)
-
- @deprecated("`typeArguments` is no longer supported, and will always return an empty list. Use `@scala.reflect.TypeTag` or `@scala.reflect.ConcreteTypeTag` to capture and analyze type arguments", "2.10.0")
- def typeArguments: List[OptManifest[_]] = List()
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/DummyMirror.scala b/src/library/scala/reflect/DummyMirror.scala
new file mode 100644
index 0000000000..aa731f62db
--- /dev/null
+++ b/src/library/scala/reflect/DummyMirror.scala
@@ -0,0 +1,783 @@
+package scala.reflect
+
+import scala.reflect.api.AbsTreeGen
+import scala.reflect.api.Attachment
+import scala.reflect.api.Modifier
+import scala.reflect.api.Universe
+
+// TODO: make Dummy objects not equal to themselves
+class DummyMirror(cl: ClassLoader) extends api.Mirror {
+ // Members declared in scala.reflect.api.AnnotationInfos
+ implicit def classfileAnnotArgTag: scala.reflect.ClassTag[ClassfileAnnotArg] = notSupported()
+ type AnnotationInfo = DummyAnnotationInfo.type
+ object DummyAnnotationInfo
+ val AnnotationInfo: AnnotationInfoExtractor = DummyAnnotationInfoExtractor
+ object DummyAnnotationInfoExtractor extends AnnotationInfoExtractor {
+ def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo = DummyAnnotationInfo
+ def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])] = notSupported()
+ }
+ type ClassfileAnnotArg = AnyRef
+ type LiteralAnnotArg = DummyLiteralAnnotArg.type
+ object DummyLiteralAnnotArg
+ val LiteralAnnotArg: LiteralAnnotArgExtractor = DummyLiteralAnnotArgExtractor
+ type ArrayAnnotArg = DummyArrayAnnotArg.type
+ object DummyArrayAnnotArg
+ val ArrayAnnotArg: ArrayAnnotArgExtractor = DummyArrayAnnotArgExtractor
+ type NestedAnnotArg = DummyNestedAnnotArg.type
+ object DummyNestedAnnotArg
+ val NestedAnnotArg: NestedAnnotArgExtractor = DummyNestedAnnotArgExtractor
+ object DummyLiteralAnnotArgExtractor extends LiteralAnnotArgExtractor {
+ def apply(const: Constant): LiteralAnnotArg = DummyLiteralAnnotArg
+ def unapply(arg: LiteralAnnotArg): Option[Constant] = notSupported()
+ }
+ object DummyArrayAnnotArgExtractor extends ArrayAnnotArgExtractor {
+ def apply(const: Array[ClassfileAnnotArg]): ArrayAnnotArg = DummyArrayAnnotArg
+ def unapply(arg: ArrayAnnotArg): Option[Array[ClassfileAnnotArg]] = notSupported()
+ }
+ object DummyNestedAnnotArgExtractor extends NestedAnnotArgExtractor {
+ def apply(anninfo: AnnotationInfo): NestedAnnotArg = DummyNestedAnnotArg
+ def unapply(arg: NestedAnnotArg): Option[AnnotationInfo] = notSupported()
+ }
+
+ // Members declared in scala.reflect.api.Constants
+ type Constant = DummyConstant.type
+ object DummyConstant extends AbsConstant {
+ val value: Any = notSupported()
+ def tpe: Type = notSupported()
+ def isNaN: Boolean = notSupported()
+ def booleanValue: Boolean = notSupported()
+ def byteValue: Byte = notSupported()
+ def shortValue: Short = notSupported()
+ def charValue: Char = notSupported()
+ def intValue: Int = notSupported()
+ def longValue: Long = notSupported()
+ def floatValue: Float = notSupported()
+ def doubleValue: Double = notSupported()
+ def stringValue: String = notSupported()
+ def typeValue: Type = notSupported()
+ def symbolValue: Symbol = notSupported()
+ def convertTo(pt: Type): Constant = notSupported()
+ }
+ val Constant: ConstantExtractor = DummyConstantExtractor
+ object DummyConstantExtractor extends ConstantExtractor {
+ def apply(const: Any): Constant = DummyConstant
+ def unapply(arg: Constant): Option[Any] = notSupported()
+ }
+
+ // Members declared in scala.reflect.api.FreeVars
+ type FreeTerm = DummyFreeTerm.type
+ val DummyFreeTerm = DummySymbol
+ val FreeTerm: FreeTermExtractor = DummyFreeTermExtractor
+ object DummyFreeTermExtractor extends FreeTermExtractor {
+ def unapply(freeTerm: FreeTerm): Option[(TermName, Type, Any, String)] = notSupported()
+ }
+ type FreeType = DummyFreeType.type
+ val DummyFreeType = DummySymbol
+ val FreeType: FreeTypeExtractor = DummyFreeTypeExtractor
+ object DummyFreeTypeExtractor extends FreeTypeExtractor {
+ def unapply(freeType: FreeType): Option[(TypeName, Type, String)] = notSupported()
+ }
+ def freeTerms(tree: Tree): List[FreeTerm] = notSupported()
+ def freeTypes(tree: Tree): List[FreeType] = notSupported()
+ def substituteFreeTypes(tpe: Type,subs: Map[FreeType,Type]): Type = notSupported()
+ def substituteFreeTypes(tree: Tree,subs: Map[FreeType,Type]): Tree = notSupported()
+
+ // Members declared in scala.reflect.api.Importers
+ def mkImporter(from0: scala.reflect.api.Universe): Importer{val from: from0.type} = notSupported()
+
+ // Members declared in scala.reflect.api.Mirror
+ def classLoader: ClassLoader = cl
+ def classLoader_=(x$1: ClassLoader): Unit = notSupported()
+ def classToSymbol(clazz: Class[_]): Symbol = notSupported()
+ def classToType(clazz: Class[_]): Type = notSupported()
+ def companionInstance(clazz: Symbol): AnyRef = notSupported()
+ def getValueOfField(receiver: AnyRef,field: Symbol): Any = notSupported()
+ def invoke(receiver: AnyRef,meth: Symbol)(args: Any*): Any = notSupported()
+ def setValueOfField(receiver: AnyRef,field: Symbol,value: Any): Unit = notSupported()
+ def symbolForName(name: String): Symbol = notSupported()
+ def symbolOfInstance(instance: Any): Symbol = notSupported()
+ def symbolToClass(sym: Symbol): Class[_] = notSupported()
+ def typeOfInstance(instance: Any): Type = notSupported()
+ def typeToClass(tpe: Type): Class[_] = notSupported()
+
+ // Members declared in scala.reflect.api.Names
+ type Name = DummyName.type
+ type TypeName = DummyName.type
+ type TermName = DummyName.type
+ object DummyName extends AbsName {
+ def isTermName: Boolean = notSupported()
+ def isTypeName: Boolean = notSupported()
+ def toTermName: TermName = notSupported()
+ def toTypeName: TypeName = notSupported()
+ def decoded: String = notSupported()
+ def encoded: String = notSupported()
+ def decodedName: Name = notSupported()
+ def encodedName: Name = notSupported()
+ }
+ def newTermName(s: String): TermName = notSupported()
+ def newTypeName(s: String): TypeName = notSupported()
+
+ // Members declared in scala.reflect.api.Positions
+ type Position = DummyPosition.type
+ object DummyPosition extends api.Position {
+ def pos: Position = notSupported()
+ def withPos(newPos: scala.reflect.api.Position): Attachment = notSupported()
+ def payload: Any = notSupported()
+ def withPayload(newPayload: Any): Attachment = notSupported()
+ def fileInfo: java.io.File = notSupported()
+ def fileContent: Array[Char] = notSupported()
+ def isDefined: Boolean = notSupported()
+ def isTransparent: Boolean = notSupported()
+ def isRange: Boolean = notSupported()
+ def isOpaqueRange: Boolean = notSupported()
+ def makeTransparent: Position = notSupported()
+ def start: Int = notSupported()
+ def startOrPoint: Int = notSupported()
+ def point: Int = notSupported()
+ def pointOrElse(default: Int): Int = notSupported()
+ def end: Int = notSupported()
+ def endOrPoint: Int = notSupported()
+ def withStart(off: Int): Position = notSupported()
+ def withEnd(off: Int): Position = notSupported()
+ def withPoint(off: Int): Position = notSupported()
+ def union(pos: scala.reflect.api.Position): Position = notSupported()
+ def focusStart: Position = notSupported()
+ def focus: Position = notSupported()
+ def focusEnd: Position = notSupported()
+ def includes(pos: scala.reflect.api.Position): Boolean = notSupported()
+ def properlyIncludes(pos: scala.reflect.api.Position): Boolean = notSupported()
+ def precedes(pos: scala.reflect.api.Position): Boolean = notSupported()
+ def properlyPrecedes(pos: scala.reflect.api.Position): Boolean = notSupported()
+ def overlaps(pos: scala.reflect.api.Position): Boolean = notSupported()
+ def sameRange(pos: scala.reflect.api.Position): Boolean = notSupported()
+ def line: Int = notSupported()
+ def column: Int = notSupported()
+ def toSingleLine: Position = notSupported()
+ def lineContent: String = notSupported()
+ def show: String = notSupported()
+ }
+ val NoPosition: Position = DummyPosition
+ def atPos[T <: Tree](pos: Position)(tree: T): T = tree
+ def ensureNonOverlapping(tree: Tree,others: List[Tree]): Unit = notSupported()
+ def wrappingPos(trees: List[Tree]): Position = notSupported()
+ def wrappingPos(default: Position,trees: List[Tree]): Position = notSupported()
+
+ // Members declared in scala.reflect.api.FrontEnds
+ def mkConsoleFrontEnd(minSeverity: Int): FrontEnd = notSupported()
+
+ // Members declared in scala.reflect.api.Scopes
+ type Scope = DummyScope.type
+ object DummyScope extends Iterable[Symbol] {
+ def iterator: Iterator[Symbol] = notSupported()
+ }
+ def newScope: Scope = DummyScope
+ def newScopeWith(elems: Symbol*): Scope = DummyScope
+ def newNestedScope(outer: Scope): Scope = DummyScope
+
+ // Members declared in scala.reflect.api.StandardDefinitions
+ val AnyRefTpe: Type = DummyType
+ val AnyTpe: Type = DummyType
+ val AnyValTpe: Type = DummyType
+ val BooleanTpe: Type = DummyType
+ val ByteTpe: Type = DummyType
+ val CharTpe: Type = DummyType
+ val DoubleTpe: Type = DummyType
+ val FloatTpe: Type = DummyType
+ val IntTpe: Type = DummyType
+ val LongTpe: Type = DummyType
+ val NothingTpe: Type = DummyType
+ val NullTpe: Type = DummyType
+ val ObjectTpe: Type = DummyType
+ val ShortTpe: Type = DummyType
+ val StringTpe: Type = DummyType
+ val UnitTpe: Type = DummyType
+ val definitions: AbsDefinitions = DummyDefinitions
+ object DummyDefinitions extends AbsDefinitions {
+ def ByNameParamClass = DummySymbol
+ def JavaRepeatedParamClass = DummySymbol
+ def RepeatedParamClass = DummySymbol
+ def AnyClass = DummyClassSymbol
+ def AnyRefClass = DummyTypeSymbol
+ def AnyValClass = DummyClassSymbol
+ def ArrayClass = DummyClassSymbol
+ def ArrayModule = DummySymbol
+ def ArrayModule_overloadedApply = DummySymbol
+ def Array_apply = DummySymbol
+ def Array_clone = DummySymbol
+ def Array_length = DummySymbol
+ def Array_update = DummySymbol
+ def BooleanClass = DummyClassSymbol
+ def ByteClass = DummyClassSymbol
+ def CharClass = DummyClassSymbol
+ def ClassClass = DummyClassSymbol
+ def ClassTagClass = DummyClassSymbol
+ def ClassTagModule = DummySymbol
+ def ConcreteTypeTagClass = DummyClassSymbol
+ def ConcreteTypeTagModule = DummySymbol
+ def ConsClass = DummySymbol
+ def DoubleClass = DummyClassSymbol
+ def EmptyPackage = DummyPackageSymbol
+ def EmptyPackageClass = DummySymbol
+ def FloatClass = DummyClassSymbol
+ def FunctionClass: Array[Symbol] = Array()
+ def IntClass = DummyClassSymbol
+ def IterableClass = DummySymbol
+ def IteratorClass = DummySymbol
+ def IteratorModule = DummySymbol
+ def Iterator_apply = DummySymbol
+ def JavaLangPackage = DummyPackageSymbol
+ def JavaLangPackageClass = DummySymbol
+ def ListClass = DummyClassSymbol
+ def ListModule = DummyModuleSymbol
+ def List_apply = DummySymbol
+ def LongClass = DummyClassSymbol
+ def NilModule = DummySymbol
+ def NoneModule = DummySymbol
+ def NothingClass = DummyClassSymbol
+ def NullClass = DummyClassSymbol
+ def ObjectClass = DummyClassSymbol
+ def OptionClass = DummySymbol
+ def PredefModule = DummyModuleSymbol
+ def ProductClass: Array[Symbol] = Array()
+ def RootClass = DummyClassSymbol
+ def RootPackage = DummyPackageSymbol
+ def ScalaPackage = DummyPackageSymbol
+ def ScalaPackageClass = DummySymbol
+ def ScalaPrimitiveValueClasses = Nil
+ def SeqClass = DummySymbol
+ def SeqModule = DummySymbol
+ def ShortClass = DummyClassSymbol
+ def SomeClass = DummySymbol
+ def SomeModule = DummySymbol
+ def StringBuilderClass = DummySymbol
+ def StringClass = DummyClassSymbol
+ def SymbolClass = DummySymbol
+ def TraversableClass = DummySymbol
+ def TupleClass: Array[Symbol] = Array()
+ def TypeTagClass = DummyClassSymbol
+ def TypeTagModule = DummySymbol
+ def UnitClass = DummyClassSymbol
+ def isNumericValueClass(sym: Symbol): Boolean = notSupported()
+ def isPrimitiveValueClass(sym: Symbol): Boolean = notSupported()
+ def vmClassType(arg: Type): Type = DummyType
+ def vmSignature(sym: Symbol,info: Type): String = notSupported()
+ }
+
+ // Members declared in scala.reflect.api.StandardNames
+ val nme: AbsTermNames = DummyAbsTermNames
+ val tpnme: AbsTypeNames = DummyAbsTypeNames
+ object DummyAbsTermNames extends AbsTermNames {
+ type NameType = TermName
+ val EMPTY: NameType = DummyName
+ val ANON_FUN_NAME: NameType = DummyName
+ val ANON_CLASS_NAME: NameType = DummyName
+ val EMPTY_PACKAGE_NAME: NameType = DummyName
+ val IMPORT: NameType = DummyName
+ val MODULE_VAR_SUFFIX: NameType = DummyName
+ val ROOT: NameType = DummyName
+ val PACKAGE: NameType = DummyName
+ val SPECIALIZED_SUFFIX: NameType = DummyName
+ val ERROR: NameType = DummyName
+ val NO_NAME: NameType = DummyName
+ val WILDCARD: NameType = DummyName
+ def flattenedName(segments: Name*): NameType = notSupported()
+ val EXPAND_SEPARATOR_STRING: String = ""
+ val ANYNAME: TermName = DummyName
+ val CONSTRUCTOR: TermName = DummyName
+ val FAKE_LOCAL_THIS: TermName = DummyName
+ val INITIALIZER: TermName = DummyName
+ val LAZY_LOCAL: TermName = DummyName
+ val LOCAL_SUFFIX_STRING: String = ""
+ val MIRROR_PREFIX: TermName = DummyName
+ val MIRROR_SHORT: TermName = DummyName
+ val MIRROR_FREE_PREFIX: TermName = DummyName
+ val MIRROR_FREE_THIS_SUFFIX: TermName = DummyName
+ val MIRROR_FREE_VALUE_SUFFIX: TermName = DummyName
+ val MIRROR_SYMDEF_PREFIX: TermName = DummyName
+ val MIXIN_CONSTRUCTOR: TermName = DummyName
+ val MODULE_INSTANCE_FIELD: TermName = DummyName
+ val OUTER: TermName = DummyName
+ val OUTER_LOCAL: TermName = DummyName
+ val OUTER_SYNTH: TermName = DummyName
+ val SELECTOR_DUMMY: TermName = DummyName
+ val SELF: TermName = DummyName
+ val SPECIALIZED_INSTANCE: TermName = DummyName
+ val STAR: TermName = DummyName
+ val THIS: TermName = DummyName
+ val BITMAP_NORMAL: TermName = DummyName
+ val BITMAP_TRANSIENT: TermName = DummyName
+ val BITMAP_CHECKINIT: TermName = DummyName
+ val BITMAP_CHECKINIT_TRANSIENT: TermName = DummyName
+ val INTERPRETER_IMPORT_WRAPPER: String = ""
+ val INTERPRETER_LINE_PREFIX: String = ""
+ val INTERPRETER_VAR_PREFIX: String = ""
+ val INTERPRETER_WRAPPER_SUFFIX: String = ""
+ val ROOTPKG: TermName = DummyName
+ val ADD: TermName = DummyName
+ val AND: TermName = DummyName
+ val ASR: TermName = DummyName
+ val DIV: TermName = DummyName
+ val EQ: TermName = DummyName
+ val EQL: TermName = DummyName
+ val GE: TermName = DummyName
+ val GT: TermName = DummyName
+ val HASHHASH: TermName = DummyName
+ val LE: TermName = DummyName
+ val LSL: TermName = DummyName
+ val LSR: TermName = DummyName
+ val LT: TermName = DummyName
+ val MINUS: TermName = DummyName
+ val MOD: TermName = DummyName
+ val MUL: TermName = DummyName
+ val NE: TermName = DummyName
+ val OR: TermName = DummyName
+ val PLUS : TermName = DummyName
+ val SUB: TermName = DummyName
+ val XOR: TermName = DummyName
+ val ZAND: TermName = DummyName
+ val ZOR: TermName = DummyName
+ val UNARY_~ : TermName = DummyName
+ val UNARY_+ : TermName = DummyName
+ val UNARY_- : TermName = DummyName
+ val UNARY_! : TermName = DummyName
+ val ??? : TermName = DummyName
+ val MODULE_SUFFIX_NAME: TermName = DummyName
+ val NAME_JOIN_NAME: TermName = DummyName
+ val IMPL_CLASS_SUFFIX: String = ""
+ val LOCALDUMMY_PREFIX: String = ""
+ val PROTECTED_PREFIX: String = ""
+ val PROTECTED_SET_PREFIX: String = ""
+ val SINGLETON_SUFFIX: String = ""
+ val SUPER_PREFIX_STRING: String = ""
+ val TRAIT_SETTER_SEPARATOR_STRING: String = ""
+ val SETTER_SUFFIX: TermName = DummyName
+ def isConstructorName(name: Name): Boolean = notSupported()
+ def isExceptionResultName(name: Name): Boolean = notSupported()
+ def isImplClassName(name: Name): Boolean = notSupported()
+ def isLocalDummyName(name: Name): Boolean = notSupported()
+ def isLocalName(name: Name): Boolean = notSupported()
+ def isLoopHeaderLabel(name: Name): Boolean = notSupported()
+ def isProtectedAccessorName(name: Name): Boolean = notSupported()
+ def isSuperAccessorName(name: Name): Boolean = notSupported()
+ def isReplWrapperName(name: Name): Boolean = notSupported()
+ def isSetterName(name: Name): Boolean = notSupported()
+ def isTraitSetterName(name: Name): Boolean = notSupported()
+ def isSingletonName(name: Name): Boolean = notSupported()
+ def isModuleName(name: Name): Boolean = notSupported()
+ def isOpAssignmentName(name: Name): Boolean = notSupported()
+ def segments(name: String, assumeTerm: Boolean): List[Name] = notSupported()
+ def originalName(name: Name): Name = notSupported()
+ def stripModuleSuffix(name: Name): Name = notSupported()
+ def unspecializedName(name: Name): Name = notSupported()
+ def splitSpecializedName(name: Name): (Name, String, String) = notSupported()
+ def dropLocalSuffix(name: Name): Name = notSupported()
+ def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName = notSupported()
+ def expandedSetterName(name: TermName, base: Symbol): TermName = notSupported()
+ def protName(name: Name): TermName = notSupported()
+ def protSetterName(name: Name): TermName = notSupported()
+ def getterName(name: TermName): TermName = notSupported()
+ def getterToLocal(name: TermName): TermName = notSupported()
+ def getterToSetter(name: TermName): TermName = notSupported()
+ def localToGetter(name: TermName): TermName = notSupported()
+ def setterToGetter(name: TermName): TermName = notSupported()
+ def defaultGetterName(name: Name, pos: Int): TermName = notSupported()
+ def defaultGetterToMethod(name: Name): TermName = notSupported()
+ def localDummyName(clazz: Symbol): TermName = notSupported()
+ def superName(name: Name): TermName = notSupported()
+ }
+ object DummyAbsTypeNames extends AbsTypeNames {
+ type NameType = TypeName
+ val EMPTY: NameType = DummyName
+ val ANON_FUN_NAME: NameType = DummyName
+ val ANON_CLASS_NAME: NameType = DummyName
+ val EMPTY_PACKAGE_NAME: NameType = DummyName
+ val IMPORT: NameType = DummyName
+ val MODULE_VAR_SUFFIX: NameType = DummyName
+ val ROOT: NameType = DummyName
+ val PACKAGE: NameType = DummyName
+ val SPECIALIZED_SUFFIX: NameType = DummyName
+ val ERROR: NameType = DummyName
+ val NO_NAME: NameType = DummyName
+ val WILDCARD: NameType = DummyName
+ def flattenedName(segments: Name*): NameType = notSupported()
+ val REFINE_CLASS_NAME: TypeName = DummyName
+ val BYNAME_PARAM_CLASS_NAME: TypeName = DummyName
+ val EQUALS_PATTERN_NAME: TypeName = DummyName
+ val JAVA_REPEATED_PARAM_CLASS_NAME: TypeName = DummyName
+ val LOCAL_CHILD: TypeName = DummyName
+ val REPEATED_PARAM_CLASS_NAME: TypeName = DummyName
+ val WILDCARD_STAR: TypeName = DummyName
+
+ def dropSingletonName(name: Name): TypeName = notSupported()
+ def singletonName(name: Name): TypeName = notSupported()
+ def implClassName(name: Name): TypeName = notSupported()
+ def interfaceName(implname: Name): TypeName = notSupported()
+ }
+
+ // Members declared in scala.reflect.api.Symbols
+ val NoSymbol = DummySymbol
+ type Symbol = DummySymbolApi
+ object DummySymbol extends DummySymbolApi
+ type TypeSymbol = DummyTypeSymbolApi
+ object DummyTypeSymbol extends DummyTypeSymbolApi
+ type TermSymbol = DummyTermSymbolApi
+ object DummyTermSymbol extends DummyTermSymbolApi
+ type MethodSymbol = DummyMethodSymbolApi
+ object DummyMethodSymbol extends DummyMethodSymbolApi
+ type ModuleSymbol = DummyModuleSymbolApi
+ object DummyModuleSymbol extends DummyModuleSymbolApi
+ type PackageSymbol = DummyPackageSymbolApi
+ object DummyPackageSymbol extends DummyPackageSymbolApi
+ type ClassSymbol = DummyClassSymbolApi
+ object DummyClassSymbol extends DummyClassSymbolApi
+ trait DummySymbolApi extends AbsSymbol {
+ this: Symbol =>
+
+ def pos: Position = notSupported()
+ def modifiers: Set[Modifier] = notSupported()
+ def hasModifier(mod: Modifier): Boolean = notSupported()
+ def annotations: List[AnnotationInfo] = notSupported()
+ def hasAnnotation(sym: Symbol): Boolean = notSupported()
+ def owner: Symbol = notSupported()
+ def name: Name = notSupported()
+ def fullName: String = notSupported()
+ def id: Int = notSupported()
+ def orElse(alt: => Symbol): Symbol = notSupported()
+ def filter(cond: Symbol => Boolean): Symbol = notSupported()
+ def suchThat(cond: Symbol => Boolean): Symbol = notSupported()
+ def privateWithin: Symbol = notSupported()
+ def companionSymbol: Symbol = notSupported()
+ def moduleClass: Symbol = notSupported()
+ def enclosingTopLevelClass: Symbol = notSupported()
+ def enclosingClass: Symbol = notSupported()
+ def enclosingMethod: Symbol = notSupported()
+ def enclosingPackageClass: Symbol = notSupported()
+ def isTerm : Boolean = notSupported()
+ def isPackage : Boolean = notSupported()
+ def isMethod : Boolean = notSupported()
+ def isOverloaded : Boolean = notSupported()
+ def isFreeTerm : Boolean = notSupported()
+ def isType : Boolean = notSupported()
+ def isClass : Boolean = notSupported()
+ def isPackageClass : Boolean = notSupported()
+ def isPrimitiveValueClass: Boolean = notSupported()
+ def isDerivedValueClass: Boolean = notSupported()
+ def isAliasType : Boolean = notSupported()
+ def isAbstractType : Boolean = notSupported()
+ def isSkolem : Boolean = notSupported()
+ def isExistential : Boolean = notSupported()
+ def isFreeType : Boolean = notSupported()
+ def isContravariant : Boolean = notSupported()
+ def isCovariant : Boolean = notSupported()
+ def isErroneous : Boolean = notSupported()
+ def typeSignature: Type = notSupported()
+ def typeSignatureIn(site: Type): Type = notSupported()
+ def asType: Type = notSupported()
+ def asTypeIn(site: Type): Type = notSupported()
+ def asTypeConstructor: Type = notSupported()
+ def thisPrefix: Type = notSupported()
+ def selfType: Type = notSupported()
+ def alternatives: List[Symbol] = notSupported()
+ def resolveOverloaded(pre: Type = NoPrefix, targs: Seq[Type] = List(), actuals: Seq[Type]): Symbol = notSupported()
+ def newNestedSymbol(name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol = notSupported()
+ def setInternalFlags(flags: Long): this.type = notSupported()
+ def setTypeSignature(tpe: Type): this.type = notSupported()
+ def setAnnotations(annots: AnnotationInfo*): this.type = notSupported()
+ def kind: String = notSupported()
+ }
+ trait DummyTypeSymbolApi extends DummySymbolApi with TypeSymbolApi {
+ this: TypeSymbol =>
+ }
+ trait DummyTermSymbolApi extends DummySymbolApi with TermSymbolApi {
+ this: TermSymbol =>
+ }
+ trait DummyMethodSymbolApi extends DummyTermSymbolApi with MethodSymbolApi {
+ this: MethodSymbol =>
+ }
+ trait DummyModuleSymbolApi extends DummyTermSymbolApi with ModuleSymbolApi {
+ this: ModuleSymbol =>
+ }
+ trait DummyPackageSymbolApi extends DummyModuleSymbolApi with PackageSymbolApi {
+ this: PackageSymbol =>
+ }
+ trait DummyClassSymbolApi extends DummyTypeSymbolApi with ClassSymbolApi {
+ this: ClassSymbol =>
+ }
+
+ // Members declared in scala.reflect.api.ToolBoxes
+ def mkToolBox(frontEnd: FrontEnd, options: String): AbsToolBox = notSupported()
+
+ // Members declared in scala.reflect.api.TreeBuildUtil
+ // type TreeGen = DummyTreeGen.type // [Eugene] cannot compile if uncomment this
+ val gen: TreeGen{val global: DummyMirror.this.type} = DummyTreeGen.asInstanceOf[TreeGen{val global: DummyMirror.this.type}]
+ def modifiersFromInternalFlags(flags: Long,privateWithin: Name,annotations: List[Tree]): Modifiers = DummyModifiers
+ def newFreeExistential(name: String,info: Type,value: => Any,flags: Long,origin: String) = DummySymbol
+ def newFreeTerm(name: String,info: Type,value: => Any,flags: Long,origin: String) = DummySymbol
+ def newFreeType(name: String,info: Type,value: => Any,flags: Long,origin: String) = DummySymbol
+ def selectOverloadedMethod(owner: Symbol,name: String,index: Int) = DummySymbol
+ def selectOverloadedMethodIfDefined(owner: Symbol,name: String,index: Int) = DummySymbol
+ def selectTerm(owner: Symbol,name: String) = DummySymbol
+ def selectTermIfDefined(owner: Symbol,name: String) = DummySymbol
+ def selectType(owner: Symbol,name: String) = DummySymbol
+ def selectTypeIfDefined(owner: Symbol,name: String) = DummySymbol
+ def staticClass(fullName: String) = DummySymbol
+ def staticClassIfDefined(fullName: String) = DummySymbol
+ def staticModule(fullName: String) = DummySymbol
+ def staticModuleIfDefined(fullName: String) = DummySymbol
+ def thisModuleType(fullName: String): Type = DummyType
+ object DummyTreeGen extends AbsTreeGen {
+ val global: Universe = DummyMirror.this
+ type TreeGenTree = global.Tree
+ type TreeGenType = global.Type
+ type TreeGenSymbol = global.Symbol
+ type TreeGenName = global.Name
+ def mkAttributedQualifier(tpe: TreeGenType): TreeGenTree = notSupported()
+ def mkAttributedQualifier(tpe: TreeGenType, termSym: TreeGenSymbol): TreeGenTree = notSupported()
+ def mkAttributedRef(pre: TreeGenType, sym: TreeGenSymbol): TreeGenTree = notSupported()
+ def mkAttributedRef(sym: TreeGenSymbol): TreeGenTree = notSupported()
+ def mkAttributedThis(sym: TreeGenSymbol): TreeGenTree = notSupported()
+ def mkAttributedIdent(sym: TreeGenSymbol): TreeGenTree = notSupported()
+ def mkAttributedSelect(qual: TreeGenTree, sym: TreeGenSymbol): TreeGenTree = notSupported()
+ def mkMethodCall(target: TreeGenTree,targs: List[TreeGenType],args: List[TreeGenTree]): TreeGenTree = notSupported()
+ def mkMethodCall(receiver: TreeGenTree,method: TreeGenSymbol,targs: List[TreeGenType],args: List[TreeGenTree]): TreeGenTree = notSupported()
+ def mkMethodCall(receiver: TreeGenSymbol,methodName: TreeGenName,args: List[TreeGenTree]): TreeGenTree = notSupported()
+ def mkMethodCall(target: TreeGenTree,args: List[TreeGenTree]): TreeGenTree = notSupported()
+ def mkMethodCall(method: TreeGenSymbol,args: List[TreeGenTree]): TreeGenTree = notSupported()
+ def mkMethodCall(method: TreeGenSymbol,targs: List[TreeGenType],args: List[TreeGenTree]): TreeGenTree = notSupported()
+ def mkMethodCall(receiver: TreeGenSymbol,methodName: TreeGenName,targs: List[TreeGenType],args: List[TreeGenTree]): TreeGenTree = notSupported()
+ def mkNullaryCall(method: TreeGenSymbol,targs: List[TreeGenType]): TreeGenTree = notSupported()
+ }
+
+ // Members declared in scala.reflect.api.TreePrinters
+ def newTreePrinter(out: java.io.PrintWriter): TreePrinter = notSupported()
+
+ // Members declared in scala.reflect.api.Trees
+ def Apply(sym: Symbol,args: Tree*): Tree = Apply(EmptyTree, Nil)
+ def Bind(sym: Symbol,body: Tree): Bind = Bind(DummyName, EmptyTree)
+ def Block(stats: Tree*): Block = Block()
+ def CaseDef(pat: Tree,body: Tree): CaseDef = CaseDef(EmptyTree, EmptyTree, EmptyTree)
+ def ClassDef(sym: Symbol,impl: Template): ClassDef = ClassDef(DummyModifiers, DummyName, Nil, Template(Nil, emptyValDef, Nil))
+ def DefDef(sym: Symbol,rhs: List[List[Symbol]] => Tree): DefDef = DefDef(DummyModifiers, DummyName, Nil, Nil, EmptyTree, EmptyTree)
+ def DefDef(sym: Symbol,rhs: Tree): DefDef = DefDef(DummyModifiers, DummyName, Nil, Nil, EmptyTree, EmptyTree)
+ def DefDef(sym: Symbol,mods: Modifiers,rhs: Tree): DefDef = DefDef(DummyModifiers, DummyName, Nil, Nil, EmptyTree, EmptyTree)
+ def DefDef(sym: Symbol,vparamss: List[List[ValDef]],rhs: Tree): DefDef = DefDef(DummyModifiers, DummyName, Nil, Nil, EmptyTree, EmptyTree)
+ def DefDef(sym: Symbol,mods: Modifiers,vparamss: List[List[ValDef]],rhs: Tree): DefDef = DefDef(DummyModifiers, DummyName, Nil, Nil, EmptyTree, EmptyTree)
+ def Ident(sym: Symbol): Ident = Ident(DummyName)
+ def Ident(name: String): Ident = Ident(DummyName)
+ def LabelDef(sym: Symbol,params: List[Symbol],rhs: Tree): LabelDef = LabelDef(DummyName, Nil, EmptyTree)
+ type Modifiers = DummyModifiers.type
+ val NoMods: Modifiers = DummyModifiers
+ object DummyModifiers extends AbsModifiers {
+ def modifiers: Set[Modifier] = notSupported()
+ def hasModifier(mod: Modifier): Boolean = notSupported()
+ def privateWithin: Name = notSupported()
+ def annotations: List[Tree] = notSupported()
+ def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers = notSupported()
+ }
+ def Modifiers(mods: Set[scala.reflect.api.Modifier],privateWithin: Name,annotations: List[Tree]): Modifiers = DummyModifiers
+ def ModuleDef(sym: Symbol,impl: Template): ModuleDef = ModuleDef(DummyModifiers, DummyName, Template(Nil, emptyValDef, Nil))
+ def New(sym: Symbol,args: Tree*): Tree = New(EmptyTree)
+ def New(tpe: Type,args: Tree*): Tree = New(EmptyTree)
+ def New(tpt: Tree,argss: List[List[Tree]]): Tree = New(EmptyTree)
+ def Select(qualifier: Tree,sym: Symbol): Select = Select(EmptyTree, DummyName)
+ def Select(qualifier: Tree,name: String): Select = Select(EmptyTree, DummyName)
+ def Super(sym: Symbol,mix: TypeName): Tree = Super(EmptyTree, DummyName)
+ def This(sym: Symbol): Tree = This(DummyName)
+ def Throw(tpe: Type,args: Tree*): Throw = Throw(EmptyTree)
+ def Try(body: Tree,cases: (Tree, Tree)*): Try = Try(EmptyTree)
+ def TypeDef(sym: Symbol): TypeDef = TypeDef(DummyModifiers, DummyName, Nil, EmptyTree)
+ def TypeDef(sym: Symbol,rhs: Tree): TypeDef = TypeDef(DummyModifiers, DummyName, Nil, EmptyTree)
+ def ValDef(sym: Symbol): ValDef = ValDef(DummyModifiers, DummyName, EmptyTree, EmptyTree)
+ def ValDef(sym: Symbol,rhs: Tree): ValDef = ValDef(DummyModifiers, DummyName, EmptyTree, EmptyTree)
+ protected def duplicateTree(tree: Tree): Tree = notSupported()
+ object emptyValDef extends ValDef(DummyModifiers, DummyName, EmptyTree, EmptyTree) { override def isEmpty = true }
+ type TreeCopier = DummyTreeCopier.type
+ def newStrictTreeCopier: TreeCopier = DummyTreeCopier
+ def newLazyTreeCopier: TreeCopier = DummyTreeCopier
+ object DummyTreeCopier extends TreeCopierOps {
+ def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template): ClassDef = notSupported()
+ def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]): PackageDef = notSupported()
+ def ModuleDef(tree: Tree, mods: Modifiers, name: Name, impl: Template): ModuleDef = notSupported()
+ def ValDef(tree: Tree, mods: Modifiers, name: Name, tpt: Tree, rhs: Tree): ValDef = notSupported()
+ def DefDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef = notSupported()
+ def TypeDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree): TypeDef = notSupported()
+ def LabelDef(tree: Tree, name: Name, params: List[Ident], rhs: Tree): LabelDef = notSupported()
+ def Import(tree: Tree, expr: Tree, selectors: List[ImportSelector]): Import = notSupported()
+ def Template(tree: Tree, parents: List[Tree], self: ValDef, body: List[Tree]): Template = notSupported()
+ def Block(tree: Tree, stats: List[Tree], expr: Tree): Block = notSupported()
+ def CaseDef(tree: Tree, pat: Tree, guard: Tree, body: Tree): CaseDef = notSupported()
+ def Alternative(tree: Tree, trees: List[Tree]): Alternative = notSupported()
+ def Star(tree: Tree, elem: Tree): Star = notSupported()
+ def Bind(tree: Tree, name: Name, body: Tree): Bind = notSupported()
+ def UnApply(tree: Tree, fun: Tree, args: List[Tree]): UnApply = notSupported()
+ def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]): ArrayValue = notSupported()
+ def Function(tree: Tree, vparams: List[ValDef], body: Tree): Function = notSupported()
+ def Assign(tree: Tree, lhs: Tree, rhs: Tree): Assign = notSupported()
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg = notSupported()
+ def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree): If = notSupported()
+ def Match(tree: Tree, selector: Tree, cases: List[CaseDef]): Match = notSupported()
+ def Return(tree: Tree, expr: Tree): Return = notSupported()
+ def Try(tree: Tree, block: Tree, catches: List[CaseDef], finalizer: Tree): Try = notSupported()
+ def Throw(tree: Tree, expr: Tree): Throw = notSupported()
+ def New(tree: Tree, tpt: Tree): New = notSupported()
+ def Typed(tree: Tree, expr: Tree, tpt: Tree): Typed = notSupported()
+ def TypeApply(tree: Tree, fun: Tree, args: List[Tree]): TypeApply = notSupported()
+ def Apply(tree: Tree, fun: Tree, args: List[Tree]): Apply = notSupported()
+ def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic = notSupported()
+ def Super(tree: Tree, qual: Tree, mix: TypeName): Super = notSupported()
+ def This(tree: Tree, qual: Name): This = notSupported()
+ def Select(tree: Tree, qualifier: Tree, selector: Name): Select = notSupported()
+ def Ident(tree: Tree, name: Name): Ident = notSupported()
+ def ReferenceToBoxed(tree: Tree, idt: Ident): ReferenceToBoxed = notSupported()
+ def Literal(tree: Tree, value: Constant): Literal = notSupported()
+ def TypeTree(tree: Tree): TypeTree = notSupported()
+ def Annotated(tree: Tree, annot: Tree, arg: Tree): Annotated = notSupported()
+ def SingletonTypeTree(tree: Tree, ref: Tree): SingletonTypeTree = notSupported()
+ def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name): SelectFromTypeTree = notSupported()
+ def CompoundTypeTree(tree: Tree, templ: Template): CompoundTypeTree = notSupported()
+ def AppliedTypeTree(tree: Tree, tpt: Tree, args: List[Tree]): AppliedTypeTree = notSupported()
+ def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree): TypeBoundsTree = notSupported()
+ def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree = notSupported()
+ }
+
+ // Members declared in scala.reflect.api.Types
+ type Type = DummyType.type
+ type SingletonType = DummyType.type
+ type CompoundType = DummyType.type
+ type AnnotatedType = DummyType.type
+ val AnnotatedType: AnnotatedTypeExtractor = DummyAnnotatedTypeExtractor
+ type BoundedWildcardType = DummyType.type
+ val BoundedWildcardType: BoundedWildcardTypeExtractor = DummyBoundedWildcardTypeExtractor
+ type ClassInfoType = DummyType.type
+ val ClassInfoType: ClassInfoTypeExtractor = DummyClassInfoTypeExtractor
+ type ConstantType = DummyType.type
+ val ConstantType: ConstantTypeExtractor = DummyConstantTypeExtractor
+ type ExistentialType = DummyType.type
+ val ExistentialType: ExistentialTypeExtractor = DummyExistentialTypeExtractor
+ type MethodType = DummyType.type
+ val MethodType: MethodTypeExtractor = DummyMethodTypeExtractor
+ val NoPrefix: Type = DummyType
+ val NoType: Type = DummyType
+ type NullaryMethodType = DummyType.type
+ val NullaryMethodType: NullaryMethodTypeExtractor = DummyNullaryMethodTypeExtractor
+ type PolyType = DummyType.type
+ val PolyType: PolyTypeExtractor = DummyPolyTypeExtractor
+ type RefinedType = DummyType.type
+ val RefinedType: RefinedTypeExtractor = DummyRefinedTypeExtractor
+ type SingleType = DummyType.type
+ val SingleType: SingleTypeExtractor = DummySingleTypeExtractor
+ type SuperType = DummyType.type
+ val SuperType: SuperTypeExtractor = DummySuperTypeExtractor
+ type ThisType = DummyType.type
+ val ThisType: ThisTypeExtractor = DummyThisTypeExtractor
+ type TypeBounds = DummyType.type
+ val TypeBounds: TypeBoundsExtractor = DummyTypeBoundsExtractor
+ type TypeRef = DummyType.type
+ val TypeRef: TypeRefExtractor = DummyTypeRefExtractor
+ val WildcardType: Type = DummyType
+ def appliedType(tycon: Type,args: List[Type]): Type = DummyType
+ def existentialAbstraction(tparams: List[Symbol],tpe0: Type): Type = DummyType
+ def glb(ts: List[Type]): Type = DummyType
+ def intersectionType(tps: List[Type],owner: Symbol): Type = DummyType
+ def intersectionType(tps: List[Type]): Type = DummyType
+ def lub(xs: List[Type]): Type = DummyType
+ def polyType(tparams: List[Symbol],tpe: Type): Type = DummyType
+ def refinedType(parents: List[Type],owner: Symbol): Type = DummyType
+ def refinedType(parents: List[Type],owner: Symbol,decls: Scope,pos: Position): Type = DummyType
+ def singleType(pre: Type,sym: Symbol): Type = DummyType
+ def typeRef(pre: Type,sym: Symbol,args: List[Type]): Type = DummyType
+ object DummyType extends AbsType {
+ def =:=(that: Type): Boolean = notSupported()
+ def <:<(that: Type): Boolean = notSupported()
+ def asSeenFrom(pre: Type,clazz: Symbol): Type = notSupported()
+ def baseClasses: List[Symbol] = notSupported()
+ def baseType(clazz: Symbol): Type = notSupported()
+ def contains(sym: Symbol): Boolean = notSupported()
+ def declaration(name: Name): Symbol = notSupported()
+ def declarations: Iterable[Symbol] = notSupported()
+ def erasure: Type = notSupported()
+ def exists(p: Type => Boolean): Boolean = notSupported()
+ def find(p: Type => Boolean): Option[Type] = notSupported()
+ def foreach(f: Type => Unit): Unit = notSupported()
+ def isConcrete: Boolean = notSupported()
+ def isHigherKinded: Boolean = notSupported()
+ def isSpliceable: Boolean = notSupported()
+ def kind: String = notSupported()
+ def map(f: Type => Type): Type = notSupported()
+ def member(name: Name): Symbol = notSupported()
+ def members: Iterable[Symbol] = notSupported()
+ def nonPrivateMember(name: Name): Symbol = notSupported()
+ def nonPrivateMembers: Iterable[Symbol] = notSupported()
+ def normalize: Type = notSupported()
+ def parents: List[Type] = notSupported()
+ def substituteTypes(from: List[Symbol],to: List[Type]): Type = notSupported()
+ def typeArguments: List[Type] = notSupported()
+ def typeConstructor: Type = notSupported()
+ def typeParams: List[Symbol] = notSupported()
+ def typeSymbol: Symbol = notSupported()
+ def underlying: Type = notSupported()
+ def widen: Type = notSupported()
+ }
+ object DummyAnnotatedTypeExtractor extends AnnotatedTypeExtractor {
+ def apply(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol): AnnotatedType = DummyType
+ def unapply(tpe: AnnotatedType): Option[(List[AnnotationInfo], Type, Symbol)] = notSupported()
+ }
+ object DummyBoundedWildcardTypeExtractor extends BoundedWildcardTypeExtractor {
+ def apply(bounds: TypeBounds): BoundedWildcardType = DummyType
+ def unapply(tpe: BoundedWildcardType): Option[TypeBounds] = notSupported()
+ }
+ object DummyClassInfoTypeExtractor extends ClassInfoTypeExtractor {
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): ClassInfoType = DummyType
+ def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)] = notSupported()
+ }
+ object DummyConstantTypeExtractor extends ConstantTypeExtractor {
+ def apply(value: Constant): ConstantType = DummyType
+ def unapply(tpe: ConstantType): Option[Constant] = notSupported()
+ }
+ object DummyExistentialTypeExtractor extends ExistentialTypeExtractor {
+ def apply(quantified: List[Symbol], underlying: Type): ExistentialType = DummyType
+ def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)] = notSupported()
+ }
+ object DummyMethodTypeExtractor extends MethodTypeExtractor {
+ def apply(params: List[Symbol], resultType: Type): MethodType = DummyType
+ def unapply(tpe: MethodType): Option[(List[Symbol], Type)] = notSupported()
+ }
+ object DummyNullaryMethodTypeExtractor extends NullaryMethodTypeExtractor {
+ def apply(resultType: Type): NullaryMethodType = DummyType
+ def unapply(tpe: NullaryMethodType): Option[(Type)] = notSupported()
+ }
+ object DummyPolyTypeExtractor extends PolyTypeExtractor {
+ def apply(typeParams: List[Symbol], resultType: Type): PolyType = DummyType
+ def unapply(tpe: PolyType): Option[(List[Symbol], Type)] = notSupported()
+ }
+ object DummyRefinedTypeExtractor extends RefinedTypeExtractor {
+ def apply(parents: List[Type], decls: Scope): RefinedType = DummyType
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType = DummyType
+ def unapply(tpe: RefinedType): Option[(List[Type], Scope)] = notSupported()
+ }
+ object DummySingleTypeExtractor extends SingleTypeExtractor {
+ def apply(pre: Type, sym: Symbol): Type = DummyType
+ def unapply(tpe: SingleType): Option[(Type, Symbol)] = notSupported()
+ }
+ object DummySuperTypeExtractor extends SuperTypeExtractor {
+ def apply(thistpe: Type, supertpe: Type): Type = DummyType
+ def unapply(tpe: SuperType): Option[(Type, Type)] = notSupported()
+ }
+ object DummyThisTypeExtractor extends ThisTypeExtractor {
+ def apply(sym: Symbol): Type = DummyType
+ def unapply(tpe: ThisType): Option[Symbol] = notSupported()
+ }
+ object DummyTypeBoundsExtractor extends TypeBoundsExtractor {
+ def apply(lo: Type, hi: Type): TypeBounds = DummyType
+ def unapply(tpe: TypeBounds): Option[(Type, Type)] = notSupported()
+ }
+ object DummyTypeRefExtractor extends TypeRefExtractor {
+ def apply(pre: Type, sym: Symbol, args: List[Type]): Type = DummyType
+ def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])] = notSupported()
+ }
+
+ // Utils
+ def notSupported() = {
+ throw new UnsupportedOperationException("Scala reflection not available on this platform." + mirrorDiagnostics(cl))
+ }
+} \ No newline at end of file
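// Editorial sketch, not part of the patch above: DummyMirror stubs out the reflection API,
// so apart from classLoader every operation bottoms out in notSupported() and throws at
// runtime. Assumes the scala.reflect package of this revision; the value names are illustrative.
val dummy = new scala.reflect.DummyMirror(getClass.getClassLoader)
dummy.classLoader      // returns the ClassLoader passed to the constructor
dummy.newTermName("x") // throws UnsupportedOperationException via notSupported()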
diff --git a/src/library/scala/reflect/DynamicProxy.scala b/src/library/scala/reflect/DynamicProxy.scala
new file mode 100644
index 0000000000..3ed17fea41
--- /dev/null
+++ b/src/library/scala/reflect/DynamicProxy.scala
@@ -0,0 +1,74 @@
+package scala.reflect
+/**
+ * A dynamic proxy which redirects method calls and attribute access to a given
+ * target object at runtime using reflection.
+ *
+ * Usage example:
+ *
+ * object x { def hello = "hello world" }
+ * val d = new DynamicProxy { val dynamicProxyTarget = x }
+ * assert(d.hello == "hello world")
+ *
+ * Not supported (yet):
+ * - implicit conversions and parameters
+ * - multiple argument lists
+ * - explicit type arguments
+ */
+trait DynamicProxy extends Dynamic{
+ /** Method calls on DynamicProxy are redirected to this object. Needs to be defined in a subclass. */
+ val dynamicProxyTarget : AnyRef
+
+ import scala.reflect.mirror._
+ /**
+ * Boxing that preserves primitive type information for overload resolution.
+ */
+ case class DynamicReflectBoxed( class_ : Class[_], value: Any )
+ object DynamicReflectBoxed{
+ implicit def box[@specialized T]( v:T ) = DynamicReflectBoxed( v.getClass, v )
+ }
+
+ def selectDynamic( method:String ) = {
+ val symbol = classToType( dynamicProxyTarget.getClass ).member( newTermName(method).encodedName )
+ invoke( dynamicProxyTarget, symbol )()
+ }
+
+ def updateDynamic( method:String )( value : Any ) = {
+ val symbol = classToType( dynamicProxyTarget.getClass ).member( newTermName(method+"_=").encodedName )
+ invoke( dynamicProxyTarget, symbol )( value )
+ }
+
+ def applyDynamic( method:String )( args:DynamicReflectBoxed* ) : Any
+ = applyDynamicNamed( method )( args.map( value => ("",value) ) :_* )
+
+ def applyDynamicNamed( method:String )( args:(String,DynamicReflectBoxed)* ) : Any = {
+ val class_ = dynamicProxyTarget.getClass
+ var i = 0
+ val toolbox = mkToolBox(mkConsoleFrontEnd(),"")
+ val symbol = classToType( dynamicProxyTarget.getClass ).member( newTermName(method).encodedName )
+ if(args.size == 0){
+ invoke( dynamicProxyTarget, symbol )()
+ } else {
+ val call =
+ Apply(
+ Select(
+ TypeApply(
+ Select(
+ Select(
+ Ident(newFreeTerm("__this", symbolForName("scala.reflect.DynamicProxy").asType, this))
+ , newTermName("dynamicProxyTarget")
+ ),
+ newTermName("asInstanceOf") )
+ , List(TypeTree().setType(classToType(class_)))
+ )
+ ,newTermName(method).encodedName
+ )
+ ,args.map{ case(name,box) =>
+ val value = Ident(newFreeTerm("__arg"+({i+=1;i}.toString), classToType(box.class_), box.value))
+ if( name == "" ) value
+ else AssignOrNamedArg( Ident(name), value )
+ }.toList
+ )
+ toolbox.runExpr( call )
+ }
+ }
+}
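// Editorial sketch, not part of the patch above: beyond the zero-argument call in the scaladoc,
// calls with plain or named arguments are rewritten by the compiler into applyDynamic /
// applyDynamicNamed and resolved reflectively against dynamicProxyTarget. Object and method
// names here are illustrative only.
object greeter { def greet(name: String) = "hello " + name }
val proxy = new DynamicProxy { val dynamicProxyTarget = greeter }
proxy.greet("world")        // rewritten to proxy.applyDynamic("greet")("world")
proxy.greet(name = "world") // rewritten to proxy.applyDynamicNamed("greet")(("name", "world"))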
diff --git a/src/library/scala/reflect/ErasureTag.scala b/src/library/scala/reflect/ErasureTag.scala
new file mode 100644
index 0000000000..f95451fab2
--- /dev/null
+++ b/src/library/scala/reflect/ErasureTag.scala
@@ -0,0 +1,23 @@
+package scala.reflect
+
+import java.lang.{Class => jClass}
+
+/** An `ErasureTag[T]` is a descriptor that is requested by the compiler every time
+ * it needs to persist the erasure of a type.
+ *
+ * The Scala library provides a standard implementation of this trait,
+ * `TypeTag[T]`, which carries the `java.lang.Class` erasure for arbitrary types.
+ *
+ * However, other platforms may reimplement this trait as they see fit
+ * and then expose the implementation via an implicit macro.
+ *
+ * If you need to guarantee that the type does not contain
+ * references to type parameters or abstract types, use `ClassTag[T]`.
+ *
+ * @see [[scala.reflect.api.TypeTags]]
+ */
+@annotation.implicitNotFound(msg = "No ErasureTag available for ${T}")
+trait ErasureTag[T] {
+ /** Returns an erasure of type `T` */
+ def erasure: jClass[_]
+}
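// Editorial sketch, not part of the patch above: a consumer of an ErasureTag typically uses the
// erased java.lang.Class much like ClassTag.newArray (removed earlier in this diff), e.g. to
// allocate an array reflectively. The helper name is hypothetical.
def newErasedArray[T: ErasureTag](len: Int): AnyRef =
  java.lang.reflect.Array.newInstance(implicitly[ErasureTag[T]].erasure, len)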
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
new file mode 100644
index 0000000000..da029f046d
--- /dev/null
+++ b/src/library/scala/reflect/Manifest.scala
@@ -0,0 +1,259 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.reflect
+
+import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
+import mirror._
+
+/** A `Manifest[T]` is an opaque descriptor for type `T`. Its supported use
+ * is to give access to the erasure of the type as a `Class` instance, as
+ * is necessary for the creation of native `Arrays` if the class is not
+ * known at compile time.
+ *
+ * The type-relation operators `<:<` and `=:=` should be considered
+ * approximations only, as there are numerous aspects of type conformance
+ * which are not yet adequately represented in manifests.
+ *
+ * Example usages:
+{{{
+ def arr[T] = new Array[T](0) // does not compile
+ def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles
+ def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding
+
+ // Methods manifest, classManifest, and optManifest are in [[scala.Predef]].
+ def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U]
+ isApproxSubType[List[String], List[AnyRef]] // true
+ isApproxSubType[List[String], List[Int]] // false
+
+ def methods[T: ClassManifest] = classManifest[T].erasure.getMethods
+ def retType[T: ClassManifest](name: String) =
+ methods[T] find (_.getName == name) map (_.getGenericReturnType)
+
+ retType[Map[_, _]]("values") // Some(scala.collection.Iterable<B>)
+}}}
+ *
+ */
+@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+@deprecated("Use `@scala.reflect.ConcreteTypeTag` instead", "2.10.0")
+trait Manifest[T] extends ClassManifest[T] with Equals {
+ override def typeArguments: List[Manifest[_]] = Nil
+
+ override def arrayManifest: Manifest[Array[T]] =
+ Manifest.classType[Array[T]](arrayClass[T](erasure), this)
+
+ override def canEqual(that: Any): Boolean = that match {
+ case _: Manifest[_] => true
+ case _ => false
+ }
+ /** Note: testing for erasure here is important, as it is many times
+ * faster than <:< and rules out most comparisons.
+ */
+ override def equals(that: Any): Boolean = that match {
+ case m: Manifest[_] => (m canEqual this) && (this.erasure == m.erasure) && (this <:< m) && (m <:< this)
+ case _ => false
+ }
+ override def hashCode = this.erasure.##
+}
+
+@deprecated("Use type tags and manually check the corresponding class or type instead", "2.10.0")
+abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals {
+ override def <:<(that: ClassManifest[_]): Boolean =
+ (that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal)
+ override def canEqual(other: Any) = other match {
+ case _: AnyValManifest[_] => true
+ case _ => false
+ }
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override val hashCode = System.identityHashCode(this)
+}
+
+/** The object `Manifest` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used
+ * in client code.
+ */
+@deprecated("Use `@scala.reflect.ConcreteTypeTag` instead", "2.10.0")
+object Manifest {
+ def valueManifests: List[AnyValManifest[_]] =
+ List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit)
+
+ val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte]("Byte") {
+ def erasure = java.lang.Byte.TYPE
+ override def newArray(len: Int): Array[Byte] = new Array[Byte](len)
+ override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len))
+ override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte()
+ private def readResolve(): Any = Manifest.Byte
+ }
+
+ val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short]("Short") {
+ def erasure = java.lang.Short.TYPE
+ override def newArray(len: Int): Array[Short] = new Array[Short](len)
+ override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len))
+ override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort()
+ private def readResolve(): Any = Manifest.Short
+ }
+
+ val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char]("Char") {
+ def erasure = java.lang.Character.TYPE
+ override def newArray(len: Int): Array[Char] = new Array[Char](len)
+ override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len))
+ override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar()
+ private def readResolve(): Any = Manifest.Char
+ }
+
+ val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int]("Int") {
+ def erasure = java.lang.Integer.TYPE
+ override def newArray(len: Int): Array[Int] = new Array[Int](len)
+ override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len))
+ override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt()
+ private def readResolve(): Any = Manifest.Int
+ }
+
+ val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long]("Long") {
+ def erasure = java.lang.Long.TYPE
+ override def newArray(len: Int): Array[Long] = new Array[Long](len)
+ override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len))
+ override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong()
+ private def readResolve(): Any = Manifest.Long
+ }
+
+ val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float]("Float") {
+ def erasure = java.lang.Float.TYPE
+ override def newArray(len: Int): Array[Float] = new Array[Float](len)
+ override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len))
+ override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat()
+ private def readResolve(): Any = Manifest.Float
+ }
+
+ val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double]("Double") {
+ def erasure = java.lang.Double.TYPE
+ override def newArray(len: Int): Array[Double] = new Array[Double](len)
+ override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len))
+ override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble()
+ private def readResolve(): Any = Manifest.Double
+ }
+
+ val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean]("Boolean") {
+ def erasure = java.lang.Boolean.TYPE
+ override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len)
+ override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len))
+ override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean()
+ private def readResolve(): Any = Manifest.Boolean
+ }
+
+ val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit]("Unit") {
+ def erasure = java.lang.Void.TYPE
+ override def newArray(len: Int): Array[Unit] = new Array[Unit](len)
+ override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len))
+ override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit()
+ private def readResolve(): Any = Manifest.Unit
+ }
+
+ val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any]("Any") {
+ override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
+ private def readResolve(): Any = Manifest.Any
+ }
+
+ val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object]("Object") {
+ override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
+ private def readResolve(): Any = Manifest.Object
+ }
+
+ val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal]("AnyVal") {
+ override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
+ private def readResolve(): Any = Manifest.AnyVal
+ }
+
+ val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null]("Null") {
+ override def <:<(that: ClassManifest[_]): Boolean =
+ (that ne null) && (that ne Nothing) && !(that <:< AnyVal)
+ private def readResolve(): Any = Manifest.Null
+ }
+
+ val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing]("Nothing") {
+ override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
+ private def readResolve(): Any = Manifest.Nothing
+ }
+
+ private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] {
+ lazy val erasure = value.getClass
+ override lazy val toString = value.toString + ".type"
+ }
+
+ /** Manifest for the singleton type `value.type`. */
+ def singleType[T <: AnyRef](value: AnyRef): Manifest[T] =
+ new SingletonTypeManifest[T](value)
+
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
+ * a top-level or static class.
+ * @note This no-prefix, no-arguments case is separate because
+ * it's called from ScalaRunTime.boxArray itself. If we
+ * pass varargs as arrays into this, we get an infinitely recursive call
+ * to boxArray. (Besides, having a separate case is more efficient)
+ */
+ def classType[T](clazz: Predef.Class[_]): Manifest[T] =
+ new ClassTypeManifest[T](None, clazz, Nil)
+
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
+ * a top-level or static class and `args` are its type arguments. */
+ def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] =
+ new ClassTypeManifest[T](None, clazz, arg1 :: args.toList)
+
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
+ * a class with non-package prefix type `prefix` and type arguments `args`.
+ */
+ def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
+ new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
+
+ private abstract class PhantomManifest[T](override val toString: String) extends ClassTypeManifest[T](None, classOf[java.lang.Object], Nil) {
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override val hashCode = System.identityHashCode(this)
+ }
+
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
+ * a top-level or static class. */
+ private class ClassTypeManifest[T](prefix: Option[Manifest[_]],
+ val erasure: Predef.Class[_],
+ override val typeArguments: List[Manifest[_]]) extends Manifest[T] {
+ override def toString =
+ (if (prefix.isEmpty) "" else prefix.get.toString+"#") +
+ (if (erasure.isArray) "Array" else erasure.getName) +
+ argString
+ }
+
+ def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] =
+ arg.asInstanceOf[Manifest[T]].arrayManifest
+
+ /** Manifest for the abstract type `prefix # name`. `upperBound` is not
+ * strictly necessary as it could be obtained by reflection. It was
+ * added so that erasure can be calculated without reflection. */
+ def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
+ new Manifest[T] {
+ def erasure = upperBound
+ override val typeArguments = args.toList
+ override def toString = prefix.toString+"#"+name+argString
+ }
+
+ /** Manifest for the unknown type `_ >: L <: U` in an existential.
+ */
+ def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] =
+ new Manifest[T] {
+ def erasure = upperBound.erasure
+ override def toString =
+ "_" +
+ (if (lowerBound eq Nothing) "" else " >: "+lowerBound) +
+ (if (upperBound eq Nothing) "" else " <: "+upperBound)
+ }
+
+ /** Manifest for the intersection type `parents_0 with ... with parents_n`. */
+ def intersectionType[T](parents: Manifest[_]*): Manifest[T] =
+ new Manifest[T] {
+ def erasure = parents.head.erasure
+ override def toString = parents.mkString(" with ")
+ }
+} \ No newline at end of file
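For orientation, here is a minimal usage sketch (not part of the patch above) showing how the factory methods just added can be combined by hand; `listOfInt` and `arrayOfLists` are made-up names:

    // Building manifests explicitly via the factories defined above.
    val listOfInt: Manifest[List[Int]] =
      Manifest.classType(classOf[List[Int]], Manifest.Int)
    // arrayType wraps an element manifest into a manifest for Array[...].
    val arrayOfLists: Manifest[Array[List[Int]]] =
      Manifest.arrayType(listOfInt)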
diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala
new file mode 100644
index 0000000000..7b8037272c
--- /dev/null
+++ b/src/library/scala/reflect/NoManifest.scala
@@ -0,0 +1,16 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.reflect
+
+/** One of the branches of an [[scala.reflect.OptManifest]].
+ */
+@deprecated("Use `@scala.reflect.TypeTag` instead", "2.10.0")
+object NoManifest extends OptManifest[Nothing] with Serializable {
+ override def toString = "<?>"
+} \ No newline at end of file
diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala
new file mode 100644
index 0000000000..46f23c4e22
--- /dev/null
+++ b/src/library/scala/reflect/OptManifest.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.reflect
+
+/** An `OptManifest[T]` is an optional [[scala.reflect.Manifest]].
+ *
+ * It is either a `Manifest` or the value `NoManifest`.
+ *
+ * @author Martin Odersky
+ */
+@deprecated("Use `@scala.reflect.TypeTag` instead", "2.10.0")
+trait OptManifest[+T] extends Serializable \ No newline at end of file
diff --git a/src/library/scala/reflect/ReflectionUtils.scala b/src/library/scala/reflect/ReflectionUtils.scala
index 1be46eac55..6ea69cb80d 100644
--- a/src/library/scala/reflect/ReflectionUtils.scala
+++ b/src/library/scala/reflect/ReflectionUtils.scala
@@ -5,6 +5,7 @@
package scala.reflect
+import java.lang.{Class => jClass}
import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
/** A few java-reflection oriented utility functions useful during reflection bootstrapping.
@@ -27,6 +28,28 @@ object ReflectionUtils {
case ex if pf isDefinedAt unwrapThrowable(ex) => pf(unwrapThrowable(ex))
}
+ private def systemProperties: Iterator[(String, String)] = {
+ import scala.collection.JavaConverters._
+ System.getProperties.asScala.iterator
+ }
+
+ private def searchForBootClasspath = (
+ systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
+ )
+
+ def show(cl: ClassLoader) = {
+ def inferClasspath(cl: ClassLoader) = cl match {
+ case cl: java.net.URLClassLoader => "[" + (cl.getURLs mkString ",") + "]"
+ case _ => "<unknown>"
+ }
+ cl match {
+ case cl if cl != null =>
+ "%s of type %s with classpath %s".format(cl, cl.getClass, inferClasspath(cl))
+ case null =>
+ "primordial classloader with boot classpath [%s]".format(searchForBootClasspath)
+ }
+ }
+
def defaultReflectionClassLoader() = {
// say no to non-determinism of mirror classloaders
// default classloader will be instantiated using current system classloader
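As a quick illustration (not part of the patch), the new `show` helper can be used to log which classpath a classloader will see; this assumes `ReflectionUtils` is accessible to the calling code:

    import scala.reflect.ReflectionUtils
    // Prints e.g. "...URLClassLoader... of type class java.net.URLClassLoader with classpath [...]"
    println(ReflectionUtils.show(getClass.getClassLoader))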
diff --git a/src/library/scala/reflect/TagInterop.scala b/src/library/scala/reflect/TagInterop.scala
new file mode 100644
index 0000000000..6c6bfcc2f2
--- /dev/null
+++ b/src/library/scala/reflect/TagInterop.scala
@@ -0,0 +1,34 @@
+package scala.reflect
+
+import scala.runtime.ScalaRunTime._
+import mirror._
+import definitions._
+
+object TagInterop {
+ def arrayTagToClassManifest[T](tag: ArrayTag[T]): ClassManifest[T] = {
+ val erasure = arrayElementClass(tag)
+ if (erasure.isArray) {
+ val elementClass = arrayElementClass(erasure)
+ val elementManifest = arrayTagToClassManifest(ClassTag(elementClass))
+ ClassManifest.arrayType(elementManifest).asInstanceOf[ClassManifest[T]]
+ } else {
+ ClassManifest.fromClass(erasure.asInstanceOf[Class[T]])
+ }
+ }
+
+ def concreteTypeTagToManifest[T](tag: ConcreteTypeTag[T]): Manifest[T] = {
+ // todo. reproduce manifest generation code here. toolboxes are too slow.
+ val implicitly = PredefModule.typeSignature.member(newTermName("implicitly"))
+ val taggedTpe = appliedType(staticClass("scala.reflect.Manifest").asTypeConstructor, List(tag.tpe))
+ val materializer = TypeApply(Ident(implicitly), List(TypeTree(taggedTpe)))
+ try mkToolBox().runExpr(materializer).asInstanceOf[Manifest[T]]
+ catch { case ex: Throwable => Manifest.classType(tag.erasure).asInstanceOf[Manifest[T]] }
+ }
+
+ def manifestToConcreteTypeTag[T](tag: Manifest[T]): ConcreteTypeTag[T] = {
+ val tpe =
+ if (tag.typeArguments.isEmpty) classToType(tag.erasure)
+ else appliedType(classToType(tag.erasure).typeConstructor, tag.typeArguments map (manifestToConcreteTypeTag(_)) map (_.tpe))
+ ConcreteTypeTag(tpe, tag.erasure)
+ }
+} \ No newline at end of file
diff --git a/src/library/scala/reflect/TagMaterialization.scala b/src/library/scala/reflect/TagMaterialization.scala
deleted file mode 100644
index 5918b6effc..0000000000
--- a/src/library/scala/reflect/TagMaterialization.scala
+++ /dev/null
@@ -1,154 +0,0 @@
-package scala.reflect
-
-import api.Universe
-import makro.Context
-
-// todo. unfortunately, current type inferencer doesn't infer type parameters of implicit values
-// this means that during macro expansion these macros will get Nothing instead of real T
-// Oh how much I'd love to implement this now, but I have to postpone this until we have a solution for type inference
-
-/** This object is required by the compiler and <b>should not be used in client code</b>. */
-object TagMaterialization {
- def materializeClassTag[T: c.TypeTag](c: Context): c.Expr[ClassTag[T]] = {
- import c.mirror._
- val tpe = implicitly[c.TypeTag[T]].tpe
- c.materializeClassTag(tpe)
- }
-
- def materializeTypeTag[T: c.TypeTag](c: Context { type PrefixType = Universe }): c.Expr[c.prefix.value.TypeTag[T]] = {
- import c.mirror._
- val tpe = implicitly[c.TypeTag[T]].tpe
- c.materializeTypeTag(tpe, requireConcreteTypeTag = false)
- }
-
- def materializeConcreteTypeTag[T: c.TypeTag](c: Context { type PrefixType = Universe }): c.Expr[c.prefix.value.ConcreteTypeTag[T]] = {
- import c.mirror._
- val tpe = implicitly[c.TypeTag[T]].tpe
- c.materializeTypeTag(tpe, requireConcreteTypeTag = true)
- }
-
- private implicit def context2utils(c0: Context) : Utils { val c: c0.type } = new { val c: c0.type = c0 } with Utils
-
- private abstract class Utils {
- val c: Context
-
- import c.mirror._
- import definitions._
-
- val coreTags = Map(
- ByteClass.asType -> newTermName("Byte"),
- ShortClass.asType -> newTermName("Short"),
- CharClass.asType -> newTermName("Char"),
- IntClass.asType -> newTermName("Int"),
- LongClass.asType -> newTermName("Long"),
- FloatClass.asType -> newTermName("Float"),
- DoubleClass.asType -> newTermName("Double"),
- BooleanClass.asType -> newTermName("Boolean"),
- UnitClass.asType -> newTermName("Unit"),
- AnyClass.asType -> newTermName("Any"),
- ObjectClass.asType -> newTermName("Object"),
- AnyValClass.asType -> newTermName("AnyVal"),
- AnyRefClass.asType -> newTermName("AnyRef"),
- NothingClass.asType -> newTermName("Nothing"),
- NullClass.asType -> newTermName("Null"))
-
- val ReflectPackage = staticModule("scala.reflect.package")
- val Reflect_mirror = selectTerm(ReflectPackage, "mirror")
- val ClassTagClass = staticClass("scala.reflect.ClassTag")
- val ClassTagErasure = selectTerm(ClassTagClass, "erasure")
- val ClassTagModule = staticModule("scala.reflect.ClassTag")
- val TypeTagsClass = staticClass("scala.reflect.api.TypeTags")
- val TypeTagClass = selectType(TypeTagsClass, "TypeTag")
- val TypeTagTpe = selectTerm(TypeTagClass, "tpe")
- val TypeTagModule = selectTerm(TypeTagsClass, "TypeTag")
- val ConcreteTypeTagClass = selectType(TypeTagsClass, "ConcreteTypeTag")
- val ConcreteTypeTagModule = selectTerm(TypeTagsClass, "ConcreteTypeTag")
-
- def materializeClassTag(tpe: Type): Tree = {
- val prefix = gen.mkAttributedRef(Reflect_mirror) setType singleType(Reflect_mirror.owner.thisPrefix, Reflect_mirror)
- materializeClassTag(prefix, tpe)
- }
-
- def materializeClassTag(prefix: Tree, tpe: Type): Tree = {
- val typetagInScope = c.inferImplicitValue(appliedType(typeRef(prefix.tpe, ConcreteTypeTagClass, Nil), List(tpe)))
- def typetagIsSynthetic(tree: Tree) = tree.isInstanceOf[Block] || (tree exists (sub => sub.symbol == TypeTagModule || sub.symbol == ConcreteTypeTagModule))
- typetagInScope match {
- case success if !success.isEmpty && !typetagIsSynthetic(success) =>
- val factory = TypeApply(Select(Ident(ClassTagModule), newTermName("apply")), List(TypeTree(tpe)))
- Apply(factory, List(Select(typetagInScope, newTermName("tpe"))))
- case _ =>
- val result =
- tpe match {
- case coreTpe if coreTags contains coreTpe =>
- Select(Ident(ClassTagModule), coreTags(coreTpe))
- case _ =>
- if (tpe.typeSymbol == ArrayClass) {
- val componentTpe = tpe.typeArguments(0)
- val classtagInScope = c.inferImplicitValue(appliedType(typeRef(NoPrefix, ClassTagClass, Nil), List(componentTpe)))
- val componentTag = classtagInScope orElse materializeClassTag(prefix, componentTpe)
- Select(componentTag, newTermName("wrap"))
- } else {
- // [Eugene] what's the intended behavior? there's no spec on ClassManifests
- // for example, should we ban Array[T] or should we tag them with Array[AnyRef]?
- // if its the latter, what should be the result of tagging Array[T] where T <: Int?
- if (tpe.typeSymbol.isAbstractType) fail("tpe is an abstract type")
- val erasure =
- if (tpe.typeSymbol.isDerivedValueClass) tpe // [Eugene to Martin] is this correct?
- else tpe.erasure.normalize // necessary to deal with erasures of HK types
- val factory = TypeApply(Select(Ident(ClassTagModule), newTermName("apply")), List(TypeTree(tpe)))
- Apply(factory, List(TypeApply(Ident(newTermName("classOf")), List(TypeTree(erasure)))))
- }
- }
- try c.typeCheck(result)
- catch { case terr @ c.TypeError(pos, msg) => fail(terr) }
- }
- }
-
- def materializeTypeTag(tpe: Type, requireConcreteTypeTag: Boolean): Tree = {
- def prefix: Tree = ??? // todo. needs to be synthesized from c.prefix
- materializeTypeTag(prefix, tpe, requireConcreteTypeTag)
- }
-
- def materializeTypeTag(prefix: Tree, tpe: Type, requireConcreteTypeTag: Boolean): Tree = {
- val tagModule = if (requireConcreteTypeTag) ConcreteTypeTagModule else TypeTagModule
- val result =
- tpe match {
- case coreTpe if coreTags contains coreTpe =>
- Select(Select(prefix, tagModule.name), coreTags(coreTpe))
- case _ =>
- try c.reifyType(prefix, tpe, dontSpliceAtTopLevel = true, requireConcreteTypeTag = requireConcreteTypeTag)
- catch {
- case ex: Throwable =>
- // [Eugene] cannot pattern match on an abstract type, so had to do this
- val ex1 = ex
- if (ex.getClass.toString.endsWith("$ReificationError")) {
- ex match {
- case c.ReificationError(pos, msg) =>
- c.error(pos, msg)
- EmptyTree
- }
- } else if (ex.getClass.toString.endsWith("$UnexpectedReificationError")) {
- ex match {
- case c.UnexpectedReificationError(pos, err, cause) =>
- if (cause != null) throw cause else throw ex
- }
- } else {
- throw ex
- }
- }
- }
- try c.typeCheck(result)
- catch { case terr @ c.TypeError(pos, msg) => fail(terr) }
- }
-
- private def fail(reason: Any): Nothing = {
- val Apply(TypeApply(fun, List(tpeTree)), _) = c.macroApplication
- val tpe = tpeTree.tpe
- val PolyType(_, MethodType(_, tagTpe)) = fun.tpe
- val tagModule = tagTpe.typeSymbol.companionSymbol
- if (c.compilerSettings.contains("-Xlog-implicits"))
- c.echo(c.enclosingPosition, "cannot materialize " + tagModule.name + "[" + tpe + "] because:\n" + reason)
- c.abort(c.enclosingPosition, "No %s available for %s".format(tagModule.name, tpe))
- }
- }
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/api/AnnotationInfos.scala b/src/library/scala/reflect/api/AnnotationInfos.scala
index 96a65606e5..cc1c4d2b6b 100755
--- a/src/library/scala/reflect/api/AnnotationInfos.scala
+++ b/src/library/scala/reflect/api/AnnotationInfos.scala
@@ -12,7 +12,7 @@ trait AnnotationInfos { self: Universe =>
}
type ClassfileAnnotArg <: AnyRef
- implicit def classfileAnnotArgManifest: ClassManifest[ClassfileAnnotArg] // need a precise manifest to pass to UnPickle's toArray call
+ implicit def classfileAnnotArgTag: ArrayTag[ClassfileAnnotArg] // need a precise tag to pass to UnPickle's toArray call
type LiteralAnnotArg <: ClassfileAnnotArg
val LiteralAnnotArg: LiteralAnnotArgExtractor
diff --git a/src/library/scala/reflect/api/Attachment.scala b/src/library/scala/reflect/api/Attachment.scala
new file mode 100644
index 0000000000..50f55b4aa5
--- /dev/null
+++ b/src/library/scala/reflect/api/Attachment.scala
@@ -0,0 +1,29 @@
+package scala.reflect
+package api
+
+/** Attachment is a generalisation of Position.
+ * Typically it stores a Position of a tree, but this can be extended to encompass arbitrary payloads.
+ *
+ * Attachments have to carry positions, because we don't want to introduce even a single additional field in Tree
+ * imposing an unnecessary memory tax because of something that will not be used in most cases.
+ */
+trait Attachment {
+ /** Gets the underlying position */
+ def pos: Position
+
+ /** Creates a copy of this attachment with its position updated */
+ def withPos(newPos: Position): Attachment
+
+ /** Gets the underlying payload */
+ def payload: Any
+
+ /** Creates a copy of this attachment with its payload updated */
+ def withPayload(newPayload: Any): Attachment
+}
+
+// [Eugene] with the introduction of `attach` and `attachment[T]` users don't need to create custom attachments anymore
+// however, we cannot move attachments to scala.reflect.internal, because they are used in Trees, which are implemented completely in scala.reflect.api
+private[scala] case class NontrivialAttachment(pos: api.Position, payload: collection.mutable.ListBuffer[Any]) extends Attachment {
+ def withPos(newPos: api.Position) = copy(pos = newPos, payload = payload)
+ def withPayload(newPayload: Any) = copy(pos = pos, payload = newPayload.asInstanceOf[collection.mutable.ListBuffer[Any]])
+} \ No newline at end of file
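For context, a minimal sketch (not in the commit) of what an implementation of the Attachment contract looks like; the commit's own NontrivialAttachment follows the same shape, and `PosOnlyAttachment` is a hypothetical name:

    import scala.reflect.api
    // A position-only attachment: the payload slot is left unused in this sketch.
    case class PosOnlyAttachment(pos: api.Position) extends api.Attachment {
      def withPos(newPos: api.Position) = copy(pos = newPos)
      def payload: Any = ()
      def withPayload(newPayload: Any) = this
    }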
diff --git a/src/library/scala/reflect/api/Attachments.scala b/src/library/scala/reflect/api/Attachments.scala
deleted file mode 100644
index dfd362ebe0..0000000000
--- a/src/library/scala/reflect/api/Attachments.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package scala.reflect
-package api
-
-/** Attachment is a generalisation of Position.
- * Typically it stores a Position of a tree, but this can be extended to encompass arbitrary payloads.
- *
- * Attachments have to carry positions, because we don't want to introduce even a single additional field in Tree
- * imposing an unnecessary memory tax because of something that will not be used in most cases.
- */
-trait Attachment {
- /** Gets the underlying position */
- def pos: Position
-
- /** Creates a copy of this attachment with its position updated */
- def withPos(pos: Position): Attachment
-}
diff --git a/src/library/scala/reflect/api/Exprs.scala b/src/library/scala/reflect/api/Exprs.scala
index 8c3f12783b..b8db64422e 100644
--- a/src/library/scala/reflect/api/Exprs.scala
+++ b/src/library/scala/reflect/api/Exprs.scala
@@ -5,6 +5,7 @@
package scala.reflect
package api
+import language.implicitConversions
trait Exprs { self: Universe =>
@@ -15,34 +16,4 @@ trait Exprs { self: Universe =>
lazy val value: T = eval
override def toString = "Expr["+tpe+"]("+tree+")"
}
-
- // [Eugene] had to move this to the companion of Tree to make stuff compile. weirdo!
-// object Expr {
-// // would be great if in future this generated an Expr[Magic]
-// // where Magic is a magic untyped type that propagates through the entire quasiquote
-// // and turns off typechecking whenever it's involved
-// // that'd allow us to splice trees into quasiquotes and still have these qqs to be partially typechecked
-// // see some exploration of these ideas here: https://github.com/xeno-by/alphakeplerdemo
-// implicit def tree2expr(tree: Tree): Expr[Nothing] = Expr[Nothing](tree)
-// implicit def expr2tree(expr: Expr[_]): Tree = expr.tree
-//
-// // [Eugene] good idea?
-// implicit def trees2exprs(trees: List[Tree]): List[Expr[Nothing]] = trees map tree2expr
-// implicit def exprs2trees(exprs: List[Expr[_]]): List[Tree] = exprs map expr2tree
-// }
-
- // [Eugene] even weirder - implicits didn't feel at home in Trees :(
-
- // would be great if in future this generated an Expr[Magic]
- // where Magic is a magic untyped type that propagates through the entire quasiquote
- // and turns off typechecking whenever it's involved
- // that'd allow us to splice trees into quasiquotes and still have these qqs to be partially typechecked
- // see some exploration of these ideas here: https://github.com/xeno-by/alphakeplerdemo
- implicit def tree2expr(tree: Tree): Expr[Nothing] = Expr[Nothing](tree)(TypeTag.Nothing)
- implicit def expr2tree(expr: Expr[_]): Tree = expr.tree
-
- // [Eugene] good idea?
- implicit def trees2exprs(trees: List[Tree]): List[Expr[Nothing]] = trees map tree2expr
- implicit def exprs2trees(exprs: List[Expr[_]]): List[Tree] = exprs map expr2tree
-}
-
+} \ No newline at end of file
diff --git a/src/library/scala/reflect/api/Reporters.scala b/src/library/scala/reflect/api/FrontEnds.scala
index b7428e1599..2c1f3feff6 100644
--- a/src/library/scala/reflect/api/Reporters.scala
+++ b/src/library/scala/reflect/api/FrontEnds.scala
@@ -1,9 +1,9 @@
package scala.reflect
package api
-trait Reporters { self: Universe =>
+trait FrontEnds { self: Universe =>
- trait Reporter {
+ trait FrontEnd {
object severity extends Enumeration
class Severity(val id: Int) extends severity.Value {
var count: Int = 0
@@ -18,6 +18,9 @@ trait Reporters { self: Universe =>
val WARNING = new Severity(1)
val ERROR = new Severity(2)
+ def hasErrors = ERROR.count > 0
+ def hasWarnings = WARNING.count > 0
+
case class Info(val pos: Position, val msg: String, val severity: Severity)
val infos = new collection.mutable.LinkedHashSet[Info]
@@ -46,14 +49,14 @@ trait Reporters { self: Universe =>
}
}
- class SilentReporter extends Reporter {
+ class SilentFrontEnd extends FrontEnd {
def display(info: Info) {}
def interactive() {}
}
/** Creates a UI-less reporter that simply accumulates all the messages
*/
- def mkSilentReporter(): Reporter = new SilentReporter()
+ def mkSilentFrontEnd(): FrontEnd = new SilentFrontEnd()
/** Creates a reporter that prints messages to the console according to the settings.
*
@@ -61,5 +64,5 @@ trait Reporters { self: Universe =>
* 0 stands for INFO, 1 stands for WARNING and 2 stands for ERROR.
*/
// todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
- def mkConsoleReporter(minSeverity: Int = 1): Reporter
+ def mkConsoleFrontEnd(minSeverity: Int = 1): FrontEnd
} \ No newline at end of file
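To make the Reporter-to-FrontEnd rename concrete, here is a hedged sketch (not part of the diff) of a front end that echoes every message instead of swallowing it; `u` stands for whatever Universe value is in scope:

    import u._
    val loudFrontEnd = new FrontEnd {
      def display(info: Info) { println(info.pos + ": " + info.msg) }
      def interactive() { /* never drop into interactive mode */ }
    }
    // The toolbox factory below now takes a FrontEnd instead of a Reporter.
    val toolBox = mkToolBox(frontEnd = loudFrontEnd)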
diff --git a/src/library/scala/reflect/api/Names.scala b/src/library/scala/reflect/api/Names.scala
index c72774dfc7..96651ffa88 100755
--- a/src/library/scala/reflect/api/Names.scala
+++ b/src/library/scala/reflect/api/Names.scala
@@ -33,13 +33,13 @@ trait Names {
/** Returns a type name that represents the same string as this name */
def toTypeName: TypeName
- /** Replaces all occurrences of $op_names in this name by corresponding operator symbols.
- * Example: `foo_+=` becomes `foo_$plus$eq`.
+ /** Replaces all occurrences of \$op_names in this name by corresponding operator symbols.
+ * Example: `foo_\$plus\$eq` becomes `foo_+=`
*/
def decoded: String
- /** Replaces all occurrences of operator symbols in this name by corresponding $op_names.
- * Example: `foo_$plus$eq` becomes `foo_+=`
+ /** Replaces all occurrences of operator symbols in this name by corresponding \$op_names.
+ * Example: `foo_+=` becomes `foo_\$plus\$eq`.
*/
def encoded: String
diff --git a/src/library/scala/reflect/api/RequiredFile.scala b/src/library/scala/reflect/api/RequiredFile.scala
new file mode 100644
index 0000000000..4a54595940
--- /dev/null
+++ b/src/library/scala/reflect/api/RequiredFile.scala
@@ -0,0 +1,7 @@
+package scala.reflect
+package api
+
+trait RequiredFile {
+ def path: String
+ def canonicalPath: String
+}
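For context (not part of the diff), RequiredFile is small enough that a file-system-backed implementation is a one-liner per member; `OnDiskFile` below is a hypothetical name:

    import java.io.File
    class OnDiskFile(f: File) extends scala.reflect.api.RequiredFile {
      def path = f.getPath                    // path as given
      def canonicalPath = f.getCanonicalPath  // resolved absolute path
    }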
diff --git a/src/library/scala/reflect/api/StandardDefinitions.scala b/src/library/scala/reflect/api/StandardDefinitions.scala
index e457bb73e0..21f7c9283b 100755
--- a/src/library/scala/reflect/api/StandardDefinitions.scala
+++ b/src/library/scala/reflect/api/StandardDefinitions.scala
@@ -6,7 +6,8 @@
package scala.reflect
package api
-trait StandardDefinitions { self: Universe =>
+trait StandardTypes {
+ self: Universe =>
val ByteTpe: Type
val ShortTpe: Type
@@ -17,110 +18,119 @@ trait StandardDefinitions { self: Universe =>
val DoubleTpe: Type
val BooleanTpe: Type
val UnitTpe: Type
+
val AnyTpe: Type
- val ObjectTpe: Type
val AnyValTpe: Type
val AnyRefTpe: Type
+ val ObjectTpe: Type
+
val NothingTpe: Type
val NullTpe: Type
val StringTpe: Type
+}
+
+trait StandardDefinitions extends StandardTypes {
+ self: Universe =>
val definitions: AbsDefinitions
- abstract class AbsDefinitions {
- // packages
- def RootPackage: Symbol
- def RootClass: Symbol
- def EmptyPackage: Symbol
+ // I intend to pull everything in here out of the public API.
+ trait AbsDefinitionsInternal {
+ def ArrayModule: Symbol
+ def ArrayModule_overloadedApply: Symbol
+ def Array_apply: Symbol
+ def Array_clone: Symbol
+ def Array_length: Symbol
+ def Array_update: Symbol
+ def ByNameParamClass: Symbol
+ def ClassTagModule: Symbol
+ def ConcreteTypeTagModule: Symbol
+ def ConsClass: Symbol
def EmptyPackageClass: Symbol
- def ScalaPackage: Symbol
- def ScalaPackageClass: Symbol
- def JavaLangPackage: Symbol
+ def FunctionClass : Array[Symbol]
+ def IterableClass: Symbol
+ def IteratorClass: Symbol
+ def IteratorModule: Symbol
+ def Iterator_apply: Symbol
def JavaLangPackageClass: Symbol
+ def JavaRepeatedParamClass: Symbol
+ def ListModule: Symbol
+ def List_apply: Symbol
+ def NilModule: Symbol
+ def NoneModule: Symbol
+ def OptionClass: Symbol
+ def ProductClass : Array[Symbol]
+ def RepeatedParamClass: Symbol
+ def ScalaPackageClass: Symbol
+ def SeqClass: Symbol
+ def SeqModule: Symbol
+ def SomeClass: Symbol
+ def SomeModule: Symbol
+ def StringBuilderClass: Symbol
+ def SymbolClass : Symbol
+ def TraversableClass: Symbol
+ def TupleClass : Array[Symbol]
+ def TypeTagModule: Symbol
+ def ScalaPrimitiveValueClasses: List[ClassSymbol]
+ }
+
+ trait AbsDefinitions extends AbsDefinitionsInternal {
+ // packages
+ def RootClass: ClassSymbol
+ def RootPackage: PackageSymbol
+ def EmptyPackage: PackageSymbol
+ def ScalaPackage: PackageSymbol
+ def JavaLangPackage: PackageSymbol
// top types
- def AnyClass : Symbol
- def AnyValClass: Symbol
- def AnyRefClass: Symbol
- def ObjectClass: Symbol
+ def AnyClass : ClassSymbol
+ def AnyValClass: ClassSymbol
+ def ObjectClass: ClassSymbol
+ def AnyRefClass: TypeSymbol
// bottom types
- def NullClass : Symbol
- def NothingClass: Symbol
+ def NullClass : ClassSymbol
+ def NothingClass: ClassSymbol
// the scala value classes
- def UnitClass : Symbol
- def ByteClass : Symbol
- def ShortClass : Symbol
- def CharClass : Symbol
- def IntClass : Symbol
- def LongClass : Symbol
- def FloatClass : Symbol
- def DoubleClass : Symbol
- def BooleanClass: Symbol
- def ScalaPrimitiveValueClasses: List[Symbol]
-
- // fundamental reference classes
- def SymbolClass : Symbol
- def StringClass : Symbol
- def ClassClass : Symbol
-
- // product, tuple, function
- def TupleClass : Array[Symbol]
- def ProductClass : Array[Symbol]
- def FunctionClass : Array[Symbol]
-
- // Option classes
- def OptionClass: Symbol
- def SomeClass: Symbol
- def NoneModule: Symbol
- def SomeModule: Symbol
+ def UnitClass : ClassSymbol
+ def ByteClass : ClassSymbol
+ def ShortClass : ClassSymbol
+ def CharClass : ClassSymbol
+ def IntClass : ClassSymbol
+ def LongClass : ClassSymbol
+ def FloatClass : ClassSymbol
+ def DoubleClass : ClassSymbol
+ def BooleanClass: ClassSymbol
+
+ // some special classes
+ def StringClass : ClassSymbol
+ def ClassClass : ClassSymbol
+ def ArrayClass: ClassSymbol
// collections classes
- def ConsClass: Symbol
- def IterableClass: Symbol
- def IteratorClass: Symbol
- def ListClass: Symbol
- def SeqClass: Symbol
- def StringBuilderClass: Symbol
- def TraversableClass: Symbol
+ def ListClass: ClassSymbol
+ def ListModule: ModuleSymbol
// collections modules
- def PredefModule: Symbol
- def ListModule: Symbol
- def List_apply: Symbol
- def NilModule: Symbol
- def SeqModule: Symbol
- def IteratorModule: Symbol
- def Iterator_apply: Symbol
-
- // arrays and their members
- def ArrayModule: Symbol
- def ArrayModule_overloadedApply: Symbol
- def ArrayClass: Symbol
- def Array_apply: Symbol
- def Array_update: Symbol
- def Array_length: Symbol
- def Array_clone: Symbol
-
- // special parameter types
- def ByNameParamClass: Symbol
- def JavaRepeatedParamClass: Symbol
- def RepeatedParamClass: Symbol
+ def PredefModule: ModuleSymbol
// type tags
- def ClassTagClass: Symbol
- def ClassTagModule: Symbol
- def TypeTagClass: Symbol
- def TypeTagModule: Symbol
- def ConcreteTypeTagClass: Symbol
- def ConcreteTypeTagModule: Symbol
+ def ClassTagClass: ClassSymbol
+ def TypeTagClass: ClassSymbol
+ def ConcreteTypeTagClass: ClassSymbol
/** Given a type T, returns the type corresponding to the VM's
* representation: ClassClass's type constructor applied to `arg`.
*/
def vmClassType(arg: Type): Type // !!! better name?
- // [Eugene] we already have arg.erasure, right?
+ // [Eugene] we already have arg.erasure, right?
+ //
+ // [Paul] You misunderstand the method (it could be better named).
+ // Given List[String], it returns java.lang.Class[List[String]]
+ // (or the .Net equivalent), not the erasure of List[String].
+ // See def ClassType in definitions - that's what it was called before,
+ // and obviously that name has to go.
/** The string representation used by the given type in the VM.
*/
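As a usage sketch (not in the patch), the reorganised definitions are reached through the `definitions` member of a Universe; `u` below stands for whatever Universe value is in scope:

    val defs = u.definitions
    val intSym  = defs.IntClass      // ClassSymbol for scala.Int
    val listSym = defs.ListClass     // ClassSymbol for List
    val predef  = defs.PredefModule  // ModuleSymbol for scala.Predef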
diff --git a/src/library/scala/reflect/api/StandardNames.scala b/src/library/scala/reflect/api/StandardNames.scala
index bfc165f613..a17ea216f7 100644
--- a/src/library/scala/reflect/api/StandardNames.scala
+++ b/src/library/scala/reflect/api/StandardNames.scala
@@ -1,49 +1,65 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
+* Copyright 2005-2011 LAMP/EPFL
+* @author Martin Odersky
+*/
package scala.reflect
package api
-trait StandardNames { self: Universe =>
+trait StandardNames {
+ self: Universe =>
- abstract class AbsNames {
+ val nme: AbsTermNames
+ val tpnme: AbsTypeNames
+
+ trait AbsNames {
type NameType <: Name
- val EMPTY: NameType
- val ANON_FUN_NAME: NameType
val ANON_CLASS_NAME: NameType
+ val ANON_FUN_NAME: NameType
+ val EMPTY: NameType
val EMPTY_PACKAGE_NAME: NameType
+ val ERROR: NameType
val IMPORT: NameType
val MODULE_VAR_SUFFIX: NameType
- val ROOT: NameType
+ val NO_NAME: NameType
val PACKAGE: NameType
+ val ROOT: NameType
val SPECIALIZED_SUFFIX: NameType
-
- val ERROR: NameType
- val NO_NAME: NameType
val WILDCARD: NameType
def flattenedName(segments: Name*): NameType
}
- val nme: AbsTermNames
-
- abstract class AbsTermNames extends AbsNames {
+ trait AbsTermNames extends AbsNames {
val EXPAND_SEPARATOR_STRING: String
+ val IMPL_CLASS_SUFFIX: String
+ val INTERPRETER_IMPORT_WRAPPER: String
+ val INTERPRETER_LINE_PREFIX: String
+ val INTERPRETER_VAR_PREFIX: String
+ val INTERPRETER_WRAPPER_SUFFIX: String
+ val LOCALDUMMY_PREFIX: String
+ val LOCAL_SUFFIX_STRING: String
+ val MODULE_SUFFIX_NAME: TermName
+ val NAME_JOIN_NAME: TermName
+ val PROTECTED_PREFIX: String
+ val PROTECTED_SET_PREFIX: String
+ val SETTER_SUFFIX: TermName
+ val SINGLETON_SUFFIX: String
+ val SUPER_PREFIX_STRING: String
+ val TRAIT_SETTER_SEPARATOR_STRING: String
val ANYNAME: TermName
val CONSTRUCTOR: TermName
val FAKE_LOCAL_THIS: TermName
val INITIALIZER: TermName
val LAZY_LOCAL: TermName
- val LOCAL_SUFFIX_STRING: String
- val MIRROR_PREFIX: TermName
- val MIRROR_SHORT: TermName
val MIRROR_FREE_PREFIX: TermName
val MIRROR_FREE_THIS_SUFFIX: TermName
val MIRROR_FREE_VALUE_SUFFIX: TermName
+ val MIRROR_PREFIX: TermName
+ val MIRROR_SHORT: TermName
+ val MIRROR_SYMDEF_PREFIX: TermName
val MIXIN_CONSTRUCTOR: TermName
val MODULE_INSTANCE_FIELD: TermName
val OUTER: TermName
@@ -57,15 +73,9 @@ trait StandardNames { self: Universe =>
val BITMAP_NORMAL: TermName
val BITMAP_TRANSIENT: TermName
- val BITMAP_PRIVATE: TermName
val BITMAP_CHECKINIT: TermName
val BITMAP_CHECKINIT_TRANSIENT: TermName
- val INTERPRETER_IMPORT_WRAPPER: String
- val INTERPRETER_LINE_PREFIX: String
- val INTERPRETER_VAR_PREFIX: String
- val INTERPRETER_WRAPPER_SUFFIX: String
-
val ROOTPKG: TermName
val ADD: TermName
@@ -92,30 +102,12 @@ trait StandardNames { self: Universe =>
val ZAND: TermName
val ZOR: TermName
- // [Eugene] this doesn't compile. why?!
-// val UNARY_~: TermName
-// val UNARY_+: TermName
-// val UNARY_-: TermName
-// val UNARY_!: TermName
- val UNARY_TILDE: TermName
- val UNARY_PLUS: TermName
- val UNARY_MINUS: TermName
- val UNARY_NOT: TermName
-
- // [Eugene] this doesn't compile. why?!
-// val ???: TermName
- val QQQ: TermName
+ val UNARY_~ : TermName
+ val UNARY_+ : TermName
+ val UNARY_- : TermName
+ val UNARY_! : TermName
- val MODULE_SUFFIX_NAME: TermName
- val NAME_JOIN_NAME: TermName
- val IMPL_CLASS_SUFFIX: String
- val LOCALDUMMY_PREFIX: String
- val PROTECTED_PREFIX: String
- val PROTECTED_SET_PREFIX: String
- val SINGLETON_SUFFIX: String
- val SUPER_PREFIX_STRING: String
- val TRAIT_SETTER_SEPARATOR_STRING: String
- val SETTER_SUFFIX: TermName
+ val ??? : TermName
def isConstructorName(name: Name): Boolean
def isExceptionResultName(name: Name): Boolean
@@ -123,51 +115,49 @@ trait StandardNames { self: Universe =>
def isLocalDummyName(name: Name): Boolean
def isLocalName(name: Name): Boolean
def isLoopHeaderLabel(name: Name): Boolean
+ def isModuleName(name: Name): Boolean
+ def isOpAssignmentName(name: Name): Boolean
def isProtectedAccessorName(name: Name): Boolean
- def isSuperAccessorName(name: Name): Boolean
def isReplWrapperName(name: Name): Boolean
def isSetterName(name: Name): Boolean
- def isTraitSetterName(name: Name): Boolean
def isSingletonName(name: Name): Boolean
- def isModuleName(name: Name): Boolean
- def isOpAssignmentName(name: Name): Boolean
-
- def segments(name: String, assumeTerm: Boolean): List[Name]
- def originalName(name: Name): Name
- def stripModuleSuffix(name: Name): Name
- def unspecializedName(name: Name): Name
- def splitSpecializedName(name: Name): (Name, String, String)
- def dropLocalSuffix(name: Name): Name
+ def isSuperAccessorName(name: Name): Boolean
+ def isTraitSetterName(name: Name): Boolean
- def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName
+ def defaultGetterName(name: Name, pos: Int): TermName
+ def defaultGetterToMethod(name: Name): TermName
+ def expandedName(name: TermName, base: Symbol, separator: String): TermName
def expandedSetterName(name: TermName, base: Symbol): TermName
- def protName(name: Name): TermName
- def protSetterName(name: Name): TermName
def getterName(name: TermName): TermName
def getterToLocal(name: TermName): TermName
def getterToSetter(name: TermName): TermName
+ def localDummyName(clazz: Symbol): TermName
def localToGetter(name: TermName): TermName
+ def protName(name: Name): TermName
+ def protSetterName(name: Name): TermName
def setterToGetter(name: TermName): TermName
- def defaultGetterName(name: Name, pos: Int): TermName
- def defaultGetterToMethod(name: Name): TermName
-
- def dropSingletonName(name: Name): TypeName
- def singletonName(name: Name): TypeName
- def implClassName(name: Name): TypeName
- def interfaceName(implname: Name): TypeName
- def localDummyName(clazz: Symbol): TermName
def superName(name: Name): TermName
- }
- val tpnme: AbsTypeNames
+ def dropLocalSuffix(name: Name): Name
+ def originalName(name: Name): Name
+ def stripModuleSuffix(name: Name): Name
+ def unspecializedName(name: Name): Name
+ def segments(name: String, assumeTerm: Boolean): List[Name]
+ def splitSpecializedName(name: Name): (Name, String, String)
+ }
- abstract class AbsTypeNames extends AbsNames {
- val REFINE_CLASS_NAME: TypeName
+ trait AbsTypeNames extends AbsNames {
val BYNAME_PARAM_CLASS_NAME: TypeName
val EQUALS_PATTERN_NAME: TypeName
val JAVA_REPEATED_PARAM_CLASS_NAME: TypeName
val LOCAL_CHILD: TypeName
+ val REFINE_CLASS_NAME: TypeName
val REPEATED_PARAM_CLASS_NAME: TypeName
val WILDCARD_STAR: TypeName
+
+ def dropSingletonName(name: Name): TypeName
+ def implClassName(name: Name): TypeName
+ def interfaceName(implname: Name): TypeName
+ def singletonName(name: Name): TypeName
}
}
diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala
index a154e5f7a0..32faee2512 100755
--- a/src/library/scala/reflect/api/Symbols.scala
+++ b/src/library/scala/reflect/api/Symbols.scala
@@ -4,8 +4,101 @@ package api
trait Symbols { self: Universe =>
type Symbol >: Null <: AbsSymbol
+ type TypeSymbol <: Symbol with TypeSymbolApi
+ type TermSymbol <: Symbol with TermSymbolApi
+ type MethodSymbol <: TermSymbol with MethodSymbolApi
+ type ModuleSymbol <: TermSymbol with ModuleSymbolApi
+ type PackageSymbol <: ModuleSymbol with PackageSymbolApi
+ type ClassSymbol <: TypeSymbol with ClassSymbolApi
- abstract class AbsSymbol { this: Symbol =>
+ val NoSymbol: Symbol
+
+ trait TypeSymbolApi {
+ self: TypeSymbol =>
+
+ def name: TypeName
+ }
+ trait TermSymbolApi {
+ self: TermSymbol =>
+
+ def name: TermName
+ }
+ trait MethodSymbolApi extends TermSymbolApi {
+ self: MethodSymbol =>
+ }
+ trait ClassSymbolApi extends TypeSymbolApi {
+ self: ClassSymbol =>
+ }
+ trait ModuleSymbolApi extends TermSymbolApi {
+ self: ModuleSymbol =>
+ }
+ trait PackageSymbolApi extends ModuleSymbolApi {
+ self: PackageSymbol =>
+ }
+
+ // I intend to pull everything in here out of the public API.
+ trait AbsSymbolInternal {
+ this: Symbol =>
+
+ /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
+ * the current symbol as its owner.
+ */
+ def newNestedSymbol(name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol
+ // needed by LiftCode !!! not enough reason to have in the api
+
+ /** Low-level operation to set the symbol's flags
+ * @return the symbol itself
+ */
+ def setInternalFlags(flags: Long): this.type
+ // needed by LiftCode !!! not enough reason to have in the api
+
+ /** Set symbol's type signature to given type
+ * @return the symbol itself
+ */
+ def setTypeSignature(tpe: Type): this.type
+ // needed by LiftCode !!! not enough reason to have in the api
+
+ /** Set symbol's annotations to given annotations `annots`.
+ */
+ def setAnnotations(annots: AnnotationInfo*): this.type
+ // needed by LiftCode !!! not enough reason to have in the api
+
+ /** Does this symbol represent the definition of a skolem?
+ * Skolems are used during typechecking to represent type parameters viewed from inside their scopes.
+ * If yes, `isType` is also guaranteed to be true.
+ */
+ def isSkolem : Boolean
+
+ /** Does this symbol represent a free type captured by reification?
+ */
+ // needed for ones who wish to inspect reified trees
+ def isFreeType : Boolean
+
+ /** The type signature of this symbol.
+ * Note if the symbol is a member of a class, one almost always is interested
+ * in `typeSignatureIn` with a site type instead.
+ */
+ def typeSignature: Type // !!! Since one should almost never use this, let's give it a different name.
+
+ /** A type reference that refers to this type symbol
+ * Note if symbol is a member of a class, one almost always is interested
+ * in `asTypeIn` with a site type instead.
+ *
+ * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
+ * `C`. Then `C.asType` is the type `C[T]`.
+ *
+ * By contrast, `C.typeSignature` would be a type signature of form
+ * `PolyType(ClassInfoType(...))` that describes type parameters, value
+ * parameters, parent types, and members of `C`.
+ */
+ def asType: Type // !!! Same as typeSignature.
+
+ /** The kind of this symbol; used for debugging */
+ def kind: String
+ }
+
+ trait AbsSymbol extends AbsSymbolInternal {
+ this: Symbol =>
/** The position of this symbol
*/
@@ -53,7 +146,15 @@ trait Symbols { self: Universe =>
/** ...
*/
- def orElse[T](alt: => Symbol): Symbol
+ def orElse(alt: => Symbol): Symbol
+
+ /** ...
+ */
+ def filter(cond: Symbol => Boolean): Symbol
+
+ /** ...
+ */
+ def suchThat(cond: Symbol => Boolean): Symbol
/**
* Set when symbol has a modifier of the form private[X], NoSymbol otherwise.
@@ -120,6 +221,11 @@ trait Symbols { self: Universe =>
*/
def isTerm : Boolean
+ /** Does this symbol represent a package?
+ * If yes, `isTerm` is also guaranteed to be true.
+ */
+ def isPackage : Boolean
+
/** Does this symbol represent the definition of method?
* If yes, `isTerm` is also guaranteed to be true.
*/
@@ -146,6 +252,11 @@ trait Symbols { self: Universe =>
*/
def isClass : Boolean
+ /** Does this symbol represent a package class?
+ * If yes, `isClass` is also guaranteed to be true.
+ */
+ def isPackageClass : Boolean
+
/** Does this symbol represent the definition of a primitive class?
* Namely, is it one of [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]],
* [[scala.Short]], [[scala.Byte]], [[scala.Unit]] or [[scala.Boolean]]?
@@ -167,16 +278,10 @@ trait Symbols { self: Universe =>
*/
def isAbstractType : Boolean
- /** Does this symbol represent the definition of a skolem?
- * Skolems are used during typechecking to represent type parameters viewed from inside their scopes.
+ /** Does this symbol represent an existentially bound type?
* If yes, `isType` is also guaranteed to be true.
*/
- def isSkolem : Boolean
-
- /** Does this symbol represent a free type captured by reification?
- */
- // needed for ones who wish to inspect reified trees
- def isFreeType : Boolean
+ def isExistential : Boolean
/** Is the type parameter represented by this symbol contravariant?
*/
@@ -190,29 +295,10 @@ trait Symbols { self: Universe =>
*/
def isErroneous : Boolean
- /** The type signature of this symbol.
- * Note if the symbol is a member of a class, one almost always is interested
- * in `typeSignatureIn` with a site type instead.
- */
- def typeSignature: Type // !!! Since one should almost never use this, let's give it a different name.
-
/** The type signature of this symbol seen as a member of given type `site`.
*/
def typeSignatureIn(site: Type): Type
- /** A type reference that refers to this type symbol
- * Note if symbol is a member of a class, one almost always is interested
- * in `asTypeIn` with a site type instead.
- *
- * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
- * `C`. Then `C.asType` is the type `C[T]`.
- *
- * By contrast, `C.typeSignature` would be a type signature of form
- * `PolyType(ClassInfoType(...))` that describes type parameters, value
- * parameters, parent types, and members of `C`.
- */
- def asType: Type // !!! Same as typeSignature.
-
/** A type reference that refers to this type symbol seen
* as a member of given type `site`.
*/
@@ -240,29 +326,5 @@ trait Symbols { self: Universe =>
def alternatives: List[Symbol]
def resolveOverloaded(pre: Type = NoPrefix, targs: Seq[Type] = List(), actuals: Seq[Type]): Symbol
-
- /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
- * the current symbol as its owner.
- */
- def newNestedSymbol(name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol // needed by LiftCode !!! not enough reason to have in the api
-
- /** Low-level operation to set the symbol's flags
- * @return the symbol itself
- */
- def setInternalFlags(flags: Long): this.type // needed by LiftCode !!! not enough reason to have in the api
-
- /** Set symbol's type signature to given type
- * @return the symbol itself
- */
- def setTypeSignature(tpe: Type): this.type // needed by LiftCode !!! not enough reason to have in the api
-
- /** Set symbol's annotations to given annotations `annots`.
- */
- def setAnnotations(annots: AnnotationInfo*): this.type // needed by LiftCode !!! not enough reason to have in the api
-
- /** The kind of this symbol; used for debugging */
- def kind: String
}
-
- val NoSymbol: Symbol
}
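A small sketch (not part of the commit) of the new combinators on symbols; `u` and `sym` are assumed to be a Universe and one of its Symbols:

    import u._
    // filter/suchThat narrow a (possibly overloaded) symbol; orElse supplies a fallback.
    val terms: Symbol    = sym filter (_.isTerm)
    val cls: Symbol      = sym suchThat (_.isClass)
    val fallback: Symbol = cls orElse NoSymbol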
diff --git a/src/library/scala/reflect/api/ToolBoxes.scala b/src/library/scala/reflect/api/ToolBoxes.scala
index 387ef5163b..15c9fcc403 100644
--- a/src/library/scala/reflect/api/ToolBoxes.scala
+++ b/src/library/scala/reflect/api/ToolBoxes.scala
@@ -5,17 +5,17 @@ trait ToolBoxes { self: Universe =>
type ToolBox <: AbsToolBox
- def mkToolBox(reporter: Reporter = mkSilentReporter(), options: String = ""): AbsToolBox
+ def mkToolBox(frontEnd: FrontEnd = mkSilentFrontEnd(), options: String = ""): AbsToolBox
// [Eugene] what do you think about the interface? namely about the ``freeTypes'' part.
trait AbsToolBox {
- /** UI of the toolbox.
+ /** Front end of the toolbox.
*
* Accumulates and displays warnings and errors, can drop to interactive mode (if supported).
* The latter can be useful to study the typechecker or to debug complex macros.
*/
- def reporter: Reporter
+ def frontEnd: FrontEnd
/** Typechecks a tree using this ToolBox.
* This populates symbols and types of the tree and possibly transforms it to reflect certain desugarings.
@@ -59,14 +59,14 @@ trait ToolBoxes { self: Universe =>
* Note that this does not revert the tree to its pre-typer shape.
* For more info, read up https://issues.scala-lang.org/browse/SI-5464.
*/
- def resetAllAttrs[T <: Tree](tree: T): T
+ def resetAllAttrs(tree: Tree): Tree
/** Recursively resets locally defined symbols and types in a given tree.
*
* Note that this does not revert the tree to its pre-typer shape.
* For more info, read up https://issues.scala-lang.org/browse/SI-5464.
*/
- def resetLocalAttrs[T <: Tree](tree: T): T
+ def resetLocalAttrs(tree: Tree): Tree
/** Compiles and runs a tree using this ToolBox.
*
diff --git a/src/library/scala/reflect/api/TreeBuildUtil.scala b/src/library/scala/reflect/api/TreeBuildUtil.scala
index 32d7eefa5b..87790b3812 100644
--- a/src/library/scala/reflect/api/TreeBuildUtil.scala
+++ b/src/library/scala/reflect/api/TreeBuildUtil.scala
@@ -62,20 +62,34 @@ trait TreeBuildUtil { self: Universe =>
* @param name the name of the free variable
* @param info the type signature of the free variable
* @param value the value of the free variable at runtime
+ * @param flags (optional) flags of the free variable
* @param origin debug information that tells where this symbol comes from
*/
- def newFreeTerm(name: String, info: Type, value: => Any, origin: String): Symbol
+ def newFreeTerm(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): Symbol
- /** Create a fresh free type symbol.
+ /** Create a fresh free non-existential type symbol.
* @param name the name of the free variable
* @param info the type signature of the free variable
* @param value a type tag that captures the value of the free variable
* is completely phantom, since the captured type cannot be propagated to the runtime
* if it could be, we wouldn't be creating a free type to begin with
* the only usage for it is preserving the captured symbol for compile-time analysis
+ * @param flags (optional) flags of the free variable
* @param origin debug information that tells where this symbol comes from
*/
- def newFreeType(name: String, info: Type, value: => Any, origin: String): Symbol
+ def newFreeType(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): Symbol
+
+ /** Create a fresh free existential type symbol.
+ * @param name the name of the free variable
+ * @param info the type signature of the free variable
+ * @param value a type tag that captures the value of the free variable
+ * is completely phantom, since the captured type cannot be propagated to the runtime
+ * if it could be, we wouldn't be creating a free type to begin with
+ * the only usage for it is preserving the captured symbol for compile-time analysis
+ * @param flags (optional) flags of the free variable
+ * @param origin (optional) debug information that tells where this symbol comes from
+ */
+ def newFreeExistential(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): Symbol
/** Create a Modifiers structure given internal flags, qualifier, annotations */
def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers
@@ -124,4 +138,22 @@ trait AbsTreeGen {
/** Builds a typed Select with an underlying symbol. */
def mkAttributedSelect(qual: Tree, sym: Symbol): Tree
+
+ /** A creator for method calls, e.g. fn[T1, T2, ...](v1, v2, ...)
+ * There are a number of variations.
+ *
+ * @param receiver symbol of the method receiver
+ * @param methodName name of the method to call
+ * @param targs type arguments (if Nil, no TypeApply node will be generated)
+ * @param args value arguments
+ * @return the newly created trees.
+ */
+ def mkMethodCall(receiver: Symbol, methodName: Name, targs: List[Type], args: List[Tree]): Tree
+ def mkMethodCall(method: Symbol, targs: List[Type], args: List[Tree]): Tree
+ def mkMethodCall(method: Symbol, args: List[Tree]): Tree
+ def mkMethodCall(target: Tree, args: List[Tree]): Tree
+ def mkMethodCall(receiver: Symbol, methodName: Name, args: List[Tree]): Tree
+ def mkMethodCall(receiver: Tree, method: Symbol, targs: List[Type], args: List[Tree]): Tree
+ def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree
+ def mkNullaryCall(method: Symbol, targs: List[Type]): Tree
}
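To make the new creators concrete, a hedged sketch (not in the diff) of building a call to `Predef.println("hi")`; it assumes the Universe value `u` exposes its AbsTreeGen instance as `gen`, which is not shown in this hunk:

    import u._
    val call: Tree = gen.mkMethodCall(
      definitions.PredefModule,         // receiver symbol
      newTermName("println"),           // method name
      List(Literal(Constant("hi"))))    // value arguments, no type arguments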
diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala
index 01f948809c..3427136fde 100644
--- a/src/library/scala/reflect/api/Trees.scala
+++ b/src/library/scala/reflect/api/Trees.scala
@@ -16,6 +16,9 @@ trait Trees { self: Universe =>
type Modifiers >: Null <: AbsModifiers
val NoMods: Modifiers
+ // TODO - Where do I put this?
+ object BackquotedIdentifier
+
abstract class AbsModifiers {
def modifiers: Set[Modifier]
def hasModifier(mod: Modifier): Boolean
@@ -85,16 +88,61 @@ trait Trees { self: Universe =>
def pos_=(pos: Position): Unit = rawatt = (rawatt withPos pos) // the "withPos" part is crucial to robustness
def setPos(newpos: Position): this.type = { pos = newpos; this }
- private[this] var rawatt: Attachment = NoPosition
- def attachment: Attachment = rawatt
- def attachment_=(att: Attachment): Unit = rawatt = att
- def setAttachment(att: Attachment): this.type = { rawatt = att; this }
+ // [Eugene] can we make this more type-safe
+ private var rawatt: Attachment = NoPosition
+ def attach(att: Any): Unit =
+ rawatt match {
+ case NontrivialAttachment(pos, payload) =>
+ val index = payload.indexWhere(p => p.getClass == att.getClass)
+ if (index == -1) payload += att
+ else payload(index) = att
+ case _ =>
+ rawatt = NontrivialAttachment(pos, collection.mutable.ListBuffer[Any](att))
+ }
+
+ // a) why didn't this method already exist
+ // b) what is all this "Any" business?
+ // c) am I reverse-engineering this correctly? It shouldn't be hard
+ // to figure out what is attached.
+ def attachments: List[Any] = rawatt match {
+ case NoPosition => Nil
+ case NontrivialAttachment(pos, atts) => pos :: atts.toList
+ case x => List(x)
+ }
+ // Writing "Any" repeatedly to work within this structure
+ // is making my skin crawl.
+ def hasAttachment(x: Any) = attachments contains x
+
+ def withAttachment(att: Any): this.type = { attach(att); this }
+ def detach(att: Any): Unit =
+ detach(att.getClass)
+ def detach(clazz: java.lang.Class[_]): Unit =
+ rawatt match {
+ case NontrivialAttachment(pos, payload) =>
+ val index = payload.indexWhere(p => p.getClass == clazz)
+ if (index != -1) payload.remove(index)
+ case _ =>
+ // do nothing
+ }
+ def withoutAttachment(att: Any): this.type = { detach(att); this }
+ def attachment[T: ClassTag]: T = attachmentOpt[T] getOrElse { throw new Error("no attachment of type %s".format(classTag[T].erasure)) }
+ def attachmentOpt[T: ClassTag]: Option[T] =
+ firstAttachment { case attachment if attachment.getClass == classTag[T].erasure => attachment.asInstanceOf[T] }
+
+ def firstAttachment[T](p: PartialFunction[Any, T]): Option[T] =
+ rawatt match {
+ case NontrivialAttachment(pos, payload) => payload.collectFirst(p)
+ case _ => None
+ }
private[this] var rawtpe: Type = _
def tpe = rawtpe
def tpe_=(t: Type) = rawtpe = t
+ def resetType(): this.type = { tpe = null ; this }
+ def resetSymbol(): this.type = { if (hasSymbol) symbol = NoSymbol ; this }
+
/** Set tpe to give `tp` and return this.
*/
def setType(tp: Type): this.type = { rawtpe = tp; this }
@@ -105,7 +153,7 @@ trait Trees { self: Universe =>
* @PP: Attempting to elaborate on the above, I find: If defineType
* is called on a TypeTree whose type field is null or NoType,
* this is recorded as "wasEmpty = true". That value is used in
- * ResetAttrsTraverser, which nulls out the type field of TypeTrees
+ * ResetAttrs, which nulls out the type field of TypeTrees
* for which wasEmpty is true, leaving the others alone.
*
* resetAllAttrs is used in situations where some speculative
@@ -140,9 +188,14 @@ trait Trees { self: Universe =>
def hasSymbol = false
def isDef = false
def isEmpty = false
- def orElse(alt: => Tree) = if (!isEmpty) this else alt
+ @inline final def orElse(alt: => Tree) = if (!isEmpty) this else alt
+ @inline final def andAlso(f: Tree => Unit): Tree = { if (!this.isEmpty) f(this) ; this }
- def hasSymbolWhich(f: Symbol => Boolean) = hasSymbol && f(symbol)
+ def hasAssignedType = (tpe ne null) && (tpe ne NoType)
+ def hasAssignedSymbol = (symbol ne null) && (symbol ne NoSymbol)
+
+ @inline final def hasSymbolWhich(f: Symbol => Boolean) = hasAssignedSymbol && f(symbol)
+ @inline final def hasTypeWhich(f: Type => Boolean) = hasAssignedType && f(tpe)
/** The canonical way to test if a Tree represents a term.
*/
@@ -238,7 +291,7 @@ trait Trees { self: Universe =>
duplicateTree(this).asInstanceOf[this.type]
private[scala] def copyAttrs(tree: Tree): this.type = {
- attachment = tree.attachment
+ rawatt = tree.rawatt
tpe = tree.tpe
if (hasSymbol) symbol = tree.symbol
this
@@ -249,34 +302,6 @@ trait Trees { self: Universe =>
override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
}
- // [Eugene] uh-oh
- // locker.comp:
- // [mkdir] Created dir: C:\Projects\Kepler\build\locker\classes\compiler
- // [scalacfork] Compiling 471 files to C:\Projects\Kepler\build\locker\classes\compiler
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
- // [scalacfork] amb prefix: Importers.this.type#class Tree Importer.this.from.type#class Tree
-// object Tree {
-// // would be great if in future this generated an Expr[Magic]
-// // where Magic is a magic untyped type that propagates through the entire quasiquote
-// // and turns off typechecking whenever it's involved
-// // that'd allow us to splice trees into quasiquotes and still have these qqs to be partially typechecked
-// // see some exploration of these ideas here: https://github.com/xeno-by/alphakeplerdemo
-// implicit def tree2expr(tree: Tree): Expr[Nothing] = Expr[Nothing](tree)
-// implicit def expr2tree(expr: Expr[_]): Tree = expr.tree
-//
-// // [Eugene] good idea?
-// implicit def trees2exprs(trees: List[Tree]): List[Expr[Nothing]] = trees map tree2expr
-// implicit def exprs2trees(exprs: List[Expr[_]]): List[Tree] = exprs map expr2tree
-// }
-
/** A tree for a term. Not all terms are TermTrees; use isTerm
* to reliably identify terms.
*/
@@ -294,18 +319,24 @@ trait Trees { self: Universe =>
override var symbol: Symbol = NoSymbol
}
+ /** A tree with a name - effectively, a DefTree or RefTree.
+ */
+ trait NameTree extends Tree {
+ def name: Name
+ }
+
/** A tree which references a symbol-carrying entity.
* References one, as opposed to defining one; definitions
* are in DefTrees.
*/
- trait RefTree extends SymTree {
+ trait RefTree extends SymTree with NameTree {
def qualifier: Tree // empty for Idents
def name: Name
}
/** A tree which defines a symbol-carrying entity.
*/
- abstract class DefTree extends SymTree {
+ abstract class DefTree extends SymTree with NameTree {
def name: Name
override def isDef = true
}
@@ -318,6 +349,7 @@ trait Trees { self: Universe =>
override def tpe_=(t: Type) =
if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
override def isEmpty = true
+ override def resetType(): this.type = this
}
/** Common base class for all member definitions: types, classes,
@@ -615,6 +647,9 @@ trait Trees { self: Universe =>
*/
case class TypeApply(fun: Tree, args: List[Tree])
extends GenericApply {
+
+ // Testing the above theory re: args.nonEmpty.
+ require(args.nonEmpty, this)
override def symbol: Symbol = fun.symbol
override def symbol_=(sym: Symbol) { fun.symbol = sym }
}
@@ -628,10 +663,17 @@ trait Trees { self: Universe =>
def Apply(sym: Symbol, args: Tree*): Tree
+ // TODO remove this class, add a tree attachment to Apply to track whether implicits were involved
+ // code that copies trees will all too easily forget to distinguish these subclasses
class ApplyToImplicitArgs(fun: Tree, args: List[Tree]) extends Apply(fun, args)
+ // TODO remove this class, add a tree attachment to Apply to track whether implicits were involved
+ // code that copies trees will all too easily forget to distinguish these subclasses
class ApplyImplicitView(fun: Tree, args: List[Tree]) extends Apply(fun, args)
+ // TODO: use a factory method, not a class (???)
+ // as a case in point of the concern noted above for ApplyToImplicitArgs,
+ // this tree is handled in importers, but not in the tree copier
class ApplyConstructor(tpt: Tree, args: List[Tree]) extends Apply(Select(New(tpt), nme.CONSTRUCTOR), args) {
override def printingPrefix = "ApplyConstructor"
}
@@ -677,16 +719,15 @@ trait Trees { self: Universe =>
/** Identifier <name> */
case class Ident(name: Name) extends RefTree {
def qualifier: Tree = EmptyTree
+ def isBackquoted = this hasAttachment BackquotedIdentifier
}
def Ident(name: String): Ident
def Ident(sym: Symbol): Ident
- class BackQuotedIdent(name: Name) extends Ident(name)
-
/** Marks underlying reference to id as boxed.
- * @pre: id must refer to a captured variable
+ * @pre id must refer to a captured variable
* A reference so marked will refer to the boxed entity; no dereferencing
* with `.elem` is done on it.
* This tree node can be emitted by macros such as reify that call referenceCapturedVariable.
@@ -757,8 +798,8 @@ trait Trees { self: Universe =>
case t => t
}
- orig = followOriginal(tree); setPos(tree.pos);
- this
+ orig = followOriginal(tree)
+ this setPos tree.pos
}
override def defineType(tp: Type): this.type = {
@@ -1122,9 +1163,10 @@ trait Trees { self: Universe =>
def TypeApply(tree: Tree, fun: Tree, args: List[Tree]) =
new TypeApply(fun, args).copyAttrs(tree)
def Apply(tree: Tree, fun: Tree, args: List[Tree]) =
- (tree match {
+ (tree match { // TODO: use a tree attachment to track whether this is an apply to implicit args or a view
case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
+ // TODO: ApplyConstructor ???
case _ => new Apply(fun, args)
}).copyAttrs(tree)
def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
@@ -1135,8 +1177,11 @@ trait Trees { self: Universe =>
new This(qual.toTypeName).copyAttrs(tree)
def Select(tree: Tree, qualifier: Tree, selector: Name) =
new Select(qualifier, selector).copyAttrs(tree)
- def Ident(tree: Tree, name: Name) =
- new Ident(name).copyAttrs(tree)
+ def Ident(tree: Tree, name: Name) = {
+ val t = new Ident(name) copyAttrs tree
+ if (tree hasAttachment BackquotedIdentifier) t withAttachment BackquotedIdentifier
+ else t
+ }
def ReferenceToBoxed(tree: Tree, idt: Ident) =
new ReferenceToBoxed(idt).copyAttrs(tree)
def Literal(tree: Tree, value: Constant) =
@@ -1519,7 +1564,7 @@ trait Trees { self: Universe =>
def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] =
stats mapConserve (stat =>
if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(transform(stat))
- else transform(stat)) filter (EmptyTree !=)
+ else transform(stat)) filter (EmptyTree != _)
def transformModifiers(mods: Modifiers): Modifiers =
mods.mapAnnotations(transformTrees)
diff --git a/src/library/scala/reflect/api/TypeTags.scala b/src/library/scala/reflect/api/TypeTags.scala
index ed47620e13..c58b0fcec2 100644
--- a/src/library/scala/reflect/api/TypeTags.scala
+++ b/src/library/scala/reflect/api/TypeTags.scala
@@ -6,29 +6,49 @@
package scala.reflect
package api
-import scala.reflect.{ mirror => rm }
+import java.lang.{ Class => jClass }
+import language.implicitConversions
/**
* Type tags encapsulate a representation of type T.
* They are supposed to replace the pre-2.10 concept of a [[scala.reflect.Manifest]].
* TypeTags are much better integrated with reflection than manifests are, and are consequently much simpler.
*
- * Type tags are organized in a hierarchy of two classes:
+ * === Overview ===
+ *
+ * Type tags are organized in a hierarchy of five classes:
+ * [[scala.reflect.ArrayTag]], [[scala.reflect.ErasureTag]], [[scala.reflect.ClassTag]],
* [[scala.reflect.api.Universe#TypeTag]] and [[scala.reflect.api.Universe#ConcreteTypeTag]].
- * A [[scala.reflect.api.Universe#TypeTag]] value wraps a full Scala type in its tpe field.
- * A [[scala.reflect.api.Universe#ConcreteTypeTag]] value is a type tag that is guaranteed not to contain any references to type parameters or abstract types.
*
- * It is also possible to capture Java classes by using a different kind of tag.
- * A [[scala.reflect.ClassTag]] value wraps a Java class, which can be accessed via the erasure method.
+ * An [[scala.reflect.ArrayTag]] value carries knowledge about how to build an array of elements of type T.
+ * Typically such an operation is performed by storing an erasure and instantiating arrays via Java reflection,
+ * but [[scala.reflect.ArrayTag]] only defines an interface, not an implementation, hence it only contains the factory methods
+ * `newArray` and `wrap` that can be used to build, respectively, single-dimensional and multi-dimensional arrays.
*
- * TypeTags correspond loosely to Manifests. More precisely:
- * The previous notion of a [[scala.reflect.ClassManifest]] corresponds to a scala.reflect.ClassTag,
- * The previous notion of a [[scala.reflect.Manifest]] corresponds to scala.reflect.mirror.ConcreteTypeTag,
- * Whereas scala.reflect.mirror.TypeTag is approximated by the previous notion of [[scala.reflect.OptManifest]].
+ * An [[scala.reflect.ErasureTag]] value wraps a Java class, which can be accessed via the `erasure` method.
+ * This notion, previously bundled into [[scala.reflect.ClassManifest]] together with array creation,
+ * deserves a concept of its own. Quite often (e.g. for serialization or classloader introspection) it's useful to
+ * know the erasure, and only the erasure, so we've captured this notion in [[scala.reflect.ErasureTag]].
+ *
+ * A [[scala.reflect.ClassTag]] is a standard implementation of both [[scala.reflect.ArrayTag]] and [[scala.reflect.ErasureTag]].
+ * It guarantees that the source type T does not contain any references to type parameters or abstract types.
+ * [[scala.reflect.ClassTag]] corresponds to the previous notion of [[scala.reflect.ClassManifest]].
+ *
+ * A [[scala.reflect.api.Universe#TypeTag]] value wraps a full Scala type in its tpe field.
+ * A [[scala.reflect.api.Universe#ConcreteTypeTag]] value is a [[scala.reflect.api.Universe#TypeTag]]
+ * that is guaranteed not to contain any references to type parameters or abstract types.
+ * Both flavors of TypeTags also carry an erasure, so [[scala.reflect.api.Universe#TypeTag]] is also an [[scala.reflect.ErasureTag]],
+ * and [[scala.reflect.api.Universe#ConcreteTypeTag]] is additionally an [[scala.reflect.ArrayTag]] and a [[scala.reflect.ClassTag]].
+ *
+ * It is recommended to use the most general of the tag types that precisely expresses your intent (see the sketch below), i.e.:
+ * use ArrayTag when you want to construct arrays,
+ * use ErasureTag when you need an erasure and don't mind it being generated for untagged abstract types,
+ * use ClassTag only when you need an erasure of a type that doesn't refer to untagged abstract types.
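+ *
+ * A minimal sketch of this advice (the helper method names below are illustrative, not part of the API):
+ *
+ *   def newArrayOf[T: ArrayTag](size: Int) = implicitly[ArrayTag[T]].newArray(size) // only needs array creation
+ *   def erasureOf[T: ErasureTag] = implicitly[ErasureTag[T]].erasure                // only needs an erasure
+ *   def concreteErasureOf[T: ClassTag] = implicitly[ClassTag[T]].erasure            // T must not refer to untagged abstract types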
*
- * Implicit in the contract for all Tag classes is that the reified type tpe represents the type parameter T.
- * Tags are typically created by the compiler, which makes sure that this contract is kept.
+ * === Splicing ===
*
+ * Tags can be spliced, i.e. if the compiler generates a tag for a type that contains references to tagged
+ * type parameters or abstract type members, it will retrieve the corresponding tags and embed them into the result.
* As an example that illustrates TypeTag embedding, consider the following function:
*
* import reflect.mirror._
@@ -42,6 +62,54 @@ import scala.reflect.{ mirror => rm }
* TypeTag(<[ String => U ]>).
*
* Note that T has been replaced by String, because it comes with a TypeTag in f, whereas U was left as a type parameter.
+ *
+ * === ErasureTag vs ClassTag and TypeTag vs ConcreteTypeTag ===
+ *
+ * Be careful with ErasureTag and TypeTag, because they will reify types even if these types are abstract.
+ * This makes it easy to forget to tag one of the methods in the call chain and to discover the mistake
+ * much later at runtime, through cryptic errors far away from their source. For example, consider the following snippet:
+ *
+ * def bind[T: TypeTag](name: String, value: T): IR.Result = bind((name, value))
+ * def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
+ * object NamedParam {
+ * implicit def namedValue[T: TypeTag](name: String, x: T): NamedParam = apply(name, x)
+ * def apply[T: TypeTag](name: String, x: T): NamedParam = new Typed[T](name, x)
+ * }
+ *
+ * This fragment of Scala REPL implementation defines a `bind` function that carries a named value along with its type
+ * into the heart of the REPL. Using a [[scala.reflect.api.Universe#TypeTag]] here is reasonable, because it is desirable
+ * to work with all types, even if they are type parameters or abstract type members.
+ *
+ * However, if any of the three `TypeTag` context bounds is omitted, the resulting code will be incorrect,
+ * because the missing `TypeTag` will be transparently generated by the compiler, carrying meaningless information.
+ * Most likely, this problem will manifest itself elsewhere, making debugging complicated.
+ * If `TypeTag` context bounds were replaced with `ConcreteTypeTag`, then such errors would be reported statically.
+ * But in that case we wouldn't be able to use `bind` in arbitrary contexts.
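+ *
+ * For instance, a minimal sketch (`persist` is a made-up name):
+ *
+ *   def persist[T: ConcreteTypeTag](value: T) = ...
+ *   def broken[T](value: T) = persist(value)
+ *   // the second definition fails to compile with "No ConcreteTypeTag available for T",
+ *   // instead of silently producing a meaningless tag at runtime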
+ *
+ * === Backward compatibility ===
+ *
+ * TypeTags correspond loosely to Manifests. More precisely:
+ * the previous notion of a [[scala.reflect.ClassManifest]] corresponds to scala.reflect.ClassTag,
+ * the previous notion of a [[scala.reflect.Manifest]] corresponds to scala.reflect.mirror.ConcreteTypeTag,
+ * whereas scala.reflect.mirror.TypeTag is approximated by the previous notion of [[scala.reflect.OptManifest]].
+ *
+ * In Scala 2.10, manifests are deprecated, so it's advisable to migrate them to tags,
+ * because manifests might be removed in the next major release.
+ *
+ * In most cases it will be enough to replace ClassManifests with ClassTags and Manifests with ConcreteTypeTags
+ * (see the sketch after the caveats below); however, there are a few caveats:
+ *
+ * 1) The notion of OptManifest is no longer supported. Tags can reify arbitrary types, so they are always available.
+ * // [Eugene] it might be useful, though, to guard against abstractness of the incoming type.
+ *
+ * 2) There's no equivalent for AnyValManifest. Consider comparing your tag with one of the core tags
+ * (defined in the corresponding companion objects) to find out whether it represents a primitive value class.
+ *
+ * 3) There's no replacement for factory methods defined in `ClassManifest` and `Manifest` companion objects.
+ * Consider assembling the corresponding types using the reflection APIs provided by Java (for classes) and Scala (for types).
+ *
+ * 4) Certain manifest functions (such as `<:<`, `>:>` and `typeArguments`) weren't included in the tag API.
+ * Consider using the reflection APIs provided by Java (for classes) and Scala (for types) instead.
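+ *
+ * A minimal migration sketch under the mapping above (the methods are illustrative):
+ *
+ *   // Scala 2.9:  def mkArray[T: ClassManifest](size: Int) = new Array[T](size)
+ *   // Scala 2.10: def mkArray[T: ClassTag](size: Int) = new Array[T](size)
+ *   // Scala 2.9:  def tpeString[T: Manifest] = manifest[T].toString
+ *   // Scala 2.10: def tpeString[T: ConcreteTypeTag] = implicitly[ConcreteTypeTag[T]].tpe.toString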
*/
trait TypeTags { self: Universe =>
@@ -54,22 +122,20 @@ trait TypeTags { self: Universe =>
* @see [[scala.reflect.api.TypeTags]]
*/
@annotation.implicitNotFound(msg = "No TypeTag available for ${T}")
- abstract case class TypeTag[T](tpe: Type) {
- // it's unsafe to use assert here, because we might run into deadlocks with Predef
- // also see comments in ClassTags.scala
- // assert(tpe != null)
+ trait TypeTag[T] extends ErasureTag[T] with Equals with Serializable {
- def sym = tpe.typeSymbol
+ def tpe: Type
+ def sym: Symbol = tpe.typeSymbol
- def isConcrete = !isNotConcrete
- def isNotConcrete = tpe exists (_.typeSymbol.isAbstractType)
- def toConcrete: ConcreteTypeTag[T] = ConcreteTypeTag[T](tpe)
+ def isConcrete: Boolean = tpe.isConcrete
+ def notConcrete: Boolean = !isConcrete
+ def toConcrete: ConcreteTypeTag[T] = ConcreteTypeTag[T](tpe, erasure)
- override def toString = {
- var prefix = if (isConcrete) "ConcreteTypeTag" else "TypeTag"
- if (prefix != this.productPrefix) prefix = "*" + prefix
- prefix + "[" + tpe + "]"
- }
+ /** case class accessories */
+ override def canEqual(x: Any) = x.isInstanceOf[TypeTag[_]]
+ override def equals(x: Any) = x.isInstanceOf[TypeTag[_]] && this.tpe == x.asInstanceOf[TypeTag[_]].tpe
+ override def hashCode = scala.runtime.ScalaRunTime.hash(tpe)
+ override def toString = if (!self.isInstanceOf[DummyMirror]) (if (isConcrete) "*ConcreteTypeTag" else "TypeTag") + "[" + tpe + "]" else "TypeTag[?]"
}
object TypeTag {
@@ -90,8 +156,10 @@ trait TypeTags { self: Universe =>
val Null : TypeTag[scala.Null] = ConcreteTypeTag.Null
val String : TypeTag[java.lang.String] = ConcreteTypeTag.String
- def apply[T](tpe: Type): TypeTag[T] =
- tpe match {
+ // todo. uncomment after I redo the starr
+ // def apply[T](tpe1: Type, erasure1: jClass[_]): TypeTag[T] =
+ def apply[T](tpe1: Type, erasure1: jClass[_]): TypeTag[T] =
+ tpe1 match {
case ByteTpe => TypeTag.Byte.asInstanceOf[TypeTag[T]]
case ShortTpe => TypeTag.Short.asInstanceOf[TypeTag[T]]
case CharTpe => TypeTag.Char.asInstanceOf[TypeTag[T]]
@@ -108,8 +176,10 @@ trait TypeTags { self: Universe =>
case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]]
case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]]
case StringTpe => TypeTag.String.asInstanceOf[TypeTag[T]]
- case _ => new TypeTag[T](tpe) {}
+ case _ => new TypeTag[T]{ def tpe = tpe1; def erasure = erasure1 }
}
+
+ def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe)
}
/**
@@ -119,34 +189,40 @@ trait TypeTags { self: Universe =>
* @see [[scala.reflect.api.TypeTags]]
*/
@annotation.implicitNotFound(msg = "No ConcreteTypeTag available for ${T}")
- class ConcreteTypeTag[T](tpe: Type) extends TypeTag[T](tpe) {
- // it's unsafe to use assert here, because we might run into deadlocks with Predef
- // also see comments in ClassTags.scala
- //assert(isConcrete, tpe)
- if (isNotConcrete) throw new Error("%s (%s) is not concrete and cannot be used to construct a concrete type tag".format(tpe, tpe.kind))
- override def productPrefix = "ConcreteTypeTag"
+ trait ConcreteTypeTag[T] extends TypeTag[T] with ClassTag[T] with Equals with Serializable {
+ if (!self.isInstanceOf[DummyMirror]) {
+ if (notConcrete) throw new Error("%s (%s) is not concrete and cannot be used to construct a concrete type tag".format(tpe, tpe.kind))
+ }
+
+ /** case class accessories */
+ override def canEqual(x: Any) = x.isInstanceOf[TypeTag[_]] // this is done on purpose. TypeTag(tpe) and ConcreteTypeTag(tpe) should be equal if tpe's are equal
+ override def equals(x: Any) = x.isInstanceOf[TypeTag[_]] && this.tpe == x.asInstanceOf[TypeTag[_]].tpe
+ override def hashCode = scala.runtime.ScalaRunTime.hash(tpe)
+ override def toString = if (!self.isInstanceOf[DummyMirror]) "ConcreteTypeTag[" + tpe + "]" else "ConcreteTypeTag[?]"
}
object ConcreteTypeTag {
- val Byte : ConcreteTypeTag[scala.Byte] = new ConcreteTypeTag[scala.Byte](ByteTpe) { private def readResolve() = ConcreteTypeTag.Byte }
- val Short : ConcreteTypeTag[scala.Short] = new ConcreteTypeTag[scala.Short](ShortTpe) { private def readResolve() = ConcreteTypeTag.Short }
- val Char : ConcreteTypeTag[scala.Char] = new ConcreteTypeTag[scala.Char](CharTpe) { private def readResolve() = ConcreteTypeTag.Char }
- val Int : ConcreteTypeTag[scala.Int] = new ConcreteTypeTag[scala.Int](IntTpe) { private def readResolve() = ConcreteTypeTag.Int }
- val Long : ConcreteTypeTag[scala.Long] = new ConcreteTypeTag[scala.Long](LongTpe) { private def readResolve() = ConcreteTypeTag.Long }
- val Float : ConcreteTypeTag[scala.Float] = new ConcreteTypeTag[scala.Float](FloatTpe) { private def readResolve() = ConcreteTypeTag.Float }
- val Double : ConcreteTypeTag[scala.Double] = new ConcreteTypeTag[scala.Double](DoubleTpe) { private def readResolve() = ConcreteTypeTag.Double }
- val Boolean : ConcreteTypeTag[scala.Boolean] = new ConcreteTypeTag[scala.Boolean](BooleanTpe) { private def readResolve() = ConcreteTypeTag.Boolean }
- val Unit : ConcreteTypeTag[scala.Unit] = new ConcreteTypeTag[scala.Unit](UnitTpe) { private def readResolve() = ConcreteTypeTag.Unit }
- val Any : ConcreteTypeTag[scala.Any] = new ConcreteTypeTag[scala.Any](AnyTpe) { private def readResolve() = ConcreteTypeTag.Any }
- val Object : ConcreteTypeTag[java.lang.Object] = new ConcreteTypeTag[java.lang.Object](ObjectTpe) { private def readResolve() = ConcreteTypeTag.Object }
- val AnyVal : ConcreteTypeTag[scala.AnyVal] = new ConcreteTypeTag[scala.AnyVal](AnyValTpe) { private def readResolve() = ConcreteTypeTag.AnyVal }
- val AnyRef : ConcreteTypeTag[scala.AnyRef] = new ConcreteTypeTag[scala.AnyRef](AnyRefTpe) { private def readResolve() = ConcreteTypeTag.AnyRef }
- val Nothing : ConcreteTypeTag[scala.Nothing] = new ConcreteTypeTag[scala.Nothing](NothingTpe) { private def readResolve() = ConcreteTypeTag.Nothing }
- val Null : ConcreteTypeTag[scala.Null] = new ConcreteTypeTag[scala.Null](NullTpe) { private def readResolve() = ConcreteTypeTag.Null }
- val String : ConcreteTypeTag[java.lang.String] = new ConcreteTypeTag[java.lang.String](StringTpe) { private def readResolve() = ConcreteTypeTag.String }
-
- def apply[T](tpe: Type): ConcreteTypeTag[T] =
- tpe match {
+ val Byte : ConcreteTypeTag[scala.Byte] = new ConcreteTypeTag[scala.Byte]{ def tpe = ByteTpe; def erasure = ClassTag.Byte.erasure; private def readResolve() = ConcreteTypeTag.Byte }
+ val Short : ConcreteTypeTag[scala.Short] = new ConcreteTypeTag[scala.Short]{ def tpe = ShortTpe; def erasure = ClassTag.Short.erasure; private def readResolve() = ConcreteTypeTag.Short }
+ val Char : ConcreteTypeTag[scala.Char] = new ConcreteTypeTag[scala.Char]{ def tpe = CharTpe; def erasure = ClassTag.Char.erasure; private def readResolve() = ConcreteTypeTag.Char }
+ val Int : ConcreteTypeTag[scala.Int] = new ConcreteTypeTag[scala.Int]{ def tpe = IntTpe; def erasure = ClassTag.Int.erasure; private def readResolve() = ConcreteTypeTag.Int }
+ val Long : ConcreteTypeTag[scala.Long] = new ConcreteTypeTag[scala.Long]{ def tpe = LongTpe; def erasure = ClassTag.Long.erasure; private def readResolve() = ConcreteTypeTag.Long }
+ val Float : ConcreteTypeTag[scala.Float] = new ConcreteTypeTag[scala.Float]{ def tpe = FloatTpe; def erasure = ClassTag.Float.erasure; private def readResolve() = ConcreteTypeTag.Float }
+ val Double : ConcreteTypeTag[scala.Double] = new ConcreteTypeTag[scala.Double]{ def tpe = DoubleTpe; def erasure = ClassTag.Double.erasure; private def readResolve() = ConcreteTypeTag.Double }
+ val Boolean : ConcreteTypeTag[scala.Boolean] = new ConcreteTypeTag[scala.Boolean]{ def tpe = BooleanTpe; def erasure = ClassTag.Boolean.erasure; private def readResolve() = ConcreteTypeTag.Boolean }
+ val Unit : ConcreteTypeTag[scala.Unit] = new ConcreteTypeTag[scala.Unit]{ def tpe = UnitTpe; def erasure = ClassTag.Unit.erasure; private def readResolve() = ConcreteTypeTag.Unit }
+ val Any : ConcreteTypeTag[scala.Any] = new ConcreteTypeTag[scala.Any]{ def tpe = AnyTpe; def erasure = ClassTag.Any.erasure; private def readResolve() = ConcreteTypeTag.Any }
+ val Object : ConcreteTypeTag[java.lang.Object] = new ConcreteTypeTag[java.lang.Object]{ def tpe = ObjectTpe; def erasure = ClassTag.Object.erasure; private def readResolve() = ConcreteTypeTag.Object }
+ val AnyVal : ConcreteTypeTag[scala.AnyVal] = new ConcreteTypeTag[scala.AnyVal]{ def tpe = AnyValTpe; def erasure = ClassTag.AnyVal.erasure; private def readResolve() = ConcreteTypeTag.AnyVal }
+ val AnyRef : ConcreteTypeTag[scala.AnyRef] = new ConcreteTypeTag[scala.AnyRef]{ def tpe = AnyRefTpe; def erasure = ClassTag.AnyRef.erasure; private def readResolve() = ConcreteTypeTag.AnyRef }
+ val Nothing : ConcreteTypeTag[scala.Nothing] = new ConcreteTypeTag[scala.Nothing]{ def tpe = NothingTpe; def erasure = ClassTag.Nothing.erasure; private def readResolve() = ConcreteTypeTag.Nothing }
+ val Null : ConcreteTypeTag[scala.Null] = new ConcreteTypeTag[scala.Null]{ def tpe = NullTpe; def erasure = ClassTag.Null.erasure; private def readResolve() = ConcreteTypeTag.Null }
+ val String : ConcreteTypeTag[java.lang.String] = new ConcreteTypeTag[java.lang.String]{ def tpe = StringTpe; def erasure = ClassTag.String.erasure; private def readResolve() = ConcreteTypeTag.String }
+
+ // todo. uncomment after I redo the starr
+ // def apply[T](tpe1: Type, erasure1: jClass[_]): ConcreteTypeTag[T] =
+ def apply[T](tpe1: Type, erasure1: jClass[_] = null): ConcreteTypeTag[T] =
+ tpe1 match {
case ByteTpe => ConcreteTypeTag.Byte.asInstanceOf[ConcreteTypeTag[T]]
case ShortTpe => ConcreteTypeTag.Short.asInstanceOf[ConcreteTypeTag[T]]
case CharTpe => ConcreteTypeTag.Char.asInstanceOf[ConcreteTypeTag[T]]
@@ -163,26 +239,10 @@ trait TypeTags { self: Universe =>
case NothingTpe => ConcreteTypeTag.Nothing.asInstanceOf[ConcreteTypeTag[T]]
case NullTpe => ConcreteTypeTag.Null.asInstanceOf[ConcreteTypeTag[T]]
case StringTpe => ConcreteTypeTag.String.asInstanceOf[ConcreteTypeTag[T]]
- case _ => new ConcreteTypeTag[T](tpe) {}
+ case _ => new ConcreteTypeTag[T]{ def tpe = tpe1; def erasure = erasure1 }
}
def unapply[T](ttag: TypeTag[T]): Option[Type] = if (ttag.isConcrete) Some(ttag.tpe) else None
-
- implicit def toClassTag[T](ttag: rm.ConcreteTypeTag[T]): ClassTag[T] = ClassTag[T](rm.typeToClass(ttag.tpe.erasure))
-
- implicit def toDeprecatedManifestApis[T](ttag: rm.ConcreteTypeTag[T]): DeprecatedManifestApis[T] = new DeprecatedManifestApis[T](ttag)
-
- // this class should not be used directly in client code
- class DeprecatedManifestApis[T](ttag: rm.ConcreteTypeTag[T]) extends DeprecatedClassManifestApis[T](toClassTag(ttag)) {
- @deprecated("Use `tpe` to analyze the underlying type", "2.10.0")
- def <:<(that: Manifest[_]): Boolean = ttag.tpe <:< that.tpe
-
- @deprecated("Use `tpe` to analyze the underlying type", "2.10.0")
- def >:>(that: Manifest[_]): Boolean = that <:< ttag
-
- @deprecated("Use `tpe` to analyze the type arguments", "2.10.0")
- override def typeArguments: List[Manifest[_]] = ttag.tpe.typeArguments map (targ => rm.ConcreteTypeTag(targ))
- }
}
// incantations for summoning
@@ -191,4 +251,4 @@ trait TypeTags { self: Universe =>
// def typeTag[T](implicit ttag: TypeTag[T]) = ttag
// def concreteTag[T](implicit gttag: ConcreteTypeTag[T]) = cttag
// def concreteTypeTag[T](implicit gttag: ConcreteTypeTag[T]) = cttag
-} \ No newline at end of file
+}
diff --git a/src/library/scala/reflect/api/Types.scala b/src/library/scala/reflect/api/Types.scala
index 12aad453b1..3d42242641 100755
--- a/src/library/scala/reflect/api/Types.scala
+++ b/src/library/scala/reflect/api/Types.scala
@@ -53,10 +53,26 @@ trait Types { self: Universe =>
*/
def typeArguments: List[Type]
+ /** For a (potentially wrapped) poly type, its type parameters,
+ * the empty list for all other types */
+ def typeParams: List[Symbol]
+
/** Is this type a type constructor that is missing its type arguments?
*/
def isHigherKinded: Boolean // !!! This should be called "isTypeConstructor", no?
+ /** Returns the corresponding type constructor (e.g. List for List[T] or List[String])
+ */
+ def typeConstructor: Type
+
+ /** Does this type refer to spliceable types, or is it itself a spliceable type?
+ */
+ def isConcrete: Boolean
+
+ /** Is this type an abstract type that needs to be resolved?
+ */
+ def isSpliceable: Boolean
+
/**
* Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
* Functions on types are also implemented as PolyTypes.
@@ -95,10 +111,12 @@ trait Types { self: Universe =>
* Proceed analogously for thistypes referring to outer classes.
*
* Example:
+ * {{{
* class D[T] { def m: T }
* class C extends p.D[Int]
* T.asSeenFrom(ThisType(C), D) (where D is owner of m)
* = Int
+ * }}}
*/
def asSeenFrom(pre: Type, clazz: Symbol): Type
@@ -253,6 +271,7 @@ trait Types { self: Universe =>
* (T # x).type SingleType(T, x)
* p.x.type SingleType(p.type, x)
* x.type SingleType(NoPrefix, x)
+ * }}}
*/
type SingleType <: SingletonType
@@ -447,10 +466,10 @@ trait Types { self: Universe =>
def unapply(tpe: AnnotatedType): Option[(List[AnnotationInfo], Type, Symbol)]
}
- /** The least upper bound wrt <:< of a list of types */
+ /** The least upper bound of a list of types, as determined by `<:<`. */
def lub(xs: List[Type]): Type
- /** The greatest lower bound wrt <:< of a list of types */
+ /** The greatest lower bound of a list of types, as determined by `<:<`. */
def glb(ts: List[Type]): Type
// Creators ---------------------------------------------------------------
@@ -464,10 +483,6 @@ trait Types { self: Universe =>
def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type
/** The canonical creator for a refined type with an initially empty scope.
- *
- * @param parents ...
- * @param owner ...
- * @return ...
*/
def refinedType(parents: List[Type], owner: Symbol): Type
@@ -498,15 +513,17 @@ trait Types { self: Universe =>
/** A creator for existential types. This generates:
*
- * tpe1 where { tparams }
+ * {{{
+ * tpe1 where { tparams }
+ * }}}
*
- * where `tpe1` is the result of extrapolating `tpe` wrt to `tparams`.
+ * where `tpe1` is the result of extrapolating `tpe` with regard to `tparams`.
* Extrapolating means that type variables in `tparams` occurring
* in covariant positions are replaced by upper bounds, (minus any
* SingletonClass markers), type variables in `tparams` occurring in
* contravariant positions are replaced by upper bounds, provided the
- * resulting type is legal wrt to stability, and does not contain any type
- * variable in `tparams`.
+ * resulting type is legal with regard to stability, and does not contain
+ * any type variable in `tparams`.
*
* The abstraction drops all type parameters that are not directly or
* indirectly referenced by type `tpe1`. If there are no remaining type
diff --git a/src/library/scala/reflect/api/Universe.scala b/src/library/scala/reflect/api/Universe.scala
index 60abd267cb..05b5963c73 100755
--- a/src/library/scala/reflect/api/Universe.scala
+++ b/src/library/scala/reflect/api/Universe.scala
@@ -1,6 +1,8 @@
package scala.reflect
package api
+import language.experimental.macros
+
abstract class Universe extends Symbols
with FreeVars
with Types
@@ -18,7 +20,7 @@ abstract class Universe extends Symbols
with ClassLoaders
with TreeBuildUtil
with ToolBoxes
- with Reporters
+ with FrontEnds
with Importers {
/** Given an expression, generate a tree that when compiled and executed produces the original tree.
@@ -26,18 +28,23 @@ abstract class Universe extends Symbols
*
* For instance, given the abstract syntax tree representation of the <[ x + 1 ]> expression:
*
+ * {{{
* Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
+ * }}}
*
* The reifier transforms it to the following expression:
- *
+ *
+ * {{{
* <[
* val $mr: scala.reflect.api.Universe = <reference to the Universe that calls the reify>
* $mr.Expr[Int]($mr.Apply($mr.Select($mr.Ident($mr.newFreeVar("x", <Int>, x), "+"), List($mr.Literal($mr.Constant(1))))))
* ]>
+ * }}}
*
* Reification performs expression splicing (when processing Expr.eval and Expr.value)
* and type splicing (for every type T that has a TypeTag[T] implicit in scope):
*
+ * {{{
* val two = mirror.reify(2) // Literal(Constant(2))
* val four = mirror.reify(two.eval + two.eval) // Apply(Select(two.tree, newTermName("$plus")), List(two.tree))
*
@@ -49,6 +56,7 @@ abstract class Universe extends Symbols
* val factory = c.reify{ new Queryable[T] }
* ...
* }
+ * }}}
*
* The transformation looks mostly straightforward, but it has its tricky parts:
* * Reifier retains symbols and types defined outside the reified tree, however
@@ -63,26 +71,7 @@ abstract class Universe extends Symbols
object Universe {
def reify[T](cc: scala.reflect.makro.Context{ type PrefixType = Universe })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = {
- import cc.mirror._
- try cc.reifyTree(cc.prefix, expr)
- catch {
- case ex: Throwable =>
- // [Eugene] cannot pattern match on an abstract type, so had to do this
- val ex1 = ex
- if (ex.getClass.toString.endsWith("$ReificationError")) {
- ex match {
- case cc.ReificationError(pos, msg) =>
- cc.error(pos, msg)
- EmptyTree
- }
- } else if (ex.getClass.toString.endsWith("$UnexpectedReificationError")) {
- ex match {
- case cc.UnexpectedReificationError(pos, err, cause) =>
- if (cause != null) throw cause else throw ex
- }
- } else {
- throw ex
- }
- }
+ import scala.reflect.makro.internal._
+ cc.Expr(cc.materializeExpr(cc.prefix.tree, expr.tree))
}
}
diff --git a/src/library/scala/reflect/makro/Context.scala b/src/library/scala/reflect/makro/Context.scala
index 96a41377b3..b8fb0dcce5 100644
--- a/src/library/scala/reflect/makro/Context.scala
+++ b/src/library/scala/reflect/makro/Context.scala
@@ -1,5 +1,7 @@
package scala.reflect.makro
+import language.experimental.macros
+
// todo. introduce context hierarchy
// the most lightweight context should just expose the stuff from the SIP
// the full context should include all traits from scala.reflect.makro (and probably reside in scala-compiler.jar)
@@ -10,7 +12,7 @@ trait Context extends Aliases
with Infrastructure
with Names
with Reifiers
- with Reporters
+ with FrontEnds
with Settings
with Symbols
with Typers
@@ -32,28 +34,11 @@ trait Context extends Aliases
object Context {
def reify[T](cc: Context{ type PrefixType = Context })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = {
import cc.mirror._
+ import scala.reflect.makro.internal._
// [Eugene] how do I typecheck this without undergoing this tiresome (and, in general, incorrect) procedure?
- val prefix: Tree = Select(cc.prefix, newTermName("mirror"))
+ val prefix: Tree = Select(cc.prefix.tree, newTermName("mirror"))
val prefixTpe = cc.typeCheck(TypeApply(Select(prefix, newTermName("asInstanceOf")), List(SingletonTypeTree(prefix)))).tpe
prefix setType prefixTpe
- try cc.reifyTree(prefix, expr)
- catch {
- case ex: Throwable =>
- // [Eugene] cannot pattern match on an abstract type, so had to do this
- if (ex.getClass.toString.endsWith("$ReificationError")) {
- ex match {
- case cc.ReificationError(pos, msg) =>
- cc.error(pos, msg)
- EmptyTree
- }
- } else if (ex.getClass.toString.endsWith("$UnexpectedReificationError")) {
- ex match {
- case cc.UnexpectedReificationError(pos, err, cause) =>
- if (cause != null) throw cause else throw ex
- }
- } else {
- throw ex
- }
- }
+ cc.Expr(cc.materializeExpr(prefix, expr.tree))
}
}
diff --git a/src/library/scala/reflect/makro/Reporters.scala b/src/library/scala/reflect/makro/FrontEnds.scala
index 7341b0e0b7..a1e24dcea3 100644
--- a/src/library/scala/reflect/makro/Reporters.scala
+++ b/src/library/scala/reflect/makro/FrontEnds.scala
@@ -1,14 +1,14 @@
package scala.reflect.makro
-trait Reporters {
+trait FrontEnds {
self: Context =>
import mirror._
/** Exposes means to control the compiler UI */
- def reporter: Reporter
- def setReporter(reporter: Reporter): this.type
- def withReporter[T](reporter: Reporter)(op: => T): T
+ def frontEnd: FrontEnd
+ def setFrontEnd(frontEnd: FrontEnd): this.type
+ def withFrontEnd[T](frontEnd: FrontEnd)(op: => T): T
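+
+ // A hypothetical usage sketch (`customFrontEnd` stands for any FrontEnd implementation):
+ //   withFrontEnd(customFrontEnd) { typeCheck(tree) }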
/** For sending a message which should not be labeled as a warning/error,
* but also shouldn't require -verbose to be visible.
diff --git a/src/library/scala/reflect/makro/Reifiers.scala b/src/library/scala/reflect/makro/Reifiers.scala
index d690df6aee..b1de8d9957 100644
--- a/src/library/scala/reflect/makro/Reifiers.scala
+++ b/src/library/scala/reflect/makro/Reifiers.scala
@@ -46,7 +46,12 @@ trait Reifiers {
* The produced tree will be bound to the mirror specified by ``prefix'' (also see ``reflectMirrorPrefix'').
* For more information and examples see the documentation for ``Context.reifyTree'' and ``Universe.reify''.
*/
- def reifyType(prefix: Tree, tpe: Type, dontSpliceAtTopLevel: Boolean = false, requireConcreteTypeTag: Boolean = false): Tree
+ def reifyType(prefix: Tree, tpe: Type, dontSpliceAtTopLevel: Boolean = false, concrete: Boolean = false): Tree
+
+ /** Given a type, generate a tree that when compiled and executed produces the erasure of the original type.
+ * If ``concrete'' is true, then this function will bail on types whose erasure includes abstract types (just as `ClassTag` does).
+ */
+ def reifyErasure(tpe: Type, concrete: Boolean = true): Tree
/** Undoes reification of a tree.
*
@@ -63,20 +68,10 @@ trait Reifiers {
* 3) compileAndEval(unreifyTree(reifyTree(tree))) ~ compileAndEval(tree) // at runtime original and unreified trees are behaviorally equivalent
*/
def unreifyTree(tree: Tree): Tree
+}
- /** Represents an error during reification
- */
- type ReificationError <: Throwable
- val ReificationError: ReificationErrorExtractor
- abstract class ReificationErrorExtractor {
- def unapply(error: ReificationError): Option[(Position, String)]
- }
+// these are deliberately not path-dependent; otherwise exception handling quickly becomes a mess
- /** Wraps an unexpected error during reification
- */
- type UnexpectedReificationError <: Throwable
- val UnexpectedReificationError: UnexpectedReificationErrorExtractor
- abstract class UnexpectedReificationErrorExtractor {
- def unapply(error: UnexpectedReificationError): Option[(Position, String, Throwable)]
- }
-}
+case class ReificationError(var pos: reflect.api.Position, val msg: String) extends Throwable(msg)
+
+case class UnexpectedReificationError(val pos: reflect.api.Position, val msg: String, val cause: Throwable = null) extends Throwable(msg, cause) \ No newline at end of file
diff --git a/src/library/scala/reflect/makro/Symbols.scala b/src/library/scala/reflect/makro/Symbols.scala
index 91a5f6d8a5..ca1c17534c 100644
--- a/src/library/scala/reflect/makro/Symbols.scala
+++ b/src/library/scala/reflect/makro/Symbols.scala
@@ -14,4 +14,11 @@ trait Symbols {
* With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment.
*/
def isLocatable(sym: Symbol): Boolean
+
+ /** Is this symbol static (i.e. with no outer instance)?
+ * Q: When exactly is a sym marked as STATIC?
+ * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
+ * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
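+ *
+ * An illustrative sketch (not part of this API):
+ * {{{
+ *   object Outer { object Inner { def foo = 1 } }  // Inner and foo are static
+ *   class C { object O }                           // O is not static: it needs an outer instance of C
+ * }}}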
+ */
+ def isStatic(sym: Symbol): Boolean
} \ No newline at end of file
diff --git a/src/library/scala/reflect/makro/Typers.scala b/src/library/scala/reflect/makro/Typers.scala
index 1ced2daccd..90024a4f7a 100644
--- a/src/library/scala/reflect/makro/Typers.scala
+++ b/src/library/scala/reflect/makro/Typers.scala
@@ -29,7 +29,7 @@ trait Typers {
*
* If ``silent'' is false, ``TypeError'' will be thrown in case of a typecheck error.
* If ``silent'' is true, the typecheck is silent and will return ``EmptyTree'' if an error occurs.
- * Such errors don't vanish and can be inspected by turning on -Ymacro-debug.
+ * Such errors don't vanish and can be inspected by turning on -Ymacro-debug-verbose.
* Unlike in ``inferImplicitValue'' and ``inferImplicitView'', ``silent'' is false by default.
*
* Typechecking can be steered with the following optional parameters:
@@ -66,14 +66,14 @@ trait Typers {
* Note that this does not revert the tree to its pre-typer shape.
* For more info, read up https://issues.scala-lang.org/browse/SI-5464.
*/
- def resetAllAttrs[T <: Tree](tree: T): T
+ def resetAllAttrs(tree: Tree): Tree
/** Recursively resets locally defined symbols and types in a given tree.
*
* Note that this does not revert the tree to its pre-typer shape.
* For more info, read up https://issues.scala-lang.org/browse/SI-5464.
*/
- def resetLocalAttrs[T <: Tree](tree: T): T
+ def resetLocalAttrs(tree: Tree): Tree
/** Represents an error during typechecking
*/
diff --git a/src/library/scala/reflect/makro/internal/Utils.scala b/src/library/scala/reflect/makro/internal/Utils.scala
new file mode 100644
index 0000000000..3af58e1c88
--- /dev/null
+++ b/src/library/scala/reflect/makro/internal/Utils.scala
@@ -0,0 +1,146 @@
+package scala.reflect.makro
+
+import scala.reflect.api.Universe
+import language.implicitConversions
+import language.experimental.macros
+
+/** This package is required by the compiler and <b>should not be used in client code</b>. */
+package object internal {
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeArrayTag[T](u: Universe): ArrayTag[T] = macro materializeArrayTag_impl[T]
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeArrayTag_impl[T: c.TypeTag](c: Context)(u: c.Expr[Universe]): c.Expr[ArrayTag[T]] =
+ c.Expr[Nothing](c.materializeArrayTag(u.tree, implicitly[c.TypeTag[T]].tpe))(c.TypeTag.Nothing)
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeErasureTag[T](u: Universe): ErasureTag[T] = macro materializeErasureTag_impl[T]
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeErasureTag_impl[T: c.TypeTag](c: Context)(u: c.Expr[Universe]): c.Expr[ErasureTag[T]] =
+ c.Expr[Nothing](c.materializeErasureTag(u.tree, implicitly[c.TypeTag[T]].tpe, concrete = false))(c.TypeTag.Nothing)
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeClassTag[T](u: Universe): ClassTag[T] = macro materializeClassTag_impl[T]
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeClassTag_impl[T: c.TypeTag](c: Context)(u: c.Expr[Universe]): c.Expr[ClassTag[T]] =
+ c.Expr[Nothing](c.materializeClassTag(u.tree, implicitly[c.TypeTag[T]].tpe))(c.TypeTag.Nothing)
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeTypeTag[T](u: Universe): u.TypeTag[T] = macro materializeTypeTag_impl[T]
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeTypeTag_impl[T: c.TypeTag](c: Context)(u: c.Expr[Universe]): c.Expr[u.value.TypeTag[T]] =
+ c.Expr[Nothing](c.materializeTypeTag(u.tree, implicitly[c.TypeTag[T]].tpe, concrete = false))(c.TypeTag.Nothing)
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeConcreteTypeTag[T](u: Universe): u.ConcreteTypeTag[T] = macro materializeConcreteTypeTag_impl[T]
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ def materializeConcreteTypeTag_impl[T: c.TypeTag](c: Context)(u: c.Expr[Universe]): c.Expr[u.value.ConcreteTypeTag[T]] =
+ c.Expr[Nothing](c.materializeTypeTag(u.tree, implicitly[c.TypeTag[T]].tpe, concrete = true))(c.TypeTag.Nothing)
+
+ /** This method is required by the compiler and <b>should not be used in client code</b>. */
+ private[scala] implicit def context2utils(c0: Context) : Utils { val c: c0.type } = new { val c: c0.type = c0 } with Utils
+}
+
+package internal {
+ private[scala] abstract class Utils {
+ val c: Context
+
+ import c.mirror._
+ import definitions._
+
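+ // Maps the core types to the names of the predefined tag values in the corresponding tag
+ // companion objects (e.g. IntClass -> "Int"), so that materializing a tag for a core type
+ // resolves to an existing constant such as ClassTag.Int or TypeTag.Int instead of reifying
+ // the type from scratch (see the coreTpe case in materializeTag below).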
+ val coreTags = Map(
+ ByteClass.asType -> newTermName("Byte"),
+ ShortClass.asType -> newTermName("Short"),
+ CharClass.asType -> newTermName("Char"),
+ IntClass.asType -> newTermName("Int"),
+ LongClass.asType -> newTermName("Long"),
+ FloatClass.asType -> newTermName("Float"),
+ DoubleClass.asType -> newTermName("Double"),
+ BooleanClass.asType -> newTermName("Boolean"),
+ UnitClass.asType -> newTermName("Unit"),
+ AnyClass.asType -> newTermName("Any"),
+ ObjectClass.asType -> newTermName("Object"),
+ AnyValClass.asType -> newTermName("AnyVal"),
+ AnyRefClass.asType -> newTermName("AnyRef"),
+ NothingClass.asType -> newTermName("Nothing"),
+ NullClass.asType -> newTermName("Null"),
+ StringClass.asType -> newTermName("String"))
+
+ // todo. the following two methods won't be necessary once we implement implicit macro generators for tags
+
+ def materializeArrayTag(prefix: Tree, tpe: Type): Tree =
+ materializeClassTag(prefix, tpe)
+
+ def materializeErasureTag(prefix: Tree, tpe: Type, concrete: Boolean): Tree =
+ if (concrete) materializeClassTag(prefix, tpe) else materializeTypeTag(prefix, tpe, concrete = false)
+
+ def materializeClassTag(prefix: Tree, tpe: Type): Tree =
+ materializeTag(prefix, tpe, ClassTagModule, {
+ val erasure = c.reifyErasure(tpe, concrete = true)
+ val factory = TypeApply(Select(Ident(ClassTagModule), "apply"), List(TypeTree(tpe)))
+ Apply(factory, List(erasure))
+ })
+
+ def materializeTypeTag(prefix: Tree, tpe: Type, concrete: Boolean): Tree = {
+ val tagModule = if (concrete) ConcreteTypeTagModule else TypeTagModule
+ materializeTag(prefix, tpe, tagModule, c.reifyType(prefix, tpe, dontSpliceAtTopLevel = true, concrete = concrete))
+ }
+
+ private def materializeTag(prefix: Tree, tpe: Type, tagModule: Symbol, materializer: => Tree): Tree = {
+ val result =
+ tpe match {
+ case coreTpe if coreTags contains coreTpe =>
+ val ref = if (tagModule.owner.isPackageClass) Ident(tagModule) else Select(prefix, tagModule.name)
+ Select(ref, coreTags(coreTpe))
+ case _ =>
+ val manifestInScope = nonSyntheticManifestInScope(tpe)
+ if (manifestInScope.isEmpty) translatingReificationErrors(materializer)
+ else gen.mkMethodCall(staticModule("scala.reflect.package"), newTermName("manifestToConcreteTypeTag"), List(tpe), List(manifestInScope))
+ }
+ try c.typeCheck(result)
+ catch { case terr @ c.TypeError(pos, msg) => failTag(terr) }
+ }
+
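+ // Looks for a Manifest[tpe] already available in implicit scope, rejecting manifests synthesized
+ // by the Manifest companion itself; when one is found, materializeTag reuses it through
+ // scala.reflect.manifestToConcreteTypeTag instead of reifying the type again.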
+ private def nonSyntheticManifestInScope(tpe: Type) = {
+ val ManifestClass = staticClass("scala.reflect.Manifest")
+ val ManifestModule = staticModule("scala.reflect.Manifest")
+ val manifest = c.inferImplicitValue(appliedType(ManifestClass.asTypeConstructor, List(tpe)))
+ val notOk = manifest.isEmpty || (manifest exists (sub => sub.symbol != null && (sub.symbol == ManifestModule || sub.symbol.owner == ManifestModule)))
+ if (notOk) EmptyTree else manifest
+ }
+
+ def materializeExpr(prefix: Tree, expr: Tree): Tree = {
+ val result = translatingReificationErrors(c.reifyTree(prefix, expr))
+ try c.typeCheck(result)
+ catch { case terr @ c.TypeError(pos, msg) => failExpr(terr) }
+ }
+
+ private def translatingReificationErrors(materializer: => Tree): Tree = {
+ try materializer
+ catch {
+ case ReificationError(pos, msg) =>
+ c.error(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling
+ EmptyTree
+ case UnexpectedReificationError(pos, err, cause) if cause != null =>
+ throw cause
+ }
+ }
+
+ private def failTag(reason: Any): Nothing = {
+ val Apply(TypeApply(fun, List(tpeTree)), _) = c.macroApplication
+ val tpe = tpeTree.tpe
+ val PolyType(_, MethodType(_, tagTpe)) = fun.tpe
+ val tagModule = tagTpe.typeSymbol.companionSymbol
+ if (c.compilerSettings.contains("-Xlog-implicits"))
+ c.echo(c.enclosingPosition, "cannot materialize " + tagModule.name + "[" + tpe + "] because:\n" + reason)
+ c.abort(c.enclosingPosition, "No %s available for %s".format(tagModule.name, tpe))
+ }
+
+ private def failExpr(reason: Any): Nothing =
+ c.abort(c.enclosingPosition, "Cannot materialize Expr because:\n" + reason)
+ }
+}
diff --git a/src/library/scala/reflect/makro/internal/macroImpl.scala b/src/library/scala/reflect/makro/internal/macroImpl.scala
index 86600ba0a1..9cf4d23072 100644
--- a/src/library/scala/reflect/makro/internal/macroImpl.scala
+++ b/src/library/scala/reflect/makro/internal/macroImpl.scala
@@ -2,4 +2,4 @@ package scala.reflect.makro
package internal
/** This type is required by the compiler and <b>should not be used in client code</b>. */
-class macroImpl(val referenceToMacroImpl: Any) extends StaticAnnotation
+class macroImpl(val referenceToMacroImpl: Any) extends annotation.StaticAnnotation
diff --git a/src/library/scala/reflect/makro/internal/typeTagImpl.scala b/src/library/scala/reflect/makro/internal/typeTagImpl.scala
deleted file mode 100644
index de404ff39f..0000000000
--- a/src/library/scala/reflect/makro/internal/typeTagImpl.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-package scala.reflect.makro
-
-import scala.reflect.api.Universe
-
-/** This package is required by the compiler and <b>should not be used in client code</b>. */
-package object internal {
- /** This method is required by the compiler and <b>should not be used in client code</b>. */
- def materializeClassTag[T](u: Universe): ClassTag[T] = macro materializeClassTag_impl[T]
-
- /** This method is required by the compiler and <b>should not be used in client code</b>. */
- def materializeClassTag_impl[T: c.TypeTag](c: Context)(u: c.Expr[Universe]): c.Expr[ClassTag[T]] =
- c.Expr[Nothing](c.materializeClassTag(u.tree, implicitly[c.TypeTag[T]].tpe))(c.TypeTag.Nothing)
-
- /** This method is required by the compiler and <b>should not be used in client code</b>. */
- def materializeTypeTag[T](u: Universe): u.TypeTag[T] = macro materializeTypeTag_impl[T]
-
- /** This method is required by the compiler and <b>should not be used in client code</b>. */
- def materializeTypeTag_impl[T: c.TypeTag](c: Context)(u: c.Expr[Universe]): c.Expr[u.value.TypeTag[T]] =
- c.Expr[Nothing](c.materializeTypeTag(u.tree, implicitly[c.TypeTag[T]].tpe, requireConcreteTypeTag = false))(c.TypeTag.Nothing)
-
- /** This method is required by the compiler and <b>should not be used in client code</b>. */
- def materializeConcreteTypeTag[T](u: Universe): u.ConcreteTypeTag[T] = macro materializeConcreteTypeTag_impl[T]
-
- /** This method is required by the compiler and <b>should not be used in client code</b>. */
- def materializeConcreteTypeTag_impl[T: c.TypeTag](c: Context)(u: c.Expr[Universe]): c.Expr[u.value.ConcreteTypeTag[T]] =
- c.Expr[Nothing](c.materializeTypeTag(u.tree, implicitly[c.TypeTag[T]].tpe, requireConcreteTypeTag = true))(c.TypeTag.Nothing)
-
- /** This method is required by the compiler and <b>should not be used in client code</b>. */
- private[scala] implicit def context2utils(c0: Context) : Utils { val c: c0.type } = new { val c: c0.type = c0 } with Utils
-}
-
-package internal {
- private[scala] abstract class Utils {
- val c: Context
-
- import c.mirror._
- import definitions._
-
- val coreTags = Map(
- ByteClass.asType -> newTermName("Byte"),
- ShortClass.asType -> newTermName("Short"),
- CharClass.asType -> newTermName("Char"),
- IntClass.asType -> newTermName("Int"),
- LongClass.asType -> newTermName("Long"),
- FloatClass.asType -> newTermName("Float"),
- DoubleClass.asType -> newTermName("Double"),
- BooleanClass.asType -> newTermName("Boolean"),
- UnitClass.asType -> newTermName("Unit"),
- AnyClass.asType -> newTermName("Any"),
- ObjectClass.asType -> newTermName("Object"),
- AnyValClass.asType -> newTermName("AnyVal"),
- AnyRefClass.asType -> newTermName("AnyRef"),
- NothingClass.asType -> newTermName("Nothing"),
- NullClass.asType -> newTermName("Null"))
-
- def materializeClassTag(prefix: Tree, tpe: Type): Tree = {
- val typetagInScope = c.inferImplicitValue(appliedType(typeRef(prefix.tpe, ConcreteTypeTagClass, Nil), List(tpe)))
- def typetagIsSynthetic(tree: Tree) = tree.isInstanceOf[Block] || (tree exists (sub => sub.symbol == TypeTagModule || sub.symbol == ConcreteTypeTagModule))
- typetagInScope match {
- case success if !success.isEmpty && !typetagIsSynthetic(success) =>
- val factory = TypeApply(Select(Ident(ClassTagModule), newTermName("apply")), List(TypeTree(tpe)))
- Apply(factory, List(Select(typetagInScope, newTermName("tpe"))))
- case _ =>
- val result =
- tpe match {
- case coreTpe if coreTags contains coreTpe =>
- Select(Ident(ClassTagModule), coreTags(coreTpe))
- case _ =>
- if (tpe.typeSymbol == ArrayClass) {
- val componentTpe = tpe.typeArguments(0)
- val classtagInScope = c.inferImplicitValue(appliedType(typeRef(NoPrefix, ClassTagClass, Nil), List(componentTpe)))
- val componentTag = classtagInScope orElse materializeClassTag(prefix, componentTpe)
- Select(componentTag, newTermName("wrap"))
- } else {
- // [Eugene] what's the intended behavior? there's no spec on ClassManifests
- // for example, should we ban Array[T] or should we tag them with Array[AnyRef]?
- // if its the latter, what should be the result of tagging Array[T] where T <: Int?
- if (tpe.typeSymbol.isAbstractType) fail("tpe is an abstract type")
- val erasure =
- if (tpe.typeSymbol.isDerivedValueClass) tpe // [Eugene to Martin] is this correct?
- else tpe.erasure.normalize // necessary to deal with erasures of HK types
- val factory = TypeApply(Select(Ident(ClassTagModule), newTermName("apply")), List(TypeTree(tpe)))
- Apply(factory, List(TypeApply(Ident(newTermName("classOf")), List(TypeTree(erasure)))))
- }
- }
- try c.typeCheck(result)
- catch { case terr @ c.TypeError(pos, msg) => fail(terr) }
- }
- }
-
- def materializeTypeTag(prefix: Tree, tpe: Type, requireConcreteTypeTag: Boolean): Tree = {
- val tagModule = if (requireConcreteTypeTag) ConcreteTypeTagModule else TypeTagModule
- val result =
- tpe match {
- case coreTpe if coreTags contains coreTpe =>
- Select(Select(prefix, tagModule.name), coreTags(coreTpe))
- case _ =>
- try c.reifyType(prefix, tpe, dontSpliceAtTopLevel = true, requireConcreteTypeTag = requireConcreteTypeTag)
- catch {
- case ex: Throwable =>
- // [Eugene] cannot pattern match on an abstract type, so had to do this
- val ex1 = ex
- if (ex.getClass.toString.endsWith("$ReificationError")) {
- ex match {
- case c.ReificationError(pos, msg) =>
- c.error(pos, msg)
- EmptyTree
- }
- } else if (ex.getClass.toString.endsWith("$UnexpectedReificationError")) {
- ex match {
- case c.UnexpectedReificationError(pos, err, cause) =>
- if (cause != null) throw cause else throw ex
- }
- } else {
- throw ex
- }
- }
- }
- try c.typeCheck(result)
- catch { case terr @ c.TypeError(pos, msg) => fail(terr) }
- }
-
- private def fail(reason: Any): Nothing = {
- val Apply(TypeApply(fun, List(tpeTree)), _) = c.macroApplication
- val tpe = tpeTree.tpe
- val PolyType(_, MethodType(_, tagTpe)) = fun.tpe
- val tagModule = tagTpe.typeSymbol.companionSymbol
- if (c.compilerSettings.contains("-Xlog-implicits"))
- c.echo(c.enclosingPosition, "cannot materialize " + tagModule.name + "[" + tpe + "] because:\n" + reason)
- c.abort(c.enclosingPosition, "No %s available for %s".format(tagModule.name, tpe))
- }
- }
-}
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 1738642932..38a144cd49 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -3,6 +3,7 @@ package scala
package object reflect {
import ReflectionUtils._
+ import scala.compat.Platform.EOL
// !!! This was a val; we can't throw exceptions that aggressively without breaking
// non-standard environments, e.g. google app engine. I made it a lazy val, but
@@ -10,17 +11,30 @@ package object reflect {
// initialization, but in response to a doomed attempt to utilize it.
// todo. default mirror (a static object) might become a source for memory leaks (because it holds a strong reference to a classloader)!
- lazy val mirror: api.Mirror = mkMirror(defaultReflectionClassLoader)
+ lazy val mirror: api.Mirror =
+ try mkMirror(defaultReflectionClassLoader)
+ catch {
+ case ex: UnsupportedOperationException =>
+ new DummyMirror(defaultReflectionClassLoader)
+ }
+
+ private[scala] def mirrorDiagnostics(cl: ClassLoader): String = """
+ |
+ | This error occurred because `scala.reflect.runtime.package`, located in
+ | scala-compiler.jar, cannot be loaded. The classloader you are using is:
+ | %s.
+ |
+ | For instructions covering some situations that might be relevant,
+ | visit our knowledge base at https://gist.github.com/2391081.
+ """.stripMargin('|').format(show(cl))
def mkMirror(classLoader: ClassLoader): api.Mirror = {
- // we use (Java) reflection here so that we can keep reflect.runtime and reflect.internals in a seperate jar
- // note that we must instantiate the mirror with current classloader, otherwise we won't be able to cast it to api.Mirror
- // that's not a problem, though, because mirror can service classes from arbitrary classloaders
- val instance = invokeFactoryOpt(getClass.getClassLoader, "scala.reflect.runtime.package", "mkMirror", classLoader)
+ val coreClassLoader = getClass.getClassLoader
+ val instance = invokeFactoryOpt(coreClassLoader, "scala.reflect.runtime.package", "mkMirror", classLoader)
instance match {
case Some(x: api.Mirror) => x
- case Some(_) => throw new UnsupportedOperationException("Available scala reflection implementation is incompatible with this interface")
- case None => throw new UnsupportedOperationException("Scala reflection not available on this platform")
+ case Some(_) => throw new UnsupportedOperationException("Available scala reflection implementation is incompatible with this interface." + mirrorDiagnostics(coreClassLoader))
+ case None => throw new UnsupportedOperationException("Scala reflection not available on this platform." + mirrorDiagnostics(coreClassLoader))
}
}
@@ -39,20 +53,9 @@ package object reflect {
@deprecated("Use `@scala.beans.ScalaBeanInfo` instead", "2.10.0")
type ScalaBeanInfo = scala.beans.ScalaBeanInfo
- @deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
- type ClassManifest[T] = ClassTag[T]
- @deprecated("OptManifest is no longer supported, and using it may lead to incorrect results, Use `@scala.reflect.TypeTag` instead", "2.10.0")
- type OptManifest[T] = TypeTag[T]
- @deprecated("Use `@scala.reflect.ConcreteTypeTag` instead", "2.10.0")
- type Manifest[T] = ConcreteTypeTag[T]
-
- @deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
- val ClassManifest = ClassTag
- @deprecated("Use `@scala.reflect.ConcreteTypeTag` instead", "2.10.0")
- lazy val Manifest = ConcreteTypeTag
- @deprecated("NoManifest is no longer supported, and using it may lead to incorrect results, Use `@scala.reflect.TypeTag` instead", "2.10.0")
- object NoManifest extends OptManifest[Nothing](scala.reflect.mirror.definitions.NothingClass.asType) with Serializable
-
+ // ArrayTag trait is defined separately from the mirror
+ // ErasureTag trait is defined separately from the mirror
+ // ConcreteErasureTag trait is defined separately from the mirror
// ClassTag class is defined separately from the mirror
type TypeTag[T] = scala.reflect.mirror.TypeTag[T]
type ConcreteTypeTag[T] = scala.reflect.mirror.ConcreteTypeTag[T]
@@ -60,4 +63,8 @@ package object reflect {
// ClassTag object is defined separately from the mirror
lazy val TypeTag = scala.reflect.mirror.TypeTag
lazy val ConcreteTypeTag = scala.reflect.mirror.ConcreteTypeTag
+
+ def arrayTagToClassManifest[T](tag: ArrayTag[T]): ClassManifest[T] = TagInterop.arrayTagToClassManifest[T](tag)
+ def concreteTypeTagToManifest[T](tag: ConcreteTypeTag[T]): Manifest[T] = TagInterop.concreteTypeTagToManifest[T](tag)
+ def manifestToConcreteTypeTag[T](tag: Manifest[T]): ConcreteTypeTag[T] = TagInterop.manifestToConcreteTypeTag[T](tag)
}
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index 2e435d8a7e..f499350ce9 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -36,7 +36,7 @@ abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala
// let's not make it final so as not to confuse anyone
/*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty)
- override final def andThen[C](k: R => C) : PartialFunction[T1, C] =
+ @annotation.unspecialized override final def andThen[C](k: R => C) : PartialFunction[T1, C] =
new AbstractPartialFunction[T1, C] {
def isDefinedAt(x: T1): Boolean = self.isDefinedAt(x)
override def applyOrElse[A1 <: T1, C1 >: C](x: A1, default: A1 => C1): C1 =
@@ -61,8 +61,8 @@ abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala
*/
abstract class AbstractTotalFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] with PartialFunction[T1, R] {
final def isDefinedAt(x: T1): Boolean = true
- override final def applyOrElse[A1 <: T1, B1 >: R](x: A1, default: A1 => B1): B1 = apply(x)
- override final def orElse[A1 <: T1, B1 >: R](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] = this
+ @annotation.unspecialized override final def applyOrElse[A1 <: T1, B1 >: R](x: A1, default: A1 => B1): B1 = apply(x)
+ @annotation.unspecialized override final def orElse[A1 <: T1, B1 >: R](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] = this
//TODO: check generated code for PF literal here
- override final def andThen[C](k: R => C): PartialFunction[T1, C] = { case x => k(apply(x)) }
+ @annotation.unspecialized override final def andThen[C](k: R => C): PartialFunction[T1, C] = { case x => k(apply(x)) }
}
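
The combinators above are marked `@annotation.unspecialized` so that specialized subclasses only duplicate the hot `apply`/`isDefinedAt` path, not these generic wrappers. A small usage sketch (a hand-rolled subclass standing in for the compiler-generated one; not part of the patch):

{{{
import scala.runtime.AbstractPartialFunction

object UnspecializedDemo {
  // Hand-written stand-in for what the compiler emits for a pattern-matching
  // anonymous function.
  val halve = new AbstractPartialFunction[Int, Int] {
    def isDefinedAt(x: Int) = x % 2 == 0
    override def applyOrElse[A1 <: Int, B1 >: Int](x: A1, default: A1 => B1): B1 =
      if (x % 2 == 0) x / 2 else default(x)
  }

  def main(args: Array[String]): Unit = {
    val described = halve andThen ("half = " + _)                // andThen stays generic, hence unspecialized
    println(described.applyOrElse(10, (_: Int) => "undefined"))  // half = 5
    println(described.applyOrElse(7,  (_: Int) => "undefined"))  // undefined
  }
}
}}}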
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index 258a176671..8fe9a017d0 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -228,7 +228,7 @@ public final class BoxesRunTime
* as yet have not.
*
* Note: Among primitives, Float.NaN != Float.NaN, but the boxed
- * verisons are equal. This still needs reconciliation.
+ * versions are equal. This still needs reconciliation.
*/
public static int hashFromLong(java.lang.Long n) {
int iv = n.intValue();
@@ -242,6 +242,9 @@ public final class BoxesRunTime
long lv = n.longValue();
if (lv == dv) return java.lang.Long.valueOf(lv).hashCode();
+
+ float fv = n.floatValue();
+ if (fv == dv) return java.lang.Float.valueOf(fv).hashCode();
else return n.hashCode();
}
public static int hashFromFloat(java.lang.Float n) {
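
The extra branch makes `hashFromDouble` agree with `hashFromFloat` whenever the `Double` holds a value exactly representable as a `Float`, so numerically equal boxed values keep equal hashes. A quick check of the intended behaviour (a sketch, not part of the patch):

{{{
object BoxedHashCheck {
  def main(args: Array[String]): Unit = {
    val f = 1.5f
    val d = 1.5d
    println(f == d)        // true: the two primitives are numerically equal
    println(f.## == d.##)  // expected true with the Float fallback added above
  }
}
}}}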
diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala
index f702b9e3da..396323d1e8 100644
--- a/src/library/scala/runtime/RichDouble.scala
+++ b/src/library/scala/runtime/RichDouble.scala
@@ -18,14 +18,12 @@ final class RichDouble(val self: Double) extends FractionalProxy[Double] {
/** Converts an angle measured in degrees to an approximately equivalent
* angle measured in radians.
*
- * @param x an angle, in degrees
* @return the measurement of the angle x in radians.
*/
def toRadians: Double = math.toRadians(self)
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees.
- * @param x angle, in radians
* @return the measurement of the angle x in degrees.
*/
def toDegrees: Double = math.toDegrees(self)
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index 6b72a9dd55..4fc9e8864a 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -18,7 +18,6 @@ final class RichFloat(val self: Float) extends FractionalProxy[Float] {
/** Converts an angle measured in degrees to an approximately equivalent
* angle measured in radians.
*
- * @param x an angle, in degrees
* @return the measurement of the angle `x` in radians.
*/
def toRadians: Float = math.toRadians(self).toFloat
@@ -26,7 +25,6 @@ final class RichFloat(val self: Float) extends FractionalProxy[Float] {
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees.
*
- * @param x angle, in radians
* @return the measurement of the angle `x` in degrees.
*/
def toDegrees: Float = math.toDegrees(self).toFloat
diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala
index cf5aab0be4..d03968212f 100644
--- a/src/library/scala/runtime/RichInt.scala
+++ b/src/library/scala/runtime/RichInt.scala
@@ -9,7 +9,6 @@
package scala.runtime
import scala.collection.immutable.Range
-import annotation.bridge
// Note that this does not implement IntegralProxy[Int] so that it can return
// the Int-specific Range class from until/to.
@@ -37,9 +36,6 @@ final class RichInt(val self: Int) extends ScalaNumberProxy[Int] with RangedProx
*/
def until(end: Int, step: Int): Range = Range(self, end, step)
-// @bridge
-// def until(end: Int): Range with Range.ByOne = new Range(self, end, 1) with Range.ByOne
-
/** like `until`, but includes the last index */
/**
* @param end The final bound of the range to make.
@@ -56,9 +52,6 @@ final class RichInt(val self: Int) extends ScalaNumberProxy[Int] with RangedProx
*/
def to(end: Int, step: Int): Range.Inclusive = Range.inclusive(self, end, step)
-// @bridge
-// def to(end: Int): Range with Range.ByOne = new Range.Inclusive(self, end, 1) with Range.ByOne
-
/**
* @return `'''this'''` if `'''this''' < that` or `that` otherwise
*/
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index d06eba8f7d..4c5e0e408b 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -47,21 +47,29 @@ object ScalaRunTime {
names.toSet
}
+ /** Return the class object representing an array with element class `clazz`.
+ */
+ def arrayClass(clazz: Class[_]): Class[_] = {
+ // newInstance throws an exception if the erasure is Void.TYPE. see SI-5680
+ if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]]
+ else java.lang.reflect.Array.newInstance(clazz, 0).getClass
+ }
+
+ /** Return the class object representing elements in arrays described by a given schematic.
+ */
+ def arrayElementClass(schematic: Any): Class[_] = schematic match {
+ case cls: Class[_] => cls.getComponentType
+ case tag: ClassTag[_] => tag.erasure
+ case tag: ArrayTag[_] => tag.newArray(0).getClass.getComponentType
+ case _ => throw new UnsupportedOperationException("unsupported schematic %s (%s)".format(schematic, if (schematic == null) "null" else schematic.getClass))
+ }
+
/** Return the class object representing an unboxed value type,
* e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
* rewrites expressions like 5.getClass to come here.
*/
- def anyValClass[T <: AnyVal](value: T): Class[T] = (value match {
- case x: Byte => java.lang.Byte.TYPE
- case x: Short => java.lang.Short.TYPE
- case x: Char => java.lang.Character.TYPE
- case x: Int => java.lang.Integer.TYPE
- case x: Long => java.lang.Long.TYPE
- case x: Float => java.lang.Float.TYPE
- case x: Double => java.lang.Double.TYPE
- case x: Boolean => java.lang.Boolean.TYPE
- case x: Unit => java.lang.Void.TYPE
- }).asInstanceOf[Class[T]]
+ def anyValClass[T <: AnyVal : ClassTag](value: T): Class[T] =
+ classTag[T].erasure.asInstanceOf[Class[T]]
/** Retrieve generic array element */
def array_apply(xs: AnyRef, idx: Int): Any = xs match {
@@ -122,16 +130,18 @@ object ScalaRunTime {
case null => throw new NullPointerException
}
- /** Convert a numeric value array to an object array.
+ /** Convert an array to an object array.
* Needed to deal with vararg arguments of primitive types that are passed
* to a generic Java vararg parameter T ...
*/
- def toObjectArray(src: AnyRef): Array[Object] = {
- val length = array_length(src)
- val dest = new Array[Object](length)
- for (i <- 0 until length)
- array_update(dest, i, array_apply(src, i))
- dest
+ def toObjectArray(src: AnyRef): Array[Object] = src match {
+ case x: Array[AnyRef] => x
+ case _ =>
+ val length = array_length(src)
+ val dest = new Array[Object](length)
+ for (i <- 0 until length)
+ array_update(dest, i, array_apply(src, i))
+ dest
}
def toArray[T](xs: collection.Seq[T]) = {
@@ -285,8 +295,12 @@ object ScalaRunTime {
*/
def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue)
def stringOf(arg: Any, maxElements: Int): String = {
- def isScalaClass(x: AnyRef) =
- Option(x.getClass.getPackage) exists (_.getName startsWith "scala.")
+ def packageOf(x: AnyRef) = x.getClass.getPackage match {
+ case null => ""
+ case p => p.getName
+ }
+ def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala."
+ def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc."
// When doing our own iteration is dangerous
def useOwnToString(x: Any) = x match {
@@ -302,7 +316,8 @@ object ScalaRunTime {
case _: TraversableView[_, _] => true
// Don't want to a) traverse infinity or b) be overly helpful with peoples' custom
// collections which may have useful toString methods - ticket #3710
- case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x)
+ // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s.
+ case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x)
// Otherwise, nothing could possibly go wrong
case _ => false
}
@@ -329,14 +344,14 @@ object ScalaRunTime {
case null => "null"
case "" => "\"\""
case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x
- case x if useOwnToString(x) => x toString
+ case x if useOwnToString(x) => x.toString
case x: AnyRef if isArray(x) => arrayToString(x)
case x: collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")")
- case x => x toString
+ case x => x.toString
}
// The try/catch is defense against iterables which aren't actually designed
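
A short sketch of the helpers introduced above, using only the `scala.runtime.ScalaRunTime` entry points shown in this diff:

{{{
import scala.runtime.ScalaRunTime

object ArrayClassDemo {
  def main(args: Array[String]): Unit = {
    // Ordinary element classes go through java.lang.reflect.Array.newInstance.
    println(ScalaRunTime.arrayClass(classOf[Int]))         // class [I
    // Void.TYPE is special-cased (SI-5680): the Array[Unit] class is returned
    // instead of newInstance throwing.
    println(ScalaRunTime.arrayClass(java.lang.Void.TYPE))
    // toObjectArray now hands reference arrays back untouched instead of copying.
    val refs: Array[AnyRef] = Array[AnyRef]("a", "b")
    println(ScalaRunTime.toObjectArray(refs) eq refs)      // true
  }
}
}}}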
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
new file mode 100644
index 0000000000..8ef1a9a33e
--- /dev/null
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -0,0 +1,44 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.runtime
+
+import java.util.Arrays.copyOfRange
+
+final class SeqCharSequence(val xs: collection.IndexedSeq[Char]) extends CharSequence {
+ def length: Int = xs.length
+ def charAt(index: Int): Char = xs(index)
+ def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(xs.slice(start, end))
+ override def toString = xs.mkString("")
+}
+
+final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends CharSequence {
+ // yikes
+ // java.lang.VerifyError: (class: scala/runtime/ArrayCharSequence, method: <init> signature: ([C)V)
+ // Constructor must call super() or this()
+ //
+ // def this(xs: Array[Char]) = this(xs, 0, xs.length)
+
+ def length: Int = math.max(0, end - start)
+ def charAt(index: Int): Char = {
+ if (0 <= index && index < length)
+ xs(start + index)
+ else throw new ArrayIndexOutOfBoundsException(index)
+ }
+ def subSequence(start0: Int, end0: Int): CharSequence = {
+ if (start0 < 0) throw new ArrayIndexOutOfBoundsException(start0)
+ else if (end0 > length) throw new ArrayIndexOutOfBoundsException(end0)
+ else if (end0 <= start0) new ArrayCharSequence(xs, 0, 0)
+ else {
+ val newlen = end0 - start0
+ val start1 = start + start0
+ new ArrayCharSequence(xs, start1, start1 + newlen)
+ }
+ }
+ override def toString = xs drop start take length mkString ""
+}
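
A usage sketch for the two wrappers defined in this new file (values and expected output are illustrative):

{{{
import scala.runtime.{ SeqCharSequence, ArrayCharSequence }

object CharSequenceDemo {
  def main(args: Array[String]): Unit = {
    val seq: CharSequence = new SeqCharSequence(Vector('s', 'c', 'a', 'l', 'a'))
    println(seq.length)             // 5
    println(seq.subSequence(1, 4))  // cal

    // The three-argument constructor sidesteps the VerifyError noted above.
    val arr: CharSequence = new ArrayCharSequence("diffing".toCharArray, 0, 4)
    println(arr.charAt(3))          // f
    println(arr)                    // diff
  }
}
}}}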
diff --git a/src/library/scala/runtime/Statics.java b/src/library/scala/runtime/Statics.java
new file mode 100644
index 0000000000..485511ecbb
--- /dev/null
+++ b/src/library/scala/runtime/Statics.java
@@ -0,0 +1,89 @@
+package scala.runtime;
+
+/** Not for public consumption. Usage by the runtime only.
+ */
+
+public final class Statics {
+ public static int mix(int hash, int data) {
+ int h = mixLast(hash, data);
+ h = Integer.rotateLeft(h, 13);
+ return h * 5 + 0xe6546b64;
+ }
+
+ public static int mixLast(int hash, int data) {
+ int k = data;
+
+ k *= 0xcc9e2d51;
+ k = Integer.rotateLeft(k, 15);
+ k *= 0x1b873593;
+
+ return hash ^ k;
+ }
+
+ public static int finalizeHash(int hash, int length) {
+ return avalanche(hash ^ length);
+ }
+
+ /** Force all bits of the hash to avalanche. Used for finalizing the hash. */
+ public static int avalanche(int h) {
+ h ^= h >>> 16;
+ h *= 0x85ebca6b;
+ h ^= h >>> 13;
+ h *= 0xc2b2ae35;
+ h ^= h >>> 16;
+
+ return h;
+ }
+
+ public static int longHash(long lv) {
+ if ((int)lv == lv)
+ return (int)lv;
+ else
+ return (int)(lv ^ (lv >>> 32));
+ }
+
+ public static int doubleHash(double dv) {
+ int iv = (int)dv;
+ if (iv == dv)
+ return iv;
+
+ float fv = (float)dv;
+ if (fv == dv)
+ return java.lang.Float.floatToIntBits(fv);
+
+ long lv = (long)dv;
+ if (lv == dv)
+ return (int)lv;
+
+ lv = Double.doubleToLongBits(dv);
+ return (int)(lv ^ (lv >>> 32));
+ }
+
+ public static int floatHash(float fv) {
+ int iv = (int)fv;
+ if (iv == fv)
+ return iv;
+
+ long lv = (long)fv;
+ if (lv == fv)
+ return (int)(lv^(lv>>>32));
+
+ return java.lang.Float.floatToIntBits(fv);
+ }
+
+ public static int anyHash(Object x) {
+ if (x == null)
+ return 0;
+
+ if (x instanceof java.lang.Long)
+ return longHash(((java.lang.Long)x).longValue());
+
+ if (x instanceof java.lang.Double)
+ return doubleHash(((java.lang.Double)x).doubleValue());
+
+ if (x instanceof java.lang.Float)
+ return floatHash(((java.lang.Float)x).floatValue());
+
+ return x.hashCode();
+ }
+}
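
A sketch showing how the numeric hashes above line up, calling the Statics methods directly (normally the compiler and the collections do this on your behalf):

{{{
import scala.runtime.Statics

object StaticsDemo {
  def main(args: Array[String]): Unit = {
    // A Long, a Double and a boxed value holding the same small number all
    // hash to that number, keeping == and ## consistent across boxings.
    println(Statics.longHash(42L))                            // 42
    println(Statics.doubleHash(42.0))                         // 42
    println(Statics.anyHash(java.lang.Double.valueOf(42.0)))  // 42

    // mix/mixLast/finalizeHash are the MurmurHash3-style mixing steps.
    var h = 42
    h = Statics.mix(h, 1)
    h = Statics.mix(h, 2)
    println(Statics.finalizeHash(h, 2))
  }
}
}}}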
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
new file mode 100644
index 0000000000..5ad364c8a5
--- /dev/null
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -0,0 +1,130 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.runtime
+
+import scala.collection.{ TraversableLike, IterableLike }
+import scala.collection.generic.{ CanBuildFrom => CBF }
+
+/** This trait is intended as a minimal interface, not complicated
+ * by the requirement to resolve type constructors, for implicit search (which only
+ * needs to find an implicit conversion to Traversable for our purposes).
+ */
+trait ZippedTraversable2[+El1, +El2] {
+ def foreach[U](f: (El1, El2) => U): Unit
+}
+object ZippedTraversable2 {
+ implicit def zippedTraversable2ToTraversable[El1, El2](zz: ZippedTraversable2[El1, El2]): Traversable[(El1, El2)] = {
+ new collection.AbstractTraversable[(El1, El2)] {
+ def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f)
+ }
+ }
+}
+
+class Tuple2Zipped[El1, Repr1, El2, Repr2](
+ coll1: TraversableLike[El1, Repr1],
+ coll2: IterableLike[El2, Repr2]
+) extends ZippedTraversable2[El1, El2] {
+ def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
+ val b = cbf(coll1.repr)
+ b.sizeHint(coll1)
+ val elems2 = coll2.iterator
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext)
+ b += f(el1, elems2.next)
+ else
+ return b.result
+ }
+
+ b.result
+ }
+
+ def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext)
+ b ++= f(el1, elems2.next)
+ else
+ return b.result
+ }
+
+ b.result
+ }
+
+ def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
+ val b1 = cbf1(coll1.repr)
+ val b2 = cbf2(coll2.repr)
+ val elems2 = coll2.iterator
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext) {
+ val el2 = elems2.next
+ if (f(el1, el2)) {
+ b1 += el1
+ b2 += el2
+ }
+ }
+ else return (b1.result, b2.result)
+ }
+
+ (b1.result, b2.result)
+ }
+
+ def exists(f: (El1, El2) => Boolean): Boolean = {
+ val elems2 = coll2.iterator
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext) {
+ if (f(el1, elems2.next))
+ return true
+ }
+ else return false
+ }
+ false
+ }
+
+ def forall(f: (El1, El2) => Boolean): Boolean =
+ !exists((x, y) => !f(x, y))
+
+ def foreach[U](f: (El1, El2) => U): Unit = {
+ val elems2 = coll2.iterator
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext)
+ f(el1, elems2.next)
+ else
+ return
+ }
+ }
+}
+
+object Tuple2Zipped {
+ class Ops[T1, T2](x: (T1, T2)) {
+ def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], That]
+ (implicit w1: T1 <:< CC1[El1],
+ w2: T2 <:< CC2[El2],
+ bf: collection.generic.CanBuildFrom[CC1[_], (El1, El2), That]
+ ): That = {
+ val buf = bf(x._1)
+ val it1 = x._1.toIterator
+ val it2 = x._2.toIterator
+ while (it1.hasNext && it2.hasNext)
+ buf += ((it1.next, it2.next))
+
+ buf.result
+ }
+
+ def zipped[El1, Repr1, El2, Repr2]
+ (implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => IterableLike[El2, Repr2]
+ ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped(x._1, x._2)
+ }
+}
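
A usage sketch for Tuple2Zipped, constructing it directly so the example does not depend on how the `.zipped` enrichment is wired up elsewhere:

{{{
import scala.runtime.Tuple2Zipped

object ZippedDemo {
  def main(args: Array[String]): Unit = {
    val zipped = new Tuple2Zipped(List(1, 2, 3), Vector(10, 20, 30, 40))

    // map stops at the shorter side and builds from the first collection's
    // CanBuildFrom, with no intermediate tuples.
    println(zipped.map(_ + _))                     // List(11, 22, 33)

    // filter keeps matching elements in both collections, pairwise.
    println(zipped.filter((a, b) => a + b > 20))   // (List(2, 3),Vector(20, 30))

    println(zipped.exists((a, b) => a * 10 == b))  // true
  }
}
}}}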
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
new file mode 100644
index 0000000000..4e9c542c58
--- /dev/null
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -0,0 +1,141 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.runtime
+
+import scala.collection.{ TraversableLike, IterableLike }
+import scala.collection.generic.{ CanBuildFrom => CBF }
+
+/** See comment on ZippedTraversable2. */
+trait ZippedTraversable3[+El1, +El2, +El3] {
+ def foreach[U](f: (El1, El2, El3) => U): Unit
+}
+object ZippedTraversable3 {
+ implicit def zippedTraversable3ToTraversable[El1, El2, El3](zz: ZippedTraversable3[El1, El2, El3]): Traversable[(El1, El2, El3)] = {
+ new collection.AbstractTraversable[(El1, El2, El3)] {
+ def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f)
+ }
+ }
+}
+
+class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
+ coll1: TraversableLike[El1, Repr1],
+ coll2: IterableLike[El2, Repr2],
+ coll3: IterableLike[El3, Repr3]
+) extends ZippedTraversable3[El1, El2, El3] {
+ def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext && elems3.hasNext)
+ b += f(el1, elems2.next, elems3.next)
+ else
+ return b.result
+ }
+ b.result
+ }
+
+ def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext && elems3.hasNext)
+ b ++= f(el1, elems2.next, elems3.next)
+ else
+ return b.result
+ }
+ b.result
+ }
+
+ def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
+ implicit cbf1: CBF[Repr1, El1, To1],
+ cbf2: CBF[Repr2, El2, To2],
+ cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = {
+ val b1 = cbf1(coll1.repr)
+ val b2 = cbf2(coll2.repr)
+ val b3 = cbf3(coll3.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+ def result = (b1.result, b2.result, b3.result)
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext && elems3.hasNext) {
+ val el2 = elems2.next
+ val el3 = elems3.next
+
+ if (f(el1, el2, el3)) {
+ b1 += el1
+ b2 += el2
+ b3 += el3
+ }
+ }
+ else return result
+ }
+
+ result
+ }
+
+ def exists(f: (El1, El2, El3) => Boolean): Boolean = {
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext && elems3.hasNext) {
+ if (f(el1, elems2.next, elems3.next))
+ return true
+ }
+ else return false
+ }
+ false
+ }
+
+ def forall(f: (El1, El2, El3) => Boolean): Boolean =
+ !exists((x, y, z) => !f(x, y, z))
+
+ def foreach[U](f: (El1, El2, El3) => U): Unit = {
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for (el1 <- coll1) {
+ if (elems2.hasNext && elems3.hasNext)
+ f(el1, elems2.next, elems3.next)
+ else
+ return
+ }
+ }
+}
+
+object Tuple3Zipped {
+ class Ops[T1, T2, T3](x: (T1, T2, T3)) {
+ def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], El3, CC3[X] <: TraversableOnce[X], That]
+ (implicit w1: T1 <:< CC1[El1],
+ w2: T2 <:< CC2[El2],
+ w3: T3 <:< CC3[El3],
+ bf: collection.generic.CanBuildFrom[CC1[_], (El1, El2, El3), That]
+ ): That = {
+ val buf = bf(x._1)
+ val it1 = x._1.toIterator
+ val it2 = x._2.toIterator
+ val it3 = x._3.toIterator
+ while (it1.hasNext && it2.hasNext && it3.hasNext)
+ buf += ((it1.next, it2.next, it3.next))
+
+ buf.result
+ }
+
+ def zipped[El1, Repr1, El2, Repr2, El3, Repr3]
+ (implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => IterableLike[El2, Repr2],
+ w3: T3 => IterableLike[El3, Repr3]
+ ): Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = new Tuple3Zipped(x._1, x._2, x._3)
+ }
+}
diff --git a/src/library/scala/runtime/package.scala b/src/library/scala/runtime/package.scala
index 9c87baf6a7..e4472b3ea1 100644
--- a/src/library/scala/runtime/package.scala
+++ b/src/library/scala/runtime/package.scala
@@ -1,13 +1,3 @@
package scala
-package object runtime {
- @deprecated("Use `scala.Unit` instead.", "2.9.0") val Unit = scala.Unit
- @deprecated("Use `scala.Boolean` instead.", "2.9.0") val Boolean = scala.Boolean
- @deprecated("Use `scala.Byte` instead.", "2.9.0") val Byte = scala.Byte
- @deprecated("Use `scala.Short` instead.", "2.9.0") val Short = scala.Short
- @deprecated("Use `scala.Char` instead.", "2.9.0") val Char = scala.Char
- @deprecated("Use `scala.Int` instead.", "2.9.0") val Int = scala.Int
- @deprecated("Use `scala.Long` instead.", "2.9.0") val Long = scala.Long
- @deprecated("Use `scala.Float` instead.", "2.9.0") val Float = scala.Float
- @deprecated("Use `scala.Double` instead.", "2.9.0") val Double = scala.Double
-}
+package object runtime { }
diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala
index e940990785..45fc6f5897 100644
--- a/src/library/scala/sys/BooleanProp.scala
+++ b/src/library/scala/sys/BooleanProp.scala
@@ -8,6 +8,8 @@
package scala.sys
+import language.implicitConversions
+
/** A few additional conveniences for Boolean properties.
*/
trait BooleanProp extends Prop[Boolean] {
diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala
index 33b88f119f..687a32cf7d 100644
--- a/src/library/scala/sys/Prop.scala
+++ b/src/library/scala/sys/Prop.scala
@@ -38,7 +38,7 @@ trait Prop[+T] {
/** Sets the property.
*
- * @param the new string value
+ * @param newValue the new string value
* @return the old value, or null if it was unset.
*/
def set(newValue: String): String
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index 52e0ac230b..d5777922b4 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -11,6 +11,8 @@ package scala.sys
import scala.collection.{ mutable, Iterator }
import scala.collection.JavaConverters._
import java.security.AccessControlException
+import language.implicitConversions
+
/** A bidirectional map wrapping the java System properties.
* Changes to System properties will be immediately visible in the map,
@@ -76,6 +78,5 @@ object SystemProperties {
lazy val preferIPv4Stack = bool("java.net.preferIPv4Stack", "system should prefer IPv4 sockets")
lazy val preferIPv6Addresses = bool("java.net.preferIPv6Addresses", "system should prefer IPv6 addresses")
lazy val noTraceSupression = bool("scala.control.noTraceSuppression", "scala should not suppress any stack trace creation")
- lazy val traceSourcePath = str("scala.control.sourcepath", "sourcepath for looking up stack trace elements")
}
diff --git a/src/library/scala/sys/package.scala b/src/library/scala/sys/package.scala
index 16faded419..119ab59c22 100644
--- a/src/library/scala/sys/package.scala
+++ b/src/library/scala/sys/package.scala
@@ -50,7 +50,7 @@ package object sys {
/** A bidirectional, mutable Map representing the current system Properties.
*
* @return a SystemProperties.
- * @see `scala.sys.SystemProperties`
+ * @see [[scala.sys.SystemProperties]]
*/
def props: SystemProperties = new SystemProperties
@@ -69,8 +69,8 @@ package object sys {
*
* Note that shutdown hooks are NOT guaranteed to be run.
*
- * @param the body of code to run at shutdown
- * @return the Thread which will run the shutdown hook.
+ * @param body the body of code to run at shutdown
+ * @return the Thread which will run the shutdown hook.
*/
def addShutdownHook(body: => Unit): ShutdownHookThread = ShutdownHookThread(body)
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index c2a61af936..d56c6f2c9d 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -11,6 +11,7 @@ package process
import processInternal._
import ProcessBuilder._
+import language.implicitConversions
/** Represents a process that is running or has finished running.
* It may be a compound process with several underlying native processes (such as `a #&& b`).
diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala
index c8e31766ea..9acae34d4e 100644
--- a/src/library/scala/testing/Benchmark.scala
+++ b/src/library/scala/testing/Benchmark.scala
@@ -49,9 +49,6 @@ trait Benchmark {
/** Run the benchmark the specified number of times and return a list with
* the execution times in milliseconds in reverse order of the execution.
- *
- * @param noTimes ...
- * @return ...
*/
def runBenchmark(noTimes: Int): List[Long] =
for (i <- List.range(1, noTimes + 1)) yield {
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
index 7570bf705c..5ab46b8985 100644
--- a/src/library/scala/testing/Show.scala
+++ b/src/library/scala/testing/Show.scala
@@ -27,17 +27,17 @@ package scala.testing
*/
trait Show {
- /** The result class of wrapper `symApply`.
+ /** An implicit definition that adds an apply method to Symbol which forwards to `test`.
* Prints out diagnostics of method applications.
*/
- class SymApply(f: Symbol) {
+ implicit class SymApply(f: Symbol) {
def apply[A](args: A*) {
println(test(f, args: _*))
}
}
- /** An implicit definition that adds an apply method to Symbol which forwards to `test`. */
- implicit def symApply(sym: Symbol) = new SymApply(sym)
+ @deprecated("use SymApply instead", "2.10")
+ def symApply(sym: Symbol): SymApply = new SymApply(sym)
/** Apply method with name of given symbol `f` to given arguments and return
* a result diagnostics.
diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala
index c0a54799a3..86508634e3 100644
--- a/src/library/scala/text/Document.scala
+++ b/src/library/scala/text/Document.scala
@@ -33,9 +33,6 @@ abstract class Document {
/**
* Format this document on `writer` and try to set line
* breaks so that the result fits in `width` columns.
- *
- * @param width ...
- * @param writer ...
*/
def format(width: Int, writer: Writer) {
type FmtState = (Int, Boolean, Document)
@@ -91,6 +88,8 @@ abstract class Document {
case (i, b, DocGroup(d)) :: z =>
val fitsFlat = fits(width - k, (i, false, d) :: z)
fmt(k, (i, !fitsFlat, d) :: z)
+ case _ =>
+ ()
}
fmt(0, (0, false, DocGroup(this)) :: Nil)
diff --git a/src/library/scala/util/Marshal.scala b/src/library/scala/util/Marshal.scala
index c2269cde45..209cd6c062 100644
--- a/src/library/scala/util/Marshal.scala
+++ b/src/library/scala/util/Marshal.scala
@@ -11,19 +11,20 @@
package scala.util
/**
- * Marshalling of Scala objects using Scala manifests.
+ * Marshalling of Scala objects using Scala tags.
*
* @author Stephane Micheloud
* @version 1.0
*/
+@deprecated("This class will be removed", "2.10.0")
object Marshal {
import java.io._
- import scala.reflect.ClassManifest
+ import scala.reflect.ClassTag
- def dump[A](o: A)(implicit m: ClassManifest[A]): Array[Byte] = {
+ def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = {
val ba = new ByteArrayOutputStream(512)
val out = new ObjectOutputStream(ba)
- out.writeObject(m)
+ out.writeObject(t)
out.writeObject(o)
out.close()
ba.toByteArray()
@@ -32,20 +33,20 @@ object Marshal {
@throws(classOf[IOException])
@throws(classOf[ClassCastException])
@throws(classOf[ClassNotFoundException])
- def load[A](buffer: Array[Byte])(implicit expected: ClassManifest[A]): A = {
+ def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = {
val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
- val found = in.readObject.asInstanceOf[ClassManifest[_]]
- // todo. [Eugene] needs review, since ClassManifests no longer capture typeArguments
- if (found.tpe <:< expected.tpe) {
- val o = in.readObject.asInstanceOf[A]
- in.close()
- o
- } else {
- in.close()
- throw new ClassCastException("type mismatch;"+
- "\n found : "+found+
- "\n required: "+expected)
+ val found = in.readObject.asInstanceOf[ClassTag[_]]
+ try {
+ // [Eugene] needs review
+ // previously was: found <:< expected
+ found.erasure.asSubclass(expected.erasure)
+ in.readObject.asInstanceOf[A]
+ } catch {
+ case _: ClassCastException =>
+ in.close()
+ throw new ClassCastException("type mismatch;"+
+ "\n found : "+found+
+ "\n required: "+expected)
}
}
-
}
diff --git a/src/library/scala/util/MurmurHash3.scala b/src/library/scala/util/MurmurHash3.scala
index 33d9d2f0e5..9a7f64b4ee 100644
--- a/src/library/scala/util/MurmurHash3.scala
+++ b/src/library/scala/util/MurmurHash3.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.util
import java.lang.Integer.{ rotateLeft => rotl }
@@ -19,7 +27,7 @@ import java.lang.Integer.{ rotateLeft => rotl }
* to remedy some weaknesses and improve performance. This represents the
 * latest and supposedly final version of the algorithm (revision 136).
*
- * @see http://code.google.com/p/smhasher
+ * @see [[http://code.google.com/p/smhasher]]
*/
class MurmurHash3 {
/** Mix in a block of data into an intermediate hash value. */
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 62cba1fc5b..65a1b8c685 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -11,6 +11,7 @@ package scala.util
import collection.mutable.ArrayBuffer
import collection.generic.CanBuildFrom
import scala.collection.immutable.{ List, Stream }
+import language.{implicitConversions, higherKinds}
/**
* @author Stephane Micheloud
@@ -100,8 +101,7 @@ class Random(val self: java.util.Random) {
/** Returns a new collection of the same type in a randomly chosen order.
*
- * @param coll the [[scala.collection.TraversableOnce]] to shuffle
- * @return the shuffled [[scala.collection.TraversableOnce]]
+ * @return the shuffled collection
*/
def shuffle[T, CC[X] <: TraversableOnce[X]](xs: CC[T])(implicit bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = {
val buf = new ArrayBuffer[T] ++= xs
@@ -117,7 +117,7 @@ class Random(val self: java.util.Random) {
swap(n - 1, k)
}
- bf(xs) ++= buf result
+ (bf(xs) ++= buf).result
}
/** Returns a Stream of pseudorandomly chosen alphanumeric characters,
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index bf460a118f..7d98e57741 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -8,7 +8,7 @@
package scala.util
-import scala.reflect.ClassManifest
+import scala.reflect.ClassTag
import scala.math.Ordering
/** The Sorting object provides functions that can sort various kinds of
@@ -39,14 +39,14 @@ object Sorting {
/** Sort an array of K where K is Ordered, preserving the existing order
* where the values are equal. */
- def stableSort[K: ClassManifest: Ordering](a: Array[K]) {
+ def stableSort[K: ArrayTag: Ordering](a: Array[K]) {
stableSort(a, 0, a.length-1, new Array[K](a.length), Ordering[K].lt _)
}
/** Sorts an array of `K` given an ordering function `f`.
* `f` should return `true` iff its first parameter is strictly less than its second parameter.
*/
- def stableSort[K: ClassManifest](a: Array[K], f: (K, K) => Boolean) {
+ def stableSort[K: ArrayTag](a: Array[K], f: (K, K) => Boolean) {
stableSort(a, 0, a.length-1, new Array[K](a.length), f)
}
@@ -57,14 +57,14 @@ object Sorting {
* @param f the comparison function.
* @return the sorted sequence of items.
*/
- def stableSort[K: ClassManifest](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
+ def stableSort[K: ArrayTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
val ret = a.toArray
stableSort(ret, f)
ret
}
/** Sorts an arbitrary sequence of items that are viewable as ordered. */
- def stableSort[K: ClassManifest: Ordering](a: Seq[K]): Array[K] =
+ def stableSort[K: ArrayTag: Ordering](a: Seq[K]): Array[K] =
stableSort(a, Ordering[K].lt _)
/** Stably sorts a sequence of items given an extraction function that will
@@ -74,8 +74,8 @@ object Sorting {
* @param f the comparison function.
* @return the sorted sequence of items.
*/
- def stableSort[K: ClassManifest, M: Ordering](a: Seq[K], f: K => M): Array[K] =
- stableSort(a)(implicitly[ClassManifest[K]], Ordering[M] on f)
+ def stableSort[K: ArrayTag, M: Ordering](a: Seq[K], f: K => M): Array[K] =
+ stableSort(a)(implicitly[ArrayTag[K]], Ordering[M] on f)
private def sort1[K: Ordering](x: Array[K], off: Int, len: Int) {
val ord = Ordering[K]
@@ -498,7 +498,7 @@ object Sorting {
sort2(off, len)
}
- private def stableSort[K : ClassManifest](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) {
+ private def stableSort[K : ArrayTag](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) {
if (lo < hi) {
val mid = (lo+hi) / 2
stableSort(a, lo, mid, scratch, f)
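
A usage sketch for the retagged stableSort overloads, assuming the compiler supplies the ArrayTag context bound just as it used to supply ClassManifest:

{{{
import scala.util.Sorting

object StableSortDemo {
  def main(args: Array[String]): Unit = {
    val words = Array("pear", "fig", "apple", "kiwi")
    Sorting.stableSort(words)                 // sorts in place via Ordering[String]
    println(words.mkString(", "))             // apple, fig, kiwi, pear

    // Stability: elements with equal keys keep their original relative order.
    val pairs = Seq("bb" -> 1, "a" -> 2, "cc" -> 3, "d" -> 4)
    val byLength = Sorting.stableSort(pairs, (p: (String, Int)) => p._1.length)
    println(byLength.toList)                  // List((a,2), (d,4), (bb,1), (cc,3))
  }
}
}}}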
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index efa2fcabb8..8faba236f0 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -92,8 +92,8 @@ sealed abstract class Try[+T] {
def andThen[U](f: T => Try[U]): Try[U] = flatMap(f)
/**
- * Transforms a nested `Try`, i.e., a `Try` of type `Try[Try[T]]`,
- * into an un-nested `Try`, i.e., a `Try` of type `Try[T]`.
+ * Transforms a nested `Try`, ie, a `Try` of type `Try[Try[T]]`,
+ * into an un-nested `Try`, ie, a `Try` of type `Try[T]`.
*/
def flatten[U](implicit ev: T <:< Try[U]): Try[U]
diff --git a/src/library/scala/util/automata/BaseBerrySethi.scala b/src/library/scala/util/automata/BaseBerrySethi.scala
index 18f36f9496..d8d260c478 100644
--- a/src/library/scala/util/automata/BaseBerrySethi.scala
+++ b/src/library/scala/util/automata/BaseBerrySethi.scala
@@ -17,6 +17,7 @@ import scala.collection.{ mutable, immutable }
* [[scala.util.automata.NondetWordAutom]] over `A` using the celebrated
* position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
*/
+@deprecated("This class will be removed", "2.10.0")
abstract class BaseBerrySethi {
val lang: Base
import lang.{ Alt, Eps, Meta, RegExp, Sequ, Star }
@@ -54,9 +55,6 @@ abstract class BaseBerrySethi {
/** Starts from the right-to-left
* precondition: pos is final
* pats are successor patterns of a Sequence node
- *
- * @param r ...
- * @return ...
*/
protected def compFollow(rs: Seq[RegExp]): Set[Int] = {
follow(0) =
@@ -72,13 +70,9 @@ abstract class BaseBerrySethi {
}
/** Returns the first set of an expression, setting the follow set along the way.
- *
- * @param fol1 ...
- * @param r ...
- * @return ...
*/
protected def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match {
- case x: Alt => Set(x.rs reverseMap (compFollow1(fol1, _)) flatten: _*)
+ case x: Alt => Set((x.rs reverseMap (compFollow1(fol1, _))).flatten: _*)
case x: Meta => compFollow1(fol1, x.r)
case x: Star => compFollow1(fol1 ++ compFirst(x.r), x.r)
case x: Sequ =>
@@ -92,8 +86,6 @@ abstract class BaseBerrySethi {
}
/** Returns the "Sethi-length" of a pattern, creating the set of position along the way.
- *
- * @param r ...
*/
protected def traverse(r: RegExp): Unit = r match {
// (is tree automaton stuff, more than Berry-Sethi)
diff --git a/src/library/scala/util/automata/DetWordAutom.scala b/src/library/scala/util/automata/DetWordAutom.scala
index 16c5d2944b..c6d72f1a06 100644
--- a/src/library/scala/util/automata/DetWordAutom.scala
+++ b/src/library/scala/util/automata/DetWordAutom.scala
@@ -20,6 +20,7 @@ import scala.collection.{ mutable, immutable }
* @author Burak Emir
* @version 1.0
*/
+@deprecated("This class will be removed", "2.10.0")
abstract class DetWordAutom[T <: AnyRef] {
val nstates: Int
val finals: Array[Int]
diff --git a/src/library/scala/util/automata/Inclusion.scala b/src/library/scala/util/automata/Inclusion.scala
index 63133998b4..4eaf1dfc02 100644
--- a/src/library/scala/util/automata/Inclusion.scala
+++ b/src/library/scala/util/automata/Inclusion.scala
@@ -17,14 +17,12 @@ package scala.util.automata
* @author Burak Emir
* @version 1.0
*/
+@deprecated("This class will be removed", "2.10.0")
trait Inclusion[A <: AnyRef] {
val labels: Seq[A]
/** Returns true if `dfa1` is included in `dfa2`.
- *
- * @param dfa1 ...
- * @param dfa2 ...
*/
def inclusion(dfa1: DetWordAutom[A], dfa2: DetWordAutom[A]) = {
diff --git a/src/library/scala/util/automata/NondetWordAutom.scala b/src/library/scala/util/automata/NondetWordAutom.scala
index fbc05de7fd..3b6f0b251a 100644
--- a/src/library/scala/util/automata/NondetWordAutom.scala
+++ b/src/library/scala/util/automata/NondetWordAutom.scala
@@ -17,6 +17,7 @@ import scala.collection.{ immutable, mutable }
* All states are reachable. Accepting states are those for which
* the partial function `finals` is defined.
*/
+@deprecated("This class will be removed", "2.10.0")
abstract class NondetWordAutom[T <: AnyRef] {
val nstates: Int
val labels: Seq[T]
@@ -50,8 +51,8 @@ abstract class NondetWordAutom[T <: AnyRef] {
override def toString = {
val finalString = Map(finalStates map (j => j -> finals(j)) : _*).toString
- val deltaString = (0 until nstates) .
- map (i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))) mkString
+ val deltaString = (0 until nstates)
+ .map(i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))).mkString
"[NondetWordAutom nstates=%d finals=%s delta=\n%s".format(nstates, finalString, deltaString)
}
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
index 8049d10d88..1cdcd734cd 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/util/automata/SubsetConstruction.scala
@@ -10,11 +10,12 @@ package scala.util.automata
import scala.collection.{ mutable, immutable }
+@deprecated("This class will be removed", "2.10.0")
class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
import nfa.labels
def selectTag(Q: immutable.BitSet, finals: Array[Int]) =
- Q map finals filter (_ > 0) min
+ (Q map finals filter (_ > 0)).min
def determinize: DetWordAutom[T] = {
// for assigning numbers to bitsets
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala
index 84b78d8dd8..1d4d1f971f 100644
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ b/src/library/scala/util/automata/WordBerrySethi.scala
@@ -17,6 +17,7 @@ import scala.util.regexp.WordExp
* @author Burak Emir
* @version 1.0
*/
+@deprecated("This class will be removed", "2.10.0")
abstract class WordBerrySethi extends BaseBerrySethi {
override val lang: WordExp
@@ -52,7 +53,6 @@ abstract class WordBerrySethi extends BaseBerrySethi {
/** Returns the first set of an expression, setting the follow set along
* the way.
*
- * @param fol1 ...
* @param r the regular expression
* @return the computed set
*/
@@ -139,7 +139,7 @@ abstract class WordBerrySethi extends BaseBerrySethi {
finals = finals.updated(0, finalTag)
val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*)
- val finalsArr = 0 until pos map (k => finals.getOrElse(k, 0)) toArray // 0 == not final
+ val finalsArr = (0 until pos map (k => finals.getOrElse(k, 0))).toArray // 0 == not final
val initialsArr = initials.toArray
val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] =
@@ -147,7 +147,7 @@ abstract class WordBerrySethi extends BaseBerrySethi {
mutable.HashMap(delta1(x).toSeq map { case (k, v) => k -> immutable.BitSet(v: _*) } : _*)
}).toArray
- val defaultArr = 0 until pos map (k => immutable.BitSet(defaultq(k): _*)) toArray
+ val defaultArr = (0 until pos map (k => immutable.BitSet(defaultq(k): _*))).toArray
new NondetWordAutom[_labelT] {
val nstates = pos
@@ -161,4 +161,4 @@ abstract class WordBerrySethi extends BaseBerrySethi {
automatonFrom(Sequ(z.asInstanceOf[this.lang._regexpT]), finalTag)
}
}
-} \ No newline at end of file
+}
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index 20a179a884..38f4abb20a 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -10,6 +10,8 @@ package scala.util.control
import collection.immutable.List
import java.lang.reflect.InvocationTargetException
+import language.implicitConversions
+
/** Classes representing the components of exception handling.
* Each class is independently composable. Some example usages:
@@ -28,9 +30,9 @@ import java.lang.reflect.InvocationTargetException
object Exception {
type Catcher[+T] = PartialFunction[Throwable, T]
- def mkCatcher[Ex <: Throwable: ClassManifest, T](isDef: Ex => Boolean, f: Ex => T) = new Catcher[T] {
+ def mkCatcher[Ex <: Throwable: ClassTag, T](isDef: Ex => Boolean, f: Ex => T) = new Catcher[T] {
private def downcast(x: Throwable): Option[Ex] =
- if (classManifest[Ex].erasure.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[Ex])
+ if (classTag[Ex].erasure.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[Ex])
else None
def isDefinedAt(x: Throwable) = downcast(x) exists isDef
@@ -39,7 +41,7 @@ object Exception {
def mkThrowableCatcher[T](isDef: Throwable => Boolean, f: Throwable => T) = mkCatcher(isDef, f)
- implicit def throwableSubtypeToCatcher[Ex <: Throwable: ClassManifest, T](pf: PartialFunction[Ex, T]) =
+ implicit def throwableSubtypeToCatcher[Ex <: Throwable: ClassTag, T](pf: PartialFunction[Ex, T]) =
mkCatcher(pf.isDefinedAt _, pf.apply _)
/** !!! Not at all sure of every factor which goes into this,
diff --git a/src/library/scala/util/grammar/HedgeRHS.scala b/src/library/scala/util/grammar/HedgeRHS.scala
index 8fb3d4c5dc..da109f41c5 100644
--- a/src/library/scala/util/grammar/HedgeRHS.scala
+++ b/src/library/scala/util/grammar/HedgeRHS.scala
@@ -10,13 +10,17 @@
package scala.util.grammar
+@deprecated("This class will be removed", "2.10.0")
abstract class HedgeRHS
/** Right hand side of a hedge production, deriving a single tree. */
+@deprecated("This class will be removed", "2.10.0")
case class ConsRHS(tnt: Int, hnt: Int) extends HedgeRHS
/** Right hand side of a hedge production, deriving any hedge. */
+@deprecated("This class will be removed", "2.10.0")
case object AnyHedgeRHS extends HedgeRHS
/** Right hand side of a hedge production, deriving the empty hedge. */
+@deprecated("This class will be removed", "2.10.0")
case object EmptyHedgeRHS extends HedgeRHS
diff --git a/src/library/scala/util/grammar/TreeRHS.scala b/src/library/scala/util/grammar/TreeRHS.scala
index ebe16b25bd..d6e7c01588 100644
--- a/src/library/scala/util/grammar/TreeRHS.scala
+++ b/src/library/scala/util/grammar/TreeRHS.scala
@@ -11,9 +11,12 @@
package scala.util.grammar
/** Right hand side of a tree production. */
+@deprecated("This class will be removed", "2.10.0")
abstract class TreeRHS
/** Right hand side of a tree production, labelled with a letter from an alphabet. */
+@deprecated("This class will be removed", "2.10.0")
case class LabelledRHS[A](label: A, hnt: Int) extends TreeRHS
+@deprecated("This class will be removed", "2.10.0")
case object AnyTreeRHS extends TreeRHS
diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala
new file mode 100644
index 0000000000..f27a825125
--- /dev/null
+++ b/src/library/scala/util/hashing/Hashing.scala
@@ -0,0 +1,42 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util.hashing
+
+/** `Hashing` is a trait whose instances each represent a strategy for hashing
+ * instances of a type.
+ *
+ * `Hashing`'s companion object defines a default hashing strategy for all
+ * objects - it calls their `##` method.
+ *
+ * Note: when using a custom `Hashing`, make sure to pair it with an `Equiv`
+ * such that whenever two objects are equal, their hash codes are equal as well.
+ *
+ * @since 2.10
+ */
+@annotation.implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
+trait Hashing[T] extends Serializable {
+
+ def hashCode(x: T): Int
+
+}
+
+
+object Hashing {
+
+ final class Default[T] extends Hashing[T] {
+ def hashCode(x: T) = x.##
+ }
+
+ implicit def default[T] = new Default[T]
+
+ def fromFunction[T](f: T => Int) = new Hashing[T] {
+ def hashCode(x: T) = f(x)
+ }
+
+}
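
A usage sketch for the new Hashing strategy (the case-insensitive strategy is a made-up example, not part of the library):

{{{
import scala.util.hashing.Hashing

object HashingDemo {
  def main(args: Array[String]): Unit = {
    // The implicit default strategy just delegates to ##.
    val default = implicitly[Hashing[String]]
    println(default.hashCode("scala") == "scala".##)   // true

    // A custom strategy: hash case-insensitively, and pair it with an Equiv
    // that compares the same way so equal values hash alike.
    val caseInsensitive = Hashing.fromFunction[String](_.toLowerCase.hashCode)
    println(caseInsensitive.hashCode("Scala") == caseInsensitive.hashCode("SCALA"))  // true
  }
}
}}}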
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 3f21cc9724..3655a0a019 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -145,6 +145,7 @@ import java.util.regex.{ Pattern, Matcher }
*/
@SerialVersionUID(-2094783597747625537L)
class Regex(regex: String, groupNames: String*) extends Serializable {
+ outer =>
import Regex._
@@ -179,15 +180,14 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return The matches
*/
def unapplySeq(target: Any): Option[List[String]] = target match {
- case s: java.lang.CharSequence =>
- val m = pattern.matcher(s)
- if (m.matches) Some((1 to m.groupCount).toList map m.group)
+ case s: CharSequence =>
+ val m = pattern matcher s
+ if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group)
else None
- case Match(s) =>
- unapplySeq(s)
- case _ =>
- None
+ case m: Match => unapplySeq(m.matched)
+ case _ => None
}
+ protected def runMatcher(m: Matcher) = m.matches()
/** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]],
* which is a special [[scala.collection.Iterator]] that returns the
@@ -204,7 +204,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
/** Return all matches of this regexp in given character sequence as a
- * [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match].
+ * [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]].
*
* @param source The text to match against.
* @return A [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]] for all matches.
@@ -373,10 +373,35 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
def split(toSplit: java.lang.CharSequence): Array[String] =
pattern.split(toSplit)
+ /** Create a new Regex with the same pattern, but no requirement that
+ * the entire String matches in extractor patterns. For instance, the strings
+ * shown below lead to successful matches, where they would not otherwise.
+ *
+ * {{{
+ * val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored
+ *
+ * val dateP1(year, month, day) = "Date 2011-07-15"
+ *
+ * val copyright: String = "Date of this document: 2011-07-15" match {
+ * case dateP1(year, month, day) => "Copyright "+year
+ * case _ => "No copyright"
+ * }
+ * }}}
+ *
+ * @return The new unanchored regex
+ */
+ def unanchored: UnanchoredRegex = new Regex(regex, groupNames: _*) with UnanchoredRegex { override def anchored = outer }
+ def anchored: Regex = this
+
/** The string defining the regular expression */
override def toString = regex
}
+trait UnanchoredRegex extends Regex {
+ override protected def runMatcher(m: Matcher) = m.find()
+ override def unanchored = this
+}
+
/** This object defines inner classes that describe
* regex matches and helper objects. The class hierarchy
* is as follows:
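
A sketch of the unanchored extractor added above, reusing the date pattern from the Scaladoc example:

{{{
object UnanchoredDemo {
  def main(args: Array[String]): Unit = {
    val date  = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
    val loose = date.unanchored

    // The anchored regex must match the entire input, so extraction fails here.
    println(date.unapplySeq("Released on 2011-07-15."))   // None
    // The unanchored variant only needs to find the pattern somewhere.
    "Released on 2011-07-15." match {
      case loose(year, month, day) => println(year + "/" + month + "/" + day)  // 2011/07/15
      case _                       => println("no date found")
    }
  }
}
}}}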
diff --git a/src/library/scala/util/parsing/ast/AbstractSyntax.scala b/src/library/scala/util/parsing/ast/AbstractSyntax.scala
index 220643a0d7..67e8a87221 100644
--- a/src/library/scala/util/parsing/ast/AbstractSyntax.scala
+++ b/src/library/scala/util/parsing/ast/AbstractSyntax.scala
@@ -14,6 +14,7 @@ import scala.util.parsing.input.Positional
*
* @author Adriaan Moors
*/
+@deprecated("This class will be removed", "2.10.0")
trait AbstractSyntax {
/** The base class for elements of the abstract syntax tree.
*/
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
index 0646f57064..b93c24fde4 100644
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ b/src/library/scala/util/parsing/ast/Binders.scala
@@ -10,6 +10,7 @@ package scala.util.parsing.ast
import scala.collection.AbstractIterable
import scala.collection.mutable
+import language.implicitConversions
//DISCLAIMER: this code is highly experimental!
@@ -23,6 +24,7 @@ import scala.collection.mutable
*
* @author Adriaan Moors
*/
+@deprecated("This class will be removed", "2.10.0")
trait Mappable {
trait Mapper { def apply[T <% Mappable[T]](x: T): T } /* TODO: having type `Forall T. T => T` is too strict:
sometimes we want to allow `Forall T >: precision. T => T` for some type `precision`, so that,
@@ -324,11 +326,11 @@ trait Binders extends AbstractSyntax with Mappable {
// TODO: move this to some utility object higher in the scala hierarchy?
/** Returns a given result, but executes the supplied closure before returning.
* (The effect of this closure does not influence the returned value.)
- *
- * @param result the result to be returned
- * @param block code to be executed, purely for its side-effects
*/
trait ReturnAndDo[T]{
+ /**
+ * @param block code to be executed, purely for its side-effects
+ */
def andDo(block: => Unit): T
}
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
index e993628e88..270ac680a9 100644
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
@@ -9,6 +9,8 @@
package scala.util.parsing.combinator
+import language.implicitConversions
+
/** This object contains implicit conversions that come in handy when using the `^^` combinator.
*
* Refer to [[scala.util.parsing.combinator.Parsers]] to construct an AST from the concrete syntax.
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
index bc71391bdb..06567ea348 100644
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
@@ -9,6 +9,8 @@
package scala.util.parsing.combinator
+import annotation.migration
+
/** `JavaTokenParsers` differs from [[scala.util.parsing.combinator.RegexParsers]]
* by adding the following definitions:
*
@@ -39,12 +41,13 @@ trait JavaTokenParsers extends RegexParsers {
/** Double quotes (`"`) enclosing a sequence of:
*
* - Any character except double quotes, control characters or backslash (`\`)
- * - A backslash followed by a slash, another backslash, or one of the letters
- * `b`, `f`, `n`, `r` or `t`.
+ * - A backslash followed by another backslash, a single or double quote, or one
+ * of the letters `b`, `f`, `n`, `r` or `t`
* - `\` followed by `u` followed by four hexadecimal digits
*/
+ @migration("`stringLiteral` allows escaping single and double quotes, but not forward slashes any longer.", "2.10.0")
def stringLiteral: Parser[String] =
- ("\""+"""([^"\p{Cntrl}\\]|\\[\\/bfnrt]|\\u[a-fA-F0-9]{4})*"""+"\"").r
+ ("\""+"""([^"\p{Cntrl}\\]|\\[\\'"bfnrt]|\\u[a-fA-F0-9]{4})*"""+"\"").r
/** A number following the rules of `decimalNumber`, with the following
* optional additions:
*
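
A sketch exercising the relaxed stringLiteral: escaped quotes are accepted, escaped forward slashes no longer are (the inputs are built with ordinary string escapes for clarity):

{{{
import scala.util.parsing.combinator.JavaTokenParsers

object StringLiteralDemo extends JavaTokenParsers {
  def main(args: Array[String]): Unit = {
    val withEscapedQuote = "\"it\\\"s\""   // contains: "it\"s"
    val withEscapedSlash = "\"a\\/b\""     // contains: "a\/b"
    println(parseAll(stringLiteral, withEscapedQuote).successful)  // true: \" is now a legal escape
    println(parseAll(stringLiteral, withEscapedSlash).successful)  // false: \/ is no longer accepted
  }
}
}}}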
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
index ea856efc3a..9516df0093 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala
@@ -11,6 +11,7 @@ package scala.util.parsing.combinator
import scala.util.parsing.combinator._
import scala.util.parsing.input.{ Reader, Position }
import scala.collection.mutable
+import language.implicitConversions
/**
* `PackratParsers` is a component that extends the parser combinators
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index 9aaf0aeb54..66e0a496d8 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -12,6 +12,8 @@ import scala.util.parsing.input._
import scala.collection.mutable.ListBuffer
import scala.annotation.tailrec
import annotation.migration
+import language.implicitConversions
+import scala.util.DynamicVariable
// TODO: better error handling (labelling like parsec's <?>)
@@ -153,13 +155,14 @@ trait Parsers {
val successful = true
}
- var lastNoSuccess: NoSuccess = null
+ private lazy val lastNoSuccess = new DynamicVariable[Option[NoSuccess]](None)
/** A common super-class for unsuccessful parse results. */
sealed abstract class NoSuccess(val msg: String, override val next: Input) extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error
val successful = false
- if (!(lastNoSuccess != null && next.pos < lastNoSuccess.next.pos))
- lastNoSuccess = this
+
+ if (lastNoSuccess.value map { v => !(next.pos < v.next.pos) } getOrElse true)
+ lastNoSuccess.value = Some(this)
def map[U](f: Nothing => U) = this
def mapPartial[U](f: PartialFunction[Nothing, U], error: Nothing => String): ParseResult[U] = this
@@ -595,7 +598,8 @@ trait Parsers {
* @return A parser for elements satisfying p(e).
*/
def acceptIf(p: Elem => Boolean)(err: Elem => String): Parser[Elem] = Parser { in =>
- if (p(in.first)) Success(in.first, in.rest)
+ if (in.atEnd) Failure("end of input", in)
+ else if (p(in.first)) Success(in.first, in.rest)
else Failure(err(in.first), in)
}
@@ -613,7 +617,8 @@ trait Parsers {
* applying `f` to it to produce the result.
*/
def acceptMatch[U](expected: String, f: PartialFunction[Elem, U]): Parser[U] = Parser{ in =>
- if (f.isDefinedAt(in.first)) Success(f(in.first), in.rest)
+ if (in.atEnd) Failure("end of input", in)
+ else if (f.isDefinedAt(in.first)) Success(f(in.first), in.rest)
else Failure(expected+" expected", in)
}
@@ -876,16 +881,15 @@ trait Parsers {
* if `p` consumed all the input.
*/
def phrase[T](p: Parser[T]) = new Parser[T] {
- lastNoSuccess = null
- def apply(in: Input) = p(in) match {
+ def apply(in: Input) = lastNoSuccess.withValue(None) {
+ p(in) match {
case s @ Success(out, in1) =>
if (in1.atEnd)
s
- else if (lastNoSuccess == null || lastNoSuccess.next.pos < in1.pos)
- Failure("end of input expected", in1)
else
- lastNoSuccess
- case _ => lastNoSuccess
+ lastNoSuccess.value filterNot { _.next.pos < in1.pos } getOrElse Failure("end of input expected", in1)
+ case ns => lastNoSuccess.value.getOrElse(ns)
+ }
}
}
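
The parser change above relies on scala.util.DynamicVariable; a minimal sketch of that mechanism, separate from the parser code, showing why the new lastNoSuccess is both scoped and thread-confined:

{{{
import scala.util.DynamicVariable

object DynVarDemo {
  def main(args: Array[String]): Unit = {
    val last = new DynamicVariable[Option[String]](None)

    val inside = last.withValue(None) {
      last.value = Some("failure at pos 3")   // updates only the current binding
      last.value
    }
    println(inside)      // Some(failure at pos 3)
    println(last.value)  // None: the outer binding was restored when the block ended
  }
}
}}}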
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/library/scala/util/parsing/combinator/RegexParsers.scala
index 86eecd03c4..d685329ef1 100644
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ b/src/library/scala/util/parsing/combinator/RegexParsers.scala
@@ -13,6 +13,7 @@ import java.util.regex.Pattern
import scala.util.matching.Regex
import scala.util.parsing.input._
import scala.collection.immutable.PagedSeq
+import language.implicitConversions
/** The ''most important'' differences between `RegexParsers` and
* [[scala.util.parsing.combinator.Parsers]] are:
diff --git a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
index 9979a420d6..6c3bc52c1a 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
@@ -32,7 +32,7 @@ abstract class Lexical extends Scanners with Tokens {
def digit = elem("digit", _.isDigit)
/** A character-parser that matches any character except the ones given in `cs` (and returns it).*/
- def chrExcept(cs: Char*) = elem("", ch => (cs forall (ch !=)))
+ def chrExcept(cs: Char*) = elem("", ch => (cs forall (ch != _)))
/** A character-parser that matches a white-space character (and returns it).*/
def whitespaceChar = elem("space char", ch => ch <= ' ' && ch != EofCh)
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
index 3ac5c07dc4..5d7386b5c1 100644
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
@@ -25,8 +25,8 @@ import scala.collection.mutable
* `delimiters` set.
*
* Usually this component is used to break character-based input into
- * bigger tokens, which are then passed to a token-parser {@see
- * [[scala.util.parsing.combinator.syntactical.TokenParsers]]}.
+ * bigger tokens, which are then passed to a token-parser (see
+ * [[scala.util.parsing.combinator.syntactical.TokenParsers]]).
*
* @author Martin Odersky
* @author Iulian Dragos
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index e494a69cf0..215b8b792f 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
@@ -13,6 +13,7 @@ package syntactical
import token._
import lexical.StdLexical
+import language.implicitConversions
/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index 0901f9bbd0..7aa6178df9 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
@@ -14,6 +14,7 @@ package syntactical
import token._
import scala.collection.mutable
+import language.implicitConversions
/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
diff --git a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
index 299736046e..ff3554a6af 100644
--- a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
+++ b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
@@ -3,11 +3,16 @@ package scala.util.parsing.combinator.testing
import scala.util.parsing.combinator._
import scala.util.parsing.input._
+import language.postfixOps
+@deprecated("This class will be removed", "2.10.0")
case class Ident(s: String)
+@deprecated("This class will be removed", "2.10.0")
case class Number(n: Int)
+@deprecated("This class will be removed", "2.10.0")
case class Str(s: String)
+@deprecated("This class will be removed", "2.10.0")
object RegexTest extends RegexParsers {
val ident: Parser[Any] = """[a-zA-Z_]\w*""".r ^^ (s => Ident(s))
val number: Parser[Any] = """\d\d*""".r ^^ (s => Number(s.toInt))
diff --git a/src/library/scala/util/parsing/combinator/testing/Tester.scala b/src/library/scala/util/parsing/combinator/testing/Tester.scala
index 4607dc8843..1b98d63289 100644
--- a/src/library/scala/util/parsing/combinator/testing/Tester.scala
+++ b/src/library/scala/util/parsing/combinator/testing/Tester.scala
@@ -28,6 +28,7 @@ import scala.util.parsing.combinator.syntactical.TokenParsers
* @author Martin Odersky
* @author Adriaan Moors
*/
+@deprecated("This class will be removed", "2.10.0")
abstract class Tester {
val syntactic: TokenParsers { val lexical: Lexical }
diff --git a/src/library/scala/util/parsing/input/CharArrayReader.scala b/src/library/scala/util/parsing/input/CharArrayReader.scala
index e798c9883a..63d76c9382 100644
--- a/src/library/scala/util/parsing/input/CharArrayReader.scala
+++ b/src/library/scala/util/parsing/input/CharArrayReader.scala
@@ -21,10 +21,8 @@ object CharArrayReader {
/** A character array reader reads a stream of characters (keeping track of their positions)
* from an array.
*
- * @param source an array of characters
+ * @param chars an array of characters
* @param index starting offset into the array; the first element returned will be `source(index)`
- * @param line the line number of the first element (counting from index `0` of `source`)
- * @param column the column number of the first element (counting from index `0` of `source`)
*
* @author Martin Odersky
* @author Adriaan Moors
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index c2483c44e3..57a2c9c4c2 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -45,10 +45,9 @@ case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends P
/** The column number referred to by the position; column numbers start at 1. */
def column: Int = offset - index(line - 1) + 1
- /** The contents of the line numbered `lnum` (must not contain a new-line character).
+ /** The contents of the line at the current offset.
*
- * @param lnum a 1-based integer index into the `document`
- * @return the line at `lnum` (not including a newline)
+ * @return the line at `offset` (not including a newline)
*/
def lineContents: String =
source.subSequence(index(line - 1), index(line)).toString
@@ -59,7 +58,7 @@ case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends P
/** Compare this position to another, by first comparing their line numbers,
* and then -- if necessary -- using the columns to break a tie.
*
- * @param `that` a `Position` to compare to this `Position`
+ * @param that a `Position` to compare to this `Position`
* @return true if this position's line number or (in case of equal line numbers)
* column is smaller than the corresponding components of `that`
*/
diff --git a/src/library/scala/util/parsing/input/PagedSeqReader.scala b/src/library/scala/util/parsing/input/PagedSeqReader.scala
index 134cf0a79a..284afef57b 100644
--- a/src/library/scala/util/parsing/input/PagedSeqReader.scala
+++ b/src/library/scala/util/parsing/input/PagedSeqReader.scala
@@ -23,7 +23,7 @@ object PagedSeqReader {
/** A character array reader reads a stream of characters (keeping track of their positions)
* from an array.
*
- * @param source the source sequence
+ * @param seq the source sequence
* @param offset starting offset.
*
* @author Martin Odersky
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala
index be817013a0..9cb0031746 100644
--- a/src/library/scala/util/parsing/input/Position.scala
+++ b/src/library/scala/util/parsing/input/Position.scala
@@ -27,10 +27,7 @@ trait Position {
/** The column number referred to by the position; column numbers start at 1. */
def column: Int
- /** The contents of the line numbered `lnum` (must not contain a new-line character).
- *
- * @param lnum a 1-based integer index into the `document`
- * @return the line at `lnum` (not including a newline)
+ /** The contents of the line at this position (must not contain a new-line character).
*/
protected def lineContents: String
diff --git a/src/library/scala/util/parsing/input/StreamReader.scala b/src/library/scala/util/parsing/input/StreamReader.scala
index 8244177359..3858dc3210 100644
--- a/src/library/scala/util/parsing/input/StreamReader.scala
+++ b/src/library/scala/util/parsing/input/StreamReader.scala
@@ -13,14 +13,16 @@ import scala.collection.immutable.PagedSeq
/** An object to create a `StreamReader` from a `java.io.Reader`.
*
- * @param in the `java.io.Reader` that provides the underlying
- * stream of characters for this Reader.
- *
* @author Miles Sabin
*/
object StreamReader {
final val EofCh = '\032'
+ /** Create a `StreamReader` from a `java.io.Reader`.
+ *
+ * @param in the `java.io.Reader` that provides the underlying
+ * stream of characters for this Reader.
+ */
def apply(in: java.io.Reader): StreamReader = {
new StreamReader(PagedSeq.fromReader(in), 0, 1)
}
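
A usage sketch for the factory whose documentation is moved above (the input string is arbitrary):

import java.io.StringReader
import scala.util.parsing.input.StreamReader

object StreamReaderDemo {
  def main(args: Array[String]): Unit = {
    val in = StreamReader(new StringReader("hello"))
    println(in.first)  // 'h'
    println(in.pos)    // 1.1 -- positions are tracked from the start of the stream
  }
}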
diff --git a/src/library/scala/util/regexp/Base.scala b/src/library/scala/util/regexp/Base.scala
index 8e23d46eb9..81962ea8bd 100644
--- a/src/library/scala/util/regexp/Base.scala
+++ b/src/library/scala/util/regexp/Base.scala
@@ -15,8 +15,9 @@ package scala.util.regexp
* @author Burak Emir
* @version 1.0
*/
-abstract class Base
-{
+
+@deprecated("This class will be removed", "2.10.0")
+abstract class Base {
type _regexpT <: RegExp
abstract class RegExp {
diff --git a/src/library/scala/util/regexp/PointedHedgeExp.scala b/src/library/scala/util/regexp/PointedHedgeExp.scala
index 23aa46448c..056031a339 100644
--- a/src/library/scala/util/regexp/PointedHedgeExp.scala
+++ b/src/library/scala/util/regexp/PointedHedgeExp.scala
@@ -15,6 +15,7 @@ package scala.util.regexp
* @author Burak Emir
* @version 1.0
*/
+@deprecated("This class will be removed", "2.10.0")
abstract class PointedHedgeExp extends Base {
type _regexpT <: RegExp
diff --git a/src/library/scala/util/regexp/SyntaxError.scala b/src/library/scala/util/regexp/SyntaxError.scala
index 0f5c2af187..c19dfe126e 100644
--- a/src/library/scala/util/regexp/SyntaxError.scala
+++ b/src/library/scala/util/regexp/SyntaxError.scala
@@ -16,4 +16,5 @@ package scala.util.regexp
* @author Burak Emir
* @version 1.0
*/
+@deprecated("This class will be removed", "2.10.0")
class SyntaxError(e: String) extends RuntimeException(e)
diff --git a/src/library/scala/util/regexp/WordExp.scala b/src/library/scala/util/regexp/WordExp.scala
index cf146934d1..05674f118c 100644
--- a/src/library/scala/util/regexp/WordExp.scala
+++ b/src/library/scala/util/regexp/WordExp.scala
@@ -38,6 +38,7 @@ package scala.util.regexp
* @author Burak Emir
* @version 1.0
*/
+@deprecated("This class will be removed", "2.10.0")
abstract class WordExp extends Base {
abstract class Label
diff --git a/src/library/scala/xml/Atom.scala b/src/library/scala/xml/Atom.scala
index 72572fb5e4..7bed714f68 100644
--- a/src/library/scala/xml/Atom.scala
+++ b/src/library/scala/xml/Atom.scala
@@ -37,9 +37,6 @@ class Atom[+A](val data: A) extends SpecialNode with Serializable {
/** Returns text, with some characters escaped according to the XML
* specification.
- *
- * @param sb ...
- * @return ...
*/
def buildString(sb: StringBuilder): StringBuilder =
Utility.escape(data.toString, sb)
diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala
index 6b68e97412..4c50b15e53 100644
--- a/src/library/scala/xml/Attribute.scala
+++ b/src/library/scala/xml/Attribute.scala
@@ -61,8 +61,8 @@ abstract trait Attribute extends MetaData {
else copy(next remove key)
def remove(namespace: String, scope: NamespaceBinding, key: String) =
- if (isPrefixed && this.key == key && (scope getURI pre) == namespace) next
- else next.remove(namespace, scope, key)
+ if (this.key == key && (scope getURI pre) == namespace) next
+ else copy(next.remove(namespace, scope, key))
def isPrefixed: Boolean = pre != null
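
An illustrative check of the intent behind the `remove` fix above: removing a prefixed attribute should rebuild (`copy`) the rest of the chain instead of dropping attributes that precede the match (the element literal below is hypothetical):

import scala.xml._

object RemoveAttributeDemo {
  def main(args: Array[String]): Unit = {
    val elem = <foo xmlns:ns="http://example.org" a="1" ns:b="2" c="3"/>
    // Remove ns:b by namespace URI and key; a="1" and c="3" are expected to survive.
    val trimmed = elem.attributes.remove("http://example.org", elem.scope, "b")
    println(trimmed)  // expected to still contain a="1" and c="3"
  }
}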
diff --git a/src/library/scala/xml/Comment.scala b/src/library/scala/xml/Comment.scala
index 014cead47c..9ce053190a 100644
--- a/src/library/scala/xml/Comment.scala
+++ b/src/library/scala/xml/Comment.scala
@@ -11,7 +11,7 @@ package scala.xml
/** The class `Comment` implements an XML node for comments.
*
* @author Burak Emir
- * @param text the text contained in this node, may not contain "--"
+ * @param commentText the text contained in this node, may not contain "--"
*/
case class Comment(commentText: String) extends SpecialNode {
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index 5b6b9f2bb9..f140fd1e07 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -107,5 +107,5 @@ extends Node with Serializable
/** Returns concatenation of `text(n)` for each child `n`.
*/
- override def text = child map (_.text) mkString
+ override def text = (child map (_.text)).mkString
}
diff --git a/src/library/scala/xml/EntityRef.scala b/src/library/scala/xml/EntityRef.scala
index 60feeb845d..66438135c8 100644
--- a/src/library/scala/xml/EntityRef.scala
+++ b/src/library/scala/xml/EntityRef.scala
@@ -12,7 +12,7 @@ package scala.xml
*
* @author Burak Emir
* @version 1.0
- * @param text the text contained in this node.
+ * @param entityName the name of the entity reference, for example `amp`.
*/
case class EntityRef(entityName: String) extends SpecialNode {
final override def doCollectNamespaces = false
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index c516747bae..e98ec90aca 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -102,7 +102,6 @@ extends AbstractIterable[MetaData]
* @param namespace_uri namespace uri of key
* @param owner the element owning this attribute list
* @param key the attribute key
- * @return ...
*/
final def apply(namespace_uri: String, owner: Node, key: String): Seq[Node] =
apply(namespace_uri, owner.scope, key)
@@ -112,15 +111,12 @@ extends AbstractIterable[MetaData]
*
* @param namespace_uri namespace uri of key
* @param scp a namespace scp (usually of the element owning this attribute list)
- * @param key to be looked fore
+ * @param k the key to be looked for
* @return value as Seq[Node] if key is found, null otherwise
*/
- def apply(namespace_uri:String, scp:NamespaceBinding, k:String): Seq[Node]
+ def apply(namespace_uri: String, scp: NamespaceBinding, k: String): Seq[Node]
/** returns a copy of this MetaData item with next field set to argument.
- *
- * @param next ...
- * @return ...
*/
def copy(next: MetaData): MetaData
@@ -167,7 +163,7 @@ extends AbstractIterable[MetaData]
/** Returns a Map containing the attributes stored as key/value pairs.
*/
def asAttrMap: Map[String, String] =
- iterator map (x => (x.prefixedKey, x.value.text)) toMap
+ (iterator map (x => (x.prefixedKey, x.value.text))).toMap
/** returns Null or the next MetaData item */
def next: MetaData
@@ -208,31 +204,13 @@ extends AbstractIterable[MetaData]
}
/**
- * @param scope ...
- * @return `'''true'''` iff ...
*/
def wellformed(scope: NamespaceBinding): Boolean
- /**
- * @param key ...
- * @return ...
- */
def remove(key: String): MetaData
- /**
- * @param namespace ...
- * @param scope ...
- * @param key ...
- * @return ...
- */
def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData
- /**
- * @param namespace ...
- * @param owner ...
- * @param key ...
- * @return ...
- */
final def remove(namespace: String, owner: Node, key: String): MetaData =
remove(namespace, owner.scope, key)
}
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
index 02e34e1bdc..9cf1869efc 100755
--- a/src/library/scala/xml/Node.scala
+++ b/src/library/scala/xml/Node.scala
@@ -155,24 +155,18 @@ abstract class Node extends NodeSeq {
/**
* String representation of this node
*
- * @param stripComment if true, strips comment nodes from result
- * @return ...
+ * @param stripComments if true, strips comment nodes from result
*/
def buildString(stripComments: Boolean): String =
Utility.serialize(this, stripComments = stripComments).toString
/**
* Same as `toString('''false''')`.
- *
- * @see <code><a href="#toString">toString(Boolean)</a></code>
*/
override def toString(): String = buildString(false)
/**
* Appends qualified name of this node to `StringBuilder`.
- *
- * @param sb ...
- * @return ...
*/
def nameToString(sb: StringBuilder): StringBuilder = {
if (null != prefix) {
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index ff5618645f..40ddc7d85c 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -11,6 +11,7 @@ package scala.xml
import collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
import mutable.{ Builder, ListBuffer }
import generic.{ CanBuildFrom }
+import language.implicitConversions
/** This object ...
*
@@ -87,9 +88,6 @@ abstract class NodeSeq extends AbstractSeq[Node] with immutable.Seq[Node] with S
* There is no support for searching a prefixed attribute by its literal prefix.
*
* The document order is preserved.
- *
- * @param that ...
- * @return ...
*/
def \(that: String): NodeSeq = {
def fail = throw new IllegalArgumentException(that)
@@ -137,9 +135,6 @@ abstract class NodeSeq extends AbstractSeq[Node] with immutable.Seq[Node] with S
* There is no support for searching a prefixed attribute by its literal prefix.
*
* The document order is preserved.
- *
- * @param that ...
- * @return ...
*/
def \\ (that: String): NodeSeq = {
def filt(cond: (Node) => Boolean) = this flatMap (_.descendant_or_self) filter cond
@@ -152,5 +147,5 @@ abstract class NodeSeq extends AbstractSeq[Node] with immutable.Seq[Node] with S
override def toString(): String = theSeq.mkString
- def text: String = this map (_.text) mkString
+ def text: String = (this map (_.text)).mkString
}
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala
index b80d6a1c73..5cab113d85 100644
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ b/src/library/scala/xml/PrefixedAttribute.scala
@@ -11,10 +11,10 @@ package scala.xml
/** prefixed attributes always have a non-null namespace.
*
- * @param pre ...
- * @param key ...
+ * @param pre the namespace prefix of this attribute
+ * @param key the local part of this attribute's key
* @param value the attribute value
- * @param next ...
+ * @param next1 the rest of the attribute chain (the following MetaData item)
*/
class PrefixedAttribute(
val pre: String,
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index 64dbd00f2f..da82aca33a 100755
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -42,10 +42,6 @@ class PrettyPrinter(width: Int, step: Int) {
}
/** Try to cut at whitespace.
- *
- * @param s ...
- * @param ind ...
- * @return ...
*/
protected def cut(s: String, ind: Int): List[Item] = {
val tmp = width - cur
@@ -74,10 +70,6 @@ class PrettyPrinter(width: Int, step: Int) {
}
/** Try to make indented box, if possible, else para.
- *
- * @param ind ...
- * @param s ...
- * @return ...
*/
protected def makeBox(ind: Int, s: String) =
if (cur + s.length > width) { // fits in this line
@@ -99,10 +91,6 @@ class PrettyPrinter(width: Int, step: Int) {
cur = 0
}
- /**
- * @param n ...
- * @return ...
- */
protected def leafTag(n: Node) = {
def mkLeaf(sb: StringBuilder) {
sb append '<'
@@ -149,7 +137,6 @@ class PrettyPrinter(width: Int, step: Int) {
private def doPreserve(node: Node) =
node.attribute(XML.namespace, XML.space).map(_.toString == XML.preserve) getOrElse false
- /** @param tail: what we'd like to sqeeze in */
protected def traverse(node: Node, pscope: NamespaceBinding, ind: Int): Unit = node match {
case Text(s) if s.trim() == "" =>
@@ -210,7 +197,6 @@ class PrettyPrinter(width: Int, step: Int) {
* given namespace to prefix mapping to the given string buffer.
*
* @param n the node to be serialized
- * @param pmap the namespace to prefix mapping
* @param sb the stringbuffer to append to
*/
def format(n: Node, sb: StringBuilder) { // entry point
@@ -250,9 +236,9 @@ class PrettyPrinter(width: Int, step: Int) {
/** Returns a formatted string containing well-formed XML with
* given namespace to prefix mapping.
*
- * @param n the node to be serialized
- * @param pmap the namespace to prefix mapping
- * @return ...
+ * @param n the node to be serialized
+ * @param pscope the namespace to prefix mapping
+ * @return the formatted string
*/
def format(n: Node, pscope: NamespaceBinding = null): String =
sbToString(format(n, pscope, _))
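
A usage sketch for the `format` overload documented above (width and step values are arbitrary):

import scala.xml._

object PrettyPrinterDemo {
  def main(args: Array[String]): Unit = {
    val printer = new PrettyPrinter(40, 2)  // wrap at 40 columns, indent by 2
    val doc = <root><child name="a"/><child name="b"/></root>
    println(printer.format(doc))
  }
}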
diff --git a/src/library/scala/xml/ProcInstr.scala b/src/library/scala/xml/ProcInstr.scala
index b3f4ba9186..152bcf989f 100644
--- a/src/library/scala/xml/ProcInstr.scala
+++ b/src/library/scala/xml/ProcInstr.scala
@@ -12,8 +12,8 @@ package scala.xml
/** an XML node for processing instructions (PI)
*
* @author Burak Emir
- * @param target target name of this PI
- * @param text text contained in this node, may not contain "?>"
+ * @param target target name of this PI
+ * @param proctext text contained in this node, may not contain "?>"
*/
case class ProcInstr(target: String, proctext: String) extends SpecialNode
{
diff --git a/src/library/scala/xml/TextBuffer.scala b/src/library/scala/xml/TextBuffer.scala
index bcd5fc731f..3d62afb2ef 100644
--- a/src/library/scala/xml/TextBuffer.scala
+++ b/src/library/scala/xml/TextBuffer.scala
@@ -25,9 +25,6 @@ class TextBuffer
val sb = new StringBuilder()
/** Appends this string to the text buffer, trimming whitespaces as needed.
- *
- * @param cs ...
- * @return ...
*/
def append(cs: Seq[Char]): this.type = {
cs foreach { c =>
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 9f944c0e92..17c91fa52c 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -10,6 +10,7 @@ package scala.xml
import scala.collection.mutable
import parsing.XhtmlEntities
+import language.implicitConversions
/**
* The `Utility` object provides utility functions for processing instances
@@ -20,6 +21,8 @@ import parsing.XhtmlEntities
object Utility extends AnyRef with parsing.TokenTests {
final val SU = '\u001A'
+ // [Martin] This looks dubious. We don't convert StringBuilders to
+ // Strings anywhere else, why do it here?
implicit def implicitSbToString(sb: StringBuilder) = sb.toString()
// helper for the extremely oft-repeated sequence of creating a
@@ -74,9 +77,6 @@ object Utility extends AnyRef with parsing.TokenTests {
/**
* Escapes the characters &lt; &gt; &amp; and &quot; from string.
- *
- * @param text ...
- * @return ...
*/
final def escape(text: String): String = sbToString(escape(text, _))
@@ -99,10 +99,6 @@ object Utility extends AnyRef with parsing.TokenTests {
/**
* Appends escaped string to `s`.
- *
- * @param text ...
- * @param s ...
- * @return ...
*/
final def escape(text: String, s: StringBuilder): StringBuilder = {
// Implemented per XML spec:
@@ -132,28 +128,20 @@ object Utility extends AnyRef with parsing.TokenTests {
* Appends unescaped string to `s`, `amp` becomes `&amp;`,
* `lt` becomes `&lt;` etc..
*
- * @param ref ...
- * @param s ...
* @return `'''null'''` if `ref` was not a predefined entity.
*/
final def unescape(ref: String, s: StringBuilder): StringBuilder =
- (unescMap get ref) map (s append _) orNull
+ ((unescMap get ref) map (s append _)).orNull
/**
* Returns a set of all namespaces used in a sequence of nodes
* and all their descendants, including the empty namespaces.
- *
- * @param nodes ...
- * @return ...
*/
def collectNamespaces(nodes: Seq[Node]): mutable.Set[String] =
nodes.foldLeft(new mutable.HashSet[String]) { (set, x) => collectNamespaces(x, set) ; set }
/**
* Adds all namespaces in node to set.
- *
- * @param n ...
- * @param set ...
*/
def collectNamespaces(n: Node, set: mutable.Set[String]) {
if (n.doCollectNamespaces) {
@@ -270,9 +258,6 @@ object Utility extends AnyRef with parsing.TokenTests {
/**
* Returns prefix of qualified name if any.
- *
- * @param name ...
- * @return ...
*/
final def prefix(name: String): Option[String] = (name indexOf ':') match {
case -1 => None
@@ -281,11 +266,6 @@ object Utility extends AnyRef with parsing.TokenTests {
/**
* Returns a hashcode for the given constituents of a node
- *
- * @param uri
- * @param label
- * @param attribHashCode
- * @param children
*/
def hashCode(pre: String, label: String, attribHashCode: Int, scpeHash: Int, children: Seq[Node]) =
scala.util.MurmurHash3.orderedHash(label +: attribHashCode +: scpeHash +: children, pre.##)
@@ -295,10 +275,6 @@ object Utility extends AnyRef with parsing.TokenTests {
/**
* Appends &quot;s&quot; if string `s` does not contain &quot;,
* &apos;s&apos; otherwise.
- *
- * @param s ...
- * @param sb ...
- * @return ...
*/
def appendQuoted(s: String, sb: StringBuilder) = {
val ch = if (s contains '"') '\'' else '"'
@@ -307,10 +283,6 @@ object Utility extends AnyRef with parsing.TokenTests {
/**
* Appends &quot;s&quot; and escapes any &quot; in s with \&quot;
- *
- * @param s ...
- * @param sb ...
- * @return ...
*/
def appendEscapedQuoted(s: String, sb: StringBuilder): StringBuilder = {
sb.append('"')
@@ -321,11 +293,6 @@ object Utility extends AnyRef with parsing.TokenTests {
sb.append('"')
}
- /**
- * @param s ...
- * @param index ...
- * @return ...
- */
def getName(s: String, index: Int): String = {
if (index >= s.length) null
else {
@@ -338,9 +305,6 @@ object Utility extends AnyRef with parsing.TokenTests {
/**
* Returns `'''null'''` if the value is a correct attribute value,
* error message if it isn't.
- *
- * @param value ...
- * @return ...
*/
def checkAttributeValue(value: String): String = {
var i = 0
@@ -362,12 +326,6 @@ object Utility extends AnyRef with parsing.TokenTests {
null
}
- /**
- * new
- *
- * @param value ...
- * @return ...
- */
def parseAttributeValue(value: String): Seq[Node] = {
val sb = new StringBuilder
var rfb: StringBuilder = null
@@ -423,11 +381,6 @@ object Utility extends AnyRef with parsing.TokenTests {
* | "&amp;#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
* }}}
* See [66]
- *
- * @param ch ...
- * @param nextch ...
- * @param reportSyntaxError ...
- * @return ...
*/
def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
val hex = (ch() == 'x') && { nextch(); true }
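
A usage sketch for the `escape`/`unescape` helpers whose docs are trimmed above (input strings are arbitrary):

import scala.xml.Utility

object EscapeDemo {
  def main(args: Array[String]): Unit = {
    println(Utility.escape("a < b & \"c\""))               // a &lt; b &amp; &quot;c&quot;
    println(Utility.unescape("amp", new StringBuilder))    // &
    println(Utility.unescape("bogus", new StringBuilder))  // null -- not a predefined entity
  }
}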
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index 1e9a3a4b58..a5d2a6bd7e 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -36,8 +36,8 @@ object ContentModel extends WordExp {
def traverse(r: RegExp): Set[String] = r match { // !!! check for match translation problem
case Letter(ElemName(name)) => Set(name)
case Star( x @ _ ) => traverse( x ) // bug if x@_*
- case Sequ( xs @ _* ) => Set(xs map traverse flatten: _*)
- case Alt( xs @ _* ) => Set(xs map traverse flatten: _*)
+ case Sequ( xs @ _* ) => Set(xs flatMap traverse: _*)
+ case Alt( xs @ _* ) => Set(xs flatMap traverse: _*)
}
traverse(r)
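
The `traverse` change above swaps `xs map traverse flatten` for `xs flatMap traverse`; a tiny equivalence check (sample data is arbitrary):

object FlattenVsFlatMap {
  def main(args: Array[String]): Unit = {
    val xs = Seq("ab", "cd")
    val f: String => Set[Char] = _.toSet
    // Same elements either way; flatMap avoids the intermediate collection
    // and the postfix `flatten` call.
    println((xs map f).flatten == (xs flatMap f))  // true
  }
}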
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala
index f0ef09b901..64aa7e2f74 100644
--- a/src/library/scala/xml/dtd/DocType.scala
+++ b/src/library/scala/xml/dtd/DocType.scala
@@ -14,7 +14,7 @@ package dtd
*
* @author Burak Emir
*
- * @param target name of this DOCTYPE
+ * @param name name of this DOCTYPE
* @param extID None, or Some(external ID of this doctype)
* @param intSubset sequence of internal subset declarations
*/
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala
index fdde18a926..a0a5818d07 100644
--- a/src/library/scala/xml/dtd/ExternalID.scala
+++ b/src/library/scala/xml/dtd/ExternalID.scala
@@ -41,7 +41,7 @@ abstract class ExternalID extends parsing.TokenTests
/** a system identifier
*
* @author Burak Emir
- * @param systemLiteral the system identifier literal
+ * @param systemId the system identifier literal
*/
case class SystemID(systemId: String) extends ExternalID {
val publicId = null
@@ -54,8 +54,8 @@ case class SystemID(systemId: String) extends ExternalID {
/** a public identifier (see http://www.w3.org/QA/2002/04/valid-dtd-list.html).
*
* @author Burak Emir
- * @param publicLiteral the public identifier literal
- * @param systemLiteral (can be null for notation pubIDs) the system identifier literal
+ * @param publicId the public identifier literal
+ * @param systemId (can be null for notation pubIDs) the system identifier literal
*/
case class PublicID(publicId: String, systemId: String) extends ExternalID {
if (!checkPubID(publicId))
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala
index abf8f97f03..45ba9530e1 100644
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ b/src/library/scala/xml/factory/LoggedNodeFactory.scala
@@ -12,7 +12,7 @@ package factory
/** This class logs what the nodefactory is actually doing.
* If you want to see what happens during loading, use it like this:
{{{
-object testLogged extends Application {
+object testLogged extends App {
val x = new scala.xml.parsing.NoBindingFactoryAdapter
with scala.xml.factory.LoggedNodeFactory[scala.xml.Elem]
with scala.util.logging.ConsoleLogger
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
index 4bd0336a26..52ddf6b476 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala
@@ -249,7 +249,7 @@ class XIncludeFilter extends XMLFilterImpl {
* calls to `characters()`. It's used to include files with `parse="text"`.
*
* @param url URL of the document that will be read
- * @param encoding Encoding of the document; e.g. UTF-8,
+ * @param encoding1 Encoding of the document; e.g. UTF-8,
* ISO-8859-1, etc.
* @return void
* @throws SAXException if the requested document cannot
diff --git a/src/library/scala/xml/parsing/ConstructingParser.scala b/src/library/scala/xml/parsing/ConstructingParser.scala
index 5571c9844d..471cde056e 100644
--- a/src/library/scala/xml/parsing/ConstructingParser.scala
+++ b/src/library/scala/xml/parsing/ConstructingParser.scala
@@ -16,10 +16,10 @@ import scala.io.Source
object ConstructingParser {
def fromFile(inp: File, preserveWS: Boolean) =
- new ConstructingParser(Source.fromFile(inp), preserveWS) initialize
+ new ConstructingParser(Source.fromFile(inp), preserveWS).initialize
def fromSource(inp: Source, preserveWS: Boolean) =
- new ConstructingParser(inp, preserveWS) initialize
+ new ConstructingParser(inp, preserveWS).initialize
}
/** An xml parser. parses XML and invokes callback methods of a MarkupHandler.
diff --git a/src/library/scala/xml/parsing/ExternalSources.scala b/src/library/scala/xml/parsing/ExternalSources.scala
index ca6cea4c67..127d66bf6f 100644
--- a/src/library/scala/xml/parsing/ExternalSources.scala
+++ b/src/library/scala/xml/parsing/ExternalSources.scala
@@ -23,11 +23,6 @@ import scala.io.Source
trait ExternalSources {
self: ExternalSources with MarkupParser with MarkupHandler =>
- /** ...
- *
- * @param systemId ...
- * @return ...
- */
def externalSource(systemId: String): Source = {
if (systemId startsWith "http:")
return Source fromURL new URL(systemId)
diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala
index 5aad0e7ce1..507a14a418 100644
--- a/src/library/scala/xml/parsing/FactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/FactoryAdapter.scala
@@ -158,7 +158,7 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node
/** End element.
* @param uri
- * @param localName
+ * @param _localName
* @param qname
* @throws org.xml.sax.SAXException if ..
*/
diff --git a/src/library/scala/xml/parsing/MarkupHandler.scala b/src/library/scala/xml/parsing/MarkupHandler.scala
index 83db2f177d..8d66fd0a7f 100755
--- a/src/library/scala/xml/parsing/MarkupHandler.scala
+++ b/src/library/scala/xml/parsing/MarkupHandler.scala
@@ -64,7 +64,6 @@ abstract class MarkupHandler extends Logged
* @param pos the position in the source file
* @param pre the prefix
* @param label the local name
- * @param attrs the attributes (metadata)
*/
def elemEnd(pos: Int, pre: String, label: String): Unit = ()
@@ -77,7 +76,6 @@ abstract class MarkupHandler extends Logged
* @param attrs the attributes (metadata)
* @param empty `true` if the element was previously empty; `false` otherwise.
* @param args the children of this element
- * @return ...
*/
def elem(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, empty: Boolean, args: NodeSeq): NodeSeq
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index c6da4bb546..096f8a8f38 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -54,8 +54,8 @@ private[scala] trait MarkupParserCommon extends TokenTests {
xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
}
- /** attribute value, terminated by either ' or ". value may not contain <.
- * @param endch either ' or "
+ /** Attribute value, terminated by either `'` or `"`. The value may not contain `<`.
+ * @param endCh either `'` or `"`
*/
def xAttributeValue(endCh: Char): String = {
val buf = new StringBuilder
diff --git a/src/library/scala/xml/pull/XMLEvent.scala b/src/library/scala/xml/pull/XMLEvent.scala
index dff81e8ed6..58d7d9fac5 100644
--- a/src/library/scala/xml/pull/XMLEvent.scala
+++ b/src/library/scala/xml/pull/XMLEvent.scala
@@ -38,7 +38,7 @@ case class EvElemEnd(pre: String, label: String) extends XMLEvent
case class EvText(text: String) extends XMLEvent
/** An entity reference was encountered.
- * @param the name of the entity, e.g. `gt` when encountering the entity `&gt;`
+ * @param entity the name of the entity, e.g. `gt` when encountering the entity `&gt;`
*/
case class EvEntityRef(entity: String) extends XMLEvent
diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/library/scala/xml/transform/BasicTransformer.scala
index 002f86abe7..0ae417a7f8 100644
--- a/src/library/scala/xml/transform/BasicTransformer.scala
+++ b/src/library/scala/xml/transform/BasicTransformer.scala
@@ -18,11 +18,6 @@ package transform
*/
abstract class BasicTransformer extends Function1[Node,Node]
{
- /**
- * @param n ...
- * @param ns ...
- * @return ...
- */
protected def unchanged(n: Node, ns: Seq[Node]) =
ns.length == 1 && (ns.head == n)
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
index aaea4416dd..86678760be 100644
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -5,6 +5,7 @@
package scala.tools.partest
+import scala.reflect.{mirror => rm}
import scala.tools.nsc._
/** For testing compiler internals directly.
@@ -29,13 +30,13 @@ abstract class CompilerTest extends DirectTest {
// Override at least one of these...
def code = ""
def sources: List[String] = List(code)
-
+
// Utility functions
-
+
class MkType(sym: Symbol) {
- def apply[M](implicit m1: Manifest[M]): Type =
+ def apply[M](implicit t: rm.TypeTag[M]): Type =
if (sym eq NoSymbol) NoType
- else appliedType(sym, manifestToType(m1))
+ else appliedType(sym, compilerTypeFromTag(t))
}
implicit def mkMkType(sym: Symbol) = new MkType(sym)
@@ -47,7 +48,7 @@ abstract class CompilerTest extends DirectTest {
}
loop(Set(), List(root))
}
-
+
class SymsInPackage(pkgName: String) {
def pkg = getRequiredModule(pkgName)
def classes = allMembers(pkg) filter (_.isClass)
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index ad2e155182..67b38d2e24 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -309,6 +309,16 @@ class PartestTask extends Task with CompilationPathProperty {
}
} getOrElse sys.error("Provided classpath does not contain a Scala actors.")
+ val scalaActorsMigration = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-actors-migration.jar" => true
+ case "actors-migration" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala actors.")
+
def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
val parts = a.getParts
if(parts eq null) Seq[String]() else parts.toSeq
@@ -335,6 +345,7 @@ class PartestTask extends Task with CompilationPathProperty {
antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
antFileManager.LATEST_ACTORS = scalaActors.getAbsolutePath
+ antFileManager.LATEST_ACTORS_MIGRATION = scalaActorsMigration.getAbsolutePath
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala
index 2eb026ceee..142f2baea5 100644
--- a/src/partest/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala
@@ -21,10 +21,10 @@ import scala.tools.nsc.reporters.ConsoleReporter
import scala.tools.nsc.doc.model._
import scala.tools.partest.ScaladocModelTest
- object Test extends ScaladocModelTest {
+ object Test extends ScaladocModelTest {
- def code = """ ... """
- def scaladocSettings = ""
+ override def code = """ ... """ // or override def resourceFile = "<file>.scala" (from test/scaladoc/resources)
+ def scaladocSettings = " ... "
def testModel(rootPackage: Package) = {
// get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
import access._
@@ -39,10 +39,22 @@ abstract class ScaladocModelTest extends DirectTest {
/** Override this to give scaladoc command line parameters */
def scaladocSettings: String
-
+
/** Override this to test the model */
def testModel(root: Package): Unit
+ /** Override to feed a file in resources to scaladoc */
+ def resourceFile: String = null
+
+ /** Override to feed code into scaladoc */
+ override def code =
+ if (resourceFile ne null)
+ io.File(resourcePath + "/" + resourceFile).slurp()
+ else
+ sys.error("Scaladoc Model Test: You need to give a file or some code to feed to scaladoc!")
+
+ def resourcePath = io.Directory(sys.props("partest.cwd") + "/../resources")
+
// Implementation follows:
override def extraSettings: String = "-usejavacp"
@@ -50,15 +62,15 @@ abstract class ScaladocModelTest extends DirectTest {
// redirect err to out, for logging
val prevErr = System.err
System.setErr(System.out)
-
+
try {
// 1 - compile with scaladoc and get the model out
- val args = scaladocSettings.split(" ")
- val universe = model(args:_*).getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")})
+ val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")})
// 2 - check the model generated
testModel(universe.rootPackage)
+ println("Done.")
} catch {
- case e =>
+ case e =>
println(e)
e.printStackTrace
}
@@ -66,51 +78,46 @@ abstract class ScaladocModelTest extends DirectTest {
System.setErr(prevErr)
}
+ private[this] var settings: Settings = null
+
// create a new scaladoc compiler
- def newDocFactory(args: String*): DocFactory = {
- val settings = new Settings(_ => ())
- val command = new ScalaDoc.Command((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+ def newDocFactory: DocFactory = {
+ settings = new Settings(_ => ())
+ settings.reportModel = false // yaay, no more "model contains X documentable templates"!
+ val args = extraSettings + " " + scaladocSettings
+ val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings)
val docFact = new DocFactory(new ConsoleReporter(settings), settings)
docFact
}
// compile with scaladoc and output the result
- def model(args: String*): Option[Universe] = newDocFactory(args: _*).makeUniverse(Right(code))
+ def model: Option[Universe] = newDocFactory.makeUniverse(Right(code))
// so we don't get the newSettings warning
- override def isDebug = false
+ override def isDebug = false
// finally, enable easy navigation inside the entities
object access {
- // Make it easy to access things
class TemplateAccess(tpl: DocTemplateEntity) {
-
def _class(name: String): DocTemplateEntity = getTheFirst(_classes(name), tpl.qualifiedName + ".class(" + name + ")")
- def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).flatMap({ case c: Class => List(c)})
+ def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: Class => c})
def _trait(name: String): DocTemplateEntity = getTheFirst(_traits(name), tpl.qualifiedName + ".trait(" + name + ")")
- def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).flatMap({ case t: Trait => List(t)})
+ def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: Trait => t})
def _object(name: String): DocTemplateEntity = getTheFirst(_objects(name), tpl.qualifiedName + ".object(" + name + ")")
- def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).flatMap({ case o: Object => List(o)})
+ def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: Object => o})
def _method(name: String): Def = getTheFirst(_methods(name), tpl.qualifiedName + ".method(" + name + ")")
def _methods(name: String): List[Def] = tpl.methods.filter(_.name == name)
-
+
def _value(name: String): Val = getTheFirst(_values(name), tpl.qualifiedName + ".value(" + name + ")")
def _values(name: String): List[Val] = tpl.values.filter(_.name == name)
- def getTheFirst[T](list: List[T], expl: String): T = {
- if (list.length == 1)
- list.head
- else if (list.length == 0)
- sys.error("Error getting " + expl + ": No such element. All elements in list: [" + list.mkString(", ") + "]")
- else
- sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " +
- "All elements in list: [" + list.mkString(", ") + "]")
- }
+ def _conversion(name: String): ImplicitConversion = getTheFirst(_conversions(name), tpl.qualifiedName + ".conversion(" + name + ")")
+ def _conversions(name: String): List[ImplicitConversion] = tpl.conversions.filter(_.conversionQualifiedName == name)
}
class PackageAccess(pack: Package) extends TemplateAccess(pack) {
@@ -118,7 +125,22 @@ abstract class ScaladocModelTest extends DirectTest {
def _packages(name: String): List[Package] = pack.packages.filter(_.name == name)
}
+ class MemberAccess(mbrs: WithMembers) {
+ def _member(name: String): MemberEntity = getTheFirst(_members(name), mbrs.toString + ".member(" + name + ")")
+ def _members(name: String): List[MemberEntity] = mbrs.members.filter(_.name == name)
+ }
+
+ type WithMembers = { def members: List[MemberEntity]; def toString: String } /* DocTemplates and ImplicitConversions */
+
implicit def templateAccess(tpl: DocTemplateEntity) = new TemplateAccess(tpl)
implicit def packageAccess(pack: Package) = new PackageAccess(pack)
+ implicit def membersAccess(mbrs: WithMembers) = new MemberAccess(mbrs)
+
+ def getTheFirst[T](list: List[T], expl: String): T = list.length match {
+ case 1 => list.head
+ case 0 => sys.error("Error getting " + expl + ": No such element.")
+ case _ => sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " +
+ "All elements in list: [" + list.mkString(", ") + "]")
+ }
}
}
diff --git a/src/partest/scala/tools/partest/SigTest.scala b/src/partest/scala/tools/partest/SigTest.scala
index 072ec006f9..999d901d21 100644
--- a/src/partest/scala/tools/partest/SigTest.scala
+++ b/src/partest/scala/tools/partest/SigTest.scala
@@ -20,31 +20,31 @@ trait SigTest {
def isObjectMethodName(name: String) = classOf[Object].getMethods exists (_.getName == name)
- def fields[T: ClassManifest](p: JField => Boolean) = {
- val cl = classManifest[T].erasure
+ def fields[T: ClassTag](p: JField => Boolean) = {
+ val cl = classTag[T].erasure
val fs = (cl.getFields ++ cl.getDeclaredFields).distinct sortBy (_.getName)
fs filter p
}
- def methods[T: ClassManifest](p: JMethod => Boolean) = {
- val cl = classManifest[T].erasure
+ def methods[T: ClassTag](p: JMethod => Boolean) = {
+ val cl = classTag[T].erasure
val ms = (cl.getMethods ++ cl.getDeclaredMethods).distinct sortBy (x => (x.getName, x.isBridge))
ms filter p
}
- def allFields[T: ClassManifest]() = fields[T](_ => true)
- def allMethods[T: ClassManifest]() = methods[T](m => !isObjectMethodName(m.getName))
- def fieldsNamed[T: ClassManifest](name: String) = fields[T](_.getName == name)
- def methodsNamed[T: ClassManifest](name: String) = methods[T](_.getName == name)
+ def allFields[T: ClassTag]() = fields[T](_ => true)
+ def allMethods[T: ClassTag]() = methods[T](m => !isObjectMethodName(m.getName))
+ def fieldsNamed[T: ClassTag](name: String) = fields[T](_.getName == name)
+ def methodsNamed[T: ClassTag](name: String) = methods[T](_.getName == name)
- def allGenericStrings[T: ClassManifest]() =
+ def allGenericStrings[T: ClassTag]() =
(allMethods[T]() map mstr) ++ (allFields[T]() map fstr)
- def genericStrings[T: ClassManifest](name: String) =
+ def genericStrings[T: ClassTag](name: String) =
(methodsNamed[T](name) map mstr) ++ (fieldsNamed[T](name) map fstr)
- def show[T: ClassManifest](name: String = "") = {
- println(classManifest[T].erasure.getName)
+ def show[T: ClassTag](name: String = "") = {
+ println(classTag[T].erasure.getName)
if (name == "") allGenericStrings[T]() foreach println
else genericStrings[T](name) foreach println
}
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
index e77385d6e9..dc83e4ea66 100644
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ b/src/partest/scala/tools/partest/nest/AntRunner.scala
@@ -23,6 +23,7 @@ class AntRunner extends DirectRunner {
var LATEST_COMP: String = _
var LATEST_PARTEST: String = _
var LATEST_ACTORS: String = _
+ var LATEST_ACTORS_MIGRATION: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
index 7aaa7bab00..c674e21482 100644
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ b/src/partest/scala/tools/partest/nest/CompileManager.scala
@@ -18,6 +18,21 @@ import io.Path
import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter }
import File.pathSeparator
+sealed abstract class CompilationOutcome {
+ def merge(other: CompilationOutcome): CompilationOutcome
+ def isPositive = this eq CompileSuccess
+ def isNegative = this eq CompileFailed
+}
+case object CompileSuccess extends CompilationOutcome {
+ def merge(other: CompilationOutcome) = other
+}
+case object CompileFailed extends CompilationOutcome {
+ def merge(other: CompilationOutcome) = if (other eq CompileSuccess) this else other
+}
+case object CompilerCrashed extends CompilationOutcome {
+ def merge(other: CompilationOutcome) = this
+}
+
class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
shortname = true
}
@@ -32,7 +47,7 @@ class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
}
abstract class SimpleCompiler {
- def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean
+ def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome
}
class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
@@ -68,7 +83,7 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
(opt2 ::: pluginOption) mkString " "
}
- def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean = {
+ def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome = {
val testSettings = out match {
case Some(f) => newSettings(f.getAbsolutePath)
case _ => newSettings()
@@ -118,6 +133,7 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
catch {
case FatalError(msg) =>
testRep.error(null, "fatal error: " + msg)
+ return CompilerCrashed
}
testRep.printSummary()
@@ -125,81 +141,13 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
}
finally logWriter.close()
- !testRep.hasErrors
+ if (testRep.hasErrors) CompileFailed
+ else CompileSuccess
}
}
-// class ReflectiveCompiler(val fileManager: ConsoleFileManager) extends SimpleCompiler {
-// import fileManager.{latestCompFile, latestPartestFile}
-//
-// val sepUrls = Array(latestCompFile.toURI.toURL, latestPartestFile.toURI.toURL)
-// //NestUI.verbose("constructing URLClassLoader from URLs "+latestCompFile+" and "+latestPartestFile)
-//
-// val sepLoader = new java.net.URLClassLoader(sepUrls, null)
-//
-// val sepCompilerClass =
-// sepLoader.loadClass("scala.tools.partest.nest.DirectCompiler")
-// val sepCompiler = sepCompilerClass.newInstance()
-//
-// // needed for reflective invocation
-// val fileClass = Class.forName("java.io.File")
-// val stringClass = Class.forName("java.lang.String")
-// val sepCompileMethod =
-// sepCompilerClass.getMethod("compile", fileClass, stringClass)
-// val sepCompileMethod2 =
-// sepCompilerClass.getMethod("compile", fileClass, stringClass, fileClass)
-//
-// /* This method throws java.lang.reflect.InvocationTargetException
-// * if the compiler crashes.
-// * This exception is handled in the shouldCompile and shouldFailCompile
-// * methods of class CompileManager.
-// */
-// def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean = {
-// val res = sepCompileMethod2.invoke(sepCompiler, out, files, kind, log).asInstanceOf[java.lang.Boolean]
-// res.booleanValue()
-// }
-// }
-
class CompileManager(val fileManager: FileManager) {
- var compiler: SimpleCompiler = new DirectCompiler(fileManager)
-
- var numSeparateCompilers = 1
- def createSeparateCompiler() = {
- numSeparateCompilers += 1
- compiler = new /*ReflectiveCompiler*/ DirectCompiler(fileManager)
- }
-
- /* This method returns true iff compilation succeeds.
- */
- def shouldCompile(files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- compiler.compile(None, files, kind, log)
- }
-
- /* This method returns true iff compilation succeeds.
- */
- def shouldCompile(out: File, files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- compiler.compile(Some(out), files, kind, log)
- }
-
- /* This method returns true iff compilation fails
- * _and_ the compiler does _not_ crash or loop.
- *
- * If the compiler crashes, this method returns false.
- */
- def shouldFailCompile(files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- !compiler.compile(None, files, kind, log)
- }
-
- /* This method returns true iff compilation fails
- * _and_ the compiler does _not_ crash or loop.
- *
- * If the compiler crashes, this method returns false.
- */
- def shouldFailCompile(out: File, files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- !compiler.compile(Some(out), files, kind, log)
- }
+ private def newCompiler = new DirectCompiler(fileManager)
+ def attemptCompile(outdir: Option[File], sources: List[File], kind: String, log: File): CompilationOutcome =
+ newCompiler.compile(outdir, sources, kind, log)
}
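
A standalone sketch of the `merge` lattice introduced above, re-declared here so it runs without partest on the classpath (folding over several rounds: a crash dominates, a failure overrides success):

object OutcomeDemo {
  sealed abstract class CompilationOutcome {
    def merge(other: CompilationOutcome): CompilationOutcome
  }
  case object CompileSuccess extends CompilationOutcome {
    def merge(other: CompilationOutcome) = other
  }
  case object CompileFailed extends CompilationOutcome {
    def merge(other: CompilationOutcome) = if (other eq CompileSuccess) this else other
  }
  case object CompilerCrashed extends CompilationOutcome {
    def merge(other: CompilationOutcome) = this
  }

  def main(args: Array[String]): Unit = {
    val rounds = List[CompilationOutcome](CompileSuccess, CompileFailed, CompileSuccess)
    println(rounds reduceLeft (_ merge _))                                                 // CompileFailed
    println(List[CompilationOutcome](CompilerCrashed, CompileSuccess) reduceLeft (_ merge _)) // CompilerCrashed
  }
}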
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index 8d239a84bd..b270a6b65a 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -84,6 +84,7 @@ class ConsoleFileManager extends FileManager {
latestFile = testClassesDir.parent / "bin"
latestLibFile = testClassesDir / "library"
latestActorsFile = testClassesDir / "library" / "actors"
+ latestActMigFile = testClassesDir / "actors-migration"
latestCompFile = testClassesDir / "compiler"
latestPartestFile = testClassesDir / "partest"
latestFjbgFile = testParent / "lib" / "fjbg.jar"
@@ -94,6 +95,7 @@ class ConsoleFileManager extends FileManager {
latestFile = dir / "bin"
latestLibFile = dir / "lib/scala-library.jar"
latestActorsFile = dir / "lib/scala-actors.jar"
+ latestActMigFile = dir / "lib/scala-actors-migration.jar"
latestCompFile = dir / "lib/scala-compiler.jar"
latestPartestFile = dir / "lib/scala-partest.jar"
latestFjbgFile = testParent / "lib" / "fjbg.jar"
@@ -104,6 +106,7 @@ class ConsoleFileManager extends FileManager {
latestFile = prefixFile("build/quick/bin")
latestLibFile = prefixFile("build/quick/classes/library")
latestActorsFile = prefixFile("build/quick/classes/library/actors")
+ latestActMigFile = prefixFile("build/quick/classes/actors-migration")
latestCompFile = prefixFile("build/quick/classes/compiler")
latestPartestFile = prefixFile("build/quick/classes/partest")
}
@@ -114,6 +117,7 @@ class ConsoleFileManager extends FileManager {
latestFile = prefixFileWith(p, "bin")
latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
+ latestActMigFile = prefixFileWith(p, "lib/scala-actors-migration.jar")
latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
}
@@ -123,6 +127,7 @@ class ConsoleFileManager extends FileManager {
latestFile = prefixFile("dists/latest/bin")
latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
+ latestActMigFile = prefixFile("dists/latest/lib/scala-actors-migration.jar")
latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
}
@@ -132,6 +137,7 @@ class ConsoleFileManager extends FileManager {
latestFile = prefixFile("build/pack/bin")
latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
+ latestActMigFile = prefixFile("build/pack/lib/scala-actors-migration.jar")
latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
}
@@ -167,16 +173,19 @@ class ConsoleFileManager extends FileManager {
LATEST_COMP = latestCompFile.getAbsolutePath
LATEST_PARTEST = latestPartestFile.getAbsolutePath
LATEST_ACTORS = latestActorsFile.getAbsolutePath
+ LATEST_ACTORS_MIGRATION = latestActMigFile.getAbsolutePath
}
var LATEST_LIB: String = ""
var LATEST_COMP: String = ""
var LATEST_PARTEST: String = ""
var LATEST_ACTORS: String = ""
+ var LATEST_ACTORS_MIGRATION: String = ""
var latestFile: File = _
var latestLibFile: File = _
var latestActorsFile: File = _
+ var latestActMigFile: File = _
var latestCompFile: File = _
var latestPartestFile: File = _
var latestFjbgFile: File = _
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
index 85060ad633..fb3cab52c4 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
@@ -213,7 +213,12 @@ class ConsoleRunner extends DirectRunner {
* @return (success count, failure count)
*/
def testCheckAll(enabledSets: List[TestSet]): (Int, Int) = {
- def kindOf(f: File) = (srcDir relativize Path(f).toCanonical).segments.head
+ def kindOf(f: File) = {
+ (srcDir relativize Path(f).toCanonical).segments match {
+ case (".." :: "scaladoc" :: xs) => xs.head
+ case xs => xs.head
+ }
+ }
val (valid, invalid) = testFiles partition (x => testSetKinds contains kindOf(x))
invalid foreach (x => NestUI.failure(
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index 20f435cfbb..815c27f567 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -61,10 +61,10 @@ trait DirectRunner {
val latestLibFile = new File(fileManager.LATEST_LIB)
val latestPartestFile = new File(fileManager.LATEST_PARTEST)
val latestActorsFile = new File(fileManager.LATEST_ACTORS)
-
+ val latestActMigFile = new File(fileManager.LATEST_ACTORS_MIGRATION)
val scalacheckURL = PathSettings.scalaCheck.toURL
val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
- scalacheckURL :: (List(latestCompFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL))
+ scalacheckURL :: (List(latestCompFile, latestLibFile, latestActorsFile, latestActMigFile, latestPartestFile).map(_.toURI.toURL))
)
Output.init()
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 6d9e64730f..cf7160f521 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -63,6 +63,7 @@ trait FileManager extends FileUtil {
var LATEST_COMP: String
var LATEST_PARTEST: String
var LATEST_ACTORS: String
+ var LATEST_ACTORS_MIGRATION: String
var showDiff = false
var updateCheck = false
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index a0511774a9..a5d5952ff7 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -48,9 +48,9 @@ class ReflectiveRunner {
new ConsoleFileManager
import fileManager.
- { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile }
+ { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile, latestActMigFile }
val files =
- Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
+ Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile, latestActMigFile) map (x => io.File(x))
val sepUrls = files map (_.toURL)
var sepLoader = new URLClassLoader(sepUrls, null)
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 750e270c18..14e2dc3df9 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -16,6 +16,7 @@ object SBTRunner extends DirectRunner {
var LATEST_COMP: String = _
var LATEST_PARTEST: String = _
var LATEST_ACTORS: String = _
+ var LATEST_ACTORS_MIGRATION: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
@@ -54,15 +55,18 @@ object SBTRunner extends DirectRunner {
val config = parseArgs(args, CommandLineOptions())
fileManager.SCALAC_OPTS = config.scalacOptions
fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
+
+ def findClasspath(jar: String, name: String): Option[String] = {
+ val optJar = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+jar+".*\\.jar"))).headOption
+ val optClassDir = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+name+File.separator+"classes"))).headOption
+ optJar orElse optClassDir
+ }
// Find scala library jar file...
- val lib: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-library.*\\.jar")).headOption
- fileManager.LATEST_LIB = lib getOrElse sys.error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
- val comp: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-compiler.*\\.jar")).headOption
- fileManager.LATEST_COMP = comp getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
- val partest: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-partest.*\\.jar")).headOption
- fileManager.LATEST_PARTEST = partest getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
- val actors: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-actors.*\\.jar")).headOption
- fileManager.LATEST_ACTORS = actors getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_LIB = findClasspath("scala-library", "scala-library") getOrElse sys.error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_COMP = findClasspath("scala-compiler", "scala-compiler") getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_PARTEST = findClasspath("scala-partest", "partest") getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_ACTORS = findClasspath("scala-actors", "actors") getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_ACTORS_MIGRATION = findClasspath("scala-actors-migration", "actors-migration") getOrElse sys.error("No scala-actors-migration found! Classpath = " + fileManager.CLASSPATH)
// TODO - Do something useful here!!!
fileManager.JAVAC_CMD = "javac"
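findClasspath above folds five near-identical lookups into one helper: each artifact may sit on the classpath either as a packaged jar or as a class directory whose path ends in <name>/classes. A stand-alone sketch of the same lookup (the helper name and classpath argument are illustrative):

    import java.io.File
    import java.util.regex.Pattern

    def findOnClasspath(classpath: String, jar: String, name: String): Option[String] = {
      val entries = classpath split File.pathSeparator
      val sep     = Pattern.quote(File.separator)  // quoted so the regex is also valid on Windows
      val optJar      = entries find (_ matches ".*" + jar + ".*\\.jar")
      val optClassDir = entries find (_ matches ".*" + name + sep + "classes")
      optJar orElse optClassDir                    // prefer the packaged jar when both are present
    }

    // findOnClasspath(sys.props("java.class.path"), "scala-actors-migration", "actors-migration")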
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index 00ee8ba857..40325c6375 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -56,6 +56,7 @@ class ScalaCheckFileManager(val origmanager: FileManager) extends FileManager {
var LATEST_COMP: String = origmanager.LATEST_COMP
var LATEST_PARTEST: String = origmanager.LATEST_PARTEST
var LATEST_ACTORS: String = origmanager.LATEST_ACTORS
+ var LATEST_ACTORS_MIGRATION: String = origmanager.LATEST_ACTORS_MIGRATION
}
object Output {
@@ -269,7 +270,7 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
outDir.jfile
}
- private def javac(outDir: File, files: List[File], output: File): Boolean = {
+ private def javac(outDir: File, files: List[File], output: File): CompilationOutcome = {
// compile using command-line javac compiler
val args = Seq(
javacCmd,
@@ -279,8 +280,8 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
join(outDir.toString, CLASSPATH)
) ++ files.map("" + _)
- try runCommand(args, output)
- catch exHandler(output, "javac command failed:\n" + args.map(" " + _ + "\n").mkString + "\n")
+ try if (runCommand(args, output)) CompileSuccess else CompileFailed
+ catch exHandler(output, "javac command failed:\n" + args.map(" " + _ + "\n").mkString + "\n", CompilerCrashed)
}
/** Runs command redirecting standard out and
@@ -357,13 +358,13 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
private def compareOutput(dir: File, logFile: File): String = {
val checkFile = getCheckFilePath(dir, kind)
- // if check file exists, compare with log file
val diff =
if (checkFile.canRead) compareFiles(logFile, checkFile.jfile)
else file2String(logFile)
+ // if check file exists, compare with log file
if (diff != "" && fileManager.updateCheck) {
- NestUI.verbose("output differs from log file: updating checkfile\n")
+ NestUI.verbose("Updating checkfile " + checkFile.jfile)
val toWrite = if (checkFile.exists) checkFile else getCheckFilePath(dir, "")
toWrite writeAll file2String(logFile)
""
@@ -388,10 +389,8 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
false
}
- private def exHandler(logFile: File): PartialFunction[Throwable, Boolean] =
- exHandler(logFile, "")
- private def exHandler(logFile: File, msg: String): PartialFunction[Throwable, Boolean] = {
- case e: Exception => logStackTrace(logFile, e, msg)
+ private def exHandler[T](logFile: File, msg: String, value: T): PartialFunction[Throwable, T] = {
+ case e: Exception => logStackTrace(logFile, e, msg) ; value
}
/** Runs a list of tests.
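The exHandler change above turns the boolean-only handler into a generic one that returns a caller-supplied fallback, so the same helper can yield CompilerCrashed for javac, false for scripts and ant runs, and so on. A sketch of the pattern, with a stand-in logger in place of logStackTrace (runStep and writeStackTrace below are illustrative):

    import java.io.File

    // PartialFunction that logs the exception and yields a caller-chosen fallback value.
    def handlerSketch[T](logFile: File, msg: String, fallback: T)
                        (logTo: (File, Throwable, String) => Unit): PartialFunction[Throwable, T] = {
      case e: Exception => logTo(logFile, e, msg); fallback
    }

    // try runStep() catch handlerSketch(logFile, "step failed:\n", false)(writeStackTrace)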
@@ -464,39 +463,38 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
}
else script(logFile, outDir)
}
- catch exHandler(logFile)
+ catch exHandler(logFile, "", false)
LogContext(logFile, swr, wr)
}
}
- def compileFilesIn(dir: File, logFile: File, outDir: File): Boolean = {
+ def groupedFiles(dir: File): List[List[File]] = {
val testFiles = dir.listFiles.toList filter isJavaOrScala
def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
val groups = (0 to 9).toList map (num => testFiles filter (f => isInGroup(f, num)))
val noGroupSuffix = testFiles filterNot (groups.flatten contains)
- def compileGroup(g: List[File]): Boolean = {
+ noGroupSuffix :: groups filterNot (_.isEmpty)
+ }
+
+ def compileFilesIn(dir: File, logFile: File, outDir: File): CompilationOutcome = {
+ def compileGroup(g: List[File]): CompilationOutcome = {
val (scalaFiles, javaFiles) = g partition isScala
val allFiles = javaFiles ++ scalaFiles
- // scala+java, then java, then scala
- (scalaFiles.isEmpty || compileMgr.shouldCompile(outDir, allFiles, kind, logFile) || fail(g)) && {
- (javaFiles.isEmpty || javac(outDir, javaFiles, logFile)) && {
- (scalaFiles.isEmpty || compileMgr.shouldCompile(outDir, scalaFiles, kind, logFile) || fail(scalaFiles))
- }
+ List(1, 2, 3).foldLeft(CompileSuccess: CompilationOutcome) {
+ case (CompileSuccess, 1) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile) // java + scala
+ case (CompileSuccess, 2) if javaFiles.nonEmpty => javac(outDir, javaFiles, logFile) // java
+ case (CompileSuccess, 3) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile) // scala
+ case (outcome, _) => outcome
}
}
-
- (noGroupSuffix.isEmpty || compileGroup(noGroupSuffix)) && (groups forall compileGroup)
- }
-
- def failCompileFilesIn(dir: File, logFile: File, outDir: File): Boolean = {
- val testFiles = dir.listFiles.toList
- val sourceFiles = testFiles filter isJavaOrScala
-
- sourceFiles.isEmpty || compileMgr.shouldFailCompile(outDir, sourceFiles, kind, logFile) || fail(testFiles filter isScala)
+ groupedFiles(dir).foldLeft(CompileSuccess: CompilationOutcome) {
+ case (CompileSuccess, files) => compileGroup(files)
+ case (outcome, _) => outcome
+ }
}
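compileFilesIn now threads a CompilationOutcome through foldLeft instead of chaining booleans, so the first failure or crash short-circuits the remaining groups. A self-contained sketch of the idea, with the outcome type reduced to an assumed three-case ADT and the compile steps abstracted as thunks:

    // Assumed, simplified stand-in for partest's CompilationOutcome.
    sealed trait CompilationOutcome
    case object CompileSuccess  extends CompilationOutcome
    case object CompileFailed   extends CompilationOutcome
    case object CompilerCrashed extends CompilationOutcome

    // Run steps in order; once anything other than CompileSuccess appears,
    // later steps are skipped and that outcome is carried to the end.
    def runSteps(steps: List[() => CompilationOutcome]): CompilationOutcome =
      steps.foldLeft(CompileSuccess: CompilationOutcome) {
        case (CompileSuccess, step) => step()
        case (outcome, _)           => outcome
      }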
def runTestCommon(file: File, expectFailure: Boolean)(
@@ -504,15 +502,14 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
onFail: (File, File) => Unit = (_, _) => ()): LogContext =
{
runInContext(file, (logFile: File, outDir: File) => {
- val result =
- if (file.isDirectory) {
- if (expectFailure) failCompileFilesIn(file, logFile, outDir)
- else compileFilesIn(file, logFile, outDir)
- }
- else {
- if (expectFailure) compileMgr.shouldFailCompile(List(file), kind, logFile)
- else compileMgr.shouldCompile(List(file), kind, logFile)
- }
+ val outcome = (
+ if (file.isDirectory) compileFilesIn(file, logFile, outDir)
+ else compileMgr.attemptCompile(None, List(file), kind, logFile)
+ )
+ val result = (
+ if (expectFailure) outcome.isNegative
+ else outcome.isPositive
+ )
if (result) onSuccess(logFile, outDir)
else { onFail(logFile, outDir) ; false }
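With compilation reduced to one outcome value, runTestCommon maps it to a pass/fail boolean: negative tests pass on a compile failure, positive tests on success. A sketch reusing the CompilationOutcome stand-in above; isNegative/isPositive are assumed to mirror this split, and the sketch treats a compiler crash as passing neither kind of test:

    def passed(outcome: CompilationOutcome, expectFailure: Boolean): Boolean =
      if (expectFailure) outcome == CompileFailed    // a crash is not an "expected" failure
      else               outcome == CompileSuccess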
@@ -551,7 +548,7 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
)
try runCommand(cmd, output)
- catch exHandler(output, "ant command '" + cmd + "' failed:\n")
+ catch exHandler(output, "ant command '" + cmd + "' failed:\n", false)
}
def runAntTest(file: File): LogContext = {
@@ -884,7 +881,7 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
)
// 4. compile testFile
- val ok = compileMgr.shouldCompile(List(testFile), kind, logFile)
+ val ok = compileMgr.attemptCompile(None, List(testFile), kind, logFile) eq CompileSuccess
NestUI.verbose("compilation of " + testFile + (if (ok) "succeeded" else "failed"))
if (ok) {
execTest(outDir, logFile) && {
@@ -911,7 +908,8 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
else {
val resFile = results.head
// 2. Compile source file
- if (!compileMgr.shouldCompile(outDir, sources, kind, logFile)) {
+
+ if (!compileMgr.attemptCompile(Some(outDir), sources, kind, logFile).isPositive) {
NestUI.normal("compilerMgr failed to compile %s to %s".format(sources mkString ", ", outDir))
false
}
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
index 91d56b0aec..9bb235f917 100644
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ b/src/scalacheck/org/scalacheck/Arbitrary.scala
@@ -263,7 +263,7 @@ object Arbitrary {
): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
/** Arbitrary instance of any array. */
- implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassManifest[T]
+ implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassTag[T]
): Arbitrary[Array[T]] = Arbitrary(containerOf[Array,T](arbitrary[T]))
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
index 6378e72d4f..662bc6146b 100644
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ b/src/scalacheck/org/scalacheck/util/Buildable.scala
@@ -30,7 +30,7 @@ object Buildable {
def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
}
- implicit def buildableArray[T](implicit cm: ClassManifest[T]) =
+ implicit def buildableArray[T](implicit t: ClassTag[T]) =
new Buildable[T,Array] {
def builder = mutable.ArrayBuilder.make[T]
}
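The two scalacheck hunks above are a straight ClassManifest-to-ClassTag rename, ClassTag being the 2.10 replacement that generic array creation needs. A tiny illustrative example:

    import scala.reflect.ClassTag

    // Building an Array[T] generically needs the element class at runtime,
    // which the implicit ClassTag supplies.
    def triple[T: ClassTag](t: T): Array[T] = Array(t, t, t)

    // triple("a") yields Array("a", "a", "a")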
diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala
index 02b940ab16..9b0748509f 100644
--- a/src/scalap/scala/tools/scalap/JavaWriter.scala
+++ b/src/scalap/scala/tools/scalap/JavaWriter.scala
@@ -128,33 +128,12 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
def printMethod(flags: Int, name: Int, tpe: Int, attribs: List[cf.Attribute]) {
if (getName(name) == "<init>")
print(flagsToStr(false, flags))
- attribs find {
- case cf.Attribute(name, _) => getName(name) == "JacoMeta"
- } match {
- case Some(cf.Attribute(_, data)) =>
- val mp = new MetaParser(getName(
- ((data(0) & 0xff) << 8) + (data(1) & 0xff)).trim())
- mp.parse match {
- case None =>
- if (getName(name) == "<init>") {
- print("def this" + getType(tpe) + ";").newline
- } else {
- print("def " + NameTransformer.decode(getName(name)))
- print(getType(tpe) + ";").newline
- }
- case Some(str) =>
- if (getName(name) == "<init>")
- print("def this" + str + ";").newline
- else
- print("def " + NameTransformer.decode(getName(name)) + str + ";").newline
- }
- case None =>
- if (getName(name) == "<init>") {
- print("def this" + getType(tpe) + ";").newline
- } else {
- print("def " + NameTransformer.decode(getName(name)))
- print(getType(tpe) + ";").newline
- }
+ if (getName(name) == "<init>") {
+ print("def this" + getType(tpe) + ";").newline
+ }
+ else {
+ print("def " + NameTransformer.decode(getName(name)))
+ print(getType(tpe) + ";").newline
}
attribs find {
case cf.Attribute(name, _) => getName(name) == "Exceptions"
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index aa454934c1..411a87e4bb 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -156,7 +156,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
val printer = new ScalaSigPrinter(stream, printPrivates)
printer.printMethodType(m.infoType, false)(())
baos.toString
- case None =>
+ case _ =>
""
}
}
diff --git a/src/swing/scala/swing/Font.scala.disabled b/src/swing/scala/swing/Font.scala.disabled
index 6eebd667bd..9e21eb859c 100644
--- a/src/swing/scala/swing/Font.scala.disabled
+++ b/src/swing/scala/swing/Font.scala.disabled
@@ -1,36 +1,36 @@
package scala.swing
-/*object Font {
- def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
- def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
+/*object Font {
+ def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
+ def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
def decode(str: String) = java.awt.Font.decode(str)
-
+
/* TODO: finish implementation
/**
* See [java.awt.Font.getFont].
*/
- def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
+ def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
java.awt.Font.getFont(ImmutableMapWrapper(attributes))
-
+
import java.{util => ju}
- private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(m : ClassManifest[A]) extends ju.AbstractMap[A, B] {
+ private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(t : ClassTag[A]) extends ju.AbstractMap[A, B] {
self =>
override def size = underlying.size
- override def put(k : A, v : B) =
+ override def put(k : A, v : B) =
throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
- override def remove(k : AnyRef) =
+ override def remove(k : AnyRef) =
throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-
+
override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
def size = self.size
def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
val ui = underlying.iterator
var prev : Option[A] = None
-
+
def hasNext = ui.hasNext
-
+
def next = {
val (k, v) = ui.next
prev = Some(k)
@@ -44,7 +44,7 @@ package scala.swing
}
}
}
-
+
def remove = prev match {
case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
case _ => throw new IllegalStateException("next must be called at least once before remove")
@@ -53,7 +53,7 @@ package scala.swing
}
}
*/
-
+
/**
* See [java.awt.Font.getFont].
*/
@@ -62,9 +62,9 @@ package scala.swing
* See [java.awt.Font.getFont].
*/
def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
-
+
def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
def Point(x: Int, y: Int) = new Point(x, y)
- def Dimension(x: Int, y: Int) = new Dimension(x, y)
+ def Dimension(x: Int, y: Int) = new Dimension(x, y)
}*/ \ No newline at end of file
diff --git a/src/yourkit/scala/tools/util/YourkitProfiling.scala b/src/yourkit/scala/tools/util/YourkitProfiling.scala
deleted file mode 100644
index 677a85112b..0000000000
--- a/src/yourkit/scala/tools/util/YourkitProfiling.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package scala.tools
-package util
-
-import com.yourkit.api._
-import com.yourkit.runtime._
-import nsc.io._
-
-class YourkitProfiling extends Profiling {
- @volatile private var active = false
- @volatile private var freq: Option[Int] = None
- lazy val controller = new Controller
-
- def defaultFreq = 100
- def allocationFreq = freq
- def setAllocationFreq(x: Int) = freq = if (x <= 0) None else Some(x)
-
- def startRecordingAllocations() = {
- controller.startAllocationRecording(true, freq getOrElse defaultFreq, false, 0)
- }
- def stopRecordingAllocations() = {
- controller.stopAllocationRecording()
- }
-
- def startProfiling(): Unit = {
- if (isActive)
- return
-
- active = true
- daemonize {
- try {
- controller.startCPUProfiling(ProfilingModes.CPU_SAMPLING, Controller.DEFAULT_FILTERS)
- if (freq.isDefined)
- startRecordingAllocations()
- }
- catch {
- case _: PresentableException => () // if it's already running, no big deal
- }
- }
- }
-
- def captureSnapshot() = {
- daemonize(controller.captureSnapshot(ProfilingModes.SNAPSHOT_WITH_HEAP))
- }
-
- def stopProfiling() = {
- try {
- if (freq.isDefined)
- stopRecordingAllocations()
-
- controller.stopCPUProfiling()
- }
- catch {
- case _: PresentableException => () // if it's already running, no big deal
- }
- finally active = false
- }
-
- def advanceGeneration(desc: String) {
- controller.advanceGeneration(desc)
- }
-
- def isActive = active
-}