Diffstat (limited to 'src/compiler')
-rw-r--r--src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala31
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Errors.scala143
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Resolvers.scala72
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Validators.scala199
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Aliases.scala (renamed from src/compiler/scala/reflect/macros/runtime/Aliases.scala)5
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Context.scala29
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Enclosures.scala32
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Evals.scala (renamed from src/compiler/scala/reflect/macros/runtime/Evals.scala)4
-rw-r--r--src/compiler/scala/reflect/macros/contexts/ExprUtils.scala (renamed from src/compiler/scala/reflect/macros/runtime/ExprUtils.scala)3
-rw-r--r--src/compiler/scala/reflect/macros/contexts/FrontEnds.scala (renamed from src/compiler/scala/reflect/macros/runtime/FrontEnds.scala)4
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Infrastructure.scala (renamed from src/compiler/scala/reflect/macros/runtime/Infrastructure.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Names.scala26
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Parsers.scala20
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Reifiers.scala (renamed from src/compiler/scala/reflect/macros/runtime/Reifiers.scala)8
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Traces.scala (renamed from src/compiler/scala/reflect/macros/runtime/Traces.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Typers.scala (renamed from src/compiler/scala/reflect/macros/runtime/Typers.scala)14
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Context.scala28
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Enclosures.scala24
-rw-r--r--src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala38
-rw-r--r--src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala75
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Names.scala17
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Parsers.scala24
-rw-r--r--src/compiler/scala/reflect/macros/runtime/package.scala5
-rw-r--r--src/compiler/scala/reflect/macros/util/Helpers.scala96
-rw-r--r--src/compiler/scala/reflect/reify/Errors.scala6
-rw-r--r--src/compiler/scala/reflect/reify/Phases.scala5
-rw-r--r--src/compiler/scala/reflect/reify/Reifier.scala33
-rw-r--r--src/compiler/scala/reflect/reify/States.scala1
-rw-r--r--src/compiler/scala/reflect/reify/Taggers.scala11
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala5
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenNames.scala5
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenPositions.scala3
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenSymbols.scala19
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTrees.scala60
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTypes.scala7
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenUtils.scala64
-rw-r--r--src/compiler/scala/reflect/reify/package.scala20
-rw-r--r--src/compiler/scala/reflect/reify/phases/Calculate.scala3
-rw-r--r--src/compiler/scala/reflect/reify/phases/Metalevels.scala23
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reify.scala6
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala77
-rw-r--r--src/compiler/scala/reflect/reify/utils/Extractors.scala26
-rw-r--r--src/compiler/scala/reflect/reify/utils/NodePrinters.scala24
-rw-r--r--src/compiler/scala/reflect/reify/utils/SymbolTables.scala16
-rw-r--r--src/compiler/scala/tools/ant/Pack200Task.scala6
-rw-r--r--src/compiler/scala/tools/ant/Same.scala7
-rw-r--r--src/compiler/scala/tools/ant/ScalaTool.scala18
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala23
-rw-r--r--src/compiler/scala/tools/ant/Scaladoc.scala695
-rw-r--r--src/compiler/scala/tools/ant/antlib.xml2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Break.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Compilers.scala2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Make.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/ScalacFork.scala12
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Settings.scala16
-rw-r--r--src/compiler/scala/tools/ant/sabbus/TaskArgs.scala2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Use.scala7
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-unix.tmpl5
-rw-r--r--src/compiler/scala/tools/cmd/CommandLine.scala14
-rw-r--r--src/compiler/scala/tools/cmd/CommandLineParser.scala72
-rw-r--r--src/compiler/scala/tools/cmd/Demo.scala84
-rw-r--r--src/compiler/scala/tools/cmd/FromString.scala26
-rw-r--r--src/compiler/scala/tools/cmd/Interpolation.scala3
-rw-r--r--src/compiler/scala/tools/cmd/Opt.scala8
-rw-r--r--src/compiler/scala/tools/cmd/Parser.scala52
-rw-r--r--src/compiler/scala/tools/cmd/Reference.scala25
-rw-r--r--src/compiler/scala/tools/cmd/Spec.scala2
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala134
-rw-r--r--src/compiler/scala/tools/cmd/gen/Codegen.scala8
-rw-r--r--src/compiler/scala/tools/cmd/gen/CodegenSpec.scala6
-rw-r--r--src/compiler/scala/tools/cmd/package.scala15
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala78
-rw-r--r--src/compiler/scala/tools/nsc/CompileClient.scala10
-rw-r--r--src/compiler/scala/tools/nsc/CompileServer.scala21
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala18
-rw-r--r--src/compiler/scala/tools/nsc/CompilerCommand.scala44
-rw-r--r--src/compiler/scala/tools/nsc/CompilerRun.scala21
-rw-r--r--src/compiler/scala/tools/nsc/ConsoleWriter.scala4
-rw-r--r--src/compiler/scala/tools/nsc/Driver.scala13
-rw-r--r--src/compiler/scala/tools/nsc/EvalLoop.scala2
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerCommand.scala2
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerSettings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala912
-rw-r--r--src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala30
-rw-r--r--src/compiler/scala/tools/nsc/Interpreter.scala12
-rw-r--r--src/compiler/scala/tools/nsc/InterpreterLoop.scala12
-rw-r--r--src/compiler/scala/tools/nsc/Main.scala80
-rw-r--r--src/compiler/scala/tools/nsc/MainBench.scala16
-rw-r--r--src/compiler/scala/tools/nsc/MainGenericRunner.scala108
-rw-r--r--src/compiler/scala/tools/nsc/MainTokenMetric.scala13
-rw-r--r--src/compiler/scala/tools/nsc/ObjectRunner.scala6
-rw-r--r--src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala6
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala108
-rw-r--r--src/compiler/scala/tools/nsc/Phases.scala46
-rw-r--r--src/compiler/scala/tools/nsc/Properties.scala5
-rw-r--r--src/compiler/scala/tools/nsc/ScalaDoc.scala77
-rw-r--r--src/compiler/scala/tools/nsc/ScriptRunner.scala33
-rw-r--r--src/compiler/scala/tools/nsc/SubComponent.scala25
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala51
-rw-r--r--src/compiler/scala/tools/nsc/ast/NodePrinters.scala30
-rw-r--r--src/compiler/scala/tools/nsc/ast/Positions.scala11
-rw-r--r--src/compiler/scala/tools/nsc/ast/Printers.scala121
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala43
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala216
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala208
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala74
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala112
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala90
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala1440
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala416
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala27
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala92
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Tokens.scala38
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala502
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala211
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala163
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala37
-rw-r--r--src/compiler/scala/tools/nsc/backend/MSILPlatform.scala69
-rw-r--r--src/compiler/scala/tools/nsc/backend/Platform.scala21
-rw-r--r--src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala32
-rw-r--r--src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala145
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala23
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala822
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala94
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala16
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala206
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala109
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala134
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Primitives.scala29
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Printers.scala66
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Repository.scala13
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala73
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala12
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala94
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala22
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala15
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala100
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala1234
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala716
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala1323
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala725
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala724
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala395
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala880
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala105
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala822
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala62
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala381
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala1921
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala32
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala142
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala2358
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala29
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala622
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala50
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala21
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala119
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Changes.scala227
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala254
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Files.scala177
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocFactory.scala142
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocParser.scala74
-rw-r--r--src/compiler/scala/tools/nsc/doc/Index.scala17
-rw-r--r--src/compiler/scala/tools/nsc/doc/Settings.scala365
-rw-r--r--src/compiler/scala/tools/nsc/doc/Uncompilable.scala51
-rw-r--r--src/compiler/scala/tools/nsc/doc/Universe.scala16
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala955
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/base/LinkTo.scala15
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala206
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/base/comment/Body.scala95
-rw-r--r--src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala134
-rw-r--r--src/compiler/scala/tools/nsc/doc/doclet/Generator.scala30
-rw-r--r--src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala21
-rw-r--r--src/compiler/scala/tools/nsc/doc/doclet/Universer.scala21
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/Doclet.scala19
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala152
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala224
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/Page.scala108
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala286
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Index.scala142
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala70
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala58
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Source.scala128
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Template.scala977
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala53
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala66
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala511
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala228
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.pngbin6232 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.pngbin6220 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/class.pngbin3357 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.pngbin7516 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.pngbin3910 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.pngbin9006 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gifbin1206 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gifbin167 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gifbin1544 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gifbin1341 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css143
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js324
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.pngbin1692 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psdbin30823 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gifbin1462 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.pngbin1803 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psdbin31295 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gifbin1324 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gifbin1104 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.pngbin965 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gifbin1366 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gifbin1115 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css338
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js536
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js6
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js2
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js5486
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js4
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.pngbin1198 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.pngbin2441 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/object.pngbin3356 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.pngbin7653 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.pngbin3903 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.pngbin9158 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.pngbin9200 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.pngbin9158 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gifbin1145 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gifbin1118 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gifbin1145 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/package.pngbin3335 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.pngbin7312 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gifbin1201 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js10
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css30
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.pngbin3186 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psdbin28904 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js71
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.pngbin1150 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.pngbin646 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.pngbin1380 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.pngbin1864 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.pngbin1434 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.pngbin1965 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gifbin1214 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gifbin1209 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css848
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js466
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js14
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.pngbin3374 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.pngbin7410 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.pngbin3882 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.pngbin8967 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/type.pngbin1445 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.pngbin4236 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.pngbin1841 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai6376
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.pngbin4969 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gifbin1206 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.pngbin1879 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gifbin1206 -> 0 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt1
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala114
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/Entity.scala631
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala60
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala63
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala1103
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala609
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala326
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala27
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/model/TreeFactory.scala95
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala27
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala20
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/Visibility.scala39
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala146
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala261
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala271
-rw-r--r--src/compiler/scala/tools/nsc/interactive/BuildManager.scala93
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala481
-rw-r--r--src/compiler/scala/tools/nsc/interactive/ContextTrees.scala165
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala1214
-rw-r--r--src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala47
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Picklers.scala191
-rw-r--r--src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala51
-rw-r--r--src/compiler/scala/tools/nsc/interactive/REPL.scala222
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RangePositions.scala285
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala355
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Response.scala105
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala58
-rw-r--r--src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala200
-rw-r--r--src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala103
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala129
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala70
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/Tester.scala208
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala122
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala133
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala35
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala62
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala19
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala15
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala22
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala29
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala12
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala19
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala107
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala41
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ByteCode.scala63
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala50
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CommandLine.scala14
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Completion.scala51
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala83
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala86
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala70
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Delimited.scala44
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala107
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Formatting.scala35
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoop.scala966
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala125
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/IMain.scala1235
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ISettings.scala61
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Imports.scala195
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala57
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala372
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineReader.scala76
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Logger.scala18
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala107
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala228
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/NamedParam.scala49
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Naming.scala98
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Parsed.scala69
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Pasted.scala101
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Phased.scala162
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Power.scala430
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala61
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala57
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplProps.scala31
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala34
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala35
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplVals.scala83
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Results.scala22
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/RichClass.scala39
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala43
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/package.scala49
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala84
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/session/History.scala28
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala49
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala62
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/session/package.scala23
-rw-r--r--src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala21
-rw-r--r--src/compiler/scala/tools/nsc/io/Fileish.scala33
-rw-r--r--src/compiler/scala/tools/nsc/io/Jar.scala38
-rw-r--r--src/compiler/scala/tools/nsc/io/Lexer.scala301
-rw-r--r--src/compiler/scala/tools/nsc/io/MsilFile.scala18
-rw-r--r--src/compiler/scala/tools/nsc/io/Pickler.scala454
-rw-r--r--src/compiler/scala/tools/nsc/io/PrettyWriter.scala41
-rw-r--r--src/compiler/scala/tools/nsc/io/Replayer.scala74
-rw-r--r--src/compiler/scala/tools/nsc/io/Socket.scala12
-rw-r--r--src/compiler/scala/tools/nsc/io/SourceReader.scala7
-rw-r--r--src/compiler/scala/tools/nsc/io/package.scala31
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala220
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala290
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaTokens.scala6
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatchSupport.scala138
-rw-r--r--src/compiler/scala/tools/nsc/matching/Matrix.scala259
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala193
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala870
-rw-r--r--src/compiler/scala/tools/nsc/matching/PatternBindings.scala137
-rw-r--r--src/compiler/scala/tools/nsc/matching/Patterns.scala499
-rw-r--r--src/compiler/scala/tools/nsc/package.scala11
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala190
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginComponent.scala8
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginDescription.scala80
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala15
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugins.scala58
-rw-r--r--src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala37
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala23
-rw-r--r--src/compiler/scala/tools/nsc/reporters/Reporter.scala15
-rw-r--r--src/compiler/scala/tools/nsc/reporters/StoreReporter.scala2
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/Mixer.scala102
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala23
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala10
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsSettings.scala13
-rw-r--r--src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala77
-rw-r--r--src/compiler/scala/tools/nsc/settings/AestheticSettings.scala39
-rw-r--r--src/compiler/scala/tools/nsc/settings/FscSettings.scala6
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala146
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala155
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaVersion.scala53
-rw-r--r--src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala11
-rw-r--r--src/compiler/scala/tools/nsc/settings/Warnings.scala25
-rw-r--r--src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala8
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala123
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala10
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala7
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala816
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala415
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala1028
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala137
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala850
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala58
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala521
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala1137
-rw-r--r--src/compiler/scala/tools/nsc/transform/Delambdafy.scala467
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala550
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala182
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala71
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala65
-rw-r--r--src/compiler/scala/tools/nsc/transform/InfoTransform.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/InlineErasure.scala10
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala140
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala29
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala309
-rw-r--r--src/compiler/scala/tools/nsc/transform/OverridingPairs.scala224
-rw-r--r--src/compiler/scala/tools/nsc/transform/PostErasure.scala60
-rw-r--r--src/compiler/scala/tools/nsc/transform/SampleTransform.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala547
-rw-r--r--src/compiler/scala/tools/nsc/transform/Statics.scala52
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala256
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala187
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypingTransformers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala388
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Logic.scala66
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala112
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala106
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala37
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala64
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala733
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala157
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala86
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala155
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala49
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala154
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Solving.scala32
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Adaptations.scala33
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala31
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala295
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Checkable.scala67
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala422
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala1375
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala15
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala124
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala20
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala586
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala1678
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala1134
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala217
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Modes.scala140
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala493
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala71
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala374
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala986
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala162
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala80
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala170
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Tags.scala21
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala376
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala190
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala)66
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala4358
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala168
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala121
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Variances.scala94
-rw-r--r--src/compiler/scala/tools/nsc/util/CharArrayReader.scala57
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala111
-rw-r--r--src/compiler/scala/tools/nsc/util/CommandLineParser.scala144
-rwxr-xr-xsrc/compiler/scala/tools/nsc/util/DocStrings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/Exceptional.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/FreshNameCreator.scala45
-rw-r--r--src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala66
-rw-r--r--src/compiler/scala/tools/nsc/util/MsilClassPath.scala170
-rw-r--r--src/compiler/scala/tools/nsc/util/MultiHashMap.scala9
-rw-r--r--src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala168
-rw-r--r--src/compiler/scala/tools/nsc/util/ShowPickled.scala24
-rw-r--r--src/compiler/scala/tools/nsc/util/SimpleTracer.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/StackTracing.scala76
-rw-r--r--src/compiler/scala/tools/nsc/util/TreeSet.scala64
-rw-r--r--src/compiler/scala/tools/nsc/util/WorkScheduler.scala6
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala96
-rw-r--r--src/compiler/scala/tools/reflect/FastTrack.scala58
-rw-r--r--src/compiler/scala/tools/reflect/FrontEnd.scala2
-rw-r--r--src/compiler/scala/tools/reflect/MacroImplementations.scala18
-rw-r--r--src/compiler/scala/tools/reflect/ReflectMain.scala3
-rw-r--r--src/compiler/scala/tools/reflect/StdTags.scala7
-rw-r--r--src/compiler/scala/tools/reflect/ToolBox.scala14
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala223
-rw-r--r--src/compiler/scala/tools/reflect/WrappedProperties.scala1
-rw-r--r--src/compiler/scala/tools/reflect/package.scala5
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Holes.scala204
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala196
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala177
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala60
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala423
-rw-r--r--src/compiler/scala/tools/util/Javap.scala157
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala191
-rw-r--r--src/compiler/scala/tools/util/SocketServer.scala7
-rw-r--r--src/compiler/scala/tools/util/VerifyClass.scala2
497 files changed, 25918 insertions, 63337 deletions
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
new file mode 100644
index 0000000000..2e82e34bd9
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -0,0 +1,31 @@
+package scala.reflect.macros
+package compiler
+
+import scala.tools.nsc.Global
+
+abstract class DefaultMacroCompiler extends Resolvers
+ with Validators
+ with Errors {
+ val global: Global
+ import global._
+
+ val typer: global.analyzer.Typer
+ val context = typer.context
+
+ val macroDdef: DefDef
+ lazy val macroDef = macroDdef.symbol
+
+ private case class MacroImplResolutionException(pos: Position, msg: String) extends Exception
+ def abort(pos: Position, msg: String) = throw MacroImplResolutionException(pos, msg)
+
+ def resolveMacroImpl: Tree = {
+ try {
+ validateMacroImplRef()
+ macroImplRef
+ } catch {
+ case MacroImplResolutionException(pos, msg) =>
+ context.error(pos, msg)
+ EmptyTree
+ }
+ }
+} \ No newline at end of file
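
To make the shape that resolveMacroImpl works on concrete, here is a minimal sketch (not part of this commit; the names Macros, hello and helloImpl are invented) of a macro def whose right-hand side is the kind of implementation reference this compiler resolves and validates:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object Macros {
      // macro def: the right-hand side `helloImpl` is the implementation
      // reference that resolveMacroImpl typechecks and validates
      def hello: Unit = macro helloImpl

      // macro impl: a public method of a static object, as the validators require
      def helloImpl(c: Context): c.Expr[Unit] = {
        import c.universe._
        c.Expr[Unit](q"""println("hello")""")
      }
    }

If validation fails, the error is reported at the position of the offending reference and resolveMacroImpl returns EmptyTree, so the caller can continue.
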
diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala
new file mode 100644
index 0000000000..280baa2a42
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala
@@ -0,0 +1,143 @@
+package scala.reflect.macros
+package compiler
+
+import scala.compat.Platform.EOL
+import scala.reflect.macros.util.Traces
+
+trait Errors extends Traces {
+ self: DefaultMacroCompiler =>
+
+ import global._
+ import analyzer._
+ import definitions._
+ import treeInfo._
+ import typer.TyperErrorGen._
+ import typer.infer.InferErrorGen._
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+ def globalSettings = global.settings
+
+ // sanity check errors
+
+ private def implRefError(message: String) = {
+ val Applied(culprit, _, _) = macroDdef.rhs
+ abort(culprit.pos, message)
+ }
+
+ private def bundleRefError(message: String) = {
+ val Applied(core, _, _) = macroDdef.rhs
+ val culprit = core match {
+ case Select(Applied(core, _, _), _) => core
+ case _ => core
+ }
+ abort(culprit.pos, message)
+ }
+
+ def MacroImplReferenceWrongShapeError() = implRefError(
+ "macro implementation reference has wrong shape. required:\n"+
+ "macro [<static object>].<method name>[[<type args>]] or\n" +
+ "macro [<macro bundle>].<method name>[[<type args>]]")
+
+ def MacroImplWrongNumberOfTypeArgumentsError() = {
+ val diagnostic = if (macroImpl.typeParams.length > targs.length) "has too few type arguments" else "has too many arguments"
+ implRefError(s"macro implementation reference $diagnostic for " + treeSymTypeMsg(macroImplRef))
+ }
+
+ def MacroImplNotPublicError() = implRefError("macro implementation must be public")
+
+ def MacroImplOverloadedError() = implRefError("macro implementation cannot be overloaded")
+
+ def MacroImplNonTagImplicitParameters(params: List[Symbol]) = implRefError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
+
+ def MacroBundleNonStaticError() = bundleRefError("macro bundles must be static")
+
+ def MacroBundleWrongShapeError() = bundleRefError("macro bundles must be concrete classes having a single constructor with a `val c: Context` parameter")
+
+ // compatibility errors
+
+ // helpers
+
+ private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
+ val noun = if (flavor == "value") "parameter" else "type parameter"
+ val message = noun + " lists have different length, " + violation + " extra " + noun
+ val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
+ message + suffix
+ }
+
+ private def abbreviateCoreAliases(s: String): String = {
+ val coreAliases = List("WeakTypeTag", "Expr", "Tree")
+ coreAliases.foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
+ }
+
+ private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean, untype: Boolean) = {
+ def preprocess(tpe: Type) = if (untype) untypeMetalevel(tpe) else tpe
+ var pssPart = (pss map (ps => ps map (p => p.defStringSeenAs(preprocess(p.info))) mkString ("(", ", ", ")"))).mkString
+ if (abbreviate) pssPart = abbreviateCoreAliases(pssPart)
+ var retPart = preprocess(restpe).toString
+ if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
+ pssPart + ": " + retPart
+ }
+
+ // not exactly an error generator, but very related
+ // and I dearly wanted to push it away from Macros.scala
+ private def checkConforms(slot: String, rtpe: Type, atpe: Type) = {
+ val verbose = macroDebugVerbose
+
+ def check(rtpe: Type, atpe: Type): Boolean = {
+ def success() = { if (verbose) println(rtpe + " <: " + atpe + "?" + EOL + "true"); true }
+ (rtpe, atpe) match {
+ case _ if rtpe eq atpe => success()
+ case (TypeRef(_, RepeatedParamClass, rtpe :: Nil), TypeRef(_, RepeatedParamClass, atpe :: Nil)) => check(rtpe, atpe)
+ case (ExprClassOf(_), TreeType()) if rtpe.prefix =:= atpe.prefix => success()
+ case (SubtreeType(), ExprClassOf(_)) if rtpe.prefix =:= atpe.prefix => success()
+ case _ => rtpe <:< atpe
+ }
+ }
+
+ val ok =
+ if (verbose) withTypesExplained(check(rtpe, atpe))
+ else check(rtpe, atpe)
+ if (!ok) {
+ if (!verbose) explainTypes(rtpe, atpe)
+ val msg = {
+ val ss = Seq(rtpe, atpe) map (this abbreviateCoreAliases _.toString)
+ s"type mismatch for $slot: ${ss(0)} does not conform to ${ss(1)}"
+ }
+ compatibilityError(msg)
+ }
+ }
+
+ private def compatibilityError(message: String) =
+ implRefError(
+ "macro implementation has incompatible shape:"+
+ "\n required: " + showMeth(rparamss, rret, abbreviate = true, untype = false) +
+ "\n or : " + showMeth(rparamss, rret, abbreviate = true, untype = true) +
+ "\n found : " + showMeth(aparamss, aret, abbreviate = false, untype = false) +
+ "\n" + message)
+
+ def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
+
+ def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
+
+ def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
+
+ def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkConforms("parameter " + rparam.name, rparam.tpe, atpe)
+
+ def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkConforms("return type", atpe, rret)
+
+ def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
+
+ def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
+ def fail(paramName: Name) = compatibilityError("types incompatible for parameter " + paramName + ": corresponding is not a vararg parameter")
+ if (isRepeated(rparam) && !isRepeated(aparam)) fail(rparam.name)
+ if (!isRepeated(rparam) && isRepeated(aparam)) fail(aparam.name)
+ }
+
+ def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
+ compatibilityError(NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value))
+
+ def MacroImplTparamInstantiationError(atparams: List[Symbol], e: NoInstance) = {
+ val badps = atparams map (_.defString) mkString ", "
+ compatibilityError(f"type parameters $badps cannot be instantiated%n${e.getMessage}")
+ }
+}
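
The compatibility errors above are what the user sees when the signature synthesized from a macro def does not line up with its macro impl. A hedged illustration (invented names, not from this commit): the def below declares a List[Int] parameter while the impl expects c.Expr[Set[Int]], so checkMacroImplParamTypeMismatch reports the mismatch through compatibilityError, printing the required and found signatures side by side:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object BrokenMacros {
      def sum(xs: List[Int]): Int = macro sumImpl

      // rejected at the def site: the parameter type should be c.Expr[List[Int]]
      def sumImpl(c: Context)(xs: c.Expr[Set[Int]]): c.Expr[Int] = ???
    }
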
diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
new file mode 100644
index 0000000000..d35f1c32a9
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
@@ -0,0 +1,72 @@
+package scala.reflect.macros
+package compiler
+
+import scala.reflect.internal.Flags._
+import scala.reflect.macros.TypecheckException
+
+trait Resolvers {
+ self: DefaultMacroCompiler =>
+
+ import global._
+ import analyzer._
+ import definitions._
+ import treeInfo._
+ import gen._
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions.{Predef_???, _}
+
+ /** Resolves a macro impl reference provided in the right-hand side of the given macro definition.
+ *
+ * Acceptable shapes of the right-hand side:
+ * 1) [<static object>].<method name>[[<type args>]] // vanilla macro def
+ * 2) [<macro bundle>].<method name>[[<type args>]] // shiny new macro bundle
+ *
+ * Produces a tree, which represents a reference to a macro implementation if everything goes well,
+ * otherwise reports found errors and returns EmptyTree. The resulting tree should have the following format:
+ *
+ * qualifier.method[targs]
+ *
+ * Qualifier here might be omitted (local macro defs), be a static object (vanilla macro defs)
+ * or be a dummy instance of a macro bundle (e.g. new MyMacro(???).expand).
+ */
+ lazy val macroImplRef: Tree = {
+ val (maybeBundleRef, methName, targs) = macroDdef.rhs match {
+ case Applied(Select(Applied(RefTree(qual, bundleName), _, Nil), methName), targs, Nil) =>
+ (RefTree(qual, bundleName.toTypeName), methName, targs)
+ case Applied(Ident(methName), targs, Nil) =>
+ (Ident(context.owner.enclClass), methName, targs)
+ case _ =>
+ (EmptyTree, TermName(""), Nil)
+ }
+
+ val untypedImplRef = typer.silent(_.typedTypeConstructor(maybeBundleRef)) match {
+ case SilentResultValue(result) if looksLikeMacroBundleType(result.tpe) =>
+ val bundle = result.tpe.typeSymbol
+ if (!isMacroBundleType(bundle.tpe)) MacroBundleWrongShapeError()
+ if (!bundle.owner.isStaticOwner) MacroBundleNonStaticError()
+ atPos(macroDdef.rhs.pos)(gen.mkTypeApply(Select(New(bundle, Ident(Predef_???)), methName), targs))
+ case _ =>
+ macroDdef.rhs
+ }
+
+ val typedImplRef = typer.silent(_.typed(markMacroImplRef(untypedImplRef)), reportAmbiguousErrors = false)
+ typedImplRef match {
+ case SilentResultValue(success) => success
+ case SilentTypeError(err) => abort(err.errPos, err.errMsg)
+ }
+ }
+
+ // FIXME: cannot write this concisely because of SI-7507
+ // lazy val (isImplBundle, macroImplOwner, macroImpl, macroImplTargs) =
+ private lazy val dissectedMacroImplRef =
+ macroImplRef match {
+ case MacroImplReference(isBundle, isBlackbox, owner, meth, targs) => (isBundle, isBlackbox, owner, meth, targs)
+ case _ => MacroImplReferenceWrongShapeError()
+ }
+ lazy val isImplBundle = dissectedMacroImplRef._1
+ lazy val isImplMethod = !isImplBundle
+ lazy val isImplBlackbox = dissectedMacroImplRef._2
+ lazy val macroImplOwner = dissectedMacroImplRef._3
+ lazy val macroImpl = dissectedMacroImplRef._4
+ lazy val targs = dissectedMacroImplRef._5
+}
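
Written out, the two acceptable right-hand-side shapes listed in the comment above look as follows (a sketch with invented names EchoObject, EchoBundle and Defs; it is not part of this commit):

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object EchoObject {                        // static object hosting a vanilla macro impl
      def impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = x
    }

    class EchoBundle(val c: Context) {         // macro bundle: one constructor with `val c: Context`
      def impl(x: c.Expr[Int]): c.Expr[Int] = x
    }

    object Defs {
      def echo1(x: Int): Int = macro EchoObject.impl   // shape 1: [<static object>].<method name>
      def echo2(x: Int): Int = macro EchoBundle.impl   // shape 2: [<macro bundle>].<method name>
    }

For the bundle case the resolver rewrites the reference into a typed selection on a dummy instance, roughly new EchoBundle(???).impl, before handing it to the validators.
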
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
new file mode 100644
index 0000000000..02c1f7c431
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -0,0 +1,199 @@
+package scala.reflect.macros
+package compiler
+
+import java.util.UUID.randomUUID
+import scala.reflect.internal.Flags._
+import scala.reflect.macros.TypecheckException
+
+trait Validators {
+ self: DefaultMacroCompiler =>
+
+ import global._
+ import analyzer._
+ import definitions._
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions.{Predef_???, _}
+
+ def validateMacroImplRef() = {
+ sanityCheck()
+ if (macroImpl != Predef_???) checkMacroDefMacroImplCorrespondence()
+ }
+
+ private def sanityCheck() = {
+ if (!macroImpl.isMethod) MacroImplReferenceWrongShapeError()
+ if (macroImpl.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError()
+ if (!macroImpl.isPublic) MacroImplNotPublicError()
+ if (macroImpl.isOverloaded) MacroImplOverloadedError()
+ val implicitParams = aparamss.flatten filter (_.isImplicit)
+ if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
+ val effectiveOwner = if (isImplMethod) macroImplOwner else macroImplOwner.owner
+ val declaredInStaticObject = effectiveOwner.isStaticOwner || effectiveOwner.moduleClass.isStaticOwner
+ if (!declaredInStaticObject) MacroImplReferenceWrongShapeError()
+ }
+
+ private def checkMacroDefMacroImplCorrespondence() = {
+ val atvars = atparams map freshVar
+ def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
+
+ // we only check strict correspondence between value parameterss
+ // type parameters of macro defs and macro impls don't have to coincide with each other
+ if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
+ map2(aparamss, rparamss)((aparams, rparams) => {
+ if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
+ if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
+ })
+
+ try {
+ // cannot fuse this map2 and the map2 above because if aparamss.flatten != rparamss.flatten
+ // then `atpeToRtpe` is going to fail with an unsound substitution
+ map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
+ if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
+ if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
+ val aparamtpe = aparam.tpe match {
+ case MacroContextType(tpe) => tpe
+ case tpe => tpe
+ }
+ checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
+ })
+
+ checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
+
+ val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
+ val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, maxLubDepth)
+ val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
+ boundsOk match {
+ case SilentResultValue(true) => // do nothing, success
+ case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
+ }
+ } catch {
+ case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
+ }
+ }
+
+ // aXXX (e.g. aparamss) => characteristics of the actual macro impl signature extracted from the macro impl ("a" stands for "actual")
+ // rXXX (e.g. rparamss) => characteristics of the reference macro impl signature synthesized from the macro def ("r" stands for "reference")
+ // FIXME: cannot write this concisely because of SI-7507
+ //lazy val MacroImplSig(atparams, aparamss, aret) = macroImplSig
+ //lazy val MacroImplSig(_, rparamss, rret) = referenceMacroImplSig
+ lazy val atparams = macroImplSig.tparams
+ lazy val aparamss = macroImplSig.paramss
+ lazy val aret = macroImplSig.ret
+ lazy val rparamss = referenceMacroImplSig.paramss
+ lazy val rret = referenceMacroImplSig.ret
+
+ // Technically this can be just an alias to MethodType, but promoting it to a first-class entity
+ // provides better encapsulation and convenient syntax for pattern matching.
+ private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type) {
+ private def tparams_s = if (tparams.isEmpty) "" else tparams.map(_.defString).mkString("[", ", ", "]")
+ private def paramss_s = paramss map (ps => ps.map(s => s"${s.name}: ${s.tpe_*}").mkString("(", ", ", ")")) mkString ""
+ override def toString = "MacroImplSig(" + tparams_s + paramss_s + ret + ")"
+ }
+
+ /** An actual macro implementation signature extracted from a macro implementation method.
+ *
+ * For the following macro impl:
+ * def fooBar[T: c.WeakTypeTag]
+ * (c: scala.reflect.macros.blackbox.Context)
+ * (xs: c.Expr[List[T]])
+ * : c.Expr[T] = ...
+ *
+ * This function will return:
+ * (c: scala.reflect.macros.blackbox.Context)(xs: c.Expr[List[T]])c.Expr[T]
+ *
+ * Note that type tag evidence parameters are not included into the result.
+ * Type tag context bounds for macro impl tparams are optional.
+ * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
+ *
+ * This method cannot be reduced to just macroImpl.info, because macro implementations might
+ * come in different shapes. If the implementation is an apply method of a *box.Macro-compatible object,
+ * then it won't have (c: *box.Context) in its parameters, but will rather refer to *boxMacro.c.
+ *
+ * @param macroImpl The macro implementation symbol
+ */
+ private lazy val macroImplSig: MacroImplSig = {
+ val tparams = macroImpl.typeParams
+ val paramss = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol)
+ val ret = macroImpl.info.finalResultType
+ MacroImplSig(tparams, paramss, ret)
+ }
+
+ /** A reference macro implementation signature extracted from a given macro definition.
+ *
+ * For the following macro def:
+ * def foo[T](xs: List[T]): T = macro fooBar
+ *
+ * This function will return:
+ * (c: scala.reflect.macros.blackbox.Context)(xs: c.Expr[List[T]])c.Expr[T] or
+ * (c: scala.reflect.macros.whitebox.Context)(xs: c.Expr[List[T]])c.Expr[T]
+ *
+ * Note that type tag evidence parameters are not included into the result.
+ * Type tag context bounds for macro impl tparams are optional.
+ * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
+ *
+ * Also note that we need a DefDef, not the corresponding MethodSymbol, because that symbol would be of no use for us.
+ * Macro signatures are verified when typechecking macro defs, which means that at that moment inspecting macroDef.info
+ * means asking for cyclic reference errors.
+ *
+ * We need macro implementation symbol as well, because the return type of the macro definition might be omitted,
+ * and in that case we'd need to infer it from the return type of the macro implementation. Luckily for us, we can
+ * use that symbol without a risk of running into cycles.
+ *
+ * @param typer Typechecker of `macroDdef`
+ * @param macroDdef The macro definition tree
+ * @param macroImpl The macro implementation symbol
+ */
+ private lazy val referenceMacroImplSig: MacroImplSig = {
+ // had to move method's body to an object because of the recursive dependencies between sigma and param
+ object SigGenerator {
+ val cache = scala.collection.mutable.Map[Symbol, Symbol]()
+ val ctxTpe = if (isImplBlackbox) BlackboxContextClass.tpe else WhiteboxContextClass.tpe
+ val ctxPrefix =
+ if (isImplMethod) singleType(NoPrefix, makeParam(nme.macroContext, macroDdef.pos, ctxTpe, SYNTHETIC))
+ else singleType(ThisType(macroImpl.owner), macroImpl.owner.tpe.member(nme.c))
+ val paramss =
+ if (isImplMethod) List(ctxPrefix.termSymbol) :: mmap(macroDdef.vparamss)(param)
+ else mmap(macroDdef.vparamss)(param)
+ val macroDefRet =
+ if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
+ else computeMacroDefTypeFromMacroImplRef(macroDdef, macroImplRef) orElse AnyTpe
+ val implReturnType = sigma(increaseMetalevel(ctxPrefix, macroDefRet))
+
+ object SigmaTypeMap extends TypeMap {
+ def mapPrefix(pre: Type) = pre match {
+ case ThisType(sym) if sym == macroDef.owner =>
+ singleType(singleType(ctxPrefix, MacroContextPrefix), ExprValue)
+ case SingleType(NoPrefix, sym) =>
+ mfind(macroDdef.vparamss)(_.symbol == sym).fold(pre)(p => singleType(singleType(NoPrefix, param(p)), ExprValue))
+ case _ =>
+ mapOver(pre)
+ }
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = mapPrefix(pre)
+ val args1 = mapOverArgs(args, sym.typeParams)
+ if ((pre eq pre1) && (args eq args1)) tp
+ else typeRef(pre1, sym, args1)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ def sigma(tpe: Type): Type = SigmaTypeMap(tpe)
+
+ def makeParam(name: Name, pos: Position, tpe: Type, flags: Long) =
+ macroDef.newValueParameter(name.toTermName, pos, flags) setInfo tpe
+ def param(tree: Tree): Symbol = (
+ cache.getOrElseUpdate(tree.symbol, {
+ val sym = tree.symbol
+ assert(sym.isTerm, s"sym = $sym, tree = $tree")
+ makeParam(sym.name, sym.pos, sigma(increaseMetalevel(ctxPrefix, sym.tpe)), sym.flags)
+ })
+ )
+ }
+
+ import SigGenerator._
+ macroLogVerbose(s"generating macroImplSigs for: $macroDdef")
+ val result = MacroImplSig(macroDdef.tparams map (_.symbol), paramss, implReturnType)
+ macroLogVerbose(s"result is: $result")
+ result
+ }
+}
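
For reference, a minimal standalone sketch (not part of the patch; the object and method names are made up) of the def/impl correspondence that `referenceMacroImplSig` encodes. The trailing WeakTypeTag evidence parameter is exactly the kind of parameter the compatibility check ignores.

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

object FooMacros {
  // impl side: every def-side parameter type T reappears one metalevel up, as c.Expr[T];
  // the implicit WeakTypeTag evidence is skipped by the signature compatibility check
  def fooBar[T](c: Context)(xs: c.Expr[List[T]])(implicit tt: c.WeakTypeTag[T]): c.Expr[T] = {
    import c.universe._
    c.Expr[T](q"${xs.tree}.head")
  }

  // def side: from this signature referenceMacroImplSig derives
  //   (c: scala.reflect.macros.blackbox.Context)(xs: c.Expr[List[T]])c.Expr[T]
  def foo[T](xs: List[T]): T = macro fooBar[T]
}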
diff --git a/src/compiler/scala/reflect/macros/runtime/Aliases.scala b/src/compiler/scala/reflect/macros/contexts/Aliases.scala
index 96cf50e498..cc64d97d85 100644
--- a/src/compiler/scala/reflect/macros/runtime/Aliases.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Aliases.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Aliases {
self: Context =>
@@ -29,8 +29,7 @@ trait Aliases {
override def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
override def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
- type ImplicitCandidate = (Type, Tree)
implicit class RichOpenImplicit(oi: universe.analyzer.OpenImplicit) {
- def toImplicitCandidate = (oi.pt, oi.tree)
+ def toImplicitCandidate = ImplicitCandidate(oi.info.pre, oi.info.sym, oi.pt, oi.tree)
}
} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/contexts/Context.scala b/src/compiler/scala/reflect/macros/contexts/Context.scala
new file mode 100644
index 0000000000..87dac18849
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Context.scala
@@ -0,0 +1,29 @@
+package scala.reflect.macros
+package contexts
+
+import scala.tools.nsc.Global
+
+abstract class Context extends scala.reflect.macros.blackbox.Context
+ with scala.reflect.macros.whitebox.Context
+ with Aliases
+ with Enclosures
+ with Names
+ with Reifiers
+ with FrontEnds
+ with Infrastructure
+ with Typers
+ with Parsers
+ with Evals
+ with ExprUtils
+ with Traces {
+
+ val universe: Global
+
+ val mirror: universe.Mirror = universe.rootMirror
+
+ val callsiteTyper: universe.analyzer.Typer
+
+ val prefix: Expr[PrefixType]
+
+ val expandee: Tree
+}
diff --git a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
new file mode 100644
index 0000000000..5e931817b5
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
@@ -0,0 +1,32 @@
+package scala.reflect.macros
+package contexts
+
+import scala.reflect.{ClassTag, classTag}
+
+trait Enclosures {
+ self: Context =>
+
+ import universe._
+
+ private lazy val site = callsiteTyper.context
+ private lazy val enclTrees = site.enclosingContextChain map (_.tree)
+ private lazy val enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
+
+ private def lenientEnclosure[T <: Tree : ClassTag]: Tree = enclTrees collectFirst { case x: T => x } getOrElse EmptyTree
+ private def strictEnclosure[T <: Tree : ClassTag]: T = enclTrees collectFirst { case x: T => x } getOrElse (throw new EnclosureException(classTag[T].runtimeClass, enclTrees))
+
+ // vals are eager to simplify debugging
+ // after all we wouldn't save that much time by making them lazy
+ val macroApplication: Tree = expandee
+ def enclosingPackage: PackageDef = strictEnclosure[PackageDef]
+ val enclosingClass: Tree = lenientEnclosure[ImplDef]
+ def enclosingImpl: ImplDef = strictEnclosure[ImplDef]
+ def enclosingTemplate: Template = strictEnclosure[Template]
+ val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate)
+ val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self
+ val enclosingMethod: Tree = lenientEnclosure[DefDef]
+ def enclosingDef: DefDef = strictEnclosure[DefDef]
+ val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
+ val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit
+ val enclosingRun: Run = universe.currentRun
+}
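
As a usage sketch (not part of the patch; names are made up): the lenient accessors above fall back to EmptyTree when nothing encloses the expansion, whereas their strict counterparts throw an EnclosureException, so a macro that merely reports its surroundings would use the lenient form.

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

object WhereAmI {
  def impl(c: Context)(): c.Expr[String] = {
    import c.universe._
    // lenient: EmptyTree when the expansion is not enclosed in a class, trait or object
    val cls = c.enclosingClass
    val where = if (cls.isEmpty) "<no enclosing class>" else cls.symbol.fullName
    c.Expr[String](Literal(Constant(where)))
  }

  def whereAmI(): String = macro impl
}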
diff --git a/src/compiler/scala/reflect/macros/runtime/Evals.scala b/src/compiler/scala/reflect/macros/contexts/Evals.scala
index 1f7b5f2ff1..84928ddf86 100644
--- a/src/compiler/scala/reflect/macros/runtime/Evals.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Evals.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
import scala.reflect.runtime.{universe => ru}
import scala.tools.reflect.ToolBox
@@ -7,7 +7,7 @@ import scala.tools.reflect.ToolBox
trait Evals {
self: Context =>
- private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.macroClassloader)
+ private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.defaultMacroClassloader)
private lazy val evalToolBox = evalMirror.mkToolBox()
private lazy val evalImporter = ru.mkImporter(universe).asInstanceOf[ru.Importer { val from: universe.type }]
diff --git a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala
index 672699f00e..4846325d1e 100644
--- a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
+++ b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala
@@ -1,11 +1,10 @@
package scala.reflect.macros
-package runtime
+package contexts
trait ExprUtils {
self: Context =>
import universe._
- import mirror._
def literalNull = Expr[Null](Literal(Constant(null)))(TypeTag.Null)
diff --git a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala
index a6a198e1b4..fda05de09c 100644
--- a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
+++ b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala
@@ -1,5 +1,7 @@
package scala.reflect.macros
-package runtime
+package contexts
+
+import scala.reflect.macros.runtime.AbortMacroException
trait FrontEnds {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
index 7781693822..df7aa4d2be 100644
--- a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Infrastructure {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala
new file mode 100644
index 0000000000..c2f14cf0f1
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Names.scala
@@ -0,0 +1,26 @@
+package scala.reflect.macros
+package contexts
+
+trait Names {
+ self: Context =>
+
+ def freshNameCreator = callsiteTyper.context.unit.fresh
+
+ def fresh(): String =
+ freshName()
+
+ def fresh(name: String): String =
+ freshName(name)
+
+ def fresh[NameType <: Name](name: NameType): NameType =
+ freshName[NameType](name)
+
+ def freshName(): String =
+ freshName("fresh$")
+
+ def freshName(name: String): String =
+ freshNameCreator.newName(name)
+
+ def freshName[NameType <: Name](name: NameType): NameType =
+ name.mapName(freshNameCreator.newName(_)).asInstanceOf[NameType]
+} \ No newline at end of file
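
A rough usage sketch (not part of the patch; the exact suffix appended by the fresh name creator is an implementation detail): macro impls call the new `freshName` overloads to avoid clashing with user-written identifiers in generated code.

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

object FreshDemo {
  def impl(c: Context)(): c.Expr[Int] = {
    import c.universe._
    val x = TermName(c.freshName("x"))  // unique per compilation unit
    c.Expr[Int](q"{ val $x = 21; $x + $x }")
  }

  def twiceTwentyOne(): Int = macro impl
}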
diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
new file mode 100644
index 0000000000..88cfea8157
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -0,0 +1,20 @@
+package scala.reflect.macros
+package contexts
+
+import scala.tools.nsc.reporters.StoreReporter
+
+trait Parsers {
+ self: Context =>
+ import global._
+
+ def parse(code: String) = {
+ val sreporter = new StoreReporter()
+ val unit = new CompilationUnit(newSourceFile(code, "<macro>")) { override def reporter = sreporter }
+ val parser = newUnitParser(unit)
+ val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages())
+ sreporter.infos.foreach {
+ case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg)
+ }
+ tree
+ }
+} \ No newline at end of file
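
A usage sketch (not part of the patch; helper names are made up): since `parse` now goes through the compiler's own parser, syntax errors arrive as a ParseException, which a macro can turn into a proper error at its call site.

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context
import scala.reflect.macros.ParseException

object ParseDemo {
  def impl(c: Context)(code: c.Expr[String]): c.Expr[Any] = {
    import c.universe._
    val snippet = code.tree match {
      case Literal(Constant(s: String)) => s
      case _                            => c.abort(c.enclosingPosition, "expected a string literal")
    }
    val parsed =
      try c.parse(snippet)
      catch { case e: ParseException => c.abort(c.enclosingPosition, "syntax error in snippet: " + e.getMessage) }
    c.Expr[Any](parsed)
  }

  def compileSnippet(code: String): Any = macro impl
}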
diff --git a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
index 8bb388be8f..ecef1c7289 100644
--- a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
@@ -4,7 +4,7 @@
*/
package scala.reflect.macros
-package runtime
+package contexts
trait Reifiers {
self: Context =>
@@ -60,15 +60,15 @@ trait Reifiers {
def logFreeVars(symtab: SymbolTable): Unit =
// logging free vars only when they are untyped prevents avalanches of duplicate messages
symtab.syms map (sym => symtab.symDef(sym)) foreach {
- case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms.value && binding.tpe == null =>
+ case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms && binding.tpe == null =>
reporter.echo(position, "free term: %s %s".format(showRaw(binding), origin))
- case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes.value && binding.tpe == null =>
+ case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes && binding.tpe == null =>
reporter.echo(position, "free type: %s %s".format(showRaw(binding), origin))
case _ =>
// do nothing
}
- if (universe.settings.logFreeTerms.value || universe.settings.logFreeTypes.value)
+ if (universe.settings.logFreeTerms || universe.settings.logFreeTypes)
reification match {
case ReifiedTree(_, _, symtab, _, _, _, _) => logFreeVars(symtab)
case ReifiedType(_, _, symtab, _, _, _) => logFreeVars(symtab)
diff --git a/src/compiler/scala/reflect/macros/runtime/Traces.scala b/src/compiler/scala/reflect/macros/contexts/Traces.scala
index 0238e9f84e..df47f6ba81 100644
--- a/src/compiler/scala/reflect/macros/runtime/Traces.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Traces.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Traces extends util.Traces {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala
index a51bee0fe8..85204d0f1b 100644
--- a/src/compiler/scala/reflect/macros/runtime/Typers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala
@@ -1,5 +1,7 @@
package scala.reflect.macros
-package runtime
+package contexts
+
+import scala.reflect.internal.Mode
trait Typers {
self: Context =>
@@ -9,9 +11,9 @@ trait Typers {
def openImplicits: List[ImplicitCandidate] = callsiteTyper.context.openImplicits.map(_.toImplicitCandidate)
/**
- * @see [[scala.tools.reflect.Toolbox.typeCheck]]
+ * @see [[scala.tools.reflect.ToolBox.typeCheck]]
*/
- def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
+ def typecheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
macroLogVerbose("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
val context = callsiteTyper.context
val wrapper1 = if (!withImplicitViewsDisabled) (context.withImplicitsEnabled[Tree] _) else (context.withImplicitsDisabled[Tree] _)
@@ -22,7 +24,7 @@ trait Typers {
// typechecking uses silent anyways (e.g. in typedSelect), so you'll only waste your time
// I'd advise fixing the root cause: finding why the context is not set to report errors
// (also see reflect.runtime.ToolBoxes.typeCheckExpr for a workaround that might work for you)
- wrapper(callsiteTyper.silent(_.typed(tree, universe.analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
+ wrapper(callsiteTyper.silent(_.typed(universe.duplicateAndKeepPositions(tree), pt), reportAmbiguousErrors = false) match {
case universe.analyzer.SilentResultValue(result) =>
macroLogVerbose(result)
result
@@ -44,7 +46,7 @@ trait Typers {
universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
}
- def resetAllAttrs(tree: Tree): Tree = universe.resetAllAttrs(tree)
+ def resetAllAttrs(tree: Tree): Tree = universe.resetAllAttrs(universe.duplicateAndKeepPositions(tree))
- def resetLocalAttrs(tree: Tree): Tree = universe.resetLocalAttrs(tree)
+ def resetLocalAttrs(tree: Tree): Tree = universe.resetLocalAttrs(universe.duplicateAndKeepPositions(tree))
}
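
For macro authors the rename reads as below (a sketch, not part of the patch); note that, per the change above, the tree passed in is duplicated via `duplicateAndKeepPositions` before typechecking, so the caller's tree is no longer mutated in place.

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

object TypecheckDemo {
  def impl(c: Context)(expr: c.Expr[Any]): c.Expr[String] = {
    import c.universe._
    // silent = true yields EmptyTree on failure instead of throwing TypecheckException
    val typed = c.typecheck(expr.tree, silent = true)
    val answer = if (typed.isEmpty) "does not typecheck" else typed.tpe.toString
    c.Expr[String](Literal(Constant(answer)))
  }

  def describe(expr: Any): String = macro impl
}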
diff --git a/src/compiler/scala/reflect/macros/runtime/Context.scala b/src/compiler/scala/reflect/macros/runtime/Context.scala
deleted file mode 100644
index 8e8b0fcea1..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Context.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-import scala.tools.nsc.Global
-
-abstract class Context extends scala.reflect.macros.Context
- with Aliases
- with Enclosures
- with Names
- with Reifiers
- with FrontEnds
- with Infrastructure
- with Typers
- with Parsers
- with Evals
- with ExprUtils
- with Traces {
-
- val universe: Global
-
- val mirror: universe.Mirror = universe.rootMirror
-
- val callsiteTyper: universe.analyzer.Typer
-
- val prefix: Expr[PrefixType]
-
- val expandee: Tree
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
deleted file mode 100644
index 2a4a22f81c..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Enclosures {
- self: Context =>
-
- import universe._
- import mirror._
-
- private def site = callsiteTyper.context
- private def enclTrees = site.enclosingContextChain map (_.tree)
- private def enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
-
- // vals are eager to simplify debugging
- // after all we wouldn't save that much time by making them lazy
- val macroApplication: Tree = expandee
- val enclosingClass: Tree = enclTrees collectFirst { case x: ImplDef => x } getOrElse EmptyTree
- val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate)
- val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self
- val enclosingMethod: Tree = site.enclMethod.tree
- val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
- val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit
- val enclosingRun: Run = universe.currentRun
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
new file mode 100644
index 0000000000..ecdd48db22
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
@@ -0,0 +1,38 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.runtime.ReflectionUtils
+import scala.reflect.macros.blackbox.{Context => BlackboxContext}
+import scala.reflect.macros.whitebox.{Context => WhiteboxContext}
+import java.lang.reflect.{Constructor => jConstructor}
+
+trait JavaReflectionRuntimes {
+ self: scala.tools.nsc.typechecker.Analyzer =>
+
+ trait JavaReflectionResolvers {
+ self: MacroRuntimeResolver =>
+
+ def resolveJavaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
+ val implClass = Class.forName(className, true, classLoader)
+ val implMeths = implClass.getDeclaredMethods.find(_.getName == methName)
+ // relies on the fact that macro impls cannot be overloaded,
+ // so every methName can resolve to at most one method
+ val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
+ macroLogVerbose(s"successfully loaded macro impl as ($implClass, $implMeth)")
+ args => {
+ val implObj =
+ if (isBundle) {
+ def isMacroContext(clazz: Class[_]) = clazz == classOf[BlackboxContext] || clazz == classOf[WhiteboxContext]
+ def isBundleCtor(ctor: jConstructor[_]) = ctor.getParameterTypes match {
+ case Array(param) if isMacroContext(param) => true
+ case _ => false
+ }
+ val Array(bundleCtor) = implClass.getConstructors.filter(isBundleCtor)
+ bundleCtor.newInstance(args.c)
+ } else ReflectionUtils.staticSingletonInstance(implClass)
+ val implArgs = if (isBundle) args.others else args.c +: args.others
+ implMeth.invoke(implObj, implArgs.asInstanceOf[Seq[AnyRef]]: _*)
+ }
+ }
+ }
+} \ No newline at end of file
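
At the source level, the two impl shapes the resolver distinguishes look roughly like this (a sketch, not part of the patch; names are made up): a bundle is instantiated once per expansion with the context as its constructor argument, while a plain object impl is loaded as a static singleton and receives the context as its first argument.

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

// plain impl: resolved via staticSingletonInstance, context prepended to the arguments
object PlainImpls {
  def answer(c: Context)(): c.Expr[Int] = c.universe.reify(42)
}

// bundle impl: resolved by invoking the single-Context-parameter constructor per expansion
class AnswerBundle(val c: Context) {
  def answer(): c.Expr[Int] = c.universe.reify(42)
}

object Macros {
  def plainAnswer(): Int = macro PlainImpls.answer
  def bundledAnswer(): Int = macro AnswerBundle.answer
}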
diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
new file mode 100644
index 0000000000..5fd9c0db34
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
@@ -0,0 +1,75 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.internal.Flags._
+import scala.reflect.runtime.ReflectionUtils
+
+trait MacroRuntimes extends JavaReflectionRuntimes {
+ self: scala.tools.nsc.typechecker.Analyzer =>
+
+ import global._
+ import definitions._
+
+ /** Produces a function that can be used to invoke the macro implementation for a given macro definition:
+ * 1) Looks up macro implementation symbol in this universe.
+ * 2) Loads its enclosing class from the macro classloader.
+ * 3) Loads the companion of that enclosing class from the macro classloader.
+ * 4) Resolves macro implementation within the loaded companion.
+ *
+ * @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
+ * `null` otherwise.
+ */
+ def macroRuntime(expandee: Tree): MacroRuntime = pluginsMacroRuntime(expandee)
+
+ /** Default implementation of `macroRuntime`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroRuntime for more details)
+ */
+ private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
+ def standardMacroRuntime(expandee: Tree): MacroRuntime = {
+ val macroDef = expandee.symbol
+ macroLogVerbose(s"looking for macro implementation: $macroDef")
+ if (fastTrack contains macroDef) {
+ macroLogVerbose("macro expansion is serviced by a fast track")
+ fastTrack(macroDef)
+ } else {
+ macroRuntimesCache.getOrElseUpdate(macroDef, new MacroRuntimeResolver(macroDef).resolveRuntime())
+ }
+ }
+
+ /** Macro classloader that is used to resolve and run macro implementations.
+ * Loads classes from -cp (aka the library classpath).
+ * Is also capable of detecting REPL and reusing its classloader.
+ *
+ * When -Xmacro-jit is enabled, we sometimes fall back to on-the-fly compilation of macro implementations,
+ * which compiles implementations into a virtual directory (very much like REPL does) and then conjures
+ * a classloader mapped to that virtual directory.
+ */
+ lazy val defaultMacroClassloader: ClassLoader = findMacroClassLoader()
+
+ /** Abstracts away resolution of macro runtimes.
+ */
+ type MacroRuntime = MacroArgs => Any
+ class MacroRuntimeResolver(val macroDef: Symbol) extends JavaReflectionResolvers {
+ val binding = loadMacroImplBinding(macroDef).get
+ val isBundle = binding.isBundle
+ val className = binding.className
+ val methName = binding.methName
+
+ def resolveRuntime(): MacroRuntime = {
+ if (className == Predef_???.owner.javaClassName && methName == Predef_???.name.encoded) {
+ args => throw new AbortMacroException(args.c.enclosingPosition, "macro implementation is missing")
+ } else {
+ try {
+ macroLogVerbose(s"resolving macro implementation as $className.$methName (isBundle = $isBundle)")
+ macroLogVerbose(s"classloader is: ${ReflectionUtils.show(defaultMacroClassloader)}")
+ resolveJavaReflectionRuntime(defaultMacroClassloader)
+ } catch {
+ case ex: Exception =>
+ macroLogVerbose(s"macro runtime failed to load: ${ex.toString}")
+ macroDef setFlag IS_ERROR
+ null
+ }
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Names.scala b/src/compiler/scala/reflect/macros/runtime/Names.scala
deleted file mode 100644
index ee9f3a56d3..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Names.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Names {
- self: Context =>
-
- lazy val freshNameCreator = callsiteTyper.context.unit.fresh
-
- def fresh(): String =
- freshNameCreator.newName()
-
- def fresh(name: String): String =
- freshNameCreator.newName(name)
-
- def fresh[NameType <: Name](name: NameType): NameType =
- name.mapName(freshNameCreator.newName(_)).asInstanceOf[NameType]
-} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Parsers.scala b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
deleted file mode 100644
index 566bcde73d..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Parsers.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-import scala.language.existentials
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.ToolBoxError
-
-trait Parsers {
- self: Context =>
-
- def parse(code: String): Tree =
- // todo. provide decent implementation
- // see `Typers.typedUseCase` for details
- try {
- import scala.reflect.runtime.{universe => ru}
- val parsed = ru.rootMirror.mkToolBox().parse(code)
- val importer = universe.mkImporter(ru)
- importer.importTree(parsed)
- } catch {
- case ToolBoxError(msg, cause) =>
- // todo. provide a position
- throw new ParseException(universe.NoPosition, msg)
- }
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/package.scala b/src/compiler/scala/reflect/macros/runtime/package.scala
new file mode 100644
index 0000000000..9ef8200760
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/package.scala
@@ -0,0 +1,5 @@
+package scala.reflect.macros
+
+package object runtime {
+ type Context = scala.reflect.macros.contexts.Context
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
new file mode 100644
index 0000000000..bddc42d1f9
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -0,0 +1,96 @@
+package scala.reflect.macros
+package util
+
+import scala.tools.nsc.typechecker.Analyzer
+
+trait Helpers {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ /** Transforms the parameter lists of a macro impl.
+ * The `transform` function is invoked only for WeakTypeTag evidence parameters.
+ *
+ * The transformer takes two arguments: a value parameter from a parameter list
+ * and the type parameter that is witnessed by that value parameter.
+ *
+ * If the transformer returns NoSymbol, the value parameter is excluded from the result.
+ * If it returns another symbol, that symbol is included in the result instead of the value parameter.
+ *
+ * Despite being highly esoteric, this function significantly simplifies signature analysis.
+ * For example, it can be used to strip macroImpl.paramss of the evidence parameters (necessary when checking def <-> impl correspondence)
+ * or to streamline creation of the list of macro arguments.
+ */
+ def transformTypeTagEvidenceParams(macroImplRef: Tree, transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+
+ val MacroContextUniverse = definitions.MacroContextUniverse
+ val treeInfo.MacroImplReference(isBundle, _, _, macroImpl, _) = macroImplRef
+ val paramss = macroImpl.paramss
+ val ContextParam = paramss match {
+ case Nil | _ :+ Nil => NoSymbol // no implicit parameters in the signature => nothing to do
+ case _ if isBundle => macroImpl.owner.tpe member nme.c
+ case (cparam :: _) :: _ if isMacroContextType(cparam.tpe) => cparam
+ case _ => NoSymbol // no context parameter in the signature => nothing to do
+ }
+ def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
+ case TypeRef(SingleType(SingleType(_, ContextParam), MacroContextUniverse), WeakTypeTagClass, targ :: Nil) => transform(param, targ.typeSymbol)
+ case _ => param
+ }
+ ContextParam match {
+ case NoSymbol => paramss
+ case _ =>
+ paramss.last map transformTag filter (_.exists) match {
+ case Nil => paramss.init
+ case transformed => paramss.init :+ transformed
+ }
+ }
+ }
+
+ /** Increases metalevel of the type, i.e. transforms:
+ * * T to c.Expr[T]
+ *
+ * @see Metalevels.scala for more information and examples about metalevels
+ */
+ def increaseMetalevel(pre: Type, tp: Type): Type = {
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+
+ transparentShallowTransform(RepeatedParamClass, tp) {
+ case tp => typeRef(pre, MacroContextExprClass, List(tp))
+ }
+ }
+
+ /** Transforms c.Expr[T] types into c.Tree and leaves the rest unchanged.
+ */
+ def untypeMetalevel(tp: Type): Type = {
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+
+ transparentShallowTransform(RepeatedParamClass, tp) {
+ case ExprClassOf(_) => typeRef(tp.prefix, TreesTreeType, Nil)
+ case tp => tp
+ }
+ }
+
+ /** Decreases metalevel of the type, i.e. transforms:
+ * * c.Expr[T] to T
+ * * Nothing to Nothing
+ * * Anything else to NoType
+ *
+ * @see Metalevels.scala for more information and examples about metalevels
+ */
+ def decreaseMetalevel(tp: Type): Type = {
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+ transparentShallowTransform(RepeatedParamClass, tp) {
+ case ExprClassOf(runtimeType) => runtimeType
+ // special-casing Nothing here is a useful convention
+ // that enables no-hassle prototyping with `macro ???` and `macro { ...; ??? }`
+ case nothing if nothing =:= NothingTpe => NothingTpe
+ case _ => NoType
+ }
+ }
+}
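
A compact illustration (not part of the patch) of what the two metalevel helpers do to concrete types, written out as comments; the Nothing and NoType rows follow the cases listed in `decreaseMetalevel` above.

// def-side type       --increaseMetalevel-->   impl-side type
//   T                                           c.Expr[T]
//   List[T]                                     c.Expr[List[T]]
//   T*                                          c.Expr[T]*        (repeated params are looked through)
//
// impl-side type      --decreaseMetalevel-->   def-side type
//   c.Expr[T]                                   T
//   Nothing                                     Nothing           (keeps `macro ???` prototyping working)
//   anything else                               NoType            (flags a def <-> impl mismatch)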
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 2e57bc59a8..860dfd72b2 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -7,7 +7,6 @@ trait Errors {
self: Reifier =>
import global._
- import definitions._
def defaultErrorPosition = {
val stack = currents collect { case t: Tree if t.pos != NoPosition => t.pos }
@@ -22,11 +21,6 @@ trait Errors {
throw new ReificationException(defaultErrorPosition, msg)
}
- def CannotReifySymbol(sym: Symbol) = {
- val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString)
- throw new ReificationException(defaultErrorPosition, msg)
- }
-
def CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt: CompoundTypeTree) = {
val msg = "implementation restriction: cannot reify refinement type trees with non-empty bodies"
throw new ReificationException(ctt.pos, msg)
diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala
index 1710cae2a5..4572caeb36 100644
--- a/src/compiler/scala/reflect/reify/Phases.scala
+++ b/src/compiler/scala/reflect/reify/Phases.scala
@@ -10,7 +10,6 @@ trait Phases extends Reshape
self: Reifier =>
import global._
- import definitions._
private var alreadyRun = false
@@ -26,7 +25,7 @@ trait Phases extends Reshape
if (reifyDebug) println("[reshape phase]")
tree = reshape.transform(tree)
if (reifyDebug) println("[interlude]")
- if (reifyDebug) println("reifee = " + (if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString))
+ if (reifyDebug) println("reifee = " + (if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString))
if (reifyDebug) println("[calculate phase]")
calculate.traverse(tree)
@@ -41,4 +40,4 @@ trait Phases extends Reshape
result
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 47669f57b0..ad0632f93e 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -6,10 +6,11 @@ import scala.reflect.macros.UnexpectedReificationException
import scala.reflect.reify.utils.Utils
/** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type.
- * See more info in the comments to ``reify'' in scala.reflect.api.Universe.
+ * See more info in the comments to `reify` in scala.reflect.api.Universe.
*
- * @author Martin Odersky
- * @version 2.10
+ * @author Martin Odersky
+ * @version 2.10
+ * @since 2.10
*/
abstract class Reifier extends States
with Phases
@@ -19,6 +20,8 @@ abstract class Reifier extends States
val global: Global
import global._
import definitions._
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
val typer: global.analyzer.Typer
val universe: Tree
@@ -31,20 +34,20 @@ abstract class Reifier extends States
this.asInstanceOf[Reifier { val global: Reifier.this.global.type }]
override def hasReifier = true
- /**
- * For ``reifee'' and other reification parameters, generate a tree of the form
- *
+ /** For `reifee` and other reification parameters, generate a tree of the form
+ * {{{
* {
- * val $u: universe.type = <[ universe ]>
- * val $m: $u.Mirror = <[ mirror ]>
- * $u.Expr[T](rtree) // if data is a Tree
- * $u.TypeTag[T](rtree) // if data is a Type
+ * val \$u: universe.type = <[ universe ]>
+ * val \$m: \$u.Mirror = <[ mirror ]>
+ * \$u.Expr[T](rtree) // if data is a Tree
+ * \$u.TypeTag[T](rtree) // if data is a Type
* }
+ * }}}
*
* where
*
- * - `universe` is the tree that represents the universe the result will be bound to
- * - `mirror` is the tree that represents the mirror the result will be initially bound to
+ * - `universe` is the tree that represents the universe the result will be bound to.
+ * - `mirror` is the tree that represents the mirror the result will be initially bound to.
* - `rtree` is code that generates `reifee` at runtime.
* - `T` is the type that corresponds to `data`.
*
@@ -57,7 +60,7 @@ abstract class Reifier extends States
val result = reifee match {
case tree: Tree =>
- reifyTrace("reifying = ")(if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
+ reifyTrace("reifying = ")(if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
reifyTrace("reifee is located at: ")(tree.pos)
reifyTrace("universe = ")(universe)
reifyTrace("mirror = ")(mirror)
@@ -109,7 +112,7 @@ abstract class Reifier extends States
// maybe try `resetLocalAttrs` once the dust settles
var importantSymbols = Set[Symbol](
NothingClass, AnyClass, SingletonClass, PredefModule, ScalaRunTimeModule, TypeCreatorClass, TreeCreatorClass, MirrorClass,
- ApiUniverseClass, JavaUniverseClass, ReflectRuntimePackage, ReflectRuntimeCurrentMirror)
+ ApiUniverseClass, JavaUniverseClass, ReflectRuntimePackage, runDefinitions.ReflectRuntimeCurrentMirror)
importantSymbols ++= importantSymbols map (_.companionSymbol)
importantSymbols ++= importantSymbols map (_.moduleClass)
importantSymbols ++= importantSymbols map (_.linkedClassOfClass)
@@ -140,4 +143,4 @@ abstract class Reifier extends States
throw new UnexpectedReificationException(defaultErrorPosition, "reification crashed", ex)
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala
index 58455c9f3c..29bfa19845 100644
--- a/src/compiler/scala/reflect/reify/States.scala
+++ b/src/compiler/scala/reflect/reify/States.scala
@@ -4,7 +4,6 @@ trait States {
self: Reifier =>
import global._
- import definitions._
/** Encapsulates reifier state
*
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index cbaee41890..093c2bee22 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -1,14 +1,15 @@
package scala.reflect.reify
import scala.reflect.macros.{ReificationException, UnexpectedReificationException, TypecheckException}
-import scala.reflect.macros.runtime.Context
+import scala.reflect.macros.contexts.Context
abstract class Taggers {
val c: Context
import c.universe._
import definitions._
- import treeBuild._
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
val coreTags = Map(
ByteTpe -> nme.Byte,
@@ -59,18 +60,18 @@ abstract class Taggers {
val result =
tpe match {
case coreTpe if coreTags contains coreTpe =>
- val ref = if (tagModule.owner.isPackageClass) Ident(tagModule) else Select(prefix, tagModule.name)
+ val ref = if (tagModule.isTopLevel) Ident(tagModule) else Select(prefix, tagModule.name)
Select(ref, coreTags(coreTpe))
case _ =>
translatingReificationErrors(materializer)
}
- try c.typeCheck(result)
+ try c.typecheck(result)
catch { case terr @ TypecheckException(pos, msg) => failTag(result, terr) }
}
def materializeExpr(universe: Tree, mirror: Tree, expr: Tree): Tree = {
val result = translatingReificationErrors(c.reifyTree(universe, mirror, expr))
- try c.typeCheck(result)
+ try c.typecheck(result)
catch { case terr @ TypecheckException(pos, msg) => failExpr(result, terr) }
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
index dec491aabe..bd60faf4cd 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
@@ -5,7 +5,6 @@ trait GenAnnotationInfos {
self: Reifier =>
import global._
- import definitions._
// usually annotations are reified as their originals from Modifiers
// however, when reifying free and tough types, we're forced to reify annotation infos as is
@@ -46,10 +45,12 @@ trait GenAnnotationInfos {
mirrorFactoryCall(nme.ArrayAnnotArg, scalaFactoryCall(nme.Array, args map reifyClassfileAnnotArg: _*))
case NestedAnnotArg(ann) =>
mirrorFactoryCall(nme.NestedAnnotArg, reifyAnnotationInfo(ann))
+ case _ =>
+ sys.error(s"Don't know what to do with $arg")
}
// if you reify originals of anns, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
mirrorFactoryCall(nme.Annotation, reify(ann.atp), mkList(reifiedArgs), mkListMap(reifiedAssocs))
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenNames.scala b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
index 4abf88f475..4266c6f8d6 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenNames.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
@@ -5,10 +5,9 @@ trait GenNames {
self: Reifier =>
import global._
- import definitions._
def reifyName(name: Name) = {
- val factory = if (name.isTypeName) nme.nmeNewTypeName else nme.nmeNewTermName
+ val factory = if (name.isTypeName) nme.TypeName else nme.TermName
mirrorCall(factory, Literal(Constant(name.toString)))
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
index 8c5db04454..1d151c5135 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
@@ -5,7 +5,6 @@ trait GenPositions {
self: Reifier =>
import global._
- import definitions._
// we do not reify positions because this inflates resulting trees, but doesn't buy us anything
// where would one use positions? right, in error messages
@@ -14,4 +13,4 @@ trait GenPositions {
// however both macros and toolboxes have their own means to report errors in synthetic trees
def reifyPosition(pos: Position): Tree =
reifyMirrorObject(NoPosition)
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index 47c966ea24..3a97089d51 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -7,7 +7,6 @@ trait GenSymbols {
self: Reifier =>
import global._
- import definitions._
/** Symbol table of the reifee.
*
@@ -43,7 +42,7 @@ trait GenSymbols {
else if (sym.isPackage)
mirrorMirrorCall(nme.staticPackage, reify(sym.fullName))
else if (sym.isLocatable) {
- /** This is a fancy conundrum that stems from the fact that Scala allows
+ /* This is a fancy conundrum that stems from the fact that Scala allows
* packageless packages and packageless objects with the same names in the same program.
*
* For more details read the docs to staticModule and staticPackage.
@@ -101,7 +100,7 @@ trait GenSymbols {
def reifyFreeTerm(binding: Tree): Tree =
reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym + "(" + sym.accurateKindString + ")")
- val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
+ val name = newTermName("" + nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
// We need to note whether the free value being reified is stable or not to guide subsequent reflective compilation.
// Here's why reflection compilation needs our help.
//
@@ -132,9 +131,9 @@ trait GenSymbols {
if (sym.isCapturedVariable) {
assert(binding.isInstanceOf[Ident], showRaw(binding))
val capturedBinding = referenceCapturedVariable(sym)
- Reification(name, capturedBinding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), capturedBinding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ Reification(name, capturedBinding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), capturedBinding, mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(origin(sym))))
} else {
- Reification(name, binding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), binding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ Reification(name, binding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), binding, mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(origin(sym))))
}
}
@@ -142,16 +141,16 @@ trait GenSymbols {
reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString))
state.reificationIsConcrete = false
- val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
- Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ val name: TermName = nme.REIFY_FREE_PREFIX append sym.name
+ Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(origin(sym))))
}
def reifySymDef(sym: Symbol): Tree =
reifyIntoSymtab(sym) { sym =>
if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString))
- val name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name)
+ val name: TermName = nme.REIFY_SYMDEF_PREFIX append sym.name
def reifiedOwner = if (sym.owner.isLocatable) reify(sym.owner) else reifySymDef(sym.owner)
- Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
+ Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(sym.isClass)))
}
case class Reification(name: Name, binding: Tree, tree: Tree)
@@ -173,7 +172,7 @@ trait GenSymbols {
val reification = reificode(sym)
import reification.{name, binding}
val tree = reification.tree updateAttachment ReifyBindingAttachment(binding)
- state.symtab += (sym, name, tree)
+ state.symtab += (sym, name.toTermName, tree)
}
fromSymtab
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index 9894e359b4..f6b3c42ca9 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -15,7 +15,7 @@ trait GenTrees {
/**
* Reify a tree.
- * For internal use only, use ``reified'' instead.
+ * For internal use only, use `reified` instead.
*/
def reifyTree(tree: Tree): Tree = {
assert(tree != null, "tree is null")
@@ -29,42 +29,29 @@ trait GenTrees {
// the idea behind the new reincarnation of reifier is a simple maxim:
//
- // never call ``reifyType'' to reify a tree
+ // never call `reifyType` to reify a tree
//
// this works because the stuff we are reifying was once represented with trees only
// and lexical scope information can be fully captured by reifying symbols
//
- // to enable this idyll, we work hard in the ``Reshape'' phase
+ // to enable this idyll, we work hard in the `Reshape` phase
// which replaces all types with equivalent trees and works around non-idempotencies of the typechecker
//
// why bother? because this brings method to the madness
// the first prototype of reification reified all types and symbols for all trees => this quickly became unwieldy
// the second prototype reified external types, but avoided reifying local ones => this created an ugly irregularity
// current approach is uniform and compact
- var rtree = tree match {
- case global.EmptyTree =>
- reifyMirrorObject(EmptyTree)
- case global.emptyValDef =>
- mirrorBuildSelect(nme.emptyValDef)
- case FreeDef(_, _, _, _, _) =>
- reifyNestedFreeDef(tree)
- case FreeRef(_, _) =>
- reifyNestedFreeRef(tree)
- case BoundTerm(tree) =>
- reifyBoundTerm(tree)
- case BoundType(tree) =>
- reifyBoundType(tree)
- case Literal(const @ Constant(_)) =>
- mirrorCall(nme.Literal, reifyProduct(const))
- case Import(expr, selectors) =>
- mirrorCall(nme.Import, reify(expr), mkList(selectors map reifyProduct))
- case _ =>
- reifyProduct(tree)
+ var rtree: Tree = tree match {
+ case FreeDef(_, _, _, _, _) => reifyNestedFreeDef(tree)
+ case FreeRef(_, _) => reifyNestedFreeRef(tree)
+ case BoundTerm(tree) => reifyBoundTerm(tree)
+ case BoundType(tree) => reifyBoundType(tree)
+ case _ => reifyTreeSyntactically(tree)
}
// usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
- // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why.
- if (reifyTreeSymbols && tree.hasSymbol) {
+ // however, reification of AnnotatedTypes is special. see `reifyType` to find out why.
+ if (reifyTreeSymbols && tree.hasSymbolField) {
if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree))
rtree = mirrorBuildCall(nme.setSymbol, rtree, reify(tree.symbol))
}
@@ -76,21 +63,34 @@ trait GenTrees {
rtree
}
+ def reifyTreeSyntactically(tree: Tree): Tree = tree match {
+ case global.EmptyTree => reifyMirrorObject(EmptyTree)
+ case global.noSelfType => mirrorSelect(nme.noSelfType)
+ case global.pendingSuperCall => mirrorSelect(nme.pendingSuperCall)
+ case Literal(const @ Constant(_)) => mirrorCall(nme.Literal, reifyProduct(const))
+ case Import(expr, selectors) => mirrorCall(nme.Import, reify(expr), mkList(selectors map reifyProduct))
+ case _ => reifyProduct(tree)
+ }
+
+ def reifyFlags(flags: FlagSet) =
+ if (flags != 0) reifyBuildCall(nme.FlagsRepr, flags) else mirrorSelect(nme.NoFlags)
+
def reifyModifiers(m: global.Modifiers) =
- mirrorFactoryCall(nme.Modifiers, mirrorBuildCall(nme.flagsFromBits, reify(m.flags)), reify(m.privateWithin), reify(m.annotations))
+ if (m == NoMods) mirrorSelect(nme.NoMods)
+ else mirrorFactoryCall(nme.Modifiers, reifyFlags(m.flags), reify(m.privateWithin), reify(m.annotations))
private def spliceTree(tree: Tree): Tree = {
tree match {
case TreeSplice(splicee) =>
if (reifyDebug) println("splicing " + tree)
- // see ``Metalevels'' for more info about metalevel breaches
+ // see `Metalevels` for more info about metalevel breaches
// and about how we deal with splices that contain them
- val isMetalevelBreach = splicee exists (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
- val isRuntimeEval = splicee exists (sub => sub.hasSymbol && sub.symbol == ExprSplice)
+ val isMetalevelBreach = splicee exists (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+ val isRuntimeEval = splicee exists (sub => sub.hasSymbolField && sub.symbol == ExprSplice)
if (isMetalevelBreach || isRuntimeEval) {
// we used to convert dynamic splices into runtime evals transparently, but we no longer do that
- // why? see comments in ``Metalevels''
+ // why? see comments in `Metalevels`
// if (reifyDebug) println("splicing has failed: cannot splice when facing a metalevel breach")
// EmptyTree
CannotReifyRuntimeSplice(tree)
@@ -100,7 +100,7 @@ trait GenTrees {
// we intentionally don't care about the prefix (the first underscore in the `RefiedTree` pattern match)
case ReifiedTree(_, _, inlinedSymtab, rtree, _, _, _) =>
if (reifyDebug) println("inlining the splicee")
- // all free vars local to the enclosing reifee should've already been inlined by ``Metalevels''
+ // all free vars local to the enclosing reifee should've already been inlined by `Metalevels`
for (sym <- inlinedSymtab.syms if sym.isLocalToReifee)
abort("local free var, should have already been inlined by Metalevels: " + inlinedSymtab.symDef(sym))
state.symtab ++= inlinedSymtab
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index bb7e1f9b56..99b968be3b 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -6,10 +6,12 @@ trait GenTypes {
import global._
import definitions._
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions.{ReflectRuntimeUniverse, ReflectRuntimeCurrentMirror, _}
/**
* Reify a type.
- * For internal use only, use ``reified'' instead.
+ * For internal use only, use `reified` instead.
*/
def reifyType(tpe: Type): Tree = {
assert(tpe != null, "tpe is null")
@@ -73,7 +75,6 @@ trait GenTypes {
if (reifyDebug) println("splicing " + tpe)
val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString
- val key = (tagFlavor, tpe.typeSymbol)
// if this fails, it might produce the dreaded "erroneous or inaccessible type" error
// to find out the whereabouts of the error run scalac with -Ydebug
if (reifyDebug) println("launching implicit search for %s.%s[%s]".format(universe, tagFlavor, tpe))
@@ -156,7 +157,7 @@ trait GenTypes {
*/
private def reifySemiConcreteTypeMember(tpe: Type): Tree = tpe match {
case tpe @ TypeRef(pre @ SingleType(prepre, presym), sym, args) if sym.isAbstractType && !sym.isExistential =>
- return mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args))
+ mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args))
}
/** Reify an annotated type, i.e. the one that makes us deal with AnnotationInfos */
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 49877b4286..de9fec0df5 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -5,7 +5,6 @@ trait GenUtils {
self: Reifier =>
import global._
- import definitions._
def reifyList(xs: List[Any]): Tree =
mkList(xs map reify)
@@ -31,41 +30,35 @@ trait GenUtils {
def call(fname: String, args: Tree*): Tree =
Apply(termPath(fname), args.toList)
- def mirrorSelect(name: String): Tree =
- termPath(nme.UNIVERSE_PREFIX + name)
+ def mirrorSelect(name: String): Tree = termPath(nme.UNIVERSE_PREFIX + name)
+ def mirrorSelect(name: TermName): Tree = mirrorSelect(name.toString)
- def mirrorBuildSelect(name: String): Tree =
- termPath(nme.UNIVERSE_BUILD_PREFIX + name)
-
- def mirrorMirrorSelect(name: String): Tree =
- termPath(nme.MIRROR_PREFIX + name)
+ def mirrorMirrorSelect(name: TermName): Tree =
+ termPath("" + nme.MIRROR_PREFIX + name)
def mirrorCall(name: TermName, args: Tree*): Tree =
- call("" + (nme.UNIVERSE_PREFIX append name), args: _*)
-
- def mirrorCall(name: String, args: Tree*): Tree =
- call(nme.UNIVERSE_PREFIX + name, args: _*)
+ call("" + nme.UNIVERSE_PREFIX + name, args: _*)
def mirrorBuildCall(name: TermName, args: Tree*): Tree =
- call("" + (nme.UNIVERSE_BUILD_PREFIX append name), args: _*)
+ call("" + nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
- def mirrorBuildCall(name: String, args: Tree*): Tree =
- call(nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
+ def reifyBuildCall(name: TermName, args: Any*) =
+ mirrorBuildCall(name, args map reify: _*)
def mirrorMirrorCall(name: TermName, args: Tree*): Tree =
- call("" + (nme.MIRROR_PREFIX append name), args: _*)
-
- def mirrorMirrorCall(name: String, args: Tree*): Tree =
- call(nme.MIRROR_PREFIX + name, args: _*)
+ call("" + nme.MIRROR_PREFIX + name, args: _*)
def mirrorFactoryCall(value: Product, args: Tree*): Tree =
mirrorFactoryCall(value.productPrefix, args: _*)
- def mirrorFactoryCall(prefix: String, args: Tree*): Tree =
- mirrorCall(prefix, args: _*)
+ def mirrorFactoryCall(prefix: TermName, args: Tree*): Tree =
+ mirrorCall("" + prefix, args: _*)
+
+ def scalaFactoryCall(name: TermName, args: Tree*): Tree =
+ call(s"scala.$name.apply", args: _*)
def scalaFactoryCall(name: String, args: Tree*): Tree =
- call("scala." + name + ".apply", args: _*)
+ scalaFactoryCall(name: TermName, args: _*)
def mkList(args: List[Tree]): Tree =
scalaFactoryCall("collection.immutable.List", args: _*)
@@ -91,22 +84,6 @@ trait GenUtils {
/** An (unreified) path that refers to term definition with given fully qualified name */
def termPath(fullname: String): Tree = path(fullname, newTermName)
- /** An (unreified) path that refers to type definition with given fully qualified name */
- def typePath(fullname: String): Tree = path(fullname, newTypeName)
-
- def isTough(tpe: Type) = {
- def isTough(tpe: Type) = tpe match {
- case _: RefinedType => true
- case _: ExistentialType => true
- case _: ClassInfoType => true
- case _: MethodType => true
- case _: PolyType => true
- case _ => false
- }
-
- tpe != null && (tpe exists isTough)
- }
-
object TypedOrAnnotated {
def unapply(tree: Tree): Option[Tree] = tree match {
case ty @ Typed(_, _) =>
@@ -118,15 +95,6 @@ trait GenUtils {
}
}
- def isAnnotated(tpe: Type) = {
- def isAnnotated(tpe: Type) = tpe match {
- case _: AnnotatedType => true
- case _ => false
- }
-
- tpe != null && (tpe exists isAnnotated)
- }
-
def isSemiConcreteTypeMember(tpe: Type) = tpe match {
case TypeRef(SingleType(_, _), sym, _) if sym.isAbstractType && !sym.isExistential => true
case _ => false
@@ -145,4 +113,4 @@ trait GenUtils {
if (origin == "") origin = "of unknown origin"
origin
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index 6777bb0a50..eea63d8f28 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -1,11 +1,11 @@
-package scala.reflect
+package scala
+package reflect
-import scala.language.implicitConversions
-import scala.reflect.macros.{Context, ReificationException, UnexpectedReificationException}
+import scala.reflect.macros.ReificationException
import scala.tools.nsc.Global
package object reify {
- private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean = false): Reifier { val global: global1.type } = {
+ private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean): Reifier { val global: global1.type } = {
val typer1: typer.type = typer
val universe1: universe.type = universe
val mirror1: mirror.type = mirror
@@ -24,14 +24,15 @@ package object reify {
private[reify] def mkDefaultMirrorRef(global: Global)(universe: global.Tree, typer0: global.analyzer.Typer): global.Tree = {
import global._
- import definitions._
+ import definitions.JavaUniverseClass
+
val enclosingErasure = {
val rClassTree = reifyEnclosingRuntimeClass(global)(typer0)
// HACK around SI-6259
// If we're in the constructor of an object or otherwise don't have easy access to `this`, we have no good way to grab
// the class of that object. Instead, we construct an anonymous class and grab its class file, assuming
// this is enough to get the correct class loader for the class we *want* a mirror for, the object itself.
- rClassTree orElse Apply(Select(treeBuilder.makeAnonymousNew(Nil), sn.GetClass), Nil)
+ rClassTree orElse Apply(Select(gen.mkAnonymousNew(Nil), sn.GetClass), Nil)
}
// JavaUniverse is defined in scala-reflect.jar, so we must be very careful in case someone reifies stuff having only scala-library.jar on the classpath
val isJavaUniverse = JavaUniverseClass != NoSymbol && universe.tpe <:< JavaUniverseClass.toTypeConstructor
@@ -59,14 +60,14 @@ package object reify {
if (concrete) throw new ReificationException(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe))
}
- tpe match {
+ tpe.dealiasWiden match {
case TypeRef(_, ArrayClass, componentTpe :: Nil) =>
val componentErasure = reifyRuntimeClass(global)(typer0, componentTpe, concrete)
- gen.mkMethodCall(arrayClassMethod, List(componentErasure))
+ gen.mkMethodCall(currentRun.runDefinitions.arrayClassMethod, List(componentErasure))
case _ =>
var erasure = tpe.erasure
if (tpe.typeSymbol.isDerivedValueClass && global.phase.id < global.currentRun.erasurePhase.id) erasure = tpe
- gen.mkNullaryCall(Predef_classOf, List(erasure))
+ gen.mkNullaryCall(currentRun.runDefinitions.Predef_classOf, List(erasure))
}
}
@@ -74,7 +75,6 @@ package object reify {
// a class/object body, this will return an EmptyTree.
def reifyEnclosingRuntimeClass(global: Global)(typer0: global.analyzer.Typer): global.Tree = {
import global._
- import definitions._
def isThisInScope = typer0.context.enclosingContextChain exists (_.tree.isInstanceOf[ImplDef])
if (isThisInScope) {
val enclosingClasses = typer0.context.enclosingContextChain map (_.tree) collect { case classDef: ClassDef => classDef }
diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala
index 4d1e22abe7..abd179b24b 100644
--- a/src/compiler/scala/reflect/reify/phases/Calculate.scala
+++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala
@@ -5,7 +5,6 @@ trait Calculate {
self: Reifier =>
import global._
- import definitions._
implicit class RichCalculateSymbol(sym: Symbol) {
def metalevel: Int = { assert(sym != null && sym != NoSymbol); localSymbols.getOrElse(sym, 0) }
@@ -30,7 +29,7 @@ trait Calculate {
* Merely traverses the reifiee and records local symbols along with their metalevels.
*/
val calculate = new Traverser {
- // see the explanation of metalevels in ``Metalevels''
+ // see the explanation of metalevels in `Metalevels`
var currMetalevel = 1
override def traverse(tree: Tree): Unit = tree match {
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index fbbd12a42f..c69263399f 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -1,16 +1,17 @@
package scala.reflect.reify
package phases
+import scala.collection.{ mutable }
+
trait Metalevels {
self: Reifier =>
import global._
- import definitions._
/**
* Makes sense of cross-stage bindings.
*
- * ================
+ * ----------------
*
* Analysis of cross-stage bindings becomes convenient if we introduce the notion of metalevels.
* Metalevel of a tree is a number that gets incremented every time you reify something and gets decremented when you splice something.
@@ -32,27 +33,27 @@ trait Metalevels {
*
* 1) symbol.metalevel < curr_metalevel. In this case reifier will generate a free variable
* that captures both the name of the symbol (to be compiled successfully) and its value (to be run successfully).
- * For example, x in Example 1 will be reified as follows: Ident(newFreeVar("x", IntClass.tpe, x))
+ * For example, x in Example 1 will be reified as follows: Ident(newFreeVar("x", IntTpe, x))
*
* 2) symbol.metalevel > curr_metalevel. This leads to a metalevel breach that violates intuitive perception of splicing.
* As defined in macro spec, splicing takes a tree and inserts it into another tree - as simple as that.
* However, how exactly do we do that in the case of y.splice? In this very scenario we can use dataflow analysis and inline it,
* but what if y were a var, and what if it were calculated randomly at runtime?
*
- * This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of ``reify''),
+ * This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of `reify`),
 * but now we have runtime toolboxes, so no one stops us from picking up that reified tree and evaluating it at runtime
- * (in fact, this is something that ``Expr.splice'' does transparently).
+ * (in fact, this is something that `Expr.splice` does transparently).
*
 * This is akin to the early vs. late binding dilemma.
 * The former is faster; on the other hand, the latter (implemented with reflection) might not work because of visibility issues or might not be available on all platforms.
 * But the latter still has its uses, so I'm allowing metalevel breaches, but introducing the -Xlog-runtime-evals option to log them.
*
- * upd. We no longer do that. In case of a runaway ``splice'' inside a `reify`, one will get a static error.
+ * upd. We no longer do that. In case of a runaway `splice` inside a `reify`, one will get a static error.
* Why? Unfortunately, the cute idea of transparently converting between static and dynamic splices has failed.
 * 1) Runtime eval that services dynamic splices requires scala-compiler.jar, which might not be on the library classpath
 * 2) Runtime eval incurs a severe performance penalty, so it's better to be explicit about it
*
- * ================
+ * ----------------
*
 * As we can see, the only problem is the fact that the lhs of a `splice` can be a code block that captures variables from the outside.
 * Code inside the lhs of a `splice` is not reified, while the code from the enclosing reify is.
@@ -71,7 +72,7 @@ trait Metalevels {
* Since the result of the inner reify is wrapped in a splice, it won't be reified
* together with the other parts of the outer reify, but will be inserted into that result verbatim.
*
- * The inner reify produces an Expr[Int] that wraps Ident(freeVar("x", IntClass.tpe, x)).
+ * The inner reify produces an Expr[Int] that wraps Ident(freeVar("x", IntTpe, x)).
* However the freevar the reification points to will vanish when the compiler processes the outer reify.
* That's why we need to replace that freevar with a regular symbol that will point to reified x.
*
@@ -102,7 +103,7 @@ trait Metalevels {
*/
val metalevels = new Transformer {
var insideSplice = false
- var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]()
+ val inlineableBindings = mutable.Map[TermName, Tree]()
def withinSplice[T](op: => T) = {
val old = insideSplice
@@ -124,7 +125,7 @@ trait Metalevels {
withinSplice { super.transform(TreeSplice(ReifiedTree(universe, mirror, symtab1, rtree, tpe, rtpe, concrete))) }
case TreeSplice(splicee) =>
if (reifyDebug) println("entering splice: " + splicee)
- val breaches = splicee filter (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+ val breaches = splicee filter (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
if (!insideSplice && breaches.nonEmpty) {
// we used to convert dynamic splices into runtime evals transparently, but we no longer do that
// why? see comments above
@@ -135,7 +136,7 @@ trait Metalevels {
} else {
withinSplice { super.transform(tree) }
}
- // todo. also inline usages of ``inlineableBindings'' in the symtab itself
+ // todo. also inline usages of `inlineableBindings` in the symtab itself
 // e.g. a free$Foo can well use free$x, if Foo is path-dependent w.r.t. x
// FreeRef(_, _) check won't work, because metalevels of symbol table and body are different, hence, freerefs in symbol table look different from freerefs in body
case FreeRef(_, name) if inlineableBindings contains name =>
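
A concrete user-level illustration of the metalevel bookkeeping described above; a minimal sketch, assuming scala-reflect is on the classpath (the value names are mine):

  import scala.reflect.runtime.universe._

  object MetalevelDemo extends App {
    def demo(): Unit = {
      val x = 2
      // `x` is defined at metalevel 0 but referenced under `reify` (metalevel 1),
      // so the reifier captures it as a free term - visible via showRaw.
      val captured: Expr[Int] = reify { x + 2 }
      println(showRaw(captured.tree))

      // Splicing goes the other way: the already-reified `y` drops back down a
      // metalevel and is inserted into the surrounding reified tree verbatim.
      val y: Expr[Int] = reify(40)
      val spliced: Expr[Int] = reify { y.splice + 2 }
      println(show(spliced.tree))
    }
    demo()
  }
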
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala
index 8e13a45cdb..143424dac5 100644
--- a/src/compiler/scala/reflect/reify/phases/Reify.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reify.scala
@@ -2,7 +2,6 @@ package scala.reflect.reify
package phases
import scala.runtime.ScalaRunTime.isAnyVal
-import scala.runtime.ScalaRunTime.isTuple
import scala.reflect.reify.codegen._
trait Reify extends GenSymbols
@@ -16,7 +15,6 @@ trait Reify extends GenSymbols
self: Reifier =>
import global._
- import definitions._
private object reifyStack {
def currents: List[Any] = state.reifyStack
@@ -37,7 +35,7 @@ trait Reify extends GenSymbols
/**
* Reifies any supported value.
- * For internal use only, use ``reified'' instead.
+ * For internal use only, use `reified` instead.
*/
def reify(reifee: Any): Tree = reifyStack.push(reifee)(reifee match {
// before adding some case here, in global scope, please, consider
@@ -59,4 +57,4 @@ trait Reify extends GenSymbols
case _ =>
throw new Error("reifee %s of type %s is not supported".format(reifee, reifee.getClass))
})
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 535a933c73..6c073c0b4c 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -8,6 +8,9 @@ trait Reshape {
import global._
import definitions._
+ import treeInfo.Unapplied
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
/**
* Rolls back certain changes that were introduced during typechecking of the reifee.
@@ -48,13 +51,13 @@ trait Reshape {
val Template(parents, self, body) = impl
var body1 = trimAccessors(classDef, reshapeLazyVals(body))
body1 = trimSyntheticCaseClassMembers(classDef, body1)
- var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ val impl1 = Template(parents, self, body1).copyAttrs(impl)
ClassDef(mods, name, params, impl1).copyAttrs(classDef)
case moduledef @ ModuleDef(mods, name, impl) =>
val Template(parents, self, body) = impl
var body1 = trimAccessors(moduledef, reshapeLazyVals(body))
body1 = trimSyntheticCaseClassMembers(moduledef, body1)
- var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ val impl1 = Template(parents, self, body1).copyAttrs(impl)
ModuleDef(mods, name, impl1).copyAttrs(moduledef)
case template @ Template(parents, self, body) =>
val discardedParents = parents collect { case tt: TypeTree => tt } filter isDiscarded
@@ -65,22 +68,9 @@ trait Reshape {
case block @ Block(stats, expr) =>
val stats1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(stats))
Block(stats1, expr).copyAttrs(block)
- case unapply @ UnApply(fun, args) =>
- def extractExtractor(tree: Tree): Tree = {
- val Apply(fun, args) = tree
- args match {
- case List(Ident(special)) if special == nme.SELECTOR_DUMMY =>
- val Select(extractor, flavor) = fun
- assert(flavor == nme.unapply || flavor == nme.unapplySeq)
- extractor
- case _ =>
- extractExtractor(fun)
- }
- }
-
+ case unapply @ UnApply(Unapplied(Select(fun, nme.unapply | nme.unapplySeq)), args) =>
if (reifyDebug) println("unapplying unapply: " + tree)
- val fun1 = extractExtractor(fun)
- Apply(fun1, args).copyAttrs(unapply)
+ Apply(fun, args).copyAttrs(unapply)
case _ =>
tree
}
@@ -89,8 +79,8 @@ trait Reshape {
}
private def undoMacroExpansion(tree: Tree): Tree =
- tree.attachments.get[MacroExpansionAttachment] match {
- case Some(MacroExpansionAttachment(original)) =>
+ tree.attachments.get[analyzer.MacroExpansionAttachment] match {
+ case Some(analyzer.MacroExpansionAttachment(original, _)) =>
def mkImplicitly(tp: Type) = atPos(tree.pos)(
gen.mkNullaryCall(Predef_implicitly, List(tp))
)
@@ -116,7 +106,6 @@ trait Reshape {
private def toPreTyperModifiers(mods: Modifiers, sym: Symbol) = {
if (!sym.annotations.isEmpty) {
- val Modifiers(flags, privateWithin, annotations) = mods
val postTyper = sym.annotations filter (_.original != EmptyTree)
if (reifyDebug && !postTyper.isEmpty) println("reify symbol annotations for: " + sym)
if (reifyDebug && !postTyper.isEmpty) println("originals are: " + sym.annotations)
@@ -131,8 +120,8 @@ trait Reshape {
*
* NB: This is the trickiest part of reification!
*
- * In most cases, we're perfectly fine to reify a Type itself (see ``reifyType'').
- * However if the type involves a symbol declared inside the quasiquote (i.e. registered in ``boundSyms''),
+ * In most cases, we're perfectly fine to reify a Type itself (see `reifyType`).
+ * However if the type involves a symbol declared inside the quasiquote (i.e. registered in `boundSyms`),
 * then we cannot reify it, because otherwise subsequent reflective compilation will fail.
*
* Why will it fail? Because reified deftrees (e.g. ClassDef(...)) will generate fresh symbols during that compilation,
@@ -140,7 +129,7 @@ trait Reshape {
* https://issues.scala-lang.org/browse/SI-5230
*
* To deal with this unpleasant fact, we need to fall back from types to equivalent trees (after all, parser trees don't contain any types, just trees, so it should be possible).
- * Luckily, these original trees get preserved for us in the ``original'' field when Trees get transformed into TypeTrees.
+ * Luckily, these original trees get preserved for us in the `original` field when Trees get transformed into TypeTrees.
* And if an original of a type tree is empty, we can safely assume that this type is non-essential (e.g. was inferred/generated by the compiler).
* In that case the type can be omitted (e.g. reified as an empty TypeTree), since it will be inferred again later on.
*
@@ -157,8 +146,8 @@ trait Reshape {
* upd. There are also problems with CompoundTypeTrees. I had to use attachments to retain necessary information.
*
* upd. Recently I went ahead and started using original for all TypeTrees, regardless of whether they refer to local symbols or not.
- * As a result, ``reifyType'' is never called directly by tree reification (and, wow, it seems to work great!).
- * The only usage of ``reifyType'' now is for servicing typetags, however, I have some ideas how to get rid of that as well.
+ * As a result, `reifyType` is never called directly by tree reification (and, wow, it seems to work great!).
+ * The only usage of `reifyType` now is for servicing typetags, however, I have some ideas how to get rid of that as well.
*/
private def isDiscarded(tt: TypeTree) = tt.original == null
private def toPreTyperTypeTree(tt: TypeTree): Tree = {
@@ -168,7 +157,7 @@ trait Reshape {
// if this assumption fails, please, don't be quick to add postprocessing here (like I did before)
// but rather try to fix this in Typer, so that it produces quality originals (like it's done for typedAnnotated)
if (reifyDebug) println("TypeTree, essential: %s (%s)".format(tt.tpe, tt.tpe.kind))
- if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original))
+ if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original.toString.replaceAll("\\s+", " ")))
transform(tt.original)
} else {
// type is deemed to be non-essential
@@ -182,7 +171,7 @@ trait Reshape {
private def toPreTyperCompoundTypeTree(ctt: CompoundTypeTree): Tree = {
val CompoundTypeTree(tmpl @ Template(parents, self, stats)) = ctt
if (stats.nonEmpty) CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt)
- assert(self eq emptyValDef, self)
+ assert(self eq noSelfType, self)
val att = tmpl.attachments.get[CompoundTypeTreeOriginalAttachment]
val CompoundTypeTreeOriginalAttachment(parents1, stats1) = att.getOrElse(CompoundTypeTreeOriginalAttachment(parents, stats))
CompoundTypeTree(Template(parents1, self, stats1))
@@ -232,13 +221,10 @@ trait Reshape {
val args = if (ann.assocs.isEmpty) {
ann.args
} else {
- def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = jann match {
- case LiteralAnnotArg(const) =>
- Literal(const)
- case ArrayAnnotArg(arr) =>
- Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation)
- case NestedAnnotArg(ann) =>
- toPreTyperAnnotation(ann)
+ def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = (jann: @unchecked) match {
+ case LiteralAnnotArg(const) => Literal(const)
+ case ArrayAnnotArg(arr) => Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation)
+ case NestedAnnotArg(ann) => toPreTyperAnnotation(ann)
}
ann.assocs map { case (nme, arg) => AssignOrNamedArg(Ident(nme), toScalaAnnotation(arg)) }
@@ -255,9 +241,9 @@ trait Reshape {
case _ => rhs // unit or trait case
}
val DefDef(mods0, name0, _, _, tpt0, rhs0) = ddef
- val name1 = nme.dropLocalSuffix(name0)
+ val name1 = name0.dropLocal
val Modifiers(flags0, privateWithin0, annotations0) = mods0
- var flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
+ val flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions
val mods2 = toPreTyperModifiers(mods1, ddef.symbol)
ValDef(mods2, name1, tpt0, extractRhs(rhs0))
@@ -272,9 +258,11 @@ trait Reshape {
def detectBeanAccessors(prefix: String): Unit = {
if (defdef.name.startsWith(prefix)) {
- var name = defdef.name.toString.substring(prefix.length)
+ val name = defdef.name.toString.substring(prefix.length)
def uncapitalize(s: String) = if (s.length == 0) "" else { val chars = s.toCharArray; chars(0) = chars(0).toLower; new String(chars) }
- def findValDef(name: String) = (symdefs.values collect { case vdef: ValDef if nme.dropLocalSuffix(vdef.name).toString == name => vdef }).headOption
+ def findValDef(name: String) = symdefs.values collectFirst {
+ case vdef: ValDef if vdef.name.dropLocal string_== name => vdef
+ }
val valdef = findValDef(name).orElse(findValDef(uncapitalize(name))).orNull
if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
}
@@ -282,13 +270,13 @@ trait Reshape {
detectBeanAccessors("get")
detectBeanAccessors("set")
detectBeanAccessors("is")
- });
+ })
- var stats1 = stats flatMap {
+ val stats1 = stats flatMap {
case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy =>
val mods1 = if (accessors.contains(vdef)) {
val ddef = accessors(vdef)(0) // any accessor will do
- val Modifiers(flags, privateWithin, annotations) = mods
+ val Modifiers(flags, _, annotations) = mods
var flags1 = flags & ~LOCAL
if (!ddef.symbol.isPrivate) flags1 = flags1 & ~PRIVATE
val privateWithin1 = ddef.mods.privateWithin
@@ -298,8 +286,8 @@ trait Reshape {
mods
}
val mods2 = toPreTyperModifiers(mods1, vdef.symbol)
- val name1 = nme.dropLocalSuffix(name)
- val vdef1 = ValDef(mods2, name1, tpt, rhs)
+ val name1 = name.dropLocal
+ val vdef1 = ValDef(mods2, name1.toTermName, tpt, rhs)
if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1))
Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync
case ddef: DefDef if !ddef.mods.isLazy =>
@@ -331,7 +319,8 @@ trait Reshape {
case Some(ddef) =>
toPreTyperLazyVal(ddef)
case None =>
- CannotReifyInvalidLazyVal(vdef)
+ if (reifyDebug) println("couldn't find corresponding lazy val accessor")
+ vdef
}
if (reifyDebug) println(s"reconstructed lazy val is $vdef1")
vdef1::Nil
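
One user-visible consequence of the original-based strategy described in the comments above: explicit type annotations in reified code survive as syntactic trees rather than as reified Types, so the subsequent reflective compilation can re-typecheck them from scratch. A small hedged sketch (scala-reflect assumed on the classpath):

  import scala.reflect.runtime.universe._

  // The annotation `List[Int]` below is preserved as the TypeTree's original,
  // i.e. as plain syntax, in the reified result.
  val e: Expr[Int] = reify { val xs: List[Int] = List(1, 2); xs.head }
  println(show(e.tree))
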
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index 59cd4e5047..d052127956 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -11,7 +11,7 @@ trait Extractors {
// Example of a reified tree for `reify(List(1, 2))`:
// (also contains an example of a reified type as a third argument to the constructor of Expr)
// {
- // val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
+ // val $u: scala.reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
// val $m: $u.Mirror = $u.runtimeMirror(Test.this.getClass().getClassLoader());
// $u.Expr[List[Int]]($m, {
// final class $treecreator1 extends scala.reflect.api.TreeCreator {
@@ -75,12 +75,12 @@ trait Extractors {
newTypeName(global.currentUnit.fresh.newName(flavor.toString)),
List(),
Template(List(Ident(reifierBase)),
- emptyValDef,
+ noSelfType,
List(
DefDef(NoMods, nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
DefDef(NoMods,
reifierName,
- List(TypeDef(Modifiers(PARAM), tparamu, List(), TypeBoundsTree(Ident(NothingClass), CompoundTypeTree(Template(List(Ident(reifierUniverse), Ident(SingletonClass)), emptyValDef, List()))))),
+ List(TypeDef(Modifiers(PARAM), tparamu, List(), TypeBoundsTree(Ident(NothingClass), CompoundTypeTree(Template(List(Ident(reifierUniverse), Ident(SingletonClass)), noSelfType, List()))))),
List(List(ValDef(Modifiers(PARAM), nme.MIRROR_UNTYPED, AppliedTypeTree(Ident(MirrorClass), List(Ident(tparamu))), EmptyTree))),
reifierTpt, reifierBody))))
Block(tpec, ApplyConstructor(Ident(tpec.name), List()))
@@ -164,6 +164,16 @@ trait Extractors {
}
}
+ // abstract over possible additional .apply select
+ // which is sometimes inserted after desugaring of calls
+ object ApplyCall {
+ def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match {
+ case Apply(Select(id, nme.apply), args) => Some((id, args))
+ case Apply(id, args) => Some((id, args))
+ case _ => None
+ }
+ }
+
sealed abstract class FreeDefExtractor(acceptTerms: Boolean, acceptTypes: Boolean) {
def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = {
def acceptFreeTermFactory(name: Name) = {
@@ -175,11 +185,11 @@ trait Extractors {
ValDef(_, name, _, Apply(
Select(Select(uref1 @ Ident(_), build1), freeTermFactory),
_ :+
- Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))) :+
+ ApplyCall(Select(Select(uref2 @ Ident(_), build2), flagsRepr), List(Literal(Constant(flags: Long)))) :+
Literal(Constant(origin: String))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && acceptFreeTermFactory(freeTermFactory) &&
- uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, reifyBinding(tree), flags, origin)
+ uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsRepr == nme.FlagsRepr =>
+ Some((uref1, name, reifyBinding(tree), flags, origin))
case _ =>
None
}
@@ -208,10 +218,10 @@ trait Extractors {
_,
_,
_,
- Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
+ ApplyCall(Select(Select(uref2 @ Ident(_), build2), flagsRepr), List(Literal(Constant(flags: Long)))),
Literal(Constant(isClass: Boolean)))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newNestedSymbol == nme.newNestedSymbol &&
- uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
+ uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsRepr == nme.FlagsRepr =>
Some((uref1, name, flags, isClass))
case _ =>
None
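
The ApplyCall extractor added above is a small normalization trick: treat `f(args)` and its desugared `f.apply(args)` form as the same call shape before matching further. A standalone toy model of the same idea (hypothetical mini Tree ADT, not the compiler's):

  sealed trait Tree
  final case class Ident(name: String) extends Tree
  final case class Select(qual: Tree, name: String) extends Tree
  final case class Apply(fun: Tree, args: List[Tree]) extends Tree

  object ApplyCall {
    def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match {
      case Apply(Select(id, "apply"), args) => Some((id, args)) // desugared f.apply(args)
      case Apply(id, args)                  => Some((id, args)) // plain f(args)
      case _                                => None
    }
  }

  // Both call shapes normalize to the same (callee, args) pair:
  // ApplyCall.unapply(Apply(Select(Ident("f"), "apply"), Nil)) ==
  // ApplyCall.unapply(Apply(Ident("f"), Nil))
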
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index aca18c7df7..e37b861461 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -11,8 +11,6 @@ trait NodePrinters {
self: Utils =>
import global._
- import definitions._
- import Flag._
object reifiedNodeToString extends (Tree => String) {
def apply(tree: Tree): String = {
@@ -25,8 +23,8 @@ trait NodePrinters {
// Rolling a full-fledged, robust TreePrinter would be several times more code.
// Also as of late we have tests that ensure that UX won't be broken by random changes to the reifier.
val lines = (tree.toString.split(EOL) drop 1 dropRight 1).toList splitAt 2
- var (List(universe, mirror), reification) = lines
- reification = (for (line <- reification) yield {
+ val (List(universe, mirror), reification0) = lines
+ val reification = (for (line <- reification0) yield {
var s = line substring 2
s = s.replace(nme.UNIVERSE_PREFIX.toString, "")
s = s.replace(".apply", "")
@@ -34,12 +32,12 @@ trait NodePrinters {
s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List")
s = "List\\[.*?\\]".r.replaceAllIn(s, "List")
s = s.replace("immutable.this.Nil", "List()")
- s = """build\.flagsFromBits\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
+ s = """build\.FlagsRepr\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
flagsAreUsed = true
show(m.group(1).toLong)
})
- s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
- s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
+ s = s.replace("Modifiers(0L, TypeName(\"\"), List())", "Modifiers()")
+ s = """Modifiers\((\d+)[lL], TypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
val buf = new scala.collection.mutable.ListBuffer[String]
val annotations = m.group(3)
@@ -48,7 +46,7 @@ trait NodePrinters {
val privateWithin = "" + m.group(2)
if (buf.nonEmpty || privateWithin != "")
- buf.append("newTypeName(\"" + privateWithin + "\")")
+ buf.append("TypeName(\"" + privateWithin + "\")")
val bits = m.group(1)
if (buf.nonEmpty || bits != "0L") {
@@ -73,14 +71,14 @@ trait NodePrinters {
s.trim
})
- val printout = scala.collection.mutable.ListBuffer[String]();
+ val printout = scala.collection.mutable.ListBuffer[String]()
printout += universe.trim
if (mirrorIsUsed) printout += mirror.replace("Mirror[", "scala.reflect.api.Mirror[").trim
- val imports = scala.collection.mutable.ListBuffer[String]();
- imports += nme.UNIVERSE_SHORT
+ val imports = scala.collection.mutable.ListBuffer[String]()
+ imports += nme.UNIVERSE_SHORT.toString
// if (buildIsUsed) imports += nme.build
- if (mirrorIsUsed) imports += nme.MIRROR_SHORT
- if (flagsAreUsed) imports += nme.Flag
+ if (mirrorIsUsed) imports += nme.MIRROR_SHORT.toString
+ if (flagsAreUsed) imports += nme.Flag.toString
printout += s"""import ${imports map (_ + "._") mkString ", "}"""
val name = if (isExpr) "tree" else "tpe"
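
The printer above deliberately works on the textual rendering of the reified tree, applying regex rewrites such as the FlagsRepr and Modifiers substitutions. An isolated illustration of that style of rewrite (the sample string and placeholder are made up):

  val sample = """Modifiers(build.FlagsRepr(16L), TypeName(""), List())"""
  // Extract the numeric flag bits, the way the replaceAllIn call above feeds them to `show`.
  val rewritten = """build\.FlagsRepr\((\d+)[lL]\)""".r
    .replaceAllIn(sample, m => s"<flags:${m.group(1)}>")
  // rewritten == "Modifiers(<flags:16>, TypeName(\"\"), List())"
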
diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
index dbb0836e0a..5f8de9894f 100644
--- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
+++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
@@ -8,8 +8,6 @@ trait SymbolTables {
self: Utils =>
import global._
- import definitions._
- import Flag._
class SymbolTable private[SymbolTable] (
private[SymbolTable] val symtab: immutable.ListMap[Symbol, Tree] = immutable.ListMap[Symbol, Tree](),
@@ -17,9 +15,6 @@ trait SymbolTables {
private[SymbolTable] val original: Option[List[Tree]] = None) {
def syms: List[Symbol] = symtab.keys.toList
- def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined)
-
-// def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2))
def symDef(sym: Symbol): Tree =
symtab.getOrElse(sym, EmptyTree)
@@ -89,11 +84,6 @@ trait SymbolTables {
add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment)
}
- private def add(sym: Symbol, name: TermName): SymbolTable = {
- if (!(syms contains sym)) error("cannot add an alias to a symbol not in the symbol table")
- add(sym, name, EmptyTree)
- }
-
private def remove(sym: Symbol): SymbolTable = {
val newSymtab = symtab - sym
val newAliases = aliases filter (_._1 != sym)
@@ -107,7 +97,7 @@ trait SymbolTables {
newSymtab = newSymtab map { case ((sym, tree)) =>
val ValDef(mods, primaryName, tpt, rhs) = tree
val tree1 =
- if (!(newAliases contains (sym, primaryName))) {
+ if (!(newAliases contains ((sym, primaryName)))) {
val primaryName1 = newAliases.find(_._1 == sym).get._2
ValDef(mods, primaryName1, tpt, rhs).copyAttrs(tree)
} else tree
@@ -143,7 +133,7 @@ trait SymbolTables {
var result = new SymbolTable(original = Some(encoded))
encoded foreach (entry => (entry.attachments.get[ReifyBindingAttachment], entry.attachments.get[ReifyAliasAttachment]) match {
case (Some(ReifyBindingAttachment(_)), _) => result += entry
- case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ (sym, alias))
+ case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ ((sym, alias)))
case _ => // do nothing, this is boilerplate that can easily be recreated by subsequent `result.encode`
})
result
@@ -214,4 +204,4 @@ trait SymbolTables {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala
index 255efe55ec..3c1bc8cad9 100644
--- a/src/compiler/scala/tools/ant/Pack200Task.scala
+++ b/src/compiler/scala/tools/ant/Pack200Task.scala
@@ -65,7 +65,7 @@ class Pack200Task extends ScalaMatchingTask {
/** Set the flag to specify if file reordering should be performed. Reordering
* is used to remove empty packages and improve pack200 optimization.
- * @param keep
+ * @param x
* `'''true'''` to retain file ordering.
* `'''false'''` to optimize directory structure (DEFAULT). */
def setKeepFileOrder(x: Boolean) { keepFileOrder = x }
@@ -99,8 +99,8 @@ class Pack200Task extends ScalaMatchingTask {
private def getFileList: List[File] = {
var files: List[File] = Nil
val fs = getImplicitFileSet
- var ds = fs.getDirectoryScanner(getProject())
- var dir = fs.getDir(getProject())
+ val ds = fs.getDirectoryScanner(getProject())
+ val dir = fs.getDir(getProject())
for (filename <- ds.getIncludedFiles()
if filename.toLowerCase.endsWith(".jar")) {
val file = new File(dir, filename)
diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala
index e53679f052..6036b238b6 100644
--- a/src/compiler/scala/tools/ant/Same.scala
+++ b/src/compiler/scala/tools/ant/Same.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.tools.ant
+package scala
+package tools.ant
import java.io.{File, FileInputStream}
@@ -32,7 +33,7 @@ import org.apache.tools.ant.types.Mapper
*
* @author Gilles Dubochet
* @version 1.0 */
-class Same extends ScalaMatchingTask {
+@deprecated("Use diff", "2.11.0") class Same extends ScalaMatchingTask {
/*============================================================================*\
** Ant user-properties **
\*============================================================================*/
@@ -110,7 +111,7 @@ class Same extends ScalaMatchingTask {
\*============================================================================*/
override def execute() = {
- validateAttributes
+ validateAttributes()
val mapper = getMapper
allEqualNow = true
val originNames: Array[String] = getDirectoryScanner(origin.get).getIncludedFiles
diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala
index 57d24f6213..bb6a933d3f 100644
--- a/src/compiler/scala/tools/ant/ScalaTool.scala
+++ b/src/compiler/scala/tools/ant/ScalaTool.scala
@@ -108,7 +108,7 @@ class ScalaTool extends ScalaMatchingTask {
* for general purpose scripts, as this does not assume all elements are
* relative to the Ant `basedir`. Additionally, the platform specific
* demarcation of any script variables (e.g. `${SCALA_HOME}` or
- * `%SCALA_HOME%`) can be specified in a platform independant way (e.g.
+ * `%SCALA_HOME%`) can be specified in a platform independent way (e.g.
* `@SCALA_HOME@`) and automatically translated for you.
*/
def setClassPath(input: String) {
@@ -139,7 +139,7 @@ class ScalaTool extends ScalaMatchingTask {
val st = s.trim
val stArray = st.split("=", 2)
if (stArray.length == 2) {
- if (input != "") List(Pair(stArray(0), stArray(1))) else Nil
+ if (input != "") List((stArray(0), stArray(1))) else Nil
}
else
buildError("Property " + st + " is not formatted properly.")
@@ -170,7 +170,7 @@ class ScalaTool extends ScalaMatchingTask {
private def getProperties: String =
properties.map({
- case Pair(name,value) => "-D" + name + "=\"" + value + "\""
+ case (name,value) => "-D" + name + "=\"" + value + "\""
}).mkString("", " ", "")
/*============================================================================*\
@@ -190,13 +190,13 @@ class ScalaTool extends ScalaMatchingTask {
val builder = new StringBuilder()
while (chars.hasNext) {
- val char = chars.next
+ val char = chars.next()
if (char == '@') {
- var char = chars.next
+ var char = chars.next()
val token = new StringBuilder()
while (chars.hasNext && char != '@') {
token.append(char)
- char = chars.next
+ char = chars.next()
}
if (token.toString == "")
builder.append('@')
@@ -212,13 +212,13 @@ class ScalaTool extends ScalaMatchingTask {
val builder = new StringBuilder()
while (chars.hasNext) {
- val char = chars.next
+ val char = chars.next()
if (char == '@') {
- var char = chars.next
+ var char = chars.next()
val token = new StringBuilder()
while (chars.hasNext && char != '@') {
token.append(char)
- char = chars.next
+ char = chars.next()
}
if (tokens.contains(token.toString))
builder.append(tokens(token.toString))
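
For reference, the @token@ substitution implemented by the two hand-rolled loops above can be sketched with a single regex. This is a hedged equivalent only: it assumes a tokens map, treats `@@` as a literal '@', and leaves unknown tokens untouched, a case the excerpt above does not fully specify:

  import scala.util.matching.Regex

  def substitute(tokens: Map[String, String], text: String): String =
    "@([^@]*)@".r.replaceAllIn(text, m =>
      if (m.group(1).isEmpty) "@"  // "@@" escapes a literal '@'
      else Regex.quoteReplacement(tokens.getOrElse(m.group(1), m.matched)))
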
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 73d09e82ba..1747405f03 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -55,8 +55,6 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
* - `usejavacp`,
* - `failonerror`,
* - `scalacdebugging`,
- * - `assemname`,
- * - `assemrefs`.
*
* It also takes the following parameters as nested elements:
* - `src` (for `srcdir`),
@@ -93,13 +91,13 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
val values = List("namer", "typer", "pickler", "refchecks",
"uncurry", "tailcalls", "specialize", "explicitouter",
"erasure", "lazyvals", "lambdalift", "constructors",
- "flatten", "mixin", "cleanup", "icode", "inliner",
+ "flatten", "mixin", "delambdafy", "cleanup", "icode", "inliner",
"closelim", "dce", "jvm", "terminal")
}
/** Defines valid values for the `target` property. */
object Target extends PermissibleValue {
- val values = List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil")
+ val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7")
}
/** Defines valid values for the `deprecation` and `unchecked` properties. */
@@ -169,11 +167,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Indicates whether compilation errors will fail the build; defaults to true. */
protected var failonerror: Boolean = true
- // Name of the output assembly (only relevant with -target:msil)
- protected var assemname: Option[String] = None
- // List of assemblies referenced by the program (only relevant with -target:msil)
- protected var assemrefs: Option[String] = None
-
/** Prints out the files being compiled by the scalac ant task
* (not only the number of files). */
protected var scalacDebugging: Boolean = false
@@ -420,9 +413,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
* @param input The specified flag */
def setScalacdebugging(input: Boolean) { scalacDebugging = input }
- def setAssemname(input: String) { assemname = Some(input) }
- def setAssemrefs(input: String) { assemrefs = Some(input) }
-
/** Sets the `compilerarg` as a nested compilerarg Ant parameter.
* @return A compiler argument to be configured. */
def createCompilerArg(): ImplementationSpecificArgument = {
@@ -505,7 +495,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
path.map(asString) mkString File.pathSeparator
/** Transforms a file into a Scalac-readable string.
- * @param path A file to convert.
+ * @param file A file to convert.
* @return A string-representation of the file like `/x/k/a.scala`. */
protected def asString(file: File): String =
file.getAbsolutePath()
@@ -518,7 +508,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
new Settings(error)
protected def newGlobal(settings: Settings, reporter: Reporter) =
- new Global(settings, reporter)
+ Global(settings, reporter)
/*============================================================================*\
** The big execute method **
@@ -612,9 +602,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
if (!unchecked.isEmpty) settings.unchecked.value = unchecked.get
if (!usejavacp.isEmpty) settings.usejavacp.value = usejavacp.get
- if (!assemname.isEmpty) settings.assemname.value = assemname.get
- if (!assemrefs.isEmpty) settings.assemrefs.value = assemrefs.get
-
val jvmargs = scalacCompilerArgs.getArgs filter (_ startsWith "-J")
if (!jvmargs.isEmpty) settings.jvmargs.value = jvmargs.toList
val defines = scalacCompilerArgs.getArgs filter (_ startsWith "-D")
@@ -685,7 +672,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
file
}
- val res = execWithArgFiles(java, List(writeSettings.getAbsolutePath))
+ val res = execWithArgFiles(java, List(writeSettings().getAbsolutePath))
if (failonerror && res != 0)
buildError("Compilation failed because of an internal compiler error;"+
" see the error output for details.")
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
deleted file mode 100644
index 7fc811788e..0000000000
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ /dev/null
@@ -1,695 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.ant
-
-import java.io.File
-
-import org.apache.tools.ant.Project
-import org.apache.tools.ant.types.{Path, Reference}
-import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper}
-
-import scala.tools.nsc.Global
-import scala.tools.nsc.doc.Settings
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-
-/** An Ant task to document Scala code.
- *
- * This task can take the following parameters as attributes:
- * - `srcdir` (mandatory),
- * - `srcref`,
- * - `destdir`,
- * - `classpath`,
- * - `classpathref`,
- * - `sourcepath`,
- * - `sourcepathref`,
- * - `bootclasspath`,
- * - `bootclasspathref`,
- * - `extdirs`,
- * - `extdirsref`,
- * - `encoding`,
- * - `doctitle`,
- * - `header`,
- * - `footer`,
- * - `top`,
- * - `bottom`,
- * - `addparams`,
- * - `deprecation`,
- * - `docgenerator`,
- * - `docrootcontent`,
- * - `unchecked`,
- * - `nofail`,
- * - `skipPackages`.
- *
- * It also takes the following parameters as nested elements:
- * - `src` (for srcdir),
- * - `classpath`,
- * - `sourcepath`,
- * - `bootclasspath`,
- * - `extdirs`.
- *
- * @author Gilles Dubochet, Stephane Micheloud
- */
-class Scaladoc extends ScalaMatchingTask {
-
- /** The unique Ant file utilities instance to use in this task. */
- private val fileUtils = FileUtils.getFileUtils()
-
-/*============================================================================*\
-** Ant user-properties **
-\*============================================================================*/
-
- abstract class PermissibleValue {
- val values: List[String]
- def isPermissible(value: String): Boolean =
- (value == "") || values.exists(_.startsWith(value))
- }
-
- /** Defines valid values for the `deprecation` and
- * `unchecked` properties.
- */
- object Flag extends PermissibleValue {
- val values = List("yes", "no", "on", "off")
- def getBooleanValue(value: String, flagName: String): Boolean =
- if (Flag.isPermissible(value))
- return ("yes".equals(value) || "on".equals(value))
- else
- buildError("Unknown " + flagName + " flag '" + value + "'")
- }
-
- /** The directories that contain source files to compile. */
- private var origin: Option[Path] = None
- /** The directory to put the compiled files in. */
- private var destination: Option[File] = None
-
- /** The class path to use for this compilation. */
- private var classpath: Option[Path] = None
- /** The source path to use for this compilation. */
- private var sourcepath: Option[Path] = None
- /** The boot class path to use for this compilation. */
- private var bootclasspath: Option[Path] = None
- /** The external extensions path to use for this compilation. */
- private var extdirs: Option[Path] = None
-
- /** The character encoding of the files to compile. */
- private var encoding: Option[String] = None
-
- /** The fully qualified name of a doclet class, which will be used to generate the documentation. */
- private var docgenerator: Option[String] = None
-
- /** The file from which the documentation content of the root package will be taken */
- private var docrootcontent: Option[File] = None
-
- /** The document title of the generated HTML documentation. */
- private var doctitle: Option[String] = None
-
- /** The document footer of the generated HTML documentation. */
- private var docfooter: Option[String] = None
-
- /** The document version, to be added to the title. */
- private var docversion: Option[String] = None
-
- /** Instruct the compiler to generate links to sources */
- private var docsourceurl: Option[String] = None
-
- /** Point scaladoc at uncompilable sources. */
- private var docUncompilable: Option[String] = None
-
- /** Instruct the compiler to use additional parameters */
- private var addParams: String = ""
-
- /** Instruct the compiler to generate deprecation information. */
- private var deprecation: Boolean = false
-
- /** Instruct the compiler to generate unchecked information. */
- private var unchecked: Boolean = false
-
- /** Instruct the ant task not to fail in the event of errors */
- private var nofail: Boolean = false
-
- /** Instruct the scaladoc tool to document implicit conversions */
- private var docImplicits: Boolean = false
-
- /** Instruct the scaladoc tool to document all (including impossible) implicit conversions */
- private var docImplicitsShowAll: Boolean = false
-
- /** Instruct the scaladoc tool to output implicits debugging information */
- private var docImplicitsDebug: Boolean = false
-
- /** Instruct the scaladoc tool to create diagrams */
- private var docDiagrams: Boolean = false
-
- /** Instruct the scaladoc tool to output diagram creation debugging information */
- private var docDiagramsDebug: Boolean = false
-
- /** Instruct the scaladoc tool to use the binary given to create diagrams */
- private var docDiagramsDotPath: Option[String] = None
-
- /** Instruct the scaladoc to produce textual ouput from html pages, for easy diff-ing */
- private var docRawOutput: Boolean = false
-
- /** Instruct the scaladoc not to generate prefixes */
- private var docNoPrefixes: Boolean = false
-
- /** Instruct the scaladoc tool to group similar functions together */
- private var docGroups: Boolean = false
-
- /** Instruct the scaladoc tool to skip certain packages */
- private var docSkipPackages: String = ""
-
-/*============================================================================*\
-** Properties setters **
-\*============================================================================*/
-
- /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]].
- *
- * @param input The value of `origin`.
- */
- def setSrcdir(input: Path) {
- if (origin.isEmpty) origin = Some(input)
- else origin.get.append(input)
- }
-
- /** Sets the `origin` as a nested src Ant parameter.
- *
- * @return An origin path to be configured.
- */
- def createSrc(): Path = {
- if (origin.isEmpty) origin = Some(new Path(getProject))
- origin.get.createPath()
- }
-
- /** Sets the `origin` as an external reference Ant parameter.
- *
- * @param input A reference to an origin path.
- */
- def setSrcref(input: Reference) {
- createSrc().setRefid(input)
- }
-
- /** Sets the `destdir` attribute. Used by [[http://ant.apache.org Ant]].
- *
- * @param input The value of `destination`.
- */
- def setDestdir(input: File) {
- destination = Some(input)
- }
-
- /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]].
- *
- * @param input The value of `classpath`.
- */
- def setClasspath(input: Path) {
- if (classpath.isEmpty) classpath = Some(input)
- else classpath.get.append(input)
- }
-
- /** Sets the `classpath` as a nested classpath Ant parameter.
- *
- * @return A class path to be configured.
- */
- def createClasspath(): Path = {
- if (classpath.isEmpty) classpath = Some(new Path(getProject))
- classpath.get.createPath()
- }
-
- /** Sets the `classpath` as an external reference Ant parameter.
- *
- * @param input A reference to a class path.
- */
- def setClasspathref(input: Reference) =
- createClasspath().setRefid(input)
-
- /** Sets the `sourcepath` attribute. Used by [[http://ant.apache.org Ant]].
- *
- * @param input The value of `sourcepath`.
- */
- def setSourcepath(input: Path) =
- if (sourcepath.isEmpty) sourcepath = Some(input)
- else sourcepath.get.append(input)
-
- /** Sets the `sourcepath` as a nested sourcepath Ant parameter.
- *
- * @return A source path to be configured.
- */
- def createSourcepath(): Path = {
- if (sourcepath.isEmpty) sourcepath = Some(new Path(getProject))
- sourcepath.get.createPath()
- }
-
- /** Sets the `sourcepath` as an external reference Ant parameter.
- *
- * @param input A reference to a source path.
- */
- def setSourcepathref(input: Reference) =
- createSourcepath().setRefid(input)
-
- /** Sets the `bootclasspath` attribute. Used by [[http://ant.apache.org Ant]].
- *
- * @param input The value of `bootclasspath`.
- */
- def setBootclasspath(input: Path) =
- if (bootclasspath.isEmpty) bootclasspath = Some(input)
- else bootclasspath.get.append(input)
-
- /** Sets the `bootclasspath` as a nested `sourcepath` Ant parameter.
- *
- * @return A source path to be configured.
- */
- def createBootclasspath(): Path = {
- if (bootclasspath.isEmpty) bootclasspath = Some(new Path(getProject))
- bootclasspath.get.createPath()
- }
-
- /** Sets the `bootclasspath` as an external reference Ant parameter.
- *
- * @param input A reference to a source path.
- */
- def setBootclasspathref(input: Reference) {
- createBootclasspath().setRefid(input)
- }
-
- /** Sets the external extensions path attribute. Used by [[http://ant.apache.org Ant]].
- *
- * @param input The value of `extdirs`.
- */
- def setExtdirs(input: Path) {
- if (extdirs.isEmpty) extdirs = Some(input)
- else extdirs.get.append(input)
- }
-
- /** Sets the `extdirs` as a nested sourcepath Ant parameter.
- *
- * @return An extensions path to be configured.
- */
- def createExtdirs(): Path = {
- if (extdirs.isEmpty) extdirs = Some(new Path(getProject))
- extdirs.get.createPath()
- }
-
- /** Sets the `extdirs` as an external reference Ant parameter.
- *
- * @param input A reference to an extensions path.
- */
- def setExtdirsref(input: Reference) {
- createExtdirs().setRefid(input)
- }
-
- /** Sets the `encoding` attribute. Used by Ant.
- *
- * @param input The value of `encoding`.
- */
- def setEncoding(input: String) {
- encoding = Some(input)
- }
-
- /** Sets the `docgenerator` attribute.
- *
- * @param input A fully qualified class name of a doclet.
- */
- def setDocgenerator(input: String) {
- docgenerator = Some(input)
- }
-
- /**
- * Sets the `docrootcontent` attribute.
- *
- * @param input The file from which the documentation content of the root
- * package will be taken.
- */
- def setDocrootcontent(input : File) {
- docrootcontent = Some(input)
- }
-
- /** Sets the `docversion` attribute.
- *
- * @param input The value of `docversion`.
- */
- def setDocversion(input: String) {
- docversion = Some(input)
- }
-
- /** Sets the `docsourceurl` attribute.
- *
- * @param input The value of `docsourceurl`.
- */
- def setDocsourceurl(input: String) {
- docsourceurl = Some(input)
- }
-
- /** Sets the `doctitle` attribute.
- *
- * @param input The value of `doctitle`.
- */
- def setDoctitle(input: String) {
- doctitle = Some(input)
- }
-
- /** Sets the `docfooter` attribute.
- *
- * @param input The value of `docfooter`.
- */
- def setDocfooter(input: String) {
- docfooter = Some(input)
- }
-
- /** Set the `addparams` info attribute.
- *
- * @param input The value for `addparams`.
- */
- def setAddparams(input: String) {
- addParams = input
- }
-
- /** Set the `deprecation` info attribute.
- *
- * @param input One of the flags `yes/no` or `on/off`.
- */
- def setDeprecation(input: String) {
- if (Flag.isPermissible(input))
- deprecation = "yes".equals(input) || "on".equals(input)
- else
- buildError("Unknown deprecation flag '" + input + "'")
- }
-
- /** Set the `unchecked` info attribute.
- *
- * @param input One of the flags `yes/no` or `on/off`.
- */
- def setUnchecked(input: String) {
- if (Flag.isPermissible(input))
- unchecked = "yes".equals(input) || "on".equals(input)
- else
- buildError("Unknown unchecked flag '" + input + "'")
- }
-
- def setDocUncompilable(input: String) {
- docUncompilable = Some(input)
- }
-
- /** Set the `nofail` info attribute.
- *
- * @param input One of the flags `yes/no` or `on/off`. Default if no/off.
- */
- def setNoFail(input: String) =
- nofail = Flag.getBooleanValue(input, "nofail")
-
- /** Set the `implicits` info attribute.
- * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
- def setImplicits(input: String) =
- docImplicits = Flag.getBooleanValue(input, "implicits")
-
- /** Set the `implicitsShowAll` info attribute to enable scaladoc to show all implicits, including those impossible to
- * convert to from the default scope
- * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
- def setImplicitsShowAll(input: String) =
- docImplicitsShowAll = Flag.getBooleanValue(input, "implicitsShowAll")
-
- /** Set the `implicitsDebug` info attribute so scaladoc outputs implicit conversion debug information
- * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
- def setImplicitsDebug(input: String) =
- docImplicitsDebug = Flag.getBooleanValue(input, "implicitsDebug")
-
- /** Set the `diagrams` bit so Scaladoc adds diagrams to the documentation
- * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
- def setDiagrams(input: String) =
- docDiagrams = Flag.getBooleanValue(input, "diagrams")
-
- /** Set the `diagramsDebug` bit so Scaladoc outputs diagram building debug information
- * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
- def setDiagramsDebug(input: String) =
- docDiagramsDebug = Flag.getBooleanValue(input, "diagramsDebug")
-
- /** Set the `diagramsDotPath` attribute to the path where graphviz dot can be found (including the binary file name,
- * eg: /usr/bin/dot) */
- def setDiagramsDotPath(input: String) =
- docDiagramsDotPath = Some(input)
-
- /** Set the `rawOutput` bit so Scaladoc also outputs text from each html file
- * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
- def setRawOutput(input: String) =
- docRawOutput = Flag.getBooleanValue(input, "rawOutput")
-
- /** Set the `noPrefixes` bit to prevent Scaladoc from generating prefixes in
- * front of types -- may lead to confusion, but significantly speeds up the generation.
- * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
- def setNoPrefixes(input: String) =
- docNoPrefixes = Flag.getBooleanValue(input, "noPrefixes")
-
- /** Instruct the scaladoc tool to group similar functions together */
- def setGroups(input: String) =
- docGroups = Flag.getBooleanValue(input, "groups")
-
- /** Instruct the scaladoc tool to skip certain packages.
- * @param input A colon-delimited list of fully qualified package names that will be skipped from scaladoc.
- */
- def setSkipPackages(input: String) =
- docSkipPackages = input
-
-/*============================================================================*\
-** Properties getters **
-\*============================================================================*/
-
- /** Gets the value of the `classpath` attribute in a
- * Scala-friendly form.
- *
- * @return The class path as a list of files.
- */
- private def getClasspath: List[File] =
- if (classpath.isEmpty) buildError("Member 'classpath' is empty.")
- else classpath.get.list().toList map nameToFile
-
- /** Gets the value of the `origin` attribute in a Scala-friendly
- * form.
- *
- * @return The origin path as a list of files.
- */
- private def getOrigin: List[File] =
- if (origin.isEmpty) buildError("Member 'origin' is empty.")
- else origin.get.list().toList map nameToFile
-
- /** Gets the value of the `destination` attribute in a
- * Scala-friendly form.
- *
- * @return The destination as a file.
- */
- private def getDestination: File =
- if (destination.isEmpty) buildError("Member 'destination' is empty.")
- else existing(getProject resolveFile destination.get.toString)
-
- /** Gets the value of the `sourcepath` attribute in a
- * Scala-friendly form.
- *
- * @return The source path as a list of files.
- */
- private def getSourcepath: List[File] =
- if (sourcepath.isEmpty) buildError("Member 'sourcepath' is empty.")
- else sourcepath.get.list().toList map nameToFile
-
- /** Gets the value of the `bootclasspath` attribute in a
- * Scala-friendly form.
- *
- * @return The boot class path as a list of files.
- */
- private def getBootclasspath: List[File] =
- if (bootclasspath.isEmpty) buildError("Member 'bootclasspath' is empty.")
- else bootclasspath.get.list().toList map nameToFile
-
- /** Gets the value of the `extdirs` attribute in a
- * Scala-friendly form.
- *
- * @return The extensions path as a list of files.
- */
- private def getExtdirs: List[File] =
- if (extdirs.isEmpty) buildError("Member 'extdirs' is empty.")
- else extdirs.get.list().toList map nameToFile
-
-/*============================================================================*\
-** Compilation and support methods **
-\*============================================================================*/
-
- /** This is forwarding method to circumvent bug #281 in Scala 2. Remove when
- * bug has been corrected.
- */
- override protected def getDirectoryScanner(baseDir: java.io.File) =
- super.getDirectoryScanner(baseDir)
-
- /** Transforms a string name into a file relative to the provided base
- * directory.
- *
- * @param base A file pointing to the location relative to which the name
- * will be resolved.
- * @param name A relative or absolute path to the file as a string.
- * @return A file created from the name and the base file.
- */
- private def nameToFile(base: File)(name: String): File =
- existing(fileUtils.resolveFile(base, name))
-
- /** Transforms a string name into a file relative to the build root
- * directory.
- *
- * @param name A relative or absolute path to the file as a string.
- * @return A file created from the name.
- */
- private def nameToFile(name: String): File =
- existing(getProject resolveFile name)
-
- /** Tests if a file exists and prints a warning in case it doesn't. Always
- * returns the file, even if it doesn't exist.
- *
- * @param file A file to test for existance.
- * @return The same file.
- */
- private def existing(file: File): File = {
- if (!file.exists())
- log("Element '" + file.toString + "' does not exist.",
- Project.MSG_WARN)
- file
- }
-
- /** Transforms a path into a Scalac-readable string.
- *
- * @param path A path to convert.
- * @return A string-representation of the path like `a.jar:b.jar`.
- */
- private def asString(path: List[File]): String =
- path.map(asString).mkString("", File.pathSeparator, "")
-
- /** Transforms a file into a Scalac-readable string.
- *
- * @param path A file to convert.
- * @return A string-representation of the file like `/x/k/a.scala`.
- */
- private def asString(file: File): String =
- file.getAbsolutePath()
-
-/*============================================================================*\
-** The big execute method **
-\*============================================================================*/
-
- /** Initializes settings and source files */
- protected def initialize: Pair[Settings, List[File]] = {
- // Tests if all mandatory attributes are set and valid.
- if (origin.isEmpty) buildError("Attribute 'srcdir' is not set.")
- if (getOrigin.isEmpty) buildError("Attribute 'srcdir' is not set.")
- if (!destination.isEmpty && !destination.get.isDirectory())
- buildError("Attribute 'destdir' does not refer to an existing directory.")
- if (destination.isEmpty) destination = Some(getOrigin.head)
-
- val mapper = new GlobPatternMapper()
- mapper setTo "*.html"
- mapper setFrom "*.scala"
-
- // Scans source directories to build up a compile lists.
- // If force is false, only files were the .class file in destination is
- // older than the .scala file will be used.
- val sourceFiles: List[File] =
- for {
- originDir <- getOrigin
- originFile <- {
- val includedFiles =
- getDirectoryScanner(originDir).getIncludedFiles()
- val list = includedFiles.toList
- if (list.length > 0)
- log(
- "Documenting " + list.length + " source file" +
- (if (list.length > 1) "s" else "") +
- (" to " + getDestination.toString)
- )
- else
- log("No files selected for documentation", Project.MSG_VERBOSE)
-
- list
- }
- } yield {
- log(originFile, Project.MSG_DEBUG)
- nameToFile(originDir)(originFile)
- }
-
- def decodeEscapes(s: String): String = {
- // In Ant script characters '<' and '>' must be encoded when
- // used in attribute values, e.g. for attributes "doctitle", "header", ..
- // in task Scaladoc you may write:
- // doctitle="&lt;div&gt;Scala&lt;/div&gt;"
- // so we have to decode them here.
- s.replaceAll("&lt;", "<").replaceAll("&gt;",">")
- .replaceAll("&amp;", "&").replaceAll("&quot;", "\"")
- }
-
- // Builds-up the compilation settings for Scalac with the existing Ant
- // parameters.
- val docSettings = new Settings(buildError)
- docSettings.outdir.value = asString(destination.get)
- if (!classpath.isEmpty)
- docSettings.classpath.value = asString(getClasspath)
- if (!sourcepath.isEmpty)
- docSettings.sourcepath.value = asString(getSourcepath)
- /*else if (origin.get.size() > 0)
- settings.sourcepath.value = origin.get.list()(0)*/
- if (!bootclasspath.isEmpty)
- docSettings.bootclasspath.value = asString(getBootclasspath)
- if (!extdirs.isEmpty) docSettings.extdirs.value = asString(getExtdirs)
- if (!encoding.isEmpty) docSettings.encoding.value = encoding.get
- if (!doctitle.isEmpty) docSettings.doctitle.value = decodeEscapes(doctitle.get)
- if (!docfooter.isEmpty) docSettings.docfooter.value = decodeEscapes(docfooter.get)
- if (!docversion.isEmpty) docSettings.docversion.value = decodeEscapes(docversion.get)
- if (!docsourceurl.isEmpty) docSettings.docsourceurl.value = decodeEscapes(docsourceurl.get)
- if (!docUncompilable.isEmpty) docSettings.docUncompilable.value = decodeEscapes(docUncompilable.get)
-
- docSettings.deprecation.value = deprecation
- docSettings.unchecked.value = unchecked
- docSettings.docImplicits.value = docImplicits
- docSettings.docImplicitsDebug.value = docImplicitsDebug
- docSettings.docImplicitsShowAll.value = docImplicitsShowAll
- docSettings.docDiagrams.value = docDiagrams
- docSettings.docDiagramsDebug.value = docDiagramsDebug
- docSettings.docRawOutput.value = docRawOutput
- docSettings.docNoPrefixes.value = docNoPrefixes
- docSettings.docGroups.value = docGroups
- docSettings.docSkipPackages.value = docSkipPackages
- if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get
-
- if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get
- if (!docrootcontent.isEmpty) docSettings.docRootContent.value = docrootcontent.get.getAbsolutePath()
- log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG)
-
- docSettings processArgumentString addParams
- Pair(docSettings, sourceFiles)
- }
-
- def safeBuildError(message: String): Unit = if (nofail) log(message) else buildError(message)
-
- /** Performs the compilation. */
- override def execute() = {
- val Pair(docSettings, sourceFiles) = initialize
- val reporter = new ConsoleReporter(docSettings)
- try {
- val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
- docProcessor.document(sourceFiles.map (_.toString))
- if (reporter.ERROR.count > 0)
- safeBuildError(
- "Document failed with " +
- reporter.ERROR.count + " error" +
- (if (reporter.ERROR.count > 1) "s" else "") +
- "; see the documenter error output for details.")
- else if (reporter.WARNING.count > 0)
- log(
- "Document succeeded with " +
- reporter.WARNING.count + " warning" +
- (if (reporter.WARNING.count > 1) "s" else "") +
- "; see the documenter output for details.")
- reporter.printSummary()
- } catch {
- case exception: Throwable =>
- exception.printStackTrace()
- val msg = Option(exception.getMessage) getOrElse "no error message provided"
- safeBuildError(s"Document failed because of an internal documenter error ($msg); see the error output for details.")
- }
- }
-}
diff --git a/src/compiler/scala/tools/ant/antlib.xml b/src/compiler/scala/tools/ant/antlib.xml
index 78159e6d10..7885534689 100644
--- a/src/compiler/scala/tools/ant/antlib.xml
+++ b/src/compiler/scala/tools/ant/antlib.xml
@@ -11,8 +11,6 @@
classname="scala.tools.ant.Scaladoc"/>
<taskdef name="scalatool"
classname="scala.tools.ant.ScalaTool"/>
- <taskdef name="same"
- classname="scala.tools.ant.Same"/>
<taskdef name="pack200"
classname="scala.tools.ant.Pack200Task"/>
</antlib>
diff --git a/src/compiler/scala/tools/ant/sabbus/Break.scala b/src/compiler/scala/tools/ant/sabbus/Break.scala
index 0b6701b6e9..b170ceaed8 100644
--- a/src/compiler/scala/tools/ant/sabbus/Break.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Break.scala
@@ -7,7 +7,8 @@
\* */
-package scala.tools.ant.sabbus
+package scala
+package tools.ant.sabbus
import org.apache.tools.ant.Task
diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
index b1994233e8..a0aad49f20 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
@@ -27,7 +27,7 @@ object Compilers extends scala.collection.DefaultMap[String, Compiler] {
if (debug) println("Making compiler " + id)
if (debug) println(" memory before: " + freeMemoryString)
val comp = new Compiler(classpath, settings)
- container += Pair(id, comp)
+ container(id) = comp
if (debug) println(" memory after: " + freeMemoryString)
comp
}
diff --git a/src/compiler/scala/tools/ant/sabbus/Make.scala b/src/compiler/scala/tools/ant/sabbus/Make.scala
index 5274594f3d..027a828f03 100644
--- a/src/compiler/scala/tools/ant/sabbus/Make.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Make.scala
@@ -7,7 +7,8 @@
\* */
-package scala.tools.ant.sabbus
+package scala
+package tools.ant.sabbus
import java.io.File
import org.apache.tools.ant.Task
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index 9cdf484080..595b45ae51 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.tools.ant
+package scala
+package tools.ant
package sabbus
import java.io.{ File, FileWriter }
@@ -80,7 +81,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
private def createMapper() = {
val mapper = new GlobPatternMapper()
- val extension = if (isMSIL) "*.msil" else "*.class"
+ val extension = "*.class"
mapper setTo extension
mapper setFrom "*.scala"
@@ -104,9 +105,6 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
sourcePath foreach (settings.sourcepath = _)
settings.extraParams = extraArgsFlat
- if (isMSIL)
- settings.sourcedir = sourceDir
-
val mapper = createMapper()
val includedFiles: Array[File] =
@@ -117,12 +115,12 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
mapper
) map (x => new File(sourceDir, x))
- /** Nothing to do. */
+ /* Nothing to do. */
if (includedFiles.isEmpty && argfile.isEmpty)
return
if (includedFiles.nonEmpty)
- log("Compiling %d file%s to %s".format(includedFiles.size, plural(includedFiles.size), destinationDir))
+ log("Compiling %d file%s to %s".format(includedFiles.length, plural(includedFiles.length), destinationDir))
argfile foreach (x => log("Using argfile file: @" + x))
diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala
index fde61e9564..a86af73fe3 100644
--- a/src/compiler/scala/tools/ant/sabbus/Settings.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala
@@ -10,7 +10,7 @@ package scala.tools.ant.sabbus
import java.io.File
-import org.apache.tools.ant.types.{Path, Reference}
+import org.apache.tools.ant.types.Path
class Settings {
@@ -93,4 +93,18 @@ class Settings {
case _ => false
}
+ override lazy val hashCode: Int = Seq[Any](
+ gBf,
+ uncheckedBf,
+ classpathBf,
+ sourcepathBf,
+ sourcedirBf,
+ bootclasspathBf,
+ extdirsBf,
+ dBf,
+ encodingBf,
+ targetBf,
+ optimiseBf,
+ extraParamsBf
+ ).##
}
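
[Editor's note] The added hashCode appears to mirror the field-by-field equals defined just above it in Settings.scala: hashing the same buffers through Seq(...).## keeps the equals/hashCode contract and is null-safe. A minimal, self-contained sketch of the pattern, with illustrative field names rather than the sabbus ones:

    // Sketch, not part of the patch: keep hashCode consistent with a
    // field-by-field equals by hashing the same fields via Seq(...).##.
    class Conf(val classpath: String, val encoding: String, val optimise: Boolean) {
      override def equals(other: Any): Boolean = other match {
        case that: Conf =>
          classpath == that.classpath && encoding == that.encoding && optimise == that.optimise
        case _ => false
      }
      override lazy val hashCode: Int = Seq[Any](classpath, encoding, optimise).##
    }
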
diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
index 6bb1aaa306..b061bcf7fb 100644
--- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
+++ b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
@@ -98,6 +98,4 @@ trait TaskArgs extends CompilationPathProperty {
val parts = a.getParts
if(parts eq null) Seq[String]() else parts.toSeq
}
-
- def isMSIL = compTarget exists (_ == "msil")
}
diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala
index 2c97232aec..a8736f228b 100644
--- a/src/compiler/scala/tools/ant/sabbus/Use.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Use.scala
@@ -7,7 +7,8 @@
\* */
-package scala.tools.ant
+package scala
+package tools.ant
package sabbus
import java.io.File
@@ -53,9 +54,9 @@ class Use extends ScalaMatchingTask {
compiler.settings.d,
mapper
) map (new File(sourceDir.get, _))
- if (includedFiles.size > 0)
+ if (includedFiles.length > 0)
try {
- log("Compiling " + includedFiles.size + " file" + (if (includedFiles.size > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath)
+ log("Compiling " + includedFiles.length + " file" + (if (includedFiles.length > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath)
val (errors, warnings) = compiler.compile(includedFiles)
if (errors > 0)
sys.error("Compilation failed with " + errors + " error" + (if (errors > 1) "s" else "") + ".")
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index f1c6c52785..abf9925ad9 100644
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -102,6 +102,9 @@ if [[ -n "$cygwin" ]]; then
format=windows
fi
SCALA_HOME="$(cygpath --$format "$SCALA_HOME")"
+ if [[ -n "$JAVA_HOME" ]]; then
+ JAVA_HOME="$(cygpath --$format "$JAVA_HOME")"
+ fi
TOOL_CLASSPATH="$(cygpath --path --$format "$TOOL_CLASSPATH")"
elif [[ -n "$mingw" ]]; then
SCALA_HOME="$(cmd //c echo "$SCALA_HOME")"
@@ -112,7 +115,7 @@ if [[ -n "$cygwin$mingw" ]]; then
case "$TERM" in
rxvt* | xterm*)
stty -icanon min 1 -echo
- WINDOWS_OPT="-Djline.terminal=scala.tools.jline.UnixTerminal"
+ WINDOWS_OPT="-Djline.terminal=unix"
;;
esac
fi
diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala
index 75f96d3c4b..781cc564cb 100644
--- a/src/compiler/scala/tools/cmd/CommandLine.scala
+++ b/src/compiler/scala/tools/cmd/CommandLine.scala
@@ -16,27 +16,27 @@ trait CommandLineConfig {
/** An instance of a command line, parsed according to a Spec.
*/
class CommandLine(val spec: Reference, val originalArgs: List[String]) extends CommandLineConfig {
- def this(spec: Reference, line: String) = this(spec, Parser tokenize line)
+ def this(spec: Reference, line: String) = this(spec, CommandLineParser tokenize line)
def this(spec: Reference, args: Array[String]) = this(spec, args.toList)
- import spec.{ isAnyOption, isUnaryOption, isBinaryOption, isExpandOption }
+ import spec.{ isUnaryOption, isBinaryOption, isExpandOption }
val Terminator = "--"
val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
- def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
+ def mapForUnary(opt: String) = Map(fromOpt(opt) -> ValueForUnaryOption)
def errorFn(msg: String) = println(msg)
/** argMap is option -> argument (or "" if it is a unary argument)
* residualArgs are what is left after removing the options and their args.
*/
- lazy val (argMap, residualArgs) = {
+ lazy val (argMap, residualArgs): (Map[String, String], List[String]) = {
val residualBuffer = new ListBuffer[String]
def loop(args: List[String]): Map[String, String] = {
def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
- /** Returns Some(List(args)) if this option expands to an
+ /* Returns Some(List(args)) if this option expands to an
* argument list and it's not returning only the same arg.
*/
def expand(s1: String) = {
@@ -48,7 +48,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
else None
}
- /** Assumes known options have all been ruled out already. */
+ /* Assumes known options have all been ruled out already. */
def isUnknown(opt: String) =
onlyKnownOptions && (opt startsWith "-") && {
errorFn("Option '%s' not recognized.".format(opt))
@@ -72,7 +72,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
if (x2 == Terminator) mapForUnary(x1) ++ residual(xs)
else if (isUnaryOption(x1)) mapForUnary(x1) ++ loop(args.tail)
- else if (isBinaryOption(x1)) Map(x1 -> x2) ++ loop(xs)
+ else if (isBinaryOption(x1)) Map(fromOpt(x1) -> x2) ++ loop(xs)
else if (isUnknown(x1)) loop(args.tail)
else residual(List(x1)) ++ loop(args.tail)
}
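
[Editor's note] With this change the keys of argMap are normalized through fromOpt, so options are stored under their bare names regardless of whether they appeared with a leading "--" on the command line. A worksheet-style stand-in for that normalization (fromOpt is copied locally here; the real helper lives in the cmd package object, changed later in this patch):

    // Sketch, not part of the patch: option keys stripped of "--" before storage,
    // as the CommandLine change above does via fromOpt.
    def fromOpt(s: String): String = s stripPrefix "--"

    def normalize(pairs: List[(String, String)]): Map[String, String] =
      pairs.map { case (opt, value) => fromOpt(opt) -> value }.toMap

    normalize(List("--grep" -> "foo", "verbose" -> "true"))
    // expected: Map(grep -> foo, verbose -> true)
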
diff --git a/src/compiler/scala/tools/cmd/CommandLineParser.scala b/src/compiler/scala/tools/cmd/CommandLineParser.scala
new file mode 100644
index 0000000000..6132eff557
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/CommandLineParser.scala
@@ -0,0 +1,72 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+import scala.annotation.tailrec
+
+/** A simple (overly so) command line parser.
+ * !!! This needs a thorough test suite to make sure quoting is
+ * done correctly and portably.
+ */
+object CommandLineParser {
+ // splits a string into a quoted prefix and the rest of the string,
+ // taking escaping into account (using \)
+ // `"abc"def` will match as `DoubleQuoted(abc, def)`
+ private class QuotedExtractor(quote: Char) {
+ def unapply(in: String): Option[(String, String)] = {
+ val del = quote.toString
+ if (in startsWith del) {
+ var escaped = false
+ val (quoted, next) = (in substring 1) span {
+ case `quote` if !escaped => false
+ case '\\' if !escaped => escaped = true; true
+ case _ => escaped = false; true
+ }
+ // the only way to get out of the above loop is with an empty next or !escaped
+ // require(next.isEmpty || !escaped)
+ if (next startsWith del) Some((quoted, next substring 1))
+ else None
+ } else None
+ }
+ }
+ private object DoubleQuoted extends QuotedExtractor('"')
+ private object SingleQuoted extends QuotedExtractor('\'')
+ private val Word = """(\S+)(.*)""".r
+
+ // parse `in` for an argument, return it and the remainder of the input (or an error message)
+ // (argument may be in single/double quotes, taking escaping into account, quotes are stripped)
+ private def argument(in: String): Either[String, (String, String)] = in match {
+ case DoubleQuoted(arg, rest) => Right((arg, rest))
+ case SingleQuoted(arg, rest) => Right((arg, rest))
+ case Word(arg, rest) => Right((arg, rest))
+ case _ => Left(s"Illegal argument: $in")
+ }
+
+ // parse a list of whitespace-separated arguments (ignoring whitespace in quoted arguments)
+ @tailrec private def commandLine(in: String, accum: List[String] = Nil): Either[String, (List[String], String)] = {
+ val trimmed = in.trim
+ if (trimmed.isEmpty) Right((accum.reverse, ""))
+ else argument(trimmed) match {
+ case Right((arg, next)) =>
+ (next span Character.isWhitespace) match {
+ case("", rest) if rest.nonEmpty => Left("Arguments should be separated by whitespace.") // TODO: can this happen?
+ case(ws, rest) => commandLine(rest, arg :: accum)
+ }
+ case Left(msg) => Left(msg)
+ }
+ }
+
+ class ParseException(msg: String) extends RuntimeException(msg)
+
+ def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x))
+ def tokenize(line: String, errorFn: String => Unit): List[String] = {
+ commandLine(line) match {
+ case Right((args, _)) => args
+ case Left(msg) => errorFn(msg) ; Nil
+ }
+ }
+}
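
[Editor's note] A worksheet-style usage sketch for the CommandLineParser object added above, following the behavior documented in its comments: arguments are split on whitespace, quotes are stripped, and quoted arguments may contain whitespace. The option name used is arbitrary.

    // Sketch, not part of the patch: exercising the new tokenizer.
    import scala.tools.cmd.CommandLineParser

    CommandLineParser.tokenize("""run --grep "hello world" 'a b'""")
    // expected: List(run, --grep, hello world, a b)
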
diff --git a/src/compiler/scala/tools/cmd/Demo.scala b/src/compiler/scala/tools/cmd/Demo.scala
deleted file mode 100644
index af818845bb..0000000000
--- a/src/compiler/scala/tools/cmd/Demo.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package cmd
-
-/** A sample command specification for illustrative purposes.
- * First take advantage of the meta-options:
- *
- * // this command creates an executable runner script "demo"
- * % scala scala.tools.cmd.Demo --self-update demo
- *
- * // this one creates and sources a completion file - note backticks
- * % `./demo --bash`
- *
- * // and now you have a runner with working completion
- * % ./demo --<tab>
- * --action --defint --int
- * --bash --defstr --str
- * --defenv --self-update --unary
- *
- * The normal option configuration is plausibly self-explanatory.
- */
-trait DemoSpec extends Spec with Meta.StdOpts with Interpolation {
- lazy val referenceSpec = DemoSpec
- lazy val programInfo = Spec.Info("demo", "Usage: demo [<options>]", "scala.tools.cmd.Demo")
-
- help("""Usage: demo [<options>]""")
- heading("Unary options:")
-
- val optIsUnary = "unary" / "a unary option" --? ;
- ("action" / "a body which may be run") --> println("Hello, I am the --action body.")
-
- heading("Binary options:")
- val optopt = "str" / "an optional String" --|
- val optoptInt = ("int" / "an optional Int") . --^[Int]
- val optEnv = "defenv" / "an optional String" defaultToEnv "PATH"
- val optDefault = "defstr" / "an optional String" defaultTo "default"
- val optDefaultInt = "defint" / "an optional Int" defaultTo -1
- val optExpand = "alias" / "an option which expands" expandTo ("--int", "15")
-}
-
-object DemoSpec extends DemoSpec with Property {
- lazy val propMapper = new PropertyMapper(DemoSpec)
-
- type ThisCommandLine = SpecCommandLine
- def creator(args: List[String]) =
- new SpecCommandLine(args) {
- override def errorFn(msg: String) = { println("Error: " + msg) ; sys.exit(0) }
- }
-}
-
-class Demo(args: List[String]) extends {
- val parsed = DemoSpec(args: _*)
-} with DemoSpec with Instance {
- import java.lang.reflect._
-
- def helpMsg = DemoSpec.helpMsg
- def demoSpecMethods = this.getClass.getMethods.toList
- private def isDemo(m: Method) = (m.getName startsWith "opt") && !(m.getName contains "$") && (m.getParameterTypes.isEmpty)
-
- def demoString(ms: List[Method]) = {
- val longest = ms map (_.getName.length) max
- val formatStr = " %-" + longest + "s: %s"
- val xs = ms map (m => formatStr.format(m.getName, m.invoke(this)))
-
- xs mkString ("Demo(\n ", "\n ", "\n)\n")
- }
-
- override def toString = demoString(demoSpecMethods filter isDemo)
-}
-
-object Demo {
- def main(args: Array[String]): Unit = {
- val runner = new Demo(args.toList)
-
- if (args.isEmpty)
- println(runner.helpMsg)
-
- println(runner)
- }
-}
diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala
index cba2e99998..0b074efc0f 100644
--- a/src/compiler/scala/tools/cmd/FromString.scala
+++ b/src/compiler/scala/tools/cmd/FromString.scala
@@ -7,42 +7,34 @@ package scala.tools
package cmd
import nsc.io.{ Path, File, Directory }
-import scala.reflect.runtime.{universe => ru}
-import scala.tools.reflect.StdRuntimeTags._
+import scala.reflect.OptManifest
/** A general mechanism for defining how a command line argument
* (always a String) is transformed into an arbitrary type. A few
* example instances are in the companion object, but in general
* either IntFromString will suffice or you'll want custom transformers.
*/
-abstract class FromString[+T](implicit t: ru.TypeTag[T]) extends PartialFunction[String, T] {
+abstract class FromString[+T](implicit m: OptManifest[T]) extends PartialFunction[String, T] {
def apply(s: String): T
def isDefinedAt(s: String): Boolean = true
def zero: T = apply("")
- def targetString: String = t.toString
+ def targetString: String = m.toString
}
object FromString {
- // We need these because we clash with the String => Path implicits.
- private def toFile(s: String) = new File(new java.io.File(s))
+ // We need this because we clash with the String => Path implicits.
private def toDir(s: String) = new Directory(new java.io.File(s))
/** Path related stringifiers.
*/
- val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) {
- override def isDefinedAt(s: String) = toFile(s).isFile
- def apply(s: String): File =
- if (isDefinedAt(s)) toFile(s)
- else cmd.runAndExit(println("'%s' is not an existing file." format s))
- }
- val ExistingDir: FromString[Directory] = new FromString[Directory]()(tagOfDirectory) {
+ val ExistingDir: FromString[Directory] = new FromString[Directory] {
override def isDefinedAt(s: String) = toDir(s).isDirectory
def apply(s: String): Directory =
if (isDefinedAt(s)) toDir(s)
else cmd.runAndExit(println("'%s' is not an existing directory." format s))
}
- def ExistingDirRelativeTo(root: Directory) = new FromString[Directory]()(tagOfDirectory) {
+ def ExistingDirRelativeTo(root: Directory) = new FromString[Directory] {
private def resolve(s: String) = (toDir(s) toAbsoluteWithRoot root).toDirectory
override def isDefinedAt(s: String) = resolve(s).isDirectory
def apply(s: String): Directory =
@@ -53,19 +45,19 @@ object FromString {
/** Argument expander, i.e. turns single argument "foo bar baz" into argument
* list "foo", "bar", "baz".
*/
- val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]]()(tagOfListOfString) {
+ val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]] {
def apply(s: String) = toArgs(s)
}
/** Identity.
*/
- implicit val StringFromString: FromString[String] = new FromString[String]()(tagOfString) {
+ implicit val StringFromString: FromString[String] = new FromString[String] {
def apply(s: String): String = s
}
/** Implicit as the most likely to be useful as-is.
*/
- implicit val IntFromString: FromString[Int] = new FromString[Int]()(tagOfInt) {
+ implicit val IntFromString: FromString[Int] = new FromString[Int] {
override def isDefinedAt(s: String) = safeToInt(s).isDefined
def apply(s: String) = safeToInt(s).get
def safeToInt(s: String): Option[Int] = try Some(java.lang.Integer.parseInt(s)) catch { case _: NumberFormatException => None }
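
[Editor's note] After this change a FromString instance only needs an OptManifest, which Predef supplies implicitly, so custom instances can be written with no explicit tag argument. A hypothetical instance, not part of the patch, written in the same style as the ones above:

    // Sketch, not part of the patch: a custom FromString in the new style,
    // relying on the implicit OptManifest from Predef rather than a TypeTag.
    import scala.tools.cmd.FromString

    val BooleanFromString: FromString[Boolean] = new FromString[Boolean] {
      override def isDefinedAt(s: String) = s == "true" || s == "false"
      def apply(s: String): Boolean = s.toBoolean
    }

    BooleanFromString("true")               // true
    BooleanFromString.isDefinedAt("maybe")  // false
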
diff --git a/src/compiler/scala/tools/cmd/Interpolation.scala b/src/compiler/scala/tools/cmd/Interpolation.scala
index abffd6bb2e..d1c798b621 100644
--- a/src/compiler/scala/tools/cmd/Interpolation.scala
+++ b/src/compiler/scala/tools/cmd/Interpolation.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package cmd
/** Interpolation logic for generated files. The idea is to be
diff --git a/src/compiler/scala/tools/cmd/Opt.scala b/src/compiler/scala/tools/cmd/Opt.scala
index 2c193128f1..df3d0c4462 100644
--- a/src/compiler/scala/tools/cmd/Opt.scala
+++ b/src/compiler/scala/tools/cmd/Opt.scala
@@ -26,10 +26,10 @@ object Opt {
trait Implicit {
def name: String
def programInfo: Info
- protected def opt = toOpt(name)
+ protected def opt = fromOpt(name)
def --? : Boolean // --opt is set
- def --> (body: => Unit): Unit // if --opt is set, execute body
+ def --> (body: => Unit): Boolean // if --opt is set, execute body
def --| : Option[String] // --opt <arg: String> is optional, result is Option[String]
def --^[T: FromString] : Option[T] // --opt <arg: T> is optional, result is Option[T]
@@ -51,7 +51,7 @@ object Opt {
import options._
def --? = { addUnary(opt) ; false }
- def --> (body: => Unit) = { addUnary(opt) }
+ def --> (body: => Unit) = { addUnary(opt) ; false }
def --| = { addBinary(opt) ; None }
def --^[T: FromString] = { addBinary(opt) ; None }
@@ -65,7 +65,7 @@ object Opt {
class Instance(val programInfo: Info, val parsed: CommandLine, val name: String) extends Implicit with Error {
def --? = parsed isSet opt
- def --> (body: => Unit) = if (parsed isSet opt) body
+ def --> (body: => Unit) = { val isSet = parsed isSet opt ; if (isSet) body ; isSet }
def --| = parsed get opt
def --^[T: FromString] = {
val fs = implicitly[FromString[T]]
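
[Editor's note] The --> combinator now reports, via its Boolean result, whether the option was actually set; previously it returned Unit, so callers could not tell whether the body ran. A minimal stand-in for that contract (not the Opt DSL itself, which needs the surrounding Spec machinery):

    // Sketch, not part of the patch: run the body only when the option is set
    // and return whether it was set, as Instance.--> above now does.
    def runIfSet(isSet: Boolean)(body: => Unit): Boolean = {
      if (isSet) body
      isSet
    }

    runIfSet(isSet = true) { println("--action body runs") }   // prints, returns true
    runIfSet(isSet = false) { println("never printed") }       // returns false
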
diff --git a/src/compiler/scala/tools/cmd/Parser.scala b/src/compiler/scala/tools/cmd/Parser.scala
deleted file mode 100644
index 6e2afa41c4..0000000000
--- a/src/compiler/scala/tools/cmd/Parser.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package cmd
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.input.CharArrayReader.EofCh
-
-/** A simple (overly so) command line parser.
- * !!! This needs a thorough test suite to make sure quoting is
- * done correctly and portably.
- */
-trait ParserUtil extends Parsers {
- class ParserPlus[+T](underlying: Parser[T]) {
- def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b }
- def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a }
- }
- protected implicit def parser2parserPlus[T](p: Parser[T]): ParserPlus[T] = new ParserPlus(p)
-}
-
-object Parser extends RegexParsers with ParserUtil {
- override def skipWhitespace = false
-
- def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x))
- def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _)
- def escaped(ch: Char): Parser[String] = "\\" + ch
- def mkQuoted(ch: Char): Parser[String] = (
- elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString)
- | failure("Unmatched %s in input." format ch)
- )
-
- /** Apparently windows can't deal with the quotes sticking around. */
- lazy val squoted: Parser[String] = mkQuoted('\'') // ^^ (x => "'%s'" format x)
- lazy val dquoted: Parser[String] = mkQuoted('"') // ^^ (x => "\"" + x + "\"")
- lazy val token: Parser[String] = """\S+""".r
-
- lazy val argument: Parser[String] = squoted | dquoted | token
- lazy val commandLine: Parser[List[String]] = phrase(repsep(argument, whiteSpace))
-
- class ParseException(msg: String) extends RuntimeException(msg)
-
- def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x))
- def tokenize(line: String, errorFn: String => Unit): List[String] = {
- parse(commandLine, line.trim) match {
- case Success(args, _) => args
- case NoSuccess(msg, rest) => errorFn(msg) ; Nil
- }
- }
-}
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index bcbb454771..62b6c893cf 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -23,14 +23,13 @@ trait Reference extends Spec {
def helpMsg = options.helpMsg
def propertyArgs: List[String] = Nil
- def isUnaryOption(s: String) = unary contains toOpt(s)
- def isBinaryOption(s: String) = binary contains toOpt(s)
- def isExpandOption(s: String) = expansionMap contains toOpt(s)
- def isAnyOption(s: String) = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s)
+ def isUnaryOption(s: String) = unary contains fromOpt(s)
+ def isBinaryOption(s: String) = binary contains fromOpt(s)
+ def isExpandOption(s: String) = expansionMap contains fromOpt(s)
- def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg))
+ def expandArg(arg: String): List[String] = expansionMap.getOrElse(fromOpt(arg), List(arg))
- protected def help(str: => String) = addHelp(() => str)
+ protected def help(str: => String): Unit = addHelp(() => str)
type ThisCommandLine <: CommandLine
@@ -46,7 +45,7 @@ object Reference {
val MaxLine = 80
class Accumulators() {
- private var _help = new ListBuffer[() => String]
+ private val _help = new ListBuffer[() => String]
private var _unary = List[String]()
private var _binary = List[String]()
private var _expand = Map[String, List[String]]()
@@ -54,20 +53,20 @@ object Reference {
def helpFormatStr = " %-" + longestArg + "s %s"
def defaultFormatStr = (" " * (longestArg + 7)) + "%s"
- def addUnary(s: String) = _unary +:= s
- def addBinary(s: String) = _binary +:= s
+ def addUnary(s: String): Unit = _unary +:= s
+ def addBinary(s: String): Unit = _binary +:= s
def addExpand(opt: String, expanded: List[String]) =
_expand += (opt -> expanded)
- def mapHelp(g: String => String) = {
+ def mapHelp(g: String => String): Unit = {
val idx = _help.length - 1
val f = _help(idx)
_help(idx) = () => g(f())
}
- def addHelp(f: () => String) = _help += f
+ def addHelp(f: () => String): Unit = _help += f
def addHelpAlias(f: () => String) = mapHelp { s =>
val str = "alias for '%s'" format f()
def noHelp = (helpFormatStr.format("", "")).length == s.length
@@ -75,13 +74,13 @@ object Reference {
s + str2
}
- def addHelpDefault(f: () => String) = mapHelp { s =>
+ def addHelpDefault(f: () => String): Unit = mapHelp { s =>
val str = "(default: %s)" format f()
if (s.length + str.length < MaxLine) s + " " + str
else defaultFormatStr.format(s, str)
}
- def addHelpEnvDefault(name: String) = mapHelp { s =>
+ def addHelpEnvDefault(name: String): Unit = mapHelp { s =>
val line1 = "%s (default: %s)".format(s, name)
val envNow = envOrNone(name) map ("'" + _ + "'") getOrElse "unset"
val line2 = defaultFormatStr.format("Currently " + envNow)
diff --git a/src/compiler/scala/tools/cmd/Spec.scala b/src/compiler/scala/tools/cmd/Spec.scala
index b761601167..a1cb31f911 100644
--- a/src/compiler/scala/tools/cmd/Spec.scala
+++ b/src/compiler/scala/tools/cmd/Spec.scala
@@ -15,7 +15,7 @@ trait Spec {
def programInfo: Spec.Info
protected def help(str: => String): Unit
- protected def heading(str: => String): Unit = help("\n " + str)
+ protected def heading(str: => String): Unit = help(s"\n $str")
type OptionMagic <: Opt.Implicit
protected implicit def optionMagicAdditions(s: String): OptionMagic
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index dbd2195938..842851b4f6 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -6,24 +6,23 @@
package scala.tools.cmd
package gen
-/** Code generation of the AnyVal types and their companions.
- */
+/** Code generation of the AnyVal types and their companions. */
trait AnyValReps {
self: AnyVals =>
- sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
+ sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String)
+ extends AnyValRep(name,repr,javaEquiv) {
- case class Op(val op : String, val doc : String)
+ case class Op(op : String, doc : String)
private def companionCoercions(tos: AnyValRep*) = {
tos.toList map (to =>
- """implicit def @javaequiv@2%s(x: @name@): %s = x.to%s""".format(to.javaEquiv, to.name, to.name)
+ s"implicit def @javaequiv@2${to.javaEquiv}(x: @name@): ${to.name} = x.to${to.name}"
)
}
- def coercionCommentExtra = ""
- def coercionComment = """
- /** Language mandated coercions from @name@ to "wider" types.%s
- */""".format(coercionCommentExtra)
+ def coercionComment =
+"""/** Language mandated coercions from @name@ to "wider" types. */
+import scala.language.implicitConversions"""
def implicitCoercions: List[String] = {
val coercions = this match {
@@ -35,18 +34,14 @@ trait AnyValReps {
case _ => Nil
}
if (coercions.isEmpty) Nil
- else coercionComment :: coercions
+ else coercionComment.lines.toList ++ coercions
}
def isCardinal: Boolean = isIntegerType(this)
def unaryOps = {
val ops = List(
- Op("+", "/**\n" +
- " * Returns this value, unmodified.\n" +
- " */"),
- Op("-", "/**\n" +
- " * Returns the negation of this value.\n" +
- " */"))
+ Op("+", "/** Returns this value, unmodified. */"),
+ Op("-", "/** Returns the negation of this value. */"))
if(isCardinal)
Op("~", "/**\n" +
@@ -95,7 +90,7 @@ trait AnyValReps {
" */"))
else Nil
- def shiftOps =
+ def shiftOps =
if (isCardinal)
List(
Op("<<", "/**\n" +
@@ -127,20 +122,20 @@ trait AnyValReps {
" */"))
else Nil
- def comparisonOps = List(
- Op("==", "/**\n * Returns `true` if this value is equal to x, `false` otherwise.\n */"),
- Op("!=", "/**\n * Returns `true` if this value is not equal to x, `false` otherwise.\n */"),
- Op("<", "/**\n * Returns `true` if this value is less than x, `false` otherwise.\n */"),
- Op("<=", "/**\n * Returns `true` if this value is less than or equal to x, `false` otherwise.\n */"),
- Op(">", "/**\n * Returns `true` if this value is greater than x, `false` otherwise.\n */"),
- Op(">=", "/**\n * Returns `true` if this value is greater than or equal to x, `false` otherwise.\n */"))
+ def comparisonOps = List(
+ Op("==", "/** Returns `true` if this value is equal to x, `false` otherwise. */"),
+ Op("!=", "/** Returns `true` if this value is not equal to x, `false` otherwise. */"),
+ Op("<", "/** Returns `true` if this value is less than x, `false` otherwise. */"),
+ Op("<=", "/** Returns `true` if this value is less than or equal to x, `false` otherwise. */"),
+ Op(">", "/** Returns `true` if this value is greater than x, `false` otherwise. */"),
+ Op(">=", "/** Returns `true` if this value is greater than or equal to x, `false` otherwise. */"))
def otherOps = List(
- Op("+", "/**\n * Returns the sum of this value and `x`.\n */"),
- Op("-", "/**\n * Returns the difference of this value and `x`.\n */"),
- Op("*", "/**\n * Returns the product of this value and `x`.\n */"),
- Op("/", "/**\n * Returns the quotient of this value and `x`.\n */"),
- Op("%", "/**\n * Returns the remainder of the division of this value by `x`.\n */"))
+ Op("+", "/** Returns the sum of this value and `x`. */"),
+ Op("-", "/** Returns the difference of this value and `x`. */"),
+ Op("*", "/** Returns the product of this value and `x`. */"),
+ Op("/", "/** Returns the quotient of this value and `x`. */"),
+ Op("%", "/** Returns the remainder of the division of this value by `x`. */"))
// Given two numeric value types S and T , the operation type of S and T is defined as follows:
// If both S and T are subrange types then the operation type of S and T is Int.
@@ -183,7 +178,7 @@ trait AnyValReps {
}
def objectLines = {
val comp = if (isCardinal) cardinalCompanion else floatingCompanion
- (comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ implicitCoercions map interpolate
+ interpolate(comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ (implicitCoercions map interpolate)
}
/** Makes a set of binary operations based on the given set of ops, args, and resultFn.
@@ -209,11 +204,14 @@ trait AnyValReps {
)
def lcname = name.toLowerCase
+ def boxedSimpleName = this match {
+ case C => "Character"
+ case I => "Integer"
+ case _ => name
+ }
def boxedName = this match {
case U => "scala.runtime.BoxedUnit"
- case C => "java.lang.Character"
- case I => "java.lang.Integer"
- case _ => "java.lang." + name
+ case _ => "java.lang." + boxedSimpleName
}
def zeroRep = this match {
case L => "0L"
@@ -228,7 +226,13 @@ trait AnyValReps {
def indentN(s: String) = s.lines map indent mkString "\n"
def boxUnboxImpls = Map(
+ "@boxRunTimeDoc@" -> """
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *""".format(boxedSimpleName),
"@boxImpl@" -> "%s.valueOf(x)".format(boxedName),
+ "@unboxRunTimeDoc@" -> """
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *""".format(name),
"@unboxImpl@" -> "x.asInstanceOf[%s].%sValue()".format(boxedName, lcname),
"@unboxDoc@" -> "the %s resulting from calling %sValue() on `x`".format(name, lcname)
)
@@ -269,8 +273,7 @@ trait AnyValReps {
}
trait AnyValTemplates {
- def headerTemplate = ("""
-/* __ *\
+ def headerTemplate = """/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
@@ -278,12 +281,13 @@ trait AnyValTemplates {
** |/ **
\* */
-%s
-package scala
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
-import scala.language.implicitConversions
+package scala
-""".trim.format(timestampString) + "\n\n")
+"""
def classDocTemplate = ("""
/** `@name@`@representation@ (equivalent to Java's `@javaequiv@` primitive type) is a
@@ -295,11 +299,9 @@ import scala.language.implicitConversions
*/
""".trim + "\n")
- def timestampString = "// DO NOT EDIT, CHANGES WILL BE LOST.\n"
-
def allCompanions = """
/** Transform a value type into a boxed reference type.
- *
+ *@boxRunTimeDoc@
* @param x the @name@ to be boxed
* @return a @boxed@ offering `x` as its underlying value.
*/
@@ -308,27 +310,24 @@ def box(x: @name@): @boxed@ = @boxImpl@
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a @boxed@.
- *
+ *@unboxRunTimeDoc@
* @param x the @boxed@ to be unboxed.
* @throws ClassCastException if the argument is not a @boxed@
* @return @unboxDoc@
*/
def unbox(x: java.lang.Object): @name@ = @unboxImpl@
-/** The String representation of the scala.@name@ companion object.
- */
+/** The String representation of the scala.@name@ companion object. */
override def toString = "object scala.@name@"
"""
def nonUnitCompanions = "" // todo
def cardinalCompanion = """
-/** The smallest value representable as a @name@.
- */
+/** The smallest value representable as a @name@. */
final val MinValue = @boxed@.MIN_VALUE
-/** The largest value representable as a @name@.
- */
+/** The largest value representable as a @name@. */
final val MaxValue = @boxed@.MAX_VALUE
"""
@@ -363,18 +362,16 @@ class AnyVals extends AnyValReps with AnyValTemplates {
object D extends AnyValNum("Double", Some("64-bit IEEE-754 floating point number"), "double")
object Z extends AnyValRep("Boolean", None, "boolean") {
def classLines = """
-/**
- * Negates a Boolean expression.
- *
- * - `!a` results in `false` if and only if `a` evaluates to `true` and
- * - `!a` results in `true` if and only if `a` evaluates to `false`.
- *
- * @return the negated expression
- */
+/** Negates a Boolean expression.
+ *
+ * - `!a` results in `false` if and only if `a` evaluates to `true` and
+ * - `!a` results in `true` if and only if `a` evaluates to `false`.
+ *
+ * @return the negated expression
+ */
def unary_! : Boolean
-/**
- * Compares two Boolean expressions and returns `true` if they evaluate to the same value.
+/** Compares two Boolean expressions and returns `true` if they evaluate to the same value.
*
* `a == b` returns `true` if and only if
* - `a` and `b` are `true` or
@@ -391,8 +388,7 @@ def ==(x: Boolean): Boolean
*/
def !=(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a || b` returns `true` if and only if
* - `a` is `true` or
@@ -405,8 +401,7 @@ def !=(x: Boolean): Boolean
*/
def ||(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a && b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -421,8 +416,7 @@ def &&(x: Boolean): Boolean
// def ||(x: => Boolean): Boolean
// def &&(x: => Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a | b` returns `true` if and only if
* - `a` is `true` or
@@ -433,8 +427,7 @@ def &&(x: Boolean): Boolean
*/
def |(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a & b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -443,8 +436,7 @@ def |(x: Boolean): Boolean
*/
def &(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+/** Compares two Boolean expressions and returns `true` if they evaluate to a different value.
*
* `a ^ b` returns `true` if and only if
* - `a` is `true` and `b` is `false` or
@@ -471,7 +463,9 @@ override def getClass(): Class[Boolean] = null
def objectLines = interpolate(allCompanions).lines.toList
override def boxUnboxImpls = Map(
+ "@boxRunTimeDoc@" -> "",
"@boxImpl@" -> "scala.runtime.BoxedUnit.UNIT",
+ "@unboxRunTimeDoc@" -> "",
"@unboxImpl@" -> "()",
"@unboxDoc@" -> "the Unit value ()"
)
@@ -488,5 +482,3 @@ override def getClass(): Class[Boolean] = null
def make() = values map (x => (x.name, x.make()))
}
-
-object AnyVals extends AnyVals { }
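
[Editor's note] The companion templates above are plain strings with @name@-style placeholders that the generator fills per value type via interpolate (defined in the Interpolation trait, not shown in this hunk). A simplified, worksheet-style stand-in for that substitution step:

    // Sketch, not part of the patch: @placeholder@ substitution of the kind
    // performed when expanding the companion templates above.
    def substitute(template: String, bindings: Map[String, String]): String =
      bindings.foldLeft(template) { case (acc, (key, value)) => acc.replace(key, value) }

    substitute(
      "def box(x: @name@): @boxed@ = @boxImpl@",
      Map("@name@"    -> "Int",
          "@boxed@"   -> "java.lang.Integer",
          "@boxImpl@" -> "java.lang.Integer.valueOf(x)"))
    // expected: def box(x: Int): java.lang.Integer = java.lang.Integer.valueOf(x)
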
diff --git a/src/compiler/scala/tools/cmd/gen/Codegen.scala b/src/compiler/scala/tools/cmd/gen/Codegen.scala
index 4ca9b6cac7..c3aa527ef2 100644
--- a/src/compiler/scala/tools/cmd/gen/Codegen.scala
+++ b/src/compiler/scala/tools/cmd/gen/Codegen.scala
@@ -6,11 +6,9 @@
package scala.tools.cmd
package gen
-import scala.language.postfixOps
-
class Codegen(args: List[String]) extends {
val parsed = CodegenSpec(args: _*)
-} with CodegenSpec with Instance { }
+} with CodegenSpec with Instance
object Codegen {
def echo(msg: String) = Console println msg
@@ -23,7 +21,7 @@ object Codegen {
return println (CodegenSpec.helpMsg)
val out = outDir getOrElse { return println("--out is required.") }
- val all = genall || (!anyvals && !products)
+ val all = genall || !anyvals
echo("Generating sources into " + out)
@@ -31,7 +29,7 @@ object Codegen {
val av = new AnyVals { }
av.make() foreach { case (name, code ) =>
- val file = out / (name + ".scala") toFile;
+ val file = (out / (name + ".scala")).toFile
echo("Writing: " + file)
file writeAll code
}
diff --git a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
index 903517c5b4..4b4a1e482d 100644
--- a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
+++ b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
@@ -12,17 +12,11 @@ trait CodegenSpec extends Spec with Meta.StdOpts with Interpolation {
def referenceSpec = CodegenSpec
def programInfo = Spec.Info("codegen", "", "scala.tools.cmd.gen.Codegen")
- import FromString.ExistingDir
-
help("Usage: codegen [<options>]")
- // val inDir = "in" / "directory containing templates" --^ ExistingDir
val outDir = "out" / "directory for generated files" --^ ExistingDir
- // val install = "install" / "write source files directly to src/library/scala"
val anyvals = "anyvals" / "generate sources for AnyVal types" --?
- val products = "products" / "generate sources for ProductN, FunctionN, etc." --?
val genall = "all" / "generate sources for everything" --?
- val stamp = "stamp" / "add a timestamp to the generated files" --?
}
object CodegenSpec extends CodegenSpec with Reference {
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index d605ecae8f..9754becf10 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package object cmd {
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
@@ -12,19 +13,19 @@ package object cmd {
implicit def implicitConversions = scala.language.implicitConversions
implicit def postfixOps = scala.language.postfixOps
- private[cmd] def debug(msg: String) = println(msg)
+ private[cmd] def debug(msg: String): Unit = println(msg)
def runAndExit(body: => Unit): Nothing = {
body
sys.exit(0)
}
- def toOpt(s: String) = if (s startsWith "--") s else "--" + s
- def fromOpt(s: String) = s stripPrefix "--"
- def toArgs(line: String) = Parser tokenize line
- def fromArgs(args: List[String]) = args mkString " "
+ def toOpt(s: String): String = if (s startsWith "--") s else "--" + s
+ def fromOpt(s: String): String = s stripPrefix "--"
+ def toArgs(line: String): List[String] = CommandLineParser tokenize line
+ def fromArgs(args: List[String]): String = args mkString " "
- def stripQuotes(s: String) = {
+ def stripQuotes(s: String): String = {
def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
if (List('"', '\'') exists isQuotedBy) s.tail.init else s
}
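
[Editor's note] The helpers above only gain explicit result types; their behavior follows directly from the one-line definitions. A worksheet-style sketch of what they return:

    // Sketch, not part of the patch: behavior of the cmd package helpers as defined above.
    import scala.tools.cmd._

    toOpt("verbose")      // "--verbose"
    toOpt("--verbose")    // "--verbose"  (already prefixed, left alone)
    fromOpt("--verbose")  // "verbose"
    stripQuotes("'a b'")  // "a b"
    stripQuotes("plain")  // "plain"
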
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 355a1fd262..df5952a4cf 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -5,12 +5,12 @@
package scala.tools.nsc
-import util.FreshNameCreator
-import scala.reflect.internal.util.{ Position, NoPosition, BatchSourceFile, SourceFile, NoSourceFile }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
+import scala.tools.nsc.reporters.Reporter
-trait CompilationUnits { self: Global =>
+trait CompilationUnits { global: Global =>
/** An object representing a missing compilation unit.
*/
@@ -26,34 +26,57 @@ trait CompilationUnits { self: Global =>
class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self =>
/** the fresh name creator */
- var fresh: FreshNameCreator = new FreshNameCreator.Default
-
- def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix))
- def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix))
+ implicit val fresh: FreshNameCreator = new FreshNameCreator
+ def freshTermName(prefix: String = "x$") = global.freshTermName(prefix)
+ def freshTypeName(prefix: String) = global.freshTypeName(prefix)
/** the content of the compilation unit in tree form */
var body: Tree = EmptyTree
+ /** The position of the first xml literal encountered while parsing this compilation unit.
+ * NoPosition if there were none. Write-once.
+ */
+ private[this] var _firstXmlPos: Position = NoPosition
+
+ /** Record that we encountered XML. Should only be called once. */
+ protected[nsc] def encounteredXml(pos: Position) = _firstXmlPos = pos
+
+ /** Does this unit contain XML? */
+ def hasXml = _firstXmlPos ne NoPosition
+
+ /** Position of first XML literal in this unit. */
+ def firstXmlPos = _firstXmlPos
+
def exists = source != NoSourceFile && source != null
-// def parseSettings() = {
-// val argsmarker = "SCALAC_ARGS"
-// if(comments nonEmpty) {
-// val pragmas = comments find (_.text.startsWith("//#")) // only parse first one
-// pragmas foreach { p =>
-// val i = p.text.indexOf(argsmarker)
-// if(i > 0)
-// }
-// }
-// }
/** Note: depends now contains toplevel classes.
* To get their sourcefiles, you need to dereference with .sourcefile
*/
- val depends = mutable.HashSet[Symbol]()
+ private[this] val _depends = mutable.HashSet[Symbol]()
+ // SBT compatibility (SI-6875)
+ //
+ // imagine we have a file named A.scala, which defines a trait named Foo and a module named Main
+ // Main contains a call to a macro, which calls compileLate to define a mock for Foo
+ // compileLate creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo,
+ // and macro expansion instantiates FooMock. the stage is now set. let's see what happens next.
+ //
+ // without this workaround in scalac or without being patched itself, sbt will think that
+ // * Virt35af32 depends on A (because it extends Foo from A)
+ // * A depends on Virt35af32 (because it contains a macro expansion referring to FooMock from Virt35af32)
+ //
+ // after compiling A.scala, SBT will notice that it has a new source file named Virt35af32.
+ // it will also think that this file hasn't yet been compiled and since A depends on it
+ // it will think that A needs to be recompiled.
+ //
+ // recompilation will lead to another macro expansion. that another macro expansion might choose to create a fresh mock,
+ // producing another virtual file, say, Virtee509a, which will again trick SBT into thinking that A needs a recompile,
+ // which will lead to another macro expansion, which will produce another virtual file and so on
+ def depends = if (exists && !source.file.isVirtual) _depends else mutable.HashSet[Symbol]()
/** so we can relink
*/
- val defined = mutable.HashSet[Symbol]()
+ private[this] val _defined = mutable.HashSet[Symbol]()
+ def defined = if (exists && !source.file.isVirtual) _defined else mutable.HashSet[Symbol]()
/** Synthetic definitions generated by namer, eliminated by typer.
*/
@@ -67,7 +90,7 @@ trait CompilationUnits { self: Global =>
debuglog(s"removing synthetic $sym from $self")
map -= sym
}
- def get(sym: Symbol): Option[Tree] = logResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
+ def get(sym: Symbol): Option[Tree] = debuglogResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
map get sym
}
def keys: Iterable[Symbol] = map.keys
@@ -95,6 +118,8 @@ trait CompilationUnits { self: Global =>
*/
val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
+ def reporter = global.reporter
+
def echo(pos: Position, msg: String) =
reporter.echo(pos, msg)
@@ -123,18 +148,5 @@ trait CompilationUnits { self: Global =>
lazy val isJava = source.file.name.endsWith(".java")
override def toString() = source.toString()
-
- def clear() {
- fresh = new FreshNameCreator.Default
- body = EmptyTree
- depends.clear()
- defined.clear()
- synthetics.clear()
- toCheck.clear()
- checkedFeatures = Set()
- icode.clear()
- }
}
}
-
-
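
[Editor's note] The long comment above describes the SI-6875 workaround: a compilation unit backed by a compiler-synthesized (virtual) source file must not advertise dependency or definition symbols, or sbt loops on recompilation. A minimal stand-in for that guard, using plain strings instead of compiler Symbols:

    // Sketch, not part of the patch: the guard pattern used by `depends`/`defined`
    // above -- record into a private set, but expose an empty set whenever the
    // unit's source is virtual, so build tools never see those symbols.
    import scala.collection.mutable

    class UnitSymbols(sourceIsVirtual: Boolean) {
      private[this] val _depends = mutable.HashSet[String]()
      def record(symbolName: String): Unit = _depends += symbolName
      def depends: mutable.HashSet[String] =
        if (!sourceIsVirtual) _depends else mutable.HashSet[String]()
    }

    val fromVirtualFile = new UnitSymbols(sourceIsVirtual = true)
    fromVirtualFile.record("FooMock")
    fromVirtualFile.depends.isEmpty  // true: hidden from the build tool
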
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 731f6926f0..3017d8c9cc 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -3,12 +3,12 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
-import java.io.{ BufferedReader, File, InputStreamReader, PrintWriter }
import settings.FscSettings
import scala.tools.util.CompileOutputCommon
-import sys.SystemProperties.preferIPv4Stack
+import scala.sys.SystemProperties.preferIPv4Stack
/** The client part of the fsc offline compiler. Instead of compiling
* things itself, it send requests to a CompileServer.
@@ -27,12 +27,12 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon {
val settings = new FscSettings(Console.println)
val command = new OfflineCompilerCommand(args.toList, settings)
val shutdown = settings.shutdown.value
- val extraVmArgs = if (settings.preferIPv4.value) List("-D%s=true".format(preferIPv4Stack.key)) else Nil
+ val extraVmArgs = if (settings.preferIPv4) List("-D%s=true".format(preferIPv4Stack.key)) else Nil
val vmArgs = settings.jvmargs.unparse ++ settings.defines.unparse ++ extraVmArgs
val fscArgs = args.toList ++ command.extraFscArgs
- if (settings.version.value) {
+ if (settings.version) {
Console println versionMsg
return true
}
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 7a0a072bb8..6f068e179c 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
-import java.io.{ BufferedOutputStream, FileOutputStream, PrintStream }
+import java.io.PrintStream
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.reflect.internal.util.FakePos //Position
import scala.tools.util.SocketServer
@@ -29,8 +29,6 @@ class StandardCompileServer extends SocketServer {
var shutdown = false
var verbose = false
- val versionMsg = "Fast " + Properties.versionMsg
-
val MaxCharge = 0.8
private val runtime = Runtime.getRuntime()
@@ -58,9 +56,6 @@ class StandardCompileServer extends SocketServer {
(totalMemory - freeMemory).toDouble / maxMemory.toDouble > MaxCharge
}
- protected def newOfflineCompilerCommand(arguments: List[String], settings: FscSettings): OfflineCompilerCommand =
- new OfflineCompilerCommand(arguments, settings)
-
/** Problematically, Settings are only considered equal if every setting
* is exactly equal. In fsc this immediately breaks down because the randomly
* chosen temporary outdirs differ between client and server. Among other
@@ -90,9 +85,9 @@ class StandardCompileServer extends SocketServer {
if (input == null || password != guessedPassword)
return
- val args = input.split("\0", -1).toList
+ val args = input.split("\u0000", -1).toList
val newSettings = new FscSettings(fscError)
- val command = newOfflineCompilerCommand(args, newSettings)
+ val command = new OfflineCompilerCommand(args, newSettings)
this.verbose = newSettings.verbose.value
info("Settings after normalizing paths: " + newSettings)
@@ -120,7 +115,7 @@ class StandardCompileServer extends SocketServer {
reporter = new ConsoleReporter(newSettings, in, out) {
// disable prompts, so that compile server cannot block
- override def displayPrompt = ()
+ override def displayPrompt() = ()
}
def isCompilerReusable: Boolean = {
if (compiler == null) {
@@ -162,7 +157,7 @@ class StandardCompileServer extends SocketServer {
}
}
reporter.printSummary()
- if (isMemoryFullEnough) {
+ if (isMemoryFullEnough()) {
info("Nulling out compiler due to memory utilization.")
clearCompiler()
}
@@ -177,9 +172,9 @@ object CompileServer extends StandardCompileServer {
private def createRedirect(filename: String) =
new PrintStream((redirectDir / filename).createFile().bufferedOutput())
- def main(args: Array[String]) =
+ def main(args: Array[String]) =
execute(() => (), args)
-
+
/**
* Used for internal testing. The callback is called upon
* server start, notifying the caller that the server is
@@ -204,7 +199,7 @@ object CompileServer extends StandardCompileServer {
compileSocket setPort port
startupCallback()
run()
-
+
compileSocket deletePort port
}
}
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 4051bda914..c4f06b59ec 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -5,13 +5,9 @@
package scala.tools.nsc
-import java.io.{ IOException, FileNotFoundException, PrintWriter, FileOutputStream }
-import java.io.{ BufferedReader, FileReader }
-import java.util.regex.Pattern
-import java.net._
+import java.io.{ FileNotFoundException, PrintWriter, FileOutputStream }
import java.security.SecureRandom
import io.{ File, Path, Directory, Socket }
-import scala.util.control.Exception.catching
import scala.tools.util.CompileOutputCommon
import scala.reflect.internal.util.StringOps.splitWhere
import scala.sys.process._
@@ -28,7 +24,7 @@ trait HasCompileSocket {
sock.applyReaderAndWriter { (in, out) =>
out println (compileSocket getPassword sock.getPort())
- out println (args mkString "\0")
+ out println (args mkString "\u0000")
def loop(): Boolean = in.readLine() match {
case null => noErrors
@@ -117,7 +113,7 @@ class CompileSocket extends CompileOutputCommon {
*/
def getPort(vmArgs: String): Int = {
val maxPolls = 300
- val sleepTime = 25
+ val sleepTime = 25L
var attempts = 0
var port = pollPort()
@@ -156,9 +152,9 @@ class CompileSocket extends CompileOutputCommon {
* cannot be established.
*/
def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
- val maxMillis = 10 * 1000 // try for 10 seconds
- val retryDelay = 50
- val maxAttempts = maxMillis / retryDelay
+ val maxMillis = 10L * 1000 // try for 10 seconds
+ val retryDelay = 50L
+ val maxAttempts = (maxMillis / retryDelay).toInt
def getsock(attempts: Int): Option[Socket] = attempts match {
case 0 => warn("Unable to establish connection to compilation daemon") ; None
@@ -190,7 +186,7 @@ class CompileSocket extends CompileOutputCommon {
catch { case _: NumberFormatException => None }
def getSocket(serverAdr: String): Socket = (
- for ((name, portStr) <- splitWhere(serverAdr, _ == ':', true) ; port <- parseInt(portStr)) yield
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
getSocket(name, port)
) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
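
[Editor's note] Two of the small changes above concern the argument framing between client and server: the argument list travels as a single NUL-separated line, now written with the literal "\u0000" instead of the deprecated "\0" octal escape. A worksheet-style round-trip sketch of that framing:

    // Sketch, not part of the patch: NUL-separated framing of the argument list,
    // matching the mkString / split pair above.
    val args = List("-d", "out dir", "A.scala")
    val framed = args mkString "\u0000"
    framed.split("\u0000", -1).toList == args  // true
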
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index e994150f6f..bab0768ca9 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -5,7 +5,6 @@
package scala.tools.nsc
-import scala.collection.mutable.ListBuffer
import io.File
/** A class representing command line info for scalac */
@@ -15,9 +14,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
type Setting = Settings#Setting
- /** file extensions of files that the compiler can process */
- lazy val fileEndings = Properties.fileEndings
-
private val processArgumentsResult =
if (shouldProcessArguments) processArguments
else (true, Nil)
@@ -31,7 +27,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
|-- Notes on option parsing --
|Boolean settings are always false unless set.
|Where multiple values are accepted, they should be comma-separated.
- | example: -Xplugin:plugin1,plugin2
+ | example: -Xplugin:option1,option2
|<phases> means one or a comma-separated list of:
| (partial) phase names, phase ids, phase id ranges, or the string "all".
| example: -Xprint:all prints all phases.
@@ -41,8 +37,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
""".stripMargin.trim + "\n"
def shortUsage = "Usage: %s <options> <source files>" format cmdName
- def createUsagePreface(shouldExplain: Boolean) =
- if (shouldExplain) shortUsage + "\n" + explainAdvanced else ""
/** Creates a help message for a subset of options based on cond */
def createUsageMsg(cond: Setting => Boolean): String = {
@@ -82,27 +76,27 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
}
/** Messages explaining usage and options */
- def usageMsg = createUsageMsg("where possible standard", false, _.isStandard)
- def xusageMsg = createUsageMsg("Possible advanced", true, _.isAdvanced)
- def yusageMsg = createUsageMsg("Possible private", true, _.isPrivate)
-
- // If any of these settings is set, the compiler shouldn't start;
- // an informative message of some sort should be printed instead.
- def shouldStopWithInfo = {
- import settings.{ Setting => _, _ }
- Set[BooleanSetting](help, Xhelp, Yhelp, showPlugins, showPhases) exists (_.value)
- }
+ def usageMsg = createUsageMsg("where possible standard", shouldExplain = false, _.isStandard)
+ def xusageMsg = createUsageMsg("Possible advanced", shouldExplain = true, _.isAdvanced)
+ def yusageMsg = createUsageMsg("Possible private", shouldExplain = true, _.isPrivate)
+
+ /** For info settings, compiler should just print a message and quit. */
+ def shouldStopWithInfo = settings.isInfo
def getInfoMessage(global: Global): String = {
import settings._
- if (help.value) usageMsg + global.pluginOptionsHelp
- else if (Xhelp.value) xusageMsg
- else if (Yhelp.value) yusageMsg
- else if (showPlugins.value) global.pluginDescriptions
- else if (showPhases.value) global.phaseDescriptions + (
- if (debug.value) "\n" + global.phaseFlagDescriptions else ""
+ if (help) usageMsg + global.pluginOptionsHelp
+ else if (Xhelp) xusageMsg
+ else if (Yhelp) yusageMsg
+ else if (showPlugins) global.pluginDescriptions
+ else if (showPhases) global.phaseDescriptions + (
+ if (debug) "\n" + global.phaseFlagDescriptions else ""
)
- else ""
+ else if (genPhaseGraph.isSetByUser) {
+ val components = global.phaseNames // global.phaseDescriptors // one initializes
+ s"Phase graph of ${components.size} components output to ${genPhaseGraph.value}*.dot."
+ }
+ else ""
}
/**
@@ -128,6 +122,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
case x => List(x)
}
- settings.processArguments(expandedArguments, true)
+ settings.processArguments(expandedArguments, processAll = true)
}
}
diff --git a/src/compiler/scala/tools/nsc/CompilerRun.scala b/src/compiler/scala/tools/nsc/CompilerRun.scala
deleted file mode 100644
index 6746b08155..0000000000
--- a/src/compiler/scala/tools/nsc/CompilerRun.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-
-class CompilerRun {
- def firstPhase: Phase = NoPhase
- def terminalPhase: Phase = NoPhase
- def namerPhase: Phase = NoPhase
- def typerPhase: Phase = NoPhase
- def refchecksPhase: Phase = NoPhase
- def explicitouterPhase: Phase = NoPhase
- def erasurePhase: Phase = NoPhase
- def flattenPhase: Phase = NoPhase
- def mixinPhase: Phase = NoPhase
- def icodePhase: Phase = NoPhase
- def phaseNamed(name: String): Phase = NoPhase
-}
-
diff --git a/src/compiler/scala/tools/nsc/ConsoleWriter.scala b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
index 5c5606e98b..6c16d19d2c 100644
--- a/src/compiler/scala/tools/nsc/ConsoleWriter.scala
+++ b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
@@ -13,9 +13,9 @@ import java.io.Writer
* @version 1.0
*/
class ConsoleWriter extends Writer {
- def close = flush
+ def close() = flush()
- def flush = Console.flush
+ def flush() = Console.flush()
def write(cbuf: Array[Char], off: Int, len: Int) {
if (len > 0)
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
index 814bd58a66..3ac27a42e8 100644
--- a/src/compiler/scala/tools/nsc/Driver.scala
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -1,11 +1,12 @@
-package scala.tools.nsc
+package scala
+package tools.nsc
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
+import scala.tools.nsc.reporters.ConsoleReporter
import Properties.{ versionString, copyrightString, residentPromptString }
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos }
+import scala.reflect.internal.util.FakePos
abstract class Driver {
-
+
val prompt = residentPromptString
val versionMsg = "Scala compiler " +
@@ -41,7 +42,7 @@ abstract class Driver {
command = new CompilerCommand(args.toList, ss)
settings = command.settings
- if (settings.version.value) {
+ if (settings.version) {
reporter.echo(versionMsg)
} else if (processSettingsHook()) {
val compiler = newCompiler()
@@ -68,4 +69,4 @@ abstract class Driver {
sys.exit(if (reporter.hasErrors) 1 else 0)
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index c4147fad4c..15a296c836 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -14,7 +14,7 @@ trait EvalLoop {
def loop(action: (String) => Unit) {
@tailrec def inner() {
Console.print(prompt)
- val line = try Console.readLine catch { case _: EOFException => null }
+ val line = try Console.readLine() catch { case _: EOFException => null }
if (line != null && line != "") {
action(line)
inner()
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
index c8fd5985c6..e710222285 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
@@ -26,7 +26,7 @@ extends CompilerCommand(args, settings) {
// change CompilerCommand behavior
override def shouldProcessArguments: Boolean = false
- private lazy val (_ok, targetAndArguments) = settings.processArguments(args, false)
+ private lazy val (_ok, targetAndArguments) = settings.processArguments(args, processAll = false)
override def ok = _ok
private def guessHowToRun(target: String): GenericRunnerCommand.HowToRun = {
if (!ok) Error
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index 9c2db11a56..ad75d02bff 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -39,7 +39,4 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
val nc = BooleanSetting(
"-nc",
"do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon"
-
- @deprecated("Use `nc` instead", "2.9.0") def nocompdaemon = nc
- @deprecated("Use `save` instead", "2.9.0") def savecompiled = save
}
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index aea3e0d930..5492e563dd 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -3,22 +3,23 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
+import java.util.UUID._
import scala.compat.Platform.currentTime
-import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
-import scala.reflect.internal.util.{ NoPosition, OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString }
+import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import settings.{ AestheticSettings }
+import scala.reflect.io.VirtualFile
import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
import symtab.classfile.Pickler
-import dependencies.DependencyAnalysis
import plugins.Plugins
import ast._
import ast.parser._
@@ -26,13 +27,12 @@ import typechecker._
import transform.patmat.PatternMatching
import transform._
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
-import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
-import backend.jvm.{GenJVM, GenASM}
-import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination }
+import backend.{ ScalaPrimitives, Platform, JavaPlatform }
+import backend.jvm.GenBCode
+import backend.jvm.GenASM
+import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
import scala.language.postfixOps
-import scala.reflect.internal.StdAttachments
-import scala.reflect.ClassTag
class Global(var currentSettings: Settings, var reporter: Reporter)
extends SymbolTable
@@ -47,10 +47,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// the mirror --------------------------------------------------
override def isCompilerUniverse = true
+ override val useOffsetPositions = !currentSettings.Yrangepos
class GlobalMirror extends Roots(NoSymbol) {
val universe: self.type = self
- def rootLoader: LazyType = platform.rootLoader
+ def rootLoader: LazyType = new loaders.PackageLoader(classPath)
override def toString = "compiler mirror"
}
@@ -69,25 +70,28 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
override def settings = currentSettings
+ /** Switch to turn on detailed type logs */
+ var printTypings = settings.Ytyperdebug.value
+
def this(reporter: Reporter) =
this(new Settings(err => reporter.error(null, err)), reporter)
def this(settings: Settings) =
this(settings, new ConsoleReporter(settings))
- def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym)
-
def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
// platform specific elements
- type ThisPlatform = Platform { val global: Global.this.type }
+ protected class GlobalPlatform extends {
+ val global: Global.this.type = Global.this
+ val settings: Settings = Global.this.settings
+ } with JavaPlatform
- lazy val platform: ThisPlatform =
- if (forMSIL) new { val global: Global.this.type = Global.this } with MSILPlatform
- else new { val global: Global.this.type = Global.this } with JavaPlatform
+ type ThisPlatform = JavaPlatform { val global: Global.this.type }
+ lazy val platform: ThisPlatform = new GlobalPlatform
- type PlatformClassPath = ClassPath[platform.BinaryRepr]
+ type PlatformClassPath = ClassPath[AbstractFile]
type OptClassPath = Option[PlatformClassPath]
def classPath: PlatformClassPath = platform.classPath
@@ -105,15 +109,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
typer.typed(mkCast(tree, pt))
}
- /** Trees fresh from the oven, mostly for use by the parser. */
- object treeBuilder extends {
- val global: Global.this.type = Global.this
- } with TreeBuilder {
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = currentUnit.freshTermName(prefix)
- def freshTypeName(prefix: String): TypeName = currentUnit.freshTypeName(prefix)
- def o2p(offset: Int): Position = new OffsetPosition(currentUnit.source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(currentUnit.source, start, mid, end)
+ /** A spare instance of TreeBuilder left for backwards compatibility. */
+ lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new TreeBuilder {
+ val global: Global.this.type = Global.this;
+ def unit = currentUnit
+ def source = currentUnit.source
}
/** Fold constants */
@@ -136,6 +136,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val global: Global.this.type = Global.this
} with OverridingPairs
+ type SymbolPair = overridingPairs.SymbolPair
+
// Optimizer components
/** ICode analysis for optimization */
@@ -173,7 +175,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (lastPrintedSource == source)
println(": tree is unchanged since " + lastPrintedPhase)
else {
- lastPrintedPhase = phase.prev // since we're running inside "afterPhase"
+ lastPrintedPhase = phase.prev // since we're running inside "exitingPhase"
lastPrintedSource = source
println("")
println(source)
@@ -224,25 +226,32 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
- def error(msg: String) = globalError(msg)
- def inform(msg: String) = reporter.echo(msg)
- override def globalError(msg: String) = reporter.error(NoPosition, msg)
- override def warning(msg: String) =
- if (settings.fatalWarnings.value) globalError(msg)
- else reporter.warning(NoPosition, msg)
+ def error(msg: String) = globalError(msg)
+
+ override def inform(msg: String) = inform(NoPosition, msg)
+ override def globalError(msg: String) = globalError(NoPosition, msg)
+ override def warning(msg: String) = warning(NoPosition, msg)
+
+ def globalError(pos: Position, msg: String) = reporter.error(pos, msg)
+ def warning(pos: Position, msg: String) = if (settings.fatalWarnings) globalError(pos, msg) else reporter.warning(pos, msg)
+ def inform(pos: Position, msg: String) = reporter.echo(pos, msg)
// Getting in front of Predef's asserts to supplement with more info.
// This has the happy side effect of masking the one argument forms
// of assert and require (but for now I've reproduced them here,
// because there are a million to fix.)
@inline final def assert(assertion: Boolean, message: => Any) {
- Predef.assert(assertion, supplementErrorMessage("" + message))
+ // calling Predef.assert would send a freshly allocated closure wrapping the one received as argument.
+ if (!assertion)
+ throw new java.lang.AssertionError("assertion failed: "+ supplementErrorMessage("" + message))
}
@inline final def assert(assertion: Boolean) {
assert(assertion, "")
}
@inline final def require(requirement: Boolean, message: => Any) {
- Predef.require(requirement, supplementErrorMessage("" + message))
+ // calling Predef.require would send a freshly allocated closure wrapping the one received as argument.
+ if (!requirement)
+ throw new IllegalArgumentException("requirement failed: "+ supplementErrorMessage("" + message))
}
@inline final def require(requirement: Boolean) {
require(requirement, "")
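The comments in the assert/require overrides above note that delegating to Predef would allocate a fresh closure around the by-name message on every call, even when the check passes. A small illustrative sketch of that cost, using hypothetical names rather than compiler code:

object ByNameForwarding {
  def check(cond: Boolean, msg: => Any): Unit =
    if (!cond) throw new AssertionError("assertion failed: " + msg)

  // Forwarding the by-name `msg` wraps it in a new thunk on every call,
  // even on the happy path where `cond` is true.
  def viaForwarding(cond: Boolean, msg: => Any): Unit = check(cond, msg)

  // Testing the condition first only evaluates `msg` when the check fails,
  // which is what the inlined overrides above do.
  def direct(cond: Boolean, msg: => Any): Unit =
    if (!cond) throw new AssertionError("assertion failed: " + msg)
}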
@@ -255,30 +264,31 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
@inline final def ifDebug(body: => Unit) {
- if (settings.debug.value)
+ if (settings.debug)
body
}
- // Warnings issued only under -Ydebug. For messages which should reach
- // developer ears, but are not adequately actionable by users.
- @inline final override def debugwarn(msg: => String) {
- if (settings.debug.value)
- warning(msg)
- }
- private def elapsedMessage(msg: String, start: Long) =
- msg + " in " + (currentTime - start) + "ms"
+ override protected def isDeveloper = settings.developer || super.isDeveloper
+
+ /** This is for WARNINGS which should reach the ears of scala developers
+ * whenever they occur, but are not useful for normal users. They should
+ * be precise, explanatory, and infrequent. Please don't use this as a
+ * logging mechanism. !!! is prefixed to all messages issued via this route
+ * to make them visually distinct.
+ */
+ @inline final override def devWarning(msg: => String): Unit = devWarning(NoPosition, msg)
+ @inline final def devWarning(pos: Position, msg: => String) {
+ def pos_s = if (pos eq NoPosition) "" else s" [@ $pos]"
+ if (isDeveloper)
+ warning(pos, "!!! " + msg)
+ else
+ log(s"!!!$pos_s $msg") // such warnings always at least logged
+ }
def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg))
- def informProgress(msg: String) = if (opt.verbose) inform("[" + msg + "]")
- def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x))
- def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
def logError(msg: String, t: Throwable): Unit = ()
- def logAfterEveryPhase[T](msg: String)(op: => T) {
- log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op))
- }
-
override def shouldLogAtThisPhase = settings.log.isSetByUser && (
(settings.log containsPhase globalPhase) || (settings.log containsPhase phase)
)
@@ -289,7 +299,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
@inline final override def debuglog(msg: => String) {
- if (settings.debug.value)
+ if (settings.debug)
log(msg)
}
@@ -302,7 +312,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
private val reader: SourceReader = {
val defaultEncoding = Properties.sourceEncoding
- val defaultReader = Properties.sourceReader
def loadCharset(name: String) =
try Some(Charset.forName(name))
@@ -315,7 +324,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
None
}
- val charset = opt.encoding flatMap loadCharset getOrElse {
+ val charset = ( if (settings.encoding.isSetByUser) Some(settings.encoding.value) else None ) flatMap loadCharset getOrElse {
settings.encoding.value = defaultEncoding // A mandatory charset
Charset.forName(defaultEncoding)
}
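The charset hunk above replaces the old `opt.encoding` helper with an explicit "use the setting only if the user set it, otherwise fall back to the default" selection. A self-contained sketch of that logic with illustrative names (loadCharset mirrors the loader above):

import java.nio.charset.{ Charset, IllegalCharsetNameException, UnsupportedCharsetException }

object CharsetSelection {
  // None when the name is unknown or malformed, so callers can fall back cleanly.
  def loadCharset(name: String): Option[Charset] =
    try Some(Charset.forName(name))
    catch { case _: IllegalCharsetNameException | _: UnsupportedCharsetException => None }

  // Prefer a user-supplied encoding when present and valid, else the default.
  def pick(userEncoding: Option[String], defaultEncoding: String): Charset =
    userEncoding flatMap loadCharset getOrElse Charset.forName(defaultEncoding)
}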
@@ -330,62 +339,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- opt.sourceReader flatMap loadReader getOrElse {
+ ( if (settings.sourceReader.isSetByUser) Some(settings.sourceReader.value) else None ) flatMap loadReader getOrElse {
new SourceReader(charset.newDecoder(), reporter)
}
}
- if (!dependencyAnalysis.off)
- dependencyAnalysis.loadDependencyAnalysis()
-
- if (opt.verbose || opt.logClasspath) {
+ if (settings.verbose || settings.Ylogcp) {
// Uses the "do not truncate" inform
informComplete("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]")
informComplete("[search path for class files: " + classPath.asClasspathString + "]")
}
- object opt extends AestheticSettings {
- def settings = Global.this.settings
-
- // protected implicit lazy val globalPhaseOrdering: Ordering[Phase] = Ordering[Int] on (_.id)
- def isActive(ph: Settings#PhasesSetting) = ph containsPhase globalPhase
- def wasActive(ph: Settings#PhasesSetting) = ph containsPhase globalPhase.prev
-
- // Allows for syntax like scalac -Xshow-class Random@erasure,typer
- private def splitClassAndPhase(str: String, term: Boolean): Name = {
- def mkName(s: String) = if (term) newTermName(s) else newTypeName(s)
- (str indexOf '@') match {
- case -1 => mkName(str)
- case idx =>
- val phasePart = str drop (idx + 1)
- settings.Yshow.tryToSetColon(phasePart split ',' toList)
- mkName(str take idx)
- }
- }
-
- // behavior
-
- // debugging
- def checkPhase = wasActive(settings.check)
- def logPhase = isActive(settings.log)
-
- // Write *.icode files right after GenICode when -Xprint-icode was given.
- def writeICodeAtICode = settings.writeICode.isSetByUser && isActive(settings.writeICode)
-
- // showing/printing things
- def browsePhase = isActive(settings.browse)
- def echoFilenames = opt.debug && (opt.verbose || currentRun.size < 5)
- def noShow = settings.Yshow.isDefault
- def printLate = settings.printLate.value
- def printPhase = isActive(settings.Xprint)
- def showNames = List(showClass, showObject).flatten
- def showPhase = isActive(settings.Yshow)
- def showSymbols = settings.Yshowsyms.value
- def showTrees = settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value
- val showClass = optSetting[String](settings.Xshowcls) map (x => splitClassAndPhase(x, false))
- val showObject = optSetting[String](settings.Xshowobj) map (x => splitClassAndPhase(x, true))
- }
-
// The current division between scala.reflect.* and scala.tools.nsc.* is pretty
// clunky. It is often difficult to have a setting influence something without having
// to create it on that side. For this one my strategy is a constant def at the file
@@ -394,12 +358,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// Here comes another one...
override protected val enableTypeVarExperimentals = settings.Xexperimental.value
- // True if -Xscript has been set, indicating a script run.
- def isScriptRun = opt.script.isDefined
-
- def getSourceFile(f: AbstractFile): BatchSourceFile =
- if (isScriptRun) ScriptSourceFile(f, reader read f)
- else new BatchSourceFile(f, reader read f)
+ def getSourceFile(f: AbstractFile): BatchSourceFile = new BatchSourceFile(f, reader read f)
def getSourceFile(name: String): SourceFile = {
val f = AbstractFile.getFile(name)
@@ -408,9 +367,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
getSourceFile(f)
}
- lazy val loaders = new SymbolLoaders {
+ lazy val loaders = new {
val global: Global.this.type = Global.this
- }
+ val platform: Global.this.platform.type = Global.this.platform
+ } with GlobalSymbolLoaders
/** Returns the mirror that loaded given symbol */
def mirrorThatLoaded(sym: Symbol): Mirror = rootMirror
@@ -453,7 +413,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if ((unit ne null) && unit.exists)
lastSeenSourceFile = unit.source
- if (opt.echoFilenames)
+ if (settings.debug && (settings.verbose || currentRun.size < 5))
inform("[running phase " + name + " on " + unit + "]")
val unit0 = currentUnit
@@ -463,7 +423,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
currentRun.informUnitStarting(this, unit)
apply(unit)
}
- currentRun.advanceUnit
+ currentRun.advanceUnit()
} finally {
//assert(currentUnit == unit)
currentRun.currentUnit = unit0
@@ -471,16 +431,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- /** Switch to turn on detailed type logs */
- var printTypings = settings.Ytyperdebug.value
- var printInfers = settings.Yinferdebug.value
-
// phaseName = "parser"
- object syntaxAnalyzer extends {
+ lazy val syntaxAnalyzer = new {
val global: Global.this.type = Global.this
+ } with SyntaxAnalyzer {
val runsAfter = List[String]()
val runsRightAfter = None
- } with SyntaxAnalyzer
+ override val initial = true
+ }
+
+ import syntaxAnalyzer.{ UnitScanner, UnitParser }
// !!! I think we're overdue for all these phase objects being lazy vals.
// There's no way for a Global subclass to provide a custom typer
@@ -498,9 +458,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
object patmat extends {
val global: Global.this.type = Global.this
val runsAfter = List("typer")
- // patmat doesn't need to be right after typer, as long as we run before supperaccesors
- // (sbt does need to run right after typer, so don't conflict)
val runsRightAfter = None
+ // patmat doesn't need to be right after typer, as long as we run before superaccessors
+ // (sbt does need to run right after typer, so don't conflict)
} with PatternMatching
// phaseName = "superaccessors"
@@ -615,6 +575,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with CleanUp
+ // phaseName = "delambdafy"
+ object delambdafy extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("cleanup")
+ val runsRightAfter = None
+ } with Delambdafy
+
// phaseName = "icode"
object genicode extends {
val global: Global.this.type = Global.this
@@ -629,7 +596,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with Inliners
- // phaseName = "inlineExceptionHandlers"
+ // phaseName = "inlinehandlers"
object inlineExceptionHandlers extends {
val global: Global.this.type = Global.this
val runsAfter = List("inliner")
@@ -639,23 +606,23 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// phaseName = "closelim"
object closureElimination extends {
val global: Global.this.type = Global.this
- val runsAfter = List("inlineExceptionHandlers")
+ val runsAfter = List("inlinehandlers")
val runsRightAfter = None
} with ClosureElimination
- // phaseName = "dce"
- object deadCode extends {
+ // phaseName = "constopt"
+ object constantOptimization extends {
val global: Global.this.type = Global.this
val runsAfter = List("closelim")
val runsRightAfter = None
- } with DeadCodeElimination
+ } with ConstantOptimization
- // phaseName = "jvm", FJBG-based version
- object genJVM extends {
+ // phaseName = "dce"
+ object deadCode extends {
val global: Global.this.type = Global.this
- val runsAfter = List("dce")
+ val runsAfter = List("closelim")
val runsRightAfter = None
- } with GenJVM
+ } with DeadCodeElimination
// phaseName = "jvm", ASM-based version
object genASM extends {
@@ -664,40 +631,31 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with GenASM
- // This phase is optional: only added if settings.make option is given.
- // phaseName = "dependencyAnalysis"
- object dependencyAnalysis extends {
+ // phaseName = "bcode"
+ object genBCode extends {
val global: Global.this.type = Global.this
- val runsAfter = List("jvm")
+ val runsAfter = List("dce")
val runsRightAfter = None
- } with DependencyAnalysis
+ } with GenBCode
// phaseName = "terminal"
object terminal extends {
val global: Global.this.type = Global.this
+ } with SubComponent {
val phaseName = "terminal"
- val runsAfter = List("jvm", "msil")
+ val runsAfter = List("jvm")
val runsRightAfter = None
- } with SubComponent {
- private var cache: Option[GlobalPhase] = None
- def reset(): Unit = cache = None
+ override val terminal = true
- def newPhase(prev: Phase): GlobalPhase =
- cache getOrElse returning(new TerminalPhase(prev))(x => cache = Some(x))
-
- class TerminalPhase(prev: Phase) extends GlobalPhase(prev) {
- def name = "terminal"
+ def newPhase(prev: Phase): GlobalPhase = {
+ new TerminalPhase(prev)
+ }
+ private class TerminalPhase(prev: Phase) extends GlobalPhase(prev) {
+ def name = phaseName
def apply(unit: CompilationUnit) {}
}
}
- // phaseName = "SAMPLE PHASE"
- object sampleTransform extends {
- val global: Global.this.type = Global.this
- val runsAfter = List[String]()
- val runsRightAfter = None
- } with SampleTransform
-
/** The checkers are for validating the compiler data structures
* at phase boundaries.
*/
@@ -721,7 +679,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Add the internal compiler phases to the phases set.
* This implementation creates a description map at the same time.
*/
- protected def computeInternalPhases() {
+ protected def computeInternalPhases(): Unit = {
// Note: this fits -Xshow-phases into 80 column width, which it is
// desirable to preserve.
val phs = List(
@@ -737,20 +695,22 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
uncurry -> "uncurry, translate function values to anonymous classes",
tailCalls -> "replace tail calls by jumps",
specializeTypes -> "@specialized-driven class and method specialization",
- explicitOuter -> "this refs to outer pointers, translate patterns",
+ explicitOuter -> "this refs to outer pointers",
erasure -> "erase types, add interfaces for traits",
postErasure -> "clean up erased inline classes",
lazyVals -> "allocate bitmaps, translate lazy vals into lazified defs",
lambdaLift -> "move nested functions to top level",
constructors -> "move field definitions into constructors",
mixer -> "mixin composition",
+ delambdafy -> "remove lambdas",
cleanup -> "platform-specific cleanups, generate reflective calls",
genicode -> "generate portable intermediate code",
inliner -> "optimization: do inlining",
inlineExceptionHandlers -> "optimization: inline exception handlers",
closureElimination -> "optimization: eliminate uncalled closures",
+ constantOptimization -> "optimization: optimize null and other constants",
deadCode -> "optimization: eliminate dead code",
- terminal -> "The last phase in the compiler chain"
+ terminal -> "the last phase during a compilation run"
)
phs foreach (addToPhasesSet _).tupled
@@ -768,13 +728,21 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// sequences the phase assembly
protected def computePhaseDescriptors: List[SubComponent] = {
- computeInternalPhases() // Global.scala
- computePlatformPhases() // backend/Platform.scala
- computePluginPhases() // plugins/Plugins.scala
- buildCompilerFromPhasesSet() // PhaseAssembly.scala
+ /** Allow phases to opt out of the phase assembly. */
+ def cullPhases(phases: List[SubComponent]) = {
+ val enabled = if (settings.debug && settings.isInfo) phases else phases filter (_.enabled)
+ def isEnabled(q: String) = enabled exists (_.phaseName == q)
+ val (satisfied, unhappy) = enabled partition (_.requires forall isEnabled)
+ unhappy foreach (u => globalError(s"Phase '${u.phaseName}' requires: ${u.requires filterNot isEnabled}"))
+ satisfied // they're happy now, but they may need an unhappy phase that was booted
+ }
+ computeInternalPhases() // Global.scala
+ computePlatformPhases() // backend/Platform.scala
+ computePluginPhases() // plugins/Plugins.scala
+ cullPhases(computePhaseAssembly()) // PhaseAssembly.scala
}
- /* The phase descriptor list */
+ /* The phase descriptor list. Components that are phase factories. */
lazy val phaseDescriptors: List[SubComponent] = computePhaseDescriptors
/* The set of phase objects that is the basis for the compiler phase chain */
@@ -792,86 +760,91 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
phaseDescriptors map (_.phaseName)
}
- /** A description of the phases that will run */
- def phaseDescriptions: String = {
- val width = phaseNames map (_.length) max
- val fmt = "%" + width + "s %2s %s\n"
+ /** A description of the phases that will run in this configuration, or all if -Ydebug. */
+ def phaseDescriptions: String = phaseHelp("description", elliptically = true, phasesDescMap)
- val line1 = fmt.format("phase name", "id", "description")
- val line2 = fmt.format("----------", "--", "-----------")
- val descs = phaseDescriptors.zipWithIndex map {
- case (ph, idx) => fmt.format(ph.phaseName, idx + 1, phasesDescMap(ph))
+ /** Summary of the per-phase values of nextFlags and newFlags, shown under -Xshow-phases -Ydebug. */
+ def phaseFlagDescriptions: String = {
+ def fmt(ph: SubComponent) = {
+ def fstr1 = if (ph.phaseNewFlags == 0L) "" else "[START] " + Flags.flagsToString(ph.phaseNewFlags)
+ def fstr2 = if (ph.phaseNextFlags == 0L) "" else "[END] " + Flags.flagsToString(ph.phaseNextFlags)
+ if (ph.initial) Flags.flagsToString(Flags.InitialFlags)
+ else if (ph.phaseNewFlags != 0L && ph.phaseNextFlags != 0L) fstr1 + " " + fstr2
+ else fstr1 + fstr2
}
- line1 :: line2 :: descs mkString
+ phaseHelp("new flags", elliptically = false, fmt)
}
- /** Summary of the per-phase values of nextFlags and newFlags, shown
- * with -Xshow-phases if -Ydebug also given.
+
+ /** Emit a verbose phase table.
+ * The table includes the phase id in the current assembly,
+ * or "oo" to indicate a skipped phase, or "xx" to indicate
+ * a disabled phase.
+ *
+ * @param title descriptive header
+ * @param elliptically whether to truncate the description with an ellipsis (...)
+ * @param describe how to describe a component
*/
- def phaseFlagDescriptions: String = {
- val width = phaseNames map (_.length) max
- val fmt = "%" + width + "s %2s %s\n"
-
- val line1 = fmt.format("phase name", "id", "new flags")
- val line2 = fmt.format("----------", "--", "---------")
- val descs = phaseDescriptors.zipWithIndex map {
- case (ph, idx) =>
- def fstr1 = if (ph.phaseNewFlags == 0L) "" else "[START] " + Flags.flagsToString(ph.phaseNewFlags)
- def fstr2 = if (ph.phaseNextFlags == 0L) "" else "[END] " + Flags.flagsToString(ph.phaseNextFlags)
- val fstr = (
- if (ph.ownPhase.id == 1) Flags.flagsToString(Flags.InitialFlags)
- else if (ph.phaseNewFlags != 0L && ph.phaseNextFlags != 0L) fstr1 + " " + fstr2
- else fstr1 + fstr2
- )
- fmt.format(ph.phaseName, idx + 1, fstr)
+ def phaseHelp(title: String, elliptically: Boolean, describe: SubComponent => String) = {
+ val Limit = 16 // phase names should not be absurdly long
+ val MaxCol = 80 // because some of us edit on green screens
+ val maxName = phaseNames map (_.length) max
+ val width = maxName min Limit
+ val maxDesc = MaxCol - (width + 6) // descriptions not novels
+ val fmt = if (settings.verbose || !elliptically) s"%${maxName}s %2s %s%n"
+ else s"%${width}.${width}s %2s %.${maxDesc}s%n"
+
+ val line1 = fmt.format("phase name", "id", title)
+ val line2 = fmt.format("----------", "--", "-" * title.length)
+
+ // built-in string precision merely truncates
+ import java.util.{ Formattable, FormattableFlags, Formatter }
+ def dotfmt(s: String) = new Formattable {
+ def elliptically(s: String, max: Int) = (
+ if (max < 0 || s.length <= max) s
+ else if (max < 4) s.take(max)
+ else s.take(max - 3) + "..."
+ )
+ override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int) {
+ val p = elliptically(s, precision)
+ val w = if (width > 0 && p.length < width) {
+ import FormattableFlags.LEFT_JUSTIFY
+ val leftly = (flags & LEFT_JUSTIFY) == LEFT_JUSTIFY
+ val sb = new StringBuilder
+ def pad() = 1 to width - p.length foreach (_ => sb.append(' '))
+ if (!leftly) pad()
+ sb.append(p)
+ if (leftly) pad()
+ sb.toString
+ } else p
+ formatter.out.append(w)
+ }
+ }
+
+ // phase id in run, or suitable icon
+ def idOf(p: SubComponent) = (
+ if (settings.skip contains p.phaseName) "oo" // (currentRun skipPhase p.phaseName)
+ else if (!p.enabled) "xx"
+ else p.ownPhase.id.toString
+ )
+ def mkText(p: SubComponent) = {
+ val (name, text) = if (elliptically) (dotfmt(p.phaseName), dotfmt(describe(p)))
+ else (p.phaseName, describe(p))
+ fmt.format(name, idOf(p), text)
}
- line1 :: line2 :: descs mkString
+ line1 :: line2 :: (phaseDescriptors map mkText) mkString
}
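Because built-in "%.Ns" precision merely truncates, the table code above wraps cell text in a java.util.Formattable that appends an ellipsis and does its own padding. A standalone sketch of that trick; Ellipsis and EllipsisDemo are illustrative names:

import java.util.{ Formattable, FormattableFlags, Formatter }

final class Ellipsis(s: String) extends Formattable {
  private def clip(max: Int): String =
    if (max < 0 || s.length <= max) s
    else if (max < 4) s take max
    else (s take (max - 3)) + "..."

  override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int): Unit = {
    val p      = clip(precision)
    val pad    = " " * (0 max (width - p.length))
    val padded = if ((flags & FormattableFlags.LEFT_JUSTIFY) != 0) p + pad else pad + p
    formatter.out.append(padded)
  }
}

object EllipsisDemo {
  def main(args: Array[String]): Unit =
    // left-justified, width-limited name column; elliptical description column
    println("%-10.10s %.20s".format(new Ellipsis("constructors"), new Ellipsis("move field definitions into constructors")))
}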
/** Returns List of (phase, value) pairs, including only those
* where the value compares unequal to the previous phase's value.
*/
- def afterEachPhase[T](op: => T): List[(Phase, T)] = {
+ def afterEachPhase[T](op: => T): List[(Phase, T)] = { // used in tests
phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) =>
- val value = afterPhase(ph)(op)
+ val value = exitingPhase(ph)(op)
if (res.nonEmpty && res.head._2 == value) res
else ((ph, value)) :: res
} reverse
}
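afterEachPhase above folds over the run's phases and keeps a (phase, value) pair only when the value differs from the previous phase's. The same change-only accumulation, stripped of compiler types (ChangesOnly is a hypothetical helper):

object ChangesOnly {
  // Keep only the points where the computed value differs from its predecessor.
  def changes[K, T](keys: List[K])(op: K => T): List[(K, T)] =
    keys.foldLeft(List.empty[(K, T)]) { (acc, k) =>
      val v = op(k)
      if (acc.nonEmpty && acc.head._2 == v) acc else (k, v) :: acc
    }.reverse
}

For example, ChangesOnly.changes(List(1, 2, 3, 4))(_ / 2) yields List((1,0), (2,1), (4,2)): the entry for 3 is dropped because its value equals the previous one.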
- /** Returns List of ChangeAfterPhase objects, encapsulating those
- * phase transitions where the result of the operation gave a different
- * list than it had when run during the previous phase.
- */
- def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = {
- val ops = ((NoPhase, Nil)) :: afterEachPhase(op)
-
- ops sliding 2 map {
- case (_, before) :: (ph, after) :: Nil =>
- val lost = before filterNot (after contains _)
- val gained = after filterNot (before contains _)
- ChangeAfterPhase(ph, lost, gained)
- case _ => ???
- } toList
- }
- private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name)
-
- case class ChangeAfterPhase[+T](ph: Phase, lost: List[T], gained: List[T]) {
- private def mkStr(what: String, xs: List[_]) = (
- if (xs.isEmpty) ""
- else xs.mkString(what + " after " + numberedPhase(ph) + " {\n ", "\n ", "\n}\n")
- )
- override def toString = mkStr("Lost", lost) + mkStr("Gained", gained)
- }
-
- def describeAfterEachPhase[T](op: => T): List[String] =
- afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) }
-
- def describeAfterEveryPhase[T](op: => T): String =
- describeAfterEachPhase(op) map (" " + _ + "\n") mkString
-
- def printAfterEachPhase[T](op: => T): Unit =
- describeAfterEachPhase(op) foreach (m => println(" " + m))
-
// ------------ Invalidations ---------------------------------
/** Is given package class a system package class that cannot be invalidated?
@@ -885,8 +858,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Invalidates packages that contain classes defined in a classpath entry, and
* rescans that entry.
- * @param path A fully qualified name that refers to a directory or jar file that's
- * an entry on the classpath.
+ * @param paths Fully qualified names that refer to directories or jar files that are
+ * entries on the classpath.
* First, causes the classpath entry referred to by `path` to be rescanned, so that
* any new files or deleted files or changes in subpackages are picked up.
* Second, invalidates any packages for which one of the following conditions is met:
@@ -980,7 +953,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
- val getName: ClassPath[platform.BinaryRepr] => String = (_.name)
+ val getName: ClassPath[AbstractFile] => String = (_.name)
def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
def invalidateOrRemove(root: ClassSymbol) = {
allEntries match {
@@ -1064,17 +1037,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* Then, fsc -Xexperimental clears the nsc project between successive runs of `fsc`.
*/
- /** Remove the current run when not needed anymore. Used by the build
- * manager to save on the memory foot print. The current run holds on
- * to all compilation units, which in turn hold on to trees.
- */
- private [nsc] def dropRun() {
- curRun = null
- }
-
object typeDeconstruct extends {
val global: Global.this.type = Global.this
- } with interpreter.StructuredTypeStrings
+ } with typechecker.StructuredTypeStrings
/** There are common error conditions where when the exception hits
* here, currentRun.currentUnit is null. This robs us of the knowledge
@@ -1093,102 +1058,101 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def currentRun: Run = curRun
def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
+ def currentFreshNameCreator = currentUnit.fresh
- // TODO - trim these to the absolute minimum.
- @inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op)
- @inline final def afterPostErasure[T](op: => T): T = afterPhase(currentRun.posterasurePhase)(op)
- @inline final def afterExplicitOuter[T](op: => T): T = afterPhase(currentRun.explicitouterPhase)(op)
- @inline final def afterFlatten[T](op: => T): T = afterPhase(currentRun.flattenPhase)(op)
- @inline final def afterIcode[T](op: => T): T = afterPhase(currentRun.icodePhase)(op)
- @inline final def afterMixin[T](op: => T): T = afterPhase(currentRun.mixinPhase)(op)
- @inline final def afterPickler[T](op: => T): T = afterPhase(currentRun.picklerPhase)(op)
- @inline final def afterRefchecks[T](op: => T): T = afterPhase(currentRun.refchecksPhase)(op)
- @inline final def afterSpecialize[T](op: => T): T = afterPhase(currentRun.specializePhase)(op)
- @inline final def afterTyper[T](op: => T): T = afterPhase(currentRun.typerPhase)(op)
- @inline final def afterUncurry[T](op: => T): T = afterPhase(currentRun.uncurryPhase)(op)
- @inline final def beforeErasure[T](op: => T): T = beforePhase(currentRun.erasurePhase)(op)
- @inline final def beforeExplicitOuter[T](op: => T): T = beforePhase(currentRun.explicitouterPhase)(op)
- @inline final def beforeFlatten[T](op: => T): T = beforePhase(currentRun.flattenPhase)(op)
- @inline final def beforeIcode[T](op: => T): T = beforePhase(currentRun.icodePhase)(op)
- @inline final def beforeMixin[T](op: => T): T = beforePhase(currentRun.mixinPhase)(op)
- @inline final def beforePickler[T](op: => T): T = beforePhase(currentRun.picklerPhase)(op)
- @inline final def beforeRefchecks[T](op: => T): T = beforePhase(currentRun.refchecksPhase)(op)
- @inline final def beforeSpecialize[T](op: => T): T = beforePhase(currentRun.specializePhase)(op)
- @inline final def beforeTyper[T](op: => T): T = beforePhase(currentRun.typerPhase)(op)
- @inline final def beforeUncurry[T](op: => T): T = beforePhase(currentRun.uncurryPhase)(op)
-
- def explainContext(c: analyzer.Context): String = (
- if (c == null) "" else (
- """| context owners: %s
- |
- |Enclosing block or template:
- |%s""".format(
- c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "),
- nodePrinters.nodeToString(c.enclClassOrMethod.tree)
- )
- )
+ def isGlobalInitialized = (
+ definitions.isDefinitionsInitialized
+ && rootMirror.isMirrorInitialized
+ )
+ override def isPastTyper = (
+ (curRun ne null)
+ && isGlobalInitialized // defense against init order issues
+ && (globalPhase.id > currentRun.typerPhase.id)
)
- // Owners up to and including the first package class.
+
+ // TODO - trim these to the absolute minimum.
+ @inline final def exitingErasure[T](op: => T): T = exitingPhase(currentRun.erasurePhase)(op)
+ @inline final def exitingPostErasure[T](op: => T): T = exitingPhase(currentRun.posterasurePhase)(op)
+ @inline final def exitingExplicitOuter[T](op: => T): T = exitingPhase(currentRun.explicitouterPhase)(op)
+ @inline final def exitingFlatten[T](op: => T): T = exitingPhase(currentRun.flattenPhase)(op)
+ @inline final def exitingMixin[T](op: => T): T = exitingPhase(currentRun.mixinPhase)(op)
+ @inline final def exitingDelambdafy[T](op: => T): T = exitingPhase(currentRun.delambdafyPhase)(op)
+ @inline final def exitingPickler[T](op: => T): T = exitingPhase(currentRun.picklerPhase)(op)
+ @inline final def exitingRefchecks[T](op: => T): T = exitingPhase(currentRun.refchecksPhase)(op)
+ @inline final def exitingSpecialize[T](op: => T): T = exitingPhase(currentRun.specializePhase)(op)
+ @inline final def exitingTyper[T](op: => T): T = exitingPhase(currentRun.typerPhase)(op)
+ @inline final def exitingUncurry[T](op: => T): T = exitingPhase(currentRun.uncurryPhase)(op)
+ @inline final def enteringErasure[T](op: => T): T = enteringPhase(currentRun.erasurePhase)(op)
+ @inline final def enteringExplicitOuter[T](op: => T): T = enteringPhase(currentRun.explicitouterPhase)(op)
+ @inline final def enteringFlatten[T](op: => T): T = enteringPhase(currentRun.flattenPhase)(op)
+ @inline final def enteringIcode[T](op: => T): T = enteringPhase(currentRun.icodePhase)(op)
+ @inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op)
+ @inline final def enteringDelambdafy[T](op: => T): T = enteringPhase(currentRun.delambdafyPhase)(op)
+ @inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op)
+ @inline final def enteringSpecialize[T](op: => T): T = enteringPhase(currentRun.specializePhase)(op)
+ @inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op)
+ @inline final def enteringUncurry[T](op: => T): T = enteringPhase(currentRun.uncurryPhase)(op)
+
+ // Owners which aren't package classes.
private def ownerChainString(sym: Symbol): String = (
if (sym == null) ""
- else sym.ownerChain.span(!_.isPackageClass) match {
- case (xs, pkg :: _) => (xs :+ pkg) mkString " -> "
- case _ => sym.ownerChain mkString " -> " // unlikely
- }
+ else sym.ownerChain takeWhile (!_.isPackageClass) mkString " -> "
)
+
private def formatExplain(pairs: (String, Any)*): String = (
pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n"
)
- def explainTree(t: Tree): String = formatExplain(
- )
-
/** Don't want to introduce new errors trying to report errors,
* so swallow exceptions.
*/
- override def supplementErrorMessage(errorMessage: String): String =
+ override def supplementErrorMessage(errorMessage: String): String = {
if (currentRun.supplementedError) errorMessage
else try {
+ currentRun.supplementedError = true
val tree = analyzer.lastTreeToTyper
val sym = tree.symbol
val tpe = tree.tpe
- val enclosing = lastSeenContext.enclClassOrMethod.tree
+ val site = lastSeenContext.enclClassOrMethod.owner
+ val pos_s = if (tree.pos.isDefined) s"line ${tree.pos.line} of ${tree.pos.source.file}" else "<unknown>"
+ val context_s = try {
+ // Taking 3 before, 3 after the fingered line.
+ val start = 0 max (tree.pos.line - 3)
+ val xs = scala.reflect.io.File(tree.pos.source.file.file).lines drop start take 7
+ val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx}%6d $line" }
+ strs.mkString("== Source file context for tree position ==\n\n", "\n", "")
+ }
+ catch { case t: Exception => devWarning("" + t) ; "<Cannot read source file>" }
val info1 = formatExplain(
"while compiling" -> currentSource.path,
- "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, atPhase=%s".format(globalPhase, phase) ),
+ "during phase" -> ( if (globalPhase eq phase) phase else "globalPhase=%s, enteringPhase=%s".format(globalPhase, phase) ),
"library version" -> scala.util.Properties.versionString,
"compiler version" -> Properties.versionString,
"reconstructed args" -> settings.recreateArgs.mkString(" ")
)
val info2 = formatExplain(
"last tree to typer" -> tree.summaryString,
+ "tree position" -> pos_s,
+ "tree tpe" -> tpe,
"symbol" -> Option(sym).fold("null")(_.debugLocationString),
- "symbol definition" -> Option(sym).fold("null")(_.defString),
- "tpe" -> tpe,
+ "symbol definition" -> Option(sym).fold("null")(s => s.defString + s" (a ${s.shortSymbolClass})"),
+ "symbol package" -> sym.enclosingPackage.fullName,
"symbol owners" -> ownerChainString(sym),
- "context owners" -> ownerChainString(lastSeenContext.owner)
- )
- val info3: List[String] = (
- ( List("== Enclosing template or block ==", nodePrinters.nodeToString(enclosing).trim) )
- ++ ( if (tpe eq null) Nil else List("== Expanded type of tree ==", typeDeconstruct.show(tpe)) )
- ++ ( if (!opt.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) )
- ++ ( List(errorMessage) )
+ "call site" -> (site.fullLocationString + " in " + site.enclosingPackage)
)
-
- currentRun.supplementedError = true
-
- ("\n" + info1) :: info2 :: info3 mkString "\n\n"
+ ("\n " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
}
catch { case _: Exception | _: TypeError => errorMessage }
+ }
/** The id of the currently active run
*/
override def currentRunId = curRunId
def echoPhaseSummary(ph: Phase) = {
- /** Only output a summary message under debug if we aren't echoing each file. */
- if (opt.debug && !opt.echoFilenames)
+ /* Only output a summary message under debug if we aren't echoing each file. */
+ if (settings.debug && !(settings.verbose || currentRun.size < 5))
inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]")
}
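The supplemented crash report above now shows a few source lines around the offending tree position ("3 before, 3 after the fingered line"). A minimal, hedged sketch of that windowing, independent of compiler types; SourceContext, window and radius are illustrative, and the indexing mirrors the drop/take in the hunk above:

import scala.io.Source

object SourceContext {
  // Up to 2*radius + 1 numbered lines around `line` (1-based, as in tree positions).
  def window(path: String, line: Int, radius: Int = 3): String = {
    val start = 0 max (line - radius)
    val src = Source.fromFile(path)
    try {
      src.getLines().drop(start).take(2 * radius + 1).zipWithIndex
        .map { case (text, idx) => f"${start + idx}%6d $text" }
        .mkString("\n")
    } finally src.close()
  }
}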
@@ -1196,19 +1160,29 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
class ConditionalWarning(what: String, option: Settings#BooleanSetting) {
val warnings = mutable.LinkedHashMap[Position, String]()
def warn(pos: Position, msg: String) =
- if (option.value) reporter.warning(pos, msg)
+ if (option) reporter.warning(pos, msg)
else if (!(warnings contains pos)) warnings += ((pos, msg))
def summarize() =
- if (option.isDefault && warnings.nonEmpty)
- reporter.warning(NoPosition, "there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
+ if (warnings.nonEmpty && (option.isDefault || settings.fatalWarnings))
+ warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
}
- def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code))
- def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code))
- def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
- def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
+ def newSourceFile(code: String, filename: String = "<console>") =
+ new BatchSourceFile(filename, code)
- /** A Run is a single execution of the compiler on a sets of units
+ def newCompilationUnit(code: String, filename: String = "<console>") =
+ new CompilationUnit(newSourceFile(code, filename))
+
+ def newUnitScanner(unit: CompilationUnit): UnitScanner =
+ new UnitScanner(unit)
+
+ def newUnitParser(unit: CompilationUnit): UnitParser =
+ new UnitParser(unit)
+
+ def newUnitParser(code: String, filename: String = "<console>"): UnitParser =
+ newUnitParser(newCompilationUnit(code, filename))
+
+ /** A Run is a single execution of the compiler on a set of units.
*/
class Run extends RunContextApi {
/** Have been running into too many init order issues with Run
@@ -1227,9 +1201,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings)
val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings)
- // for sbt's benefit
- def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList
- def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList
+ def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList // used in sbt
+ def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList // used in sbt
var reportedFeature = Set[Symbol]()
@@ -1239,9 +1212,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Have we already supplemented the error message of a compiler crash? */
private[nsc] final var supplementedError = false
- /** To be initialized from firstPhase. */
- private var terminalPhase: Phase = NoPhase
-
private val unitbuf = new mutable.ListBuffer[CompilationUnit]
val compiledFiles = new mutable.HashSet[String]
@@ -1251,64 +1221,100 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** A map from compiled top-level symbols to their picklers */
val symData = new mutable.HashMap[Symbol, PickleBuffer]
- private var phasec: Int = 0 // phases completed
- private var unitc: Int = 0 // units completed this phase
+ private var phasec: Int = 0 // phases completed
+ private var unitc: Int = 0 // units completed this phase
private var _unitbufSize = 0
def size = _unitbufSize
override def toString = "scalac Run for:\n " + compiledFiles.toList.sorted.mkString("\n ")
// Calculate where to stop based on settings -Ystop-before or -Ystop-after.
- // Slightly complicated logic due to wanting -Ystop-before:parser to fail rather
- // than mysteriously running to completion.
+ // The result is the phase to stop at BEFORE running it.
private lazy val stopPhaseSetting = {
- val result = phaseDescriptors sliding 2 collectFirst {
- case xs if xs exists (settings.stopBefore contains _.phaseName) => if (settings.stopBefore contains xs.head.phaseName) xs.head else xs.last
- case xs if settings.stopAfter contains xs.head.phaseName => xs.last
+ def isBefore(pd: SubComponent) = settings.stopBefore contains pd.phaseName
+ phaseDescriptors sliding 2 collectFirst {
+ case xs if xs exists isBefore
+ => (xs find isBefore).get
+ case xs if settings.stopAfter contains xs.head.phaseName
+ => xs.last
}
- if (result exists (_.phaseName == "parser"))
- globalError("Cannot stop before parser phase.")
-
- result
}
- // The phase to stop BEFORE running.
+ /** Should we stop right before entering the given phase? */
protected def stopPhase(name: String) = stopPhaseSetting exists (_.phaseName == name)
+ /** Should we skip the given phase? */
protected def skipPhase(name: String) = settings.skip contains name
- /** As definitions.init requires phase != NoPhase, and calling phaseDescriptors.head
- * will force init, there is some jockeying herein regarding init order: instead of
- * taking the head descriptor we create a parser phase directly.
- */
private val firstPhase = {
- /** Initialization. */
+ // Initialization. definitions.init requires phase != NoPhase
+ import scala.reflect.internal.SomePhase
curRunId += 1
curRun = this
-
- /** Set phase to a newly created syntaxAnalyzer and call definitions.init. */
- val parserPhase: Phase = syntaxAnalyzer.newPhase(NoPhase)
- phase = parserPhase
+ phase = SomePhase
+ phaseWithId(phase.id) = phase
definitions.init()
- // Flush the cache in the terminal phase: the chain could have been built
- // before without being used. (This happens in the interpreter.)
- terminal.reset
-
- // Each subcomponent supplies a phase, which are chained together.
- // If -Ystop:phase is given, neither that phase nor any beyond it is added.
- // If -Yskip:phase is given, that phase will be skipped.
- val phaseLinks = {
- val phs = (
- phaseDescriptors.tail
- takeWhile (pd => !stopPhase(pd.phaseName))
- filterNot (pd => skipPhase(pd.phaseName))
- )
+ // the components to use, omitting those named by -Yskip and stopping at the -Ystop phase
+ val components = {
+ // stop on a dime, but this test fails if pd is after the stop phase
+ def unstoppable(pd: SubComponent) = {
+ val stoppable = stopPhase(pd.phaseName)
+ if (stoppable && pd.initial) {
+ globalError(s"Cannot stop before initial phase '${pd.phaseName}'.")
+ true
+ } else
+ !stoppable
+ }
+ // skip a component for -Yskip or if not enabled
+ def skippable(pd: SubComponent) = {
+ val skippable = skipPhase(pd.phaseName)
+ if (skippable && (pd.initial || pd.terminal)) {
+ globalError(s"Cannot skip an initial or terminal phase '${pd.phaseName}'.")
+ false
+ } else
+ skippable || !pd.enabled
+ }
+ val phs = phaseDescriptors takeWhile unstoppable filterNot skippable
// Ensure there is a terminal phase at the end, since -Ystop may have limited the phases.
- if (phs.isEmpty || (phs.last ne terminal)) phs :+ terminal
- else phs
+ if (phs.isEmpty || !phs.last.terminal) {
+ val t = if (phaseDescriptors.last.terminal) phaseDescriptors.last else terminal
+ phs :+ t
+ } else phs
+ }
+ // Create phases and link them together. We supply the previous, and the ctor sets prev.next.
+ val last = components.foldLeft(NoPhase: Phase)((prev, c) => c newPhase prev)
+ // rewind (Iterator.iterate(last)(_.prev) dropWhile (_.prev ne NoPhase)).next
+ val first = { var p = last ; while (p.prev ne NoPhase) p = p.prev ; p }
+ val ss = settings
+
+ // As a final courtesy, see if the settings make any sense at all.
+ // If a setting selects no phase, it's a mistake. If a name prefix
+ // doesn't select a unique phase, that might be surprising too.
+ def checkPhaseSettings(including: Boolean, specs: Seq[String]*) = {
+ def isRange(s: String) = s.forall(c => c.isDigit || c == '-')
+ def isSpecial(s: String) = (s == "all" || isRange(s))
+ val setting = new ss.PhasesSetting("fake","fake")
+ for (p <- specs.flatten.to[Set]) {
+ setting.value = List(p)
+ val count = (
+ if (including) first.iterator count (setting containsPhase _)
+ else phaseDescriptors count (setting contains _.phaseName)
+ )
+ if (count == 0) warning(s"'$p' specifies no phase")
+ if (count > 1 && !isSpecial(p)) warning(s"'$p' selects $count phases")
+ if (!including && isSpecial(p)) globalError(s"-Yskip and -Ystop values must name phases: '$p'")
+ setting.clear()
+ }
}
- // Link them together.
- phaseLinks.foldLeft(parserPhase)((chain, ph) => ph newPhase chain)
- parserPhase
+ // phases that are excluded; for historical reasons, these settings only select by phase name
+ val exclusions = List(ss.stopBefore, ss.stopAfter, ss.skip)
+ val inclusions = ss.visibleSettings collect {
+ case s: ss.PhasesSetting if !(exclusions contains s) => s.value
+ }
+ checkPhaseSettings(including = true, inclusions.toSeq: _*)
+ checkPhaseSettings(including = false, exclusions map (_.value): _*)
+
+ phase = first //parserPhase
+ first
}
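The component list above is folded into a linked chain of phases (each component's newPhase(prev) hooks itself after prev), and the chain is then rewound to its first element. A tiny self-contained sketch of that assembly; MiniPhase and MiniComponent are illustrative stand-ins for Phase and SubComponent, and the next-pointer bookkeeping is omitted:

object PhaseChainSketch {
  class MiniPhase(val prev: MiniPhase, val name: String)
  object NoMiniPhase extends MiniPhase(null, "<no phase>")

  final class MiniComponent(val phaseName: String) {
    def newPhase(prev: MiniPhase): MiniPhase = new MiniPhase(prev, phaseName)
  }

  def main(args: Array[String]): Unit = {
    val components = List("parser", "namer", "typer", "terminal") map (new MiniComponent(_))
    // The fold yields the last phase; each new phase remembers its predecessor.
    val last  = components.foldLeft(NoMiniPhase: MiniPhase)((prev, c) => c newPhase prev)
    // Rewind the prev pointers to recover the first phase in the chain.
    val first = { var p = last; while (p.prev ne NoMiniPhase) p = p.prev; p }
    println(first.name)  // parser
  }
}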
/** Reset all classes contained in current project, as determined by
@@ -1318,7 +1324,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def resetProjectClasses(root: Symbol): Unit = try {
def unlink(sym: Symbol) =
if (sym != NoSymbol) root.info.decls.unlink(sym)
- if (settings.verbose.value) inform("[reset] recursing in "+root)
+ if (settings.verbose) inform("[reset] recursing in "+root)
val toReload = mutable.Set[String]()
for (sym <- root.info.decls) {
if (sym.isInitialized && clearOnNextRun(sym))
@@ -1338,7 +1344,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
for (fullname <- toReload)
classPath.findClass(fullname) match {
case Some(classRep) =>
- if (settings.verbose.value) inform("[reset] reinit "+fullname)
+ if (settings.verbose) inform("[reset] reinit "+fullname)
loaders.initializeFromClassPath(root, classRep)
case _ =>
}
@@ -1347,8 +1353,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// this handler should not be necessary, but it seems that `fsc`
// eats exceptions if they appear here. Need to find out the cause for
// this and fix it.
- inform("[reset] exception happened: "+ex);
- ex.printStackTrace();
+ inform("[reset] exception happened: "+ex)
+ ex.printStackTrace()
throw ex
}
@@ -1374,14 +1380,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def advancePhase() {
unitc = 0
phasec += 1
- refreshProgress
+ refreshProgress()
}
/** take note that a phase on a unit is completed
* (for progress reporting)
*/
def advanceUnit() {
unitc += 1
- refreshProgress
+ refreshProgress()
}
def cancel() { reporter.cancelled = true }
@@ -1402,7 +1408,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val namerPhase = phaseNamed("namer")
// val packageobjectsPhase = phaseNamed("packageobjects")
val typerPhase = phaseNamed("typer")
- val inlineclassesPhase = phaseNamed("inlineclasses")
+ // val inlineclassesPhase = phaseNamed("inlineclasses")
// val superaccessorsPhase = phaseNamed("superaccessors")
val picklerPhase = phaseNamed("pickler")
val refchecksPhase = phaseNamed("refchecks")
@@ -1415,22 +1421,20 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val erasurePhase = phaseNamed("erasure")
val posterasurePhase = phaseNamed("posterasure")
// val lazyvalsPhase = phaseNamed("lazyvals")
- val lambdaliftPhase = phaseNamed("lambdalift")
+ // val lambdaliftPhase = phaseNamed("lambdalift")
// val constructorsPhase = phaseNamed("constructors")
val flattenPhase = phaseNamed("flatten")
val mixinPhase = phaseNamed("mixin")
+ val delambdafyPhase = phaseNamed("delambdafy")
val cleanupPhase = phaseNamed("cleanup")
val icodePhase = phaseNamed("icode")
val inlinerPhase = phaseNamed("inliner")
- val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers")
+ val inlineExceptionHandlersPhase = phaseNamed("inlinehandlers")
val closelimPhase = phaseNamed("closelim")
val dcePhase = phaseNamed("dce")
- val jvmPhase = phaseNamed("jvm")
- // val msilPhase = phaseNamed("msil")
+ // val jvmPhase = phaseNamed("jvm")
def runIsAt(ph: Phase) = globalPhase.id == ph.id
- def runIsPast(ph: Phase) = globalPhase.id > ph.id
- // def runIsAtBytecodeGen = (runIsAt(jvmPhase) || runIsAt(msilPhase))
def runIsAtOptimiz = {
runIsAt(inlinerPhase) || // listing phases in full for robustness when -Ystop-after has been given.
runIsAt(inlineExceptionHandlersPhase) ||
@@ -1470,10 +1474,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def registerPickle(sym: Symbol): Unit = ()
/** does this run compile given class, module, or case factory? */
+ // NOTE: Early initialized members temporarily typechecked before the enclosing class, see typedPrimaryConstrBody!
+ // Here we work around that wrinkle by claiming that a top-level, early-initialized member is compiled in
+ // *every* run. This approximation works because this method is exclusively called with `this` == `currentRun`.
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
- else if (!sym.owner.isPackageClass) compiles(sym.enclosingTopLevelClass)
+ else if (sym.isTopLevel && sym.isEarlyInitialized) true
+ else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -1494,13 +1502,29 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (canCheck) {
phase = globalPhase
- if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes
- else treeChecker.checkTrees
+ if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes()
+ else treeChecker.checkTrees()
}
}
- private def showMembers() =
- opt.showNames foreach (x => showDef(x, opt.declsOnly, globalPhase))
+ private def showMembers() = {
+ // Allows for syntax like scalac -Xshow-class Random@erasure,typer
+ def splitClassAndPhase(str: String, term: Boolean): Name = {
+ def mkName(s: String) = if (term) newTermName(s) else newTypeName(s)
+ (str indexOf '@') match {
+ case -1 => mkName(str)
+ case idx =>
+ val phasePart = str drop (idx + 1)
+ settings.Yshow.tryToSetColon(phasePart split ',' toList)
+ mkName(str take idx)
+ }
+ }
+ if (settings.Xshowcls.isSetByUser)
+ showDef(splitClassAndPhase(settings.Xshowcls.value, term = false), declsOnly = false, globalPhase)
+
+ if (settings.Xshowobj.isSetByUser)
+ showDef(splitClassAndPhase(settings.Xshowobj.value, term = true), declsOnly = false, globalPhase)
+ }
// Similarly, this will only be created under -Yshow-syms.
object trackerFactory extends SymbolTrackers {
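A minimal standalone sketch of the name@phase parsing that the splitClassAndPhase helper above performs for -Xshow-class/-Xshow-object (e.g. scalac -Xshow-class Random@erasure,typer). The object and method names below are illustrative only, not compiler API:

object ShowClassSyntaxSketch {
  // Split "Random@erasure,typer" into the class/object name and the requested phases;
  // with no '@', the whole string is the name and no phase restriction applies.
  def split(str: String): (String, List[String]) =
    str.indexOf('@') match {
      case -1  => (str, Nil)
      case idx => (str.take(idx), str.drop(idx + 1).split(',').toList)
    }

  def main(args: Array[String]): Unit = {
    println(split("Random@erasure,typer")) // (Random,List(erasure, typer))
    println(split("scala.Option"))         // (scala.Option,List())
  }
}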
@@ -1508,7 +1532,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x))
def snapshot() = {
inform("\n[[symbol layout at end of " + phase + "]]")
- afterPhase(phase) {
+ exitingPhase(phase) {
trackers foreach { t =>
t.snapshot()
inform(t.show("Heading from " + phase.prev.name + " to " + phase.name))
@@ -1518,6 +1542,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
def reportCompileErrors() {
+ if (!reporter.hasErrors && reporter.hasWarnings && settings.fatalWarnings)
+ globalError("No warnings can be incurred under -Xfatal-warnings.")
+
if (reporter.hasErrors) {
for ((sym, file) <- symSource.iterator) {
sym.reset(new loaders.SourcefileLoader(file))
@@ -1526,7 +1553,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
else {
- allConditionalWarnings foreach (_.summarize)
+ allConditionalWarnings foreach (_.summarize())
if (seenMacroExpansionsFallingBack)
warning("some macros could not be expanded and code fell back to overridden methods;"+
@@ -1535,38 +1562,31 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- /** Compile list of source files */
- def compileSources(_sources: List[SourceFile]) {
- val depSources = dependencyAnalysis calculateFiles _sources.distinct
- val sources = coreClassesFirst(depSources)
- // there is a problem already, e.g. a plugin was passed a bad option
- if (reporter.hasErrors)
- return
+ /** Caches member symbols that are defs in Definitions, because they might change from Run to Run. */
+ val runDefinitions: definitions.RunDefinitions = new definitions.RunDefinitions
+
+ /** Compile list of source files,
+ * unless there is already a problem,
+ * such as a plugin having been passed a bad option.
+ */
+ def compileSources(sources: List[SourceFile]) = if (!reporter.hasErrors) {
- // nothing to compile, but we should still report use of deprecated options
- if (sources.isEmpty) {
+ def checkDeprecations() = {
checkDeprecatedSettings(newCompilationUnit(""))
reportCompileErrors()
- return
}
- compileUnits(sources map (new CompilationUnit(_)), firstPhase)
- }
+ val units = sources map scripted map (new CompilationUnit(_))
- def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
- try compileUnitsInternal(units, fromPhase)
- catch { case ex: Throwable =>
- val shown = if (settings.verbose.value) {
- val pw = new java.io.PrintWriter(new java.io.StringWriter)
- ex.printStackTrace(pw)
- pw.toString
- } else ex.getClass.getName
- // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc
- globalError(supplementErrorMessage("uncaught exception during compilation: " + shown))
- throw ex
+ units match {
+ case Nil => checkDeprecations() // nothing to compile, report deprecated options
+ case _ => compileUnits(units, firstPhase)
}
}
+ def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit =
+ compileUnitsInternal(units, fromPhase)
+
private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
doInvalidation()
@@ -1580,67 +1600,66 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
while (globalPhase.hasNext && !reporter.hasErrors) {
val startTime = currentTime
phase = globalPhase
- globalPhase.run
+ globalPhase.run()
// progress update
informTime(globalPhase.description, startTime)
-
- if (opt.writeICodeAtICode || (opt.printPhase && runIsAtOptimiz)) {
+ val shouldWriteIcode = (
+ (settings.writeICode.isSetByUser && (settings.writeICode containsPhase globalPhase))
+ || (!settings.Xprint.doAllPhases && (settings.Xprint containsPhase globalPhase) && runIsAtOptimiz)
+ )
+ if (shouldWriteIcode) {
// Write *.icode files when -Xprint-icode or -Xprint:<some-optimiz-phase> was given.
writeICode()
- } else if (opt.printPhase || opt.printLate && runIsAt(cleanupPhase)) {
+ } else if ((settings.Xprint containsPhase globalPhase) || settings.printLate && runIsAt(cleanupPhase)) {
// print trees
- if (opt.showTrees) nodePrinters.printAll()
+ if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) nodePrinters.printAll()
else printAllUnits()
}
// print the symbols presently attached to AST nodes
- if (opt.showSymbols)
+ if (settings.Yshowsyms)
trackerFactory.snapshot()
// print members
- if (opt.showPhase)
+ if (settings.Yshow containsPhase globalPhase)
showMembers()
// browse trees with swing tree viewer
- if (opt.browsePhase)
+ if (settings.browse containsPhase globalPhase)
treeBrowser browse (phase.name, units)
// move the pointer
globalPhase = globalPhase.next
// run tree/icode checkers
- if (opt.checkPhase)
+ if (settings.check containsPhase globalPhase.prev)
runCheckers()
// output collected statistics
- if (opt.printStats)
+ if (settings.Ystatistics)
statistics.print(phase)
- advancePhase
+ advancePhase()
}
if (traceSymbolActivity)
units map (_.body) foreach (traceSymbols recordSymbolsInTree _)
// In case no phase was specified for -Xshow-class/object, show it now for sure.
- if (opt.noShow)
+ if (settings.Yshow.isDefault)
showMembers()
reportCompileErrors()
symSource.keys foreach (x => resetPackageClass(x.owner))
informTime("total", startTime)
- // record dependency data
- if (!dependencyAnalysis.off)
- dependencyAnalysis.saveDependencyAnalysis()
-
// Clear any sets or maps created via perRunCaches.
perRunCaches.clearAll()
// Reset project
if (!stopPhase("namer")) {
- atPhase(namerPhase) {
+ enteringPhase(namerPhase) {
resetProjectClasses(RootClass)
}
}
@@ -1656,7 +1675,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def compile(filenames: List[String]) {
try {
val sources: List[SourceFile] =
- if (isScriptRun && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time"))
+ if (settings.script.isSetByUser && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time"))
else filenames map getSourceFile
compileSources(sources)
@@ -1664,12 +1683,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
catch { case ex: IOException => globalError(ex.getMessage()) }
}
+ /** If this compilation is scripted, convert the source to a script source. */
+ private def scripted(s: SourceFile) = s match {
+ case b: BatchSourceFile if settings.script.isSetByUser => ScriptSourceFile(b)
+ case _ => s
+ }
+
/** Compile abstract file until `globalPhase`, but at least
* to phase "namer".
*/
def compileLate(file: AbstractFile) {
if (!compiledFiles(file.path))
- compileLate(new CompilationUnit(getSourceFile(file)))
+ compileLate(new CompilationUnit(scripted(getSourceFile(file))))
}
/** Compile abstract file until `globalPhase`, but at least to phase "namer".
@@ -1680,8 +1705,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (firstPhase ne null) { // we might get here during initialization, if a source is newer than the binary
val maxId = math.max(globalPhase.id, typerPhase.id)
firstPhase.iterator takeWhile (_.id < maxId) foreach (ph =>
- atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
- refreshProgress
+ enteringPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
+ refreshProgress()
}
}
@@ -1689,56 +1714,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* is needed for?)
*/
private def resetPackageClass(pclazz: Symbol) {
- atPhase(firstPhase) {
- pclazz.setInfo(atPhase(typerPhase)(pclazz.info))
+ enteringPhase(firstPhase) {
+ pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info))
}
if (!pclazz.isRoot) resetPackageClass(pclazz.owner)
}
-
- /**
- * Re-orders the source files to
- * 1. This Space Intentionally Left Blank
- * 2. LowPriorityImplicits / EmbeddedControls (i.e. parents of Predef)
- * 3. the rest
- *
- * 1 is to avoid cyclic reference errors.
- * 2 is due to the following. When completing "Predef" (*), typedIdent is called
- * for its parents (e.g. "LowPriorityImplicits"). typedIdent checks whether
- * the symbol reallyExists, which tests if the type of the symbol after running
- * its completer is != NoType.
- * If the "namer" phase has not yet run for "LowPriorityImplicits", the symbol
- * has a SourcefileLoader as type. Calling "doComplete" on it does nothing at
- * all, because the source file is part of the files to be compiled anyway.
- * So the "reallyExists" test will return "false".
- * Only after the namer, the symbol has a lazy type which actually computes
- * the info, and "reallyExists" behaves as expected.
- * So we need to make sure that the "namer" phase is run on predef's parents
- * before running it on predef.
- *
- * (*) Predef is completed early when calling "mkAttributedRef" during the
- * addition of "import Predef._" to sourcefiles. So this situation can't
- * happen for user classes.
- *
- */
- private def coreClassesFirst(files: List[SourceFile]) = {
- val goLast = 4
- def rank(f: SourceFile) = {
- if (f.file.container.name != "scala") goLast
- else f.file.name match {
- case "LowPriorityImplicits.scala" => 2
- case "StandardEmbeddings.scala" => 2
- case "EmbeddedControls.scala" => 2
- case "Predef.scala" => 3 /* Predef.scala before Any.scala, etc. */
- case _ => goLast
- }
- }
- files sortBy rank
- }
} // class Run
def printAllUnits() {
print("[[syntax trees at end of %25s]]".format(phase))
- afterPhase(phase)(currentRun.units foreach { unit =>
+ exitingPhase(phase)(currentRun.units foreach { unit =>
nodePrinters showUnit unit
})
}
@@ -1747,7 +1732,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
*/
def showDef(fullName: Name, declsOnly: Boolean, ph: Phase) = {
val boringOwners = Set[Symbol](definitions.AnyClass, definitions.AnyRefClass, definitions.ObjectClass)
- def phased[T](body: => T): T = afterPhase(ph)(body)
+ def phased[T](body: => T): T = exitingPhase(ph)(body)
def boringMember(sym: Symbol) = boringOwners(sym.owner)
def symString(sym: Symbol) = if (sym.isTerm) sym.defString else sym.toString
@@ -1793,7 +1778,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val printer = new icodes.TextPrinter(null, icodes.linearizer)
icodes.classes.values.foreach((cls) => {
val suffix = if (cls.symbol.hasModuleFlag) "$.icode" else ".icode"
- var file = getFile(cls.symbol, suffix)
+ val file = getFile(cls.symbol, suffix)
// if (file.exists())
// file = new File(file.getParentFile(), file.getName() + "1")
try {
@@ -1803,25 +1788,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
informProgress("wrote " + file)
} catch {
case ex: IOException =>
- if (opt.debug) ex.printStackTrace()
+ if (settings.debug) ex.printStackTrace()
globalError("could not write file " + file)
}
})
}
- // In order to not outright break code which overrides onlyPresentation (like sbt 0.7.5.RC0)
- // I restored and deprecated it. That would be enough to avoid the compilation
- // failure, but the override wouldn't accomplish anything. So now forInteractive
- // and forScaladoc default to onlyPresentation, which is the same as defaulting
- // to false except in old code. The downside is that this leaves us calling a
- // deprecated method: but I see no simple way out, so I leave it for now.
- def forJVM = opt.jvm
- override def forMSIL = opt.msil
- def forInteractive = onlyPresentation
- def forScaladoc = onlyPresentation
def createJavadoc = false
-
- @deprecated("Use forInteractive or forScaladoc, depending on what you're after", "2.9.0")
- def onlyPresentation = false
}
object Global {
diff --git a/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala
new file mode 100644
index 0000000000..6921548230
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala
@@ -0,0 +1,30 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools
+package nsc
+
+/**
+ * Symbol loaders implementation that wires dependencies using Global.
+ */
+abstract class GlobalSymbolLoaders extends symtab.SymbolLoaders {
+ val global: Global
+ val symbolTable: global.type = global
+ val platform: symbolTable.platform.type
+ import global._
+ def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol = {
+ def lookup = sym.info.member(name)
+ // if loading during initialization of `definitions`, typerPhase is not yet set;
+ // in that case we simply load the member at the current phase
+ if (currentRun.typerPhase eq null)
+ lookup
+ else
+ enteringTyper { lookup }
+ }
+
+ protected def compileLate(srcfile: io.AbstractFile): Unit =
+ currentRun.compileLate(srcfile)
+}
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/compiler/scala/tools/nsc/Interpreter.scala
deleted file mode 100644
index 434f19f21b..0000000000
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package scala.tools.nsc
-
-import interpreter._
-import java.io._
-
-/** A compatibility stub.
- */
-@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
-class Interpreter(settings: Settings, out: PrintWriter) extends IMain(settings, out) {
- def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
- def this() = this(new Settings())
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/InterpreterLoop.scala b/src/compiler/scala/tools/nsc/InterpreterLoop.scala
deleted file mode 100644
index a0be3f4fdb..0000000000
--- a/src/compiler/scala/tools/nsc/InterpreterLoop.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package scala.tools.nsc
-
-import interpreter._
-import java.io._
-
-/** A compatibility stub.
- */
-@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
-class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) extends ILoop(in0, out) {
- def this(in0: BufferedReader, out: PrintWriter) = this(Some(in0), out)
- def this() = this(None, new PrintWriter(scala.Console.out))
-}
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index 7d112dfb3e..a66ee572a9 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -2,80 +2,26 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
+package scala.tools
+package nsc
-package scala.tools.nsc
-
-import java.io.File
-import File.pathSeparator
-
-import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
-import Properties.msilLibPath
+import scala.language.postfixOps
/** The main class for NSC, a compiler for the programming
- * language Scala.
+ * language Scala.
*/
-object Main extends Driver with EvalLoop {
-
- def resident(compiler: Global) {
- loop { line =>
- val args = line.split(' ').toList
- val command = new CompilerCommand(args, new Settings(scalacError))
- compiler.reporter.reset()
- new compiler.Run() compile command.files
- }
+class MainClass extends Driver with EvalLoop {
+ def resident(compiler: Global): Unit = loop { line =>
+ val command = new CompilerCommand(line split "\\s+" toList, new Settings(scalacError))
+ compiler.reporter.reset()
+ new compiler.Run() compile command.files
}
- override def processSettingsHook(): Boolean =
- if (settings.Yidedebug.value) {
- settings.Xprintpos.value = true
- settings.Yrangepos.value = true
- val compiler = new interactive.Global(settings, reporter)
- import compiler.{ reporter => _, _ }
-
- val sfs = command.files map getSourceFile
- val reloaded = new interactive.Response[Unit]
- askReload(sfs, reloaded)
-
- reloaded.get.right.toOption match {
- case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
- case None => reporter.reset() // Causes other compiler errors to be ignored
- }
- askShutdown
- false
- }
- else if (settings.Ybuilderdebug.value != "none") {
- def fileSet(files : List[String]) = Set.empty ++ (files map AbstractFile.getFile)
-
- val buildManager = settings.Ybuilderdebug.value match {
- case "simple" => new SimpleBuildManager(settings)
- case _ => new RefinedBuildManager(settings)
- }
- buildManager.addSourceFiles(fileSet(command.files))
-
- // enter resident mode
- loop { line =>
- val args = line.split(' ').toList
- val command = new CompilerCommand(args.toList, settings)
- buildManager.update(fileSet(command.files), Set.empty)
- }
- false
- }
- else {
- if (settings.target.value == "msil")
- msilLibPath foreach (x => settings.assemrefs.value += (pathSeparator + x))
- true
- }
-
- override def newCompiler(): Global =
- if (settings.Yrangepos.value) new Global(settings, reporter) with interactive.RangePositions
- else Global(settings, reporter)
-
+ override def newCompiler(): Global = Global(settings, reporter)
override def doCompile(compiler: Global) {
- if (settings.resident.value)
- resident(compiler)
+ if (settings.resident) resident(compiler)
else super.doCompile(compiler)
}
}
+
+object Main extends MainClass { }
diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala
index f18ff19d7d..03190a63f3 100644
--- a/src/compiler/scala/tools/nsc/MainBench.scala
+++ b/src/compiler/scala/tools/nsc/MainBench.scala
@@ -5,28 +5,20 @@
package scala.tools.nsc
-import java.io.File
-import File.pathSeparator
-
-import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
-import Properties.{ versionString, copyrightString, residentPromptString, msilLibPath }
import scala.reflect.internal.util.Statistics
/** The main class for NSC, a compiler for the programming
* language Scala.
*/
object MainBench extends Driver with EvalLoop {
-
+
lazy val theCompiler = Global(settings, reporter)
-
+
override def newCompiler() = theCompiler
-
+
val NIter = 50
val NBest = 10
-
+
override def main(args: Array[String]) = {
val times = new Array[Long](NIter)
var start = System.nanoTime()
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
deleted file mode 100644
index e4a20b4a8c..0000000000
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2006-2013 LAMP/EPFL
- * @author Lex Spoon
- */
-
-package scala.tools.nsc
-
-import java.net.URL
-import scala.tools.util.PathResolver
-import io.{ File }
-import util.{ ClassPath, ScalaClassLoader }
-import Properties.{ versionString, copyrightString }
-import interpreter.{ ILoop }
-import GenericRunnerCommand._
-
-object JarRunner extends CommonRunner {
- def runJar(settings: GenericRunnerSettings, jarPath: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
- val jar = new io.Jar(jarPath)
- val mainClass = jar.mainClass getOrElse sys.error("Cannot find main class for jar: " + jarPath)
- val jarURLs = ClassPath expandManifestPath jarPath
- val urls = if (jarURLs.isEmpty) File(jarPath).toURL +: settings.classpathURLs else jarURLs
-
- if (settings.Ylogcp.value) {
- Console.err.println("Running jar with these URLs as the classpath:")
- urls foreach println
- }
-
- runAndCatch(urls, mainClass, arguments)
- }
-}
-
-/** An object that runs Scala code. It has three possible
- * sources for the code to run: pre-compiled code, a script file,
- * or interactive entry.
- */
-class MainGenericRunner {
- def errorFn(ex: Throwable): Boolean = {
- ex.printStackTrace()
- false
- }
- def errorFn(str: String): Boolean = {
- Console.err println str
- false
- }
-
- def process(args: Array[String]): Boolean = {
- val command = new GenericRunnerCommand(args.toList, (x: String) => errorFn(x))
- import command.{ settings, howToRun, thingToRun }
- def sampleCompiler = new Global(settings) // def so its not created unless needed
-
- if (!command.ok) return errorFn("\n" + command.shortUsageMsg)
- else if (settings.version.value) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
- else if (command.shouldStopWithInfo) return errorFn(command getInfoMessage sampleCompiler)
-
- def isE = !settings.execute.isDefault
- def dashe = settings.execute.value
-
- def isI = !settings.loadfiles.isDefault
- def dashi = settings.loadfiles.value
-
- // Deadlocks on startup under -i unless we disable async.
- if (isI)
- settings.Yreplsync.value = true
-
- def combinedCode = {
- val files = if (isI) dashi map (file => File(file).slurp()) else Nil
- val str = if (isE) List(dashe) else Nil
-
- files ++ str mkString "\n\n"
- }
-
- def runTarget(): Either[Throwable, Boolean] = howToRun match {
- case AsObject =>
- ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments)
- case AsScript =>
- ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments)
- case AsJar =>
- JarRunner.runJar(settings, thingToRun, command.arguments)
- case Error =>
- Right(false)
- case _ =>
- // We start the repl when no arguments are given.
- Right(new ILoop process settings)
- }
-
- /** If -e and -i were both given, we want to execute the -e code after the
- * -i files have been included, so they are read into strings and prepended to
- * the code given in -e. The -i option is documented to only make sense
- * interactively so this is a pretty reasonable assumption.
- *
- * This all needs a rewrite though.
- */
- if (isE) {
- ScriptRunner.runCommand(settings, combinedCode, thingToRun +: command.arguments)
- }
- else runTarget() match {
- case Left(ex) => errorFn(ex)
- case Right(b) => b
- }
- }
-}
-
-object MainGenericRunner extends MainGenericRunner {
- def main(args: Array[String]) {
- if (!process(args))
- sys.exit(1)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index 50cd51d486..84eb688b63 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
import scala.tools.nsc.reporters.ConsoleReporter
@@ -21,11 +22,11 @@ object MainTokenMetric {
var totale = 0
for (source <- fnames) {
val s = new UnitScanner(new CompilationUnit(compiler.getSourceFile(source)))
- s.nextToken
+ s.nextToken()
var i = 0
while (s.token != EOF) {
i += 1
- s.nextToken
+ s.nextToken()
}
Console.println(i.toString + " " + source.toString())
totale += i
@@ -42,9 +43,9 @@ object MainTokenMetric {
tokenMetric(compiler, command.files)
} catch {
case ex @ FatalError(msg) =>
- if (command.settings.debug.value)
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
+ if (command.settings.debug)
+ ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
}
}
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index f5123513c4..95264aeda6 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -8,15 +8,9 @@ package scala.tools.nsc
import java.net.URL
import util.ScalaClassLoader
-import java.lang.reflect.InvocationTargetException
import util.Exceptional.unwrap
trait CommonRunner {
- /** Check whether a class with the specified name
- * exists on the specified class path. */
- def classExists(urls: List[URL], objectName: String): Boolean =
- ScalaClassLoader.classExists(urls, objectName)
-
/** Run a given object, specified by name, using a
* specified classpath and argument list.
*
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index caf6ad14cf..899aa93a3b 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -26,8 +26,8 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
// instead of whatever it's supposed to be doing.
val baseDirectory = {
val pwd = System.getenv("PWD")
- if (pwd != null && !isWin) Directory(pwd)
- else Directory.Current getOrElse Directory("/")
+ if (pwd == null || isWin) Directory.Current getOrElse Directory("/")
+ else Directory(pwd)
}
currentDir.value = baseDirectory.path
}
@@ -39,7 +39,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
override def cmdName = "fsc"
override def usageMsg = (
- createUsageMsg("where possible fsc", false, x => x.isStandard && settings.isFscSpecific(x.name)) +
+ createUsageMsg("where possible fsc", shouldExplain = false, x => x.isStandard && settings.isFscSpecific(x.name)) +
"\n\nStandard scalac options also available:" +
createUsageMsg(x => x.isStandard && !settings.isFscSpecific(x.name))
)
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index cff3590b3f..cfb4cd23a1 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -6,15 +6,12 @@
package scala.tools.nsc
-import java.io.{ BufferedWriter, FileWriter }
import scala.collection.mutable
import scala.language.postfixOps
-/**
- * PhaseAssembly
- * Trait made to separate the constraint solving of the phase order from
- * the rest of the compiler. See SIP 00002
- *
+/** Converts an unordered morass of components into an order that
+ * satisfies their mutual constraints.
+ * @see SIP 00002. You have read SIP 00002?
*/
trait PhaseAssembly {
self: Global =>
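To make the constraint solving concrete, here is a toy, self-contained sketch of the idea (not the compiler's actual solver): each component declares the phases it must run after, and a depth-first topological sort yields an order satisfying those constraints, failing loudly on cycles much as collapseHardLinksAndLevels does below. All names are illustrative:

object PhaseOrderSketch {
  final case class Component(name: String, runsAfter: List[String])

  // Depth-first topological sort over the runsAfter constraints; throws on a cycle.
  def order(components: List[Component]): List[String] = {
    val byName = components.map(c => c.name -> c).toMap
    val seen   = scala.collection.mutable.LinkedHashSet[String]()
    def visit(name: String, path: Set[String]): Unit = {
      require(!path(name), s"cycle in phase dependencies at $name")
      if (!seen(name)) {
        byName.get(name).toList.flatMap(_.runsAfter).foreach(visit(_, path + name))
        seen += name
      }
    }
    components.foreach(c => visit(c.name, Set.empty))
    seen.toList
  }

  def main(args: Array[String]): Unit = {
    val components = List(
      Component("typer",  List("namer")),
      Component("namer",  List("parser")),
      Component("parser", Nil))
    println(order(components)) // List(parser, namer, typer)
  }
}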
@@ -23,18 +20,16 @@ trait PhaseAssembly {
* Aux data structure for solving the constraint system
* The dependency graph container with helper methods for node and edge creation
*/
- class DependencyGraph {
+ private class DependencyGraph {
- /**
- * Simple edge with to and from refs
- */
- class Edge(var frm: Node, var to: Node, var hard: Boolean)
+ /** Simple edge with to and from refs */
+ case class Edge(var frm: Node, var to: Node, var hard: Boolean)
/**
* Simple node with name and object ref for the phase object,
* also sets of in and out going dependencies
*/
- class Node(name: String) {
+ case class Node(name: String) {
val phasename = name
var phaseobj: Option[List[SubComponent]] = None
val after = new mutable.HashSet[Edge]()
@@ -51,11 +46,11 @@ trait PhaseAssembly {
val nodes = new mutable.HashMap[String,Node]()
val edges = new mutable.HashSet[Edge]()
- /* Given a phase object, get the node for this phase object. If the
- * node object does not exist, then create it.
+ /** Given a phase object, get the node for this phase object. If the
+ * node object does not exist, then create it.
*/
def getNodeByPhase(phs: SubComponent): Node = {
- var node: Node = getNodeByPhase(phs.phaseName)
+ val node: Node = getNodeByPhase(phs.phaseName)
node.phaseobj match {
case None =>
node.phaseobj = Some(List[SubComponent](phs))
@@ -75,7 +70,7 @@ trait PhaseAssembly {
* list of the nodes
*/
def softConnectNodes(frm: Node, to: Node) {
- var e = new Edge(frm, to, false)
+ val e = new Edge(frm, to, false)
this.edges += e
frm.after += e
@@ -87,7 +82,7 @@ trait PhaseAssembly {
* list of the nodes
*/
def hardConnectNodes(frm: Node, to: Node) {
- var e = new Edge(frm, to, true)
+ val e = new Edge(frm, to, true)
this.edges += e
frm.after += e
@@ -105,9 +100,8 @@ trait PhaseAssembly {
*/
def collapseHardLinksAndLevels(node: Node, lvl: Int) {
if (node.visited) {
- throw new FatalError(
- "Cycle in compiler phase dependencies detected, phase " +
- node.phasename + " reacted twice!")
+ dump("phase-cycle")
+ throw new FatalError(s"Cycle in phase dependencies detected at ${node.phasename}, created phase-cycle.dot")
}
if (node.level < lvl) node.level = lvl
@@ -140,7 +134,8 @@ trait PhaseAssembly {
var hardlinks = edges.filter(_.hard)
for (hl <- hardlinks) {
if (hl.frm.after.size > 1) {
- throw new FatalError("phase " + hl.frm.phasename + " want to run right after " + hl.to.phasename + ", but some phase has declared to run before " + hl.frm.phasename + ". Re-run with -Xgenerate-phase-graph <filename> to better see the problem.")
+ dump("phase-order")
+ throw new FatalError(s"Phase ${hl.frm.phasename} can't follow ${hl.to.phasename}, created phase-order.dot")
}
}
@@ -149,23 +144,17 @@ trait PhaseAssembly {
rerun = false
hardlinks = edges.filter(_.hard)
for (hl <- hardlinks) {
- var sanity = Nil ++ hl.to.before.filter(_.hard)
+ val sanity = Nil ++ hl.to.before.filter(_.hard)
if (sanity.length == 0) {
throw new FatalError("There is no runs right after dependency, where there should be one! This is not supposed to happen!")
} else if (sanity.length > 1) {
- var msg = "Multiple phases want to run right after the phase " + sanity.head.to.phasename + "\n"
- msg += "Phases: "
- sanity = sanity sortBy (_.frm.phasename)
- for (edge <- sanity) {
- msg += edge.frm.phasename + ", "
- }
- msg += "\nRe-run with -Xgenerate-phase-graph <filename> to better see the problem."
- throw new FatalError(msg)
-
+ dump("phase-order")
+ val following = (sanity map (_.frm.phasename)).sorted mkString ","
+ throw new FatalError(s"Multiple phases want to run right after ${sanity.head.to.phasename}; followers: $following; created phase-order.dot")
} else {
- var promote = hl.to.before.filter(e => (!e.hard))
- hl.to.before.clear
+ val promote = hl.to.before.filter(e => (!e.hard))
+ hl.to.before.clear()
sanity foreach (edge => hl.to.before += edge)
for (edge <- promote) {
rerun = true
@@ -182,7 +171,7 @@ trait PhaseAssembly {
/** Remove all nodes in the given graph, that have no phase object
* Make sure to clean up all edges when removing the node object
- * <code>Inform</code> with warnings, if an external phase has a
+ * `Inform` with warnings if an external phase has a
* dependency on something that is dropped.
*/
def removeDanglingNodes() {
@@ -199,39 +188,38 @@ trait PhaseAssembly {
}
}
}
+
+ def dump(title: String = "phase-assembly") = graphToDotFile(this, s"$title.dot")
}
- /* Method called from computePhaseDescriptors in class Global
- */
- def buildCompilerFromPhasesSet(): List[SubComponent] = {
+
+ /** Called by Global#computePhaseDescriptors to compute phase order. */
+ def computePhaseAssembly(): List[SubComponent] = {
// Add all phases in the set to the graph
val graph = phasesSetToDepGraph(phasesSet)
+ val dot = if (settings.genPhaseGraph.isSetByUser) Some(settings.genPhaseGraph.value) else None
+
// Output the phase dependency graph at this stage
- if (settings.genPhaseGraph.value != "")
- graphToDotFile(graph, settings.genPhaseGraph.value + "1.dot")
+ def dump(stage: Int) = dot foreach (n => graphToDotFile(graph, s"$n-$stage.dot"))
+
+ dump(1)
// Remove nodes without phaseobj
graph.removeDanglingNodes()
- // Output the phase dependency graph at this stage
- if (settings.genPhaseGraph.value != "")
- graphToDotFile(graph, settings.genPhaseGraph.value + "2.dot")
+ dump(2)
// Validate and Enforce hardlinks / runsRightAfter and promote nodes down the tree
graph.validateAndEnforceHardlinks()
- // Output the phase dependency graph at this stage
- if (settings.genPhaseGraph.value != "")
- graphToDotFile(graph, settings.genPhaseGraph.value + "3.dot")
+ dump(3)
// test for cycles, assign levels and collapse hard links into nodes
graph.collapseHardLinksAndLevels(graph.getNodeByPhase("parser"), 1)
- // Output the phase dependency graph at this stage
- if (settings.genPhaseGraph.value != "")
- graphToDotFile(graph, settings.genPhaseGraph.value + "4.dot")
+ dump(4)
// assemble the compiler
graph.compilerPhaseList()
@@ -245,7 +233,7 @@ trait PhaseAssembly {
for (phs <- phsSet) {
- var fromnode = graph.getNodeByPhase(phs)
+ val fromnode = graph.getNodeByPhase(phs)
phs.runsRightAfter match {
case None =>
@@ -288,16 +276,11 @@ trait PhaseAssembly {
sbuf.append("digraph G {\n")
for (edge <- graph.edges) {
sbuf.append("\"" + edge.frm.allPhaseNames + "(" + edge.frm.level + ")" + "\"->\"" + edge.to.allPhaseNames + "(" + edge.to.level + ")" + "\"")
- if (! edge.frm.phaseobj.get.head.internal) {
- extnodes += edge.frm
- }
- edge.frm.phaseobj match { case None => null case Some(ln) => if(ln.size > 1) fatnodes += edge.frm }
- edge.to.phaseobj match { case None => null case Some(ln) => if(ln.size > 1) fatnodes += edge.to }
- if (edge.hard) {
- sbuf.append(" [color=\"#0000ff\"]\n")
- } else {
- sbuf.append(" [color=\"#000000\"]\n")
- }
+ if (!edge.frm.phaseobj.get.head.internal) extnodes += edge.frm
+ edge.frm.phaseobj foreach (phobjs => if (phobjs.tail.nonEmpty) fatnodes += edge.frm )
+ edge.to.phaseobj foreach (phobjs => if (phobjs.tail.nonEmpty) fatnodes += edge.to )
+ val color = if (edge.hard) "#0000ff" else "#000000"
+ sbuf.append(s""" [color="$color"]\n""")
}
for (node <- extnodes) {
sbuf.append("\"" + node.allPhaseNames + "(" + node.level + ")" + "\" [color=\"#00ff00\"]\n")
@@ -306,10 +289,7 @@ trait PhaseAssembly {
sbuf.append("\"" + node.allPhaseNames + "(" + node.level + ")" + "\" [color=\"#0000ff\"]\n")
}
sbuf.append("}\n")
- var out = new BufferedWriter(new FileWriter(filename))
- out.write(sbuf.toString)
- out.flush()
- out.close()
+ import reflect.io._
+ for (d <- settings.outputDirs.getSingleOutput if !d.isVirtual) Path(d.file) / File(filename) writeAll sbuf.toString
}
-
}
diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala
deleted file mode 100644
index 0901ade2d7..0000000000
--- a/src/compiler/scala/tools/nsc/Phases.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-
-import symtab.Flags
-import scala.reflect.internal.util.TableDef
-import scala.language.postfixOps
-
-@deprecated("Scheduled for removal as being a dead-code in the compiler.", "2.10.1")
-object Phases {
- val MaxPhases = 64
-
- /** A class for tracking something about each phase.
- */
- class Model[T] {
- case class Cell(ph: Phase, value: T) {
- def name = ph.name
- def id = ph.id
- }
- val values = new Array[Cell](MaxPhases + 1)
- def results = values filterNot (_ == null)
- def apply(ph: Phase): T = values(ph.id).value
- def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value)
- }
- /** A class for recording the elapsed time of each phase in the
- * interests of generating a classy and informative table.
- */
- class TimingModel extends Model[Long] {
- var total: Long = 0
- def table() = {
- total = results map (_.value) sum;
- new Format.Table(results sortBy (-_.value))
- }
- object Format extends TableDef[Cell] {
- >> ("phase" -> (_.name)) >+ " "
- << ("id" -> (_.id)) >+ " "
- >> ("ms" -> (_.value)) >+ " "
- << ("share" -> (_.value.toDouble * 100 / total formatted "%.2f"))
- }
- def formatted = "" + table()
- }
-}
-
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index 55fd196716..ed5fda9c3f 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -12,14 +12,9 @@ object Properties extends scala.util.PropertiesTrait {
protected def pickJarBasedOn = classOf[Global]
// settings based on jar properties
- def fileEndingString = scalaPropOrElse("file.ending", ".scala|.java")
def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
- // settings based on system properties
- def msilLibPath = propOrNone("msil.libpath")
-
// derived values
def isEmacsShell = propOrEmpty("env.emacs") != ""
- def fileEndings = fileEndingString.split("""\|""").toList
}
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala
deleted file mode 100644
index ba434bc797..0000000000
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/* scaladoc, a documentation generator for Scala
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- * @author Geoffrey Washburn
- */
-
-package scala.tools.nsc
-
-import java.io.File.pathSeparator
-import scala.tools.nsc.doc.DocFactory
-import scala.tools.nsc.reporters.ConsoleReporter
-import scala.reflect.internal.util.FakePos
-import Properties.msilLibPath
-
-/** The main class for scaladoc, a front-end for the Scala compiler
- * that generates documentation from source files.
- */
-class ScalaDoc {
- val versionMsg = "Scaladoc %s -- %s".format(Properties.versionString, Properties.copyrightString)
-
- def process(args: Array[String]): Boolean = {
- var reporter: ConsoleReporter = null
- val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"),
- msg => reporter.printMessage(msg))
- reporter = new ConsoleReporter(docSettings) {
- // need to do this so that the Global instance doesn't trash all the
- // symbols just because there was an error
- override def hasErrors = false
- }
- val command = new ScalaDoc.Command(args.toList, docSettings)
- def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty
-
- if (docSettings.version.value)
- reporter.echo(versionMsg)
- else if (docSettings.Xhelp.value)
- reporter.echo(command.xusageMsg)
- else if (docSettings.Yhelp.value)
- reporter.echo(command.yusageMsg)
- else if (docSettings.showPlugins.value)
- reporter.warning(null, "Plugins are not available when using Scaladoc")
- else if (docSettings.showPhases.value)
- reporter.warning(null, "Phases are restricted when using Scaladoc")
- else if (docSettings.help.value || !hasFiles)
- reporter.echo(command.usageMsg)
- else try {
- if (docSettings.target.value == "msil")
- msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x))
-
- new DocFactory(reporter, docSettings) document command.files
- }
- catch {
- case ex @ FatalError(msg) =>
- if (docSettings.debug.value) ex.printStackTrace()
- reporter.error(null, "fatal error: " + msg)
- }
- finally reporter.printSummary()
-
- // not much point in returning !reporter.hasErrors when it has
- // been overridden with constant false.
- true
- }
-}
-
-object ScalaDoc extends ScalaDoc {
- class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) {
- override def cmdName = "scaladoc"
- override def usageMsg = (
- createUsageMsg("where possible scaladoc", false, x => x.isStandard && settings.isScaladocSpecific(x.name)) +
- "\n\nStandard scalac options also available:" +
- createUsageMsg(x => x.isStandard && !settings.isScaladocSpecific(x.name))
- )
- }
-
- def main(args: Array[String]): Unit = sys exit {
- if (process(args)) 0 else 1
- }
-}
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 107c4b3df3..d553d71bf5 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -3,11 +3,11 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
import io.{ Directory, File, Path }
import java.io.IOException
-import java.net.URL
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
import util.Exceptional.unwrap
@@ -49,25 +49,12 @@ class ScriptRunner extends HasCompileSocket {
case x => x
}
- def isScript(settings: Settings) = settings.script.value != ""
-
/** Choose a jar filename to hold the compiled version of a script. */
private def jarFileFor(scriptFile: String)= File(
if (scriptFile endsWith ".jar") scriptFile
else scriptFile.stripSuffix(".scala") + ".jar"
)
- /** Read the entire contents of a file as a String. */
- private def contentsOfFile(filename: String) = File(filename).slurp()
-
- /** Split a fully qualified object name into a
- * package and an unqualified object name */
- private def splitObjectName(fullname: String): (Option[String], String) =
- (fullname lastIndexOf '.') match {
- case -1 => (None, fullname)
- case idx => (Some(fullname take idx), fullname drop (idx + 1))
- }
-
/** Compile a script using the fsc compilation daemon.
*/
private def compileWithDaemon(settings: GenericRunnerSettings, scriptFileIn: String) = {
@@ -98,8 +85,8 @@ class ScriptRunner extends HasCompileSocket {
{
def mainClass = scriptMain(settings)
- /** Compiles the script file, and returns the directory with the compiled
- * class files, if the compilation succeeded.
+ /* Compiles the script file, and returns the directory with the compiled
+ * class files, if the compilation succeeded.
*/
def compile: Option[Directory] = {
val compiledPath = Directory makeTemp "scalascript"
@@ -109,9 +96,9 @@ class ScriptRunner extends HasCompileSocket {
settings.outdir.value = compiledPath.path
- if (settings.nc.value) {
- /** Setting settings.script.value informs the compiler this is not a
- * self contained compilation unit.
+ if (settings.nc) {
+ /* Setting settings.script.value informs the compiler this is not a
+ * self contained compilation unit.
*/
settings.script.value = mainClass
val reporter = new ConsoleReporter(settings)
@@ -124,11 +111,11 @@ class ScriptRunner extends HasCompileSocket {
else None
}
- /** The script runner calls sys.exit to communicate a return value, but this must
- * not take place until there are no non-daemon threads running. Tickets #1955, #2006.
+ /* The script runner calls sys.exit to communicate a return value, but this must
+ * not take place until there are no non-daemon threads running. Tickets #1955, #2006.
*/
util.waitingForThreads {
- if (settings.save.value) {
+ if (settings.save) {
val jarFile = jarFileFor(scriptFile)
def jarOK = jarFile.canRead && (jarFile isFresher File(scriptFile))
diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala
index a0468a22b9..b21d156145 100644
--- a/src/compiler/scala/tools/nsc/SubComponent.scala
+++ b/src/compiler/scala/tools/nsc/SubComponent.scala
@@ -19,19 +19,30 @@ abstract class SubComponent {
/** The name of the phase */
val phaseName: String
- /** List of phase names, this phase should run after */
+ /** Names of phases that must run before this phase. */
val runsAfter: List[String]
- /** List of phase names, this phase should run before */
+ /** Names of phases that must run after this phase. Default is `Nil`. */
val runsBefore: List[String] = Nil
- /** Phase name this phase will attach itself to, not allowing any phase to come between it
- * and the phase name declared */
+ /** Name of the phase that this phase must follow immediately. */
val runsRightAfter: Option[String]
- /** Internal flag to tell external from internal phases */
+ /** Names of phases required by this component. Default is `Nil`. */
+ val requires: List[String] = Nil
+
+ /** Is this component enabled? Default is true. */
+ def enabled: Boolean = true
+
+ /** True if this phase is not provided by a plug-in. */
val internal: Boolean = true
+ /** True if this phase runs before all other phases. Usually, `parser`. */
+ val initial: Boolean = false
+
+ /** True if this phase runs after all other phases. Usually, `terminal`. */
+ val terminal: Boolean = false
+
/** SubComponent are added to a HashSet and two phases are the same if they have the same name */
override def hashCode() = phaseName.hashCode()
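As a rough illustration of the vocabulary documented above (runsAfter, runsBefore, runsRightAfter, enabled), here is a toy trait mirroring those members and one hypothetical component declaring its constraints. This is only a sketch, not scala.tools.nsc.SubComponent itself:

trait ToyComponent {
  def phaseName: String
  def runsAfter: List[String]                 // phases that must already have run
  def runsBefore: List[String] = Nil          // phases that must run later
  def runsRightAfter: Option[String] = None   // hard link: run immediately after this phase
  def enabled: Boolean = true                 // a component may opt out of a run
}

object ToyErasure extends ToyComponent {
  val phaseName = "erasure"
  val runsAfter = List("explicitouter")
  override val runsBefore = List("posterasure")
}

object ToyComponentDemo {
  def main(args: Array[String]): Unit =
    println(s"${ToyErasure.phaseName} runs after ${ToyErasure.runsAfter.mkString(", ")}")
}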
@@ -47,8 +58,8 @@ abstract class SubComponent {
private var ownPhaseCache: WeakReference[Phase] = new WeakReference(null)
private var ownPhaseRunId = global.NoRunId
- @inline final def beforeOwnPhase[T](op: => T) = global.beforePhase(ownPhase)(op)
- @inline final def afterOwnPhase[T](op: => T) = global.afterPhase(ownPhase)(op)
+ @inline final def beforeOwnPhase[T](op: => T) = global.enteringPhase(ownPhase)(op)
+ @inline final def afterOwnPhase[T](op: => T) = global.exitingPhase(ownPhase)(op)
/** The phase corresponding to this subcomponent in the current compiler run */
def ownPhase: Phase = {
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 6e39fc9aa1..6d9b41ec45 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -7,10 +7,7 @@ package scala.tools.nsc
package ast
import symtab._
-import reporters._
-import scala.reflect.internal.util.{Position, NoPosition}
import util.DocStrings._
-import scala.reflect.internal.Chars._
import scala.collection.mutable
/*
@@ -21,8 +18,14 @@ trait DocComments { self: Global =>
val cookedDocComments = mutable.HashMap[Symbol, String]()
- /** The raw doc comment map */
- val docComments = mutable.HashMap[Symbol, DocComment]()
+ /** The raw doc comment map
+ *
+ * In the IDE, background compilation runs get interrupted by
+ * reloading new source files. This map is weak to avoid
+ * memory leaks through the docs of cached symbols
+ * (e.g. in baseTypeSeq) between periodic doc reloads.
+ */
+ val docComments = mutable.WeakHashMap[Symbol, DocComment]()
def clearDocComments() {
cookedDocComments.clear()
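A small standalone sketch of why the weak map above helps: once nothing else strongly references a key, its entry may be reclaimed. Plain objects stand in for compiler Symbols; all names below are illustrative:

import scala.collection.mutable

object WeakMapSketch {
  def main(args: Array[String]): Unit = {
    val docs = mutable.WeakHashMap[Object, String]()
    var key: Object = new Object
    docs(key) = "/** some doc */"
    println(docs.size)  // 1: the key is still strongly reachable
    key = null          // drop the only strong reference
    System.gc()         // the collector *may* now reclaim the key...
    println(docs.size)  // ...after which the entry silently disappears (typically 0 here)
  }
}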
@@ -30,11 +33,6 @@ trait DocComments { self: Global =>
defs.clear()
}
- /** Associate comment with symbol `sym` at position `pos`. */
- def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) =
- if ((sym ne null) && (sym ne NoSymbol))
- docComments += (sym -> DocComment(docStr, pos))
-
/** The raw doc comment of symbol `sym`, as it appears in the source text, "" if missing.
*/
def rawDocComment(sym: Symbol): String =
@@ -96,11 +94,6 @@ trait DocComments { self: Global =>
expandVariables(cookedDocComment(sym, docStr), sym, site1)
}
- /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
- * @param sym The symbol for which doc comment is returned (site is always the containing class)
- */
- def expandedDocComment(sym: Symbol): String = expandedDocComment(sym, sym.enclClass)
-
/** The list of use cases of doc comment of symbol `sym` seen as a member of class
* `site`. Each use case consists of a synthetic symbol (which is entered nowhere else),
* of an expanded doc comment string, and of its position.
@@ -129,12 +122,6 @@ trait DocComments { self: Global =>
getDocComment(sym) map getUseCases getOrElse List()
}
- def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass)
-
- /** Returns the javadoc format of doc comment string `s`, including wiki expansion
- */
- def toJavaDoc(s: String): String = expandWiki(s)
-
private val wikiReplacements = List(
("""(\n\s*\*?)(\s*\n)""" .r, """$1 <p>$2"""),
("""<([^\w/])""" .r, """&lt;$1"""),
@@ -275,7 +262,7 @@ trait DocComments { self: Global =>
cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
case None =>
reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym +
- " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true)
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", force = true)
"<invalid inheritdoc annotation>"
}
@@ -313,7 +300,6 @@ trait DocComments { self: Global =>
/** Lookup definition of variable.
*
* @param vble The variable for which a definition is searched
- * @param owner The current owner in which variable definitions are searched.
* @param site The class for which doc comments are generated
*/
def lookupVariable(vble: String, site: Symbol): Option[String] = site match {
@@ -330,12 +316,12 @@ trait DocComments { self: Global =>
}
/** Expand variable occurrences in string `str`, until a fix point is reached or
- * a expandLimit is exceeded.
+ * an expandLimit is exceeded.
*
- * @param str The string to be expanded
- * @param sym The symbol for which doc comments are generated
- * @param site The class for which doc comments are generated
- * @return Expanded string
+ * @param initialStr The string to be expanded
+ * @param sym The symbol for which doc comments are generated
+ * @param site The class for which doc comments are generated
+ * @return Expanded string
*/
protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol): String = {
val expandLimit = 10
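A toy version of the fixed-point expansion described above: substitute variables repeatedly until the string stops changing or a limit is reached. The $-style syntax and names are illustrative; the real code resolves variables through lookupVariable and symbol sites:

object ExpandSketch {
  val expandLimit = 10

  def expand(str: String, vars: Map[String, String]): String = {
    var cur     = str
    var changed = true
    var rounds  = 0
    while (changed && rounds < expandLimit) {
      val next = vars.foldLeft(cur) { case (acc, (name, repl)) => acc.replace("$" + name, repl) }
      changed = next != cur
      cur     = next
      rounds += 1
    }
    cur
  }

  def main(args: Array[String]): Unit = {
    // One replacement introduces another variable, which a later round resolves.
    println(expand("$greeting, $name", Map("greeting" -> "hello", "name" -> "$greeting world")))
    // prints: hello, hello world
  }
}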
@@ -372,7 +358,10 @@ trait DocComments { self: Global =>
case vname =>
lookupVariable(vname, site) match {
case Some(replacement) => replaceWith(replacement)
- case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym + " in " + site)
+ case None =>
+ val pos = docCommentPos(sym)
+ val loc = pos withPoint (pos.start + vstart + 1)
+ reporter.warning(loc, s"Variable $vname undefined in comment for $sym in $site")
}
}
}
@@ -470,7 +459,7 @@ trait DocComments { self: Global =>
//val (classes, pkgs) = site.ownerChain.span(!_.isPackageClass)
//val sites = (classes ::: List(pkgs.head, rootMirror.RootClass)))
//findIn(sites)
- findIn(site.ownerChain ::: List(definitions.EmptyPackage))
+ findIn(site.ownerChain ::: List(rootMirror.EmptyPackage))
}
def getType(str: String, variable: String): Type = {
@@ -507,7 +496,7 @@ trait DocComments { self: Global =>
result
}
- /**
+ /*
* work around the backticks issue suggested by Simon in
* https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/z7s1CCRCz74
* ideally, we'd have a removeWikiSyntax method in the CommentFactory to completely eliminate the wiki markup
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index deea4de707..9c8e13a1a9 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -32,7 +32,7 @@ abstract class NodePrinters {
}
trait DefaultPrintAST extends PrintAST {
- val printPos = settings.Xprintpos.value || settings.Yposdebug.value
+ val printPos = settings.Xprintpos || settings.Yposdebug
def showNameAndPos(tree: NameTree) = showPosition(tree) + showName(tree.name)
def showDefTreeName(tree: DefTree) = showName(tree.name)
@@ -100,9 +100,9 @@ abstract class NodePrinters {
def stringify(tree: Tree): String = {
buf.clear()
- if (settings.XshowtreesStringified.value) buf.append(tree.toString + EOL)
- if (settings.XshowtreesCompact.value) {
- buf.append(showRaw(tree, printIds = settings.uniqid.value, printTypes = settings.printtypes.value))
+ if (settings.XshowtreesStringified) buf.append(tree.toString + EOL)
+ if (settings.XshowtreesCompact) {
+ buf.append(showRaw(tree, printIds = settings.uniqid, printTypes = settings.printtypes))
} else {
level = 0
traverse(tree)
@@ -168,6 +168,13 @@ abstract class NodePrinters {
}
}
+ def typeApplyCommon(tree: Tree, fun: Tree, args: List[Tree]) {
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("[]", "type argument")(args)
+ }
+ }
+
def treePrefix(tree: Tree) = showPosition(tree) + tree.productPrefix
def printMultiline(tree: Tree)(body: => Unit) {
printMultiline(treePrefix(tree), showAttributes(tree))(body)
@@ -203,9 +210,11 @@ abstract class NodePrinters {
showPosition(tree)
tree match {
- case AppliedTypeTree(tpt, args) => applyCommon(tree, tpt, args)
- case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
- case Apply(fun, args) => applyCommon(tree, fun, args)
+ case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
+ case Apply(fun, args) => applyCommon(tree, fun, args)
+
+ case TypeApply(fun, args) => typeApplyCommon(tree, fun, args)
+ case AppliedTypeTree(tpt, args) => typeApplyCommon(tree, tpt, args)
case Throw(Ident(name)) =>
printSingle(tree, name)
@@ -273,7 +282,7 @@ abstract class NodePrinters {
traverseList("[]", "type parameter")(tparams)
vparamss match {
case Nil => println("Nil")
- case Nil :: Nil => println("List(Nil)")
+ case ListOfNil => println("List(Nil)")
case ps :: Nil =>
printLine("", "1 parameter list")
ps foreach traverse
@@ -312,11 +321,6 @@ abstract class NodePrinters {
}
case This(qual) =>
printSingle(tree, qual)
- case TypeApply(fun, args) =>
- printMultiline(tree) {
- traverse(fun)
- traverseList("[]", "type argument")(args)
- }
case tt @ TypeTree() =>
println(showTypeTree(tt))
diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala
index d8fb632f73..beab801edf 100644
--- a/src/compiler/scala/tools/nsc/ast/Positions.scala
+++ b/src/compiler/scala/tools/nsc/ast/Positions.scala
@@ -1,16 +1,9 @@
package scala.tools.nsc
package ast
-import scala.reflect.internal.util.{ SourceFile, Position, OffsetPosition, NoPosition }
-
trait Positions extends scala.reflect.internal.Positions {
self: Global =>
- def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
- new OffsetPosition(source, point)
-
- def validatePositions(tree: Tree) {}
-
class ValidatingPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
@@ -20,7 +13,7 @@ trait Positions extends scala.reflect.internal.Positions {
// When we prune due to encountering a position, traverse the
// pruned children so we can warn about those lacking positions.
t.children foreach { c =>
- if ((c eq EmptyTree) || (c eq emptyValDef)) ()
+ if (!c.canHaveAttrs) ()
else if (c.pos == NoPosition) {
reporter.warning(t.pos, " Positioned tree has unpositioned child in phase " + globalPhase)
inform("parent: " + treeSymStatus(t))
@@ -32,6 +25,6 @@ trait Positions extends scala.reflect.internal.Positions {
}
override protected[this] lazy val posAssigner: PosAssigner =
- if (settings.Yrangepos.value && settings.debug.value || settings.Yposdebug.value) new ValidatingPosAssigner
+ if (settings.Yrangepos && settings.debug || settings.Yposdebug) new ValidatingPosAssigner
else new DefaultPosAssigner
}
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 83222a24b4..c64b18207a 100644
--- a/src/compiler/scala/tools/nsc/ast/Printers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
package ast
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
-import symtab.Flags._
-import symtab.SymbolTable
trait Printers extends scala.reflect.internal.Printers { this: Global =>
@@ -22,7 +20,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
printTree(
if (tree.isDef && tree.symbol != NoSymbol && tree.symbol.isInitialized) {
tree match {
- case ClassDef(_, _, _, impl @ Template(ps, emptyValDef, body))
+ case ClassDef(_, _, _, impl @ Template(ps, noSelfType, body))
if (tree.symbol.thisSym != tree.symbol) =>
ClassDef(tree.symbol, Template(ps, ValDef(tree.symbol.thisSym), body))
case ClassDef(_, _, _, impl) => ClassDef(tree.symbol, impl)
@@ -44,7 +42,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
}
}
- // overflow cases missing from TreePrinter in reflect.api
+ // overflow cases missing from TreePrinter in scala.reflect.api
override def xprintTree(treePrinter: super.TreePrinter, tree: Tree) = tree match {
case DocDef(comment, definition) =>
treePrinter.print(comment.raw)
@@ -130,7 +128,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
case Select(qualifier, name) =>
printTree(qualifier)
print(".")
- print(quotedName(name, true))
+ print(quotedName(name, decode = true))
// target.toString() ==> target.toString
case Apply(fn, Nil) => printTree(fn)
@@ -154,7 +152,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
// If thenp or elsep has only one statement, it doesn't need more than one line.
case If(cond, thenp, elsep) =>
def ifIndented(x: Tree) = {
- indent ; println() ; printTree(x) ; undent
+ indent() ; println() ; printTree(x) ; undent()
}
val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements
@@ -168,128 +166,27 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
if (elseStmts.nonEmpty) {
print(" else")
- indent ; println()
+ indent() ; println()
elseStmts match {
case List(x) => printTree(x)
case _ => printTree(elsep)
}
- undent ; println()
+ undent() ; println()
}
case _ => s()
}
}
}
- /** This must guarantee not to force any evaluation, so we can learn
- * a little bit about trees in the midst of compilation without altering
- * the natural course of events.
- */
- class SafeTreePrinter(out: PrintWriter) extends TreePrinter(out) {
-
- private def default(t: Tree) = t.getClass.getName.reverse.takeWhile(_ != '.').reverse
- private def params(trees: List[Tree]): String = trees map safe mkString ", "
-
- private def safe(name: Name): String = name.decode
- private def safe(tree: Tree): String = tree match {
- case Apply(fn, args) => "%s(%s)".format(safe(fn), params(args))
- case Select(qual, name) => safe(qual) + "." + safe(name)
- case This(qual) => safe(qual) + ".this"
- case Ident(name) => safe(name)
- case Literal(value) => value.stringValue
- case _ => "(?: %s)".format(default(tree))
- }
-
- override def printTree(tree: Tree) { print(safe(tree)) }
- }
-
- class TreeMatchTemplate {
- // non-trees defined in Trees
- //
- // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
- // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position])
- //
- def apply(t: Tree): Unit = t match {
- // eliminated by typer
- case Annotated(annot, arg) =>
- case AssignOrNamedArg(lhs, rhs) =>
- case DocDef(comment, definition) =>
- case Import(expr, selectors) =>
-
- // eliminated by refchecks
- case ModuleDef(mods, name, impl) =>
- case TypeTreeWithDeferredRefCheck() =>
-
- // eliminated by erasure
- case TypeDef(mods, name, tparams, rhs) =>
- case Typed(expr, tpt) =>
-
- // eliminated by cleanup
- case ApplyDynamic(qual, args) =>
-
- // eliminated by explicitouter
- case Alternative(trees) =>
- case Bind(name, body) =>
- case CaseDef(pat, guard, body) =>
- case Star(elem) =>
- case UnApply(fun, args) =>
-
- // eliminated by lambdalift
- case Function(vparams, body) =>
-
- // eliminated by uncurry
- case AppliedTypeTree(tpt, args) =>
- case CompoundTypeTree(templ) =>
- case ExistentialTypeTree(tpt, whereClauses) =>
- case SelectFromTypeTree(qual, selector) =>
- case SingletonTypeTree(ref) =>
- case TypeBoundsTree(lo, hi) =>
-
- // survivors
- case Apply(fun, args) =>
- case ArrayValue(elemtpt, trees) =>
- case Assign(lhs, rhs) =>
- case Block(stats, expr) =>
- case ClassDef(mods, name, tparams, impl) =>
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- case EmptyTree =>
- case Ident(name) =>
- case If(cond, thenp, elsep) =>
- case LabelDef(name, params, rhs) =>
- case Literal(value) =>
- case Match(selector, cases) =>
- case New(tpt) =>
- case PackageDef(pid, stats) =>
- case Return(expr) =>
- case Select(qualifier, selector) =>
- case Super(qual, mix) =>
- case Template(parents, self, body) =>
- case This(qual) =>
- case Throw(expr) =>
- case Try(block, catches, finalizer) =>
- case TypeApply(fun, args) =>
- case TypeTree() =>
- case ValDef(mods, name, tpt, rhs) =>
-
- // missing from the Trees comment
- case Parens(args) => // only used during parsing
- case SelectFromArray(qual, name, erasure) => // only used during erasure
- }
- }
-
- def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
- def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
+ def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymkinds)
+ def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymkinds)
def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true)
def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
- def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
- def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter))
-
def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer)
- def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream))
- def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter))
override def newTreePrinter(writer: PrintWriter): TreePrinter =
- if (settings.Ycompacttrees.value) newCompactTreePrinter(writer)
+ if (settings.Ycompacttrees) newCompactTreePrinter(writer)
else newStandardTreePrinter(writer)
override def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
override def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
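
The recurring cleanup in this file, naming boolean arguments at call sites and adding empty parens to side-effecting calls, changes nothing semantically. A rough sketch with hypothetical printer helpers (only NameTransformer is a real library API) shows the conventions:

    import scala.reflect.NameTransformer

    class Indenter {
      private var level = 0
      def indent(): Unit = level += 1        // side-effecting => declared and called with ()
      def undent(): Unit = level -= 1
      def pad: String = "  " * level         // pure accessor => no parens
    }

    def quotedName(name: String, decode: Boolean): String =
      if (decode) NameTransformer.decode(name) else name

    val ind = new Indenter
    ind.indent()
    println(ind.pad + quotedName("$plus", decode = true))  // the flag's meaning is visible at the call site
    ind.undent()
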
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 5c954096f4..c68b248240 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package ast
import java.awt.{List => awtList, _}
@@ -16,8 +17,6 @@ import javax.swing.tree._
import scala.concurrent.Lock
import scala.text._
-import symtab.Flags._
-import symtab.SymbolTable
import scala.language.implicitConversions
/**
@@ -34,7 +33,7 @@ abstract class TreeBrowsers {
val borderSize = 10
- def create(): SwingBrowser = new SwingBrowser();
+ def create(): SwingBrowser = new SwingBrowser()
/** Pseudo tree class, so that all JTree nodes are treated uniformly */
case class ProgramTree(units: List[UnitTree]) extends Tree {
@@ -50,21 +49,6 @@ abstract class TreeBrowsers {
* Java Swing pretty printer for Scala abstract syntax trees.
*/
class SwingBrowser {
-
- def browse(t: Tree): Tree = {
- val tm = new ASTTreeModel(t)
-
- val frame = new BrowserFrame()
- frame.setTreeModel(tm)
-
- val lock = new Lock()
- frame.createFrame(lock)
-
- // wait for the frame to be closed
- lock.acquire
- t
- }
-
def browse(pName: String, units: Iterator[CompilationUnit]): Unit =
browse(pName, units.toList)
@@ -83,7 +67,7 @@ abstract class TreeBrowsers {
frame.createFrame(lock)
// wait for the frame to be closed
- lock.acquire
+ lock.acquire()
}
}
@@ -171,8 +155,8 @@ abstract class TreeBrowsers {
_setExpansionState(root, new TreePath(root.getModel.getRoot))
}
- def expandAll(subtree: JTree) = setExpansionState(subtree, true)
- def collapseAll(subtree: JTree) = setExpansionState(subtree, false)
+ def expandAll(subtree: JTree) = setExpansionState(subtree, expand = true)
+ def collapseAll(subtree: JTree) = setExpansionState(subtree, expand = false)
/** Create a frame that displays the AST.
@@ -184,14 +168,14 @@ abstract class TreeBrowsers {
* especially symbols/types would change while the window is visible.
*/
def createFrame(lock: Lock): Unit = {
- lock.acquire // keep the lock until the user closes the window
+ lock.acquire() // keep the lock until the user closes the window
frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
frame.addWindowListener(new WindowAdapter() {
/** Release the lock, so compilation may resume after the window is closed. */
- override def windowClosed(e: WindowEvent): Unit = lock.release
- });
+ override def windowClosed(e: WindowEvent): Unit = lock.release()
+ })
jTree = new JTree(treeModel) {
/** Return the string for a tree node. */
@@ -253,7 +237,7 @@ abstract class TreeBrowsers {
putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_Q, menuKey + shiftKey, false))
override def actionPerformed(e: ActionEvent) {
closeWindow()
- global.currentRun.cancel
+ global.currentRun.cancel()
}
}
)
@@ -509,7 +493,7 @@ abstract class TreeBrowsers {
/** Return a textual representation of this t's symbol */
def symbolText(t: Tree): String = {
val prefix =
- if (t.hasSymbol) "[has] "
+ if (t.hasSymbolField) "[has] "
else if (t.isDef) "[defines] "
else ""
@@ -529,11 +513,10 @@ abstract class TreeBrowsers {
* attributes */
def symbolAttributes(t: Tree): String = {
val s = t.symbol
- var att = ""
if ((s ne null) && (s != NoSymbol)) {
- var str = flagsToString(s.flags)
- if (s.isStaticMember) str = str + " isStatic ";
+ var str = s.flagString
+ if (s.isStaticMember) str = str + " isStatic "
(str + " annotations: " + s.annotations.mkString("", " ", "")
+ (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else ""))
}
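
For readers unfamiliar with how the browser pauses compilation, the acquire/release handshake kept (and now written with explicit parens) in createFrame can be modeled standalone as below; the extra thread is a hypothetical stand-in for the Swing windowClosed listener.

    import scala.concurrent.Lock   // same Lock class the browser uses

    val lock = new Lock()
    lock.acquire()                 // createFrame keeps the lock while the window is up

    new Thread(new Runnable {      // stands in for windowClosed(e) => lock.release()
      def run(): Unit = { Thread.sleep(100); lock.release() }
    }).start()

    lock.acquire()                 // browse() blocks here until the "window" releases
    println("window closed, compilation resumes")
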
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 9a5b92e795..6dda30b5e7 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -7,7 +7,6 @@
package scala.tools.nsc
package ast
-import PartialFunction._
import symtab.Flags
import scala.language.implicitConversions
@@ -21,7 +20,6 @@ trait TreeDSL {
import global._
import definitions._
- import gen.{ scalaDot }
object CODE {
// Add a null check to a Tree => Tree function
@@ -31,30 +29,16 @@ trait TreeDSL {
def returning[T](x: T)(f: T => Unit): T = util.returning(x)(f)
object LIT extends (Any => Literal) {
+ def typed(x: Any) = apply(x) setType ConstantType(Constant(x))
def apply(x: Any) = Literal(Constant(x))
- def unapply(x: Any) = condOpt(x) { case Literal(Constant(value)) => value }
}
- // You might think these could all be vals, but empirically I have found that
- // at least in the case of UNIT the compiler breaks if you re-use trees.
- // However we need stable identifiers to have attractive pattern matching.
- // So it's inconsistent until I devise a better way.
- val TRUE = LIT(true)
- val FALSE = LIT(false)
- val ZERO = LIT(0)
- def NULL = LIT(null)
- def UNIT = LIT(())
-
- // for those preferring boring, predictable lives, without the thrills of tree-sharing
- // (but with the perk of typed trees)
- def TRUE_typed = LIT(true) setType ConstantType(Constant(true))
- def FALSE_typed = LIT(false) setType ConstantType(Constant(false))
-
- object WILD {
- def empty = Ident(nme.WILDCARD)
- def apply(tpe: Type) = Ident(nme.WILDCARD) setType tpe
- def unapply(other: Any) = cond(other) { case Ident(nme.WILDCARD) => true }
- }
+ // Boring, predictable trees.
+ def TRUE = LIT typed true
+ def FALSE = LIT typed false
+ def ZERO = LIT(0)
+ def NULL = LIT(null)
+ def UNIT = LIT(())
def fn(lhs: Tree, op: Name, args: Tree*) = Apply(Select(lhs, op), args.toList)
def fn(lhs: Tree, op: Symbol, args: Tree*) = Apply(Select(lhs, op), args.toList)
@@ -82,19 +66,15 @@ trait TreeDSL {
if (opSym == NoSymbol) ANY_==(other)
else fn(target, opSym, other)
}
- def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectClass.tpe)
+ def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectTpe)
def ANY_== (other: Tree) = fn(target, Any_==, other)
def ANY_!= (other: Tree) = fn(target, Any_!=, other)
- def OBJ_== (other: Tree) = fn(target, Object_==, other)
- def OBJ_!= (other: Tree) = fn(target, Object_!=, other)
def OBJ_EQ (other: Tree) = fn(target, Object_eq, other)
def OBJ_NE (other: Tree) = fn(target, Object_ne, other)
- def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other)
- def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other)
def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
- def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other)
+ def INT_- (other: Tree) = fn(target, getMember(IntClass, nme.MINUS), other)
// generic operations on ByteClass, IntClass, LongClass
def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
@@ -102,37 +82,28 @@ trait TreeDSL {
def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other)
def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other)
- def BOOL_&& (other: Tree) = fn(target, Boolean_and, other)
- def BOOL_|| (other: Tree) = fn(target, Boolean_or, other)
-
/** Apply, Select, Match **/
def APPLY(params: Tree*) = Apply(target, params.toList)
def APPLY(params: List[Tree]) = Apply(target, params)
- def MATCH(cases: CaseDef*) = Match(target, cases.toList)
def DOT(member: Name) = SelectStart(Select(target, member))
def DOT(sym: Symbol) = SelectStart(Select(target, sym))
/** Assignment */
+ // !!! This method is responsible for some tree sharing, but a diligent
+ // reviewer pointed out that we shouldn't blindly duplicate these trees
+ // as there might be DefTrees nested beneath them. It's not entirely
+ // clear how to proceed, so for now it retains the non-duplicating behavior.
def ===(rhs: Tree) = Assign(target, rhs)
- /** Methods for sequences **/
- def DROP(count: Int): Tree =
- if (count == 0) target
- else (target DOT nme.drop)(LIT(count))
-
/** Casting & type tests -- working our way toward understanding exactly
* what differs between the different forms of IS and AS.
*
* See ticket #2168 for one illustration of AS vs. AS_ANY.
*/
def AS(tpe: Type) = gen.mkAsInstanceOf(target, tpe, any = true, wrapInApply = false)
- def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, true)
- def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, false)
+ def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, any = false)
- // XXX having some difficulty expressing nullSafe in a way that doesn't freak out value types
- // def TOSTRING() = nullSafe(fn(_: Tree, nme.toString_), LIT("null"))(target)
- def TOSTRING() = fn(target, nme.toString_)
def GETCLASS() = fn(target, Object_getClass)
}
@@ -145,98 +116,6 @@ trait TreeDSL {
def ==>(body: Tree): CaseDef = CaseDef(pat, guard, body)
}
- /** VODD, if it's not obvious, means ValOrDefDef. This is the
- * common code between a tree based on a pre-existing symbol and
- * one being built from scratch.
- */
- trait VODDStart {
- def name: Name
- def defaultMods: Modifiers
- def defaultTpt: Tree
- def defaultPos: Position
-
- type ResultTreeType <: ValOrDefDef
- def mkTree(rhs: Tree): ResultTreeType
- def ===(rhs: Tree): ResultTreeType
-
- private var _mods: Modifiers = null
- private var _tpt: Tree = null
- private var _pos: Position = null
-
- def withType(tp: Type): this.type = {
- _tpt = TypeTree(tp)
- this
- }
- def withFlags(flags: Long*): this.type = {
- if (_mods == null)
- _mods = defaultMods
-
- _mods = flags.foldLeft(_mods)(_ | _)
- this
- }
- def withPos(pos: Position): this.type = {
- _pos = pos
- this
- }
-
- final def mods = if (_mods == null) defaultMods else _mods
- final def tpt = if (_tpt == null) defaultTpt else _tpt
- final def pos = if (_pos == null) defaultPos else _pos
- }
- trait SymVODDStart extends VODDStart {
- def sym: Symbol
- def symType: Type
-
- def name = sym.name
- def defaultMods = Modifiers(sym.flags)
- def defaultTpt = TypeTree(symType) setPos sym.pos.focus
- def defaultPos = sym.pos
-
- final def ===(rhs: Tree): ResultTreeType =
- atPos(pos)(mkTree(rhs) setSymbol sym)
- }
- trait ValCreator {
- self: VODDStart =>
-
- type ResultTreeType = ValDef
- def mkTree(rhs: Tree): ValDef = ValDef(mods, name, tpt, rhs)
- }
- trait DefCreator {
- self: VODDStart =>
-
- def tparams: List[TypeDef]
- def vparamss: List[List[ValDef]]
-
- type ResultTreeType = DefDef
- def mkTree(rhs: Tree): DefDef = DefDef(mods, name, tparams, vparamss, tpt, rhs)
- }
-
- class DefSymStart(val sym: Symbol) extends SymVODDStart with DefCreator {
- def symType = sym.tpe.finalResultType
- def tparams = sym.typeParams map TypeDef
- def vparamss = mapParamss(sym)(ValDef)
- }
- class ValSymStart(val sym: Symbol) extends SymVODDStart with ValCreator {
- def symType = sym.tpe
- }
-
- trait TreeVODDStart extends VODDStart {
- def defaultMods = NoMods
- def defaultTpt = TypeTree()
- def defaultPos = NoPosition
-
- final def ===(rhs: Tree): ResultTreeType =
- if (pos == NoPosition) mkTree(rhs)
- else atPos(pos)(mkTree(rhs))
- }
-
- class ValTreeStart(val name: Name) extends TreeVODDStart with ValCreator {
- }
- class DefTreeStart(val name: Name) extends TreeVODDStart with DefCreator {
- def tparams: List[TypeDef] = Nil
- def vparamss: List[List[ValDef]] = ListOfNil
- }
-
class IfStart(cond: Tree, thenp: Tree) {
def THEN(x: Tree) = new IfStart(cond, x)
def ELSE(elsep: Tree) = If(cond, thenp, elsep)
@@ -244,84 +123,29 @@ trait TreeDSL {
}
class TryStart(body: Tree, catches: List[CaseDef], fin: Tree) {
def CATCH(xs: CaseDef*) = new TryStart(body, xs.toList, fin)
- def FINALLY(x: Tree) = Try(body, catches, x)
def ENDTRY = Try(body, catches, fin)
}
def CASE(pat: Tree): CaseStart = new CaseStart(pat, EmptyTree)
- def DEFAULT: CaseStart = new CaseStart(WILD.empty, EmptyTree)
-
- class SymbolMethods(target: Symbol) {
- def BIND(body: Tree) = Bind(target, body)
- def IS_NULL() = REF(target) OBJ_EQ NULL
- def NOT_NULL() = REF(target) OBJ_NE NULL
-
- def GET() = fn(REF(target), nme.get)
-
- // name of nth indexed argument to a method (first parameter list), defaults to 1st
- def ARG(idx: Int = 0) = Ident(target.paramss.head(idx))
- def ARGS = target.paramss.head
- def ARGNAMES = ARGS map Ident
- }
-
- /** Top level accessible. */
- def MATCHERROR(arg: Tree) = Throw(MatchErrorClass.tpe, arg)
- def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING())
+ def DEFAULT: CaseStart = new CaseStart(Ident(nme.WILDCARD), EmptyTree)
def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList))
- def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*)
-
- def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp
- def DEF(name: Name): DefTreeStart = new DefTreeStart(name)
- def DEF(sym: Symbol): DefSymStart = new DefSymStart(sym)
-
- def VAL(name: Name, tp: Type): ValTreeStart = VAL(name) withType tp
- def VAL(name: Name): ValTreeStart = new ValTreeStart(name)
- def VAL(sym: Symbol): ValSymStart = new ValSymStart(sym)
-
- def VAR(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.MUTABLE
- def VAR(name: Name): ValTreeStart = VAL(name) withFlags Flags.MUTABLE
- def VAR(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.MUTABLE
-
- def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY
- def LAZYVAL(name: Name): ValTreeStart = VAL(name) withFlags Flags.LAZY
- def LAZYVAL(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.LAZY
- def AND(guards: Tree*) =
- if (guards.isEmpty) EmptyTree
- else guards reduceLeft gen.mkAnd
-
- def OR(guards: Tree*) =
- if (guards.isEmpty) EmptyTree
- else guards reduceLeft gen.mkOr
+ def NOT(tree: Tree) = Select(tree, Boolean_not)
+ def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd
def IF(tree: Tree) = new IfStart(tree, EmptyTree)
def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree)
def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last)
- def NOT(tree: Tree) = Select(tree, Boolean_not)
- def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, true))
+ def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, gen.mkTuple(xs.toList))
/** Typed trees from symbols. */
- def THIS(sym: Symbol) = gen.mkAttributedThis(sym)
- def ID(sym: Symbol) = gen.mkAttributedIdent(sym)
- def REF(sym: Symbol) = gen.mkAttributedRef(sym)
- def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym)
-
- def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => UNIT
- case List(tree) if flattenUnary => tree
- case _ => Apply(TupleClass(trees.length).companionModule, trees: _*)
- }
- def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => gen.scalaUnitConstr
- case List(tree) if flattenUnary => tree
- case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees)
- }
+ def REF(sym: Symbol) = gen.mkAttributedRef(sym)
+ def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym)
/** Implicits - some of these should probably disappear **/
implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target)
implicit def mkTreeMethodsFromSymbol(target: Symbol): TreeMethods = new TreeMethods(Ident(target))
- implicit def mkSymbolMethodsFromSymbol(target: Symbol): SymbolMethods = new SymbolMethods(target)
/** (foo DOT bar) might be simply a Select, but more likely it is to be immediately
* followed by an Apply. We don't want to add an actual apply method to arbitrary
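
A hedged sketch of how the surviving DSL pieces compose after this trim: TRUE and FALSE are now typed literals built via LIT.typed, and WILD is gone in favour of Ident(nme.WILDCARD). The snippet assumes it runs inside a component that mixes in TreeDSL with CODE._ imported, and someSym is a hypothetical Symbol in scope.

    import CODE._
    // assumes: val someSym: Symbol (hypothetical)
    val ref   = REF(someSym)                     // attributed reference to the symbol
    val check = ref OBJ_EQ NULL                  // reference equality against null
    val pick  = IF (check) THEN FALSE ELSE TRUE  // TRUE/FALSE now carry ConstantType
    val init  = BLOCK(pick, UNIT)                // { ...; () }
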
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 99b82d9746..4ac6672727 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -8,7 +8,6 @@ package ast
import scala.collection.mutable.ListBuffer
import symtab.Flags._
-import symtab.SymbolTable
import scala.language.postfixOps
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
@@ -20,23 +19,20 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
import global._
import definitions._
- def mkCheckInit(tree: Tree): Tree = {
- val tpe =
- if (tree.tpe != null || !tree.hasSymbol) tree.tpe
- else tree.symbol.tpe
-
- if (!global.phase.erasedTypes && settings.warnSelectNullable.value &&
- tpe <:< NotNullClass.tpe && !tpe.isNotNull)
- mkRuntimeCall(nme.checkInitialized, List(tree))
- else
- tree
- }
-
- /** Builds a fully attributed wildcard import node.
+ /** Builds a fully attributed, synthetic wildcard import node.
*/
- def mkWildcardImport(pkg: Symbol): Import = {
- assert(pkg ne null, this)
- val qual = gen.mkAttributedStableRef(pkg)
+ def mkWildcardImport(pkg: Symbol): Import =
+ mkImportFromSelector(pkg, ImportSelector.wildList)
+
+ /** Builds a fully attributed, synthetic import node.
+ * import `qualSym`.{`name` => `toName`}
+ */
+ def mkImport(qualSym: Symbol, name: Name, toName: Name): Import =
+ mkImportFromSelector(qualSym, ImportSelector(name, 0, toName, 0) :: Nil)
+
+ private def mkImportFromSelector(qualSym: Symbol, selector: List[ImportSelector]): Import = {
+ assert(qualSym ne null, this)
+ val qual = gen.mkAttributedStableRef(qualSym)
val importSym = (
NoSymbol
newImport NoPosition
@@ -44,7 +40,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
setInfo analyzer.ImportType(qual)
)
val importTree = (
- Import(qual, ImportSelector.wildList)
+ Import(qual, selector)
setSymbol importSym
setType NoType
)
@@ -52,120 +48,23 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
}
// wrap the given expression in a SoftReference so it can be gc-ed
- def mkSoftRef(expr: Tree): Tree = atPos(expr.pos)(New(SoftReferenceClass.tpe, expr))
-
- // annotate the expression with @unchecked
- def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
- // This can't be "Annotated(New(UncheckedClass), expr)" because annotations
- // are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr)
- }
- // if it's a Match, mark the selector unchecked; otherwise nothing.
- def mkUncheckedMatch(tree: Tree) = tree match {
- case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases))
- case _ => tree
- }
-
- def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) {
- // This can't be "Annotated(New(SwitchClass), expr)" because annotations
- // are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(Ident(nme.synthSwitch), expr)
- }
-
- // TODO: would be so much nicer if we would know during match-translation (i.e., type checking)
- // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum
- class MatchMatcher {
- def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig)
- def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig)
- def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
-
- def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef)
-
- def apply(matchExpr: Tree): Tree = matchExpr match {
- // old-style match or virtpatmat switch
- case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr )
- caseMatch(matchExpr, selector, cases, identity)
- // old-style match or virtpatmat switch
- case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr )
- caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m))
- // virtpatmat
- case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr )
- caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher)
- // optimized version of virtpatmat
- case Block(stats, matchEndDef) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity)
- // optimized version of virtpatmat
- case Block(outerStats, orig@Block(stats, matchEndDef)) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
- val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m))
- case other =>
- unknownTree(other)
- }
-
- def unknownTree(t: Tree): Tree = throw new MatchError(t)
- def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr)
-
- def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] =
- if (!opt.virtPatmat) cases
- else cases filter {
- case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false
- case CaseDef(pat, guard, body) => true
- }
- }
-
- def mkCached(cvar: Symbol, expr: Tree): Tree = {
- val cvarRef = mkUnattributedRef(cvar)
- Block(
- List(
- If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))),
- Assign(cvarRef, expr),
- EmptyTree)),
- cvarRef
- )
+ def mkSoftRef(expr: Tree): Tree = atPos(expr.pos) {
+ val constructor = SoftReferenceClass.info.nonPrivateMember(nme.CONSTRUCTOR).suchThat(_.paramss.flatten.size == 1)
+ NewFromConstructor(constructor, expr)
}
// Builds a tree of the form "{ lhs = rhs ; lhs }"
def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = {
- val lhsRef = mkUnattributedRef(lhs)
+ def lhsRef = if (lhs.owner.isClass) Select(This(lhs.owner), lhs) else Ident(lhs)
Block(Assign(lhsRef, rhs) :: Nil, lhsRef)
}
- def mkModuleVarDef(accessor: Symbol) = {
- val inClass = accessor.owner.isClass
- val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0
-
- val mval = (
- accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags)
- setInfo accessor.tpe.finalResultType
- addAnnotation VolatileAttr
- )
- if (inClass)
- mval.owner.info.decls enter mval
-
- ValDef(mval)
- }
-
- // def m: T = { if (m$ eq null) m$ = new m$class(...) m$ }
- // where (...) are eventual outer accessors
- def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) =
- DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe)))
-
- def mkModuleAccessDef(accessor: Symbol, msym: Symbol) =
- DefDef(accessor, Select(This(msym.owner), msym))
-
def newModule(accessor: Symbol, tpe: Type) = {
val ps = tpe.typeSymbol.primaryConstructor.info.paramTypes
if (ps.isEmpty) New(tpe)
else New(tpe, This(accessor.owner.enclClass))
}
- // def m: T;
- def mkModuleAccessDcl(accessor: Symbol) =
- DefDef(accessor setFlag lateDEFERRED, EmptyTree)
-
def mkRuntimeCall(meth: Name, args: List[Tree]): Tree =
mkRuntimeCall(meth, Nil, args)
@@ -206,7 +105,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else AppliedTypeTree(Ident(clazz), targs map TypeTree)
))
}
- def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
@@ -267,25 +165,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else
mkCast(tree, pt)
- def mkZeroContravariantAfterTyper(tp: Type): Tree = {
- // contravariant -- for replacing an argument in a method call
- // must use subtyping, as otherwise we miss types like `Any with Int`
- val tree =
- if (NullClass.tpe <:< tp) Literal(Constant(null))
- else if (UnitClass.tpe <:< tp) Literal(Constant())
- else if (BooleanClass.tpe <:< tp) Literal(Constant(false))
- else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f))
- else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d))
- else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte))
- else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort))
- else if (IntClass.tpe <:< tp) Literal(Constant(0))
- else if (LongClass.tpe <:< tp) Literal(Constant(0L))
- else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar))
- else mkCast(Literal(Constant(null)), tp)
-
- tree
- }
-
/** Translate names in Select/Ident nodes to type names.
*/
def convertToTypeName(tree: Tree): Option[RefTree] = tree match {
@@ -307,7 +186,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
*/
private def mkPackedValDef(expr: Tree, owner: Symbol, name: Name): (ValDef, () => Ident) = {
val packedType = typer.packedType(expr, owner)
- val sym = owner.newValue(name, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
+ val sym = owner.newValue(name.toTermName, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
(ValDef(sym, expr), () => Ident(sym) setPos sym.pos.focus setType expr.tpe)
}
@@ -368,4 +247,53 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
attrThis,
If(cond, Block(syncBody: _*), EmptyTree)) ::
stats: _*)
+
+ /** Creates a tree representing new Object { stats }.
+ * To make sure an anonymous subclass of Object is created,
+ * if there are no stats, a () is added.
+ */
+ def mkAnonymousNew(stats: List[Tree]): Tree = {
+ val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
+ mkNew(Nil, noSelfType, stats1, NoPosition, NoPosition)
+ }
+
+ /**
+ * Create a method based on a Function
+ *
+   * Used both under `-Ydelambdafy:method` to create a lifted function and
+   * under `-Ydelambdafy:inline` to create the apply method on the anonymous
+ * class.
+ *
+ * It creates a method definition with value params cloned from the
+ * original lambda. Then it calls a supplied function to create
+   * the body and types the result. Finally,
+   * everything is wrapped up in a DefDef.
+ *
+ * @param owner The owner for the new method
+ * @param name name for the new method
+ * @param additionalFlags flags to be put on the method in addition to FINAL
+ */
+ def mkMethodFromFunction(localTyper: analyzer.Typer)
+ (fun: Function, owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags) = {
+ val funParams = fun.vparams map (_.symbol)
+ val formals :+ restpe = fun.tpe.typeArgs
+
+ val methSym = owner.newMethod(name, fun.pos, FINAL | additionalFlags)
+
+ val paramSyms = map2(formals, fun.vparams) {
+ (tp, vparam) => methSym.newSyntheticValueParam(tp, vparam.name)
+ }
+
+ methSym setInfo MethodType(paramSyms, restpe.deconst)
+
+ fun.body.substituteSymbols(funParams, paramSyms)
+ fun.body changeOwner (fun.symbol -> methSym)
+
+ val methDef = DefDef(methSym, fun.body)
+
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ methDef.tpt setType localTyper.packedType(fun.body, methSym).deconst
+ methDef
+ }
}
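
The reworked mkAssignAndReturn still produces the `{ lhs = rhs; lhs }` shape, now selecting through `This` when the target lives in a class. A rough standalone model of that shape, using the public reflection API and made-up names (the compiler derives these from Symbols instead):

    import scala.reflect.runtime.universe._

    // by-name so each use builds a fresh tree, mirroring the `def lhsRef` in the patch
    def assignAndReturn(lhsRef: => Tree, rhs: Tree): Tree =
      Block(List(Assign(lhsRef, rhs)), lhsRef)

    val local = assignAndReturn(Ident(TermName("x")), Literal(Constant(42)))
    val field = assignAndReturn(Select(This(TypeName("C")), TermName("x")), Literal(Constant(42)))

    println(show(local))  // roughly: { x = 42; x }
    println(show(field))  // roughly: { C.this.x = 42; C.this.x }
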
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index cbbb4c8ba8..0731d78a9b 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -6,10 +6,6 @@
package scala.tools.nsc
package ast
-import scala.reflect.internal.HasFlags
-import scala.reflect.internal.Flags._
-import symtab._
-
/** This class ...
*
* @author Martin Odersky
@@ -18,8 +14,65 @@ import symtab._
abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
val global: Global
import global._
+ import definitions._
+
+ // arg1.op(arg2) returns (arg1, op.symbol, arg2)
+ object BinaryOp {
+ def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+ case Apply(sel @ Select(arg1, _), arg2 :: Nil) => Some((arg1, sel.symbol, arg2))
+ case _ => None
+ }
+ }
+ // recv.op[T1, ...] returns (recv, op.symbol, type argument types)
+ object TypeApplyOp {
+ def unapply(t: Tree): Option[(Tree, Symbol, List[Type])] = t match {
+ case TypeApply(sel @ Select(recv, _), targs) => Some((recv, sel.symbol, targs map (_.tpe)))
+ case _ => None
+ }
+ }
+
+ // x.asInstanceOf[T] returns (x, typeOf[T])
+ object AsInstanceOf {
+ def unapply(t: Tree): Option[(Tree, Type)] = t match {
+ case Apply(TypeApplyOp(recv, Object_asInstanceOf, tpe :: Nil), Nil) => Some((recv, tpe))
+ case _ => None
+ }
+ }
- import definitions.ThrowableClass
+ // Extractors for value classes.
+ object ValueClass {
+ def isValueClass(tpe: Type) = enteringErasure(tpe.typeSymbol.isDerivedValueClass)
+ def valueUnbox(tpe: Type) = enteringErasure(tpe.typeSymbol.derivedValueClassUnbox)
+
+ // B.unbox. Returns B.
+ object Unbox {
+ def unapply(t: Tree): Option[Tree] = t match {
+ case Apply(sel @ Select(ref, _), Nil) if valueUnbox(ref.tpe) == sel.symbol => Some(ref)
+ case _ => None
+ }
+ }
+ // new B(v). Returns B and v.
+ object Box {
+ def unapply(t: Tree): Option[(Tree, Type)] = t match {
+ case Apply(sel @ Select(New(tpt), nme.CONSTRUCTOR), v :: Nil) => Some((v, tpt.tpe.finalResultType))
+ case _ => None
+ }
+ }
+ // (new B(v)).unbox. returns v.
+ object BoxAndUnbox {
+ def unapply(t: Tree): Option[Tree] = t match {
+ case Unbox(Box(v, tpe)) if isValueClass(tpe) => Some(v)
+ case _ => None
+ }
+ }
+ // new B(v1) op new B(v2) where op is == or !=. Returns v1, op, v2.
+ object BoxAndCompare {
+ def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+ case BinaryOp(Box(v1, tpe1), op @ (Object_== | Object_!=), Box(v2, tpe2)) if isValueClass(tpe1) && tpe1 =:= tpe2 => Some((v1, op, v2))
+ case _ => None
+ }
+ }
+ }
/** Is tree legal as a member definition of an interface?
*/
@@ -34,15 +87,4 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
case DocDef(_, definition) => isPureDef(definition)
case _ => super.isPureDef(tree)
}
-
- /** Does list of trees start with a definition of
- * a class of module with given name (ignoring imports)
- */
- override def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
- case ClassDef(_, `name`, _, _) :: Nil => true
- case _ => super.firstDefinesClassOrObject(trees, name)
- }
-
- def isInterface(mods: HasFlags, body: List[Tree]) =
- mods.isTrait && (body forall isInterfaceMember)
}
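
A toy model (not compiler code) of the nested-extractor style used by the new ValueClass helpers: one unapply recognises the box, another the unbox, and nesting them lets a single pattern match "unbox(box(v))" and recover v directly, much as BoxAndUnbox and BoxAndCompare do above.

    object BoxDemo extends App {
      sealed trait Expr
      case class Num(n: Int)    extends Expr
      case class Box(v: Expr)   extends Expr
      case class Unbox(e: Expr) extends Expr

      object BoxAndUnbox {
        def unapply(e: Expr): Option[Expr] = e match {
          case Unbox(Box(v)) => Some(v)   // nested patterns compose the two extractors
          case _             => None
        }
      }

      Unbox(Box(Num(1))) match {
        case BoxAndUnbox(v) => println(s"redundant box/unbox around $v")
        case other          => println(s"left alone: $other")
      }
    }
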
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 0a12737572..cccae0c3a0 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -16,24 +16,6 @@ import scala.reflect.internal.Flags.TRAIT
import scala.compat.Platform.EOL
trait Trees extends scala.reflect.internal.Trees { self: Global =>
-
- def treeLine(t: Tree): String =
- if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
- else t.summaryString
-
- def treeStatus(t: Tree, enclosingTree: Tree = null) = {
- val parent = if (enclosingTree eq null) " " else " P#%5s".format(enclosingTree.id)
-
- "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
- }
- def treeSymStatus(t: Tree) = {
- val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else " "
- "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass,
- if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")"
- else treeLine(t)
- )
- }
-
// --- additional cases --------------------------------------------------------
/** Only used during parsing */
case class Parens(args: List[Tree]) extends Tree
@@ -65,69 +47,11 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// --- factory methods ----------------------------------------------------------
- /** Generates a template with constructor corresponding to
- *
- * constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
- * extends superclass(args_1) ... (args_n) with mixins { self => body }
- *
- * This gets translated to
- *
- * extends superclass with mixins { self =>
- * presupers' // presupers without rhs
- * vparamss // abstract fields corresponding to value parameters
- * def <init>(vparamss) {
- * presupers
- * super.<init>(args)
- * }
- * body
- * }
+ /** Factory method for a primary constructor super call `super.<init>(args_1)...(args_n)`
*/
- def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): Template = {
- /* Add constructor to template */
-
- // create parameters for <init> as synthetic trees.
- var vparamss1 = mmap(vparamss) { vd =>
- atPos(vd.pos.focus) {
- val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR)
- ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
- }
- }
- val (edefs, rest) = body span treeInfo.isEarlyDef
- val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
- val gvdefs = evdefs map {
- case vdef @ ValDef(_, _, tpt, _) =>
- copyValDef(vdef)(
- // atPos for the new tpt is necessary, since the original tpt might have no position
- // (when missing type annotation for ValDef for example), so even though setOriginal modifies the
- // position of TypeTree, it would still be NoPosition. That's what the author meant.
- tpt = atPos(vdef.pos.focus)(TypeTree() setOriginal tpt setPos tpt.pos.focus),
- rhs = EmptyTree
- )
- }
- val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
-
- val constrs = {
- if (constrMods hasFlag TRAIT) {
- if (body forall treeInfo.isInterfaceMember) List()
- else List(
- atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant())))))
- } else {
- // convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
- if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
- vparamss1 = List() :: vparamss1;
- val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
- val superCall = (superRef /: argss) (Apply.apply)
- List(
- atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
- DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
- }
- }
- constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
- // Field definitions for the class - remove defaults.
- val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
-
- Template(parents, self, gvdefs ::: fieldDefs ::: constrs ::: etdefs ::: rest)
+ def PrimarySuperCall(argss: List[List[Tree]]): Tree = argss match {
+ case Nil => Apply(gen.mkSuperInitCall, Nil)
+ case xs :: rest => rest.foldLeft(Apply(gen.mkSuperInitCall, xs): Tree)(Apply.apply)
}
/** Construct class definition with given class symbol, value parameters,
@@ -137,21 +61,17 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)`
* @param vparamss the value parameters -- if they have symbols they
* should be owned by `sym`
- * @param argss the supercall arguments
* @param body the template statements without primary constructor
* and value parameter fields.
*/
- def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef = {
+ def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
// "if they have symbols they should be owned by `sym`"
- assert(
- mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
- ((mmap(vparamss)(_.symbol), sym))
- )
+ assert(mforall(vparamss)(_.symbol.owner == sym), (mmap(vparamss)(_.symbol), sym))
ClassDef(sym,
- Template(sym.info.parents map TypeTree,
- if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
- constrMods, vparamss, argss, body, superPos))
+ gen.mkTemplate(sym.info.parents map TypeTree,
+ if (sym.thisSym == sym || phase.erasedTypes) noSelfType else ValDef(sym.thisSym),
+ constrMods, vparamss, body, superPos))
}
// --- subcomponents --------------------------------------------------
@@ -160,8 +80,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val global: Trees.this.type = self
} with TreeInfo
- lazy val treePrinter = newTreePrinter()
-
// --- additional cases in operations ----------------------------------
override protected def xtraverse(traverser: Traverser, tree: Tree): Unit = tree match {
@@ -227,7 +145,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
try unit.body = transform(unit.body)
catch {
case ex: Exception =>
- println(supplementErrorMessage("unhandled exception while transforming "+unit))
+ log(supplementErrorMessage("unhandled exception while transforming "+unit))
throw ex
}
}
@@ -265,7 +183,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(false, leaveAlone).transform(x)
def resetLocalAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone).transform(x)
- def resetLocalAttrsKeepLabels(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone, true).transform(x)
/** A transformer which resets symbol and tpe fields of all nodes in a given tree,
* with special treatment of:
@@ -324,6 +241,8 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
else
super.transform {
tree match {
+ case tree if !tree.canHaveAttrs =>
+ tree
case tpt: TypeTree =>
if (tpt.original != null)
transform(tpt.original)
@@ -331,9 +250,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val refersToLocalSymbols = tpt.tpe != null && (tpt.tpe exists (tp => locals contains tp.typeSymbol))
val isInferred = tpt.wasEmpty
if (refersToLocalSymbols || isInferred) {
- val dupl = tpt.duplicate
- dupl.tpe = null
- dupl
+ tpt.duplicate.clearType()
} else {
tpt
}
@@ -382,8 +299,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
}
- dupl.tpe = null
- dupl
+ dupl.clearType()
}
}
}
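
The new PrimarySuperCall folds each argument list into a further Apply, so multiple parameter sections become nested applications. A sketch of that fold with public reflection trees and hypothetical literal arguments (the compiler uses gen.mkSuperInitCall for the receiver):

    import scala.reflect.runtime.universe._

    val superInit: Tree = Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR)
    val argss: List[List[Tree]] = List(List(Literal(Constant(1))), List(Literal(Constant(2))))

    val call: Tree = argss match {
      case Nil        => Apply(superInit, Nil)
      case xs :: rest => rest.foldLeft(Apply(superInit, xs): Tree)(Apply.apply)
    }
    println(show(call))  // roughly: super.<init>(1)(2)
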
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 553a2088a6..d3f495f280 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -10,10 +10,7 @@ import scala.collection.mutable
import mutable.{ Buffer, ArrayBuffer, ListBuffer }
import scala.util.control.ControlThrowable
import scala.tools.nsc.util.CharArrayReader
-import scala.reflect.internal.util.SourceFile
-import scala.xml.{ Text, TextBuffer }
-import scala.xml.parsing.MarkupParserCommon
-import scala.xml.Utility.{ isNameStart, isNameChar, isSpace }
+import scala.tools.nsc.ast.parser.xml.{MarkupParserCommon, Utility}
import scala.reflect.internal.Chars.{ SU, LF }
// XXX/Note: many/most of the functions in here are almost direct cut and pastes
@@ -26,12 +23,6 @@ import scala.reflect.internal.Chars.{ SU, LF }
// I rewrote most of these, but not as yet the library versions: so if you are
// tempted to touch any of these, please be aware of that situation and try not
// to let it get any worse. -- paulp
-
-/** This trait ...
- *
- * @author Burak Emir
- * @version 1.0
- */
trait MarkupParsers {
self: Parsers =>
@@ -50,8 +41,8 @@ trait MarkupParsers {
import global._
class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends MarkupParserCommon {
-
- import Tokens.{ EMPTY, LBRACE, RBRACE }
+ import Utility.{ isNameStart, isSpace }
+ import Tokens.{ LBRACE, RBRACE }
type PositionType = Position
type InputType = CharArrayReader
@@ -89,7 +80,7 @@ trait MarkupParsers {
var xEmbeddedBlock = false
- private var debugLastStartElement = new mutable.Stack[(Int, String)]
+ private val debugLastStartElement = new mutable.Stack[(Int, String)]
private def debugLastPos = debugLastStartElement.top._1
private def debugLastElem = debugLastStartElement.top._2
@@ -107,7 +98,7 @@ trait MarkupParsers {
*/
def xCheckEmbeddedBlock: Boolean = {
// attentions, side-effect, used in xText
- xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') }
+ xEmbeddedBlock = (ch == '{') && { nextch(); (ch != '{') }
xEmbeddedBlock
}
@@ -123,8 +114,7 @@ trait MarkupParsers {
while (isNameStart(ch)) {
val start = curOffset
val key = xName
- xEQ
- val delim = ch
+ xEQ()
val mid = curOffset
val value: Tree = ch match {
case '"' | '\'' =>
@@ -137,7 +127,7 @@ trait MarkupParsers {
}
case '{' =>
- nextch
+ nextch()
xEmbeddedExpr
case SU =>
throw TruncatedXMLControl
@@ -150,7 +140,7 @@ trait MarkupParsers {
aMap(key) = value
if (ch != '/' && ch != '>')
- xSpace
+ xSpace()
}
aMap
}
@@ -181,22 +171,31 @@ trait MarkupParsers {
xTakeUntil(handle.comment, () => r2p(start, start, curOffset), "-->")
}
- def appendText(pos: Position, ts: Buffer[Tree], txt: String) {
- val toAppend =
- if (preserveWS) Seq(txt)
- else TextBuffer.fromString(txt).toText map (_.text)
+ def appendText(pos: Position, ts: Buffer[Tree], txt: String): Unit = {
+ def append(t: String) = ts append handle.text(pos, t)
+
+ if (preserveWS) append(txt)
+ else {
+ val sb = new StringBuilder()
+
+ txt foreach { c =>
+ if (!isSpace(c)) sb append c
+ else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
+ }
- toAppend foreach (t => ts append handle.text(pos, t))
+ val trimmed = sb.toString.trim
+ if (!trimmed.isEmpty) append(trimmed)
+ }
}
/** adds entity/character to ts as side-effect
* @precond ch == '&'
*/
def content_AMP(ts: ArrayBuffer[Tree]) {
- nextch
+ nextch()
val toAppend = ch match {
case '#' => // CharacterRef
- nextch
+ nextch()
val theChar = handle.text(tmppos, xCharRef)
xToken(';')
theChar
@@ -219,17 +218,14 @@ trait MarkupParsers {
/** Returns true if it encounters an end tag (without consuming it),
* appends trees to ts as side-effect.
- *
- * @param ts ...
- * @return ...
*/
private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
if (ch == '/')
return true // end tag
val toAppend = ch match {
- case '!' => nextch ; if (ch =='[') xCharData else xComment // CDATA or Comment
- case '?' => nextch ; xProcInstr // PI
+ case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment
+ case '?' => nextch() ; xProcInstr // PI
case _ => element // child node
}
@@ -246,7 +242,7 @@ trait MarkupParsers {
tmppos = o2p(curOffset)
ch match {
// end tag, cdata, comment, pi or child node
- case '<' => nextch ; if (content_LT(ts)) return ts
+ case '<' => nextch() ; if (content_LT(ts)) return ts
// either the character '{' or an embedded scala block }
case '{' => content_BRACE(tmppos, ts) // }
// EntityRef or CharRef
@@ -268,7 +264,7 @@ trait MarkupParsers {
val (qname, attrMap) = xTag(())
if (ch == '/') { // empty element
xToken("/>")
- handle.element(r2p(start, start, curOffset), qname, attrMap, true, new ListBuffer[Tree])
+ handle.element(r2p(start, start, curOffset), qname, attrMap, empty = true, new ListBuffer[Tree])
}
else { // handle content
xToken('>')
@@ -278,11 +274,11 @@ trait MarkupParsers {
debugLastStartElement.push((start, qname))
val ts = content
xEndTag(qname)
- debugLastStartElement.pop
+ debugLastStartElement.pop()
val pos = r2p(start, start, curOffset)
qname match {
case "xml:group" => handle.group(pos, ts)
- case _ => handle.element(pos, qname, attrMap, false, ts)
+ case _ => handle.element(pos, qname, attrMap, empty = false, ts)
}
}
}
@@ -297,12 +293,12 @@ trait MarkupParsers {
while (ch != SU) {
if (ch == '}') {
- if (charComingAfter(nextch) == '}') nextch
+ if (charComingAfter(nextch()) == '}') nextch()
else errorBraces()
}
buf append ch
- nextch
+ nextch()
if (xCheckEmbeddedBlock || ch == '<' || ch == '&')
return done
}
@@ -349,12 +345,12 @@ trait MarkupParsers {
content_LT(ts)
// parse more XML ?
- if (charComingAfter(xSpaceOpt) == '<') {
- xSpaceOpt
+ if (charComingAfter(xSpaceOpt()) == '<') {
+ xSpaceOpt()
while (ch == '<') {
- nextch
+ nextch()
ts append element
- xSpaceOpt
+ xSpaceOpt()
}
handle.makeXMLseq(r2p(start, start, curOffset), ts)
}
@@ -375,7 +371,7 @@ trait MarkupParsers {
saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) {
handle.isPattern = true
val tree = xPattern
- xSpaceOpt
+ xSpaceOpt()
tree
}
},
@@ -410,13 +406,13 @@ trait MarkupParsers {
* | Name [S] '/' '>'
*/
def xPattern: Tree = {
- var start = curOffset
+ val start = curOffset
val qname = xName
debugLastStartElement.push((start, qname))
- xSpaceOpt
+ xSpaceOpt()
val ts = new ArrayBuffer[Tree]
- val isEmptyTag = (ch == '/') && { nextch ; true }
+ val isEmptyTag = (ch == '/') && { nextch() ; true }
xToken('>')
if (!isEmptyTag) {
@@ -426,13 +422,13 @@ trait MarkupParsers {
if (xEmbeddedBlock) ts ++= xScalaPatterns
else ch match {
case '<' => // tag
- nextch
+ nextch()
if (ch != '/') ts append xPattern // child
else return false // terminate
case '{' => // embedded Scala patterns
while (ch == '{') {
- nextch
+ nextch()
ts ++= xScalaPatterns
}
assert(!xEmbeddedBlock, "problem with embedded block")
@@ -450,7 +446,7 @@ trait MarkupParsers {
while (doPattern) { } // call until false
xEndTag(qname)
- debugLastStartElement.pop
+ debugLastStartElement.pop()
}
handle.makeXMLpat(r2p(start, start, curOffset), qname, ts)
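
The rewritten appendText drops the dependency on scala.xml.TextBuffer and normalises whitespace inline: runs of XML whitespace collapse to a single space and the result is trimmed. A standalone model of that loop, with a plain isWhitespace test standing in for Utility.isSpace:

    // Sketch of the coalescing now done in appendText (hypothetical helper name).
    def normalizeXmlText(txt: String, isSpace: Char => Boolean = _.isWhitespace): String = {
      val sb = new StringBuilder
      txt foreach { c =>
        if (!isSpace(c)) sb append c
        else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
      }
      sb.toString.trim
    }

    println(normalizeXmlText(" hello \n\t world  "))  // prints: hello world
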
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index b9e4109623..0728fff74f 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -9,12 +9,12 @@
package scala.tools.nsc
package ast.parser
-import scala.collection.mutable.{ListBuffer, StringBuilder}
-import scala.reflect.internal.{ ModifierFlags => Flags }
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, StringBuilder }
+import scala.reflect.internal.{ Precedence, ModifierFlags => Flags }
import scala.reflect.internal.Chars.{ isScalaLetter }
-import scala.reflect.internal.util.{ SourceFile, OffsetPosition }
+import scala.reflect.internal.util.{ SourceFile, Position, FreshNameCreator }
import Tokens._
-import util.FreshNameCreator
/** Historical note: JavaParsers started life as a direct copy of Parsers
* but at a time when that Parsers had been replaced by a different one.
@@ -25,20 +25,23 @@ import util.FreshNameCreator
* the beginnings of a campaign against this latest incursion by Cutty
* McPastington and his army of very similar soldiers.
*/
-trait ParsersCommon extends ScannersCommon {
+trait ParsersCommon extends ScannersCommon { self =>
val global : Global
- import global._
+ // the use of currentUnit in the parser should be avoided as it might
+ // cause unexpected behaviour when you work with two units at the
+ // same time; use Parser.unit instead
+ import global.{currentUnit => _, _}
+
+ def newLiteral(const: Any) = Literal(Constant(const))
+ def literalUnit = newLiteral(())
/** This is now an abstract class, only to work around the optimizer:
* methods in traits are never inlined.
*/
abstract class ParserCommon {
val in: ScannerCommon
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def deprecationWarning(off: Int, msg: String): Unit
- def accept(token: Int): Int
+ def deprecationWarning(off: Offset, msg: String): Unit
+ def accept(token: Token): Int
/** Methods inParensOrError and similar take a second argument which, should
* the next token not be the expected opener (e.g. LPAREN) will be returned
@@ -56,7 +59,7 @@ trait ParsersCommon extends ScannersCommon {
if (in.token == LPAREN) inParens(body)
else { accept(LPAREN) ; alt }
- @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, Literal(Constant()))
+ @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, literalUnit)
@inline final def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil)
@inline final def inBraces[T](body: => T): T = {
@@ -70,7 +73,7 @@ trait ParsersCommon extends ScannersCommon {
else { accept(LBRACE) ; alt }
@inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil)
- @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(Constant()))
+ @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, literalUnit)
@inline final def dropAnyBraces[T](body: => T): T =
if (in.token == LBRACE) inBraces(body)
else body
@@ -94,7 +97,7 @@ trait ParsersCommon extends ScannersCommon {
* <ol>
* <li>
* Places all pattern variables in Bind nodes. In a pattern, for
- * identifiers <code>x</code>:<pre>
+ * identifiers `x`:<pre>
* x => x @ _
* x:T => x @ (_ : T)</pre>
* </li>
@@ -130,7 +133,9 @@ self =>
val global: Global
import global._
- case class OpInfo(operand: Tree, operator: Name, offset: Offset)
+ case class OpInfo(lhs: Tree, operator: TermName, targs: List[Tree], offset: Offset) {
+ def precedence = Precedence(operator.toString)
+ }
class SourceFileParser(val source: SourceFile) extends Parser {
@@ -141,37 +146,36 @@ self =>
if (source.isSelfContained) () => compilationUnit()
else () => scriptBody()
- def newScanner = new SourceFileScanner(source)
+ def newScanner(): Scanner = new SourceFileScanner(source)
- val in = newScanner
+ val in = newScanner()
in.init()
- private val globalFresh = new FreshNameCreator.Default
-
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = newTermName(globalFresh.newName(prefix))
- def freshTypeName(prefix: String): TypeName = newTypeName(globalFresh.newName(prefix))
-
- def o2p(offset: Int): Position = new OffsetPosition(source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
+ def unit = global.currentUnit
// suppress warnings; silent abort on errors
- def warning(offset: Int, msg: String) {}
- def deprecationWarning(offset: Int, msg: String) {}
+ def warning(offset: Offset, msg: String) {}
+ def deprecationWarning(offset: Offset, msg: String) {}
- def syntaxError(offset: Int, msg: String): Unit = throw new MalformedInput(offset, msg)
+ def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg)
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
- /** the markup parser */
- lazy val xmlp = new MarkupParser(this, preserveWS = true)
-
object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
val global: self.global.type = self.global
- def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
}
- def xmlLiteral : Tree = xmlp.xLiteral
- def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
+ /** the markup parser
+ * The first time this lazy val is accessed, we assume we were trying to parse an xml literal.
+ * The current position is recorded for later error reporting if it turns out
+ * that we don't have the xml library on the compilation classpath.
+ */
+ private[this] lazy val xmlp = {
+ unit.encounteredXml(o2p(in.offset))
+ new MarkupParser(this, preserveWS = true)
+ }
+
+ def xmlLiteral() : Tree = xmlp.xLiteral
+ def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
}
class OutlineParser(source: SourceFile) extends SourceFileParser(source) {
@@ -192,23 +196,19 @@ self =>
override def blockExpr(): Tree = skipBraces(EmptyTree)
- override def templateBody(isPre: Boolean) = skipBraces((emptyValDef, EmptyTree.asList))
+ override def templateBody(isPre: Boolean) = skipBraces((noSelfType, EmptyTree.asList))
}
- class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
+ class UnitParser(override val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) { uself =>
+ def this(unit: global.CompilationUnit) = this(unit, Nil)
- def this(unit: global.CompilationUnit) = this(unit, List())
+ override def newScanner() = new UnitScanner(unit, patches)
- override def newScanner = new UnitScanner(unit, patches)
-
- override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
- override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
-
- override def warning(offset: Int, msg: String) {
+ override def warning(offset: Offset, msg: String) {
unit.warning(o2p(offset), msg)
}
- override def deprecationWarning(offset: Int, msg: String) {
+ override def deprecationWarning(offset: Offset, msg: String) {
unit.deprecationWarning(o2p(offset), msg)
}
@@ -219,13 +219,14 @@ self =>
try body
finally smartParsing = saved
}
+ def withPatches(patches: List[BracePatch]): UnitParser = new UnitParser(unit, patches)
val syntaxErrors = new ListBuffer[(Int, String)]
def showSyntaxErrors() =
for ((offset, msg) <- syntaxErrors)
unit.error(o2p(offset), msg)
- override def syntaxError(offset: Int, msg: String) {
+ override def syntaxError(offset: Offset, msg: String) {
if (smartParsing) syntaxErrors += ((offset, msg))
else unit.error(o2p(offset), msg)
}
@@ -244,14 +245,15 @@ self =>
if (syntaxErrors.isEmpty) firstTry
else in.healBraces() match {
case Nil => showSyntaxErrors() ; firstTry
- case patches => new UnitParser(unit, patches).parse()
+ case patches => (this withPatches patches).parse()
}
}
}
- final val Local = 0
- final val InBlock = 1
- final val InTemplate = 2
+ type Location = Int
+ final val Local: Location = 0
+ final val InBlock: Location = 1
+ final val InTemplate: Location = 2
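The new `Location` alias above is a plain `Int` alias used as a lightweight enumeration: cheap to pass around, but with no extra type safety. A minimal self-contained sketch of the same pattern (names invented here, not part of the patch):

    object LocationDemo {
      type Location = Int
      final val Local: Location      = 0
      final val InBlock: Location    = 1
      final val InTemplate: Location = 2

      // Uppercase stable identifiers in patterns compare by value,
      // so the alias behaves like a poor man's enum.
      def describe(loc: Location): String = loc match {
        case Local      => "Local"
        case InBlock    => "InBlock"
        case InTemplate => "InTemplate"
        case _          => "unknown"
      }

      def main(args: Array[String]): Unit =
        println(describe(InBlock)) // prints "InBlock"
    }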
// These symbols may not yet be loaded (e.g. in the ide) so don't go
// through definitions to obtain the names.
@@ -268,20 +270,57 @@ self =>
import nme.raw
- abstract class Parser extends ParserCommon {
+ abstract class Parser extends ParserCommon { parser =>
val in: Scanner
+ def unit: CompilationUnit
+ def source: SourceFile
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, mid: Int, end: Int): Position
+ /** Scoping operator used to temporarily look into the future.
+ * Backs up scanner data before evaluating a block and restores it after.
+ */
+ @inline final def lookingAhead[T](body: => T): T = {
+ val saved = new ScannerData {} copyFrom in
+ in.nextToken()
+ try body finally in copyFrom saved
+ }
+
+ /** Perform an operation while peeking ahead.
+ * Push back if the operation yields an empty tree or throws an exception.
+ */
+ @inline def peekingAhead(tree: =>Tree): Tree = {
+ @inline def peekahead() = {
+ in.prev copyFrom in
+ in.nextToken()
+ }
+ @inline def pushback() = {
+ in.next copyFrom in
+ in copyFrom in.prev
+ }
+ peekahead()
+ // try it, in case it is recoverable
+ val res = try tree catch { case e: Exception => pushback() ; throw e }
+ if (res.isEmpty) pushback()
+ res
+ }
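Both `lookingAhead` and `peekingAhead` above follow a save-evaluate-restore discipline over the scanner state. A minimal sketch of that discipline over a list-backed token stream (hypothetical names, not the compiler's Scanner API):

    class TokenStream(tokens: List[String]) {
      private var rest: List[String] = tokens
      def current: String = rest.headOption.getOrElse("<EOF>")
      def next(): Unit = if (rest.nonEmpty) rest = rest.tail

      // Back up the stream position, evaluate the block, then restore the position.
      def lookingAhead[T](body: => T): T = {
        val saved = rest
        next()
        try body finally rest = saved
      }
    }

    object LookaheadDemo {
      def main(args: Array[String]): Unit = {
        val in = new TokenStream(List("if", "(", "x", ")"))
        val nextIsParen = in.lookingAhead(in.current == "(")
        assert(nextIsParen)          // the block saw the token after "if"
        assert(in.current == "if")   // ...but the stream position was restored
      }
    }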
+
+ class ParserTreeBuilder extends TreeBuilder {
+ val global: self.global.type = self.global
+ def unit = parser.unit
+ def source = parser.source
+ }
+ val treeBuilder = new ParserTreeBuilder
+ import treeBuilder.{global => _, unit => _, source => _, fresh => _, _}
+
+ implicit def fresh: FreshNameCreator = unit.fresh
+
+ def o2p(offset: Offset): Position = Position.offset(source, offset)
+ def r2p(start: Offset, mid: Offset, end: Offset): Position = rangePos(source, start, mid, end)
+ def r2p(start: Offset, mid: Offset): Position = r2p(start, mid, in.lastOffset max start)
+ def r2p(offset: Offset): Position = r2p(offset, offset)
/** whether a non-continuable syntax error has been seen */
private var lastErrorOffset : Int = -1
- import treeBuilder.{global => _, _}
-
/** The types of the context bounds of type parameters of the surrounding class
*/
private var classContextBounds: List[Tree] = Nil
@@ -291,6 +330,7 @@ self =>
finally classContextBounds = saved
}
+
/** Are we inside the Scala package? Set for files that start with package scala
*/
private var inScalaPackage = false
@@ -299,73 +339,75 @@ self =>
inScalaPackage = false
currentPackage = ""
}
- private lazy val primitiveNames: Set[Name] = tpnme.ScalaValueNames.toSet
-
- private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
- private def isScalaArray(name: Name) = inScalaRootPackage && name == tpnme.Array
- private def isPrimitiveType(name: Name) = inScalaRootPackage && primitiveNames(name)
+ private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
def parseStartRule: () => Tree
- /** This is the general parse entry point.
- */
- def parse(): Tree = {
- val t = parseStartRule()
+ def parseRule[T](rule: this.type => T): T = {
+ val t = rule(this)
accept(EOF)
t
}
+ /** This is the general parse entry point.
+ */
+ def parse(): Tree = parseRule(_.parseStartRule())
+
+ /** These are alternative entry points for repl, script runner, toolbox and parsing in macros.
+ */
+ def parseStats(): List[Tree] = parseRule(_.templateStats())
+ def parseStatsOrPackages(): List[Tree] = parseRule(_.templateOrTopStatSeq())
+
/** This is the parse entry point for code which is not self-contained, e.g.
* a script which is a series of template statements. They will be
* swaddled in Trees until the AST is equivalent to the one returned
* by compilationUnit().
*/
def scriptBody(): Tree = {
- val stmts = templateStats()
- accept(EOF)
+ val stmts = parseStats()
def mainModuleName = newTermName(settings.script.value)
- /** If there is only a single object template in the file and it has a
- * suitable main method, we will use it rather than building another object
- * around it. Since objects are loaded lazily the whole script would have
- * been a no-op, so we're not taking much liberty.
+ /* If there is only a single object template in the file and it has a
+ * suitable main method, we will use it rather than building another object
+ * around it. Since objects are loaded lazily the whole script would have
+ * been a no-op, so we're not taking much liberty.
*/
def searchForMain(): Option[Tree] = {
- /** Have to be fairly liberal about what constitutes a main method since
- * nothing has been typed yet - for instance we can't assume the parameter
- * type will look exactly like "Array[String]" as it could have been renamed
- * via import, etc.
+ /* Have to be fairly liberal about what constitutes a main method since
+ * nothing has been typed yet - for instance we can't assume the parameter
+ * type will look exactly like "Array[String]" as it could have been renamed
+ * via import, etc.
*/
def isMainMethod(t: Tree) = t match {
case DefDef(_, nme.main, Nil, List(_), _, _) => true
case _ => false
}
- /** For now we require there only be one top level object. */
+ /* For now we require there only be one top level object. */
var seenModule = false
val newStmts = stmts collect {
case t @ Import(_, _) => t
case md @ ModuleDef(mods, name, template) if !seenModule && (md exists isMainMethod) =>
seenModule = true
- /** This slightly hacky situation arises because we have no way to communicate
- * back to the scriptrunner what the name of the program is. Even if we were
- * willing to take the sketchy route of settings.script.value = progName, that
- * does not work when using fsc. And to find out in advance would impose a
- * whole additional parse. So instead, if the actual object's name differs from
- * what the script is expecting, we transform it to match.
+ /* This slightly hacky situation arises because we have no way to communicate
+ * back to the scriptrunner what the name of the program is. Even if we were
+ * willing to take the sketchy route of settings.script.value = progName, that
+ * does not work when using fsc. And to find out in advance would impose a
+ * whole additional parse. So instead, if the actual object's name differs from
+ * what the script is expecting, we transform it to match.
*/
if (name == mainModuleName) md
else treeCopy.ModuleDef(md, mods, mainModuleName, template)
case _ =>
- /** If we see anything but the above, fail. */
+ /* If we see anything but the above, fail. */
return None
}
- Some(makePackaging(0, emptyPkg, newStmts))
+ Some(makeEmptyPackage(0, newStmts))
}
if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain))
searchForMain() foreach { return _ }
- /** Here we are building an AST representing the following source fiction,
+ /* Here we are building an AST representing the following source fiction,
* where `moduleName` is from -Xscript (defaults to "Main") and <stmts> are
* the result of parsing the script file.
*
@@ -380,31 +422,28 @@ self =>
* }
* }}}
*/
- import definitions._
-
- def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
NoMods,
nme.CONSTRUCTOR,
Nil,
ListOfNil,
TypeTree(),
- Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(())))
+ Block(List(Apply(gen.mkSuperInitCall, Nil)), literalUnit)
)
// def main
def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.argv, mainParamType, EmptyTree))
def mainSetArgv = List(ValDef(NoMods, nme.args, TypeTree(), Ident(nme.argv)))
- def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, makeAnonymousNew(stmts)))
+ def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, gen.mkAnonymousNew(stmts)))
// object Main
def moduleName = newTermName(ScriptRunner scriptMain settings)
- def moduleBody = Template(List(atPos(o2p(in.offset))(scalaAnyRefConstr)), emptyValDef, List(emptyInit, mainDef))
+ def moduleBody = Template(atInPos(scalaAnyRefConstr) :: Nil, noSelfType, List(emptyInit, mainDef))
def moduleDef = ModuleDef(NoMods, moduleName, moduleBody)
// package <empty> { ... }
- makePackaging(0, emptyPkg, List(moduleDef))
+ makeEmptyPackage(0, moduleDef :: Nil)
}
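For orientation, the wrapper that `scriptBody` builds corresponds roughly to the following hand-written source. This is a sketch inferred from the tree construction above (`mainSetArgv`, `gen.mkAnonymousNew`, the empty package); the real trees also carry positions, and the object name comes from `-Xscript` (default "Main"):

    object Main {
      def main(argv: Array[String]): Unit = {
        val args = argv                           // mirrors mainSetArgv
        new AnyRef {                              // mirrors gen.mkAnonymousNew(stmts)
          println("script statements go here")    // <stmts> from the script file
        }
      }
    }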
/* --------------- PLACEHOLDERS ------------------------------------------- */
@@ -429,13 +468,13 @@ self =>
placeholderParams match {
case vd :: _ =>
- syntaxError(vd.pos, "unbound placeholder parameter", false)
+ syntaxError(vd.pos, "unbound placeholder parameter", skipIt = false)
placeholderParams = List()
case _ =>
}
placeholderTypes match {
case td :: _ =>
- syntaxError(td.pos, "unbound wildcard type", false)
+ syntaxError(td.pos, "unbound wildcard type", skipIt = false)
placeholderTypes = List()
case _ =>
}
@@ -468,7 +507,7 @@ self =>
/* ------------- ERROR HANDLING ------------------------------------------- */
- var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ val assumedClosingParens = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
private var inFunReturnType = false
@inline private def fromWithinReturnType[T](body: => T): T = {
@@ -478,7 +517,7 @@ self =>
finally inFunReturnType = saved
}
- protected def skip(targetToken: Int) {
+ protected def skip(targetToken: Token) {
var nparens = 0
var nbraces = 0
while (true) {
@@ -506,17 +545,17 @@ self =>
in.nextToken()
}
}
- def warning(offset: Int, msg: String): Unit
+ def warning(offset: Offset, msg: String): Unit
def incompleteInputError(msg: String): Unit
private def syntaxError(pos: Position, msg: String, skipIt: Boolean) {
syntaxError(pos pointOrElse in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String): Unit
+ def syntaxError(offset: Offset, msg: String): Unit
def syntaxError(msg: String, skipIt: Boolean) {
syntaxError(in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String, skipIt: Boolean) {
+ def syntaxError(offset: Offset, msg: String, skipIt: Boolean) {
if (offset > lastErrorOffset) {
syntaxError(offset, msg)
// no more errors on this token.
@@ -534,15 +573,19 @@ self =>
else
syntaxError(in.offset, msg, skipIt)
}
+ def syntaxErrorOrIncompleteAnd[T](msg: String, skipIt: Boolean)(and: T): T = {
+ syntaxErrorOrIncomplete(msg, skipIt)
+ and
+ }
- def expectedMsg(token: Int): String =
- token2string(token) + " expected but " +token2string(in.token) + " found."
+ def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found."
+ def expectedMsg(token: Token): String = expectedMsgTemplate(token2string(token), token2string(in.token))
/** Consume one token of the specified type, or signal an error if it is not there. */
- def accept(token: Int): Int = {
+ def accept(token: Token): Offset = {
val offset = in.offset
if (in.token != token) {
- syntaxErrorOrIncomplete(expectedMsg(token), false)
+ syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false)
if ((token == RPAREN || token == RBRACE || token == RBRACKET))
if (in.parenBalance(token) + assumedClosingParens(token) < 0)
assumedClosingParens(token) += 1
@@ -568,25 +611,16 @@ self =>
if (!isStatSeqEnd)
acceptStatSep()
- def errorTypeTree = TypeTree() setType ErrorType setPos o2p(in.offset)
- def errorTermTree = Literal(Constant(null)) setPos o2p(in.offset)
- def errorPatternTree = Ident(nme.WILDCARD) setPos o2p(in.offset)
+ def errorTypeTree = setInPos(TypeTree() setType ErrorType)
+ def errorTermTree = setInPos(newLiteral(null))
+ def errorPatternTree = setInPos(Ident(nme.WILDCARD))
/** Check that type parameter is not by name or repeated. */
def checkNotByNameOrVarargs(tpt: Tree) = {
if (treeInfo isByNameParamType tpt)
- syntaxError(tpt.pos, "no by-name parameter type allowed here", false)
+ syntaxError(tpt.pos, "no by-name parameter type allowed here", skipIt = false)
else if (treeInfo isRepeatedParamType tpt)
- syntaxError(tpt.pos, "no * parameter type allowed here", false)
- }
-
- /** Check that tree is a legal clause of a forSome. */
- def checkLegalExistential(t: Tree) = t match {
- case TypeDef(_, _, _, TypeBoundsTree(_, _)) |
- ValDef(_, _, _, EmptyTree) | EmptyTree =>
- ;
- case _ =>
- syntaxError(t.pos, "not a legal existential clause", false)
+ syntaxError(tpt.pos, "no * parameter type allowed here", skipIt = false)
}
/* -------------- TOKEN CLASSES ------------------------------------------- */
@@ -597,6 +631,8 @@ self =>
case _ => false
}
+ def isAnnotation: Boolean = in.token == AT
+
def isLocalModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
case _ => false
@@ -617,20 +653,25 @@ self =>
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT => true
case _ => false
}
+
+ def isIdentExcept(except: Name) = isIdent && in.name != except
+ def isIdentOf(name: Name) = isIdent && in.name == name
+
def isUnaryOp = isIdent && raw.isUnary(in.name)
def isRawStar = isIdent && in.name == raw.STAR
def isRawBar = isIdent && in.name == raw.BAR
def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
+ def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw
- def isLiteralToken(token: Int) = token match {
+ def isLiteralToken(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true
case _ => false
}
def isLiteral = isLiteralToken(in.token)
- def isExprIntroToken(token: Int): Boolean = isLiteralToken(token) || (token match {
+ def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
case IDENTIFIER | BACKQUOTED_IDENT |
THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true
@@ -639,17 +680,17 @@ self =>
def isExprIntro: Boolean = isExprIntroToken(in.token)
- def isTypeIntroToken(token: Int): Boolean = token match {
+ def isTypeIntroToken(token: Token): Boolean = token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS |
SUPER | USCORE | LPAREN | AT => true
case _ => false
}
- def isTypeIntro: Boolean = isTypeIntroToken(in.token)
-
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
- def isStatSep(token: Int): Boolean =
+ def isCaseDefEnd = in.token == RBRACE || in.token == CASE || in.token == EOF
+
+ def isStatSep(token: Token): Boolean =
token == NEWLINE || token == NEWLINES || token == SEMI
def isStatSep: Boolean = isStatSep(in.token)
@@ -657,42 +698,20 @@ self =>
/* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */
- /** Join the comment associated with a definition. */
- def joinComment(trees: => List[Tree]): List[Tree] = {
- val doc = in.flushDoc
- if ((doc ne null) && doc.raw.length > 0) {
- val joined = trees map {
- t =>
- DocDef(doc, t) setPos {
- if (t.pos.isDefined) {
- val pos = doc.pos.withEnd(t.pos.endOrPoint)
- // always make the position transparent
- pos.makeTransparent
- } else {
- t.pos
- }
- }
- }
- joined.find(_.pos.isOpaqueRange) foreach {
- main =>
- val mains = List(main)
- joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
- }
- joined
- }
- else trees
- }
+ /** A hook for joining the comment associated with a definition.
+ * Overridden by scaladoc.
+ */
+ def joinComment(trees: => List[Tree]): List[Tree] = trees
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
- def atPos[T <: Tree](offset: Int)(t: T): T =
- global.atPos(r2p(offset, offset, in.lastOffset max offset))(t)
- def atPos[T <: Tree](start: Int, point: Int)(t: T): T =
- global.atPos(r2p(start, point, in.lastOffset max start))(t)
- def atPos[T <: Tree](start: Int, point: Int, end: Int)(t: T): T =
- global.atPos(r2p(start, point, end))(t)
- def atPos[T <: Tree](pos: Position)(t: T): T =
- global.atPos(pos)(t)
+ def atPos[T <: Tree](offset: Offset)(t: T): T = atPos(r2p(offset))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset)(t: T): T = atPos(r2p(start, point))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset, end: Offset)(t: T): T = atPos(r2p(start, point, end))(t)
+ def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
+
+ def atInPos[T <: Tree](t: T): T = atPos(o2p(in.offset))(t)
+ def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
/** Convert tree to formal parameter list. */
def convertToParams(tree: Tree): List[ValDef] = tree match {
@@ -705,29 +724,40 @@ self =>
def removeAsPlaceholder(name: Name) {
placeholderParams = placeholderParams filter (_.name != name)
}
+ def errorParam = makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.end))
tree match {
case Ident(name) =>
removeAsPlaceholder(name)
- makeParam(name, TypeTree() setPos o2p(tree.pos.endOrPoint))
+ makeParam(name.toTermName, TypeTree() setPos o2p(tree.pos.end))
case Typed(Ident(name), tpe) if tpe.isType => // get the ident!
removeAsPlaceholder(name)
- makeParam(name, tpe)
+ makeParam(name.toTermName, tpe)
+ case build.SyntacticTuple(as) =>
+ val arity = as.length
+ val example = analyzer.exampleTuplePattern(as map { case Ident(name) => name; case _ => nme.EMPTY })
+ val msg =
+ sm"""|not a legal formal parameter.
+ |Note: Tuples cannot be directly destructured in method or function parameters.
+ | Either create a single parameter accepting the Tuple${arity},
+ | or consider a pattern matching anonymous function: `{ case $example => ... }`"""
+ syntaxError(tree.pos, msg, skipIt = false)
+ errorParam
case _ =>
- syntaxError(tree.pos, "not a legal formal parameter", false)
- makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.endOrPoint))
+ syntaxError(tree.pos, "not a legal formal parameter", skipIt = false)
+ errorParam
}
}
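The new error message above points users away from tuple destructuring in parameter lists and toward pattern-matching anonymous functions. The two accepted spellings it contrasts, in plain Scala:

    object TupleParamDemo {
      def main(args: Array[String]): Unit = {
        val pairs = List((1, 2), (3, 4))
        // Rejected by the parser: pairs.map((a, b) => a + b) -- tuples are not destructured in parameter lists
        val viaTupleParam = pairs.map(p => p._1 + p._2)          // single parameter accepting the Tuple2
        val viaCaseLambda = pairs.map { case (a, b) => a + b }   // pattern-matching anonymous function
        assert(viaTupleParam == viaCaseLambda)
      }
    }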
/** Convert (qual)ident to type identifier. */
def convertToTypeId(tree: Tree): Tree = atPos(tree.pos) {
convertToTypeName(tree) getOrElse {
- syntaxError(tree.pos, "identifier expected", false)
+ syntaxError(tree.pos, "identifier expected", skipIt = false)
errorTypeTree
}
}
/** {{{ part { `sep` part } }}},or if sepFirst is true, {{{ { `sep` part } }}}. */
- final def tokenSeparated[T](separator: Int, sepFirst: Boolean, part: => T): List[T] = {
+ final def tokenSeparated[T](separator: Token, sepFirst: Boolean, part: => T): List[T] = {
val ts = new ListBuffer[T]
if (!sepFirst)
ts += part
@@ -740,7 +770,7 @@ self =>
}
@inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, sepFirst = false, part)
@inline final def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, sepFirst = true, part)
- @inline final def readAnnots[T](part: => T): List[T] = tokenSeparated(AT, sepFirst = true, part)
+ def readAnnots(part: => Tree): List[Tree] = tokenSeparated(AT, sepFirst = true, part)
/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
@@ -751,53 +781,66 @@ self =>
var opstack: List[OpInfo] = Nil
- def precedence(operator: Name): Int =
- if (operator eq nme.ERROR) -1
- else {
- val firstCh = operator.startChar
- if (isScalaLetter(firstCh)) 1
- else if (nme.isOpAssignmentName(operator)) 0
- else firstCh match {
- case '|' => 2
- case '^' => 3
- case '&' => 4
- case '=' | '!' => 5
- case '<' | '>' => 6
- case ':' => 7
- case '+' | '-' => 8
- case '*' | '/' | '%' => 9
- case _ => 10
- }
- }
+ @deprecated("Use `scala.reflect.internal.Precedence`", "2.11.0")
+ def precedence(operator: Name): Int = Precedence(operator.toString).level
- def checkSize(kind: String, size: Int, max: Int) {
- if (size > max) syntaxError("too many "+kind+", maximum = "+max, false)
+ private def opHead = opstack.head
+ private def headPrecedence = opHead.precedence
+ private def popOpInfo(): OpInfo = try opHead finally opstack = opstack.tail
+ private def pushOpInfo(top: Tree): Unit = {
+ val name = in.name
+ val offset = in.offset
+ ident()
+ val targs = if (in.token == LBRACKET) exprTypeArgs() else Nil
+ val opinfo = OpInfo(top, name, targs, offset)
+ opstack ::= opinfo
}
- def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
+ def checkHeadAssoc(leftAssoc: Boolean) = checkAssoc(opHead.offset, opHead.operator, leftAssoc)
+ def checkAssoc(offset: Offset, op: Name, leftAssoc: Boolean) = (
if (treeInfo.isLeftAssoc(op) != leftAssoc)
- syntaxError(
- offset, "left- and right-associative operators with same precedence may not be mixed", false)
-
- def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree, prec: Int, leftAssoc: Boolean): Tree = {
- var top = top0
- if (opstack != base && precedence(opstack.head.operator) == prec)
- checkAssoc(opstack.head.offset, opstack.head.operator, leftAssoc)
- while (opstack != base &&
- (prec < precedence(opstack.head.operator) ||
- leftAssoc && prec == precedence(opstack.head.operator))) {
- val opinfo = opstack.head
- opstack = opstack.tail
- val opPos = r2p(opinfo.offset, opinfo.offset, opinfo.offset+opinfo.operator.length)
- val lPos = opinfo.operand.pos
- val start = if (lPos.isDefined) lPos.startOrPoint else opPos.startOrPoint
- val rPos = top.pos
- val end = if (rPos.isDefined) rPos.endOrPoint else opPos.endOrPoint
- top = atPos(start, opinfo.offset, end) {
- makeBinop(isExpr, opinfo.operand, opinfo.operator, top, opPos)
- }
- }
- top
+ syntaxError(offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false)
+ )
+
+ def finishPostfixOp(start: Int, base: List[OpInfo], opinfo: OpInfo): Tree = {
+ if (opinfo.targs.nonEmpty)
+ syntaxError(opinfo.offset, "type application is not allowed for postfix operators")
+
+ val od = stripParens(reduceExprStack(base, opinfo.lhs))
+ makePostfixSelect(start, opinfo.offset, od, opinfo.operator)
+ }
+
+ def finishBinaryOp(isExpr: Boolean, opinfo: OpInfo, rhs: Tree): Tree = {
+ import opinfo._
+ val operatorPos: Position = Position.range(rhs.pos.source, offset, offset, offset + operator.length)
+ val pos = lhs.pos union rhs.pos union operatorPos withPoint offset
+
+ atPos(pos)(makeBinop(isExpr, lhs, operator, rhs, operatorPos, opinfo.targs))
+ }
+
+ def reduceExprStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = true, base, top)
+ def reducePatternStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = false, base, top)
+
+ def reduceStack(isExpr: Boolean, base: List[OpInfo], top: Tree): Tree = {
+ val opPrecedence = if (isIdent) Precedence(in.name.toString) else Precedence(0)
+ val leftAssoc = !isIdent || (treeInfo isLeftAssoc in.name)
+
+ reduceStack(isExpr, base, top, opPrecedence, leftAssoc)
+ }
+
+ def reduceStack(isExpr: Boolean, base: List[OpInfo], top: Tree, opPrecedence: Precedence, leftAssoc: Boolean): Tree = {
+ def isDone = opstack == base
+ def lowerPrecedence = !isDone && (opPrecedence < headPrecedence)
+ def samePrecedence = !isDone && (opPrecedence == headPrecedence)
+ def canReduce = lowerPrecedence || leftAssoc && samePrecedence
+
+ if (samePrecedence)
+ checkHeadAssoc(leftAssoc)
+
+ def loop(top: Tree): Tree =
+ if (canReduce) loop(finishBinaryOp(isExpr, popOpInfo(), top)) else top
+
+ loop(top)
}
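The `reduceStack`/`finishBinaryOp` machinery above reduces the operand stack according to the usual precedence and associativity rules. Those rules are observable from ordinary code; a small sanity check in plain Scala:

    object InfixRulesDemo {
      def main(args: Array[String]): Unit = {
        assert(1 + 2 * 3 == 7)                       // '*' binds tighter than '+'
        assert((1 :: 2 :: Nil) == List(1, 2))        // operators ending in ':' are right-associative
        assert((1 :: 2 :: Nil) == Nil.::(2).::(1))   // ...and desugar to calls on the right-hand operand
      }
    }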
/* -------- IDENTIFIERS AND LITERALS ------------------------------------------- */
@@ -814,7 +857,7 @@ self =>
def argType(): Tree
def functionArgType(): Tree
- private def tupleInfixType(start: Int) = {
+ private def tupleInfixType(start: Offset) = {
in.nextToken()
if (in.token == RPAREN) {
in.nextToken()
@@ -827,7 +870,7 @@ self =>
atPos(start, in.skipToken()) { makeFunctionTypeTree(ts, typ()) }
else {
ts foreach checkNotByNameOrVarargs
- val tuple = atPos(start) { makeTupleType(ts, flattenUnary = true) }
+ val tuple = atPos(start) { makeTupleType(ts) }
infixTypeRest(
compoundTypeRest(
annotTypeRest(
@@ -839,9 +882,14 @@ self =>
}
}
private def makeExistentialTypeTree(t: Tree) = {
- val whereClauses = refinement()
- whereClauses foreach checkLegalExistential
- ExistentialTypeTree(t, whereClauses)
+ // EmptyTrees in the result of refinement() stand for parse errors
+ // so it's okay for us to filter them out here
+ ExistentialTypeTree(t, refinement() flatMap {
+ case t @ TypeDef(_, _, _, TypeBoundsTree(_, _)) => Some(t)
+ case t @ ValDef(_, _, _, EmptyTree) => Some(t)
+ case EmptyTree => None
+ case _ => syntaxError(t.pos, "not a legal existential clause", skipIt = false); None
+ })
}
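`makeExistentialTypeTree` above keeps only two clause forms: a type member with bounds (`TypeDef` with `TypeBoundsTree`) and an abstract `val` declaration (`ValDef` with an empty right-hand side). A sketch of the first, more common form (the `existentials` import only silences the feature warning):

    import scala.language.existentials

    object ExistentialDemo {
      type RefList = List[T] forSome { type T <: AnyRef }   // legal existential clause: bounded type member

      def main(args: Array[String]): Unit = {
        val xs: RefList = List("a", "b")                    // List[String] conforms, since String <: AnyRef
        println(xs.size)
      }
    }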
/** {{{
@@ -889,7 +937,7 @@ self =>
def simpleType(): Tree = {
val start = in.offset
simpleTypeRest(in.token match {
- case LPAREN => atPos(start)(makeTupleType(inParens(types()), flattenUnary = true))
+ case LPAREN => atPos(start)(makeTupleType(inParens(types())))
case USCORE => wildcardType(in.skipToken())
case _ =>
path(thisOK = false, typeOK = true) match {
@@ -904,11 +952,11 @@ self =>
val nameOffset = in.offset
val name = identForType(skipIt = false)
val point = if (name == tpnme.ERROR) hashOffset else nameOffset
- atPos(t.pos.startOrPoint, point)(SelectFromTypeTree(t, name))
+ atPos(t.pos.start, point)(SelectFromTypeTree(t, name))
}
def simpleTypeRest(t: Tree): Tree = in.token match {
case HASH => simpleTypeRest(typeProjection(t))
- case LBRACKET => simpleTypeRest(atPos(t.pos.startOrPoint, t.pos.point)(AppliedTypeTree(t, typeArgs())))
+ case LBRACKET => simpleTypeRest(atPos(t.pos.start, t.pos.point)(AppliedTypeTree(t, typeArgs())))
case _ => t
}
@@ -918,32 +966,34 @@ self =>
* }}}
*/
def compoundType(): Tree = compoundTypeRest(
- if (in.token == LBRACE) atPos(o2p(in.offset))(scalaAnyRefConstr)
+ if (in.token == LBRACE) atInPos(scalaAnyRefConstr)
else annotType()
)
def compoundTypeRest(t: Tree): Tree = {
- var ts = new ListBuffer[Tree] += t
+ val ts = new ListBuffer[Tree] += t
while (in.token == WITH) {
in.nextToken()
ts += annotType()
}
newLineOptWhenFollowedBy(LBRACE)
- atPos(t.pos.startOrPoint) {
- if (in.token == LBRACE) {
- // Warn if they are attempting to refine Unit; we can't be certain it's
- // scala.Unit they're refining because at this point all we have is an
- // identifier, but at a later stage we lose the ability to tell an empty
- // refinement from no refinement at all. See bug #284.
- for (Ident(name) <- ts) name.toString match {
- case "Unit" | "scala.Unit" =>
- warning("Detected apparent refinement of Unit; are you missing an '=' sign?")
- case _ =>
- }
- CompoundTypeTree(Template(ts.toList, emptyValDef, refinement()))
- }
- else
- makeIntersectionTypeTree(ts.toList)
+ val types = ts.toList
+ val braceOffset = in.offset
+ val hasRefinement = in.token == LBRACE
+ val refinements = if (hasRefinement) refinement() else Nil
+ // Warn if they are attempting to refine Unit; we can't be certain it's
+ // scala.Unit they're refining because at this point all we have is an
+ // identifier, but at a later stage we lose the ability to tell an empty
+ // refinement from no refinement at all. See bug #284.
+ if (hasRefinement) types match {
+ case Ident(name) :: Nil if name endsWith "Unit" => warning(braceOffset, "Detected apparent refinement of Unit; are you missing an '=' sign?")
+ case _ =>
+ }
+ // The second case includes an empty refinement - refinements is empty, but
+ // it still gets a CompoundTypeTree.
+ ts.toList match {
+ case tp :: Nil if !hasRefinement => tp // single type, no refinement, already positioned
+ case tps => atPos(t.pos.start)(CompoundTypeTree(Template(tps, noSelfType, refinements)))
}
}
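`compoundTypeRest` above distinguishes a bare intersection from one carrying a refinement block (only the latter gets a `CompoundTypeTree` with a `Template`). Both shapes as they appear in source, a sketch:

    import scala.language.reflectiveCalls

    object CompoundTypeDemo {
      type Marker = Serializable with Cloneable      // plain intersection: no refinement block
      type Sized  = AnyRef { def size: Int }         // refinement: becomes a CompoundTypeTree

      def main(args: Array[String]): Unit = {
        val s: Sized = new AnyRef { def size: Int = 3 }
        println(s.size)                              // structural call, hence the reflectiveCalls import
      }
    }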
@@ -955,7 +1005,7 @@ self =>
val op = identForType()
val tycon = atPos(opOffset) { Ident(op) }
newLineOptWhenFollowing(isTypeIntroToken)
- def mkOp(t1: Tree) = atPos(t.pos.startOrPoint, opOffset) { AppliedTypeTree(tycon, List(t, t1)) }
+ def mkOp(t1: Tree) = atPos(t.pos.start, opOffset) { AppliedTypeTree(tycon, List(t, t1)) }
if (leftAssoc)
infixTypeRest(mkOp(compoundType()), InfixMode.LeftOp)
else
@@ -979,15 +1029,15 @@ self =>
}
/** Assumed (provisionally) to be TermNames. */
- def ident(skipIt: Boolean): Name =
+ def ident(skipIt: Boolean): Name = (
if (isIdent) {
val name = in.name.encode
in.nextToken()
name
- } else {
- syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
- nme.ERROR
}
+ else syntaxErrorOrIncompleteAnd(expectedMsg(IDENTIFIER), skipIt)(nme.ERROR)
+ )
+
def ident(): Name = ident(skipIt = true)
def rawIdent(): Name = try in.name finally in.nextToken()
@@ -995,11 +1045,13 @@ self =>
def identForType(): TypeName = ident().toTypeName
def identForType(skipIt: Boolean): TypeName = ident(skipIt).toTypeName
+ def identOrMacro(): Name = if (isMacro) rawIdent() else ident()
+
def selector(t: Tree): Tree = {
val point = in.offset
//assert(t.pos.isDefined, t)
if (t != EmptyTree)
- Select(t, ident(skipIt = false)) setPos r2p(t.pos.startOrPoint, point, in.lastOffset)
+ Select(t, ident(skipIt = false)) setPos r2p(t.pos.start, point, in.lastOffset)
else
errorTermTree // has already been reported
}
@@ -1053,10 +1105,10 @@ self =>
t
}
- def selectors(t: Tree, typeOK: Boolean, dotOffset: Int): Tree =
+ def selectors(t: Tree, typeOK: Boolean, dotOffset: Offset): Tree =
if (typeOK && in.token == TYPE) {
in.nextToken()
- atPos(t.pos.startOrPoint, dotOffset) { SingletonTypeTree(t) }
+ atPos(t.pos.start, dotOffset) { SingletonTypeTree(t) }
}
else {
val t1 = selector(t)
@@ -1111,72 +1163,90 @@ self =>
* | null
* }}}
*/
- def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = {
- atPos(start) {
- def finish(value: Any): Tree = {
- val t = Literal(Constant(value))
- in.nextToken()
- t
- }
- if (in.token == SYMBOLLIT)
- Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
- else if (in.token == INTERPOLATIONID)
- interpolatedString(inPattern = inPattern)
- else finish(in.token match {
- case CHARLIT => in.charVal
- case INTLIT => in.intVal(isNegated).toInt
- case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
- case STRINGLIT | STRINGPART => in.strVal.intern()
- case TRUE => true
- case FALSE => false
- case NULL => null
- case _ =>
- syntaxErrorOrIncomplete("illegal literal", true)
- null
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Offset = in.offset): Tree = atPos(start) {
+ def finish(value: Any): Tree = try newLiteral(value) finally in.nextToken()
+ if (in.token == SYMBOLLIT)
+ Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
+ else if (in.token == INTERPOLATIONID)
+ interpolatedString(inPattern = inPattern)
+ else finish(in.token match {
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
+ case STRINGLIT | STRINGPART => in.strVal.intern()
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ => syntaxErrorOrIncompleteAnd("illegal literal", skipIt = true)(null)
+ })
+ }
+
+ /** Handle placeholder syntax.
+ * If evaluating the tree produces placeholders, then make it a function.
+ */
+ private def withPlaceholders(tree: =>Tree, isAny: Boolean): Tree = {
+ val savedPlaceholderParams = placeholderParams
+ placeholderParams = List()
+ var res = tree
+ if (placeholderParams.nonEmpty && !isWildcard(res)) {
+ res = atPos(res.pos)(Function(placeholderParams.reverse, res))
+ if (isAny) placeholderParams foreach (_.tpt match {
+ case tpt @ TypeTree() => tpt setType definitions.AnyTpe
+ case _ => // some ascription
})
+ placeholderParams = List()
}
+ placeholderParams = placeholderParams ::: savedPlaceholderParams
+ res
}
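`withPlaceholders`/`freshPlaceholder` implement underscore placeholder syntax: each `_` becomes a fresh synthetic parameter, and once the enclosing expression is parsed the collected parameters are wrapped in a `Function`. The user-visible equivalence, in plain Scala:

    object PlaceholderDemo {
      def main(args: Array[String]): Unit = {
        val viaPlaceholder: Int => Int = _ + 1        // parser introduces a fresh synthetic param
        val spelledOut:     Int => Int = x => x + 1   // ...and wraps the body in a Function
        assert(viaPlaceholder(41) == spelledOut(41))
      }
    }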
- private def stringOp(t: Tree, op: TermName) = {
- val str = in.strVal
+ /** Consume a USCORE and create a fresh synthetic placeholder param. */
+ private def freshPlaceholder(): Tree = {
+ val start = in.offset
+ val pname = freshTermName()
in.nextToken()
- if (str.length == 0) t
- else atPos(t.pos.startOrPoint) {
- Apply(Select(t, op), List(Literal(Constant(str))))
- }
+ val id = atPos(start)(Ident(pname))
+ val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName))
+ placeholderParams = param :: placeholderParams
+ id
}
- private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
- val start = in.offset
- val interpolator = in.name
+ private def interpolatedString(inPattern: Boolean): Tree = {
+ def errpolation() = syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected",
+ skipIt = true)(EmptyTree)
+ // Like Swiss cheese, with holes
+ def stringCheese: Tree = atPos(in.offset) {
+ val start = in.offset
+ val interpolator = in.name
- val partsBuf = new ListBuffer[Tree]
- val exprBuf = new ListBuffer[Tree]
- in.nextToken()
- while (in.token == STRINGPART) {
- partsBuf += literal()
- exprBuf += {
- if (inPattern) dropAnyBraces(pattern())
- else {
- if (in.token == IDENTIFIER) atPos(in.offset)(Ident(ident()))
- else if(in.token == LBRACE) expr()
- else if(in.token == THIS) { in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) }
- else {
- syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", true)
- EmptyTree
+ val partsBuf = new ListBuffer[Tree]
+ val exprBuf = new ListBuffer[Tree]
+ in.nextToken()
+ while (in.token == STRINGPART) {
+ partsBuf += literal()
+ exprBuf += (
+ if (inPattern) dropAnyBraces(pattern())
+ else in.token match {
+ case IDENTIFIER => atPos(in.offset)(Ident(ident()))
+ //case USCORE => freshPlaceholder() // ifonly etapolation
+ case LBRACE => expr() // dropAnyBraces(expr0(Local))
+ case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY))
+ case _ => errpolation()
}
- }
+ )
}
- }
- if (in.token == STRINGLIT) partsBuf += literal()
+ if (in.token == STRINGLIT) partsBuf += literal()
- val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
- val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
- t2 setPos t2.pos.makeTransparent
- val t3 = Select(t2, interpolator) setPos t2.pos
- atPos(start) { Apply(t3, exprBuf.toList) }
+ val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
+ val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
+ t2 setPos t2.pos.makeTransparent
+ val t3 = Select(t2, interpolator) setPos t2.pos
+ atPos(start) { Apply(t3, exprBuf.toList) }
+ }
+ if (inPattern) stringCheese
+ else withPlaceholders(stringCheese, isAny = true) // strinterpolator params are Any* by definition
}
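`interpolatedString` above splits the literal into its fixed parts and embedded expressions and rebuilds a `StringContext(parts*)` receiver with the interpolator applied to the args. That shape can be written by hand, which makes the desugaring easy to check:

    object InterpolationDemo {
      def main(args: Array[String]): Unit = {
        val x = 42
        val sugared   = s"value is $x!"
        val desugared = StringContext("value is ", "!").s(x)   // parts around the hole, then the argument
        assert(sugared == desugared)
      }
    }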
/* ------------- NEW LINES ------------------------------------------------- */
@@ -1190,12 +1260,12 @@ self =>
in.nextToken()
}
- def newLineOptWhenFollowedBy(token: Int) {
+ def newLineOptWhenFollowedBy(token: Token) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && in.next.token == token) newLineOpt()
}
- def newLineOptWhenFollowing(p: Int => Boolean) {
+ def newLineOptWhenFollowing(p: Token => Boolean) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
}
@@ -1210,7 +1280,7 @@ self =>
if (in.token == COLON) { in.nextToken(); typ() }
else TypeTree()
- def typeOrInfixType(location: Int): Tree =
+ def typeOrInfixType(location: Location): Tree =
if (location == Local) typ()
else startInfixType()
@@ -1221,7 +1291,7 @@ self =>
* WildcardType ::= `_' TypeBounds
* }}}
*/
- def wildcardType(start: Int) = {
+ def wildcardType(start: Offset) = {
val pname = freshTypeName("_$")
val t = atPos(start)(Ident(pname))
val bounds = typeBounds()
@@ -1232,15 +1302,6 @@ self =>
/* ----------- EXPRESSIONS ------------------------------------------------ */
- /** {{{
- * EqualsExpr ::= `=' Expr
- * }}}
- */
- def equalsExpr(): Tree = {
- accept(EQUALS)
- expr()
- }
-
def condExpr(): Tree = {
if (in.token == LPAREN) {
in.nextToken()
@@ -1249,14 +1310,14 @@ self =>
r
} else {
accept(LPAREN)
- Literal(Constant(true))
+ newLiteral(true)
}
}
/* hook for IDE; unlike an expression, this can be stubbed
* don't use for any tree that can be inspected in the parser!
*/
- def statement(location: Int): Tree = expr(location) // !!! still needed?
+ def statement(location: Location): Tree = expr(location) // !!! still needed?
/** {{{
* Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
@@ -1283,27 +1344,16 @@ self =>
*/
def expr(): Tree = expr(Local)
- def expr(location: Int): Tree = {
- var savedPlaceholderParams = placeholderParams
- placeholderParams = List()
- var res = expr0(location)
- if (!placeholderParams.isEmpty && !isWildcard(res)) {
- res = atPos(res.pos){ Function(placeholderParams.reverse, res) }
- placeholderParams = List()
- }
- placeholderParams = placeholderParams ::: savedPlaceholderParams
- res
- }
-
+ def expr(location: Location): Tree = withPlaceholders(expr0(location), isAny = false)
- def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
+ def expr0(location: Location): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
def parseIf = atPos(in.skipToken()) {
val cond = condExpr()
newLinesOpt()
val thenp = expr()
val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
- else Literal(Constant())
+ else literalUnit
If(cond, thenp, elsep)
}
parseIf
@@ -1345,14 +1395,13 @@ self =>
parseWhile
case DO =>
def parseDo = {
- val start = in.offset
atPos(in.skipToken()) {
val lname: Name = freshTermName(nme.DO_WHILE_PREFIX)
val body = expr()
if (isStatSep) in.nextToken()
accept(WHILE)
val cond = condExpr()
- makeDoWhile(lname, body, cond)
+ makeDoWhile(lname.toTermName, body, cond)
}
}
parseDo
@@ -1365,9 +1414,9 @@ self =>
newLinesOpt()
if (in.token == YIELD) {
in.nextToken()
- makeForYield(enums, expr())
+ gen.mkFor(enums, gen.Yield(expr()))
} else {
- makeFor(enums, expr())
+ gen.mkFor(enums, expr())
}
}
def adjustStart(tree: Tree) =
@@ -1378,7 +1427,7 @@ self =>
case RETURN =>
def parseReturn =
atPos(in.skipToken()) {
- Return(if (isExprIntro) expr() else Literal(Constant()))
+ Return(if (isExprIntro) expr() else literalUnit)
}
parseReturn
case THROW =>
@@ -1395,7 +1444,7 @@ self =>
if (in.token == EQUALS) {
t match {
case Ident(_) | Select(_, _) | Apply(_, _) =>
- t = atPos(t.pos.startOrPoint, in.skipToken()) { makeAssign(t, expr()) }
+ t = atPos(t.pos.start, in.skipToken()) { gen.mkAssign(t, expr()) }
case _ =>
}
} else if (in.token == COLON) {
@@ -1406,16 +1455,16 @@ self =>
val uscorePos = in.skipToken()
if (isIdent && in.name == nme.STAR) {
in.nextToken()
- t = atPos(t.pos.startOrPoint, colonPos) {
+ t = atPos(t.pos.start, colonPos) {
Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
}
} else {
- syntaxErrorOrIncomplete("`*' expected", true)
+ syntaxErrorOrIncomplete("`*' expected", skipIt = true)
}
- } else if (in.token == AT) {
+ } else if (isAnnotation) {
t = (t /: annotations(skipNewLines = false))(makeAnnotated)
} else {
- t = atPos(t.pos.startOrPoint, colonPos) {
+ t = atPos(t.pos.start, colonPos) {
val tpt = typeOrInfixType(location)
if (isWildcard(t))
(placeholderParams: @unchecked) match {
@@ -1428,18 +1477,18 @@ self =>
}
}
} else if (in.token == MATCH) {
- t = atPos(t.pos.startOrPoint, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
+ t = atPos(t.pos.start, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
}
// in order to allow anonymous functions as statements (as opposed to expressions) inside
// templates, we have to disambiguate them from self type declarations - bug #1565
// The case still missed is unparenthesized single argument, like "x: Int => x + 1", which
// may be impossible to distinguish from a self-type and so remains an error. (See #1564)
def lhsIsTypedParamList() = t match {
- case Parens(xs) if xs forall (_.isInstanceOf[Typed]) => true
+ case Parens(xs) if xs.forall(isTypedParam) => true
case _ => false
}
if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList)) {
- t = atPos(t.pos.startOrPoint, in.skipToken()) {
+ t = atPos(t.pos.start, in.skipToken()) {
Function(convertToParams(t), if (location != InBlock) expr() else block())
}
}
@@ -1448,12 +1497,14 @@ self =>
parseOther
}
+ def isTypedParam(t: Tree) = t.isInstanceOf[Typed]
+
/** {{{
* Expr ::= implicit Id => Expr
* }}}
*/
- def implicitClosure(start: Int, location: Int): Tree = {
+ def implicitClosure(start: Offset, location: Location): Tree = {
val param0 = convertToParam {
atPos(in.offset) {
Ident(ident()) match {
@@ -1479,28 +1530,19 @@ self =>
def postfixExpr(): Tree = {
val start = in.offset
val base = opstack
- var top = prefixExpr()
- while (isIdent) {
- top = reduceStack(isExpr = true, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
- val op = in.name
- opstack = OpInfo(top, op, in.offset) :: opstack
- ident()
+ def loop(top: Tree): Tree = if (!isIdent) top else {
+ pushOpInfo(reduceExprStack(base, top))
newLineOptWhenFollowing(isExprIntroToken)
- if (isExprIntro) {
- val next = prefixExpr()
- if (next == EmptyTree)
- return reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
- top = next
- } else {
- // postfix expression
- val topinfo = opstack.head
- opstack = opstack.tail
- val od = stripParens(reduceStack(isExpr = true, base, topinfo.operand, 0, leftAssoc = true))
- return makePostfixSelect(start, topinfo.offset, od, topinfo.operator)
- }
+ if (isExprIntro)
+ prefixExpr() match {
+ case EmptyTree => reduceExprStack(base, top)
+ case next => loop(next)
+ }
+ else finishPostfixOp(start, base, popOpInfo())
}
- reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
+
+ reduceExprStack(base, loop(prefixExpr()))
}
/** {{{
@@ -1510,7 +1552,7 @@ self =>
def prefixExpr(): Tree = {
if (isUnaryOp) {
atPos(in.offset) {
- val name = nme.toUnaryName(rawIdent())
+ val name = nme.toUnaryName(rawIdent().toTermName)
if (name == nme.UNARY_- && isNumericLit)
simpleExprRest(literal(isNegated = true), canApply = true)
else
@@ -1544,15 +1586,9 @@ self =>
case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
path(thisOK = true, typeOK = false)
case USCORE =>
- val start = in.offset
- val pname = freshName("x$")
- in.nextToken()
- val id = atPos(start) (Ident(pname))
- val param = atPos(id.pos.focus){ makeSyntheticParam(pname) }
- placeholderParams = param :: placeholderParams
- id
+ freshPlaceholder()
case LPAREN =>
- atPos(in.offset)(makeParens(commaSeparated(expr)))
+ atPos(in.offset)(makeParens(commaSeparated(expr())))
case LBRACE =>
canApply = false
blockExpr()
@@ -1561,12 +1597,11 @@ self =>
val nstart = in.skipToken()
val npos = r2p(nstart, nstart, in.lastOffset)
val tstart = in.offset
- val (parents, argss, self, stats) = template(isTrait = false)
+ val (parents, self, stats) = template()
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
- makeNew(parents, self, stats, argss, npos, cpos)
+ gen.mkNew(parents, self, stats, npos, cpos)
case _ =>
- syntaxErrorOrIncomplete("illegal start of simple expression", true)
- errorTermTree
+ syntaxErrorOrIncompleteAnd("illegal start of simple expression", skipIt = true)(errorTermTree)
}
simpleExprRest(t, canApply = canApply)
}
@@ -1583,14 +1618,14 @@ self =>
case Ident(_) | Select(_, _) | Apply(_, _) =>
var app: Tree = t1
while (in.token == LBRACKET)
- app = atPos(app.pos.startOrPoint, in.offset)(TypeApply(app, exprTypeArgs()))
+ app = atPos(app.pos.start, in.offset)(TypeApply(app, exprTypeArgs()))
simpleExprRest(app, canApply = true)
case _ =>
t1
}
case LPAREN | LBRACE if (canApply) =>
- val app = atPos(t.pos.startOrPoint, in.offset) {
+ val app = atPos(t.pos.start, in.offset) {
// look for anonymous function application like (f _)(x) and
// translate to (f _).apply(x), bug #460
val sel = t match {
@@ -1603,7 +1638,7 @@ self =>
}
simpleExprRest(app, canApply = true)
case USCORE =>
- atPos(t.pos.startOrPoint, in.skipToken()) {
+ atPos(t.pos.start, in.skipToken()) {
Typed(stripParens(t), Function(Nil, EmptyTree))
}
case _ =>
@@ -1617,14 +1652,9 @@ self =>
* }}}
*/
def argumentExprs(): List[Tree] = {
- def args(): List[Tree] = commaSeparated {
- val maybeNamed = isIdent
- expr() match {
- case a @ Assign(id, rhs) if maybeNamed =>
- atPos(a.pos) { AssignOrNamedArg(id, rhs) }
- case e => e
- }
- }
+ def args(): List[Tree] = commaSeparated(
+ if (isIdent) treeInfo.assignmentToMaybeNamedArg(expr()) else expr()
+ )
in.token match {
case LBRACE => List(blockExpr())
case LPAREN => inParens(if (in.token == RPAREN) Nil else args())
@@ -1655,13 +1685,16 @@ self =>
*/
def block(): Tree = makeBlock(blockStatSeq())
+ def caseClause(): CaseDef =
+ atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock()))
+
/** {{{
* CaseClauses ::= CaseClause {CaseClause}
* CaseClause ::= case Pattern [Guard] `=>' Block
* }}}
*/
def caseClauses(): List[CaseDef] = {
- val cases = caseSeparated { atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock())) }
+ val cases = caseSeparated { caseClause() }
if (cases.isEmpty) // trigger error if there are no cases
accept(CASE)
@@ -1687,22 +1720,25 @@ self =>
* | val Pattern1 `=' Expr
* }}}
*/
- def enumerators(): List[Enumerator] = {
- val enums = new ListBuffer[Enumerator]
- generator(enums, eqOK = false)
+ def enumerators(): List[Tree] = {
+ val enums = new ListBuffer[Tree]
+ enums ++= enumerator(isFirst = true)
while (isStatSep) {
in.nextToken()
- if (in.token == IF) enums += makeFilter(in.offset, guard())
- else generator(enums, eqOK = true)
+ enums ++= enumerator(isFirst = false)
}
enums.toList
}
+ def enumerator(isFirst: Boolean, allowNestedIf: Boolean = true): List[Tree] =
+ if (in.token == IF && !isFirst) makeFilter(in.offset, guard()) :: Nil
+ else generator(!isFirst, allowNestedIf)
+
/** {{{
* Generator ::= Pattern1 (`<-' | `=') Expr [Guard]
* }}}
*/
- def generator(enums: ListBuffer[Enumerator], eqOK: Boolean) {
+ def generator(eqOK: Boolean, allowNestedIf: Boolean = true): List[Tree] = {
val start = in.offset
val hasVal = in.token == VAL
if (hasVal)
@@ -1720,13 +1756,22 @@ self =>
if (hasEq && eqOK) in.nextToken()
else accept(LARROW)
val rhs = expr()
- enums += makeGenerator(r2p(start, point, in.lastOffset max start), pat, hasEq, rhs)
- // why max above? IDE stress tests have shown that lastOffset could be less than start,
+
+ def loop(): List[Tree] =
+ if (in.token != IF) Nil
+ else makeFilter(in.offset, guard()) :: loop()
+
+ val tail =
+ if (allowNestedIf) loop()
+ else Nil
+
+ // why max? IDE stress tests have shown that lastOffset could be less than start,
// I guess this happens if, instead of a for-expression, we sit on a closing paren.
- while (in.token == IF) enums += makeFilter(in.offset, guard())
+ val genPos = r2p(start, point, in.lastOffset max start)
+ gen.mkGenerator(genPos, pat, hasEq, rhs) :: tail
}
- def makeFilter(start: Int, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.endOrPoint), tree)
+ def makeFilter(start: Offset, tree: Tree) = gen.Filter(tree).setPos(r2p(start, tree.pos.point, tree.pos.end))
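`enumerators`/`generator` above collect generators and guards as plain trees, and `gen.mkFor` later rewrites them into method calls. The user-level equivalence for a generator followed by a filter, in plain Scala:

    object ForDesugarDemo {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3, 4)
        val viaFor   = for (x <- xs if x % 2 == 0) yield x * 10   // generator plus a filter enumerator
        val viaCalls = xs.withFilter(_ % 2 == 0).map(_ * 10)      // what those enumerators desugar into
        assert(viaFor == viaCalls)
      }
    }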
/* -------- PATTERNS ------------------------------------------- */
@@ -1749,10 +1794,12 @@ self =>
in.nextToken()
if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start)
else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) }
- case IDENTIFIER if nme.isVariableName(in.name) =>
- atPos(start) { Bind(identForType(), EmptyTree) }
case _ =>
- typ()
+ typ() match {
+ case Ident(name: TypeName) if nme.isVariableName(name) =>
+ atPos(start) { Bind(name, EmptyTree) }
+ case t => t
+ }
}
}
@@ -1792,7 +1839,7 @@ self =>
def pattern1(): Tree = pattern2() match {
case p @ Ident(name) if in.token == COLON =>
if (treeInfo.isVarPattern(p))
- atPos(p.pos.startOrPoint, in.skipToken())(Typed(p, compoundType()))
+ atPos(p.pos.start, in.skipToken())(Typed(p, compoundType()))
else {
syntaxError(in.offset, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)")
p
@@ -1808,7 +1855,6 @@ self =>
* }}}
*/
def pattern2(): Tree = {
- val nameOffset = in.offset
val p = pattern3()
if (in.token != AT) p
@@ -1818,7 +1864,7 @@ self =>
pattern3()
case Ident(name) if treeInfo.isVarPattern(p) =>
in.nextToken()
- atPos(p.pos.startOrPoint) { Bind(name, pattern3()) }
+ atPos(p.pos.start) { Bind(name, pattern3()) }
case _ => p
}
}
@@ -1829,71 +1875,51 @@ self =>
* }}}
*/
def pattern3(): Tree = {
- var top = simplePattern(badPattern3)
- // after peekahead
- def acceptWildStar() = atPos(top.pos.startOrPoint, in.prev.offset)(Star(stripParens(top)))
- def peekahead() = {
- in.prev copyFrom in
- in.nextToken()
- }
- def pushback() = {
- in.next copyFrom in
- in copyFrom in.prev
- }
+ val top = simplePattern(badPattern3)
+ val base = opstack
// See SI-3189, SI-4832 for motivation. Cf SI-3480 for counter-motivation.
- // TODO: dredge out the remnants of regexp patterns.
- // /{/ peek for _*) or _*} (for xml escape)
- if (isSequenceOK) {
- top match {
- case Ident(nme.WILDCARD) if (isRawStar) =>
- peekahead()
- in.token match {
- case RBRACE if (isXML) => return acceptWildStar()
- case RPAREN if (!isXML) => return acceptWildStar()
- case _ => pushback()
- }
- case _ =>
- }
+ def isCloseDelim = in.token match {
+ case RBRACE => isXML
+ case RPAREN => !isXML
+ case _ => false
}
- val base = opstack
- while (isIdent && in.name != raw.BAR) {
- top = reduceStack(isExpr = false, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
- val op = in.name
- opstack = OpInfo(top, op, in.offset) :: opstack
- ident()
- top = simplePattern(badPattern3)
+ def checkWildStar: Tree = top match {
+ case Ident(nme.WILDCARD) if isSequenceOK && isRawStar => peekingAhead (
+ if (isCloseDelim) atPos(top.pos.start, in.prev.offset)(Star(stripParens(top)))
+ else EmptyTree
+ )
+ case _ => EmptyTree
+ }
+ def loop(top: Tree): Tree = reducePatternStack(base, top) match {
+ case next if isIdentExcept(raw.BAR) => pushOpInfo(next) ; loop(simplePattern(badPattern3))
+ case next => next
}
- stripParens(reduceStack(isExpr = false, base, top, 0, leftAssoc = true))
+ checkWildStar orElse stripParens(loop(top))
}
+
def badPattern3(): Tree = {
- def isComma = in.token == COMMA
- def isAnyBrace = in.token == RPAREN || in.token == RBRACE
- val badStart = "illegal start of simple pattern"
+ def isComma = in.token == COMMA
+ def isDelimiter = in.token == RPAREN || in.token == RBRACE
+ def isCommaOrDelimiter = isComma || isDelimiter
+ val (isUnderscore, isStar) = opstack match {
+ case OpInfo(Ident(nme.WILDCARD), nme.STAR, _, _) :: _ => (true, true)
+ case OpInfo(_, nme.STAR, _, _) :: _ => (false, true)
+ case _ => (false, false)
+ }
+ def isSeqPatternClose = isUnderscore && isStar && isSequenceOK && isDelimiter
+ val preamble = "bad simple pattern:"
+ val subtext = (isUnderscore, isStar, isSequenceOK) match {
+ case (true, true, true) if isComma => "bad use of _* (a sequence pattern must be the last pattern)"
+ case (true, true, true) if isDelimiter => "bad brace or paren after _*"
+ case (true, true, false) if isDelimiter => "bad use of _* (sequence pattern not allowed)"
+ case (false, true, true) if isDelimiter => "use _* to match a sequence"
+ case (false, true, _) if isCommaOrDelimiter => "trailing * is not a valid pattern"
+ case _ => null
+ }
+ val msg = if (subtext != null) s"$preamble $subtext" else "illegal start of simple pattern"
// better recovery if we don't skip the delimiters of patterns
- var skip = !(isComma || isAnyBrace)
- val msg = if (!opstack.isEmpty && opstack.head.operator == nme.STAR) {
- opstack.head.operand match {
- case Ident(nme.WILDCARD) =>
- if (isSequenceOK && isComma)
- "bad use of _* (a sequence pattern must be the last pattern)"
- else if (isSequenceOK && isAnyBrace) {
- skip = true // do skip bad paren; scanner may skip bad brace already
- "bad brace or paren after _*"
- } else if (!isSequenceOK && isAnyBrace)
- "bad use of _* (sequence pattern not allowed)"
- else badStart
- case _ =>
- if (isSequenceOK && isAnyBrace)
- "use _* to match a sequence"
- else if (isComma || isAnyBrace)
- "trailing * is not a valid pattern"
- else badStart
- }
- } else {
- badStart
- }
- syntaxErrorOrIncomplete(msg, skip)
- errorPatternTree
+ val skip = !isCommaOrDelimiter || isSeqPatternClose
+ syntaxErrorOrIncompleteAnd(msg, skip)(errorPatternTree)
}
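`pattern3`/`badPattern3` above special-case the `_*` sequence wildcard, which is only legal as the last pattern of a sequence. A legal use, including the common binding variant, in plain Scala:

    object SeqPatternDemo {
      def main(args: Array[String]): Unit = {
        val described = List(1, 2, 3) match {
          case Seq(first, rest @ _*) => s"first=$first, ${rest.size} more"   // _* must come last
          case Seq()                 => "empty"
        }
        assert(described == "first=1, 2 more")
      }
    }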
/** {{{
@@ -1909,19 +1935,15 @@ self =>
*
* XXX: Hook for IDE
*/
- def simplePattern(): Tree = {
+ def simplePattern(): Tree = (
// simple diagnostics for this entry point
- def badStart(): Tree = {
- syntaxErrorOrIncomplete("illegal start of simple pattern", true)
- errorPatternTree
- }
- simplePattern(badStart)
- }
+ simplePattern(() => syntaxErrorOrIncompleteAnd("illegal start of simple pattern", skipIt = true)(errorPatternTree))
+ )
def simplePattern(onError: () => Tree): Tree = {
val start = in.offset
in.token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
- var t = stableId()
+ val t = stableId()
in.token match {
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
@@ -1983,7 +2005,6 @@ self =>
/** Default entry points into some pattern contexts. */
def pattern(): Tree = noSeq.pattern()
- def patterns(): List[Tree] = noSeq.patterns()
def seqPatterns(): List[Tree] = seqOK.patterns()
def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser
def argumentPatterns(): List[Tree] = inParens {
@@ -1997,16 +2018,16 @@ self =>
/** Drop `private` modifier when followed by a qualifier.
* Contract `abstract` and `override` to ABSOVERRIDE
*/
- private def normalize(mods: Modifiers): Modifiers =
+ private def normalizeModifers(mods: Modifiers): Modifiers =
if (mods.isPrivate && mods.hasAccessBoundary)
- normalize(mods &~ Flags.PRIVATE)
+ normalizeModifers(mods &~ Flags.PRIVATE)
else if (mods hasAllFlags (Flags.ABSTRACT | Flags.OVERRIDE))
- normalize(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
+ normalizeModifers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
else
mods
private def addMod(mods: Modifiers, mod: Long, pos: Position): Modifiers = {
- if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", false)
+ if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", skipIt = false)
in.nextToken()
(mods | mod) withPosition (mod, pos)
}
@@ -2023,7 +2044,7 @@ self =>
if (in.token == LBRACKET) {
in.nextToken()
if (mods.hasAccessBoundary)
- syntaxError("duplicate private/protected qualifier", false)
+ syntaxError("duplicate private/protected qualifier", skipIt = false)
result = if (in.token == THIS) { in.nextToken(); mods | Flags.LOCAL }
else Modifiers(mods.flags, identForType())
accept(RBRACKET)
@@ -2046,7 +2067,7 @@ self =>
* AccessModifier ::= (private | protected) [AccessQualifier]
* }}}
*/
- def accessModifierOpt(): Modifiers = normalize {
+ def accessModifierOpt(): Modifiers = normalizeModifers {
in.token match {
case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m)))
case _ => NoMods
@@ -2060,7 +2081,7 @@ self =>
* | override
* }}}
*/
- def modifiers(): Modifiers = normalize {
+ def modifiers(): Modifiers = normalizeModifers {
def loop(mods: Modifiers): Modifiers = in.token match {
case PRIVATE | PROTECTED =>
loop(accessQualifierOpt(addMod(mods, flagTokens(in.token), tokenRange(in))))
@@ -2105,11 +2126,13 @@ self =>
def annotationExpr(): Tree = atPos(in.offset) {
val t = exprSimpleType()
if (in.token == LPAREN) New(t, multipleArgumentExprs())
- else New(t, ListOfNil)
+ else New(t, Nil)
}
/* -------- PARAMETERS ------------------------------------------- */
+ def allowTypelessParams = false
+
/** {{{
* ParamClauses ::= {ParamClause} [[nl] `(' implicit Params `)']
* ParamClause ::= [nl] `(' [Params] `)'
@@ -2130,10 +2153,10 @@ self =>
var mods = Modifiers(Flags.PARAM)
if (owner.isTypeName) {
mods = modifiers() | Flags.PARAMACCESSOR
- if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", false)
+ if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
in.token match {
case v @ (VAL | VAR) =>
- mods = mods withPosition (in.token, tokenRange(in))
+ mods = mods withPosition (in.token.toLong, tokenRange(in))
if (v == VAR) mods |= Flags.MUTABLE
in.nextToken()
case _ =>
@@ -2146,7 +2169,7 @@ self =>
val name = ident()
var bynamemod = 0
val tpt =
- if (settings.YmethodInfer.value && !owner.isTypeName && in.token != COLON) {
+ if (((settings.YmethodInfer && !owner.isTypeName) || allowTypelessParams) && in.token != COLON) {
TypeTree()
} else { // XX-METHOD-INFER
accept(COLON)
@@ -2155,11 +2178,11 @@ self =>
syntaxError(
in.offset,
(if (mods.isMutable) "`var'" else "`val'") +
- " parameters may not be call-by-name", false)
+ " parameters may not be call-by-name", skipIt = false)
else if (implicitmod != 0)
syntaxError(
in.offset,
- "implicit parameters may not be call-by-name", false)
+ "implicit parameters may not be call-by-name", skipIt = false)
else bynamemod = Flags.BYNAMEPARAM
}
paramType()
@@ -2171,7 +2194,7 @@ self =>
expr()
} else EmptyTree
atPos(start, if (name == nme.ERROR) start else nameOffset) {
- ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name, tpt, default)
+ ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
}
}
def paramClause(): List[ValDef] = {
@@ -2188,8 +2211,8 @@ self =>
val start = in.offset
newLineOptWhenFollowedBy(LPAREN)
if (ofCaseClass && in.token != LPAREN)
- deprecationWarning(in.lastOffset, "case classes without a parameter list have been deprecated;\n"+
- "use either case objects or case classes with `()' as parameter list.")
+ syntaxError(in.lastOffset, "case classes without a parameter list are not allowed;\n"+
+ "use either case objects or case classes with an explicit `()' as a parameter list.")
while (implicitmod == 0 && in.token == LPAREN) {
in.nextToken()
vds += paramClause()
@@ -2200,9 +2223,9 @@ self =>
val result = vds.toList
if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods.isImplicit)))) {
in.token match {
- case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", false)
+ case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", skipIt = false)
case EOF => incompleteInputError("auxiliary constructor needs non-implicit parameter list")
- case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", false)
+ case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", skipIt = false)
}
}
addEvidenceParams(owner, result, contextBounds)
@@ -2224,7 +2247,7 @@ self =>
if (isRawStar) {
in.nextToken()
if (useStartAsPosition) atPos(start)(repeatedApplication(t))
- else atPos(t.pos.startOrPoint, t.pos.point)(repeatedApplication(t))
+ else atPos(t.pos.start, t.pos.point)(repeatedApplication(t))
}
else t
}
@@ -2253,7 +2276,7 @@ self =>
}
}
val nameOffset = in.offset
- // TODO AM: freshName(o2p(in.skipToken()), "_$$"), will need to update test suite
+ // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite
val pname: TypeName = wildcardOrIdent().toTypeName
val param = atPos(start, nameOffset) {
val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order context bounds for now
@@ -2261,9 +2284,10 @@ self =>
}
if (contextBoundBuf ne null) {
while (in.token == VIEWBOUND) {
- contextBoundBuf += atPos(in.skipToken()) {
- makeFunctionTypeTree(List(Ident(pname)), typ())
- }
+ val msg = "Use an implicit parameter instead.\nExample: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`."
+ if (settings.future)
+ deprecationWarning(in.offset, s"View bounds are deprecated. $msg")
+ contextBoundBuf += atPos(in.skipToken())(makeFunctionTypeTree(List(Ident(pname)), typ()))
}
while (in.token == COLON) {
contextBoundBuf += atPos(in.skipToken()) {
@@ -2283,16 +2307,18 @@ self =>
* }}}
*/
def typeBounds(): TypeBoundsTree = {
- val t = TypeBoundsTree(
- bound(SUPERTYPE, tpnme.Nothing),
- bound(SUBTYPE, tpnme.Any)
- )
- t setPos wrappingPos(List(t.hi, t.lo))
+ val lo = bound(SUPERTYPE)
+ val hi = bound(SUBTYPE)
+ val t = TypeBoundsTree(lo, hi)
+ val defined = List(t.hi, t.lo) filter (_.pos.isDefined)
+
+ if (defined.nonEmpty)
+ t setPos wrappingPos(defined)
+ else
+ t setPos o2p(in.offset)
}
- def bound(tok: Int, default: TypeName): Tree =
- if (in.token == tok) { in.nextToken(); typ() }
- else atPos(o2p(in.lastOffset)) { rootScalaDot(default) }
+ def bound(tok: Token): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
/* -------- DEFS ------------------------------------------- */
@@ -2326,8 +2352,8 @@ self =>
accept(DOT)
result
}
- /** Walks down import `foo.bar.baz.{ ... }` until it ends at a
- * an underscore, a left brace, or an undotted identifier.
+    /* Walks down import `foo.bar.baz.{ ... }` until it ends at
+ * an underscore, a left brace, or an undotted identifier.
*/
def loop(expr: Tree): Tree = {
expr setPos expr.pos.makeTransparent
@@ -2411,9 +2437,9 @@ self =>
* | type [nl] TypeDcl
* }}}
*/
- def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = {
+ def defOrDcl(pos: Offset, mods: Modifiers): List[Tree] = {
if (mods.isLazy && in.token != VAL)
- syntaxError("lazy not allowed here. Only vals can be lazy", false)
+ syntaxError("lazy not allowed here. Only vals can be lazy", skipIt = false)
in.token match {
case VAL =>
patDefOrDcl(pos, mods withPosition(VAL, tokenRange(in)))
@@ -2462,17 +2488,15 @@ self =>
EmptyTree
}
def mkDefs(p: Tree, tp: Tree, rhs: Tree): List[Tree] = {
- //Console.println("DEBUG: p = "+p.toString()); // DEBUG
- val trees =
- makePatDef(newmods,
- if (tp.isEmpty) p
- else Typed(p, tp) setPos (p.pos union tp.pos),
- rhs)
+ val trees = {
+ val pat = if (tp.isEmpty) p else Typed(p, tp) setPos (p.pos union tp.pos)
+ gen.mkPatDef(newmods, pat, rhs)
+ }
if (newmods.isDeferred) {
trees match {
case List(ValDef(_, _, _, EmptyTree)) =>
- if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", false)
- case _ => syntaxError(p.pos, "pattern definition may not be abstract", false)
+ if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", skipIt = false)
+ case _ => syntaxError(p.pos, "pattern definition may not be abstract", skipIt = false)
}
}
trees
@@ -2522,7 +2546,7 @@ self =>
* }}}
*/
def funDefOrDcl(start : Int, mods: Modifiers): Tree = {
- in.nextToken
+ in.nextToken()
if (in.token == THIS) {
atPos(start, in.skipToken()) {
val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false)
@@ -2536,12 +2560,12 @@ self =>
}
else {
val nameOffset = in.offset
- val name = ident()
+ val name = identOrMacro()
funDefRest(start, nameOffset, mods, name)
}
}
- def funDefRest(start: Int, nameOffset: Int, mods: Modifiers, name: Name): Tree = {
+ def funDefRest(start: Offset, nameOffset: Offset, mods: Modifiers, name: Name): Tree = {
val result = atPos(start, if (name.toTermName == nme.ERROR) start else nameOffset) {
var newmods = mods
// contextBoundBuf is for context bounded type parameters of the form
@@ -2554,16 +2578,22 @@ self =>
var restype = fromWithinReturnType(typedOpt())
val rhs =
if (isStatSep || in.token == RBRACE) {
- if (restype.isEmpty) restype = scalaUnitConstr
+ if (restype.isEmpty) {
+ if (settings.future)
+ deprecationWarning(in.lastOffset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit`.")
+ restype = scalaUnitConstr
+ }
newmods |= Flags.DEFERRED
EmptyTree
} else if (restype.isEmpty && in.token == LBRACE) {
+ if (settings.future)
+ deprecationWarning(in.offset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit =`.")
restype = scalaUnitConstr
blockExpr()
} else {
if (in.token == EQUALS) {
in.nextTokenAllow(nme.MACROkw)
- if (in.token == IDENTIFIER && in.name == nme.MACROkw) {
+ if (isMacro) {
in.nextToken()
newmods |= Flags.MACRO
}
@@ -2572,7 +2602,7 @@ self =>
}
expr()
}
- DefDef(newmods, name, tparams, vparamss, restype, rhs)
+ DefDef(newmods, name.toTermName, tparams, vparamss, restype, rhs)
}
signalParseProgress(result.pos)
result
@@ -2585,7 +2615,7 @@ self =>
*/
def constrExpr(vparamss: List[List[ValDef]]): Tree =
if (in.token == LBRACE) constrBlock(vparamss)
- else Block(List(selfInvocation(vparamss)), Literal(Constant()))
+ else Block(selfInvocation(vparamss) :: Nil, literalUnit)
/** {{{
* SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
@@ -2615,7 +2645,7 @@ self =>
else Nil
}
accept(RBRACE)
- Block(stats, Literal(Constant()))
+ Block(stats, literalUnit)
}
/** {{{
@@ -2624,11 +2654,10 @@ self =>
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
* }}}
*/
- def typeDefOrDcl(start: Int, mods: Modifiers): Tree = {
+ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = {
in.nextToken()
newLinesOpt()
atPos(start, in.offset) {
- val nameOffset = in.offset
val name = identForType()
// @M! a type alias as well as an abstract type may declare type parameters
val tparams = typeParamClauseOpt(name, null)
@@ -2636,11 +2665,10 @@ self =>
case EQUALS =>
in.nextToken()
TypeDef(mods, name, tparams, typ())
- case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
+ case t if t == SUPERTYPE || t == SUBTYPE || t == COMMA || t == RBRACE || isStatSep(t) =>
TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds())
case _ =>
- syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", true)
- EmptyTree
+ syntaxErrorOrIncompleteAnd("`=', `>:', or `<:' expected", skipIt = true)(EmptyTree)
}
}
}
@@ -2659,8 +2687,8 @@ self =>
* | [override] trait TraitDef
* }}}
*/
- def tmplDef(pos: Int, mods: Modifiers): Tree = {
- if (mods.isLazy) syntaxError("classes cannot be lazy", false)
+ def tmplDef(pos: Offset, mods: Modifiers): Tree = {
+ if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false)
in.token match {
case TRAIT =>
classDef(pos, (mods | Flags.TRAIT | Flags.ABSTRACT) withPosition (Flags.TRAIT, tokenRange(in)))
@@ -2673,8 +2701,7 @@ self =>
case CASEOBJECT =>
objectDef(pos, (mods | Flags.CASE) withPosition (Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'object', thus take prev*/)))
case _ =>
- syntaxErrorOrIncomplete("expected start of definition", true)
- EmptyTree
+ syntaxErrorOrIncompleteAnd("expected start of definition", skipIt = true)(EmptyTree)
}
}
@@ -2684,8 +2711,8 @@ self =>
* TraitDef ::= Id [TypeParamClause] RequiresTypeOpt TraitTemplateOpt
* }}}
*/
- def classDef(start: Int, mods: Modifiers): ClassDef = {
- in.nextToken
+ def classDef(start: Offset, mods: Modifiers): ClassDef = {
+ in.nextToken()
val nameOffset = in.offset
val name = identForType()
atPos(start, if (name == tpnme.ERROR) start else nameOffset) {
@@ -2693,12 +2720,13 @@ self =>
val contextBoundBuf = new ListBuffer[Tree]
val tparams = typeParamClauseOpt(name, contextBoundBuf)
classContextBounds = contextBoundBuf.toList
- val tstart = (in.offset :: classContextBounds.map(_.pos.startOrPoint)).min
+ val tstart = (in.offset :: classContextBounds.map(_.pos.start)).min
if (!classContextBounds.isEmpty && mods.isTrait) {
- syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
+ val viewBoundsExist = if (settings.future) "" else " nor view bounds `<% ...'"
+ syntaxError(s"traits cannot have type parameters with context bounds `: ...'$viewBoundsExist", skipIt = false)
classContextBounds = List()
}
- val constrAnnots = constructorAnnotations()
+ val constrAnnots = if (!mods.isTrait) constructorAnnotations() else Nil
val (constrMods, vparamss) =
if (mods.isTrait) (Modifiers(Flags.TRAIT), List())
else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase))
@@ -2706,11 +2734,10 @@ self =>
if (mods.isTrait) {
if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
} else if (in.token == SUBTYPE) {
- syntaxError("classes are not allowed to be virtual", false)
+ syntaxError("classes are not allowed to be virtual", skipIt = false)
}
val template = templateOpt(mods1, name, constrMods withAnnotations constrAnnots, vparamss, tstart)
- if (isInterface(mods1, template.body)) mods1 |= Flags.INTERFACE
- val result = ClassDef(mods1, name, tparams, template)
+ val result = gen.mkClassDef(mods1, name, tparams, template)
// Context bounds generate implicit parameters (part of the template) with types
// from tparams: we need to ensure these don't overlap
if (!classContextBounds.isEmpty)
@@ -2724,37 +2751,69 @@ self =>
* ObjectDef ::= Id ClassTemplateOpt
* }}}
*/
- def objectDef(start: Int, mods: Modifiers): ModuleDef = {
- in.nextToken
+ def objectDef(start: Offset, mods: Modifiers): ModuleDef = {
+ in.nextToken()
val nameOffset = in.offset
val name = ident()
val tstart = in.offset
atPos(start, if (name == nme.ERROR) start else nameOffset) {
val mods1 = if (in.token == SUBTYPE) mods | Flags.DEFERRED else mods
val template = templateOpt(mods1, name, NoMods, Nil, tstart)
- ModuleDef(mods1, name, template)
+ ModuleDef(mods1, name.toTermName, template)
}
}
+ /** Create a tree representing a package object, converting
+ * {{{
+ * package object foo { ... }
+ * }}}
+ * to
+ * {{{
+ * package foo {
+ * object `package` { ... }
+ * }
+ * }}}
+ */
+ def packageObjectDef(start: Offset): PackageDef = {
+ val defn = objectDef(in.offset, NoMods)
+ val pidPos = o2p(defn.pos.startOrPoint)
+ val pkgPos = r2p(start, pidPos.point)
+ gen.mkPackageObject(defn, pidPos, pkgPos)
+ }
+ def packageOrPackageObject(start: Offset): Tree = (
+ if (in.token == OBJECT)
+ joinComment(packageObjectDef(start) :: Nil).head
+ else {
+ in.flushDoc
+ makePackaging(start, pkgQualId(), inBracesOrNil(topStatSeq()))
+ }
+ )
+ // TODO - eliminate this and use "def packageObjectDef" (see call site of this
+ // method for small elaboration.)
+ def makePackageObject(start: Offset, objDef: ModuleDef): PackageDef = objDef match {
+ case ModuleDef(mods, name, impl) =>
+ makePackaging(
+ start, atPos(o2p(objDef.pos.start)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
+ }
+
/** {{{
* ClassParents ::= AnnotType {`(' [Exprs] `)'} {with AnnotType}
* TraitParents ::= AnnotType {with AnnotType}
* }}}
*/
- def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
- val parents = new ListBuffer[Tree] += startAnnotType()
- val argss = (
- // TODO: the insertion of ListOfNil here is where "new Foo" becomes
- // indistinguishable from "new Foo()".
- if (in.token == LPAREN && !isTrait) multipleArgumentExprs()
- else ListOfNil
- )
-
- while (in.token == WITH) {
- in.nextToken()
- parents += startAnnotType()
+ def templateParents(): List[Tree] = {
+ val parents = new ListBuffer[Tree]
+ def readAppliedParent() = {
+ val start = in.offset
+ val parent = startAnnotType()
+ parents += (in.token match {
+ case LPAREN => atPos(start)((parent /: multipleArgumentExprs())(Apply.apply))
+ case _ => parent
+ })
}
- (parents.toList, argss)
+ readAppliedParent()
+ while (in.token == WITH) { in.nextToken(); readAppliedParent() }
+ parents.toList
}
/** {{{
@@ -2764,79 +2823,75 @@ self =>
* EarlyDef ::= Annotations Modifiers PatDef
* }}}
*/
- def template(isTrait: Boolean): (List[Tree], List[List[Tree]], ValDef, List[Tree]) = {
+ def template(): (List[Tree], ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
// @S: pre template body cannot stub like post body can!
val (self, body) = templateBody(isPre = true)
- if (in.token == WITH && self.isEmpty) {
- val earlyDefs: List[Tree] = body flatMap {
- case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
- List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
- case tdef @ TypeDef(mods, name, tparams, rhs) =>
- List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
- case docdef @ DocDef(comm, rhs) =>
- List(treeCopy.DocDef(docdef, comm, rhs))
- case stat if !stat.isEmpty =>
- syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", false)
- List()
- case _ => List()
- }
+ if (in.token == WITH && (self eq noSelfType)) {
+ val earlyDefs: List[Tree] = body.map(ensureEarlyDef).filter(_.nonEmpty)
in.nextToken()
- val (parents, argss) = templateParents(isTrait = isTrait)
- val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
- (parents, argss, self1, earlyDefs ::: body1)
+ val parents = templateParents()
+ val (self1, body1) = templateBodyOpt(parenMeansSyntaxError = false)
+ (parents, self1, earlyDefs ::: body1)
} else {
- (List(), ListOfNil, self, body)
+ (List(), self, body)
}
} else {
- val (parents, argss) = templateParents(isTrait = isTrait)
- val (self, body) = templateBodyOpt(traitParentSeen = isTrait)
- (parents, argss, self, body)
- }
+ val parents = templateParents()
+ val (self, body) = templateBodyOpt(parenMeansSyntaxError = false)
+ (parents, self, body)
+ }
+ }
+
+ def ensureEarlyDef(tree: Tree): Tree = tree match {
+ case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
+ copyValDef(vdef)(mods = mods | Flags.PRESUPER)
+ case tdef @ TypeDef(mods, name, tparams, rhs) =>
+ deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.")
+ treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs)
+ case docdef @ DocDef(comm, rhs) =>
+ treeCopy.DocDef(docdef, comm, rhs)
+ case stat if !stat.isEmpty =>
+ syntaxError(stat.pos, "only concrete field definitions allowed in early object initialization section", skipIt = false)
+ EmptyTree
+ case _ =>
+ EmptyTree
}
- def isInterface(mods: Modifiers, body: List[Tree]): Boolean =
- mods.isTrait && (body forall treeInfo.isInterfaceMember)
-
/** {{{
* ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody]
* TraitTemplateOpt ::= TraitExtends TraitTemplate | [[`extends'] TemplateBody] | `<:' TemplateBody
* TraitExtends ::= `extends' | `<:'
* }}}
*/
- def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
- val (parents0, argss, self, body) = (
+ def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Offset): Template = {
+ val (parents, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
- template(isTrait = mods.isTrait)
+ template()
}
else {
newLineOptWhenFollowedBy(LBRACE)
- val (self, body) = templateBodyOpt(traitParentSeen = false)
- (List(), ListOfNil, self, body)
+ val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName)
+ (List(), self, body)
}
)
- def anyrefParents() = {
- val caseParents = if (mods.isCase) List(productConstr, serializableConstr) else Nil
- parents0 ::: caseParents match {
- case Nil => List(atPos(o2p(in.offset))(scalaAnyRefConstr))
- case ps => ps
- }
- }
def anyvalConstructor() = (
// Not a well-formed constructor, has to be finished later - see note
// regarding AnyVal constructor in AddInterfaces.
- DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, Literal(Constant())))
+ DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit))
)
- val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
+ val parentPos = o2p(in.offset)
+ val tstart1 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
- atPos(tstart0) {
+ atPos(tstart1) {
// Exclude only the 9 primitives plus AnyVal.
if (inScalaRootPackage && ScalaValueClassNames.contains(name))
- Template(parents0, self, anyvalConstructor :: body)
+ Template(parents, self, anyvalConstructor :: body)
else
- Template(anyrefParents, self, constrMods, vparamss, argss, body, o2p(tstart))
+ gen.mkTemplate(gen.mkParents(mods, parents, parentPos),
+ self, constrMods, vparamss, body, o2p(tstart))
}
}
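A note on the rewritten templateParents/readAppliedParent above: instead of returning parent types and their argument lists separately, each argument list is now folded into the parent tree itself, so `new C(1)(2)` becomes nested Apply nodes. A minimal standalone sketch of that fold, using hypothetical Ident/Apply case classes rather than the compiler's trees:

    object FoldApplySketch {
      sealed trait Tree
      final case class Ident(name: String) extends Tree
      final case class Apply(fun: Tree, args: List[Tree]) extends Tree

      // Same shape as (parent /: multipleArgumentExprs())(Apply.apply) above:
      // each successive argument list wraps the accumulated tree in one more Apply.
      def applied(parent: Tree, argss: List[List[Tree]]): Tree =
        argss.foldLeft(parent)(Apply.apply)

      def main(args: Array[String]): Unit = {
        val t = applied(Ident("C"), List(List(Ident("1")), List(Ident("2"))))
        println(t) // Apply(Apply(Ident(C),List(Ident(1))),List(Ident(2)))
      }
    }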
@@ -2851,15 +2906,16 @@ self =>
case (self, Nil) => (self, EmptyTree.asList)
case result => result
}
- def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
+ def templateBodyOpt(parenMeansSyntaxError: Boolean): (ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
templateBody(isPre = false)
} else {
- if (in.token == LPAREN)
- syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
- " may not have parameters", true)
- (emptyValDef, List())
+ if (in.token == LPAREN) {
+ if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", skipIt = true)
+ else abort("unexpected opening parenthesis")
+ }
+ (noSelfType, List())
}
}
@@ -2872,45 +2928,24 @@ self =>
/* -------- STATSEQS ------------------------------------------- */
/** Create a tree representing a packaging. */
- def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
+ def makePackaging(start: Offset, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
}
-/*
- pkg match {
- case id @ Ident(_) =>
- PackageDef(id, stats)
- case Select(qual, name) => // drop this to flatten packages
- makePackaging(start, qual, List(PackageDef(Ident(name), stats)))
- }
- }
-*/
- /** Create a tree representing a package object, converting
- * {{{
- * package object foo { ... }
- * }}}
- * to
- * {{{
- * package foo {
- * object `package` { ... }
- * }
- * }}}
- */
- def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
- case ModuleDef(mods, name, impl) =>
- makePackaging(
- start, atPos(o2p(objDef.pos.startOrPoint)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
- }
+ def makeEmptyPackage(start: Offset, stats: List[Tree]): PackageDef = (
+ makePackaging(start, atPos(start, start, start)(Ident(nme.EMPTY_PACKAGE_NAME)), stats)
+ )
- /** {{{
- * Packaging ::= package QualId [nl] `{' TopStatSeq `}'
- * }}}
- */
- def packaging(start: Int): Tree = {
- val nameOffset = in.offset
- val pkg = pkgQualId()
- val stats = inBracesOrNil(topStatSeq())
- makePackaging(start, pkg, stats)
+ def statSeq(stat: PartialFunction[Token, List[Tree]], errorMsg: String = "illegal start of definition"): List[Tree] = {
+ val stats = new ListBuffer[Tree]
+ def default(tok: Token) =
+ if (isStatSep) Nil
+ else syntaxErrorOrIncompleteAnd(errorMsg, skipIt = true)(Nil)
+ while (!isStatSeqEnd) {
+ stats ++= stat.applyOrElse(in.token, default)
+ acceptStatSepOpt()
+ }
+ stats.toList
}
/** {{{
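The statSeq helper above funnels every statement sequence through one loop, dispatching on the current token with PartialFunction.applyOrElse and falling back to an error (or Nil on a statement separator). A small self-contained sketch of that dispatch pattern, with made-up token constants instead of the parser's:

    object StatSeqSketch {
      type Token = Int
      val EOF = 0; val IMPORT = 1; val CLASS = 2; val SEMI = 3

      // One handler per recognizable statement start, as a partial function.
      val topStat: PartialFunction[Token, List[String]] = {
        case IMPORT => List("an import clause")
        case CLASS  => List("a class definition")
      }

      // applyOrElse falls through to `default` for tokens the handler does not cover.
      def statSeq(tokens: List[Token]): List[String] = {
        def default(tok: Token): List[String] =
          if (tok == SEMI) Nil else List(s"error at token $tok")
        tokens.takeWhile(_ != EOF).flatMap(tok => topStat.applyOrElse(tok, default))
      }

      def main(args: Array[String]): Unit =
        statSeq(List(IMPORT, SEMI, CLASS, 7, EOF)).foreach(println)
    }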
@@ -2922,54 +2957,25 @@ self =>
* |
* }}}
*/
- def topStatSeq(): List[Tree] = {
- val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd) {
- stats ++= (in.token match {
- case PACKAGE =>
- val start = in.skipToken()
- if (in.token == OBJECT)
- joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
- else {
- in.flushDoc
- List(packaging(start))
- }
- case IMPORT =>
- in.flushDoc
- importClause()
- case x if x == AT || isTemplateIntro || isModifier =>
- joinComment(List(topLevelTmplDef))
- case _ =>
- if (!isStatSep)
- syntaxErrorOrIncomplete("expected class or object definition", true)
- Nil
- })
- acceptStatSepOpt()
- }
- stats.toList
- }
-
- /** Informal - for the repl and other direct parser accessors.
- */
- def templateStats(): List[Tree] = templateStatSeq(isPre = false)._2 match {
- case Nil => EmptyTree.asList
- case stats => stats
+ def topStatSeq(): List[Tree] = statSeq(topStat, errorMsg = "expected class or object definition")
+ def topStat: PartialFunction[Token, List[Tree]] = {
+ case PACKAGE =>
+ packageOrPackageObject(in.skipToken()) :: Nil
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isAnnotation || isTemplateIntro || isModifier =>
+ joinComment(topLevelTmplDef :: Nil)
}
/** {{{
- * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
- * TemplateStat ::= Import
- * | Annotations Modifiers Def
- * | Annotations Modifiers Dcl
- * | Expr1
- * | super ArgumentExprs {ArgumentExprs}
- * |
+ * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStats
* }}}
* @param isPre specifies whether in early initializer (true) or not (false)
*/
def templateStatSeq(isPre : Boolean): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
- var self: ValDef = emptyValDef
- val stats = new ListBuffer[Tree]
+ var self: ValDef = noSelfType
+ var firstOpt: Option[Tree] = None
if (isExprIntro) {
in.flushDoc
val first = expr(InTemplate) // @S: first statement is potentially converted so cannot be stubbed.
@@ -2986,28 +2992,38 @@ self =>
}
in.nextToken()
} else {
- stats += first
+ firstOpt = Some(first)
acceptStatSepOpt()
}
}
- while (!isStatSeqEnd) {
- if (in.token == IMPORT) {
- in.flushDoc
- stats ++= importClause()
- } else if (isExprIntro) {
- in.flushDoc
- stats += statement(InTemplate)
- } else if (isDefIntro || isModifier || in.token == AT) {
- stats ++= joinComment(nonLocalDefOrDcl)
- } else if (!isStatSep) {
- syntaxErrorOrIncomplete("illegal start of definition", true)
- }
- acceptStatSepOpt()
- }
- (self, stats.toList)
+ (self, firstOpt ++: templateStats())
}
/** {{{
+ * TemplateStats ::= TemplateStat {semi TemplateStat}
+ * TemplateStat ::= Import
+ * | Annotations Modifiers Def
+ * | Annotations Modifiers Dcl
+ * | Expr1
+ * | super ArgumentExprs {ArgumentExprs}
+ * |
+ * }}}
+ */
+ def templateStats(): List[Tree] = statSeq(templateStat)
+ def templateStat: PartialFunction[Token, List[Tree]] = {
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isDefIntro || isModifier || isAnnotation =>
+ joinComment(nonLocalDefOrDcl)
+ case _ if isExprIntro =>
+ in.flushDoc
+ statement(InTemplate) :: Nil
+ }
+
+ def templateOrTopStatSeq(): List[Tree] = statSeq(templateStat.orElse(topStat))
+
+ /** {{{
* RefineStatSeq ::= RefineStat {semi RefineStat}
* RefineStat ::= Dcl
* | type TypeDef
@@ -3017,19 +3033,23 @@ self =>
def refineStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
while (!isStatSeqEnd) {
- if (isDclIntro) { // don't IDE hook
- stats ++= joinComment(defOrDcl(in.offset, NoMods))
- } else if (!isStatSep) {
- syntaxErrorOrIncomplete(
- "illegal start of declaration"+
- (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
- else ""), true)
- }
+ stats ++= refineStat()
if (in.token != RBRACE) acceptStatSep()
}
stats.toList
}
+ def refineStat(): List[Tree] =
+ if (isDclIntro) { // don't IDE hook
+ joinComment(defOrDcl(in.offset, NoMods))
+ } else if (!isStatSep) {
+ syntaxErrorOrIncomplete(
+ "illegal start of declaration"+
+ (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
+ else ""), skipIt = true)
+ Nil
+ } else Nil
+
/** overridable IDE hook for local definitions of blockStatSeq
* Here's an idea how to fill in start and end positions.
def localDef : List[Tree] = {
@@ -3047,13 +3067,13 @@ self =>
def localDef(implicitMod: Int): List[Tree] = {
val annots = annotations(skipNewLines = true)
val pos = in.offset
- val mods = (localModifiers() | implicitMod) withAnnotations annots
+ val mods = (localModifiers() | implicitMod.toLong) withAnnotations annots
val defs =
if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(pos, mods)
else List(tmplDef(pos, mods))
in.token match {
- case RBRACE | CASE => defs :+ (Literal(Constant()) setPos o2p(in.offset))
+ case RBRACE | CASE => defs :+ setInPos(literalUnit)
case _ => defs
}
}
@@ -3069,16 +3089,16 @@ self =>
*/
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd && in.token != CASE) {
+ while (!isStatSeqEnd && !isCaseDefEnd) {
if (in.token == IMPORT) {
stats ++= importClause()
acceptStatSepOpt()
}
else if (isExprIntro) {
stats += statement(InBlock)
- if (in.token != RBRACE && in.token != CASE) acceptStatSep()
+ if (!isCaseDefEnd) acceptStatSep()
}
- else if (isDefIntro || isLocalModifier || in.token == AT) {
+ else if (isDefIntro || isLocalModifier || isAnnotation) {
if (in.token == IMPLICIT) {
val start = in.skipToken()
if (isIdent) stats += implicitClosure(start, InBlock)
@@ -3093,7 +3113,7 @@ self =>
}
else {
val addendum = if (isModifier) " (no modifiers allowed here)" else ""
- syntaxErrorOrIncomplete("illegal start of statement" + addendum, true)
+ syntaxErrorOrIncomplete("illegal start of statement" + addendum, skipIt = true)
}
}
stats.toList
@@ -3103,7 +3123,7 @@ self =>
* CompilationUnit ::= {package QualId semi} TopStatSeq
* }}}
*/
- def compilationUnit(): Tree = checkNoEscapingPlaceholders {
+ def compilationUnit(): PackageDef = checkNoEscapingPlaceholders {
def topstats(): List[Tree] = {
val ts = new ListBuffer[Tree]
while (in.token == SEMI) in.nextToken()
@@ -3111,13 +3131,15 @@ self =>
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
+ // TODO - this next line is supposed to be
+ // ts += packageObjectDef(start)
+ // but this broke a scaladoc test (run/diagrams-filtering.scala) somehow.
ts ++= joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
if (in.token != EOF) {
acceptStatSep()
ts ++= topStatSeq()
}
} else {
- val nameOffset = in.offset
in.flushDoc
val pkg = pkgQualId()
@@ -3140,17 +3162,17 @@ self =>
resetPackage()
topstats() match {
- case List(stat @ PackageDef(_, _)) => stat
- case stats =>
+ case (stat @ PackageDef(_, _)) :: Nil => stat
+ case stats =>
val start =
if (stats forall (_ == EmptyTree)) 0
else {
val wpos = wrappingPos(stats)
- if (wpos.isDefined) wpos.startOrPoint
+ if (wpos.isDefined) wpos.start
else 0
}
- makePackaging(start, atPos(start, start, start) { Ident(nme.EMPTY_PACKAGE_NAME) }, stats)
+ makeEmptyPackage(start, stats)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 8d295a28d0..8011abc1ed 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -5,13 +5,15 @@
package scala.tools.nsc
package ast.parser
-import scala.tools.nsc.util.CharArrayReader
+import scala.tools.nsc.util.{ CharArrayReader, CharArrayReaderData }
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import Tokens._
-import scala.annotation.switch
-import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
-import scala.xml.Utility.{ isNameStart }
+import scala.annotation.{ switch, tailrec }
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, ArrayBuffer }
+import scala.tools.nsc.ast.parser.xml.Utility.isNameStart
+import scala.language.postfixOps
/** See Parsers.scala / ParsersCommon for some explanation of ScannersCommon.
*/
@@ -19,20 +21,24 @@ trait ScannersCommon {
val global : Global
import global._
+ /** Offset into source character array */
+ type Offset = Int
+
+ type Token = Int
+
trait CommonTokenData {
- def token: Int
+ def token: Token
def name: TermName
}
trait ScannerCommon extends CommonTokenData {
// things to fill in, in addition to buf, decodeUni which come from CharArrayReader
- def warning(off: Int, msg: String): Unit
- def error (off: Int, msg: String): Unit
- def incompleteInputError(off: Int, msg: String): Unit
- def deprecationWarning(off: Int, msg: String): Unit
+ def error(off: Offset, msg: String): Unit
+ def incompleteInputError(off: Offset, msg: String): Unit
+ def deprecationWarning(off: Offset, msg: String): Unit
}
- def createKeywordArray(keywords: Seq[(Name, Int)], defaultToken: Int): (Int, Array[Int]) = {
+ def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = {
val names = keywords sortBy (_._1.start) map { case (k, v) => (k.start, v) }
val low = names.head._1
val high = names.last._1
@@ -47,16 +53,10 @@ trait Scanners extends ScannersCommon {
val global : Global
import global._
- /** Offset into source character array */
- type Offset = Int
-
- /** An undefined offset */
- val NoOffset: Offset = -1
-
trait TokenData extends CommonTokenData {
/** the next token */
- var token: Int = EMPTY
+ var token: Token = EMPTY
/** the offset of the first character of the current token */
var offset: Offset = 0
@@ -73,24 +73,105 @@ trait Scanners extends ScannersCommon {
/** the base of a number */
var base: Int = 0
- def copyFrom(td: TokenData) = {
+ def copyFrom(td: TokenData): this.type = {
this.token = td.token
this.offset = td.offset
this.lastOffset = td.lastOffset
this.name = td.name
this.strVal = td.strVal
this.base = td.base
+ this
+ }
+ }
+
+  /** An interface to most of the mutable data in Scanner defined in TokenData
+   *  and CharArrayReader (+ next, prev fields), with copyFrom functionality
+   *  to back up and restore data (used by quasiquotes' lookingAhead).
+ */
+ trait ScannerData extends TokenData with CharArrayReaderData {
+ /** we need one token lookahead and one token history
+ */
+ val next: TokenData = new TokenData{}
+ val prev: TokenData = new TokenData{}
+
+ def copyFrom(sd: ScannerData): this.type = {
+ this.next copyFrom sd.next
+ this.prev copyFrom sd.prev
+ super[CharArrayReaderData].copyFrom(sd)
+ super[TokenData].copyFrom(sd)
+ this
}
}
- abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
+ abstract class Scanner extends CharArrayReader with TokenData with ScannerData with ScannerCommon {
private def isDigit(c: Char) = java.lang.Character isDigit c
- def isAtEnd = charOffset >= buf.length
+ private var openComments = 0
+ protected def putCommentChar(): Unit = nextChar()
- def flush = { charOffset = offset; nextChar(); this }
+ @tailrec private def skipLineComment(): Unit = ch match {
+ case SU | CR | LF =>
+ case _ => nextChar() ; skipLineComment()
+ }
+ private def maybeOpen() {
+ putCommentChar()
+ if (ch == '*') {
+ putCommentChar()
+ openComments += 1
+ }
+ }
+ private def maybeClose(): Boolean = {
+ putCommentChar()
+ (ch == '/') && {
+ putCommentChar()
+ openComments -= 1
+ openComments == 0
+ }
+ }
+ @tailrec final def skipNestedComments(): Unit = ch match {
+ case '/' => maybeOpen() ; skipNestedComments()
+ case '*' => if (!maybeClose()) skipNestedComments()
+ case SU => incompleteInputError("unclosed comment")
+ case _ => putCommentChar() ; skipNestedComments()
+ }
+ def skipDocComment(): Unit = skipNestedComments()
+ def skipBlockComment(): Unit = skipNestedComments()
- def resume(lastCode: Int) = {
+ private def skipToCommentEnd(isLineComment: Boolean) {
+ nextChar()
+ if (isLineComment) skipLineComment()
+ else {
+ openComments = 1
+ val isDocComment = (ch == '*') && { nextChar(); true }
+ if (isDocComment) {
+ // Check for the amazing corner case of /**/
+ if (ch == '/')
+ nextChar()
+ else
+ skipDocComment()
+ }
+ else skipBlockComment()
+ }
+ }
+
+ /** @pre ch == '/'
+ * Returns true if a comment was skipped.
+ */
+ def skipComment(): Boolean = ch match {
+ case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true
+ case _ => false
+ }
+ def flushDoc(): DocComment = null
+
+ /** To prevent doc comments attached to expressions from leaking out of scope
+ * onto the next documentable entity, they are discarded upon passing a right
+ * brace, bracket, or parenthesis.
+ */
+ def discardDocBuffer(): Unit = ()
+
+ def isAtEnd = charOffset >= buf.length
+
+ def resume(lastCode: Token) = {
token = lastCode
if (next.token != EMPTY && !reporter.hasErrors)
syntaxError("unexpected end of input: possible missing '}' in XML block")
@@ -98,10 +179,6 @@ trait Scanners extends ScannersCommon {
nextToken()
}
- /** the last error offset
- */
- var errOffset: Offset = NoOffset
-
/** A character buffer for literals
*/
val cbuf = new StringBuilder
@@ -119,7 +196,7 @@ trait Scanners extends ScannersCommon {
protected def emitIdentifierDeprecationWarnings = true
/** Clear buffer and set name and token */
- private def finishNamed(idtoken: Int = IDENTIFIER) {
+ private def finishNamed(idtoken: Token = IDENTIFIER) {
name = newTermName(cbuf.toString)
cbuf.clear()
token = idtoken
@@ -127,8 +204,12 @@ trait Scanners extends ScannersCommon {
val idx = name.start - kwOffset
if (idx >= 0 && idx < kwArray.length) {
token = kwArray(idx)
- if (token == IDENTIFIER && allowIdent != name && emitIdentifierDeprecationWarnings)
- deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated")
+ if (token == IDENTIFIER && allowIdent != name) {
+ if (name == nme.MACROkw)
+ syntaxError(s"$name is now a reserved word; usage as an identifier is disallowed")
+ else if (emitIdentifierDeprecationWarnings)
+ deprecationWarning(s"$name is now a reserved word; usage as an identifier is deprecated")
+ }
}
}
}
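For context on the kwArray lookup above (and createKeywordArray earlier in this diff): keyword names are interned TermNames whose `start` offsets sit in a narrow range, so keyword recognition can be a single array index at `name.start - kwOffset` rather than a map lookup. A simplified sketch with plain integer ids standing in for the name offsets (the ids 104 and 117 below are invented):

    object KeywordArraySketch {
      type Token = Int
      val IDENTIFIER = 0; val IF = 10; val ELSE = 11

      // Hypothetical "name ids"; in the real scanner these are TermName.start offsets.
      val keywords: Seq[(Int, Token)] = Seq(104 -> IF, 117 -> ELSE)

      // Dense table from (id - low) to token, defaulting to IDENTIFIER.
      val (kwOffset, kwArray): (Int, Array[Token]) = {
        val low  = keywords.map(_._1).min
        val high = keywords.map(_._1).max
        val arr  = Array.fill(high - low + 1)(IDENTIFIER)
        for ((id, tok) <- keywords) arr(id - low) = tok
        (low, arr)
      }

      def tokenFor(nameId: Int): Token = {
        val idx = nameId - kwOffset
        if (idx >= 0 && idx < kwArray.length) kwArray(idx) else IDENTIFIER
      }

      def main(args: Array[String]): Unit = {
        println(tokenFor(104)) // 10, i.e. IF
        println(tokenFor(999)) // 0, i.e. IDENTIFIER
      }
    }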
@@ -139,29 +220,6 @@ trait Scanners extends ScannersCommon {
cbuf.clear()
}
- /** Should doc comments be built? */
- def buildDocs: Boolean = forScaladoc
-
- /** holder for the documentation comment
- */
- var docComment: DocComment = null
-
- def flushDoc: DocComment = {
- val ret = docComment
- docComment = null
- ret
- }
-
- protected def foundComment(value: String, start: Int, end: Int) = ()
- protected def foundDocComment(value: String, start: Int, end: Int) = ()
-
- private class TokenData0 extends TokenData
-
- /** we need one token lookahead and one token history
- */
- val next : TokenData = new TokenData0
- val prev : TokenData = new TokenData0
-
/** a stack of tokens which indicates whether line-ends can be statement separators
* also used for keeping track of nesting levels.
* We keep track of the closing symbol of a region. This can be
@@ -173,7 +231,7 @@ trait Scanners extends ScannersCommon {
* (the STRINGLIT appears twice in succession on the stack iff the
* expression is a multiline string literal).
*/
- var sepRegions: List[Int] = List()
+ var sepRegions: List[Token] = List()
// Get next token ------------------------------------------------------------
@@ -227,12 +285,15 @@ trait Scanners extends ScannersCommon {
case RBRACE =>
while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
- if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
- docComment = null
+ if (!sepRegions.isEmpty)
+ sepRegions = sepRegions.tail
+
+ discardDocBuffer()
case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
- docComment = null
+
+ discardDocBuffer()
case ARROW =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
@@ -262,11 +323,11 @@ trait Scanners extends ScannersCommon {
next.token = EMPTY
}
- /** Insert NEWLINE or NEWLINES if
- * - we are after a newline
- * - we are within a { ... } or on toplevel (wrt sepRegions)
- * - the current token can start a statement and the one before can end it
- * insert NEWLINES if we are past a blank line, NEWLINE otherwise
+ /* Insert NEWLINE or NEWLINES if
+ * - we are after a newline
+ * - we are within a { ... } or on toplevel (wrt sepRegions)
+ * - the current token can start a statement and the one before can end it
+ * insert NEWLINES if we are past a blank line, NEWLINE otherwise
*/
if (!applyBracePatch() && afterLineEnd() && inLastOfStat(lastToken) && inFirstOfStat(token) &&
(sepRegions.isEmpty || sepRegions.head == RBRACE)) {
@@ -328,7 +389,7 @@ trait Scanners extends ScannersCommon {
// println("blank line found at "+lastOffset+":"+(lastOffset to idx).map(buf(_)).toList)
return true
}
- if (idx == end) return false
+ if (idx == end) return false
} while (ch <= ' ')
}
idx += 1; ch = buf(idx)
@@ -375,7 +436,7 @@ trait Scanners extends ScannersCommon {
getOperatorRest()
}
}
- fetchLT
+ fetchLT()
case '~' | '!' | '@' | '#' | '%' |
'^' | '*' | '+' | '-' | /*'<' | */
'>' | '?' | ':' | '=' | '&' |
@@ -399,20 +460,11 @@ trait Scanners extends ScannersCommon {
nextChar()
base = 16
} else {
- /**
- * What should leading 0 be in the future? It is potentially dangerous
- * to let it be base-10 because of history. Should it be an error? Is
- * there a realistic situation where one would need it?
- */
- if (isDigit(ch)) {
- if (opt.future) syntaxError("Non-zero numbers may not have a leading zero.")
- else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.")
- }
base = 8
}
getNumber()
}
- fetchZero
+ fetchZero()
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
@@ -455,7 +507,7 @@ trait Scanners extends ScannersCommon {
}
}
}
- fetchDoubleQuote
+ fetchDoubleQuote()
case '\'' =>
def fetchSingleQuote() = {
nextChar()
@@ -474,7 +526,7 @@ trait Scanners extends ScannersCommon {
}
}
}
- fetchSingleQuote
+ fetchSingleQuote()
case '.' =>
nextChar()
if ('0' <= ch && ch <= '9') {
@@ -519,72 +571,16 @@ trait Scanners extends ScannersCommon {
nextChar()
getOperatorRest()
} else {
- syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'")
+ syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch.toInt)) + "'")
nextChar()
}
}
- fetchOther
- }
- }
-
- private def skipComment(): Boolean = {
-
- if (ch == '/' || ch == '*') {
-
- val comment = new StringBuilder("/")
- def appendToComment() = comment.append(ch)
-
- if (ch == '/') {
- do {
- appendToComment()
- nextChar()
- } while ((ch != CR) && (ch != LF) && (ch != SU))
- } else {
- docComment = null
- var openComments = 1
- appendToComment()
- nextChar()
- appendToComment()
- var buildingDocComment = false
- if (ch == '*' && buildDocs) {
- buildingDocComment = true
- }
- while (openComments > 0) {
- do {
- do {
- if (ch == '/') {
- nextChar(); appendToComment()
- if (ch == '*') {
- nextChar(); appendToComment()
- openComments += 1
- }
- }
- if (ch != '*' && ch != SU) {
- nextChar(); appendToComment()
- }
- } while (ch != '*' && ch != SU)
- while (ch == '*') {
- nextChar(); appendToComment()
- }
- } while (ch != '/' && ch != SU)
- if (ch == '/') nextChar()
- else incompleteInputError("unclosed comment")
- openComments -= 1
- }
-
- if (buildingDocComment)
- foundDocComment(comment.toString, offset, charOffset - 2)
- }
-
- foundComment(comment.toString, offset, charOffset - 2)
- true
- } else {
- false
+ fetchOther()
}
}
/** Can token start a statement? */
- def inFirstOfStat(token: Int) = token match {
+ def inFirstOfStat(token: Token) = token match {
case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD |
COMMA | SEMI | NEWLINE | NEWLINES | DOT | COLON | EQUALS | ARROW | LARROW |
SUBTYPE | VIEWBOUND | SUPERTYPE | HASH | RPAREN | RBRACKET | RBRACE | LBRACKET =>
@@ -594,7 +590,7 @@ trait Scanners extends ScannersCommon {
}
/** Can token end a statement? */
- def inLastOfStat(token: Int) = token match {
+ def inLastOfStat(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | STRINGLIT | SYMBOLLIT |
IDENTIFIER | BACKQUOTED_IDENT | THIS | NULL | TRUE | FALSE | RETURN | USCORE |
TYPE | XMLSTART | RPAREN | RBRACKET | RBRACE =>
@@ -709,7 +705,7 @@ trait Scanners extends ScannersCommon {
}
}
- @annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+ @scala.annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
def finishStringPart() = {
setStrVal()
token = STRINGPART
@@ -739,6 +735,10 @@ trait Scanners extends ScannersCommon {
finishStringPart()
nextRawChar()
next.token = LBRACE
+ } else if (ch == '_') {
+ finishStringPart()
+ nextRawChar()
+ next.token = USCORE
} else if (Character.isUnicodeIdentifierStart(ch)) {
finishStringPart()
do {
@@ -803,6 +803,7 @@ trait Scanners extends ScannersCommon {
if (ch == '\\') {
nextChar()
if ('0' <= ch && ch <= '7') {
+ val start = charOffset - 2
val leadch: Char = ch
var oct: Int = digit2int(ch, 8)
nextChar()
@@ -814,6 +815,12 @@ trait Scanners extends ScannersCommon {
nextChar()
}
}
+ val alt = if (oct == LF) "\\n" else "\\u%04x" format oct
+ def msg(what: String) = s"Octal escape literals are $what, use $alt instead."
+ if (settings.future)
+ syntaxError(start, msg("unsupported"))
+ else
+ deprecationWarning(start, msg("deprecated"))
putChar(oct.toChar)
} else {
ch match {
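On the octal-escape branch above: the scanner accumulates up to three octal digits into `oct` (a third digit only when the first is '0'..'3', so the value still fits in a byte) and now deprecates or rejects the form, suggesting the \uXXXX alternative. A small sketch of that accumulation over a plain string, with a local stand-in for the scanner's digit2int helper:

    object OctalEscapeSketch {
      // -1 if ch is not a digit in this base, like the scanner's helper.
      private def digit2int(ch: Char, base: Int): Int = Character.digit(ch, base)

      /** Parse an octal escape whose first digit is at s(i), just after the backslash.
       *  Returns the decoded character and the index after the last digit consumed.
       */
      def octalEscape(s: String, i: Int): (Char, Int) = {
        val leadch = s(i)
        var oct = digit2int(leadch, 8)
        var j = i + 1
        if (j < s.length && digit2int(s(j), 8) >= 0) {
          oct = oct * 8 + digit2int(s(j), 8); j += 1
          if (leadch <= '3' && j < s.length && digit2int(s(j), 8) >= 0) {
            oct = oct * 8 + digit2int(s(j), 8); j += 1
          }
        }
        (oct.toChar, j)
      }

      def main(args: Array[String]): Unit = {
        val (c, next) = octalEscape("101 rest", 0)
        println(s"char=$c (code ${c.toInt}), resume at $next") // char=A (code 65), resume at 3
      }
    }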
@@ -895,7 +902,7 @@ trait Scanners extends ScannersCommon {
*/
def intVal(negated: Boolean): Long = {
if (token == CHARLIT && !negated) {
- charVal
+ charVal.toLong
} else {
var value: Long = 0
val divider = if (base == 10) 1 else 2
@@ -923,7 +930,7 @@ trait Scanners extends ScannersCommon {
}
}
- def intVal: Long = intVal(false)
+ def intVal: Long = intVal(negated = false)
/** Convert current strVal, base to double value
*/
@@ -943,9 +950,8 @@ trait Scanners extends ScannersCommon {
}
if (value > limit)
syntaxError("floating point number too large")
- if (isDeprecatedForm) {
- deprecationWarning("This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.")
- }
+ if (isDeprecatedForm)
+ syntaxError("floating point number is missing digit after dot")
if (negated) -value else value
} catch {
@@ -955,7 +961,7 @@ trait Scanners extends ScannersCommon {
}
}
- def floatVal: Double = floatVal(false)
+ def floatVal: Double = floatVal(negated = false)
def checkNoLetter() {
if (isIdentifierPart(ch) && ch >= ' ')
@@ -966,14 +972,19 @@ trait Scanners extends ScannersCommon {
*/
protected def getNumber() {
val base1 = if (base < 10) 10 else base
- // read 8,9's even if format is octal, produce a malformed number error afterwards.
+ // Read 8,9's even if format is octal, produce a malformed number error afterwards.
+ // At this point, we have already read the first digit, so to tell an innocent 0 apart
+ // from an octal literal 0123... (which we want to disallow), we check whether there
+ // are any additional digits coming after the first one we have already read.
+ var notSingleZero = false
while (digit2int(ch, base1) >= 0) {
putChar(ch)
nextChar()
+ notSingleZero = true
}
token = INTLIT
- /** When we know for certain it's a number after using a touch of lookahead */
+ /* When we know for certain it's a number after using a touch of lookahead */
def restOfNumber() = {
putChar(ch)
nextChar()
@@ -986,6 +997,9 @@ trait Scanners extends ScannersCommon {
if (base <= 10 && isEfd)
getFraction()
else {
+ // Checking for base == 8 is not enough, because base = 8 is set
+ // as soon as a 0 is read in `case '0'` of method fetchToken.
+ if (base == 8 && notSingleZero) syntaxError("Non-zero integral values may not have a leading zero.")
setStrVal()
if (isL) {
nextChar()
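The notSingleZero flag above exists because base is already set to 8 when a leading 0 is read in fetchToken, so a lone 0 and an octal-looking 0123 both reach this point with base == 8; only the presence of further digits tells them apart. A compact standalone sketch of the same distinction, scanning a plain digit string instead of the scanner's buffer:

    object LeadingZeroSketch {
      /** Either the parsed value or the error the scanner would report. */
      def checkIntLiteral(digits: String): Either[String, Long] = {
        val isHex = digits.startsWith("0x") || digits.startsWith("0X")
        val base  = if (isHex) 16 else if (digits.startsWith("0")) 8 else 10
        val notSingleZero = digits.length > 1 && !isHex
        if (base == 8 && notSingleZero)
          Left("Non-zero integral values may not have a leading zero.")
        else if (isHex)
          Right(java.lang.Long.parseLong(digits.drop(2), 16))
        else
          Right(java.lang.Long.parseLong(digits, 10))
      }

      def main(args: Array[String]): Unit = {
        println(checkIntLiteral("0"))    // Right(0)
        println(checkIntLiteral("0123")) // Left(...): would previously have parsed as octal
        println(checkIntLiteral("0x1f")) // Right(31)
      }
    }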
@@ -1001,10 +1015,8 @@ trait Scanners extends ScannersCommon {
val lookahead = lookaheadReader
val c = lookahead.getc()
- /** As of scala 2.11, it isn't a number unless c here is a digit, so
- * opt.future excludes the rest of the logic.
- */
- if (opt.future && !isDigit(c))
+ /* Prohibit 1. */
+ if (!isDigit(c))
return setStrVal()
val isDefinitelyNumber = (c: @switch) match {
@@ -1012,16 +1024,16 @@ trait Scanners extends ScannersCommon {
case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
true
- /** Backquoted idents like 22.`foo`. */
+ /* Backquoted idents like 22.`foo`. */
case '`' =>
return setStrVal() /** Note the early return */
- /** These letters may be part of a literal, or a method invocation on an Int.
+ /* These letters may be part of a literal, or a method invocation on an Int.
*/
case 'd' | 'D' | 'f' | 'F' =>
!isIdentifierPart(lookahead.getc())
- /** A little more special handling for e.g. 5e7 */
+ /* A little more special handling for e.g. 5e7 */
case 'e' | 'E' =>
val ch = lookahead.getc()
!isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
@@ -1058,7 +1070,6 @@ trait Scanners extends ScannersCommon {
def syntaxError(off: Offset, msg: String) {
error(off, msg)
token = ERROR
- errOffset = off
}
/** generate an error at the current token offset
@@ -1071,7 +1082,6 @@ trait Scanners extends ScannersCommon {
def incompleteInputError(msg: String) {
incompleteInputError(offset, msg)
token = EOF
- errOffset = offset
}
override def toString() = token match {
@@ -1114,7 +1124,7 @@ trait Scanners extends ScannersCommon {
def applyBracePatch(): Boolean = false
/** overridden in UnitScanners */
- def parenBalance(token: Int) = 0
+ def parenBalance(token: Token) = 0
/** overridden in UnitScanners */
def healBraces(): List[BracePatch] = List()
@@ -1129,7 +1139,7 @@ trait Scanners extends ScannersCommon {
// ------------- keyword configuration -----------------------------------
- private val allKeywords = List[(Name, Int)](
+ private val allKeywords = List[(Name, Token)](
nme.ABSTRACTkw -> ABSTRACT,
nme.CASEkw -> CASE,
nme.CATCHkw -> CATCH,
@@ -1183,8 +1193,8 @@ trait Scanners extends ScannersCommon {
nme.MACROkw -> IDENTIFIER,
nme.THENkw -> IDENTIFIER)
- private var kwOffset: Int = -1
- private val kwArray: Array[Int] = {
+ private var kwOffset: Offset = -1
+ private val kwArray: Array[Token] = {
val (offset, arr) = createKeywordArray(allKeywords, IDENTIFIER)
kwOffset = offset
arr
@@ -1195,7 +1205,7 @@ trait Scanners extends ScannersCommon {
// Token representation ----------------------------------------------------
/** Returns the string representation of given token. */
- def token2string(token: Int): String = (token: @switch) match {
+ def token2string(token: Token): String = (token: @switch) match {
case IDENTIFIER | BACKQUOTED_IDENT => "identifier"
case CHARLIT => "character literal"
case INTLIT => "integer literal"
@@ -1226,17 +1236,16 @@ trait Scanners extends ScannersCommon {
}
}
- class MalformedInput(val offset: Int, val msg: String) extends Exception
+ class MalformedInput(val offset: Offset, val msg: String) extends Exception
/** A scanner for a given source file not necessarily attached to a compilation unit.
  * Useful for looking inside source files that are not currently compiled to see what's there
*/
class SourceFileScanner(val source: SourceFile) extends Scanner {
val buf = source.content
- override val decodeUni: Boolean = !settings.nouescape.value
+ override val decodeUni: Boolean = !settings.nouescape
// suppress warnings, throw exception on errors
- def warning(off: Offset, msg: String): Unit = ()
def deprecationWarning(off: Offset, msg: String): Unit = ()
def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
@@ -1244,10 +1253,9 @@ trait Scanners extends ScannersCommon {
/** A scanner over a given compilation unit
*/
- class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
+ class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
- override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
@@ -1256,7 +1264,7 @@ trait Scanners extends ScannersCommon {
lazy val parensAnalyzer = new ParensAnalyzer(unit, List())
- override def parenBalance(token: Int) = parensAnalyzer.balance(token)
+ override def parenBalance(token: Token) = parensAnalyzer.balance(token)
override def healBraces(): List[BracePatch] = {
var patches: List[BracePatch] = List()
@@ -1293,23 +1301,21 @@ trait Scanners extends ScannersCommon {
}
}
}
-
- override def foundComment(value: String, start: Int, end: Int) {
- val pos = new RangePosition(unit.source, start, start, end)
- unit.comment(pos, value)
- }
-
- override def foundDocComment(value: String, start: Int, end: Int) {
- val docPos = new RangePosition(unit.source, start, start, end)
- docComment = new DocComment(value, docPos)
- unit.comment(docPos, value)
- }
}
class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
- var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ val balance = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+
+ /** The source code with braces and line starts annotated with [NN] showing the index */
+ private def markedSource = {
+ val code = unit.source.content
+ val braces = code.indices filter (idx => "{}\n" contains code(idx)) toSet;
+ val mapped = code.indices map (idx => if (braces(idx)) s"${code(idx)}[$idx]" else "" + code(idx))
+ mapped.mkString("")
+ }
init()
+ log(s"ParensAnalyzer for ${unit.source} of length ${unit.source.content.length}\n```\n$markedSource\n```")
/** The offset of the first token on this line, or next following line if blank
*/
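The markedSource helper above is purely a debugging aid: it reprints the unit's source with every brace and newline tagged by its character index, so the brace pairs logged further down in this diff can be matched back to offsets. A standalone sketch of the same annotation over a plain string:

    object MarkedSourceSketch {
      /** Annotate every brace and newline with its index, e.g. "a{b}" becomes "a{[1]b}[3]". */
      def markedSource(code: String): String =
        code.indices.map { idx =>
          val c = code(idx)
          if ("{}\n".indexOf(c) >= 0) s"$c[$idx]" else c.toString
        }.mkString("")

      def main(args: Array[String]): Unit =
        println(markedSource("def f = {\n  g()\n}"))
    }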
@@ -1385,23 +1391,30 @@ trait Scanners extends ScannersCommon {
bpbuf += current
}
}
+ def bracePairString(bp: BracePair, indent: Int): String = {
+ val rangeString = {
+ import bp._
+ val lline = line(loff)
+ val rline = line(roff)
+ val tokens = List(lline, lindent, rline, rindent) map (n => if (n < 0) "??" else "" + n)
+ "%s:%s to %s:%s".format(tokens: _*)
+ }
+ val outer = (" " * indent) + rangeString
+ val inners = bp.nested map (bracePairString(_, indent + 2))
- def printBP(bp: BracePair, indent: Int) {
- println(" "*indent+line(bp.loff)+":"+bp.lindent+" to "+line(bp.roff)+":"+bp.rindent)
- if (bp.nested.nonEmpty)
- for (bp1 <- bp.nested) {
- printBP(bp1, indent + 2)
- }
+ if (inners.isEmpty) outer
+ else inners.mkString(outer + "\n", "\n", "")
}
-// println("lineStart = "+lineStart)//DEBUG
-// println("bracepairs = ")
-// for (bp <- bpbuf.toList) printBP(bp, 0)
+ def bpString = bpbuf.toList map ("\n" + bracePairString(_, 0)) mkString ""
+ def startString = lineStart.mkString("line starts: [", ", ", "]")
+
+ log(s"\n$startString\n$bpString")
bpbuf.toList
}
var tabSeen = false
- def line(offset: Int): Int = {
+ def line(offset: Offset): Int = {
def findLine(lo: Int, hi: Int): Int = {
val mid = (lo + hi) / 2
if (offset < lineStart(mid)) findLine(lo, mid - 1)
@@ -1412,7 +1425,7 @@ trait Scanners extends ScannersCommon {
else findLine(0, lineStart.length - 1)
}
- def column(offset: Int): Int = {
+ def column(offset: Offset): Int = {
var col = 0
var i = offset - 1
while (i >= 0 && buf(i) != CR && buf(i) != LF) {
@@ -1429,18 +1442,6 @@ trait Scanners extends ScannersCommon {
else bp :: insertPatch(bps, patch)
}
- def leftColumn(offset: Int) =
- if (offset == -1) -1 else column(lineStart(line(offset)))
-
- def rightColumn(offset: Int, default: Int) =
- if (offset == -1) -1
- else {
- val rlin = line(offset)
- if (lineStart(rlin) == offset) column(offset)
- else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1))
- else default
- }
-
def insertRBrace(): List[BracePatch] = {
def insert(bps: List[BracePair]): List[BracePatch] = bps match {
case List() => patches
@@ -1455,7 +1456,7 @@ trait Scanners extends ScannersCommon {
while (lin < lineStart.length && column(lineStart(lin)) > lindent)
lin += 1
if (lin < lineStart.length) {
- val patches1 = insertPatch(patches, BracePatch(lineStart(lin), true))
+ val patches1 = insertPatch(patches, BracePatch(lineStart(lin), inserted = true))
//println("patch for "+bp+"/"+imbalanceMeasure+"/"+new ParensAnalyzer(unit, patches1).imbalanceMeasure)
/*if (improves(patches1))*/
patches1
@@ -1476,27 +1477,16 @@ trait Scanners extends ScannersCommon {
else {
val patches1 = delete(nested)
if (patches1 ne patches) patches1
- else insertPatch(patches, BracePatch(roff, false))
+ else insertPatch(patches, BracePatch(roff, inserted = false))
}
}
delete(bracePairs)
}
- def imbalanceMeasure: Int = {
- def measureList(bps: List[BracePair]): Int =
- (bps map measure).sum
- def measure(bp: BracePair): Int =
- (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested)
- measureList(bracePairs)
- }
-
- def improves(patches1: List[BracePatch]): Boolean =
- imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
-
// don't emit deprecation warnings about identifiers like `macro` or `then`
// when skimming through the source file trying to heal braces
override def emitIdentifierDeprecationWarnings = false
- override def error(offset: Int, msg: String) {}
+ override def error(offset: Offset, msg: String) {}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index e8ef670222..1abc0c860c 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -7,11 +7,8 @@ package scala.tools.nsc
package ast.parser
import scala.collection.{ mutable, immutable }
-import scala.xml.{ EntityRef, Text }
-import scala.xml.XML.{ xmlns }
import symtab.Flags.MUTABLE
import scala.reflect.internal.util.StringOps.splitWhere
-import scala.language.implicitConversions
/** This class builds instance of `Tree` that represent XML.
*
@@ -133,7 +130,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
case (Some(pre), rest) => (const(pre), const(rest))
case _ => (wild, const(n))
}
- mkXML(pos, true, prepat, labpat, null, null, false, args)
+ mkXML(pos, isPattern = true, prepat, labpat, null, null, empty = false, args)
}
protected def convertToTextPat(t: Tree): Tree = t match {
@@ -144,14 +141,12 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
(buf map convertToTextPat).toList
def parseAttribute(pos: Position, s: String): Tree = {
- val ts = scala.xml.Utility.parseAttributeValue(s) map {
- case Text(s) => text(pos, s)
- case EntityRef(s) => entityRef(pos, s)
- }
- ts.length match {
- case 0 => gen.mkNil
- case 1 => ts.head
- case _ => makeXMLseq(pos, ts.toList)
+ import xml.Utility.parseAttributeValue
+
+ parseAttributeValue(s, text(pos, _), entityRef(pos, _)) match {
+ case Nil => gen.mkNil
+ case t :: Nil => t
+ case ts => makeXMLseq(pos, ts.toList)
}
}
@@ -169,7 +164,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
}
/** Returns (Some(prefix) | None, rest) based on position of ':' */
- def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match {
+ def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', doDropIndex = true) match {
case Some((pre, rest)) => (Some(pre), rest)
case _ => (None, name)
}
@@ -197,9 +192,9 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
uri1
}
- /** Extract all the namespaces from the attribute map. */
+ /* Extract all the namespaces from the attribute map. */
val namespaces: List[Tree] =
- for (z <- attrMap.keys.toList ; if z startsWith xmlns) yield {
+ for (z <- attrMap.keys.toList ; if z startsWith "xmlns") yield {
val ns = splitPrefix(z) match {
case (Some(_), rest) => rest
case _ => null
@@ -247,7 +242,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val body = mkXML(
pos.makeTransparent,
- false,
+ isPattern = false,
const(pre),
const(newlabel),
makeSymbolicAttrs,
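A stand-alone sketch of the contract `splitPrefix` relies on (the real code delegates to `StringOps.splitWhere(name, _ == ':', doDropIndex = true)`):

object SplitPrefixSketch {
  // Split a qualified XML name at the first ':' and drop the separator, e.g.
  // "xs:element" -> (Some("xs"), "element") and "div" -> (None, "div").
  def splitPrefixSketch(name: String): (Option[String], String) =
    name.indexOf(':') match {
      case -1 => (None, name)
      case i  => (Some(name.substring(0, i)), name.substring(i + 1))
    }
}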
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index 8a9ce8907e..3a695c6f59 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -11,26 +11,98 @@ import javac._
/** An nsc sub-component.
*/
abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParsers with Scanners with JavaParsers with JavaScanners {
+ import global._
val phaseName = "parser"
-
def newPhase(prev: Phase): StdPhase = new ParserPhase(prev)
- class ParserPhase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
+ abstract class MemberDefTraverser extends Traverser {
+ def onMember(defn: MemberDef): Unit
+
+ private var depth: Int = 0
+ private def lower[T](body: => T): T = {
+ depth += 1
+ try body finally depth -= 1
+ }
+ def currentDepth = depth
+
+ /** Prune this tree and all trees beneath it. Can be overridden. */
+ def prune(md: MemberDef): Boolean = (
+ md.mods.isSynthetic
+ || md.mods.isParamAccessor
+ || nme.isConstructorName(md.name)
+ || (md.name containsName nme.ANON_CLASS_NAME)
+ )
+
+ override def traverse(t: Tree): Unit = t match {
+ case md: MemberDef if prune(md) =>
+ case md @ PackageDef(_, stats) => traverseTrees(stats)
+ case md: ImplDef => onMember(md) ; lower(traverseTrees(md.impl.body))
+ case md: ValOrDefDef => onMember(md) ; lower(traverse(md.rhs))
+ case _ => super.traverse(t)
+ }
+ }
+
+ class MemberPosReporter(unit: CompilationUnit) extends MemberDefTraverser {
+ private var outputFn: MemberDef => String = outputForScreen
+ val path = unit.source.file.path
+
+ // If a single line, outputs the line; if it spans multiple lines
+ // outputs NN,NN with start and end lines, e.g. 15,25.
+ def outputPos(md: MemberDef): String = {
+ val pos = md.pos
+ val start = pos.focusStart.line
+ val end = pos.focusEnd.line
+
+ if (start == end) "" + start else s"$start,$end"
+ }
+ def outputForSed(md: MemberDef): String = {
+ val pos_s = "%-12s" format outputPos(md) + "p"
+ s"$pos_s $path # ${md.keyword} ${md.name}"
+ }
+ def outputForScreen(md: MemberDef): String = {
+ val pos_s = "%-20s" format " " * currentDepth + outputPos(md)
+ s"$pos_s ${md.keyword} ${md.name}"
+ }
+
+ def onMember(md: MemberDef) = println(outputFn(md))
+ // It recognizes "sed" and "anything else".
+ def show(style: String) {
+ if (style == "sed") {
+ outputFn = outputForSed
+ traverse(unit.body)
+ }
+ else {
+ outputFn = outputForScreen
+ println(path)
+ traverse(unit.body)
+ }
+ println("")
+ }
+ }
+
+ private def initialUnitBody(unit: CompilationUnit): Tree = {
+ if (unit.isJava) new JavaUnitParser(unit).parse()
+ else if (global.reporter.incompleteHandled) newUnitParser(unit).parse()
+ else newUnitParser(unit).smartParse()
+ }
+
+ class ParserPhase(prev: Phase) extends StdPhase(prev) {
override val checkable = false
override val keepsTypeParams = false
- def apply(unit: global.CompilationUnit) {
- import global._
+ def apply(unit: CompilationUnit) {
informProgress("parsing " + unit)
- unit.body =
- if (unit.isJava) new JavaUnitParser(unit).parse()
- else if (reporter.incompleteHandled) new UnitParser(unit).parse()
- else new UnitParser(unit).smartParse()
+ // if the body is already filled in, don't overwrite it
+ // otherwise compileLate is going to overwrite bodies of synthetic source files
+ if (unit.body == EmptyTree)
+ unit.body = initialUnitBody(unit)
- if (settings.Yrangepos.value && !reporter.hasErrors)
+ if (settings.Yrangepos && !reporter.hasErrors)
validatePositions(unit.body)
+
+ if (settings.Ymemberpos.isSetByUser)
+ new MemberPosReporter(unit) show (style = settings.Ymemberpos.value)
}
}
}
-
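The indentation of the screen output produced by `MemberPosReporter` comes from tracking nesting depth around each recursive step; a minimal sketch of that idiom in plain Scala:

final class DepthTrackerSketch {
  private var depth = 0
  def currentDepth: Int = depth

  // Bump the counter for the duration of `body` only; `finally` restores it even if the
  // traversal throws, so the reported depth always mirrors the current nesting.
  def lower[T](body: => T): T = {
    depth += 1
    try body finally depth -= 1
  }
}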
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index c3fd414426..5a7dc4950d 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -6,15 +6,11 @@
package scala.tools.nsc
package ast.parser
-import scala.annotation.switch
-
/** Common code between JavaTokens and Tokens. Not as much (and not as concrete)
* as one might like because JavaTokens for no clear reason chose new numbers for
* identical token sets.
*/
abstract class Tokens {
- import scala.reflect.internal.Chars._
-
/** special tokens */
final val EMPTY = -3
final val UNDEF = -2
@@ -34,14 +30,6 @@ abstract class Tokens {
def isIdentifier(code: Int): Boolean
def isLiteral(code: Int): Boolean
- def isKeyword(code: Int): Boolean
- def isSymbol(code: Int): Boolean
-
- final def isSpace(at: Char) = at == ' ' || at == '\t'
- final def isNewLine(at: Char) = at == CR || at == LF || at == FF
- final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE
- final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0)
- final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1)
}
object Tokens extends Tokens {
@@ -52,20 +40,10 @@ object Tokens extends Tokens {
def isLiteral(code: Int) =
code >= CHARLIT && code <= INTERPOLATIONID
-
/** identifiers */
final val IDENTIFIER = 10
final val BACKQUOTED_IDENT = 11
- def isIdentifier(code: Int) =
- code >= IDENTIFIER && code <= BACKQUOTED_IDENT
-
- @switch def canBeginExpression(code: Int) = code match {
- case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
- case LBRACE|LPAREN|LBRACKET|COMMENT => true
- case IF|DO|WHILE|FOR|NEW|TRY|THROW => true
- case NULL|THIS|TRUE|FALSE => true
- case code => isLiteral(code)
- }
+ def isIdentifier(code: Int) = code >= IDENTIFIER && code <= BACKQUOTED_IDENT // used by ide
/** keywords */
final val IF = 20
@@ -113,17 +91,6 @@ object Tokens extends Tokens {
final val MACRO = 62 // not yet used in 2.10
final val THEN = 63 // not yet used in 2.10
- def isKeyword(code: Int) =
- code >= IF && code <= LAZY
-
- @switch def isDefinition(code: Int) = code match {
- case CLASS|TRAIT|OBJECT => true
- case CASECLASS|CASEOBJECT => true
- case DEF|VAL|VAR => true
- case TYPE => true
- case _ => false
- }
-
/** special symbols */
final val COMMA = 70
final val SEMI = 71
@@ -141,9 +108,6 @@ object Tokens extends Tokens {
final val AT = 83
final val VIEWBOUND = 84
- def isSymbol(code: Int) =
- code >= COMMA && code <= VIEWBOUND
-
/** parenthesis */
final val LPAREN = 90
final val RPAREN = 91
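The deleted brace helpers depended on the numbering of these codes: with LPAREN = 90 and RPAREN = 91, and assuming the bracket and brace codes continue consecutively up to RBRACE = 95, every opening delimiter has an even code. A sketch of that check under that assumption:

object DelimiterCodesSketch {
  // LPAREN/RPAREN appear in the hunk above; the remaining values are the assumed continuation.
  final val LPAREN = 90;   final val RPAREN   = 91
  final val LBRACKET = 92; final val RBRACKET = 93
  final val LBRACE = 94;   final val RBRACE   = 95

  def isBrace(code: Int)      = code >= LPAREN && code <= RBRACE
  def isOpenBrace(code: Int)  = isBrace(code) && code % 2 == 0   // 90, 92, 94
  def isCloseBrace(code: Int) = isBrace(code) && code % 2 == 1   // 91, 93, 95
}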
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 1412bff0ab..cfee988efc 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -8,119 +8,29 @@ package ast.parser
import symtab.Flags._
import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.{Position, SourceFile, FreshNameCreator}
/** Methods for building trees, used in the parser. All the trees
* returned by this class must be untyped.
*/
abstract class TreeBuilder {
-
val global: Global
import global._
- def freshName(): Name = freshName("x$")
- def freshTermName(): TermName = freshTermName("x$")
+ def unit: CompilationUnit
+ def source: SourceFile
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, point: Int, end: Int): Position
+ implicit def fresh: FreshNameCreator = unit.fresh
+ def o2p(offset: Int): Position = Position.offset(source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
- def rootId(name: Name) = gen.rootId(name)
def rootScalaDot(name: Name) = gen.rootScalaDot(name)
def scalaDot(name: Name) = gen.scalaDot(name)
def scalaAnyRefConstr = scalaDot(tpnme.AnyRef)
- def scalaAnyValConstr = scalaDot(tpnme.AnyVal)
- def scalaAnyConstr = scalaDot(tpnme.Any)
def scalaUnitConstr = scalaDot(tpnme.Unit)
- def productConstr = scalaDot(tpnme.Product)
- def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n))
- def serializableConstr = scalaDot(tpnme.Serializable)
def convertToTypeName(t: Tree) = gen.convertToTypeName(t)
- /** Convert all occurrences of (lower-case) variables in a pattern as follows:
- * x becomes x @ _
- * x: T becomes x @ (_: T)
- */
- private object patvarTransformer extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Ident(name) if (treeInfo.isVarPattern(tree) && name != nme.WILDCARD) =>
- atPos(tree.pos)(Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))))
- case Typed(id @ Ident(name), tpt) if (treeInfo.isVarPattern(id) && name != nme.WILDCARD) =>
- atPos(tree.pos.withPoint(id.pos.point)) {
- Bind(name, atPos(tree.pos.withStart(tree.pos.point)) {
- Typed(Ident(nme.WILDCARD), tpt)
- })
- }
- case Apply(fn @ Apply(_, _), args) =>
- treeCopy.Apply(tree, transform(fn), transformTrees(args))
- case Apply(fn, args) =>
- treeCopy.Apply(tree, fn, transformTrees(args))
- case Typed(expr, tpt) =>
- treeCopy.Typed(tree, transform(expr), tpt)
- case Bind(name, body) =>
- treeCopy.Bind(tree, name, transform(body))
- case Alternative(_) | Star(_) =>
- super.transform(tree)
- case _ =>
- tree
- }
- }
-
- /** Traverse pattern and collect all variable names with their types in buffer
- * The variables keep their positions; whereas the pattern is converted to be
- * synthetic for all nodes that contain a variable position.
- */
- class GetVarTraverser extends Traverser {
- val buf = new ListBuffer[(Name, Tree, Position)]
-
- def namePos(tree: Tree, name: Name): Position =
- if (!tree.pos.isRange || name.containsName(nme.raw.DOLLAR)) tree.pos.focus
- else {
- val start = tree.pos.start
- val end = start + name.decode.length
- r2p(start, start, end)
- }
-
- override def traverse(tree: Tree): Unit = {
- def seenName(name: Name) = buf exists (_._1 == name)
- def add(name: Name, t: Tree) = if (!seenName(name)) buf += ((name, t, namePos(tree, name)))
- val bl = buf.length
-
- tree match {
- case Bind(nme.WILDCARD, _) =>
- super.traverse(tree)
-
- case Bind(name, Typed(tree1, tpt)) =>
- val newTree = if (treeInfo.mayBeTypePat(tpt)) TypeTree() else tpt.duplicate
- add(name, newTree)
- traverse(tree1)
-
- case Bind(name, tree1) =>
- // can assume only name range as position, as otherwise might overlap
- // with binds embedded in pattern tree1
- add(name, TypeTree())
- traverse(tree1)
-
- case _ =>
- super.traverse(tree)
- }
- if (buf.length > bl)
- tree setPos tree.pos.makeTransparent
- }
- def apply(tree: Tree) = {
- traverse(tree)
- buf.toList
- }
- }
-
- /** Returns list of all pattern variables, possibly with their types,
- * without duplicates
- */
- private def getVariables(tree: Tree): List[(Name, Tree, Position)] =
- new GetVarTraverser apply tree
-
def byNameApplication(tpe: Tree): Tree =
AppliedTypeTree(rootScalaDot(tpnme.BYNAME_PARAM_CLASS_NAME), List(tpe))
def repeatedApplication(tpe: Tree): Tree =
@@ -129,25 +39,12 @@ abstract class TreeBuilder {
def makeImportSelector(name: Name, nameOffset: Int): ImportSelector =
ImportSelector(name, nameOffset, name, nameOffset)
- private def makeTuple(trees: List[Tree], isType: Boolean): Tree = {
- val tupString = "Tuple" + trees.length
- Apply(scalaDot(if (isType) newTypeName(tupString) else newTermName(tupString)), trees)
- }
+ def makeTupleTerm(elems: List[Tree]) = gen.mkTuple(elems)
- def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => Literal(Constant())
- case List(tree) if flattenUnary => tree
- case _ => makeTuple(trees, false)
- }
-
- def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => scalaUnitConstr
- case List(tree) if flattenUnary => tree
- case _ => AppliedTypeTree(scalaDot(newTypeName("Tuple" + trees.length)), trees)
- }
+ def makeTupleType(elems: List[Tree]) = gen.mkTupleType(elems)
def stripParens(t: Tree) = t match {
- case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts, true) }
+ case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts) }
case _ => t
}
@@ -157,323 +54,67 @@ abstract class TreeBuilder {
def makeSelfDef(name: TermName, tpt: Tree): ValDef =
ValDef(Modifiers(PRIVATE), name, tpt, EmptyTree)
- /** If tree is a variable pattern, return Some("its name and type").
- * Otherwise return none */
- private def matchVarPattern(tree: Tree): Option[(Name, Tree)] = {
- def wildType(t: Tree): Option[Tree] = t match {
- case Ident(x) if x.toTermName == nme.WILDCARD => Some(TypeTree())
- case Typed(Ident(x), tpt) if x.toTermName == nme.WILDCARD => Some(tpt)
- case _ => None
- }
- tree match {
- case Ident(name) => Some((name, TypeTree()))
- case Bind(name, body) => wildType(body) map (x => (name, x))
- case Typed(Ident(name), tpt) => Some((name, tpt))
- case _ => None
- }
- }
-
/** Create tree representing (unencoded) binary operation expression or pattern. */
- def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position): Tree = {
- def mkNamed(args: List[Tree]) =
- if (isExpr) args map {
- case a @ Assign(id @ Ident(name), rhs) =>
- atPos(a.pos) { AssignOrNamedArg(id, rhs) }
- case e => e
- } else args
+ def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = {
+ require(isExpr || targs.isEmpty, s"Incompatible args to makeBinop: !isExpr but targs=$targs")
+
+ def mkSelection(t: Tree) = {
+ def sel = atPos(opPos union t.pos)(Select(stripParens(t), op.encode))
+ if (targs.isEmpty) sel else atPos(left.pos)(TypeApply(sel, targs))
+ }
+ def mkNamed(args: List[Tree]) = if (isExpr) args map treeInfo.assignmentToMaybeNamedArg else args
val arguments = right match {
case Parens(args) => mkNamed(args)
- case _ => List(right)
+ case _ => List(right)
}
if (isExpr) {
if (treeInfo.isLeftAssoc(op)) {
- Apply(atPos(opPos union left.pos) { Select(stripParens(left), op.encode) }, arguments)
+ Apply(mkSelection(left), arguments)
} else {
val x = freshTermName()
Block(
- List(ValDef(Modifiers(SYNTHETIC), x, TypeTree(), stripParens(left))),
- Apply(atPos(opPos union right.pos) { Select(stripParens(right), op.encode) }, List(Ident(x))))
+ List(ValDef(Modifiers(SYNTHETIC | ARTIFACT), x, TypeTree(), stripParens(left))),
+ Apply(mkSelection(right), List(Ident(x))))
}
} else {
Apply(Ident(op.encode), stripParens(left) :: arguments)
}
}
- /** Creates a tree representing new Object { stats }.
- * To make sure an anonymous subclass of Object is created,
- * if there are no stats, a () is added.
- */
- def makeAnonymousNew(stats: List[Tree]): Tree = {
- val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
- makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
- }
-
- /** Create positioned tree representing an object creation <new parents { stats }
- * @param npos the position of the new
- * @param cpos the position of the anonymous class starting with parents
- */
- def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree], argss: List[List[Tree]],
- npos: Position, cpos: Position): Tree =
- if (parents.isEmpty)
- makeNew(List(scalaAnyRefConstr), self, stats, argss, npos, cpos)
- else if (parents.tail.isEmpty && stats.isEmpty)
- atPos(npos union cpos) { New(parents.head, argss) }
- else {
- val x = tpnme.ANON_CLASS_NAME
- atPos(npos union cpos) {
- Block(
- List(
- atPos(cpos) {
- ClassDef(
- Modifiers(FINAL), x, Nil,
- Template(parents, self, NoMods, ListOfNil, argss, stats, cpos.focus))
- }),
- atPos(npos) {
- New(
- Ident(x) setPos npos.focus,
- ListOfNil)
- }
- )
- }
- }
-
- /** Create a tree representing an assignment <lhs = rhs> */
- def makeAssign(lhs: Tree, rhs: Tree): Tree = lhs match {
- case Apply(fn, args) =>
- Apply(atPos(fn.pos) { Select(fn, nme.update) }, args ::: List(rhs))
- case _ =>
- Assign(lhs, rhs)
- }
-
/** Tree for `od op`, start is start0 if od.pos is borked. */
def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = {
- val start = if (od.pos.isDefined) od.pos.startOrPoint else start0
+ val start = if (od.pos.isDefined) od.pos.start else start0
atPos(r2p(start, end, end + op.length)) { new PostfixSelect(od, op.encode) }
}
- /** A type tree corresponding to (possibly unary) intersection type */
- def makeIntersectionTypeTree(tps: List[Tree]): Tree =
- if (tps.tail.isEmpty) tps.head
- else CompoundTypeTree(Template(tps, emptyValDef, Nil))
-
/** Create tree representing a while loop */
def makeWhile(startPos: Int, cond: Tree, body: Tree): Tree = {
val lname = freshTermName(nme.WHILE_PREFIX)
def default = wrappingPos(List(cond, body)) match {
- case p if p.isDefined => p.endOrPoint
+ case p if p.isDefined => p.end
case _ => startPos
}
val continu = atPos(o2p(body.pos pointOrElse default)) { Apply(Ident(lname), Nil) }
- val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
+ val rhs = If(cond, Block(List(body), continu), Literal(Constant(())))
LabelDef(lname, Nil, rhs)
}
/** Create tree representing a do-while loop */
def makeDoWhile(lname: TermName, body: Tree, cond: Tree): Tree = {
val continu = Apply(Ident(lname), Nil)
- val rhs = Block(List(body), If(cond, continu, Literal(Constant())))
+ val rhs = Block(List(body), If(cond, continu, Literal(Constant(()))))
LabelDef(lname, Nil, rhs)
}
/** Create block of statements `stats` */
- def makeBlock(stats: List[Tree]): Tree =
- if (stats.isEmpty) Literal(Constant())
- else if (!stats.last.isTerm) Block(stats, Literal(Constant()))
- else if (stats.length == 1) stats.head
- else Block(stats.init, stats.last)
-
- def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = {
- val cases = List(
- CaseDef(condition, EmptyTree, Literal(Constant(true))),
- CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
- )
- val matchTree = makeVisitor(cases, false, scrutineeName)
-
- atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil))
- }
-
- /** Create tree for for-comprehension generator <val pat0 <- rhs0> */
- def makeGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = {
- val pat1 = patvarTransformer.transform(pat)
- val rhs1 =
- if (valeq || treeInfo.isVarPatternDeep(pat)) rhs
- else makeFilter(rhs, pat1.duplicate, nme.CHECK_IF_REFUTABLE_STRING)
-
- if (valeq) ValEq(pos, pat1, rhs1)
- else ValFrom(pos, pat1, rhs1)
- }
+ def makeBlock(stats: List[Tree]): Tree = gen.mkBlock(stats)
def makeParam(pname: TermName, tpe: Tree) =
ValDef(Modifiers(PARAM), pname, tpe, EmptyTree)
- def makeSyntheticParam(pname: TermName) =
- ValDef(Modifiers(PARAM | SYNTHETIC), pname, TypeTree(), EmptyTree)
-
def makeSyntheticTypeParam(pname: TypeName, bounds: Tree) =
TypeDef(Modifiers(DEFERRED | SYNTHETIC), pname, Nil, bounds)
- abstract class Enumerator { def pos: Position }
- case class ValFrom(pos: Position, pat: Tree, rhs: Tree) extends Enumerator
- case class ValEq(pos: Position, pat: Tree, rhs: Tree) extends Enumerator
- case class Filter(pos: Position, test: Tree) extends Enumerator
-
- /** Create tree for for-comprehension <for (enums) do body> or
- * <for (enums) yield body> where mapName and flatMapName are chosen
- * corresponding to whether this is a for-do or a for-yield.
- * The creation performs the following rewrite rules:
- *
- * 1.
- *
- * for (P <- G) E ==> G.foreach (P => E)
- *
- * Here and in the following (P => E) is interpreted as the function (P => E)
- * if P is a variable pattern and as the partial function { case P => E } otherwise.
- *
- * 2.
- *
- * for (P <- G) yield E ==> G.map (P => E)
- *
- * 3.
- *
- * for (P_1 <- G_1; P_2 <- G_2; ...) ...
- * ==>
- * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...)
- *
- * 4.
- *
- * for (P <- G; E; ...) ...
- * =>
- * for (P <- G.filter (P => E); ...) ...
- *
- * 5. For N < MaxTupleArity:
- *
- * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...)
- * ==>
- * for (TupleN(P_1, P_2, ... P_N) <-
- * for (x_1 @ P_1 <- G) yield {
- * val x_2 @ P_2 = E_2
- * ...
- * val x_N & P_N = E_N
- * TupleN(x_1, ..., x_N)
- * } ...)
- *
- * If any of the P_i are variable patterns, the corresponding `x_i @ P_i' is not generated
- * and the variable constituting P_i is used instead of x_i
- *
- * @param mapName The name to be used for maps (either map or foreach)
- * @param flatMapName The name to be used for flatMaps (either flatMap or foreach)
- * @param enums The enumerators in the for expression
- * @param body The body of the for expression
- */
- private def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Enumerator], body: Tree): Tree = {
-
- /** make a closure pat => body.
- * The closure is assigned a transparent position with the point at pos.point and
- * the limits given by pat and body.
- */
- def makeClosure(pos: Position, pat: Tree, body: Tree): Tree = {
- def splitpos = wrappingPos(List(pat, body)).withPoint(pos.point).makeTransparent
- matchVarPattern(pat) match {
- case Some((name, tpt)) =>
- Function(
- List(atPos(pat.pos) { ValDef(Modifiers(PARAM), name.toTermName, tpt, EmptyTree) }),
- body) setPos splitpos
- case None =>
- atPos(splitpos) {
- makeVisitor(List(CaseDef(pat, EmptyTree, body)), false)
- }
- }
- }
-
- /** Make an application qual.meth(pat => body) positioned at `pos`.
- */
- def makeCombination(pos: Position, meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree =
- Apply(Select(qual, meth) setPos qual.pos, List(makeClosure(pos, pat, body))) setPos pos
-
- /** Optionally, if pattern is a `Bind`, the bound name, otherwise None.
- */
- def patternVar(pat: Tree): Option[Name] = pat match {
- case Bind(name, _) => Some(name)
- case _ => None
- }
-
- /** If `pat` is not yet a `Bind` wrap it in one with a fresh name
- */
- def makeBind(pat: Tree): Tree = pat match {
- case Bind(_, _) => pat
- case _ => Bind(freshName(), pat) setPos pat.pos
- }
-
- /** A reference to the name bound in Bind `pat`.
- */
- def makeValue(pat: Tree): Tree = pat match {
- case Bind(name, _) => Ident(name) setPos pat.pos.focus
- }
-
- /** The position of the closure that starts with generator at position `genpos`.
- */
- def closurePos(genpos: Position) = {
- val end = body.pos match {
- case NoPosition => genpos.point
- case bodypos => bodypos.endOrPoint
- }
- r2p(genpos.startOrPoint, genpos.point, end)
- }
-
-// val result =
- enums match {
- case ValFrom(pos, pat, rhs) :: Nil =>
- makeCombination(closurePos(pos), mapName, rhs, pat, body)
- case ValFrom(pos, pat, rhs) :: (rest @ (ValFrom(_, _, _) :: _)) =>
- makeCombination(closurePos(pos), flatMapName, rhs, pat,
- makeFor(mapName, flatMapName, rest, body))
- case ValFrom(pos, pat, rhs) :: Filter(_, test) :: rest =>
- makeFor(mapName, flatMapName,
- ValFrom(pos, pat, makeCombination(rhs.pos union test.pos, nme.withFilter, rhs, pat.duplicate, test)) :: rest,
- body)
- case ValFrom(pos, pat, rhs) :: rest =>
- val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq]);
- assert(!valeqs.isEmpty)
- val rest1 = rest.drop(valeqs.length)
- val pats = valeqs map { case ValEq(_, pat, _) => pat }
- val rhss = valeqs map { case ValEq(_, _, rhs) => rhs }
- val defpat1 = makeBind(pat)
- val defpats = pats map makeBind
- val pdefs = (defpats, rhss).zipped flatMap makePatDef
- val ids = (defpat1 :: defpats) map makeValue
- val rhs1 = makeForYield(
- List(ValFrom(pos, defpat1, rhs)),
- Block(pdefs, atPos(wrappingPos(ids)) { makeTupleTerm(ids, true) }) setPos wrappingPos(pdefs))
- val allpats = (pat :: pats) map (_.duplicate)
- val vfrom1 = ValFrom(r2p(pos.startOrPoint, pos.point, rhs1.pos.endOrPoint), atPos(wrappingPos(allpats)) { makeTuple(allpats, false) } , rhs1)
- makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
- case _ =>
- EmptyTree //may happen for erroneous input
- }
-// println("made for "+result)
-// result
- }
-
- /** Create tree for for-do comprehension <for (enums) body> */
- def makeFor(enums: List[Enumerator], body: Tree): Tree =
- makeFor(nme.foreach, nme.foreach, enums, body)
-
- /** Create tree for for-yield comprehension <for (enums) yield body> */
- def makeForYield(enums: List[Enumerator], body: Tree): Tree =
- makeFor(nme.map, nme.flatMap, enums, body)
-
- /** Create tree for a lifted expression XX-LIFTING
- */
- def makeLifted(gs: List[ValFrom], body: Tree): Tree = {
- def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match {
- case g :: Nil => g
- case ValFrom(pos1, pat1, rhs1) :: gs2 =>
- val ValFrom(pos2, pat2, rhs2) = combine(gs2)
- ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2)))
- }
- makeForYield(List(combine(gs)), body)
- }
-
/** Create tree for a pattern alternative */
def makeAlternative(ts: List[Tree]): Tree = {
def alternatives(t: Tree): List[Tree] = t match {
@@ -483,21 +124,9 @@ abstract class TreeBuilder {
Alternative(ts flatMap alternatives)
}
- /** Create visitor <x => x match cases> */
- def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean): Tree =
- makeVisitor(cases, checkExhaustive, "x$")
-
- /** Create visitor <x => x match cases> */
- def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean, prefix: String): Tree = {
- val x = freshTermName(prefix)
- val id = Ident(x)
- val sel = if (checkExhaustive) id else gen.mkUnchecked(id)
- Function(List(makeSyntheticParam(x)), Match(sel, cases))
- }
-
/** Create tree for case definition <case pat if guard => rhs> */
def makeCaseDef(pat: Tree, guard: Tree, rhs: Tree): CaseDef =
- CaseDef(patvarTransformer.transform(pat), guard, rhs)
+ CaseDef(gen.patvarTransformer.transform(pat), guard, rhs)
/** Creates tree representing:
* { case x: Throwable =>
@@ -506,9 +135,9 @@ abstract class TreeBuilder {
* }
*/
def makeCatchFromExpr(catchExpr: Tree): CaseDef = {
- val binder = freshTermName("x")
+ val binder = freshTermName()
val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable)))
- val catchDef = ValDef(NoMods, freshTermName("catchExpr"), TypeTree(), catchExpr)
+ val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr"), TypeTree(), catchExpr)
val catchFn = Ident(catchDef.name)
val body = atPos(catchExpr.pos.makeTransparent)(Block(
List(catchDef),
@@ -521,79 +150,8 @@ abstract class TreeBuilder {
makeCaseDef(pat, EmptyTree, body)
}
- /** Create tree for pattern definition <val pat0 = rhs> */
- def makePatDef(pat: Tree, rhs: Tree): List[Tree] =
- makePatDef(Modifiers(0), pat, rhs)
-
- /** Create tree for pattern definition <mods val pat0 = rhs> */
- def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree): List[Tree] = matchVarPattern(pat) match {
- case Some((name, tpt)) =>
- List(atPos(pat.pos union rhs.pos) {
- ValDef(mods, name.toTermName, tpt, rhs)
- })
-
- case None =>
- // in case there is exactly one variable x_1 in pattern
- // val/var p = e ==> val/var x_1 = e.match (case p => (x_1))
- //
- // in case there are zero or more than one variables in pattern
- // val/var p = e ==> private synthetic val t$ = e.match (case p => (x_1, ..., x_N))
- // val/var x_1 = t$._1
- // ...
- // val/var x_N = t$._N
-
- val rhsUnchecked = gen.mkUnchecked(rhs)
-
- // TODO: clean this up -- there is too much information packked into makePatDef's `pat` argument
- // when it's a simple identifier (case Some((name, tpt)) -- above),
- // pat should have the type ascription that was specified by the user
- // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
- // i.e., this must hold: pat1 match { case Typed(expr, tp) => assert(expr.isInstanceOf[Ident]) case _ => }
- // if we encounter such an erroneous pattern, we strip off the type ascription from pat and propagate the type information to rhs
- val (pat1, rhs1) = patvarTransformer.transform(pat) match {
- // move the Typed ascription to the rhs
- case Typed(expr, tpt) if !expr.isInstanceOf[Ident] =>
- val rhsTypedUnchecked =
- if (tpt.isEmpty) rhsUnchecked
- else Typed(rhsUnchecked, tpt) setPos (rhs.pos union tpt.pos)
- (expr, rhsTypedUnchecked)
- case ok =>
- (ok, rhsUnchecked)
- }
- val vars = getVariables(pat1)
- val matchExpr = atPos((pat1.pos union rhs.pos).makeTransparent) {
- Match(
- rhs1,
- List(
- atPos(pat1.pos) {
- CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, true))
- }
- ))
- }
- vars match {
- case List((vname, tpt, pos)) =>
- List(atPos(pat.pos union pos union rhs.pos) {
- ValDef(mods, vname.toTermName, tpt, matchExpr)
- })
- case _ =>
- val tmp = freshTermName()
- val firstDef =
- atPos(matchExpr.pos) {
- ValDef(Modifiers(PrivateLocal | SYNTHETIC | (mods.flags & LAZY)),
- tmp, TypeTree(), matchExpr)
- }
- var cnt = 0
- val restDefs = for ((vname, tpt, pos) <- vars) yield atPos(pos) {
- cnt += 1
- ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), newTermName("_" + cnt)))
- }
- firstDef :: restDefs
- }
- }
-
/** Create a tree representing the function type (argtpes) => restpe */
- def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
- AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
+ def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree = gen.mkFunctionTypeTree(argtpes, restpe)
/** Append implicit parameter section if `contextBounds` nonempty */
def addEvidenceParams(owner: Name, vparamss: List[List[ValDef]], contextBounds: List[Tree]): List[List[ValDef]] = {
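A runnable sketch of the two shapes `makeBinop` builds, using List's right-associative `::` as the example; `x$1` stands in for the fresh SYNTHETIC | ARTIFACT temporary:

object BinopDesugarSketch {
  def main(args: Array[String]): Unit = {
    val a = 1
    val b = List(2, 3)

    // Left-associative `a + b` simply parses as a.+(b).
    // Right-associative `a :: b` evaluates the left operand first, binds it to a fresh
    // temporary, and then applies the method on the right operand:
    val viaOperator  = a :: b
    val viaDesugared = { val x$1 = a; b.::(x$1) }

    assert(viaOperator == viaDesugared)   // List(1, 2, 3) both ways
  }
}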
diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala
new file mode 100644
index 0000000000..82dce9f1f8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala
@@ -0,0 +1,211 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala.tools.nsc.ast.parser.xml
+
+/** This is not a public trait - it contains common code shared
+ * between the library level XML parser and the compiler's.
+ * All members should be accessed through those.
+ */
+private[scala] trait MarkupParserCommon {
+ import Utility._
+ import scala.reflect.internal.Chars.SU
+
+ protected def unreachable = scala.sys.error("Cannot be reached.")
+
+ type PositionType // Int, Position
+ type ElementType // NodeSeq, Tree
+ type NamespaceType // NamespaceBinding, Any
+ type AttributesType // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
+
+ def mkAttributes(name: String, pscope: NamespaceType): AttributesType
+ def mkProcInstr(position: PositionType, name: String, text: String): ElementType
+
+ /** parse a start or empty tag.
+ * [40] STag ::= '<' Name { S Attribute } [S]
+ * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
+ */
+ protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
+ val name = xName
+ xSpaceOpt()
+
+ (name, mkAttributes(name, pscope))
+ }
+
+ /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
+ *
+ * see [15]
+ */
+ def xProcInstr: ElementType = {
+ val n = xName
+ xSpaceOpt()
+ xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
+ }
+
+ /** attribute value, terminated by either `'` or `"`. value may not contain `<`.
+ @param endCh either `'` or `"`
+ */
+ def xAttributeValue(endCh: Char): String = {
+ val buf = new StringBuilder
+ while (ch != endCh) {
+ // well-formedness constraint
+ if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
+ else if (ch == SU) truncatedError("")
+ else buf append ch_returning_nextch
+ }
+ ch_returning_nextch
+ // @todo: normalize attribute value
+ buf.toString
+ }
+
+ /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
+ */
+ def xEndTag(startName: String) {
+ xToken('/')
+ if (xName != startName)
+ errorNoEnd(startName)
+
+ xSpaceOpt()
+ xToken('>')
+ }
+
+ /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
+ * Name ::= (Letter | '_') (NameChar)*
+ *
+ * see [5] of XML 1.0 specification
+ *
+ * pre-condition: ch != ':' // assured by definition of XMLSTART token
+ * post-condition: name does neither start, nor end in ':'
+ */
+ def xName: String = {
+ if (ch == SU)
+ truncatedError("")
+ else if (!isNameStart(ch))
+ return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
+
+ val buf = new StringBuilder
+
+ do buf append ch_returning_nextch
+ while (isNameChar(ch))
+
+ if (buf.last == ':') {
+ reportSyntaxError( "name cannot end in ':'" )
+ buf.toString dropRight 1
+ }
+ else buf.toString
+ }
+
+ /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+ * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ *
+ * see [66]
+ */
+ def xCharRef(ch: () => Char, nextch: () => Unit): String =
+ Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
+
+ def xCharRef(it: Iterator[Char]): String = {
+ var c = it.next()
+ Utility.parseCharRef(() => c, () => { c = it.next() }, reportSyntaxError _, truncatedError _)
+ }
+
+ def xCharRef: String = xCharRef(() => ch, () => nextch())
+
+ /** Create a lookahead reader which does not influence the input */
+ def lookahead(): BufferedIterator[Char]
+
+ /** The library and compiler parsers had the interesting distinction of
+ * different behavior for nextch (a function for which there are a total
+ * of two plausible behaviors, so we know the design space was fully
+ * explored.) One of them returned the value of nextch before the increment
+ * and one of them the new value. So to unify code we have to at least
+ * temporarily abstract over the nextchs.
+ */
+ def ch: Char
+ def nextch(): Unit
+ protected def ch_returning_nextch: Char
+ def eof: Boolean
+
+ // def handle: HandleType
+ var tmppos: PositionType
+
+ def xHandleError(that: Char, msg: String): Unit
+ def reportSyntaxError(str: String): Unit
+ def reportSyntaxError(pos: Int, str: String): Unit
+
+ def truncatedError(msg: String): Nothing
+ def errorNoEnd(tag: String): Nothing
+
+ protected def errorAndResult[T](msg: String, x: T): T = {
+ reportSyntaxError(msg)
+ x
+ }
+
+ def xToken(that: Char) {
+ if (ch == that) nextch()
+ else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
+ }
+ def xToken(that: Seq[Char]) { that foreach xToken }
+
+ /** scan [S] '=' [S]*/
+ def xEQ() = { xSpaceOpt(); xToken('='); xSpaceOpt() }
+
+ /** skip optional space S? */
+ def xSpaceOpt() = while (isSpace(ch) && !eof) nextch()
+
+ /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
+ def xSpace() =
+ if (isSpace(ch)) { nextch(); xSpaceOpt() }
+ else xHandleError(ch, "whitespace expected")
+
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x); x }
+
+ /** Execute body with a variable saved and restored after execution */
+ def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
+ val saved = getter
+ try body
+ finally setter(saved)
+ }
+
+ /** Take characters from input stream until given String "until"
+ * is seen. Once seen, the accumulated characters are passed
+ * along with the current Position to the supplied handler function.
+ */
+ protected def xTakeUntil[T](
+ handler: (PositionType, String) => T,
+ positioner: () => PositionType,
+ until: String): T =
+ {
+ val sb = new StringBuilder
+ val head = until.head
+ val rest = until.tail
+
+ while (true) {
+ if (ch == head && peek(rest))
+ return handler(positioner(), sb.toString)
+ else if (ch == SU)
+ truncatedError("") // throws TruncatedXMLControl in compiler
+
+ sb append ch
+ nextch()
+ }
+ unreachable
+ }
+
+ /** Create a non-destructive lookahead reader and see if the head
+ * of the input would match the given String. If yes, return true
+ * and drop the entire String from input; if no, return false
+ * and leave input unchanged.
+ */
+ private def peek(lookingFor: String): Boolean =
+ (lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
+ // drop the chars from the real reader (all lookahead + orig)
+ (0 to lookingFor.length) foreach (_ => nextch())
+ true
+ }
+}
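A simplified, self-contained sketch of what `xTakeUntil` and `peek` accomplish together, with the parser's character stream replaced by a plain String:

object TakeUntilSketch {
  // Scan up to the first occurrence of `until`, returning the accumulated text and the rest
  // of the input with the terminator consumed; None stands in for the truncatedError path.
  def takeUntil(input: String, until: String): Option[(String, String)] =
    input.indexOf(until) match {
      case -1 => None
      case i  => Some((input.substring(0, i), input.substring(i + until.length)))
    }

  // takeUntil("echo hello?>tail", "?>") == Some(("echo hello", "tail"))
  // takeUntil("unterminated", "?>")     == None
}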
diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala
new file mode 100755
index 0000000000..6dcfa173df
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala
@@ -0,0 +1,163 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala.tools.nsc.ast.parser.xml
+
+import scala.collection.mutable
+
+
+/**
+ * The `Utility` object provides utility functions for processing instances
+ * of bound and not bound XML classes, as well as escaping text nodes.
+ *
+ * @author Burak Emir
+ */
+object Utility {
+ import scala.reflect.internal.Chars.SU
+
+ private val unescMap = Map(
+ "lt" -> '<',
+ "gt" -> '>',
+ "amp" -> '&',
+ "quot" -> '"',
+ "apos" -> '\''
+ )
+
+ /**
+ * Appends unescaped string to `s`, `amp` becomes `&amp;`,
+ * `lt` becomes `&lt;` etc..
+ *
+ * @return `'''null'''` if `ref` was not a predefined entity.
+ */
+ private final def unescape(ref: String, s: StringBuilder): StringBuilder =
+ ((unescMap get ref) map (s append _)).orNull
+
+ def parseAttributeValue[T](value: String, text: String => T, entityRef: String => T): List[T] = {
+ val sb = new StringBuilder
+ var rfb: StringBuilder = null
+ val nb = new mutable.ListBuffer[T]()
+
+ val it = value.iterator
+ while (it.hasNext) {
+ var c = it.next()
+ // entity! flush buffer into text node
+ if (c == '&') {
+ c = it.next()
+ if (c == '#') {
+ c = it.next()
+ val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
+ sb.append(theChar)
+ }
+ else {
+ if (rfb eq null) rfb = new StringBuilder()
+ rfb append c
+ c = it.next()
+ while (c != ';') {
+ rfb.append(c)
+ c = it.next()
+ }
+ val ref = rfb.toString()
+ rfb.clear()
+ unescape(ref,sb) match {
+ case null =>
+ if (!sb.isEmpty) { // flush buffer
+ nb += text(sb.toString())
+ sb.clear()
+ }
+ nb += entityRef(ref) // add entityref
+ case _ =>
+ }
+ }
+ }
+ else sb append c
+ }
+
+ if(!sb.isEmpty) // flush buffer
+ nb += text(sb.toString())
+
+ nb.toList
+ }
+
+ /**
+ * {{{
+ * CharRef ::= "&amp;#" '0'..'9' {'0'..'9'} ";"
+ * | "&amp;#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ * }}}
+ * See [66]
+ */
+ def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
+ val hex = (ch() == 'x') && { nextch(); true }
+ val base = if (hex) 16 else 10
+ var i = 0
+ while (ch() != ';') {
+ ch() match {
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ i = i * base + ch().asDigit
+ case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
+ | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
+ if (! hex)
+ reportSyntaxError("hex char not allowed in decimal char ref\n" +
+ "Did you mean to write &#x ?")
+ else
+ i = i * base + ch().asDigit
+ case SU =>
+ reportTruncatedError("")
+ case _ =>
+ reportSyntaxError("character '" + ch() + "' not allowed in char ref\n")
+ }
+ nextch()
+ }
+ new String(Array(i), 0, 1)
+ }
+
+ /** {{{
+ * (#x20 | #x9 | #xD | #xA)
+ * }}} */
+ final def isSpace(ch: Char): Boolean = ch match {
+ case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
+ case _ => false
+ }
+
+ /** {{{
+ * NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
+ * | CombiningChar | Extender
+ * }}}
+ * See [4] and Appendix B of XML 1.0 specification.
+ */
+ def isNameChar(ch: Char) = {
+ import java.lang.Character._
+ // The constants represent groups Mc, Me, Mn, Lm, and Nd.
+
+ isNameStart(ch) || (getType(ch).toByte match {
+ case COMBINING_SPACING_MARK |
+ ENCLOSING_MARK | NON_SPACING_MARK |
+ MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
+ case _ => ".-:" contains ch
+ })
+ }
+
+ /** {{{
+ * NameStart ::= ( Letter | '_' )
+ * }}}
+ * where Letter means in one of the Unicode general
+ * categories `{ Ll, Lu, Lo, Lt, Nl }`.
+ *
+ * We do not allow a name to start with `:`.
+ * See [3] and Appendix B of XML 1.0 specification
+ */
+ def isNameStart(ch: Char) = {
+ import java.lang.Character._
+
+ getType(ch).toByte match {
+ case LOWERCASE_LETTER |
+ UPPERCASE_LETTER | OTHER_LETTER |
+ TITLECASE_LETTER | LETTER_NUMBER => true
+ case _ => ch == '_'
+ }
+ }
+}
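One way a caller might exercise the new callback-based `parseAttributeValue`; the `Either` tags are only an illustration of supplying the two constructors (the compiler passes tree builders instead):

object ParseAttributeValueSketch {
  import scala.tools.nsc.ast.parser.xml.Utility

  // Text runs become Left (with predefined entities such as &amp; already unescaped and
  // character references such as &#x41; decoded); unknown entity references become Right.
  val pieces: List[Either[String, String]] =
    Utility.parseAttributeValue[Either[String, String]](
      "a &amp; b &myref; &#x41;",
      text => Left(text),
      ref  => Right(ref))

  // pieces == List(Left("a & b "), Right("myref"), Left(" A"))
}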
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index fc5d4372c5..32b5a98b98 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -7,50 +7,34 @@ package scala.tools.nsc
package backend
import io.AbstractFile
-import util.{ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath}
-import util.ClassPath.{ JavaContext, DefaultJavaContext }
+import util.{ClassPath,MergedClassPath,DeltaClassPath}
import scala.tools.util.PathResolver
trait JavaPlatform extends Platform {
+ val global: Global
+ override val symbolTable: global.type = global
import global._
import definitions._
- type BinaryRepr = AbstractFile
+ private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
- private var currentClassPath: Option[MergedClassPath[BinaryRepr]] = None
-
- def classPath: ClassPath[BinaryRepr] = {
+ def classPath: ClassPath[AbstractFile] = {
if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
currentClassPath.get
}
/** Update classpath with a substituted subentry */
- def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
+ def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) =
currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
- def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
- // [Martin] Why do we need a cast here?
- // The problem is that we cannot specify at this point that global.platform should be of type JavaPlatform.
- // So we cannot infer that global.platform.BinaryRepr is AbstractFile.
- // Ideally, we should be able to write at the top of the JavaPlatform trait:
- // val global: Global { val platform: JavaPlatform }
- // import global._
- // Right now, this does nothing because the concrete definition of platform in Global
- // replaces the tighter abstract definition here. If we had DOT typing rules, the two
- // types would be conjoined and everything would work out. Yet another reason to push for DOT.
-
- private def depAnalysisPhase =
- if (settings.make.isDefault) Nil
- else List(dependencyAnalysis)
-
private def classEmitPhase =
- if (settings.target.value == "jvm-1.5-fjbg") genJVM
+ if (settings.isBCodeActive) genBCode
else genASM
def platformPhases = List(
flatten, // get rid of inner classes
classEmitPhase // generate .class files
- ) ++ depAnalysisPhase
+ )
lazy val externalEquals = getDecl(BoxesRunTimeClass, nme.equals_)
lazy val externalEqualsNumNum = getDecl(BoxesRunTimeClass, nme.equalsNumNum)
@@ -71,10 +55,7 @@ trait JavaPlatform extends Platform {
(sym isNonBottomSubClass BoxedBooleanClass)
}
- def newClassLoader(bin: AbstractFile): loaders.SymbolLoader =
- new loaders.ClassfileLoader(bin)
-
- def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = true
+ def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true
def needCompile(bin: AbstractFile, src: AbstractFile) =
src.lastModified >= bin.lastModified
diff --git a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
deleted file mode 100644
index 4493685b52..0000000000
--- a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package backend
-
-import ch.epfl.lamp.compiler.{ msil => msillib }
-import util.{ ClassPath, MsilClassPath }
-import msil.GenMSIL
-import io.{ AbstractFile, MsilFile }
-
-trait MSILPlatform extends Platform {
- import global._
- import definitions.{ ComparatorClass, BoxedNumberClass, getMember }
-
- type BinaryRepr = MsilFile
-
- if (settings.verbose.value)
- inform("[AssemRefs = " + settings.assemrefs.value + "]")
-
- // phaseName = "msil"
- object genMSIL extends {
- val global: MSILPlatform.this.global.type = MSILPlatform.this.global
- val runsAfter = List[String]("dce")
- val runsRightAfter = None
- } with GenMSIL
-
- lazy val classPath = MsilClassPath.fromSettings(settings)
- def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
- // See discussion in JavaPlatForm for why we need a cast here.
-
- /** Update classpath with a substituted subentry */
- def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
- throw new UnsupportedOperationException("classpath invalidations not supported on MSIL")
-
- def platformPhases = List(
- genMSIL // generate .msil files
- )
-
- lazy val externalEquals = getMember(ComparatorClass.companionModule, nme.equals_)
- def isMaybeBoxed(sym: Symbol) = sym isNonBottomSubClass BoxedNumberClass
-
- def newClassLoader(bin: MsilFile): loaders.SymbolLoader = new loaders.MsilFileLoader(bin)
-
- /**
- * Tells whether a class should be loaded and entered into the package
- * scope. On .NET, this method returns `false` for all synthetic classes
- * (anonymous classes, implementation classes, module classes), their
- * symtab is encoded in the pickle of another class.
- */
- def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = {
- if (cls.binary.isDefined) {
- val typ = cls.binary.get.msilType
- if (typ.IsDefined(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)) {
- val attrs = typ.GetCustomAttributes(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)
- assert(attrs.length == 1, attrs.length)
- val a = attrs(0).asInstanceOf[msillib.Attribute]
- // symtab_constr takes a byte array argument (the pickle), i.e. typ has a pickle.
- // otherwise, symtab_default_constr was used, which marks typ as scala-synthetic.
- a.getConstructor() == loaders.clrTypes.SYMTAB_CONSTR
- } else true // always load non-scala types
- } else true // always load source
- }
-
- def needCompile(bin: MsilFile, src: AbstractFile) =
- false // always use compiled file on .net
-}
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index e2b22c06d7..499f8a9290 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -12,20 +12,14 @@ import io.AbstractFile
/** The platform dependent pieces of Global.
*/
trait Platform {
- val global: Global
- import global._
-
- /** The binary classfile representation type */
- type BinaryRepr
+ val symbolTable: symtab.SymbolTable
+ import symbolTable._
/** The compiler classpath. */
- def classPath: ClassPath[BinaryRepr]
-
- /** The root symbol loader. */
- def rootLoader: LazyType
+ def classPath: ClassPath[AbstractFile]
/** Update classpath with a substitution that maps entries to entries */
- def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]])
+ def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]])
/** Any platform-specific phases. */
def platformPhases: List[SubComponent]
@@ -36,16 +30,13 @@ trait Platform {
/** The various ways a boxed primitive might materialize at runtime. */
def isMaybeBoxed(sym: Symbol): Boolean
- /** Create a new class loader to load class file `bin` */
- def newClassLoader(bin: BinaryRepr): loaders.SymbolLoader
-
/**
* Tells whether a class should be loaded and entered into the package
* scope. On .NET, this method returns `false` for all synthetic classes
* (anonymous classes, implementation classes, module classes), their
* symtab is encoded in the pickle of another class.
*/
- def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean
+ def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean
/**
* Tells whether a class with both a binary and a source representation
@@ -53,6 +44,6 @@ trait Platform {
* on the JVM similar to javac, i.e. if the source file is newer than the classfile,
* a re-compile is triggered. On .NET by contrast classfiles always take precedence.
*/
- def needCompile(bin: BinaryRepr, src: AbstractFile): Boolean
+ def needCompile(bin: AbstractFile, src: AbstractFile): Boolean
}
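A minimal before/after sketch of the shape change in this trait, using stand-in types rather than the real compiler classes: with the .NET backend gone, the abstract `BinaryRepr` member has one instantiation left and collapses to `AbstractFile`:

object PlatformShapeSketch {
  trait AbstractFile                                   // stand-in for scala.reflect.io.AbstractFile

  trait Before {
    type BinaryRepr                                    // AbstractFile on the JVM, MsilFile on .NET
    def needCompile(bin: BinaryRepr, src: AbstractFile): Boolean
  }

  trait After {
    def needCompile(bin: AbstractFile, src: AbstractFile): Boolean
  }
}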
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 8cbb5bc980..b8ddb65de9 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -3,10 +3,10 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
-import scala.tools.nsc.backend.icode._
import scala.collection.{ mutable, immutable }
/** Scala primitive operations are represented as methods in `Any` and
@@ -442,15 +442,17 @@ abstract class ScalaPrimitives {
}
def addPrimitives(cls: Symbol, method: Name, code: Int) {
- val tpe = cls.info
- val sym = tpe.member(method)
- if (sym == NoSymbol)
- inform("Unknown primitive method " + cls + "." + method)
- for (s <- sym.alternatives)
- addPrimitive(
- s,
- if (code == ADD && s.info.paramTypes.head == definitions.StringClass.tpe) CONCAT
- else code)
+ val alts = (cls.info member method).alternatives
+ if (alts.isEmpty)
+ inform(s"Unknown primitive method $cls.$method")
+ else alts foreach (s =>
+ addPrimitive(s,
+ s.info.paramTypes match {
+ case tp :: _ if code == ADD && tp =:= StringTpe => CONCAT
+ case _ => code
+ }
+ )
+ )
}
def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D)
@@ -495,8 +497,8 @@ abstract class ScalaPrimitives {
def isArraySet(code: Int): Boolean = code match {
case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET |
IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET |
- OARRAY_SET | UPDATE => true;
- case _ => false;
+ OARRAY_SET | UPDATE => true
+ case _ => false
}
/** Check whether the given code is a comparison operator */
@@ -515,7 +517,7 @@ abstract class ScalaPrimitives {
DIV | MOD => true; // binary
case OR | XOR | AND |
LSL | LSR | ASR => true; // bitwise
- case _ => false;
+ case _ => false
}
def isLogicalOp(code: Int): Boolean = code match {
@@ -565,7 +567,7 @@ abstract class ScalaPrimitives {
import definitions._
val code = getPrimitive(fun)
- def elementType = beforeTyper {
+ def elementType = enteringTyper {
val arrayParent = tpe :: tpe.parents collectFirst {
case TypeRef(_, ArrayClass, elem :: Nil) => elem
}
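A tiny sketch of the per-overload decision the rewritten `addPrimitives` makes; the numeric codes are hypothetical, while the real code compares against the ADD/CONCAT constants and tests the first parameter type with `=:= StringTpe`:

object PrimitiveCodeSketch {
  val ADD = 1; val CONCAT = 2                          // hypothetical code values

  // `+` applied to a String-typed first parameter is string concatenation, not numeric addition.
  def codeFor(requested: Int, firstParamIsString: Boolean): Int =
    if (requested == ADD && firstParamIsString) CONCAT else requested
}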
diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
index 798a80ea37..45ca39fee4 100644
--- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
+++ b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
@@ -6,8 +6,7 @@
package scala.tools.nsc
package backend
-import scala.tools.nsc.ast._
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
/**
* Simple implementation of a worklist algorithm. A processing
@@ -32,8 +31,6 @@ trait WorklistAlgorithm {
* Run the iterative algorithm until the worklist remains empty.
* The initializer is run once before the loop starts and should
* initialize the worklist.
- *
- * @param initWorklist ...
*/
def run(initWorklist: => Unit) = {
initWorklist
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index d50d4cd125..f9551697d2 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -8,8 +8,7 @@ package backend
package icode
import scala.collection.{ mutable, immutable }
-import mutable.{ ListBuffer, ArrayBuffer }
-import scala.reflect.internal.util.{ Position, NoPosition }
+import mutable.ListBuffer
import backend.icode.analysis.ProgramPoint
import scala.language.postfixOps
@@ -17,8 +16,7 @@ trait BasicBlocks {
self: ICodes =>
import opcodes._
- import global.{ ifDebug, settings, log, nme }
- import nme.isExceptionResultName
+ import global._
/** Override Array creation for efficiency (to not go through reflection). */
private implicit val instructionTag: scala.reflect.ClassTag[Instruction] = new scala.reflect.ClassTag[Instruction] {
@@ -38,7 +36,7 @@ trait BasicBlocks {
import BBFlags._
- def code = method.code
+ def code = if (method eq null) NoCode else method.code
private final class SuccessorList() {
private var successors: List[BasicBlock] = Nil
@@ -68,10 +66,10 @@ trait BasicBlocks {
addBlock(scratchBlocks.head)
scratchBlocks = scratchBlocks.tail
}
- /** Return a list of successors for 'b' that come from exception handlers
- * covering b's (non-exceptional) successors. These exception handlers
- * might not cover 'b' itself. This situation corresponds to an
- * exception being thrown as the first thing of one of b's successors.
+ /* Return a list of successors for 'b' that come from exception handlers
+ * covering b's (non-exceptional) successors. These exception handlers
+ * might not cover 'b' itself. This situation corresponds to an
+ * exception being thrown as the first thing of one of b's successors.
*/
while (scratchHandlers ne Nil) {
val handler = scratchHandlers.head
@@ -122,7 +120,7 @@ trait BasicBlocks {
def closed: Boolean = hasFlag(CLOSED)
def closed_=(b: Boolean) = if (b) setFlag(CLOSED) else resetFlag(CLOSED)
- /** When set, the <code>emit</code> methods will be ignored. */
+ /** When set, the `emit` methods will be ignored. */
def ignore: Boolean = hasFlag(IGNORING)
def ignore_=(b: Boolean) = if (b) setFlag(IGNORING) else resetFlag(IGNORING)
@@ -260,13 +258,9 @@ trait BasicBlocks {
}
}
- /** Replaces <code>oldInstr</code> with <code>is</code>. It does not update
+ /** Replaces `oldInstr` with `is`. It does not update
* the position field in the newly inserted instructions, so it behaves
* differently than the one-instruction versions of this function.
- *
- * @param iold ..
- * @param is ..
- * @return ..
*/
def replaceInstruction(oldInstr: Instruction, is: List[Instruction]): Boolean = {
assert(closed, "Instructions can be replaced only after the basic block is closed")
@@ -280,17 +274,7 @@ trait BasicBlocks {
}
}
- /** Insert instructions in 'is' immediately after index 'idx'. */
- def insertAfter(idx: Int, is: List[Instruction]) {
- assert(closed, "Instructions can be replaced only after the basic block is closed")
-
- instrs = instrs.patch(idx + 1, is, 0)
- code.touched = true
- }
-
/** Removes instructions found at the given positions.
- *
- * @param positions ...
*/
def removeInstructionsAt(positions: Int*) {
assert(closed, this)
@@ -311,8 +295,6 @@ trait BasicBlocks {
}
/** Replaces all instructions found in the map.
- *
- * @param map ...
*/
def subst(map: Map[Instruction, Instruction]): Unit =
if (!closed)
@@ -344,21 +326,17 @@ trait BasicBlocks {
* is closed, which sets the DIRTYSUCCS flag.
*/
def emit(instr: Instruction, pos: Position) {
-/* if (closed) {
- print()
- Console.println("trying to emit: " + instr)
- } */
assert(!closed || ignore, this)
if (ignore) {
- if (settings.debug.value) {
- /** Trying to pin down what it's likely to see after a block has been
- * put into ignore mode so we hear about it if there's a problem.
+ if (settings.debug) {
+ /* Trying to pin down what it's likely to see after a block has been
+ * put into ignore mode so we hear about it if there's a problem.
*/
instr match {
- case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_) => // ok
- case STORE_LOCAL(local) if isExceptionResultName(local.sym.name) => // ok
- case x => log("Ignoring instruction, possibly at our peril, at " + pos + ": " + x)
+ case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_) => // ok
+ case STORE_LOCAL(local) if nme.isExceptionResultName(local.sym.name) => // ok
+ case x => log("Ignoring instruction, possibly at our peril, at " + pos + ": " + x)
}
}
}
@@ -403,7 +381,6 @@ trait BasicBlocks {
/** Close the block */
def close() {
assert(!closed || ignore, this)
- assert(instructionList.nonEmpty, "Empty block: " + this)
if (ignore && closed) { // redundant `ignore &&` for clarity -- we should never be in state `!ignore && closed`
// not doing anything to this block is important...
// because the else branch reverses innocent blocks, which is wrong when they're in ignore mode (and closed)
@@ -413,9 +390,38 @@ trait BasicBlocks {
setFlag(DIRTYSUCCS)
instructionList = instructionList.reverse
instrs = instructionList.toArray
+ if (instructionList.isEmpty) {
+ debuglog(s"Removing empty block $this")
+ code removeBlock this
+ }
+ }
+ }
+
+ /**
+ * If cond is true, closes this block, enters ignore mode, and removes the block from
+ * the enclosing code's list of blocks. Used to allow a block to be started and then
+ * cancelled when it is discovered to be unreachable.
+ */
+ def killIf(cond: Boolean) {
+ if (!settings.YdisableUnreachablePrevention && cond) {
+ debuglog(s"Killing block $this")
+ assert(instructionList.isEmpty, s"Killing a non empty block $this")
+ // only checked under debug because fetching predecessor list is moderately expensive
+ if (settings.debug)
+ assert(predecessors.isEmpty, s"Killing block $this which is referred to from ${predecessors.mkString}")
+
+ close()
+ enterIgnoreMode()
}
}
+ /**
+ * Same as killIf but with the logic of the condition reversed
+ */
+ def killUnless(cond: Boolean) {
+ this killIf !cond
+ }
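
A toy model of how these helpers are used at their call sites later in this patch: open a follow-on block optimistically, then cancel it if its only predecessor turned out to be in ignore mode. Block here is a stand-in for illustration, not the compiler's BasicBlock:

    // Stand-in type; only the kill/ignore bookkeeping is modelled.
    final class Block(val label: Int) {
      var ignore = false                         // block is cancelled / unreachable
      private var instrs = List.empty[String]

      def emit(i: String): Unit = if (!ignore) instrs ::= i

      def killIf(cond: Boolean): Unit =
        if (cond) {
          assert(instrs.isEmpty, s"killing a non-empty block $label")
          ignore = true
        }

      def killUnless(cond: Boolean): Unit = killIf(!cond)
    }

    object KillDemo extends App {
      val pred = new Block(1)
      pred.ignore = true                         // predecessor ended in RETURN/THROW
      val after = new Block(2)
      after killUnless !pred.ignore              // follow-on block is unreachable, cancel it
      after.emit("LOAD_LOCAL x")                 // no-op: the block was killed
      println(after.ignore)                      // true
    }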
+
def open() {
assert(closed, this)
closed = false
@@ -441,20 +447,11 @@ trait BasicBlocks {
ignore = true
}
- def exitIgnoreMode() {
- assert(ignore, "Exit ignore mode when not in ignore mode: " + this)
- ignore = false
- }
-
/** Return the last instruction of this basic block. */
def lastInstruction =
if (closed) instrs(instrs.length - 1)
else instructionList.head
- def firstInstruction =
- if (closed) instrs(0)
- else instructionList.last
-
def exceptionSuccessors: List[BasicBlock] =
exceptionSuccessorsForBlock(this)
@@ -474,16 +471,17 @@ trait BasicBlocks {
def directSuccessors: List[BasicBlock] =
if (isEmpty) Nil else lastInstruction match {
- case JUMP(whereto) => whereto :: Nil
- case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
- case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
- case SWITCH(_, labels) => labels
- case RETURN(_) => Nil
- case THROW(_) => Nil
- case _ =>
+ case JUMP(whereto) => whereto :: Nil
+ case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
+ case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
+ case SWITCH(_, labels) => labels
+ case RETURN(_) => Nil
+ case THROW(_) => Nil
+ case _ =>
if (closed)
- dumpClassesAndAbort("The last instruction is not a control flow instruction: " + lastInstruction)
- else Nil
+ devWarning(s"$lastInstruction/${lastInstruction.getClass.getName} is not a control flow instruction")
+
+ Nil
}
/** Returns the predecessors of this block. */
@@ -502,17 +500,6 @@ trait BasicBlocks {
override def hashCode = label * 41 + code.hashCode
- // Instead of it, rather use a printer
- def print() { print(java.lang.System.out) }
-
- def print(out: java.io.PrintStream) {
- out.println("block #"+label+" :")
- foreach(i => out.println(" " + i))
- out.print("Successors: ")
- successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString()))
- out.println()
- }
-
private def succString = if (successors.isEmpty) "[S: N/A]" else successors.distinct.mkString("[S: ", ", ", "]")
private def predString = if (predecessors.isEmpty) "[P: N/A]" else predecessors.distinct.mkString("[P: ", ", ", "]")
@@ -532,18 +519,6 @@ trait BasicBlocks {
}
object BBFlags {
- val flagMap = Map[Int, String](
- LOOP_HEADER -> "loopheader",
- IGNORING -> "ignore",
- EX_HEADER -> "exheader",
- CLOSED -> "closed",
- DIRTYSUCCS -> "dirtysuccs",
- DIRTYPREDS -> "dirtypreds"
- )
- def flagsToString(flags: Int) = {
- flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
- }
-
/** This block is a loop header (was translated from a while). */
final val LOOP_HEADER = (1 << 0)
@@ -561,4 +536,16 @@ object BBFlags {
/** Code has been changed, recompute predecessors. */
final val DIRTYPREDS = (1 << 5)
+
+ val flagMap = Map[Int, String](
+ LOOP_HEADER -> "loopheader",
+ IGNORING -> "ignore",
+ EX_HEADER -> "exheader",
+ CLOSED -> "closed",
+ DIRTYSUCCS -> "dirtysuccs",
+ DIRTYPREDS -> "dirtypreds"
+ )
+ def flagsToString(flags: Int) = {
+ flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
+ }
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
index 0856f2f09d..8bcdb6dbd2 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
@@ -3,10 +3,8 @@
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
class CheckerException(s: String) extends Exception(s)
-
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index 2cebf7ad99..7243264773 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package backend
package icode
-import scala.collection.{ mutable, immutable }
+import scala.collection.immutable
/**
* Exception handlers are pieces of code that `handle` exceptions on
@@ -24,14 +24,11 @@ trait ExceptionHandlers {
class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) {
def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls
- private var _startBlock: BasicBlock = _;
- var finalizer: Finalizer = _;
-
- /** Needed for the MSIL backend. */
- var resultKind: TypeKind = _;
+ private var _startBlock: BasicBlock = _
+ var finalizer: Finalizer = _
def setStartBlock(b: BasicBlock) = {
- _startBlock = b;
+ _startBlock = b
b.exceptionHandlerStart = true
}
def startBlock = _startBlock
@@ -49,11 +46,11 @@ trait ExceptionHandlers {
/** The body of this exception handler. May contain 'dead' blocks (which will not
* make it into generated code because linearizers may not include them) */
- var blocks: List[BasicBlock] = Nil;
+ var blocks: List[BasicBlock] = Nil
- def addBlock(b: BasicBlock): Unit = blocks = b :: blocks;
+ def addBlock(b: BasicBlock): Unit = blocks = b :: blocks
- override def toString() = "exh_" + label + "(" + cls.simpleName + ")";
+ override def toString() = "exh_" + label + "(" + cls.simpleName + ")"
/** A standard copy constructor */
def this(other: ExceptionHandler) = {
@@ -71,10 +68,4 @@ trait ExceptionHandlers {
override def toString() = "finalizer_" + label
override def dup: Finalizer = new Finalizer(method, label, pos)
}
-
- object NoFinalizer extends Finalizer(null, newTermNameCached("<no finalizer>"), NoPosition) {
- override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block.");
- override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block.");
- override def dup = this
- }
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 71a5b85271..1332d01dbd 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -4,7 +4,8 @@
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
@@ -13,10 +14,8 @@ import scala.collection.mutable.{ ListBuffer, Buffer }
import scala.tools.nsc.symtab._
import scala.annotation.switch
import PartialFunction._
-import scala.language.postfixOps
-/** This class ...
- *
+/**
* @author Iulian Dragos
* @version 1.0
*/
@@ -24,12 +23,7 @@ abstract class GenICode extends SubComponent {
import global._
import icodes._
import icodes.opcodes._
- import definitions.{
- ArrayClass, ObjectClass, ThrowableClass, StringClass, StringModule, AnyRefClass,
- Object_equals, Object_isInstanceOf, Object_asInstanceOf, ScalaRunTimeModule,
- BoxedNumberClass, BoxedCharacterClass,
- getMember
- }
+ import definitions._
import scalaPrimitives.{
isArrayOp, isComparisonOp, isLogicalOp,
isUniversalEqualityOp, isReferenceEqualityOp
@@ -41,7 +35,7 @@ abstract class GenICode extends SubComponent {
override def newPhase(prev: Phase) = new ICodePhase(prev)
@inline private def debugassert(cond: => Boolean, msg: => Any) {
- if (settings.debug.value)
+ if (settings.debug)
assert(cond, msg)
}
@@ -52,14 +46,15 @@ abstract class GenICode extends SubComponent {
var unit: CompilationUnit = NoCompilationUnit
override def run() {
- scalaPrimitives.init
+ scalaPrimitives.init()
classes.clear()
super.run()
}
override def apply(unit: CompilationUnit): Unit = {
+ if (settings.isBCodeActive) { return }
this.unit = unit
- unit.icode.clear
+ unit.icode.clear()
informProgress("Generating icode for " + unit)
gen(unit.body)
this.unit = NoCompilationUnit
@@ -77,7 +72,7 @@ abstract class GenICode extends SubComponent {
* it is the host class; otherwise the symbol's owner.
*/
def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
- case NoSymbol => log(s"Rejecting $selector as host class for $sym") ; sym.owner
+ case NoSymbol => debuglog(s"Rejecting $selector as host class for $sym") ; sym.owner
case _ => selector.typeSymbol
}
@@ -93,7 +88,7 @@ abstract class GenICode extends SubComponent {
debuglog("Generating class: " + tree.symbol.fullName)
val outerClass = ctx.clazz
ctx setClass (new IClass(tree.symbol) setCompilationUnit unit)
- addClassFields(ctx, tree.symbol);
+ addClassFields(ctx, tree.symbol)
classes += (tree.symbol -> ctx.clazz)
unit.icode += ctx.clazz
gen(impl, ctx)
@@ -121,7 +116,7 @@ abstract class GenICode extends SubComponent {
m.native = m.symbol.hasAnnotation(definitions.NativeAttr)
if (!m.isAbstractMethod && !m.native) {
- ctx1 = genLoad(rhs, ctx1, m.returnType);
+ ctx1 = genLoad(rhs, ctx1, m.returnType)
// reverse the order of the local variables, to match the source-order
m.locals = m.locals.reverse
@@ -131,7 +126,7 @@ abstract class GenICode extends SubComponent {
case Return(_) => ()
case EmptyTree =>
globalError("Concrete method has no definition: " + tree + (
- if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
+ if (settings.debug) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
else "")
)
case _ => if (ctx1.bb.isEmpty)
@@ -139,7 +134,7 @@ abstract class GenICode extends SubComponent {
else
ctx1.bb.closeWith(RETURN(m.returnType))
}
- if (!ctx1.bb.closed) ctx1.bb.close
+ if (!ctx1.bb.closed) ctx1.bb.close()
prune(ctx1.method)
} else
ctx1.method.setCode(NoCode)
@@ -160,18 +155,13 @@ abstract class GenICode extends SubComponent {
* and not produce any value. Use genLoad for expressions which leave
* a value on top of the stack.
*
- * @param tree ...
- * @param ctx ...
* @return a new context. This is necessary for control flow instructions
* which may change the current basic block.
*/
private def genStat(tree: Tree, ctx: Context): Context = tree match {
case Assign(lhs @ Select(_, _), rhs) =>
val isStatic = lhs.symbol.isStaticMember
- var ctx1 = if (isStatic) ctx
- else if (forMSIL && msil_IsValuetypeInstField(lhs.symbol))
- msil_genLoadQualifierAddress(lhs, ctx)
- else genLoadQualifier(lhs, ctx)
+ var ctx1 = if (isStatic) ctx else genLoadQualifier(lhs, ctx)
ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info))
ctx1.bb.emit(STORE_FIELD(lhs.symbol, isStatic), tree.pos)
@@ -188,12 +178,12 @@ abstract class GenICode extends SubComponent {
}
private def genThrow(expr: Tree, ctx: Context): (Context, TypeKind) = {
- require(expr.tpe <:< ThrowableClass.tpe, expr.tpe)
+ require(expr.tpe <:< ThrowableTpe, expr.tpe)
val thrownKind = toTypeKind(expr.tpe)
val ctx1 = genLoad(expr, ctx, thrownKind)
ctx1.bb.emit(THROW(expr.tpe.typeSymbol), expr.pos)
- ctx1.bb.enterIgnoreMode
+ ctx1.bb.enterIgnoreMode()
(ctx1, NothingReference)
}
@@ -231,10 +221,10 @@ abstract class GenICode extends SubComponent {
// binary operation
case rarg :: Nil =>
- resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil);
+ resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil)
if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
assert(resKind.isIntegralType | resKind == BOOL,
- resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1);
+ resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1)
ctx1 = genLoad(larg, ctx1, resKind)
ctx1 = genLoad(rarg,
@@ -264,11 +254,6 @@ abstract class GenICode extends SubComponent {
}
/** Generate primitive array operations.
- *
- * @param tree ...
- * @param ctx ...
- * @param code ...
- * @return ...
*/
private def genArrayOp(tree: Tree, ctx: Context, code: Int, expectedType: TypeKind): (Context, TypeKind) = {
import scalaPrimitives._
@@ -283,14 +268,19 @@ abstract class GenICode extends SubComponent {
if (scalaPrimitives.isArrayGet(code)) {
// load argument on stack
debugassert(args.length == 1,
- "Too many arguments for array get operation: " + tree);
+ "Too many arguments for array get operation: " + tree)
ctx1 = genLoad(args.head, ctx1, INT)
generatedType = elem
ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos)
+ // it's tempting to just drop array loads of type Null instead
+ // of adapting them but array accesses can cause
+ // ArrayIndexOutOfBounds so we can't. Besides, Array[Null]
+ // probably isn't common enough to figure out an optimization
+ adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
}
else if (scalaPrimitives.isArraySet(code)) {
debugassert(args.length == 2,
- "Too many arguments for array set operation: " + tree);
+ "Too many arguments for array set operation: " + tree)
ctx1 = genLoad(args.head, ctx1, INT)
ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
// the following line should really be here, but because of bugs in erasure
@@ -308,11 +298,8 @@ abstract class GenICode extends SubComponent {
}
private def genSynchronized(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val Apply(fun, args) = tree
- val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
+ val monitor = ctx.makeLocal(tree.pos, ObjectTpe, "monitor")
var monitorResult: Local = null
-
- // if the synchronized block returns a result, store it in a local variable. just leaving
- // it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks)
val argTpe = args.head.tpe
val hasResult = expectedType != UNIT
if (hasResult)
@@ -345,7 +332,7 @@ abstract class GenICode extends SubComponent {
MONITOR_EXIT() setPos tree.pos,
THROW(ThrowableClass)
))
- exhCtx.bb.enterIgnoreMode
+ exhCtx.bb.enterIgnoreMode()
exhCtx
})), EmptyTree, tree)
@@ -359,9 +346,9 @@ abstract class GenICode extends SubComponent {
private def genLoadIf(tree: If, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val If(cond, thenp, elsep) = tree
- var thenCtx = ctx.newBlock
- var elseCtx = ctx.newBlock
- val contCtx = ctx.newBlock
+ var thenCtx = ctx.newBlock()
+ var elseCtx = ctx.newBlock()
+ val contCtx = ctx.newBlock()
genCond(cond, ctx, thenCtx, elseCtx)
@@ -386,12 +373,14 @@ abstract class GenICode extends SubComponent {
"I produce UNIT in a context where " + expectedType + " is expected!")
// alternatives may be already closed by a tail-recursive jump
+ val contReachable = !(thenCtx.bb.ignore && elseCtx.bb.ignore)
thenCtx.bb.closeWith(JUMP(contCtx.bb))
elseCtx.bb.closeWith(
if (elsep == EmptyTree) JUMP(contCtx.bb)
else JUMP(contCtx.bb) setPos tree.pos
)
+ contCtx.bb killUnless contReachable
(contCtx, resKind)
}
private def genLoadTry(tree: Try, ctx: Context, setGeneratedType: TypeKind => Unit): Context = {
@@ -414,8 +403,8 @@ abstract class GenICode extends SubComponent {
(pat.symbol.tpe.typeSymbol, kind, {
ctx: Context =>
- ctx.bb.emit(STORE_LOCAL(exception), pat.pos);
- genLoad(body, ctx, kind);
+ ctx.bb.emit(STORE_LOCAL(exception), pat.pos)
+ genLoad(body, ctx, kind)
})
}
}
@@ -432,7 +421,7 @@ abstract class GenICode extends SubComponent {
private def genPrimitiveOp(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val sym = tree.symbol
- val Apply(fun @ Select(receiver, _), args) = tree
+ val Apply(fun @ Select(receiver, _), _) = tree
val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
if (scalaPrimitives.isArithmeticOp(code))
@@ -444,7 +433,7 @@ abstract class GenICode extends SubComponent {
else if (isArrayOp(code))
genArrayOp(tree, ctx, code, expectedType)
else if (isLogicalOp(code) || isComparisonOp(code)) {
- val trueCtx, falseCtx, afterCtx = ctx.newBlock
+ val trueCtx, falseCtx, afterCtx = ctx.newBlock()
genCond(tree, ctx, trueCtx, falseCtx)
trueCtx.bb.emitOnly(
@@ -471,132 +460,6 @@ abstract class GenICode extends SubComponent {
}
/**
- * forMSIL
- */
- private def msil_IsValuetypeInstMethod(msym: Symbol) = (
- loaders.clrTypes.methods get msym exists (mMSIL =>
- mMSIL.IsInstance && mMSIL.DeclaringType.IsValueType
- )
- )
- private def msil_IsValuetypeInstField(fsym: Symbol) = (
- loaders.clrTypes.fields get fsym exists (fMSIL =>
- !fMSIL.IsStatic && fMSIL.DeclaringType.IsValueType
- )
- )
-
- /**
- * forMSIL: Adds a local var, the emitted code requires one more slot on the stack as on entry
- */
- private def msil_genLoadZeroOfNonEnumValuetype(ctx: Context, kind: TypeKind, pos: Position, leaveAddressOnStackInstead: Boolean) {
- val REFERENCE(clssym) = kind
- assert(loaders.clrTypes.isNonEnumValuetype(clssym), clssym)
- val local = ctx.makeLocal(pos, clssym.tpe, "tmp")
- ctx.method.addLocal(local)
- ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(local), pos)
- ctx.bb.emit(CIL_INITOBJ(kind), pos)
- val instr = if (leaveAddressOnStackInstead)
- CIL_LOAD_LOCAL_ADDRESS(local)
- else
- LOAD_LOCAL(local)
- ctx.bb.emit(instr, pos)
- }
-
- /**
- * forMSIL
- */
- private def msil_genLoadAddressOf(tree: Tree, ctx: Context, expectedType: TypeKind, butRawValueIsAlsoGoodEnough: Boolean): Context = {
- var generatedType = expectedType
- var addressTaken = false
- debuglog("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
-
- var resCtx: Context = tree match {
-
- // emits CIL_LOAD_FIELD_ADDRESS
- case Select(qualifier, selector) if (!tree.symbol.isModule) =>
- addressTaken = true
- val sym = tree.symbol
- generatedType = toTypeKind(sym.info)
-
- if (sym.isStaticMember) {
- ctx.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, true), tree.pos)
- ctx
- } else {
- val ctx1 = genLoadQualifier(tree, ctx)
- ctx1.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, false), tree.pos)
- ctx1
- }
-
- // emits CIL_LOAD_LOCAL_ADDRESS
- case Ident(name) if (!tree.symbol.isPackage && !tree.symbol.isModule)=>
- addressTaken = true
- val sym = tree.symbol
- try {
- val Some(l) = ctx.method.lookupLocal(sym)
- ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(l), tree.pos)
- generatedType = l.kind // actually, should be "V&" but the callsite is aware of this
- } catch {
- case ex: MatchError =>
- abort("symbol " + sym + " does not exist in " + ctx.method)
- }
- ctx
-
- // emits CIL_LOAD_ARRAY_ITEM_ADDRESS
- case Apply(fun, args) =>
- if (isPrimitive(fun.symbol)) {
-
- val sym = tree.symbol
- val Apply(fun @ Select(receiver, _), args) = tree
- val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
-
- if (isArrayOp(code)) {
- val arrayObj = receiver
- val k = toTypeKind(arrayObj.tpe)
- val ARRAY(elementType) = k
- if (scalaPrimitives.isArrayGet(code)) {
- var ctx1 = genLoad(arrayObj, ctx, k)
- // load argument on stack
- debugassert(args.length == 1, "Too many arguments for array get operation: " + tree)
- ctx1 = genLoad(args.head, ctx1, INT)
- generatedType = elementType // actually "managed pointer to element type" but the callsite is aware of this
- ctx1.bb.emit(CIL_LOAD_ARRAY_ITEM_ADDRESS(elementType), tree.pos)
- addressTaken = true
- ctx1
- } else null
- } else null
- } else null
-
- case This(qual) =>
- /* TODO: this case handler is a placeholder for the time when Level 2 support for valuetypes is in place,
- in particular when invoking other methods on this where this is a valuetype value (boxed or not).
- As receiver, a managed pointer is expected, and a plain ldarg.0 achieves just that. */
- addressTaken = true
- genLoad(tree, ctx, expectedType)
-
- case _ =>
- null /* A method returning ByRef won't pass peverify, so I guess this case handler is dead code.
- Even if it's not, the code below to handler !addressTaken below. */
- }
-
- if (!addressTaken) {
- resCtx = genLoad(tree, ctx, expectedType)
- if (!butRawValueIsAlsoGoodEnough) {
- // raw value on stack (must be an intermediate result, e.g. returned by method call), take address
- addressTaken = true
- val boxType = expectedType // toTypeKind(expectedType /* TODO FIXME */)
- resCtx.bb.emit(BOX(boxType), tree.pos)
- resCtx.bb.emit(CIL_UNBOX(boxType), tree.pos)
- }
- }
-
- // emit conversion
- if (generatedType != expectedType)
- abort("Unexpected tree in msil_genLoadAddressOf: " + tree + " at: " + tree.pos)
-
- resCtx
- }
-
-
- /**
* Generate code for trees that produce values on the stack
*
* @param tree The tree to be translated
@@ -613,7 +476,11 @@ abstract class GenICode extends SubComponent {
val resCtx: Context = tree match {
case LabelDef(name, params, rhs) =>
def genLoadLabelDef = {
- val ctx1 = ctx.newBlock
+ val ctx1 = ctx.newBlock() // note: we cannot kill ctx1 if ctx is in ignore mode because
+ // label defs can be the target of jumps from other locations.
+ // that means label defs can lead to unreachable code without
+ // proper reachability analysis
+
if (nme.isLoopHeaderLabel(name))
ctx1.bb.loopHeader = true
@@ -627,7 +494,7 @@ abstract class GenICode extends SubComponent {
val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.")
ctx1.labels += pair
- ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)));
+ ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)))
}
ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
@@ -645,13 +512,13 @@ abstract class GenICode extends SubComponent {
val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
if (rhs == EmptyTree) {
- debuglog("Uninitialized variable " + tree + " at: " + (tree.pos));
+ debuglog("Uninitialized variable " + tree + " at: " + (tree.pos))
ctx.bb.emit(getZeroOf(local.kind))
}
var ctx1 = ctx
if (rhs != EmptyTree)
- ctx1 = genLoad(rhs, ctx, local.kind);
+ ctx1 = genLoad(rhs, ctx, local.kind)
ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
ctx1.scope.add(local)
@@ -695,7 +562,8 @@ abstract class GenICode extends SubComponent {
// we have to run this without the same finalizer in
// the list, otherwise infinite recursion happens for
// finalizers that contain 'return'
- val fctx = finalizerCtx.newBlock
+ val fctx = finalizerCtx.newBlock()
+ fctx.bb killIf ctx1.bb.ignore
ctx1.bb.closeWith(JUMP(fctx.bb))
ctx1 = genLoad(f1, fctx, UNIT)
}
@@ -708,7 +576,7 @@ abstract class GenICode extends SubComponent {
}
adapt(returnedKind, ctx1.method.returnType, ctx1, tree.pos)
ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos)
- ctx1.bb.enterIgnoreMode
+ ctx1.bb.enterIgnoreMode()
generatedType = expectedType
ctx1
}
@@ -760,7 +628,7 @@ abstract class GenICode extends SubComponent {
} else {
genCast(l, r, ctx1, cast)
}
- generatedType = if (cast) r else BOOL;
+ generatedType = if (cast) r else BOOL
ctx1
}
genLoadApply1
@@ -773,7 +641,7 @@ abstract class GenICode extends SubComponent {
// on the stack (contrary to what the type in the AST says).
case Apply(fun @ Select(Super(_, mix), _), args) =>
def genLoadApply2 = {
- debuglog("Call to super: " + tree);
+ debuglog("Call to super: " + tree)
val invokeStyle = SuperCall(mix)
// if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
@@ -819,47 +687,31 @@ abstract class GenICode extends SubComponent {
debugassert(ctor.owner == cls,
"Symbol " + ctor.owner.fullName + " is different than " + tpt)
- val ctx2 = if (forMSIL && loaders.clrTypes.isNonEnumValuetype(cls)) {
- /* parameterful constructors are the only possible custom constructors,
- a default constructor can't be defined for valuetypes, CLR dixit */
- val isDefaultConstructor = args.isEmpty
- if (isDefaultConstructor) {
- msil_genLoadZeroOfNonEnumValuetype(ctx, rt, tree.pos, leaveAddressOnStackInstead = false)
- ctx
- } else {
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
- ctx1.bb.emit(CIL_NEWOBJ(ctor), tree.pos)
- ctx1
- }
- } else {
- val nw = NEW(rt)
- ctx.bb.emit(nw, tree.pos)
- ctx.bb.emit(DUP(generatedType))
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
-
- val init = CALL_METHOD(ctor, Static(true))
- nw.init = init
- ctx1.bb.emit(init, tree.pos)
- ctx1
- }
- ctx2
+ val nw = NEW(rt)
+ ctx.bb.emit(nw, tree.pos)
+ ctx.bb.emit(DUP(generatedType))
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ val init = CALL_METHOD(ctor, Static(onInstance = true))
+ nw.init = init
+ ctx1.bb.emit(init, tree.pos)
+ ctx1
case _ =>
abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
}
}
genLoadApply3
- case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
+ case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) =>
def genLoadApply4 = {
- debuglog("BOX : " + fun.symbol.fullName);
+ debuglog("BOX : " + fun.symbol.fullName)
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val nativeKind = toTypeKind(expr.tpe)
- if (settings.Xdce.value) {
+ if (settings.Xdce) {
// we store this boxed value to a local, even if not really needed.
// boxing optimization might use it, and dead code elimination will
// take care of unnecessary stores
- var loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
+ val loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
ctx1.bb.emit(STORE_LOCAL(loc1))
ctx1.bb.emit(LOAD_LOCAL(loc1))
}
@@ -869,7 +721,7 @@ abstract class GenICode extends SubComponent {
}
genLoadApply4
- case Apply(fun @ _, List(expr)) if (definitions.isUnbox(fun.symbol)) =>
+ case Apply(fun @ _, List(expr)) if (currentRun.runDefinitions.isUnbox(fun.symbol)) =>
debuglog("UNBOX : " + fun.symbol.fullName)
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val boxType = toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
@@ -877,12 +729,6 @@ abstract class GenICode extends SubComponent {
ctx1.bb.emit(UNBOX(boxType), expr.pos)
ctx1
- case Apply(fun @ _, List(expr)) if (forMSIL && loaders.clrTypes.isAddressOf(fun.symbol)) =>
- debuglog("ADDRESSOF : " + fun.symbol.fullName);
- val ctx1 = msil_genLoadAddressOf(expr, ctx, toTypeKind(expr.tpe), butRawValueIsAlsoGoodEnough = false)
- generatedType = toTypeKind(fun.symbol.tpe.resultType)
- ctx1
-
case app @ Apply(fun, args) =>
def genLoadApply6 = {
val sym = fun.symbol
@@ -893,7 +739,7 @@ abstract class GenICode extends SubComponent {
resolveForwardLabel(ctx.defdef, ctx, sym)
ctx.labels.get(sym) match {
case Some(l) =>
- log("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
+ debuglog("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
l
case _ =>
abort("Unknown label target: " + sym + " at: " + (fun.pos) + ": ctx: " + ctx)
@@ -908,35 +754,28 @@ abstract class GenICode extends SubComponent {
// (if it's not in ignore mode, double-closing is an error)
val ctx1 = genLoadLabelArguments(args, label, ctx)
ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
- ctx1.bb.enterIgnoreMode
+ ctx1.bb.enterIgnoreMode()
ctx1
} else if (isPrimitive(sym)) { // primitive method call
val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
generatedType = resKind
newCtx
} else { // normal method call
- debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
+ debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember)
val invokeStyle =
if (sym.isStaticMember)
- Static(false)
+ Static(onInstance = false)
else if (sym.isPrivate || sym.isClassConstructor)
- Static(true)
+ Static(onInstance = true)
else
Dynamic
- var ctx1 =
- if (invokeStyle.hasInstance) {
- if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym))
- msil_genLoadQualifierAddress(fun, ctx)
- else
- genLoadQualifier(fun, ctx)
- } else ctx
-
+ var ctx1 = if (invokeStyle.hasInstance) genLoadQualifier(fun, ctx) else ctx
ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
val cm = CALL_METHOD(sym, invokeStyle)
- /** In a couple cases, squirrel away a little extra information in the
- * CALL_METHOD for use by GenJVM.
+ /* In a couple cases, squirrel away a little extra information in the
+ * CALL_METHOD for use by GenASM.
*/
fun match {
case Select(qual, _) =>
@@ -957,14 +796,15 @@ abstract class GenICode extends SubComponent {
ctx1.method.updateRecursive(sym)
generatedType =
if (sym.isClassConstructor) UNIT
- else toTypeKind(sym.info.resultType);
+ else toTypeKind(sym.info.resultType)
+ // deal with methods that return Null
+ adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
ctx1
}
}
genLoadApply6
case ApplyDynamic(qual, args) =>
- assert(!forMSIL, tree)
// TODO - this is where we'd catch dynamic applies for invokedynamic.
sys.error("No invokedynamic support yet.")
// val ctx1 = genLoad(qual, ctx, ObjectReference)
@@ -1002,17 +842,23 @@ abstract class GenICode extends SubComponent {
val sym = tree.symbol
generatedType = toTypeKind(sym.info)
val hostClass = findHostClass(qualifier.tpe, sym)
- log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+ debuglog(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+ val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
+
+ def genLoadQualUnlessElidable: Context =
+ if (qualSafeToElide) ctx else genLoadQualifier(tree, ctx)
if (sym.isModule) {
- genLoadModule(ctx, tree)
- }
- else if (sym.isStaticMember) {
- ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
- ctx
+ genLoadModule(genLoadQualUnlessElidable, tree)
} else {
- val ctx1 = genLoadQualifier(tree, ctx)
- ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
+ val isStatic = sym.isStaticMember
+ val ctx1 = if (isStatic) genLoadQualUnlessElidable
+ else genLoadQualifier(tree, ctx)
+ ctx1.bb.emit(LOAD_FIELD(sym, isStatic) setHostClass hostClass, tree.pos)
+ // it's tempting to drop field accesses of type Null instead of adapting them,
+ // but field access can cause static class init so we can't. Besides, fields
+ // of type Null probably aren't common enough to figure out an optimization
+ adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
ctx1
}
}
@@ -1044,16 +890,16 @@ abstract class GenICode extends SubComponent {
def genLoadLiteral = {
if (value.tag != UnitTag) (value.tag, expectedType) match {
case (IntTag, LONG) =>
- ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos);
+ ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos)
generatedType = LONG
case (FloatTag, DOUBLE) =>
- ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos);
+ ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos)
generatedType = DOUBLE
case (NullTag, _) =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
+ ctx.bb.emit(CONSTANT(value), tree.pos)
generatedType = NullReference
case _ =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
+ ctx.bb.emit(CONSTANT(value), tree.pos)
generatedType = toTypeKind(tree.tpe)
}
ctx
@@ -1061,10 +907,10 @@ abstract class GenICode extends SubComponent {
genLoadLiteral
case Block(stats, expr) =>
- ctx.enterScope
+ ctx.enterScope()
var ctx1 = genStat(stats, ctx)
ctx1 = genLoad(expr, ctx1, expectedType)
- ctx1.exitScope
+ ctx1.exitScope()
ctx1
case Typed(Super(_, _), _) =>
@@ -1101,9 +947,11 @@ abstract class GenICode extends SubComponent {
case Match(selector, cases) =>
def genLoadMatch = {
- debuglog("Generating SWITCH statement.");
- var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
- val afterCtx = ctx1.newBlock
+ debuglog("Generating SWITCH statement.")
+ val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
+ val afterCtx = ctx1.newBlock()
+ afterCtx.bb killIf ctx1.bb.ignore
+ var afterCtxReachable = false
var caseCtx: Context = null
generatedType = toTypeKind(tree.tpe)
@@ -1113,7 +961,8 @@ abstract class GenICode extends SubComponent {
for (caze @ CaseDef(pat, guard, body) <- cases) {
assert(guard == EmptyTree, guard)
- val tmpCtx = ctx1.newBlock
+ val tmpCtx = ctx1.newBlock()
+ tmpCtx.bb killIf ctx1.bb.ignore
pat match {
case Literal(value) =>
tags = value.intValue :: tags
@@ -1135,12 +984,15 @@ abstract class GenICode extends SubComponent {
}
caseCtx = genLoad(body, tmpCtx, generatedType)
+ afterCtxReachable ||= !caseCtx.bb.ignore
// close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
}
+ afterCtxReachable ||= (default == afterCtx)
ctx1.bb.emitOnly(
SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
)
+ afterCtx.bb killUnless afterCtxReachable
afterCtx
}
genLoadMatch
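
A hedged source-level example of the case the new afterCtxReachable bookkeeping covers: when every case of a switchable match ends in a throw (or a return), no case block jumps to afterCtx, so it is killed via killUnless. Whether a given source match actually goes through this SWITCH path depends on the pattern matcher; the snippet only illustrates the shape:

    object SwitchExample {
      // Every alternative ends in a throw, so the block that would follow the
      // generated SWITCH has no reachable predecessor.
      def describe(tag: Int): Nothing = tag match {
        case 0 => throw new IllegalArgumentException("zero")
        case 1 => throw new IllegalStateException("one")
        case _ => throw new RuntimeException("other")
      }
    }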
@@ -1161,28 +1013,60 @@ abstract class GenICode extends SubComponent {
resCtx
}
+ /**
+ * If we have a method call, field load, or array element load of type Null then
+ * we need to convince the JVM that we have a null value because in Scala
+ * land Null is a subtype of all ref types, but in JVM land scala.runtime.Null$
+ * is not. Note we don't have to adapt loads of locals because the JVM type
+ * system for locals does have a null type which it tracks internally. As
+ * long as we adapt these other things, the JVM will know that a Scala local of
+ * type Null is holding a null.
+ */
+ private def adaptNullRef(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
+ debuglog(s"GenICode#adaptNullRef($from, $to, $ctx, $pos)")
+
+ // Don't need to adapt null to unit because we'll just drop it anyway. Don't
+ // need to adapt to Object or AnyRef because the JVM is happy with
+ // upcasting Null to them.
+ // We do have to adapt from NullReference to NullReference because we could be storing
+ // this value into a local of type Null and we want the JVM to see that it's
+ // a null value so we don't have to also adapt local loads.
+ if (from == NullReference && to != UNIT && to != ObjectReference && to != AnyRefReference) {
+ assert(to.isRefOrArrayType, s"Attempt to adapt a null to a non reference type $to.")
+ // adapt by dropping what we've got and pushing a null which
+ // will convince the JVM we really do have null
+ ctx.bb.emit(DROP(from), pos)
+ ctx.bb.emit(CONSTANT(Constant(null)), pos)
+ }
+ }
+
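
A hedged source-level illustration of the situation adaptNullRef handles (the names below are made up for the example): an expression whose static type is Null flowing into a context that expects some other reference type. Scala accepts this because Null is a bottom type among reference types, but the JVM only sees a value of class scala.runtime.Null$, so the generated code drops that value and pushes a literal null:

    object NullAdaptExample {
      def alwaysNull(): Null = null    // a method whose declared result type is Null

      def demo(): String = {
        // Null <: String in Scala, so this typechecks; at the JVM level the call
        // result is adapted with DROP + CONSTANT(null) as described above.
        val s: String = alwaysNull()
        if (s == null) "was null" else "unexpected"
      }

      def main(args: Array[String]): Unit = println(demo())
    }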
private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
// An awful lot of bugs explode here - let's leave ourselves more clues.
// A typical example is an overloaded type assigned after typer.
- log(s"GenICode#adapt($from, $to, $ctx, $pos)")
+ debuglog(s"GenICode#adapt($from, $to, $ctx, $pos)")
- val conforms = (from <:< to) || (from == NullReference && to == NothingReference)
def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
- def checkAssertions() {
- def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos"
- debugassert(from != UNIT, msg)
- assert(!from.isReferenceType && !to.isReferenceType, msg)
- }
- if (conforms) from match {
- case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode
- case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
- case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables
- case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types
- case _ => ()
- }
- else to match {
- case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding
- case _ => checkAssertions() ; coerce(from, to) // other primitive coercions
+
+ (from, to) match {
+ // The JVM doesn't have a Nothing equivalent, so it doesn't know that a method of type Nothing can't actually return. So for instance, with
+ // def f: String = ???
+ // we need
+ // 0: getstatic #25; //Field scala/Predef$.MODULE$:Lscala/Predef$;
+ // 3: invokevirtual #29; //Method scala/Predef$.$qmark$qmark$qmark:()Lscala/runtime/Nothing$;
+ // 6: athrow
+ // So this case tacks on the athrow which makes the JVM happy because class Nothing is declared as a subclass of Throwable
+ case (NothingReference, _) =>
+ ctx.bb.emit(THROW(ThrowableClass))
+ ctx.bb.enterIgnoreMode()
+ case _ if from isAssignabledTo to =>
+ ()
+ case (_, UNIT) =>
+ ctx.bb.emit(DROP(from), pos)
+ // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem
+ case _ if !from.isRefOrArrayType && !to.isRefOrArrayType =>
+ coerce(from, to)
+ case _ =>
+ assert(false, s"Can't convert from $from to $to in unit ${unit.source} at $pos")
}
}
@@ -1195,15 +1079,6 @@ abstract class GenICode extends SubComponent {
abort("Unknown qualifier " + tree)
}
- /** forMSIL */
- private def msil_genLoadQualifierAddress(tree: Tree, ctx: Context): Context =
- tree match {
- case Select(qualifier, _) =>
- msil_genLoadAddressOf(qualifier, ctx, toTypeKind(qualifier.tpe), butRawValueIsAlsoGoodEnough = false)
- case _ =>
- abort("Unknown qualifier " + tree)
- }
-
/**
* Generate code that loads args into label parameters.
*/
@@ -1250,7 +1125,9 @@ abstract class GenICode extends SubComponent {
if (!tree.symbol.isPackageClass) tree.symbol
else tree.symbol.info.member(nme.PACKAGE) match {
case NoSymbol => abort("Cannot use package as value: " + tree)
- case s => debugwarn("Bug: found package class where package object expected. Converting.") ; s.moduleClass
+ case s =>
+ devWarning(s"Found ${tree.symbol} where a package object is required. Converting to ${s.moduleClass}")
+ s.moduleClass
}
)
debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym))
@@ -1384,18 +1261,14 @@ abstract class GenICode extends SubComponent {
// }
/** Generate string concatenation.
- *
- * @param tree ...
- * @param ctx ...
- * @return ...
*/
def genStringConcat(tree: Tree, ctx: Context): Context = {
liftStringConcat(tree) match {
// Optimization for expressions of the form "" + x. We can avoid the StringBuilder.
- case List(Literal(Constant("")), arg) if !forMSIL =>
+ case List(Literal(Constant("")), arg) =>
debuglog("Rewriting \"\" + x as String.valueOf(x) for: " + arg)
val ctx1 = genLoad(arg, ctx, ObjectReference)
- ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(false)), arg.pos)
+ ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(onInstance = false)), arg.pos)
ctx1
case concatenations =>
debuglog("Lifted string concatenations for " + tree + "\n to: " + concatenations)
@@ -1420,7 +1293,7 @@ abstract class GenICode extends SubComponent {
}
val ctx1 = genLoad(tree, ctx, ObjectReference)
- ctx1.bb.emit(CALL_METHOD(hashMethod, Static(false)))
+ ctx1.bb.emit(CALL_METHOD(hashMethod, Static(onInstance = false)))
ctx1
}
@@ -1443,6 +1316,8 @@ abstract class GenICode extends SubComponent {
/** Some useful equality helpers.
*/
def isNull(t: Tree) = cond(t) { case Literal(Constant(null)) => true }
+ def isLiteral(t: Tree) = cond(t) { case Literal(_) => true }
+ def isNonNullExpr(t: Tree) = isLiteral(t) || ((t.symbol ne null) && t.symbol.isModule)
/* If l or r is constant null, returns the other ; otherwise null */
def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
@@ -1476,9 +1351,17 @@ abstract class GenICode extends SubComponent {
private def genCond(tree: Tree,
ctx: Context,
thenCtx: Context,
- elseCtx: Context): Unit =
+ elseCtx: Context): Boolean =
{
- def genComparisonOp(l: Tree, r: Tree, code: Int) {
+ /**
+ * Generate the de-sugared comparison mechanism that will underly an '=='
+ *
+ * @param l left-hand side of the '=='
+ * @param r right-hand side of the '=='
+ * @param code the comparison operator to use
+ * @return true if either branch can continue normally to a follow on block, false otherwise
+ */
+ def genComparisonOp(l: Tree, r: Tree, code: Int): Boolean = {
val op: TestOp = code match {
case scalaPrimitives.LT => LT
case scalaPrimitives.LE => LE
@@ -1494,27 +1377,33 @@ abstract class GenICode extends SubComponent {
lazy val nonNullSide = ifOneIsNull(l, r)
if (isReferenceEqualityOp(code) && nonNullSide != null) {
val ctx1 = genLoad(nonNullSide, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
ctx1.bb.emitOnly(
CZJUMP(thenCtx.bb, elseCtx.bb, op, ObjectReference)
)
+ branchesReachable
}
else {
val kind = getMaxType(l.tpe :: r.tpe :: Nil)
var ctx1 = genLoad(l, ctx, kind)
ctx1 = genLoad(r, ctx1, kind)
+ val branchesReachable = !ctx1.bb.ignore
ctx1.bb.emitOnly(
CJUMP(thenCtx.bb, elseCtx.bb, op, kind) setPos r.pos
)
+ branchesReachable
}
}
- debuglog("Entering genCond with tree: " + tree);
+ debuglog("Entering genCond with tree: " + tree)
// the default emission
- def default() = {
+ def default(): Boolean = {
val ctx1 = genLoad(tree, ctx, BOOL)
+ val branchesReachable = !ctx1.bb.ignore
ctx1.bb.closeWith(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) setPos tree.pos)
+ branchesReachable
}
tree match {
@@ -1526,11 +1415,12 @@ abstract class GenICode extends SubComponent {
lazy val Select(lhs, _) = fun
lazy val rhs = args.head
- def genZandOrZor(and: Boolean) = {
- val ctxInterm = ctx.newBlock
+ def genZandOrZor(and: Boolean): Boolean = {
+ val ctxInterm = ctx.newBlock()
- if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
+ val branchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
else genCond(lhs, ctx, thenCtx, ctxInterm)
+ ctxInterm.bb killUnless branchesReachable
genCond(rhs, ctxInterm, thenCtx, elseCtx)
}
@@ -1553,10 +1443,10 @@ abstract class GenICode extends SubComponent {
else if (isComparisonOp(code))
genComparisonOp(lhs, rhs, code)
else
- default
+ default()
}
- case _ => default
+ case _ => default()
}
}
@@ -1569,17 +1459,18 @@ abstract class GenICode extends SubComponent {
* @param ctx current context
* @param thenCtx target context if the comparison yields true
* @param elseCtx target context if the comparison yields false
+ * @return true if either branch can continue normally to a follow on block, false otherwise
*/
- def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Unit = {
+ def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Boolean = {
def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse {
- ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR)
+ ctx.makeLocal(l.pos, AnyRefTpe, nme.EQEQ_LOCAL_VAR.toString)
}
- /** True if the equality comparison is between values that require the use of the rich equality
- * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
- * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
- * When it is statically known that both sides are equal and subtypes of Number of Character,
- * not using the rich equality is possible (their own equals method will do ok.)*/
+ /* True if the equality comparison is between values that require the use of the rich equality
+ * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
+ * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
+ * When it is statically known that both sides are equal and subtypes of Number or Character,
+ * not using the rich equality is possible (their own equals method will do ok.)*/
def mustUseAnyComparator: Boolean = {
def areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
!areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol)
@@ -1587,49 +1478,72 @@ abstract class GenICode extends SubComponent {
if (mustUseAnyComparator) {
// when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
- val equalsMethod =
- if (!settings.optimise.value) {
- def default = platform.externalEquals
- platform match {
- case x: JavaPlatform =>
- import x._
- if (l.tpe <:< BoxedNumberClass.tpe) {
- if (r.tpe <:< BoxedNumberClass.tpe) externalEqualsNumNum
- else if (r.tpe <:< BoxedCharacterClass.tpe) externalEqualsNumChar
- else externalEqualsNumObject
- }
- else default
-
- case _ => default
- }
- }
- else {
+ val equalsMethod: Symbol = {
+ if (!settings.optimise) {
+ if (l.tpe <:< BoxedNumberClass.tpe) {
+ if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
+ else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
+ else platform.externalEqualsNumObject
+ } else platform.externalEquals
+ } else {
ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
getMember(ScalaRunTimeModule, nme.inlinedEquals)
}
+ }
val ctx1 = genLoad(l, ctx, ObjectReference)
val ctx2 = genLoad(r, ctx1, ObjectReference)
+ val branchesReachable = !ctx2.bb.ignore
ctx2.bb.emitOnly(
- CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)),
+ CALL_METHOD(equalsMethod, if (settings.optimise) Dynamic else Static(onInstance = false)),
CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
)
+ branchesReachable
}
else {
- if (isNull(l))
+ if (isNull(l)) {
// null == expr -> expr eq null
- genLoad(r, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
- else if (isNull(r)) {
+ val ctx1 = genLoad(r, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
+ ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+ branchesReachable
+ } else if (isNull(r)) {
// expr == null -> expr eq null
- genLoad(l, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+ val ctx1 = genLoad(l, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
+ ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+ branchesReachable
+ } else if (isNonNullExpr(l)) {
+ // Avoid null check if L is statically non-null.
+ //
+ // "" == expr -> "".equals(expr)
+ // Nil == expr -> Nil.equals(expr)
+ //
+ // Common enough (through pattern matching) to treat this specially here rather than
+ // hoping that -Yconst-opt is enabled. The impossible branches for null checks lead
+ // to spurious "branch not covered" warnings in Jacoco code coverage.
+ var ctx1 = genLoad(l, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
+ ctx1 = genLoad(r, ctx1, ObjectReference)
+ ctx1.bb emitOnly(
+ CALL_METHOD(Object_equals, Dynamic),
+ CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
+ )
+ branchesReachable
} else {
val eqEqTempLocal = getTempLocal
var ctx1 = genLoad(l, ctx, ObjectReference)
- lazy val nonNullCtx = ctx1.newBlock
+ val branchesReachable = !ctx1.bb.ignore
+ lazy val nonNullCtx = {
+ val block = ctx1.newBlock()
+ block.bb killUnless branchesReachable
+ block
+ }
// l == r -> if (l eq null) r eq null else l.equals(r)
ctx1 = genLoad(r, ctx1, ObjectReference)
- val nullCtx = ctx1.newBlock
+ val nullCtx = ctx1.newBlock()
+ nullCtx.bb killUnless branchesReachable
ctx1.bb.emitOnly(
STORE_LOCAL(eqEqTempLocal) setPos l.pos,
@@ -1646,6 +1560,7 @@ abstract class GenICode extends SubComponent {
CALL_METHOD(Object_equals, Dynamic),
CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
)
+ branchesReachable
}
}
}
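
A hedged Scala-level sketch of the reference-equality branch generated above (the branch where mustUseAnyComparator is false); the compiler emits this logic directly as ICode, so the helpers below are only illustrative:

    object EqEqSketch {
      // General case: l == r for possibly-null references becomes
      // if (l eq null) r eq null else l.equals(r)
      def refEquals(l: AnyRef, r: AnyRef): Boolean =
        if (l eq null) r eq null else l.equals(r)

      // New fast path: when the left operand is a literal or a module it is
      // statically non-null, so the null check on l is skipped entirely.
      def literalLhsEquals(r: AnyRef): Boolean = "".equals(r)

      def main(args: Array[String]): Unit = {
        println(refEquals(null, null))  // true
        println(refEquals("a", "a"))    // true
        println(literalLhsEquals(""))   // true
      }
    }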
@@ -1658,12 +1573,12 @@ abstract class GenICode extends SubComponent {
debugassert(ctx.clazz.symbol eq cls,
"Classes are not the same: " + ctx.clazz.symbol + ", " + cls)
- /** Non-method term members are fields, except for module members. Module
- * members can only happen on .NET (no flatten) for inner traits. There,
- * a module symbol is generated (transformInfo in mixin) which is used
- * as owner for the members of the implementation class (so that the
- * backend emits them as static).
- * No code is needed for this module symbol.
+ /* Non-method term members are fields, except for module members. Module
+ * members can only happen on .NET (no flatten) for inner traits. There,
+ * a module symbol is generated (transformInfo in mixin) which is used
+ * as owner for the members of the implementation class (so that the
+ * backend emits them as static).
+ * No code is needed for this module symbol.
*/
for (f <- cls.info.decls ; if !f.isMethod && f.isTerm && !f.isModule)
ctx.clazz addField new IField(f)
@@ -1701,8 +1616,6 @@ abstract class GenICode extends SubComponent {
* If the block consists of a single unconditional jump, prune
* it by replacing the instructions in the predecessor to jump
* directly to the JUMP target of the block.
- *
- * @param method ...
*/
def prune(method: IMethod) = {
var changed = false
@@ -1714,14 +1627,14 @@ abstract class GenICode extends SubComponent {
case _ => None
}
if (block.size == 1 && optCont.isDefined) {
- val Some(cont) = optCont;
- val pred = block.predecessors;
- debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")");
+ val Some(cont) = optCont
+ val pred = block.predecessors
+ debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")")
pred foreach { p =>
changed = true
p.lastInstruction match {
case CJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty if branch.");
+ debuglog("Pruning empty if branch.")
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1734,7 +1647,7 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds: " + method + " " + block + " " + pred + " " + p))
case CZJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty ifz branch.");
+ debuglog("Pruning empty ifz branch.")
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1747,12 +1660,12 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds"))
case JUMP(b) if (b == block) =>
- debuglog("Pruning empty JMP branch.");
+ debuglog("Pruning empty JMP branch.")
val replaced = p.replaceInstruction(p.lastInstruction, JUMP(cont))
debugassert(replaced, "Didn't find p.lastInstruction")
case SWITCH(tags, labels) if (labels contains block) =>
- debuglog("Pruning empty SWITCH branch.");
+ debuglog("Pruning empty SWITCH branch.")
p.replaceInstruction(p.lastInstruction,
SWITCH(tags, labels map (l => if (l == block) cont else l)))
@@ -1768,7 +1681,7 @@ abstract class GenICode extends SubComponent {
e.covered = e.covered filter (_ != block)
e.blocks = e.blocks filter (_ != block)
if (e.startBlock eq block)
- e setStartBlock cont;
+ e setStartBlock cont
}
}
}
@@ -1780,7 +1693,7 @@ abstract class GenICode extends SubComponent {
method.blocks foreach prune0
} while (changed)
- debuglog("Prune fixpoint reached in " + n + " iterations.");
+ debuglog("Prune fixpoint reached in " + n + " iterations.")
}
def getMaxType(ts: List[Type]): TypeKind =
@@ -1822,9 +1735,7 @@ abstract class GenICode extends SubComponent {
t match {
case t @ Apply(_, args) if sym.isLabel && !boundLabels(sym) =>
val newSym = getLabel(sym.pos, sym.name)
- val tree = Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos
- tree.tpe = t.tpe
- tree
+ Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos setType t.tpe
case t @ LabelDef(name, params, rhs) =>
val newSym = getLabel(t.pos, name)
@@ -1845,7 +1756,7 @@ abstract class GenICode extends SubComponent {
/////////////////////// Context ////////////////////////////////
- abstract class Cleanup(val value: AnyRef) {
+ sealed abstract class Cleanup(val value: AnyRef) {
def contains(x: AnyRef) = value == x
}
case class MonitorRelease(m: Local) extends Cleanup(m) { }
@@ -1954,22 +1865,11 @@ abstract class GenICode extends SubComponent {
}
def addFinalizer(f: Tree, ctx: Context): this.type = {
- cleanups = Finalizer(f, ctx) :: cleanups;
- this
- }
-
- def removeFinalizer(f: Tree): this.type = {
- assert(cleanups.head contains f,
- "Illegal nesting of cleanup operations: " + cleanups + " while exiting finalizer " + f);
- cleanups = cleanups.tail
+ cleanups = Finalizer(f, ctx) :: cleanups
this
}
/** Prepare a new context upon entry into a method.
- *
- * @param m ...
- * @param d ...
- * @return ...
*/
def enterMethod(m: IMethod, d: DefDef): Context = {
val ctx1 = new Context(this) setMethod(m)
@@ -1978,13 +1878,13 @@ abstract class GenICode extends SubComponent {
ctx1.bb = ctx1.method.startBlock
ctx1.defdef = d
ctx1.scope = EmptyScope
- ctx1.enterScope
+ ctx1.enterScope()
ctx1
}
/** Return a new context for a new basic block. */
def newBlock(): Context = {
- val block = method.code.newBlock
+ val block = method.code.newBlock()
handlers foreach (_ addCoveredBlock block)
currentExceptionHandlers foreach (_ addBlock block)
block.varsInScope.clear()
@@ -2008,13 +1908,12 @@ abstract class GenICode extends SubComponent {
* 'covered' by this exception handler (in addition to the
* previously active handlers).
*/
- private def newExceptionHandler(cls: Symbol, resultKind: TypeKind, pos: Position): ExceptionHandler = {
+ private def newExceptionHandler(cls: Symbol, pos: Position): ExceptionHandler = {
handlerCount += 1
val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos)
- exh.resultKind = resultKind
method.addHandler(exh)
handlers = exh :: handlers
- debuglog("added handler: " + exh);
+ debuglog("added handler: " + exh)
exh
}
@@ -2024,7 +1923,7 @@ abstract class GenICode extends SubComponent {
private def addActiveHandler(exh: ExceptionHandler) {
handlerCount += 1
handlers = exh :: handlers
- debuglog("added handler: " + exh);
+ debuglog("added handler: " + exh)
}
/** Return a new context for generating code for the given
@@ -2032,7 +1931,7 @@ abstract class GenICode extends SubComponent {
*/
private def enterExceptionHandler(exh: ExceptionHandler): Context = {
currentExceptionHandlers ::= exh
- val ctx = newBlock
+ val ctx = newBlock()
exh.setStartBlock(ctx.bb)
ctx
}
@@ -2041,16 +1940,6 @@ abstract class GenICode extends SubComponent {
currentExceptionHandlers = currentExceptionHandlers.tail
}
- /** Remove the given handler from the list of active exception handlers. */
- def removeActiveHandler(exh: ExceptionHandler): Unit = {
- assert(handlerCount > 0 && handlers.head == exh,
- "Wrong nesting of exception handlers." + this + " for " + exh)
- handlerCount -= 1
- handlers = handlers.tail
- debuglog("removed handler: " + exh);
-
- }
-
/** Clone the current context */
def dup: Context = new Context(this)
@@ -2069,23 +1958,55 @@ abstract class GenICode extends SubComponent {
* It returns the resulting context, with the same active handlers as
* before the call. Use it like:
*
- * <code> ctx.Try( ctx => {
+ * ` ctx.Try( ctx => {
* ctx.bb.emit(...) // protected block
* }, (ThrowableClass,
* ctx => {
* ctx.bb.emit(...); // exception handler
* }), (AnotherExceptionClass,
* ctx => {...
- * } ))</code>
+ * } ))`
+ *
+ * The resulting structure will look something like
+ *
+ * outer:
+ * // this 'useless' jump will be removed later,
+ * // for now it separates the try body's blocks from previous
+ * // code since the try body needs its own exception handlers
+ * JUMP body
+ *
+ * body:
+ * [ try body ]
+ * JUMP normalExit
+ *
+ * catch[i]:
+ * [ handler[i] body ]
+ * JUMP normalExit
+ *
+ * catchAll:
+ * STORE exception
+ * [ finally body ]
+ * THROW exception
+ *
+ * normalExit:
+ * [ finally body ]
+ *
+ * each catch[i] will cover body. catchAll will cover both body and each catch[i]
+ * Additional finally copies are created on the emission of every RETURN in the try body and exception handlers.
+ *
+ * This could result in unreachable code which has to be cleaned up later, e.g. if the try and all the exception
+ * handlers always end in RETURN then there will be no "normal" flow out of the try/catch/finally.
+ * Later reachability analysis will remove unreachable code.
*/
def Try(body: Context => Context,
handlers: List[(Symbol, TypeKind, Context => Context)],
finalizer: Tree,
- tree: Tree) = if (forMSIL) TryMsil(body, handlers, finalizer, tree) else {
+ tree: Tree) = {
- val outerCtx = this.dup // context for generating exception handlers, covered by finalizer
+ val outerCtx = this.dup // context for generating exception handlers, covered by the catch-all finalizer
val finalizerCtx = this.dup // context for generating finalizer handler
- val afterCtx = outerCtx.newBlock
+ val normalExitCtx = outerCtx.newBlock() // context where flow will go on a "normal" (non-return, non-throw) exit from a try or catch handler
+ var normalExitReachable = false
var tmp: Local = null
val kind = toTypeKind(tree.tpe)
val guardResult = kind != UNIT && mayCleanStack(finalizer)
@@ -2099,7 +2020,8 @@ abstract class GenICode extends SubComponent {
}
def emitFinalizer(ctx: Context): Context = if (!finalizer.isEmpty) {
- val ctx1 = finalizerCtx.dup.newBlock
+ val ctx1 = finalizerCtx.dup.newBlock()
+ ctx1.bb killIf ctx.bb.ignore
ctx.bb.closeWith(JUMP(ctx1.bb))
if (guardResult) {
@@ -2112,107 +2034,53 @@ abstract class GenICode extends SubComponent {
} else ctx
- val finalizerExh = if (finalizer != EmptyTree) Some({
- val exh = outerCtx.newExceptionHandler(NoSymbol, toTypeKind(finalizer.tpe), finalizer.pos) // finalizer covers exception handlers
- this.addActiveHandler(exh) // .. and body aswell
- val ctx = finalizerCtx.enterExceptionHandler(exh)
- val exception = ctx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc")
- loadException(ctx, exh, finalizer.pos)
- ctx.bb.emit(STORE_LOCAL(exception));
- val ctx1 = genLoad(finalizer, ctx, UNIT);
- ctx1.bb.emit(LOAD_LOCAL(exception));
- ctx1.bb.emit(THROW(ThrowableClass));
- ctx1.bb.enterIgnoreMode;
- ctx1.bb.close
- finalizerCtx.endHandler()
- exh
- }) else None
-
- val exhs = handlers.map { case (sym, kind, handler) => // def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
- val exh = this.newExceptionHandler(sym, kind, tree.pos)
- var ctx1 = outerCtx.enterExceptionHandler(exh)
- ctx1.addFinalizer(finalizer, finalizerCtx)
- loadException(ctx1, exh, tree.pos)
- ctx1 = handler(ctx1)
- // emit finalizer
- val ctx2 = emitFinalizer(ctx1)
- ctx2.bb.closeWith(JUMP(afterCtx.bb))
- outerCtx.endHandler()
- exh
+ // Generate the catch-all exception handler that deals with uncaught exceptions coming
+ // from the try or exception handlers. It catches the exception, runs the finally code, then rethrows
+ // the exception
+ if (settings.YdisableUnreachablePrevention || !outerCtx.bb.ignore) {
+ if (finalizer != EmptyTree) {
+ val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers
+ this.addActiveHandler(exh) // .. and body as well
+ val exhStartCtx = finalizerCtx.enterExceptionHandler(exh)
+ exhStartCtx.bb killIf outerCtx.bb.ignore
+ val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableTpe, "exc")
+ loadException(exhStartCtx, exh, finalizer.pos)
+ exhStartCtx.bb.emit(STORE_LOCAL(exception))
+ val exhEndCtx = genLoad(finalizer, exhStartCtx, UNIT)
+ exhEndCtx.bb.emit(LOAD_LOCAL(exception))
+ exhEndCtx.bb.closeWith(THROW(ThrowableClass))
+ exhEndCtx.bb.enterIgnoreMode()
+ finalizerCtx.endHandler()
}
- val bodyCtx = this.newBlock
- if (finalizer != EmptyTree)
- bodyCtx.addFinalizer(finalizer, finalizerCtx)
-
- var finalCtx = body(bodyCtx)
- finalCtx = emitFinalizer(finalCtx)
-
- outerCtx.bb.closeWith(JUMP(bodyCtx.bb))
-
- finalCtx.bb.closeWith(JUMP(afterCtx.bb))
- afterCtx
- }
-
-
- /** try-catch-finally blocks are actually simpler to emit in MSIL, because there
- * is support for `finally` in bytecode.
- *
- * A
- * try { .. } catch { .. } finally { .. }
- * block is de-sugared into
- * try { try { ..} catch { .. } } finally { .. }
- *
- * In ICode `finally` block is represented exactly the same as an exception handler,
- * but with `NoSymbol` as the exception class. The covered blocks are all blocks of
- * the `try { .. } catch { .. }`.
- *
- * Also, TryMsil does not enter any Finalizers into the `cleanups`, because the
- * CLI takes care of running the finalizer when seeing a `leave` statement inside
- * a try / catch.
- */
- def TryMsil(body: Context => Context,
- handlers: List[(Symbol, TypeKind, (Context => Context))],
- finalizer: Tree,
- tree: Tree) = {
-
- val outerCtx = this.dup // context for generating exception handlers, covered by finalizer
- val finalizerCtx = this.dup // context for generating finalizer handler
- val afterCtx = outerCtx.newBlock
-
- if (finalizer != EmptyTree) {
- // finalizer is covers try and all catch blocks, i.e.
- // try { try { .. } catch { ..} } finally { .. }
- val exh = outerCtx.newExceptionHandler(NoSymbol, UNIT, tree.pos)
- this.addActiveHandler(exh)
- val ctx = finalizerCtx.enterExceptionHandler(exh)
- loadException(ctx, exh, tree.pos)
- val ctx1 = genLoad(finalizer, ctx, UNIT)
- // need jump for the ICode to be valid. MSIL backend will emit `Endfinally` instead.
- ctx1.bb.closeWith(JUMP(afterCtx.bb))
- finalizerCtx.endHandler()
- }
-
- for (handler <- handlers) {
- val exh = this.newExceptionHandler(handler._1, handler._2, tree.pos)
- var ctx1 = outerCtx.enterExceptionHandler(exh)
- loadException(ctx1, exh, tree.pos)
- ctx1 = handler._3(ctx1)
- // msil backend will emit `Leave` to jump out of a handler
- ctx1.bb.closeWith(JUMP(afterCtx.bb))
- outerCtx.endHandler()
+ // Generate each exception handler
+ for ((sym, kind, handler) <- handlers) {
+ val exh = this.newExceptionHandler(sym, tree.pos)
+ val exhStartCtx = outerCtx.enterExceptionHandler(exh)
+ exhStartCtx.bb killIf outerCtx.bb.ignore
+ exhStartCtx.addFinalizer(finalizer, finalizerCtx)
+ loadException(exhStartCtx, exh, tree.pos)
+ val exhEndCtx = handler(exhStartCtx)
+ normalExitReachable ||= !exhEndCtx.bb.ignore
+ exhEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
+ outerCtx.endHandler()
+ }
}
- val bodyCtx = this.newBlock
+ val bodyCtx = this.newBlock()
+ bodyCtx.bb killIf outerCtx.bb.ignore
+ if (finalizer != EmptyTree)
+ bodyCtx.addFinalizer(finalizer, finalizerCtx)
- val finalCtx = body(bodyCtx)
+ val bodyEndCtx = body(bodyCtx)
outerCtx.bb.closeWith(JUMP(bodyCtx.bb))
- // msil backend will emit `Leave` to jump out of a try-block
- finalCtx.bb.closeWith(JUMP(afterCtx.bb))
+ normalExitReachable ||= !bodyEndCtx.bb.ignore
+ normalExitCtx.bb killUnless normalExitReachable
+ bodyEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
- afterCtx
+ emitFinalizer(normalExitCtx)
}
}
}
@@ -2246,7 +2114,7 @@ abstract class GenICode extends SubComponent {
/** Add an instruction that refers to this label. */
def addCallingInstruction(i: Instruction) =
- toPatch = i :: toPatch;
+ toPatch = i :: toPatch
/**
* Patch the code by replacing pseudo call instructions with
@@ -2301,14 +2169,13 @@ abstract class GenICode extends SubComponent {
* by a real JUMP instruction when all labels are resolved.
*/
abstract class PseudoJUMP(label: Label) extends Instruction {
- override def toString(): String = "PJUMP " + label.symbol
-
+ override def toString = s"PJUMP(${label.symbol})"
override def consumed = 0
override def produced = 0
// register with the given label
if (!label.anchored)
- label.addCallingInstruction(this);
+ label.addCallingInstruction(this)
}
case class PJUMP(whereto: Label) extends PseudoJUMP(whereto)
@@ -2338,7 +2205,6 @@ abstract class GenICode extends SubComponent {
val locals: ListBuffer[Local] = new ListBuffer
def add(l: Local) = locals += l
- def remove(l: Local) = locals -= l
/** Return all locals that are in scope. */
def varsInScope: Buffer[Local] = outer.varsInScope.clone() ++= locals
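Aside (illustrative, not part of the patch): the rewritten `Try` above tracks whether the normal exit of a try/catch/finally is reachable and kills that block otherwise. The standalone Scala sketch below models only that bookkeeping with a hypothetical `Block` type; `killIf`/`killUnless` mirror the calls in the hunk, but none of this is the compiler's real `BasicBlock`/`Context` API.

object TryLayoutSketch {
  // simplified stand-in for a basic block with an `ignore` (dead) flag
  final case class Block(label: String, var ignore: Boolean = false) {
    def killIf(cond: Boolean): Unit     = if (cond) ignore = true
    def killUnless(cond: Boolean): Unit = if (!cond) ignore = true
  }

  // bodyFallsThrough / handlerFallsThrough say whether the try body or a
  // catch handler can reach its end without a RETURN or THROW
  def emitTry(bodyFallsThrough: Boolean, handlerFallsThrough: Boolean): Block = {
    val normalExit = Block("normalExit")
    var normalExitReachable = false
    normalExitReachable ||= handlerFallsThrough   // after generating each handler
    normalExitReachable ||= bodyFallsThrough      // after generating the body
    normalExit killUnless normalExitReachable     // dead block, removed later
    normalExit
  }

  def main(args: Array[String]): Unit = {
    println(emitTry(bodyFallsThrough = false, handlerFallsThrough = false).ignore) // true
    println(emitTry(bodyFallsThrough = true,  handlerFallsThrough = false).ignore) // false
  }
}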
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
index f05def3123..0cdf629ce1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
@@ -9,7 +9,6 @@ package icode
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.symtab._
abstract class ICodeCheckers {
val global: Global
@@ -49,7 +48,7 @@ abstract class ICodeCheckers {
* @author Iulian Dragos
* @version 1.0, 06/09/2005
*
- * @todo Better checks for <code>MONITOR_ENTER/EXIT</code>
+ * @todo Better checks for `MONITOR_ENTER/EXIT`
* Better checks for local var initializations
*
* @todo Iulian says: I think there's some outdated logic in the checker.
@@ -95,7 +94,7 @@ abstract class ICodeCheckers {
}
def checkICodes(): Unit = {
- if (settings.verbose.value)
+ if (settings.verbose)
println("[[consistency check at the beginning of phase " + globalPhase.name + "]]")
classes.values foreach check
}
@@ -103,7 +102,6 @@ abstract class ICodeCheckers {
private def posStr(p: Position) =
if (p.isDefined) p.line.toString else "<??>"
- private def indent(s: String, spaces: Int): String = indent(s, " " * spaces)
private def indent(s: String, prefix: String): String = {
val lines = s split "\\n"
lines map (prefix + _) mkString "\n"
@@ -121,11 +119,11 @@ abstract class ICodeCheckers {
clasz = cls
for (f1 <- cls.fields ; f2 <- cls.fields ; if f1 < f2)
- if (isConfict(f1, f2, false))
+ if (isConfict(f1, f2, canOverload = false))
icodeError("Repetitive field name: " + f1.symbol.fullName)
for (m1 <- cls.methods ; m2 <- cls.methods ; if m1 < m2)
- if (isConfict(m1, m2, true))
+ if (isConfict(m1, m2, canOverload = true))
icodeError("Repetitive method: " + m1.symbol.fullName)
clasz.methods foreach check
@@ -170,12 +168,11 @@ abstract class ICodeCheckers {
val preds = bl.predecessors
def hasNothingType(s: TypeStack) = s.nonEmpty && (s.head == NothingReference)
- def hasNullType(s: TypeStack) = s.nonEmpty && (s.head == NullReference)
- /** XXX workaround #1: one stack empty, the other has BoxedUnit.
- * One example where this arises is:
+ /* XXX workaround #1: one stack empty, the other has BoxedUnit.
+ * One example where this arises is:
*
- * def f(b: Boolean): Unit = synchronized { if (b) () }
+ * def f(b: Boolean): Unit = synchronized { if (b) () }
*/
def allUnits(s: TypeStack) = s.types forall (_ == BoxedUnitReference)
@@ -184,10 +181,10 @@ abstract class ICodeCheckers {
case (x1, x2) if f(x2) => x1
}
- /** XXX workaround #2: different stacks heading into an exception
- * handler which will clear them anyway. Examples where it arises:
+ /* XXX workaround #2: different stacks heading into an exception
+ * handler which will clear them anyway. Examples where it arises:
*
- * var bippy: Int = synchronized { if (b) 5 else 10 }
+ * var bippy: Int = synchronized { if (b) 5 else 10 }
*/
def isHandlerBlock() = bl.exceptionHandlerStart
@@ -211,7 +208,7 @@ abstract class ICodeCheckers {
if (s1.length != s2.length) {
if (allUnits(s1) && allUnits(s2))
workaround("Ignoring mismatched boxed units")
- else if (isHandlerBlock)
+ else if (isHandlerBlock())
workaround("Ignoring mismatched stacks entering exception handler")
else
throw new CheckerException(incompatibleString)
@@ -236,8 +233,8 @@ abstract class ICodeCheckers {
}
if (preds.nonEmpty) {
- in(bl) = (preds map out.apply) reduceLeft meet2;
- log("Input changed for block: " + bl +" to: " + in(bl));
+ in(bl) = (preds map out.apply) reduceLeft meet2
+ log("Input changed for block: " + bl +" to: " + in(bl))
}
}
@@ -296,7 +293,7 @@ abstract class ICodeCheckers {
else prefix + " with initial stack " + initial.types.mkString("[", ", ", "]")
})
- var stack = new TypeStack(initial)
+ val stack = new TypeStack(initial)
def checkStack(len: Int) {
if (stack.length < len)
ICodeChecker.this.icodeError("Expected at least " + len + " elements on the stack", stack)
@@ -324,14 +321,14 @@ abstract class ICodeCheckers {
def popStackN(num: Int, instrFn: () => String = defaultInstrPrinter) = {
List.range(0, num) map { _ =>
val res = _popStack
- printStackString(false, res, instrFn())
+ printStackString(isPush = false, res, instrFn())
res
}
}
def pushStackN(xs: Seq[TypeKind], instrFn: () => String) = {
xs foreach { x =>
stack push x
- printStackString(true, x, instrFn())
+ printStackString(isPush = true, x, instrFn())
}
}
@@ -339,7 +336,7 @@ abstract class ICodeCheckers {
def popStack2 = { checkStack(2) ; (popStackN(2): @unchecked) match { case List(x, y) => (x, y) } }
def popStack3 = { checkStack(3) ; (popStackN(3): @unchecked) match { case List(x, y, z) => (x, y, z) } }
- /** Called by faux instruction LOAD_EXCEPTION to wipe out the stack. */
+ /* Called by faux instruction LOAD_EXCEPTION to wipe out the stack. */
def clearStack() = {
if (stack.nonEmpty)
logChecker("Wiping out the " + stack.length + " element stack for exception handler: " + stack)
@@ -354,7 +351,7 @@ abstract class ICodeCheckers {
def typeError(k1: TypeKind, k2: TypeKind) {
icodeError("\n expected: " + k1 + "\n found: " + k2)
}
- def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 <:< k2) || {
+ def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 isAssignabledTo k2) || {
import platform.isMaybeBoxed
(k1, k2) match {
@@ -369,11 +366,6 @@ abstract class ICodeCheckers {
}
}
- /** Return true if k1 is a subtype of any of the following types,
- * according to the somewhat relaxed subtyping standards in effect here.
- */
- def isOneOf(k1: TypeKind, kinds: TypeKind*) = kinds exists (k => isSubtype(k1, k))
-
def subtypeTest(k1: TypeKind, k2: TypeKind): Unit =
if (isSubtype(k1, k2)) ()
else typeError(k2, k1)
@@ -381,20 +373,19 @@ abstract class ICodeCheckers {
for (instr <- b) {
this.instruction = instr
- def checkLocal(local: Local): Unit = {
- method lookupLocal local.sym.name getOrElse {
- icodeError(" " + local + " is not defined in method " + method)
- }
+ def checkLocal(local: Local) {
+ if ((method lookupLocal local.sym.name).isEmpty)
+ icodeError(s" $local is not defined in method $method")
}
def checkField(obj: TypeKind, field: Symbol): Unit = obj match {
case REFERENCE(sym) =>
if (sym.info.member(field.name) == NoSymbol)
- icodeError(" " + field + " is not defined in class " + clasz);
+ icodeError(" " + field + " is not defined in class " + clasz)
case _ =>
- icodeError(" expected reference type, but " + obj + " found");
+ icodeError(" expected reference type, but " + obj + " found")
}
- /** Checks that tpe is a subtype of one of the allowed types */
+ /* Checks that tpe is a subtype of one of the allowed types */
def checkType(tpe: TypeKind, allowed: TypeKind*) = (
if (allowed exists (k => isSubtype(tpe, k))) ()
else icodeError(tpe + " is not one of: " + allowed.mkString("{ ", ", ", " }"))
@@ -402,16 +393,14 @@ abstract class ICodeCheckers {
def checkNumeric(tpe: TypeKind) =
checkType(tpe, BYTE, CHAR, SHORT, INT, LONG, FLOAT, DOUBLE)
- /** Checks that the 2 topmost elements on stack are of the
- * kind TypeKind.
- */
+ /* Checks that the 2 topmost elements on stack are of the kind TypeKind. */
def checkBinop(kind: TypeKind) {
val (a, b) = popStack2
checkType(a, kind)
checkType(b, kind)
}
- /** Check that arguments on the stack match method params. */
+ /* Check that arguments on the stack match method params. */
def checkMethodArgs(method: Symbol) {
val params = method.info.paramTypes
checkStack(params.length)
@@ -421,21 +410,18 @@ abstract class ICodeCheckers {
)
}
- /** Checks that the object passed as receiver has a method
- * <code>method</code> and that it is callable from the current method.
- *
- * @param receiver ...
- * @param method ...
+ /* Checks that the object passed as receiver has a method
+ * `method` and that it is callable from the current method.
*/
def checkMethod(receiver: TypeKind, method: Symbol) =
receiver match {
case REFERENCE(sym) =>
checkBool(sym.info.member(method.name) != NoSymbol,
- "Method " + method + " does not exist in " + sym.fullName);
+ "Method " + method + " does not exist in " + sym.fullName)
if (method.isPrivate)
checkBool(method.owner == clasz.symbol,
"Cannot call private method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName);
+ + " from " + clasz.symbol.fullName)
else if (method.isProtected) {
val isProtectedOK = (
(clasz.symbol isSubClass method.owner) ||
@@ -444,7 +430,7 @@ abstract class ICodeCheckers {
checkBool(isProtectedOK,
"Cannot call protected method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName);
+ + " from " + clasz.symbol.fullName)
}
case ARRAY(_) =>
@@ -458,7 +444,7 @@ abstract class ICodeCheckers {
def checkBool(cond: Boolean, msg: String) =
if (!cond) icodeError(msg)
- if (settings.debug.value) {
+ if (settings.debug) {
log("PC: " + instr)
log("stack: " + stack)
log("================")
@@ -476,8 +462,8 @@ abstract class ICodeCheckers {
subtypeTest(elem, kind)
pushStack(elem)
case (a, b) =>
- icodeError(" expected and INT and a array reference, but " +
- a + ", " + b + " found");
+ icodeError(" expected an INT and an array reference, but " +
+ a + ", " + b + " found")
}
case LOAD_LOCAL(local) =>
@@ -495,10 +481,10 @@ abstract class ICodeCheckers {
case LOAD_MODULE(module) =>
checkBool((module.isModule || module.isModuleClass),
- "Expected module: " + module + " flags: " + Flags.flagsToString(module.flags));
- pushStack(toTypeKind(module.tpe));
+ "Expected module: " + module + " flags: " + module.flagString)
+ pushStack(toTypeKind(module.tpe))
- case STORE_THIS(kind) =>
+ case STORE_THIS(kind) =>
val actualType = popStack
if (actualType.isReferenceType) subtypeTest(actualType, kind)
else icodeError("Expected this reference but found: " + actualType)
@@ -510,7 +496,7 @@ abstract class ICodeCheckers {
subtypeTest(k, elem)
case (a, b, c) =>
icodeError(" expected and array reference, and int and " + kind +
- " but " + a + ", " + b + ", " + c + " found");
+ " but " + a + ", " + b + ", " + c + " found")
}
case STORE_LOCAL(local) =>
@@ -606,7 +592,7 @@ abstract class ICodeCheckers {
case x if style.hasInstance => x + 1
case x => x
}
- if (style == Static(true))
+ if (style == Static(onInstance = true))
checkBool(method.isPrivate || method.isConstructor, "Static call to non-private method.")
checkStack(paramCount)
@@ -665,7 +651,7 @@ abstract class ICodeCheckers {
case RETURN(kind) =>
val top = popStack
if (kind.isValueType) checkType(top, kind)
- else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not");
+ else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not")
case THROW(clasz) =>
checkType(popStack, toTypeKind(clasz.tpe))
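Aside (illustrative, not part of the patch): the checker above repeatedly pops a fixed number of kinds off a simulated operand stack via `checkStack`/`popStackN`. The following is a minimal, hypothetical sketch of that pattern, not the real `TypeStack`/`TypeKind` types.

object StackCheckSketch {
  sealed trait Kind
  case object INT  extends Kind
  case object LONG extends Kind

  final class TypeStack(private var elems: List[Kind] = Nil) {
    def length: Int         = elems.length
    def push(k: Kind): Unit = elems ::= k
    def pop(): Kind = elems match {
      case k :: rest => elems = rest; k
      case Nil       => sys.error("pop on empty stack")
    }
  }

  def checkStack(stack: TypeStack, len: Int): Unit =
    require(stack.length >= len, s"Expected at least $len elements on the stack")

  def popStackN(stack: TypeStack, num: Int): List[Kind] = {
    checkStack(stack, num)
    List.fill(num)(stack.pop())   // pops `num` times, most recent first
  }

  def main(args: Array[String]): Unit = {
    val s = new TypeStack
    s.push(INT); s.push(LONG)
    println(popStackN(s, 2)) // List(LONG, INT)
  }
}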
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 93201089e4..bc35a9e7de 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -8,10 +8,9 @@ package backend
package icode
import java.io.PrintWriter
-import scala.collection.mutable
-import scala.tools.nsc.symtab._
import analysis.{ Liveness, ReachingDefinitions }
import scala.tools.nsc.symtab.classfile.ICodeReader
+import scala.reflect.io.AbstractFile
/** Glue together ICode parts.
*
@@ -30,14 +29,14 @@ abstract class ICodes extends AnyRef
with Repository
{
val global: Global
- import global.{ log, definitions, settings, perRunCaches }
+ import global.{ log, definitions, settings, perRunCaches, devWarning }
/** The ICode representation of classes */
val classes = perRunCaches.newMap[global.Symbol, IClass]()
/** Debugging flag */
def shouldCheckIcode = settings.check contains global.genicode.phaseName
- def checkerDebug(msg: String) = if (shouldCheckIcode && global.opt.debug) println(msg)
+ def checkerDebug(msg: String) = if (shouldCheckIcode && global.settings.debug) println(msg)
/** The ICode linearizer. */
val linearizer: Linearizer = settings.Xlinearizer.value match {
@@ -84,7 +83,7 @@ abstract class ICodes extends AnyRef
// Something is leaving open/empty blocks around (see SI-4840) so
// let's not kill the deal unless it's nonempty.
if (b.isEmpty) {
- log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
+ devWarning(s"Found open but empty block while inlining $m: removing from block list.")
m.code removeBlock b
}
else dumpMethodAndAbort(m, b)
@@ -106,10 +105,15 @@ abstract class ICodes extends AnyRef
lazy val NullReference: TypeKind = REFERENCE(definitions.NullClass)
lazy val ObjectReference: TypeKind = REFERENCE(definitions.ObjectClass)
lazy val StringReference: TypeKind = REFERENCE(definitions.StringClass)
- lazy val ThrowableReference: TypeKind = REFERENCE(definitions.ThrowableClass)
object icodeReader extends ICodeReader {
lazy val global: ICodes.this.global.type = ICodes.this.global
+ import global._
+ def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
+ global.loaders.lookupMemberAtTyperPhaseIfPossible(sym, name)
+ lazy val symbolTable: global.type = global
+ lazy val loaders: global.loaders.type = global.loaders
+ def classPath: util.ClassPath[AbstractFile] = ICodes.this.global.platform.classPath
}
/** A phase which works on icode. */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index a38eab4515..54be9d18f1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -4,11 +4,11 @@
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
-import scala.tools.nsc.ast._
import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
@@ -36,15 +36,15 @@ trait Linearizers {
var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- val b = m.startBlock;
- blocks = Nil;
+ val b = m.startBlock
+ blocks = Nil
run {
- worklist pushAll (m.exh map (_.startBlock));
- worklist.push(b);
+ worklist pushAll (m.exh map (_.startBlock))
+ worklist.push(b)
}
- blocks.reverse;
+ blocks.reverse
}
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
@@ -56,30 +56,30 @@ trait Linearizers {
/** Linearize another subtree and append it to the existing blocks. */
def linearize(startBlock: BasicBlock): List[BasicBlock] = {
//blocks = startBlock :: Nil;
- run( { worklist.push(startBlock); } );
- blocks.reverse;
+ run( { worklist.push(startBlock); } )
+ blocks.reverse
}
def processElement(b: BasicBlock) =
if (b.nonEmpty) {
- add(b);
+ add(b)
b.lastInstruction match {
case JUMP(whereto) =>
- add(whereto);
+ add(whereto)
case CJUMP(success, failure, _, _) =>
- add(success);
- add(failure);
+ add(success)
+ add(failure)
case CZJUMP(success, failure, _, _) =>
- add(success);
- add(failure);
+ add(success)
+ add(failure)
case SWITCH(_, labels) =>
- add(labels);
- case RETURN(_) => ();
- case THROW(clasz) => ();
+ add(labels)
+ case RETURN(_) => ()
+ case THROW(clasz) => ()
}
}
- def dequeue: Elem = worklist.pop;
+ def dequeue: Elem = worklist.pop()
/**
* Prepend b to the list, if not already scheduled.
@@ -89,25 +89,25 @@ trait Linearizers {
if (blocks.contains(b))
()
else {
- blocks = b :: blocks;
- worklist push b;
+ blocks = b :: blocks
+ worklist push b
}
}
- def add(bs: List[BasicBlock]): Unit = bs foreach add;
+ def add(bs: List[BasicBlock]): Unit = bs foreach add
}
/**
* Linearize code using a depth first traversal.
*/
class DepthFirstLinerizer extends Linearizer {
- var blocks: List[BasicBlock] = Nil;
+ var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil;
+ blocks = Nil
- dfs(m.startBlock);
- m.exh foreach (b => dfs(b.startBlock));
+ dfs(m.startBlock)
+ m.exh foreach (b => dfs(b.startBlock))
blocks.reverse
}
@@ -120,7 +120,7 @@ trait Linearizers {
def dfs(b: BasicBlock): Unit =
if (b.nonEmpty && add(b))
- b.successors foreach dfs;
+ b.successors foreach dfs
/**
* Prepend b to the list, if not already scheduled.
@@ -129,7 +129,7 @@ trait Linearizers {
*/
def add(b: BasicBlock): Boolean =
!(blocks contains b) && {
- blocks = b :: blocks;
+ blocks = b :: blocks
true
}
}
@@ -145,12 +145,12 @@ trait Linearizers {
val added = new mutable.BitSet
def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil;
+ blocks = Nil
visited.clear()
- added.clear;
+ added.clear()
- m.exh foreach (b => rpo(b.startBlock));
- rpo(m.startBlock);
+ m.exh foreach (b => rpo(b.startBlock))
+ rpo(m.startBlock)
// if the start block has predecessors, it won't be the first one
// in the linearization, so we need to enforce it here
@@ -171,7 +171,7 @@ trait Linearizers {
def rpo(b: BasicBlock): Unit =
if (b.nonEmpty && !visited(b)) {
- visited += b;
+ visited += b
b.successors foreach rpo
add(b)
}
@@ -185,7 +185,7 @@ trait Linearizers {
if (!added(b.label)) {
added += b.label
- blocks = b :: blocks;
+ blocks = b :: blocks
}
}
}
@@ -198,142 +198,4 @@ trait Linearizers {
def linearize(m: IMethod): List[BasicBlock] = m.blocks
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = sys.error("not implemented")
}
-
- /** The MSIL linearizer is used only for methods with at least one exception handler.
- * It makes sure that all the blocks belonging to a `try`, `catch` or `finally` block
- * are emitted in an order that allows the lexical nesting of try-catch-finally, just
- * like in the source code.
- */
- class MSILLinearizer extends Linearizer {
- /** The MSIL linearizer first calls a NormalLInearizer. This is because the ILGenerator checks
- * the stack size before emitting instructions. For instance, to emit a `store`, there needs
- * to be some value on the stack. This can blow up in situations like this:
- * ...
- * jump 3
- * 4: store_local 0
- * jump 5
- * 3: load_value
- * jump 4
- * 5: ...
- * here, 3 must be scheduled first.
- *
- * The NormalLinearizer also removes dead blocks (blocks without predecessor). This is important
- * in the following example:
- * try { throw new Exception }
- * catch { case e => throw e }
- * which adds a dead block containing just a "throw" (which, again, would blow up code generation
- * because of the stack size; there's no value on the stack when emitting that `throw`)
- */
- val normalLinearizer = new NormalLinearizer()
-
- def linearize(m: IMethod): List[BasicBlock] = {
-
- val handlersByCovered = m.exh.groupBy(_.covered)
-
- // number of basic blocks covered by the entire try-catch expression
- def size(covered: scala.collection.immutable.Set[BasicBlock]) = {
- val hs = handlersByCovered(covered)
- covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
- }
-
- val tryBlocks = handlersByCovered.keys.toList sortBy size
- var result = normalLinearizer.linearize(m)
- val frozen = mutable.HashSet[BasicBlock](result.head)
-
- for (tryBlock <- tryBlocks) {
- result = groupBlocks(m, result, handlersByCovered(tryBlock), frozen)
- }
- result
- }
-
- /** @param handlers a list of handlers covering the same blocks (same try, multiple catches)
- * @param frozen blocks can't be moved (fist block of a method, blocks directly following a try-catch)
- */
- def groupBlocks(method: IMethod, blocks: List[BasicBlock], handlers: List[ExceptionHandler], frozen: mutable.HashSet[BasicBlock]) = {
- assert(blocks.head == method.startBlock, method)
-
- // blocks before the try, and blocks for the try
- val beforeAndTry = new ListBuffer[BasicBlock]()
- // blocks for the handlers
- val catches = handlers map (_ => new ListBuffer[BasicBlock]())
- // blocks to be put at the end
- val after = new ListBuffer[BasicBlock]()
-
- var beforeTry = true
- val head = handlers.head
-
- for (b <- blocks) {
- if (head covers b) {
- beforeTry = false
- beforeAndTry += b
- } else {
- val handlerIndex = handlers.indexWhere(_.blocks.contains(b))
- if (handlerIndex >= 0) {
- catches(handlerIndex) += b
- } else if (beforeTry) {
- beforeAndTry += b
- } else {
- after += b
- }
- }
- }
-
- // reorder the blocks in "catches" so that the "firstBlock" is actually first
- (catches, handlers).zipped foreach { (lb, handler) =>
- lb -= handler.startBlock
- handler.startBlock +=: lb
- }
-
- // The first block emitted after a try-catch must be the one that the try / catch
- // blocks jump to (because in msil, these jumps cannot be emitted manually)
- var firstAfter: Option[BasicBlock] = None
-
- // Find the (hopefully) unique successor, look at the try and all catch blocks
- var blks = head.covered.toList :: handlers.map(_.blocks)
- while (firstAfter.isEmpty && !blks.isEmpty) {
- val b = blks.head
- blks = blks.tail
-
- val leaving = leavingBlocks(b)
- // no leaving blocks when the try or catch ends with THROW or RET
- if (!leaving.isEmpty) {
- assert(leaving.size <= 1, leaving)
- firstAfter = Some(leaving.head)
- }
- }
- if (firstAfter.isDefined) {
- val b = firstAfter.get
- if (frozen(b)) {
- assert(after contains b, b +", "+ method)
- } else {
- frozen += b
- if (beforeAndTry contains b) {
- beforeAndTry -= b
- } else {
- assert(after contains b, after)
- after -= b
- }
- b +=: after
- }
- }
-
- for (lb <- catches) { beforeAndTry ++= lb }
- beforeAndTry ++= after
- beforeAndTry.toList
- }
-
- /** Returns all direct successors of `blocks` wich are not part
- * that list, i.e. successors outside the `blocks` list.
- */
- private def leavingBlocks(blocks: List[BasicBlock]) = {
- val res = new mutable.HashSet[BasicBlock]()
- for (b <- blocks; s <- b.directSuccessors; if (!blocks.contains(s)))
- res += s
- res
- }
-
- def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
- sys.error("not implemented")
- }
- }
}
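Aside (illustrative, not part of the patch): the `ReversePostOrderLinearizer` touched above builds its block list by prepending each block only after visiting its successors. A self-contained sketch of that reverse-post-order idea on a hypothetical `Node` graph:

object RpoSketch {
  final case class Node(label: Int, var successors: List[Node] = Nil)

  def reversePostOrder(start: Node): List[Node] = {
    var visited = Set.empty[Int]
    var blocks: List[Node] = Nil        // built in reverse, so the result is RPO
    def rpo(b: Node): Unit =
      if (!visited(b.label)) {
        visited += b.label
        b.successors foreach rpo
        blocks ::= b                    // prepend after all successors, like `add(b)` above
      }
    rpo(start)
    blocks
  }

  def main(args: Array[String]): Unit = {
    val c = Node(3); val b = Node(2, List(c)); val a = Node(1, List(b, c))
    println(reversePostOrder(a).map(_.label)) // List(1, 2, 3)
  }
}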
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 00bcf603cf..267fa15312 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -3,14 +3,13 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
-import java.io.PrintWriter
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
-import symtab.Flags.{ DEFERRED }
trait ReferenceEquality {
override def hashCode = System.identityHashCode(this)
@@ -48,27 +47,33 @@ trait Members {
def touched = _touched
def touched_=(b: Boolean): Unit = {
- if (b)
- blocks foreach (_.touched = true)
+ @annotation.tailrec def loop(xs: List[BasicBlock]) {
+ xs match {
+ case Nil =>
+ case x :: xs => x.touched = true ; loop(xs)
+ }
+ }
+ if (b) loop(blocks.toList)
_touched = b
}
// Constructor code
- startBlock = newBlock
+ startBlock = newBlock()
def removeBlock(b: BasicBlock) {
- if (settings.debug.value) {
- assert(blocks forall (p => !(p.successors contains b)),
- "Removing block that is still referenced in method code " + b + "preds: " + b.predecessors
- )
- assert(b != startBlock || b.successors.length == 1,
- "Removing start block with more than one successor."
- )
+ if (settings.debug) {
+ // only do this sanity check when debug is turned on because it's moderately expensive
+ val referers = blocks filter (_.successors contains b)
+ assert(referers.isEmpty, s"Trying to remove block $b (with preds ${b.predecessors.mkString}) but it is still referred to from block(s) ${referers.mkString}")
}
- if (b == startBlock)
+ if (b == startBlock) {
+ assert(b.successors.length == 1,
+ s"Removing start block ${b} with ${b.successors.length} successors (${b.successors.mkString})."
+ )
startBlock = b.successors.head
+ }
blocks -= b
assert(!blocks.contains(b))
@@ -77,7 +82,7 @@ trait Members {
}
/** This methods returns a string representation of the ICode */
- override def toString = "ICode '" + name + "'";
+ override def toString = "ICode '" + name + "'"
/* Compute a unique new label */
def nextLabel: Int = {
@@ -89,8 +94,8 @@ trait Members {
*/
def newBlock(): BasicBlock = {
touched = true
- val block = new BasicBlock(nextLabel, method);
- blocks += block;
+ val block = new BasicBlock(nextLabel, method)
+ blocks += block
block
}
}
@@ -103,6 +108,14 @@ trait Members {
if (symbol eq other.symbol) 0
else if (symbol isLess other.symbol) -1
else 1
+
+ override def equals(other: Any): Boolean =
+ other match {
+ case other: IMember => (this compare other) == 0
+ case _ => false
+ }
+
+ override def hashCode = symbol.##
}
/** Represent a class in ICode */
@@ -112,25 +125,23 @@ trait Members {
var cunit: CompilationUnit = _
def addField(f: IField): this.type = {
- fields = f :: fields;
+ fields = f :: fields
this
}
def addMethod(m: IMethod): this.type = {
- methods = m :: methods;
+ methods = m :: methods
this
}
def setCompilationUnit(unit: CompilationUnit): this.type = {
- this.cunit = unit;
+ this.cunit = unit
this
}
override def toString() = symbol.fullName
- def lookupField(s: Symbol) = fields find (_.symbol == s)
def lookupMethod(s: Symbol) = methods find (_.symbol == s)
- def lookupMethod(s: Name) = methods find (_.symbol.name == s)
/* returns this methods static ctor if it has one. */
def lookupStaticCtor: Option[IMethod] = methods find (_.symbol.isStaticConstructor)
@@ -154,14 +165,13 @@ trait Members {
class IMethod(val symbol: Symbol) extends IMember {
var code: Code = NoCode
- def newBlock() = code.newBlock
+ def newBlock() = code.newBlock()
def startBlock = code.startBlock
def lastBlock = { assert(blocks.nonEmpty, symbol); blocks.last }
def blocks = code.blocksList
def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this
def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f
- def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f)
var native = false
@@ -181,7 +191,7 @@ trait Members {
def hasCode = code ne NoCode
def setCode(code: Code): IMethod = {
- this.code = code;
+ this.code = code
this
}
@@ -199,7 +209,6 @@ trait Members {
}
def addLocals(ls: List[Local]) = ls foreach addLocal
- def addParams(as: List[Local]) = as foreach addParam
def lookupLocal(n: Name): Option[Local] = locals find (_.sym.name == n)
def lookupLocal(sym: Symbol): Option[Local] = locals find (_.sym == sym)
@@ -214,28 +223,7 @@ trait Members {
override def toString() = symbol.fullName
- def matchesSignature(other: IMethod) = {
- (symbol.name == other.symbol.name) &&
- (params corresponds other.params)(_.kind == _.kind) &&
- (returnType == other.returnType)
- }
-
import opcodes._
- def checkLocals(): Unit = {
- def localsSet = (code.blocks flatMap { bb =>
- bb.iterator collect {
- case LOAD_LOCAL(l) => l
- case STORE_LOCAL(l) => l
- }
- }).toSet
-
- if (hasCode) {
- log("[checking locals of " + this + "]")
- locals filterNot localsSet foreach { l =>
- log("Local " + l + " is not declared in " + this)
- }
- }
- }
/** Merge together blocks that have a single successor which has a
* single predecessor. Exception handlers are taken into account (they
@@ -247,10 +235,10 @@ trait Members {
val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty
for (b <- code.blocks.toList
if b.successors.length == 1;
- succ = b.successors.head;
- if succ ne b;
- if succ.predecessors.length == 1;
- if succ.predecessors.head eq b;
+ succ = b.successors.head
+ if succ ne b
+ if succ.predecessors.length == 1
+ if succ.predecessors.head eq b
if !(exh.exists { (e: ExceptionHandler) =>
(e.covers(succ) && !e.covers(b)) || (e.covers(b) && !e.covers(succ)) })) {
nextBlock(b) = succ
@@ -259,10 +247,10 @@ trait Members {
var bb = code.startBlock
while (!nextBlock.isEmpty) {
if (nextBlock.isDefinedAt(bb)) {
- bb.open
+ bb.open()
var succ = bb
do {
- succ = nextBlock(succ);
+ succ = nextBlock(succ)
val lastInstr = bb.lastInstruction
/* Ticket SI-5672
* Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it pushes.
@@ -273,7 +261,7 @@ trait Members {
val oldTKs = lastInstr.consumedTypes
assert(lastInstr.consumed == oldTKs.size, "Someone forgot to override consumedTypes() in " + lastInstr)
- bb.removeLastInstruction
+ bb.removeLastInstruction()
for(tk <- oldTKs.reverse) { bb.emit(DROP(tk), lastInstr.pos) }
succ.toList foreach { i => bb.emit(i, i.pos) }
code.removeBlock(succ)
@@ -281,9 +269,9 @@ trait Members {
nextBlock -= bb
} while (nextBlock.isDefinedAt(succ))
- bb.close
+ bb.close()
} else
- bb = nextBlock.keysIterator.next
+ bb = nextBlock.keysIterator.next()
}
checkValid(this)
}
@@ -298,15 +286,6 @@ trait Members {
class Local(val sym: Symbol, val kind: TypeKind, val arg: Boolean) {
var index: Int = -1
- /** Starting PC for this local's visibility range. */
- var start: Int = _
-
- /** Ending PC for this local's visibility range. */
- var end: Int = _
-
- /** PC-based ranges for this local variable's visibility */
- var ranges: List[(Int, Int)] = Nil
-
override def equals(other: Any): Boolean = other match {
case x: Local => sym == x.sym
case _ => false
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index a3a0edb35d..076f84ce7a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -3,13 +3,11 @@
* @author Martin Odersky
*/
-
-
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
-import scala.tools.nsc.ast._
import scala.reflect.internal.util.{Position,NoPosition}
/*
@@ -67,7 +65,7 @@ import scala.reflect.internal.util.{Position,NoPosition}
* in the source files.
*/
trait Opcodes { self: ICodes =>
- import global.{Symbol, NoSymbol, Type, Name, Constant};
+ import global.{Symbol, NoSymbol, Name, Constant}
// categories of ICode instructions
final val localsCat = 1
@@ -111,17 +109,11 @@ trait Opcodes { self: ICodes =>
// Vlad: I wonder why we keep producedTypes around -- it looks like an useless thing to have
def producedTypes: List[TypeKind] = Nil
- /** This method returns the difference of size of the stack when the instruction is used */
- def difference = produced-consumed
-
/** The corresponding position in the source file */
private var _pos: Position = NoPosition
def pos: Position = _pos
- /** Used by dead code elimination. */
- var useful: Boolean = false
-
def setPos(p: Position): this.type = {
_pos = p
this
@@ -133,13 +125,6 @@ trait Opcodes { self: ICodes =>
}
object opcodes {
-
- def mayThrow(i: Instruction): Boolean = i match {
- case LOAD_LOCAL(_) | STORE_LOCAL(_) | CONSTANT(_) | THIS(_) | CZJUMP(_, _, _, _)
- | DROP(_) | DUP(_) | RETURN(_) | LOAD_EXCEPTION(_) | JUMP(_) | CJUMP(_, _, _, _) => false
- case _ => true
- }
-
/** Loads "this" on top of the stack.
* Stack: ...
* ->: ...:ref
@@ -211,7 +196,7 @@ trait Opcodes { self: ICodes =>
case class LOAD_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString());
+ "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString())
override def consumed = if (isStatic) 0 else 1
override def produced = 1
@@ -273,16 +258,17 @@ trait Opcodes { self: ICodes =>
case class STORE_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)");
+ "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)")
- override def consumed = if(isStatic) 1 else 2;
- override def produced = 0;
+ override def consumed = if(isStatic) 1 else 2
+
+ override def produced = 0
override def consumedTypes =
if (isStatic)
toTypeKind(field.tpe) :: Nil
else
- REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil;
+ REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil
override def category = fldsCat
}
@@ -409,19 +395,19 @@ trait Opcodes { self: ICodes =>
override def category = mthdsCat
}
-
+
/**
* A place holder entry that allows us to parse class files with invoke dynamic
* instructions. Because the compiler doesn't yet really understand the
* behavior of invokeDynamic, this op acts as a poison pill. Any attempt to analyze
* this instruction will cause a failure. The only optimization that
* should ever look at non-Scala generated icode is the inliner, and it
- * has been modified to not examine any method with invokeDynamic
+ * has been modified to not examine any method with invokeDynamic
* instructions. So if this poison pill ever causes problems then
* there's been a serious misunderstanding
*/
// TODO do the real thing
- case class INVOKE_DYNAMIC(poolEntry: Char) extends Instruction {
+ case class INVOKE_DYNAMIC(poolEntry: Int) extends Instruction {
private def error = sys.error("INVOKE_DYNAMIC is not fully implemented and should not be analyzed")
override def consumed = error
override def produced = error
@@ -455,10 +441,12 @@ trait Opcodes { self: ICodes =>
*/
case class NEW(kind: REFERENCE) extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String = "NEW "+ kind;
+ override def toString(): String = "NEW "+ kind
+
+ override def consumed = 0
+
+ override def produced = 1
- override def consumed = 0;
- override def produced = 1;
override def producedTypes = kind :: Nil
/** The corresponding constructor call. */
@@ -474,11 +462,13 @@ trait Opcodes { self: ICodes =>
*/
case class CREATE_ARRAY(elem: TypeKind, dims: Int) extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims;
+ override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims
+
+ override def consumed = dims
- override def consumed = dims;
override def consumedTypes = List.fill(dims)(INT)
- override def produced = 1;
+ override def produced = 1
+
override def producedTypes = ARRAY(elem) :: Nil
override def category = arraysCat
@@ -567,7 +557,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = (
"CJUMP (" + kind + ")" +
cond + " ? "+successBlock.label+" : "+failureBlock.label
- );
+ )
override def consumed = 2
override def produced = 0
@@ -590,7 +580,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = (
"CZJUMP (" + kind + ")" +
cond + " ? "+successBlock.label+" : "+failureBlock.label
- );
+ )
override def consumed = 1
override def produced = 0
@@ -682,10 +672,11 @@ trait Opcodes { self: ICodes =>
*/
case class MONITOR_EXIT() extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String ="MONITOR_EXIT";
+ override def toString(): String ="MONITOR_EXIT"
- override def consumed = 1;
- override def produced = 0;
+ override def consumed = 1
+
+ override def produced = 0
override def consumedTypes = ObjectReference :: Nil
@@ -772,74 +763,5 @@ trait Opcodes { self: ICodes =>
override def isSuper = true
override def toString(): String = { "super(" + mix + ")" }
}
-
-
- // CLR backend
-
- case class CIL_LOAD_LOCAL_ADDRESS(local: Local) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String = "CIL_LOAD_LOCAL_ADDRESS "+local //+isArgument?" (argument)":"";
-
- override def consumed = 0
- override def produced = 1
-
- override def producedTypes = msil_mgdptr(local.kind) :: Nil
-
- override def category = localsCat
- }
-
- case class CIL_LOAD_FIELD_ADDRESS(field: Symbol, isStatic: Boolean) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String =
- "CIL_LOAD_FIELD_ADDRESS " + (if (isStatic) field.fullName else field.toString)
-
- override def consumed = if (isStatic) 0 else 1
- override def produced = 1
-
- override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil;
- override def producedTypes = msil_mgdptr(REFERENCE(field.owner)) :: Nil;
-
- override def category = fldsCat
- }
-
- case class CIL_LOAD_ARRAY_ITEM_ADDRESS(kind: TypeKind) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String = "CIL_LOAD_ARRAY_ITEM_ADDRESS (" + kind + ")"
-
- override def consumed = 2
- override def produced = 1
-
- override def consumedTypes = ARRAY(kind) :: INT :: Nil
- override def producedTypes = msil_mgdptr(kind) :: Nil
-
- override def category = arraysCat
- }
-
- case class CIL_UNBOX(valueType: TypeKind) extends Instruction {
- override def toString(): String = "CIL_UNBOX " + valueType
- override def consumed = 1
- override def consumedTypes = ObjectReferenceList // actually consumes a 'boxed valueType'
- override def produced = 1
- override def producedTypes = msil_mgdptr(valueType) :: Nil
- override def category = objsCat
- }
-
- case class CIL_INITOBJ(valueType: TypeKind) extends Instruction {
- override def toString(): String = "CIL_INITOBJ " + valueType
- override def consumed = 1
- override def consumedTypes = ObjectReferenceList // actually consumes a managed pointer
- override def produced = 0
- override def category = objsCat
- }
-
- case class CIL_NEWOBJ(method: Symbol) extends Instruction {
- override def toString(): String = "CIL_NEWOBJ " + hostClass.fullName + method.fullName
- var hostClass: Symbol = method.owner;
- override def consumed = method.tpe.paramTypes.length
- override def consumedTypes = method.tpe.paramTypes map toTypeKind
- override def produced = 1
- override def producedTypes = toTypeKind(method.tpe.resultType) :: Nil
- override def category = objsCat
- }
}
}
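Aside (illustrative, not part of the patch): every `Instruction` above describes its stack effect through `consumed` and `produced`. The hypothetical mini-hierarchy below shows how those two numbers give the net stack-depth change of a straight-line sequence; the real opcodes and categories are of course much richer.

object StackEffectSketch {
  sealed trait Instruction { def consumed: Int; def produced: Int }
  case object DUP_TOP        extends Instruction { def consumed = 1; def produced = 2 }
  final case class DROP(n: Int) extends Instruction { def consumed = n; def produced = 0 }

  // net stack-depth change of a straight-line block of instructions
  def netEffect(is: List[Instruction]): Int =
    is.map(i => i.produced - i.consumed).sum

  def main(args: Array[String]): Unit =
    println(netEffect(List(DUP_TOP, DROP(1)))) // 0
}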
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index c8579041ba..f81c42d836 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package backend
-package icode;
+package icode
-import java.io.PrintWriter;
+import java.io.PrintWriter
trait Primitives { self: ICodes =>
@@ -51,12 +51,12 @@ trait Primitives { self: ICodes =>
// type : (src) => dst
// range: src,dst <- { Ix, Ux, Rx }
// jvm : i2{l, f, d}, l2{i, f, d}, f2{i, l, d}, d2{i, l, f}, i2{b, c, s}
- case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive;
+ case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive
// type : (Array[REF]) => I4
// range: type <- { BOOL, Ix, Ux, Rx, REF }
// jvm : arraylength
- case class ArrayLength(kind: TypeKind) extends Primitive;
+ case class ArrayLength(kind: TypeKind) extends Primitive
// type : (buf,el) => buf
// range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR }
@@ -76,25 +76,10 @@ trait Primitives { self: ICodes =>
/** Pretty printer for primitives */
class PrimitivePrinter(out: PrintWriter) {
-
def print(s: String): PrimitivePrinter = {
out.print(s)
this
}
-
- def print(o: AnyRef): PrimitivePrinter = print(o.toString())
-
- def printPrimitive(prim: Primitive) = prim match {
- case Negation(kind) =>
- print("!")
-
- case Test(op, kind, zero) =>
- print(op).print(kind)
-
- case Comparison(op, kind) =>
- print(op).print("(").print(kind)
-
- }
}
/** This class represents a comparison operation. */
@@ -243,9 +228,9 @@ trait Primitives { self: ICodes =>
/** Returns a string representation of this operation. */
override def toString(): String = this match {
- case AND => return "AND"
- case OR => return "OR"
- case XOR => return "XOR"
+ case AND => "AND"
+ case OR => "OR"
+ case XOR => "XOR"
case _ => throw new RuntimeException("LogicalOp unknown case")
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index 6cac641e3e..1fe33f78e7 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -8,13 +8,9 @@ package backend
package icode
import java.io.PrintWriter
-import scala.tools.nsc.symtab.Flags
-import scala.reflect.internal.util.Position
trait Printers { self: ICodes =>
import global._
- import global.icodes.opcodes._
- import global.icodes._
class TextPrinter(writer: PrintWriter, lin: Linearizer) {
private var margin = 0
@@ -31,15 +27,15 @@ trait Printers { self: ICodes =>
def print(o: Any) { print(o.toString()) }
def println(s: String) {
- print(s);
- println
+ print(s)
+ println()
}
def println() {
out.println()
var i = 0
while (i < margin) {
- print(" ");
+ print(" ")
i += 1
}
}
@@ -57,26 +53,26 @@ trait Printers { self: ICodes =>
}
def printClass(cls: IClass) {
- print(cls.symbol.toString()); print(" extends ");
- printList(cls.symbol.info.parents, ", ");
- indent; println(" {");
- println("// fields:");
- cls.fields.foreach(printField); println;
- println("// methods");
- cls.methods.foreach(printMethod);
- undent; println;
+ print(cls.symbol.toString()); print(" extends ")
+ printList(cls.symbol.info.parents, ", ")
+ indent(); println(" {")
+ println("// fields:")
+ cls.fields.foreach(printField); println()
+ println("// methods")
+ cls.methods.foreach(printMethod)
+ undent(); println()
println("}")
}
def printField(f: IField) {
- print(f.symbol.keyString); print(" ");
- print(f.symbol.nameString); print(": ");
- println(f.symbol.info.toString());
+ print(f.symbol.keyString); print(" ")
+ print(f.symbol.nameString); print(": ")
+ println(f.symbol.info.toString())
}
def printMethod(m: IMethod) {
- print("def "); print(m.symbol.name);
- print("("); printList(printParam)(m.params, ", "); print(")");
+ print("def "); print(m.symbol.name)
+ print("("); printList(printParam)(m.params, ", "); print(")")
print(": "); print(m.symbol.info.resultType)
if (!m.isAbstractMethod) {
@@ -84,40 +80,40 @@ trait Printers { self: ICodes =>
println("locals: " + m.locals.mkString("", ", ", ""))
println("startBlock: " + m.startBlock)
println("blocks: " + m.code.blocks.mkString("[", ",", "]"))
- println
+ println()
lin.linearize(m) foreach printBlock
println("}")
- indent; println("Exception handlers: ")
+ indent(); println("Exception handlers: ")
m.exh foreach printExceptionHandler
- undent; println
+ undent(); println()
} else
- println
+ println()
}
def printParam(p: Local) {
- print(p.sym.name); print(": "); print(p.sym.info);
+ print(p.sym.name); print(": "); print(p.sym.info)
print(" ("); print(p.kind); print(")")
}
def printExceptionHandler(e: ExceptionHandler) {
- indent;
- println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock);
- println("consisting of blocks: " + e.blocks);
- undent;
- println("with finalizer: " + e.finalizer);
-// linearizer.linearize(e.startBlock) foreach printBlock;
+ indent()
+ println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock)
+ println("consisting of blocks: " + e.blocks)
+ undent()
+ println("with finalizer: " + e.finalizer)
+ // linearizer.linearize(e.startBlock) foreach printBlock;
}
def printBlock(bb: BasicBlock) {
print(bb.label)
if (bb.loopHeader) print("[loop header]")
- print(": ");
- if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
- indent; println
+ print(": ")
+ if (settings.debug) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
+ indent(); println()
bb.toList foreach printInstruction
- undent; println
+ undent(); println()
}
def printInstruction(i: Instruction) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
index e73015c4da..10d57df4a3 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
@@ -26,17 +26,6 @@ trait Repository {
/** The icode of the given class, if available */
def icode(sym: Symbol): Option[IClass] = (classes get sym) orElse (loaded get sym)
- /** The icode of the given class. If not available, it loads
- * its bytecode.
- */
- def icode(sym: Symbol, force: Boolean): IClass =
- icode(sym) getOrElse {
- log("loading " + sym)
- load(sym)
- assert(available(sym))
- loaded(sym)
- }
-
/** Load bytecode for given symbol. */
def load(sym: Symbol): Boolean = {
try {
@@ -50,7 +39,7 @@ trait Repository {
} catch {
case e: Throwable => // possible exceptions are MissingRequirementError, IOException and TypeError -> no better common supertype
log("Failed to load %s. [%s]".format(sym.fullName, e.getMessage))
- if (settings.debug.value) { e.printStackTrace }
+ if (settings.debug) { e.printStackTrace }
false
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 4f8fda8024..633e71a756 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -55,7 +55,7 @@ trait TypeKinds { self: ICodes =>
def toType: Type = reversePrimitiveMap get this map (_.tpe) getOrElse {
this match {
- case REFERENCE(cls) => cls.tpe
+ case REFERENCE(cls) => cls.tpe_*
case ARRAY(elem) => arrayType(elem.toType)
case _ => abort("Unknown type kind.")
}
@@ -66,7 +66,6 @@ trait TypeKinds { self: ICodes =>
def isValueType = false
def isBoxedType = false
final def isRefOrArrayType = isReferenceType || isArrayType
- final def isRefArrayOrBoxType = isRefOrArrayType || isBoxedType
final def isNothingType = this == NothingReference
final def isNullType = this == NullReference
final def isInterfaceType = this match {
@@ -89,10 +88,19 @@ trait TypeKinds { self: ICodes =>
final def isNumericType: Boolean = isIntegralType | isRealType
/** Simple subtyping check */
- def <:<(other: TypeKind): Boolean = (this eq other) || (this match {
- case BOOL | BYTE | SHORT | CHAR => other == INT || other == LONG
- case _ => this eq other
- })
+ def <:<(other: TypeKind): Boolean
+
+ /**
+ * `this` is directly assignable to `other` if no coercion or
+ * casting is needed to convert `this` to `other`. It's a distinct
+ * relationship from <:< because on the JVM BOOL, BYTE, CHAR and
+ * SHORT need no coercion to INT, yet ARRAY[SHORT] is not a subtype
+ * of ARRAY[INT] even though JVM arrays are covariant.
+ */
+ final def isAssignabledTo(other: TypeKind): Boolean = other match {
+ case INT => this.isIntSizedType
+ case _ => this <:< other
+ }
/** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */
def isWideType: Boolean = false
@@ -112,10 +120,9 @@ trait TypeKinds { self: ICodes =>
override def toString = {
this.getClass.getName stripSuffix "$" dropWhile (_ != '$') drop 1
}
+ def <:<(other: TypeKind): Boolean = this eq other
}
- var lubs0 = 0
-
/**
* The least upper bound of two typekinds. They have to be either
* REFERENCE or ARRAY kinds.
@@ -123,24 +130,23 @@ trait TypeKinds { self: ICodes =>
* The lub is based on the lub of scala types.
*/
def lub(a: TypeKind, b: TypeKind): TypeKind = {
- /** The compiler's lub calculation does not order classes before traits.
- * This is apparently not wrong but it is inconvenient, and causes the
- * icode checker to choke when things don't match up. My attempts to
- * alter the calculation at the compiler level were failures, so in the
- * interests of a working icode checker I'm making the adjustment here.
+ /* The compiler's lub calculation does not order classes before traits.
+ * This is apparently not wrong but it is inconvenient, and causes the
+ * icode checker to choke when things don't match up. My attempts to
+ * alter the calculation at the compiler level were failures, so in the
+ * interests of a working icode checker I'm making the adjustment here.
*
- * Example where we'd like a different answer:
+ * Example where we'd like a different answer:
*
- * abstract class Tom
- * case object Bob extends Tom
- * case object Harry extends Tom
- * List(Bob, Harry) // compiler calculates "Product with Tom" rather than "Tom with Product"
+ * abstract class Tom
+ * case object Bob extends Tom
+ * case object Harry extends Tom
+ * List(Bob, Harry) // compiler calculates "Product with Tom" rather than "Tom with Product"
*
- * Here we make the adjustment by rewinding to a pre-erasure state and
- * sifting through the parents for a class type.
+ * Here we make the adjustment by rewinding to a pre-erasure state and
+ * sifting through the parents for a class type.
*/
- def lub0(tk1: TypeKind, tk2: TypeKind): Type = beforeUncurry {
- import definitions._
+ def lub0(tk1: TypeKind, tk2: TypeKind): Type = enteringUncurry {
val tp = global.lub(List(tk1.toType, tk2.toType))
val (front, rest) = tp.parents span (_.typeSymbol.isTrait)
@@ -284,7 +290,7 @@ trait TypeKinds { self: ICodes =>
}
/** Checks subtyping relationship. */
- override def <:<(other: TypeKind) = isNothingType || (other match {
+ def <:<(other: TypeKind) = isNothingType || (other match {
case REFERENCE(cls2) => cls.tpe <:< cls2.tpe
case ARRAY(_) => cls == NullClass
case _ => false
@@ -298,7 +304,7 @@ trait TypeKinds { self: ICodes =>
else ARRAY(ArrayN(elem, dims - 1))
}
- final case class ARRAY(val elem: TypeKind) extends TypeKind {
+ final case class ARRAY(elem: TypeKind) extends TypeKind {
override def toString = "ARRAY[" + elem + "]"
override def isArrayType = true
override def dimensions = 1 + elem.dimensions
@@ -322,7 +328,7 @@ trait TypeKinds { self: ICodes =>
/** Array subtyping is covariant, as in Java. Necessary for checking
* code that interacts with Java. */
- override def <:<(other: TypeKind) = other match {
+ def <:<(other: TypeKind) = other match {
case ARRAY(elem2) => elem <:< elem2
case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
case _ => false
@@ -340,7 +346,7 @@ trait TypeKinds { self: ICodes =>
}
/** Checks subtyping relationship. */
- override def <:<(other: TypeKind) = other match {
+ def <:<(other: TypeKind) = other match {
case BOXED(`kind`) => true
case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
case _ => false
@@ -353,6 +359,7 @@ trait TypeKinds { self: ICodes =>
*/
case object ConcatClass extends TypeKind {
override def toString = "ConcatClass"
+ def <:<(other: TypeKind): Boolean = this eq other
/**
* Approximate `lub`. The common type of two references is
@@ -363,19 +370,16 @@ trait TypeKinds { self: ICodes =>
case REFERENCE(_) => AnyRefReference
case _ => uncomparable(other)
}
-
- /** Checks subtyping relationship. */
- override def <:<(other: TypeKind) = this eq other
}
////////////////// Conversions //////////////////////////////
/** Return the TypeKind of the given type
*
- * Call to .normalize fixes #3003 (follow type aliases). Otherwise,
+ * Call to dealiasWiden fixes #3003 (follow type aliases). Otherwise,
* arrayOrClassType below would return ObjectReference.
*/
- def toTypeKind(t: Type): TypeKind = t.normalize match {
+ def toTypeKind(t: Type): TypeKind = t.dealiasWiden match {
case ThisType(ArrayClass) => ObjectReference
case ThisType(sym) => REFERENCE(sym)
case SingleType(_, sym) => primitiveOrRefType(sym)
@@ -431,11 +435,4 @@ trait TypeKinds { self: ICodes =>
primitiveTypeMap.getOrElse(sym, newReference(sym))
private def primitiveOrClassType(sym: Symbol, targs: List[Type]) =
primitiveTypeMap.getOrElse(sym, arrayOrClassType(sym, targs))
-
- def msil_mgdptr(tk: TypeKind): TypeKind = (tk: @unchecked) match {
- case REFERENCE(cls) => REFERENCE(loaders.clrTypes.mdgptrcls4clssym(cls))
- // TODO have ready class-symbols for the by-ref versions of built-in valuetypes
- case _ => abort("cannot obtain a managed pointer for " + tk)
- }
-
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 23d3d05c64..57d51dad49 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -15,15 +15,11 @@ package icode
trait TypeStacks {
self: ICodes =>
- import opcodes._
-
/* This class simulates the type of the operand
* stack of the ICode.
*/
type Rep = List[TypeKind]
- object NoTypeStack extends TypeStack(Nil) { }
-
class TypeStack(var types: Rep) {
if (types.nonEmpty)
checkerDebug("Created " + this)
@@ -71,14 +67,6 @@ trait TypeStacks {
def apply(n: Int): TypeKind = types(n)
- /**
- * A TypeStack agrees with another one if they have the same
- * length and each type kind agrees position-wise. Two
- * types agree if one is a subtype of the other.
- */
- def agreesWith(other: TypeStack): Boolean =
- (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1)
-
/* This method returns a String representation of the stack */
override def toString() =
if (types.isEmpty) "[]"
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 53111d0ade..9d48d7a0d3 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.icode.analysis
import scala.collection.{ mutable, immutable }
@@ -26,12 +27,8 @@ abstract class CopyPropagation {
case object This extends Location
/** Values that can be on the stack. */
- abstract class Value {
- def isRecord = false
- }
- case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value {
- override def isRecord = true
- }
+ sealed abstract class Value { }
+ case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { }
/** The value of some location in memory. */
case class Deref(l: Location) extends Value
@@ -91,16 +88,6 @@ abstract class CopyPropagation {
loop(l) getOrElse Deref(LocalVar(l))
}
- /* Return the binding for the given field of the given record */
- def getBinding(r: Record, f: Symbol): Value = {
- assert(r.bindings contains f, "Record " + r + " does not contain a field " + f)
-
- r.bindings(f) match {
- case Deref(LocalVar(l)) => getBinding(l)
- case target => target
- }
- }
-
/** Return a local which contains the same value as this field, if any.
* If the field holds a reference to a local, the returned value is the
* binding of that local.
@@ -137,7 +124,7 @@ abstract class CopyPropagation {
}
override def toString(): String =
- "\nBindings: " + bindings + "\nStack: " + stack;
+ "\nBindings: " + bindings + "\nStack: " + stack
def dup: State = {
val b: Bindings = mutable.HashMap()
@@ -178,7 +165,7 @@ abstract class CopyPropagation {
val resBindings = mutable.HashMap[Location, Value]()
for ((k, v) <- a.bindings if b.bindings.isDefinedAt(k) && v == b.bindings(k))
- resBindings += (k -> v);
+ resBindings += (k -> v)
new State(resBindings, resStack)
}
}
@@ -203,20 +190,20 @@ abstract class CopyPropagation {
debuglog("CopyAnalysis added point: " + b)
}
m.exh foreach { e =>
- in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack);
+ in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack)
}
// first block is special: it's not bottom, but a precisely defined state with no bindings
- in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
+ in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil)
}
}
override def run() {
forwardAnalysis(blockTransfer)
- if (settings.debug.value) {
+ if (settings.debug) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(in(b) != lattice.bottom,
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
}
}
@@ -241,7 +228,7 @@ abstract class CopyPropagation {
case CONSTANT(k) =>
if (k.tag != UnitTag)
- out.stack = Const(k) :: out.stack;
+ out.stack = Const(k) :: out.stack
case LOAD_ARRAY_ITEM(_) =>
out.stack = (Unknown :: out.stack.drop(2))
@@ -290,14 +277,14 @@ abstract class CopyPropagation {
v match {
case Deref(LocalVar(other)) =>
if (other != local)
- out.bindings += (LocalVar(local) -> v);
+ out.bindings += (LocalVar(local) -> v)
case _ =>
out.bindings += (LocalVar(local) -> v)
}
case Nil =>
sys.error("Incorrect icode in " + method + ". Expecting something on the stack.")
}
- out.stack = out.stack drop 1;
+ out.stack = out.stack drop 1
case STORE_THIS(_) =>
cleanReferencesTo(out, This)
@@ -305,14 +292,14 @@ abstract class CopyPropagation {
case STORE_FIELD(field, isStatic) =>
if (isStatic)
- out.stack = out.stack.drop(1);
+ out.stack = out.stack.drop(1)
else {
- out.stack = out.stack.drop(2);
- cleanReferencesTo(out, Field(AllRecords, field));
+ out.stack = out.stack.drop(2)
+ cleanReferencesTo(out, Field(AllRecords, field))
in.stack match {
case v :: Record(_, bindings) :: vs =>
bindings += (field -> v)
- case _ => ();
+ case _ => ()
}
}
@@ -322,7 +309,7 @@ abstract class CopyPropagation {
case CALL_METHOD(method, style) => style match {
case Dynamic =>
- out = simulateCall(in, method, false)
+ out = simulateCall(in, method, static = false)
case Static(onInstance) =>
if (onInstance) {
@@ -333,19 +320,19 @@ abstract class CopyPropagation {
case Record(_, bindings) =>
for (v <- out.stack.take(method.info.paramTypes.length + 1)
if v ne obj) {
- bindings ++= getBindingsForPrimaryCtor(in, method);
+ bindings ++= getBindingsForPrimaryCtor(in, method)
}
case _ => ()
}
// put the Record back on the stack and remove the 'returned' value
out.stack = out.stack.drop(1 + method.info.paramTypes.length)
} else
- out = simulateCall(in, method, false)
+ out = simulateCall(in, method, static = false)
} else
- out = simulateCall(in, method, true)
+ out = simulateCall(in, method, static = true)
case SuperCall(_) =>
- out = simulateCall(in, method, false)
+ out = simulateCall(in, method, static = false)
}
case BOX(tpe) =>
@@ -404,7 +391,7 @@ abstract class CopyPropagation {
out.stack = out.stack.head :: out.stack
case MONITOR_ENTER() =>
- out.stack = out.stack.drop(1);
+ out.stack = out.stack.drop(1)
case MONITOR_EXIT() =>
out.stack = out.stack.drop(1)
@@ -452,7 +439,7 @@ abstract class CopyPropagation {
case Deref(loc1) if (loc1 == target) => false
case Boxed(loc1) if (loc1 == target) => false
case rec @ Record(_, _) =>
- cleanRecord(rec);
+ cleanRecord(rec)
true
case _ => true
}) &&
@@ -463,22 +450,17 @@ abstract class CopyPropagation {
}
}
- /** Update the state <code>s</code> after the call to <code>method</code>.
+ /** Update the state `s` after the call to `method`.
* The stack elements are dropped and replaced by the result of the call.
* If the method is impure, all bindings to record fields are cleared.
- *
- * @param state ...
- * @param method ...
- * @param static ...
- * @return ...
*/
final def simulateCall(state: copyLattice.State, method: Symbol, static: Boolean): copyLattice.State = {
- val out = new copyLattice.State(state.bindings, state.stack);
- out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1));
- if (method.info.resultType != definitions.UnitClass.tpe && !method.isConstructor)
- out.stack = Unknown :: out.stack;
+ val out = new copyLattice.State(state.bindings, state.stack)
+ out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1))
+ if (method.info.resultType != definitions.UnitTpe && !method.isConstructor)
+ out.stack = Unknown :: out.stack
if (!isPureMethod(method))
- invalidateRecords(out);
+ invalidateRecords(out)
out
}
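// --- Editor's illustrative sketch (hypothetical types, not CopyPropagation itself): the
// --- stack bookkeeping described above -- a call pops its arguments (plus the receiver
// --- for instance calls) and pushes one unknown value unless the callee returns Unit
// --- or is a constructor.
object SimulateCallSketch {
  sealed trait Value
  case object Unknown extends Value
  final case class Const(i: Int) extends Value
  def simulateCall(stack: List[Value], paramCount: Int, static: Boolean, pushesResult: Boolean): List[Value] = {
    val popped = stack.drop(paramCount + (if (static) 0 else 1))
    if (pushesResult) Unknown :: popped else popped
  }
  def main(args: Array[String]): Unit = {
    val before = List(Const(2), Const(1), Const(0)) // two args on top of the receiver
    println(simulateCall(before, paramCount = 2, static = false, pushesResult = true))
    // prints: List(Unknown)
  }
}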
@@ -519,8 +501,8 @@ abstract class CopyPropagation {
* they are passed on the stack. It works for primary constructors.
*/
private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = {
- val paramAccessors = ctor.owner.constrParamAccessors;
- var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
+ val paramAccessors = ctor.owner.constrParamAccessors
+ var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1)
val bindings = mutable.HashMap[Symbol, Value]()
debuglog("getBindings for: " + ctor + " acc: " + paramAccessors)
@@ -546,24 +528,22 @@ abstract class CopyPropagation {
// + " having acc: " + (paramAccessors map (_.tpe))+ " vs. params" + paramTypes
// + "\n\t failed at pos " + i + " with " + p.tpe + " == " + paramTypes(i))
if (p.tpe == paramTypes(i))
- bindings += (p -> values.head);
- values = values.tail;
+ bindings += (p -> values.head)
+ values = values.tail
}
debuglog("\t" + bindings)
bindings
}
- /** Is symbol <code>m</code> a pure method?
- *
- * @param m ...
- * @return ...
+ /** Is symbol `m` a pure method?
*/
final def isPureMethod(m: Symbol): Boolean =
m.isGetter // abstract getters are still pure, as we 'know'
final override def toString() = (
- method.blocks map { b =>
+ if (method eq null) List("<null>")
+ else method.blocks map { b =>
"\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
"\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 04c3eedbad..a378998f8f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -4,7 +4,8 @@
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.icode.analysis
import scala.collection.{ mutable, immutable }
@@ -30,16 +31,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
/* Implement this function to initialize the worklist. */
def init(f: => Unit): Unit = {
iterations = 0
- in.clear; out.clear; worklist.clear; visited.clear;
- f
- }
-
- /** Reinitialize, but keep the old solutions. Should be used when reanalyzing the
- * same method, after some code transformation.
- */
- def reinit(f: => Unit): Unit = {
- iterations = 0
- worklist.clear; visited.clear;
+ in.clear(); out.clear(); worklist.clear(); visited.clear()
f
}
@@ -55,7 +47,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
while (!worklist.isEmpty) {
if (stat) iterations += 1
//Console.println("worklist in: " + worklist);
- val point = worklist.iterator.next; worklist -= point; visited += point;
+ val point = worklist.iterator.next(); worklist -= point; visited += point
//Console.println("taking out point: " + point + " worklist out: " + worklist);
val output = f(point, in(point))
@@ -82,17 +74,13 @@ trait DataFlowAnalysis[L <: SemiLattice] {
sys.error("Could not find element " + e.getMessage)
}
- /** ...
- *
- * @param f ...
- */
def backwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit =
while (worklist.nonEmpty) {
if (stat) iterations += 1
val point = worklist.head
worklist -= point
- out(point) = lattice.lub(point.successors map in.apply, false) // TODO check for exception handlers
+ out(point) = lattice.lub(point.successors map in.apply, exceptional = false) // TODO check for exception handlers
val input = f(point, out(point))
if ((lattice.bottom == in(point)) || input != in(point)) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index abda639dec..939641c3eb 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -69,15 +69,15 @@ abstract class Liveness {
case STORE_LOCAL(local) if (!genSet(local)) => killSet = killSet + local
case _ => ()
}
- Pair(genSet, killSet)
+ (genSet, killSet)
}
override def run() {
backwardAnalysis(blockTransfer)
- if (settings.debug.value) {
+ if (settings.debug) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 2717c432e8..fecd48ed27 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -51,8 +51,8 @@ abstract class ReachingDefinitions {
// it'd be nice not to call zip with mismatched sequences because
// it makes it harder to spot the real problems.
val result = (a.stack, b.stack).zipped map (_ ++ _)
- if (settings.debug.value && (a.stack.length != b.stack.length))
- debugwarn("Mismatched stacks in ReachingDefinitions#lub2: " + a.stack + ", " + b.stack + ", returning " + result)
+ if (settings.debug && (a.stack.length != b.stack.length))
+ devWarning(s"Mismatched stacks in ReachingDefinitions#lub2: ${a.stack}, ${b.stack}, returning $result")
result
}
)
@@ -141,13 +141,13 @@ abstract class ReachingDefinitions {
override def run() {
forwardAnalysis(blockTransfer)
- if (settings.debug.value) {
+ if (settings.debug) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b)
+ ": bot: " + lattice.bottom
+ "\nin(b) == bottom: " + (in(b) == lattice.bottom)
- + "\nbottom == in(b): " + (lattice.bottom == in(b))));
+ + "\nbottom == in(b): " + (lattice.bottom == in(b))))
}
}
@@ -155,7 +155,7 @@ abstract class ReachingDefinitions {
import lattice.IState
def updateReachingDefinition(b: BasicBlock, idx: Int, rd: ListSet[Definition]): ListSet[Definition] = {
val STORE_LOCAL(local) = b(idx)
- var tmp = local
+ val tmp = local
(rd filter { case (l, _, _) => l != tmp }) + ((tmp, b, idx))
}
@@ -197,7 +197,7 @@ abstract class ReachingDefinitions {
def findDefs(bb: BasicBlock, idx: Int, m: Int, depth: Int): List[(BasicBlock, Int)] = if (idx > 0) {
assert(bb.closed, bb)
- var instrs = bb.getArray
+ val instrs = bb.getArray
var res: List[(BasicBlock, Int)] = Nil
var i = idx
var n = m
@@ -240,7 +240,8 @@ abstract class ReachingDefinitions {
findDefs(bb, idx, m, 0)
override def toString: String = {
- method.code.blocks map { b =>
+ if (method eq null) "<null>"
+ else method.code.blocks map { b =>
" entry(%s) = %s\n".format(b, in(b)) +
" exit(%s) = %s\n".format(b, out(b))
} mkString ("ReachingDefinitions {\n", "\n", "\n}")
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index b2ecb431ee..2e44c405cf 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -3,10 +3,12 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.icode.analysis
import scala.collection.{mutable, immutable}
+import java.util.concurrent.TimeUnit
/** A data-flow analysis on types, that works on `ICode`.
*
@@ -68,7 +70,6 @@ abstract class TypeFlowAnalysis {
* names to types and a type stack.
*/
object typeFlowLattice extends SemiLattice {
- import icodes._
type Elem = IState[VarBinding, icodes.TypeStack]
val top = new Elem(new VarBinding, typeStackLattice.top)
@@ -132,15 +133,15 @@ abstract class TypeFlowAnalysis {
init(m)
}
- def run = {
- timer.start
+ def run() = {
+ timer.start()
// icodes.lubs0 = 0
forwardAnalysis(blockTransfer)
- val t = timer.stop
- if (settings.debug.value) {
+ timer.stop
+ if (settings.debug) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(visited.contains(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited))
}
// log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
// + "\n\t" + iterations + " iterations: " + t + " ms."
@@ -168,7 +169,7 @@ abstract class TypeFlowAnalysis {
val bindings = out.vars
val stack = out.stack
- if (settings.debug.value) {
+ if (settings.debug) {
// Console.println("[before] Stack: " + stack);
// Console.println(i);
}
@@ -208,7 +209,7 @@ abstract class TypeFlowAnalysis {
case Test(_, kind, zero) =>
stack.pop
if (!zero) { stack.pop }
- stack push BOOL;
+ stack push BOOL
case Comparison(_, _) => stack.pop2; stack push INT
@@ -269,36 +270,6 @@ abstract class TypeFlowAnalysis {
out
} // interpret
-
- class SimulatedStack {
- private var types: List[InferredType] = Nil
- private var depth = 0
-
- /** Remove and return the topmost element on the stack. If the
- * stack is empty, return a reference to a negative index on the
- * stack, meaning it refers to elements pushed by a predecessor block.
- */
- def pop: InferredType = types match {
- case head :: rest =>
- types = rest
- head
- case _ =>
- depth -= 1
- TypeOfStackPos(depth)
- }
-
- def pop2: (InferredType, InferredType) = {
- (pop, pop)
- }
-
- def push(t: InferredType) {
- depth += 1
- types = types ::: List(t)
- }
-
- def push(k: TypeKind) { push(Const(k)) }
- }
-
abstract class InferredType {
/** Return the type kind pointed by this inferred type. */
def getKind(in: lattice.Elem): icodes.TypeKind = this match {
@@ -326,7 +297,6 @@ abstract class TypeFlowAnalysis {
class TransferFunction(consumed: Int, gens: List[Gen]) extends (lattice.Elem => lattice.Elem) {
def apply(in: lattice.Elem): lattice.Elem = {
val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- val bindings = out.vars
val stack = out.stack
out.stack.pop(consumed)
@@ -387,9 +357,9 @@ abstract class TypeFlowAnalysis {
override def run {
- timer.start
+ timer.start()
forwardAnalysis(blockTransfer)
- val t = timer.stop
+ timer.stop
/* Now that `forwardAnalysis(blockTransfer)` has finished, all inlining candidates can be found in `remainingCALLs`,
whose keys are callsites and whose values are pieces of information about the typestack just before the callsite in question.
@@ -399,7 +369,7 @@ abstract class TypeFlowAnalysis {
preCandidates += rc._2.bb
}
- if (settings.debug.value) {
+ if (settings.debug) {
for(b <- callerLin; if (b != method.startBlock) && preCandidates(b)) {
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited)
@@ -428,7 +398,7 @@ abstract class TypeFlowAnalysis {
override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null;
+ val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null
var isPastLast = false
var instrs = b.toList
@@ -449,7 +419,7 @@ abstract class TypeFlowAnalysis {
!blackballed(concreteMethod)
}
if(isCandidate) {
- remainingCALLs += Pair(cm, CallsiteInfo(b, receiver, result.stack.length, concreteMethod))
+ remainingCALLs(cm) = CallsiteInfo(b, receiver, result.stack.length, concreteMethod)
} else {
remainingCALLs.remove(cm)
isOnWatchlist.remove(cm)
@@ -546,9 +516,6 @@ abstract class TypeFlowAnalysis {
relevantBBs ++= blocks
}
- /* the argument is also included in the result */
- private def transitivePreds(b: BasicBlock): Set[BasicBlock] = { transitivePreds(List(b)) }
-
/* those BBs in the argument are also included in the result */
private def transitivePreds(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
val result = mutable.Set.empty[BasicBlock]
@@ -562,19 +529,6 @@ abstract class TypeFlowAnalysis {
result.toSet
}
- /* those BBs in the argument are also included in the result */
- private def transitiveSuccs(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
- val result = mutable.Set.empty[BasicBlock]
- var toVisit: List[BasicBlock] = starters.toList.distinct
- while(toVisit.nonEmpty) {
- val h = toVisit.head
- toVisit = toVisit.tail
- result += h
- for(p <- h.successors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
- }
- result.toSet
- }
-
/* A basic block B is "on the perimeter" of the current control-flow subgraph if none of its successors belongs to that subgraph.
* In that case, for the purposes of inlining, we're interested in the typestack right before the last inline candidate in B, not in those afterwards.
* In particular we can do without computing the outflow at B. */
@@ -646,10 +600,10 @@ abstract class TypeFlowAnalysis {
return
} else if(staleOut.isEmpty && inlined.isEmpty && staleIn.isEmpty) {
// this promotes invoking reinit if in doubt, no performance degradation will ensue!
- return;
+ return
}
- worklist.clear // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
+ worklist.clear() // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
// asserts conveying an idea what CFG shapes arrive here:
// staleIn foreach (p => assert( !in.isDefinedAt(p), p))
@@ -685,12 +639,6 @@ abstract class TypeFlowAnalysis {
if(!worklist.contains(b)) { worklist += b }
}
- /* this is not a general purpose method to add to the worklist,
- * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
- private def enqueue(bs: Traversable[BasicBlock]) {
- bs foreach enqueue
- }
-
private def blankOut(blocks: scala.collection.Set[BasicBlock]) {
blocks foreach { b =>
in(b) = typeFlowLattice.bottom
@@ -719,14 +667,14 @@ abstract class TypeFlowAnalysis {
override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = {
while (!worklist.isEmpty && relevantBBs.nonEmpty) {
if (stat) iterations += 1
- val point = worklist.iterator.next; worklist -= point;
+ val point = worklist.iterator.next(); worklist -= point
if(relevantBBs(point)) {
shrinkedWatchlist = false
val output = f(point, in(point))
- visited += point;
+ visited += point
if(isOnPerimeter(point)) {
if(shrinkedWatchlist && !isWatching(point)) {
- relevantBBs -= point;
+ relevantBBs -= point
populatePerimeter()
}
} else {
@@ -761,19 +709,15 @@ abstract class TypeFlowAnalysis {
private var lastStart = 0L
- def reset() {
- millis = 0L
- }
-
def start() {
- lastStart = System.currentTimeMillis
+ lastStart = System.nanoTime()
}
/** Stop the timer and return the number of milliseconds since the last
* call to start. The 'millis' field is increased by the elapsed time.
*/
def stop: Long = {
- val elapsed = System.currentTimeMillis - lastStart
+ val elapsed = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - lastStart)
millis += elapsed
elapsed
}
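// --- Editor's illustrative sketch (not the Timer above): timing a block with the
// --- monotonic System.nanoTime clock and converting to milliseconds via
// --- TimeUnit.NANOSECONDS.toMillis, as the patch now does.
import java.util.concurrent.TimeUnit
object NanoTimerSketch {
  def timeMillis[A](body: => A): (A, Long) = {
    val start   = System.nanoTime()
    val result  = body
    val elapsed = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start)
    (result, elapsed)
  }
  def main(args: Array[String]): Unit = {
    val (_, ms) = timeMillis { Thread.sleep(5) }
    println(s"took ~${ms}ms")  // nanoTime is monotonic, unlike currentTimeMillis
  }
}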
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
new file mode 100644
index 0000000000..c8845344e9
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -0,0 +1,1234 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ * @version 1.0
+ *
+ */
+abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
+ import global._
+ import definitions._
+
+ /*
+ * Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions.
+ */
+ abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) {
+
+ import icodes.TestOp
+ import icodes.opcodes.InvokeStyle
+
+ /* If the selector type has a member with the right name,
+ * it is the host class; otherwise the symbol's owner.
+ */
+ def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
+ case NoSymbol => debuglog(s"Rejecting $selector as host class for $sym") ; sym.owner
+ case _ => selector.typeSymbol
+ }
+
+ /* ---------------- helper utils for generating methods and code ---------------- */
+
+ def emit(opc: Int) { mnode.visitInsn(opc) }
+
+ def emitZeroOf(tk: BType) {
+ (tk.sort: @switch) match {
+ case asm.Type.BOOLEAN => bc.boolconst(false)
+ case asm.Type.BYTE |
+ asm.Type.SHORT |
+ asm.Type.CHAR |
+ asm.Type.INT => bc.iconst(0)
+ case asm.Type.LONG => bc.lconst(0)
+ case asm.Type.FLOAT => bc.fconst(0)
+ case asm.Type.DOUBLE => bc.dconst(0)
+ case asm.Type.VOID => ()
+ case _ => emit(asm.Opcodes.ACONST_NULL)
+ }
+ }
+
+ /*
+ * Emits code that adds nothing to the operand stack.
+ * Two main cases: `tree` is an assignment, which compiles to a store;
+ * otherwise an `adapt()` to UNIT is performed if needed (discarding any produced value).
+ */
+ def genStat(tree: Tree) {
+ lineNumber(tree)
+ tree match {
+ case Assign(lhs @ Select(_, _), rhs) =>
+ val isStatic = lhs.symbol.isStaticMember
+ if (!isStatic) { genLoadQualifier(lhs) }
+ genLoad(rhs, symInfoTK(lhs.symbol))
+ lineNumber(tree)
+ fieldStore(lhs.symbol)
+
+ case Assign(lhs, rhs) =>
+ val s = lhs.symbol
+ val Local(tk, _, idx, _) = locals.getOrMakeLocal(s)
+ genLoad(rhs, tk)
+ lineNumber(tree)
+ bc.store(idx, tk)
+
+ case _ =>
+ genLoad(tree, UNIT)
+ }
+ }
+
+ def genThrow(expr: Tree): BType = {
+ val thrownKind = tpeTK(expr)
+ // `throw null` is valid although scala.Null (as defined in src/library-aux) isn't a subtype of Throwable.
+ // Similarly for scala.Nothing (again, as defined in src/library-aux).
+ assert(thrownKind.isNullType || thrownKind.isNothingType || exemplars.get(thrownKind).isSubtypeOf(ThrowableReference))
+ genLoad(expr, thrownKind)
+ lineNumber(expr)
+ emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level.
+
+ RT_NOTHING // always returns the same, the invoker should know :)
+ }
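// --- Editor's illustrative sketch (not compiler code): `throw null` type-checks because
// --- scala.Null conforms to every reference type, and at runtime ATHROW on a null
// --- reference makes the JVM raise a NullPointerException instead.
object ThrowNullSketch {
  def main(args: Array[String]): Unit =
    try throw (null: Throwable)
    catch { case _: NullPointerException => println("ATHROW on null reference -> NPE") }
}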
+
+ /* Generate code for primitive arithmetic operations. */
+ def genArithmeticOp(tree: Tree, code: Int): BType = {
+ val Apply(fun @ Select(larg, _), args) = tree
+ var resKind = tpeTK(larg)
+
+ assert(resKind.isNumericType || (resKind == BOOL),
+ s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]")
+
+ import scalaPrimitives._
+
+ args match {
+ // unary operation
+ case Nil =>
+ genLoad(larg, resKind)
+ code match {
+ case POS => () // nothing
+ case NEG => bc.neg(resKind)
+ case NOT => bc.genPrimitiveArithmetic(icodes.NOT, resKind)
+ case _ => abort(s"Unknown unary operation: ${fun.symbol.fullName} code: $code")
+ }
+
+ // binary operation
+ case rarg :: Nil =>
+ resKind = maxType(tpeTK(larg), tpeTK(rarg))
+ if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code)) {
+ assert(resKind.isIntegralType || (resKind == BOOL),
+ s"$resKind incompatible with arithmetic modulo operation.")
+ }
+
+ genLoad(larg, resKind)
+ genLoad(rarg, // check .NET size of shift arguments!
+ if (scalaPrimitives.isShiftOp(code)) INT else resKind)
+
+ (code: @switch) match {
+ case ADD => bc add resKind
+ case SUB => bc sub resKind
+ case MUL => bc mul resKind
+ case DIV => bc div resKind
+ case MOD => bc rem resKind
+
+ case OR | XOR | AND => bc.genPrimitiveLogical(code, resKind)
+
+ case LSL | LSR | ASR => bc.genPrimitiveShift(code, resKind)
+
+ case _ => abort(s"Unknown primitive: ${fun.symbol}[$code]")
+ }
+
+ case _ =>
+ abort(s"Too many arguments for primitive function: $tree")
+ }
+ lineNumber(tree)
+ resKind
+ }
+
+ /* Generate primitive array operations. */
+ def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = {
+ val Apply(Select(arrayObj, _), args) = tree
+ val k = tpeTK(arrayObj)
+ genLoad(arrayObj, k)
+ val elementType = typeOfArrayOp.getOrElse(code, abort(s"Unknown operation on arrays: $tree code: $code"))
+
+ var generatedType = expectedType
+
+ if (scalaPrimitives.isArrayGet(code)) {
+ // load argument on stack
+ assert(args.length == 1, s"Too many arguments for array get operation: $tree");
+ genLoad(args.head, INT)
+ generatedType = k.getComponentType
+ bc.aload(elementType)
+ }
+ else if (scalaPrimitives.isArraySet(code)) {
+ args match {
+ case a1 :: a2 :: Nil =>
+ genLoad(a1, INT)
+ genLoad(a2)
+ // the following line should really be here, but because of bugs in erasure
+ // we pretend we generate whatever type is expected from us.
+ //generatedType = UNIT
+ bc.astore(elementType)
+ case _ =>
+ abort(s"Too many arguments for array set operation: $tree")
+ }
+ }
+ else {
+ generatedType = INT
+ emit(asm.Opcodes.ARRAYLENGTH)
+ }
+ lineNumber(tree)
+
+ generatedType
+ }
+
+ def genLoadIf(tree: If, expectedType: BType): BType = {
+ val If(condp, thenp, elsep) = tree
+
+ val success = new asm.Label
+ val failure = new asm.Label
+
+ val hasElse = !elsep.isEmpty
+ val postIf = if (hasElse) new asm.Label else failure
+
+ genCond(condp, success, failure)
+
+ val thenKind = tpeTK(thenp)
+ val elseKind = if (!hasElse) UNIT else tpeTK(elsep)
+ def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT)
+ val resKind = if (hasUnitBranch) UNIT else tpeTK(tree)
+
+ markProgramPoint(success)
+ genLoad(thenp, resKind)
+ if (hasElse) { bc goTo postIf }
+ markProgramPoint(failure)
+ if (hasElse) {
+ genLoad(elsep, resKind)
+ markProgramPoint(postIf)
+ }
+
+ resKind
+ }
+
+ def genPrimitiveOp(tree: Apply, expectedType: BType): BType = {
+ val sym = tree.symbol
+ val Apply(fun @ Select(receiver, _), _) = tree
+ val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
+
+ import scalaPrimitives.{isArithmeticOp, isArrayOp, isLogicalOp, isComparisonOp}
+
+ if (isArithmeticOp(code)) genArithmeticOp(tree, code)
+ else if (code == scalaPrimitives.CONCAT) genStringConcat(tree)
+ else if (code == scalaPrimitives.HASH) genScalaHash(receiver)
+ else if (isArrayOp(code)) genArrayOp(tree, code, expectedType)
+ else if (isLogicalOp(code) || isComparisonOp(code)) {
+ val success, failure, after = new asm.Label
+ genCond(tree, success, failure)
+ // success block
+ markProgramPoint(success)
+ bc boolconst true
+ bc goTo after
+ // failure block
+ markProgramPoint(failure)
+ bc boolconst false
+ // after
+ markProgramPoint(after)
+
+ BOOL
+ }
+ else if (code == scalaPrimitives.SYNCHRONIZED)
+ genSynchronized(tree, expectedType)
+ else if (scalaPrimitives.isCoercion(code)) {
+ genLoad(receiver)
+ lineNumber(tree)
+ genCoercion(code)
+ coercionTo(code)
+ }
+ else abort(
+ s"Primitive operation not handled yet: ${sym.fullName}(${fun.symbol.simpleName}) at: ${tree.pos}"
+ )
+ }
+
+ def genLoad(tree: Tree) {
+ genLoad(tree, tpeTK(tree))
+ }
+
+ /* Generate code for trees that produce values on the stack */
+ def genLoad(tree: Tree, expectedType: BType) {
+ var generatedType = expectedType
+
+ lineNumber(tree)
+
+ tree match {
+ case lblDf : LabelDef => genLabelDef(lblDf, expectedType)
+
+ case ValDef(_, nme.THIS, _, _) =>
+ debuglog("skipping trivial assign to _$this: " + tree)
+
+ case ValDef(_, _, _, rhs) =>
+ val sym = tree.symbol
+ /* most of the time, !locals.contains(sym), unless the current activation of genLoad() is being called
+ while duplicating a finalizer that contains this ValDef. */
+ val Local(tk, _, idx, isSynth) = locals.getOrMakeLocal(sym)
+ if (rhs == EmptyTree) { emitZeroOf(tk) }
+ else { genLoad(rhs, tk) }
+ bc.store(idx, tk)
+ if (!isSynth) { // there are case <synthetic> ValDef's emitted by patmat
+ varsInScope ::= (sym -> currProgramPoint())
+ }
+ generatedType = UNIT
+
+ case t : If =>
+ generatedType = genLoadIf(t, expectedType)
+
+ case r : Return =>
+ genReturn(r)
+ generatedType = expectedType
+
+ case t : Try =>
+ generatedType = genLoadTry(t)
+
+ case Throw(expr) =>
+ generatedType = genThrow(expr)
+
+ case New(tpt) =>
+ abort(s"Unexpected New(${tpt.summaryString}/$tpt) reached GenBCode.\n" +
+ " Call was genLoad" + ((tree, expectedType)))
+
+ case app : Apply =>
+ generatedType = genApply(app, expectedType)
+
+ case ApplyDynamic(qual, args) => sys.error("No invokedynamic support yet.")
+
+ case This(qual) =>
+ val symIsModuleClass = tree.symbol.isModuleClass
+ assert(tree.symbol == claszSymbol || symIsModuleClass,
+ s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $claszSymbol compilation unit: $cunit")
+ if (symIsModuleClass && tree.symbol != claszSymbol) {
+ generatedType = genLoadModule(tree)
+ }
+ else {
+ mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ generatedType =
+ if (tree.symbol == ArrayClass) ObjectReference
+ else brefType(thisName) // inner class (if any) for claszSymbol already tracked.
+ }
+
+ case Select(Ident(nme.EMPTY_PACKAGE_NAME), module) =>
+ assert(tree.symbol.isModule, s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.pos}")
+ genLoadModule(tree)
+
+ case Select(qualifier, selector) =>
+ val sym = tree.symbol
+ generatedType = symInfoTK(sym)
+ val hostClass = findHostClass(qualifier.tpe, sym)
+ debuglog(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+ val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
+
+ def genLoadQualUnlessElidable() { if (!qualSafeToElide) { genLoadQualifier(tree) } }
+
+ if (sym.isModule) {
+ genLoadQualUnlessElidable()
+ genLoadModule(tree)
+ }
+ else if (sym.isStaticMember) {
+ genLoadQualUnlessElidable()
+ fieldLoad(sym, hostClass)
+ }
+ else {
+ genLoadQualifier(tree)
+ fieldLoad(sym, hostClass)
+ }
+
+ case Ident(name) =>
+ val sym = tree.symbol
+ if (!sym.isPackage) {
+ val tk = symInfoTK(sym)
+ if (sym.isModule) { genLoadModule(tree) }
+ else { locals.load(sym) }
+ generatedType = tk
+ }
+
+ case Literal(value) =>
+ if (value.tag != UnitTag) (value.tag, expectedType) match {
+ case (IntTag, LONG ) => bc.lconst(value.longValue); generatedType = LONG
+ case (FloatTag, DOUBLE) => bc.dconst(value.doubleValue); generatedType = DOUBLE
+ case (NullTag, _ ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = RT_NULL
+ case _ => genConstant(value); generatedType = tpeTK(tree)
+ }
+
+ case blck : Block => genBlock(blck, expectedType)
+
+ case Typed(Super(_, _), _) => genLoad(This(claszSymbol), expectedType)
+
+ case Typed(expr, _) => genLoad(expr, expectedType)
+
+ case Assign(_, _) =>
+ generatedType = UNIT
+ genStat(tree)
+
+ case av : ArrayValue =>
+ generatedType = genArrayValue(av)
+
+ case mtch : Match =>
+ generatedType = genMatch(mtch)
+
+ case EmptyTree => if (expectedType != UNIT) { emitZeroOf(expectedType) }
+
+ case _ => abort(s"Unexpected tree in genLoad: $tree/${tree.getClass} at: ${tree.pos}")
+ }
+
+ // emit conversion
+ if (generatedType != expectedType) {
+ adapt(generatedType, expectedType)
+ }
+
+ } // end of GenBCode.genLoad()
+
+ // ---------------- field load and store ----------------
+
+ /*
+ * must-single-thread
+ */
+ def fieldLoad( field: Symbol, hostClass: Symbol = null) {
+ fieldOp(field, isLoad = true, hostClass)
+ }
+ /*
+ * must-single-thread
+ */
+ def fieldStore(field: Symbol, hostClass: Symbol = null) {
+ fieldOp(field, isLoad = false, hostClass)
+ }
+
+ /*
+ * must-single-thread
+ */
+ private def fieldOp(field: Symbol, isLoad: Boolean, hostClass: Symbol) {
+ // LOAD_FIELD.hostClass , CALL_METHOD.hostClass , and #4283
+ val owner =
+ if (hostClass == null) internalName(field.owner)
+ else internalName(hostClass)
+ val fieldJName = field.javaSimpleName.toString
+ val fieldDescr = symInfoTK(field).getDescriptor
+ val isStatic = field.isStaticMember
+ val opc =
+ if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD }
+ else { if (isStatic) asm.Opcodes.PUTSTATIC else asm.Opcodes.PUTFIELD }
+ mnode.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+
+ }
+
+ // ---------------- emitting constant values ----------------
+
+ /*
+ * For const.tag in {ClazzTag, EnumTag}
+ * must-single-thread
+ * Otherwise it's safe to call from multiple threads.
+ */
+ def genConstant(const: Constant) {
+ (const.tag: @switch) match {
+
+ case BooleanTag => bc.boolconst(const.booleanValue)
+
+ case ByteTag => bc.iconst(const.byteValue)
+ case ShortTag => bc.iconst(const.shortValue)
+ case CharTag => bc.iconst(const.charValue)
+ case IntTag => bc.iconst(const.intValue)
+
+ case LongTag => bc.lconst(const.longValue)
+ case FloatTag => bc.fconst(const.floatValue)
+ case DoubleTag => bc.dconst(const.doubleValue)
+
+ case UnitTag => ()
+
+ case StringTag =>
+ assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+ mnode.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag
+
+ case NullTag => emit(asm.Opcodes.ACONST_NULL)
+
+ case ClazzTag =>
+ val toPush: BType = {
+ val kind = toTypeKind(const.typeValue)
+ if (kind.isValueType) classLiteral(kind)
+ else kind
+ }
+ mnode.visitLdcInsn(toPush.toASMType)
+
+ case EnumTag =>
+ val sym = const.symbolValue
+ val ownerName = internalName(sym.owner)
+ val fieldName = sym.javaSimpleName.toString
+ val fieldDesc = toTypeKind(sym.tpe.underlying).getDescriptor
+ mnode.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ ownerName,
+ fieldName,
+ fieldDesc
+ )
+
+ case _ => abort(s"Unknown constant value: $const")
+ }
+ }
+
+ private def genLabelDef(lblDf: LabelDef, expectedType: BType) {
+ // duplication of LabelDefs contained in `finally`-clauses is handled when emitting RETURN. No bookkeeping for that required here.
+ // no need to call index() over lblDf.params, on first access that magic happens (moreover, no LocalVariableTable entries needed for them).
+ markProgramPoint(programPoint(lblDf.symbol))
+ lineNumber(lblDf)
+ genLoad(lblDf.rhs, expectedType)
+ }
+
+ private def genReturn(r: Return) {
+ val Return(expr) = r
+ val returnedKind = tpeTK(expr)
+ genLoad(expr, returnedKind)
+ adapt(returnedKind, returnType)
+ val saveReturnValue = (returnType != UNIT)
+ lineNumber(r)
+
+ cleanups match {
+ case Nil =>
+ // not an assertion: !shouldEmitCleanup (at least not yet, pendingCleanups() may still have to run and reset `shouldEmitCleanup`).
+ bc emitRETURN returnType
+ case nextCleanup :: rest =>
+ if (saveReturnValue) {
+ if (insideCleanupBlock) {
+ cunit.warning(r.pos, "Return statement found in finally-clause, discarding its return-value in favor of that of a more deeply nested return.")
+ bc drop returnType
+ } else {
+ // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` is inserted.
+ if (earlyReturnVar == null) {
+ earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar")
+ }
+ locals.store(earlyReturnVar)
+ }
+ }
+ bc goTo nextCleanup
+ shouldEmitCleanup = true
+ }
+
+ } // end of genReturn()
+
+ private def genApply(app: Apply, expectedType: BType): BType = {
+ var generatedType = expectedType
+ lineNumber(app)
+ app match {
+
+ case Apply(TypeApply(fun, targs), _) =>
+
+ val sym = fun.symbol
+ val cast = sym match {
+ case Object_isInstanceOf => false
+ case Object_asInstanceOf => true
+ case _ => abort(s"Unexpected type application $fun[sym: ${sym.fullName}] in: $app")
+ }
+
+ val Select(obj, _) = fun
+ val l = tpeTK(obj)
+ val r = tpeTK(targs.head)
+
+ def genTypeApply(): BType = {
+ genLoadQualifier(fun)
+
+ if (l.isValueType && r.isValueType)
+ genConversion(l, r, cast)
+ else if (l.isValueType) {
+ bc drop l
+ if (cast) {
+ mnode.visitTypeInsn(asm.Opcodes.NEW, classCastExceptionReference.getInternalName)
+ bc dup ObjectReference
+ emit(asm.Opcodes.ATHROW)
+ } else {
+ bc boolconst false
+ }
+ }
+ else if (r.isValueType && cast) {
+ abort(s"Erasure should have added an unboxing operation to prevent this cast. Tree: $app")
+ }
+ else if (r.isValueType) {
+ bc isInstance classLiteral(r)
+ }
+ else {
+ genCast(r, cast)
+ }
+
+ if (cast) r else BOOL
+ } // end of genTypeApply()
+
+ generatedType = genTypeApply()
+
+ // 'super' call: Note: since constructors are supposed to
+ // return an instance of what they construct, we have to take
+ // special care. On the JVM they are 'void', and Scala syntactically forbids
+ // calling super constructors explicitly and/or using their 'returned' value.
+ // Therefore, we can ignore this fact and generate code that leaves nothing
+ // on the stack (contrary to what the type in the AST says).
+ case Apply(fun @ Select(Super(_, mix), _), args) =>
+ val invokeStyle = icodes.opcodes.SuperCall(mix)
+ // if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
+ mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ genLoadArguments(args, paramTKs(app))
+ genCallMethod(fun.symbol, invokeStyle, pos = app.pos)
+ generatedType = asmMethodType(fun.symbol).getReturnType
+
+ // 'new' constructor call: Note: since constructors are
+ // thought to return an instance of what they construct,
+ // we have to 'simulate' it by DUPlicating the freshly created
+ // instance (on JVM, <init> methods return VOID).
+ case Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) =>
+ val ctor = fun.symbol
+ assert(ctor.isClassConstructor, s"'new' call to non-constructor: ${ctor.name}")
+
+ generatedType = tpeTK(tpt)
+ assert(generatedType.isRefOrArrayType, s"Non reference type cannot be instantiated: $generatedType")
+
+ generatedType match {
+ case arr if generatedType.isArray =>
+ genLoadArguments(args, paramTKs(app))
+ val dims = arr.getDimensions
+ var elemKind = arr.getElementType
+ val argsSize = args.length
+ if (argsSize > dims) {
+ cunit.error(app.pos, s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)")
+ }
+ if (argsSize < dims) {
+ /* In one step:
+ * elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize)
+ * however the above does not enter a TypeName for each nested arrays in chrs.
+ */
+ for (i <- args.length until dims) elemKind = arrayOf(elemKind)
+ }
+ (argsSize : @switch) match {
+ case 1 => bc newarray elemKind
+ case _ =>
+ val descr = ('[' * argsSize) + elemKind.getDescriptor // denotes the same as: arrayN(elemKind, argsSize).getDescriptor
+ mnode.visitMultiANewArrayInsn(descr, argsSize)
+ }
+
+ case rt if generatedType.hasObjectSort =>
+ assert(exemplar(ctor.owner).c == rt, s"Symbol ${ctor.owner.fullName} is different from $rt")
+ mnode.visitTypeInsn(asm.Opcodes.NEW, rt.getInternalName)
+ bc dup generatedType
+ genLoadArguments(args, paramTKs(app))
+ genCallMethod(ctor, icodes.opcodes.Static(onInstance = true))
+
+ case _ =>
+ abort(s"Cannot instantiate $tpt of kind: $generatedType")
+ }
+
+ case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) =>
+ val nativeKind = tpeTK(expr)
+ genLoad(expr, nativeKind)
+ val MethodNameAndType(mname, mdesc) = asmBoxTo(nativeKind)
+ bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc)
+ generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType)
+
+ case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isUnbox(fun.symbol) =>
+ genLoad(expr)
+ val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
+ generatedType = boxType
+ val MethodNameAndType(mname, mdesc) = asmUnboxTo(boxType)
+ bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc)
+
+ case app @ Apply(fun, args) =>
+ val sym = fun.symbol
+
+ if (sym.isLabel) { // jump to a label
+ genLoadLabelArguments(args, labelDef(sym), app.pos)
+ bc goTo programPoint(sym)
+ } else if (isPrimitive(sym)) { // primitive method call
+ generatedType = genPrimitiveOp(app, expectedType)
+ } else { // normal method call
+
+ def genNormalMethodCall() {
+
+ val invokeStyle =
+ if (sym.isStaticMember) icodes.opcodes.Static(onInstance = false)
+ else if (sym.isPrivate || sym.isClassConstructor) icodes.opcodes.Static(onInstance = true)
+ else icodes.opcodes.Dynamic;
+
+ if (invokeStyle.hasInstance) {
+ genLoadQualifier(fun)
+ }
+
+ genLoadArguments(args, paramTKs(app))
+
+ // In "a couple cases", squirrel away a extra information (hostClass, targetTypeKind). TODO Document what "in a couple cases" refers to.
+ var hostClass: Symbol = null
+ var targetTypeKind: BType = null
+ fun match {
+ case Select(qual, _) =>
+ val qualSym = findHostClass(qual.tpe, sym)
+ if (qualSym == ArrayClass) {
+ targetTypeKind = tpeTK(qual)
+ log(s"Stored target type kind for ${sym.fullName} as $targetTypeKind")
+ }
+ else {
+ hostClass = qualSym
+ if (qual.tpe.typeSymbol != qualSym) {
+ log(s"Precisified host class for $sym from ${qual.tpe.typeSymbol.fullName} to ${qualSym.fullName}")
+ }
+ }
+
+ case _ =>
+ }
+ if ((targetTypeKind != null) && (sym == definitions.Array_clone) && invokeStyle.isDynamic) {
+ val target: String = targetTypeKind.getInternalName
+ bc.invokevirtual(target, "clone", "()Ljava/lang/Object;")
+ }
+ else {
+ genCallMethod(sym, invokeStyle, hostClass, app.pos)
+ }
+
+ } // end of genNormalMethodCall()
+
+ genNormalMethodCall()
+
+ generatedType = asmMethodType(sym).getReturnType
+ }
+
+ }
+
+ generatedType
+ } // end of genApply()
+
+ private def genArrayValue(av: ArrayValue): BType = {
+ val ArrayValue(tpt @ TypeTree(), elems) = av
+
+ val elmKind = tpeTK(tpt)
+ val generatedType = arrayOf(elmKind)
+
+ lineNumber(av)
+ bc iconst elems.length
+ bc newarray elmKind
+
+ var i = 0
+ var rest = elems
+ while (!rest.isEmpty) {
+ bc dup generatedType
+ bc iconst i
+ genLoad(rest.head, elmKind)
+ bc astore elmKind
+ rest = rest.tail
+ i = i + 1
+ }
+
+ generatedType
+ }
+
+ /*
+ * A Match node contains one or more case clauses. Each case clause lists
+ * one or more Int values to use as keys, plus a code block; the exception is
+ * the "default" case clause, which (if it exists) doesn't list any Int key.
+ *
+ * On a first pass over the case clauses, we flatten the keys and their targets (the latter represented with asm.Labels).
+ * That representation allows JCodeMethodV to emit a lookupswitch or a tableswitch.
+ *
+ * On a second pass, we emit the switch blocks, one for each different target.
+ */
+ private def genMatch(tree: Match): BType = {
+ lineNumber(tree)
+ genLoad(tree.selector, INT)
+ val generatedType = tpeTK(tree)
+
+ var flatKeys: List[Int] = Nil
+ var targets: List[asm.Label] = Nil
+ var default: asm.Label = null
+ var switchBlocks: List[Tuple2[asm.Label, Tree]] = Nil
+
+ // collect switch blocks and their keys, but don't emit any switch block yet.
+ for (caze @ CaseDef(pat, guard, body) <- tree.cases) {
+ assert(guard == EmptyTree, guard)
+ val switchBlockPoint = new asm.Label
+ switchBlocks ::= (switchBlockPoint, body)
+ pat match {
+ case Literal(value) =>
+ flatKeys ::= value.intValue
+ targets ::= switchBlockPoint
+ case Ident(nme.WILDCARD) =>
+ assert(default == null, s"multiple default targets in a Match node, at ${tree.pos}")
+ default = switchBlockPoint
+ case Alternative(alts) =>
+ alts foreach {
+ case Literal(value) =>
+ flatKeys ::= value.intValue
+ targets ::= switchBlockPoint
+ case _ =>
+ abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.pos}")
+ }
+ case _ =>
+ abort(s"Invalid pattern in Match node: $tree at: ${tree.pos}")
+ }
+ }
+ bc.emitSWITCH(mkArrayReverse(flatKeys), mkArray(targets.reverse), default, MIN_SWITCH_DENSITY)
+
+ // emit switch-blocks.
+ val postMatch = new asm.Label
+ for (sb <- switchBlocks.reverse) {
+ val (caseLabel, caseBody) = sb
+ markProgramPoint(caseLabel)
+ genLoad(caseBody, generatedType)
+ bc goTo postMatch
+ }
+
+ markProgramPoint(postMatch)
+ generatedType
+ }
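// --- Editor's illustrative sketch (source-level only, not part of this file): the shape
// --- of Match trees that genMatch handles -- Int literal keys, Alternative patterns, and
// --- a single wildcard default -- flattened into keys/labels for a JVM
// --- lookupswitch/tableswitch.
object MatchShapeSketch {
  def describe(tag: Int): String = tag match {
    case 1     => "one"
    case 2 | 3 => "two or three"   // Alternative: both keys jump to the same block
    case _     => "default"        // wildcard: the switch's default target
  }
  def main(args: Array[String]): Unit =
    (0 to 4).foreach(i => println(s"$i -> ${describe(i)}"))
}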
+
+ def genBlock(tree: Block, expectedType: BType) {
+ val Block(stats, expr) = tree
+ val savedScope = varsInScope
+ varsInScope = Nil
+ stats foreach genStat
+ genLoad(expr, expectedType)
+ val end = currProgramPoint()
+ if (emitVars) { // add entries to LocalVariableTable JVM attribute
+ for ((sym, start) <- varsInScope.reverse) { emitLocalVarScope(sym, start, end) }
+ }
+ varsInScope = savedScope
+ }
+
+ def adapt(from: BType, to: BType) {
+ if (!conforms(from, to)) {
+ to match {
+ case UNIT => bc drop from
+ case _ => bc.emitT2T(from, to)
+ }
+ } else if (from.isNothingType) {
+ emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level.
+ } else if (from.isNullType) {
+ bc drop from
+ mnode.visitInsn(asm.Opcodes.ACONST_NULL)
+ }
+ else (from, to) match {
+ case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG)
+ case _ => ()
+ }
+ }
+
+ /* Emit code to Load the qualifier of `tree` on top of the stack. */
+ def genLoadQualifier(tree: Tree) {
+ lineNumber(tree)
+ tree match {
+ case Select(qualifier, _) => genLoad(qualifier)
+ case _ => abort(s"Unknown qualifier $tree")
+ }
+ }
+
+ /* Generate code that loads args into label parameters. */
+ def genLoadLabelArguments(args: List[Tree], lblDef: LabelDef, gotoPos: Position) {
+
+ val aps = {
+ val params: List[Symbol] = lblDef.params.map(_.symbol)
+ assert(args.length == params.length, s"Wrong number of arguments in call to label at: $gotoPos")
+
+ def isTrivial(kv: (Tree, Symbol)) = kv match {
+ case (This(_), p) if p.name == nme.THIS => true
+ case (arg @ Ident(_), p) if arg.symbol == p => true
+ case _ => false
+ }
+
+ (args zip params) filterNot isTrivial
+ }
+
+ // first push *all* arguments. This makes sure multiple uses of the same labelDef-var will all denote the (previous) value.
+ aps foreach { case (arg, param) => genLoad(arg, locals(param).tk) } // `locals` is known to contain `param` because `genDefDef()` visited `labelDefsAtOrUnder`
+
+ // second assign one by one to the LabelDef's variables.
+ aps.reverse foreach {
+ case (_, param) =>
+ // TODO FIXME a "this" param results from tail-call xform. If so, the `else` branch seems perfectly fine. And the `then` branch must be wrong.
+ if (param.name == nme.THIS) mnode.visitVarInsn(asm.Opcodes.ASTORE, 0)
+ else locals.store(param)
+ }
+
+ }
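// --- Editor's illustrative sketch (not compiler code): why all label arguments are
// --- pushed before any is stored -- a simultaneous assignment must read the previous
// --- values, exactly like a swap.
object SimultaneousAssignSketch {
  def main(args: Array[String]): Unit = {
    var x = 1
    var y = 2
    // "push all, then store": evaluate both right-hand sides before assigning,
    // as genLoadLabelArguments does for labelDef parameters.
    val (nx, ny) = (y, x)
    x = nx; y = ny
    println((x, y)) // (2,1); naive sequential stores (x = y; y = x) would give (2,2)
  }
}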
+
+ def genLoadArguments(args: List[Tree], btpes: List[BType]) {
+ (args zip btpes) foreach { case (arg, btpe) => genLoad(arg, btpe) }
+ }
+
+ def genLoadModule(tree: Tree): BType = {
+ val module = (
+ if (!tree.symbol.isPackageClass) tree.symbol
+ else tree.symbol.info.member(nme.PACKAGE) match {
+ case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree")
+ case s => abort(s"SI-5604: found package class where package object expected: $tree")
+ }
+ )
+ lineNumber(tree)
+ genLoadModule(module)
+ symInfoTK(module)
+ }
+
+ def genLoadModule(module: Symbol) {
+ def inStaticMethod = methSymbol != null && methSymbol.isStaticMember
+ if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) {
+ mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ } else {
+ val mbt = symInfoTK(module)
+ mnode.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ mbt.getInternalName /* + "$" */ ,
+ strMODULE_INSTANCE_FIELD,
+ mbt.getDescriptor // for nostalgics: toTypeKind(module.tpe).getDescriptor
+ )
+ }
+ }
+
+ def genConversion(from: BType, to: BType, cast: Boolean) {
+ if (cast) { bc.emitT2T(from, to) }
+ else {
+ bc drop from
+ bc boolconst (from == to)
+ }
+ }
+
+ def genCast(to: BType, cast: Boolean) {
+ if (cast) { bc checkCast to }
+ else { bc isInstance to }
+ }
+
+ /* Is the given symbol a primitive operation? */
+ def isPrimitive(fun: Symbol): Boolean = scalaPrimitives.isPrimitive(fun)
+
+ /* Generate coercion denoted by "code" */
+ def genCoercion(code: Int) {
+ import scalaPrimitives._
+ (code: @switch) match {
+ case B2B | S2S | C2C | I2I | L2L | F2F | D2D => ()
+ case _ =>
+ val from = coercionFrom(code)
+ val to = coercionTo(code)
+ bc.emitT2T(from, to)
+ }
+ }
+
+ def genStringConcat(tree: Tree): BType = {
+ lineNumber(tree)
+ liftStringConcat(tree) match {
+
+ // Optimization for expressions of the form "" + x. We can avoid the StringBuilder.
+ case List(Literal(Constant("")), arg) =>
+ genLoad(arg, ObjectReference)
+ genCallMethod(String_valueOf, icodes.opcodes.Static(onInstance = false))
+
+ case concatenations =>
+ bc.genStartConcat
+ for (elem <- concatenations) {
+ val kind = tpeTK(elem)
+ genLoad(elem, kind)
+ bc.genStringConcat(kind)
+ }
+ bc.genEndConcat
+
+ }
+
+ StringReference
+ }
+
+ def genCallMethod(method: Symbol, style: InvokeStyle, hostClass0: Symbol = null, pos: Position = NoPosition) {
+
+ val siteSymbol = claszSymbol
+ val hostSymbol = if (hostClass0 == null) method.owner else hostClass0;
+ val methodOwner = method.owner
+ // info calls so that types are up to date; erasure may add lateINTERFACE to traits
+ hostSymbol.info ; methodOwner.info
+
+ def needsInterfaceCall(sym: Symbol) = (
+ sym.isInterface
+ || sym.isJavaDefined && sym.isNonBottomSubClass(definitions.ClassfileAnnotationClass)
+ )
+
+ // whether to reference the type of the receiver or
+ // the type of the method owner
+ val useMethodOwner = (
+ style != icodes.opcodes.Dynamic
+ || hostSymbol.isBottomClass
+ || methodOwner == definitions.ObjectClass
+ )
+ val receiver = if (useMethodOwner) methodOwner else hostSymbol
+ val bmOwner = asmClassType(receiver)
+ val jowner = bmOwner.getInternalName
+ val jname = method.javaSimpleName.toString
+ val bmType = asmMethodType(method)
+ val mdescr = bmType.getDescriptor
+
+ def initModule() {
+ // we initialize the MODULE$ field immediately after the super ctor
+ if (!isModuleInitialized &&
+ jMethodName == INSTANCE_CONSTRUCTOR_NAME &&
+ jname == INSTANCE_CONSTRUCTOR_NAME &&
+ isStaticModule(siteSymbol)) {
+ isModuleInitialized = true
+ mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ mnode.visitFieldInsn(
+ asm.Opcodes.PUTSTATIC,
+ thisName,
+ strMODULE_INSTANCE_FIELD,
+ "L" + thisName + ";"
+ )
+ }
+ }
+
+ if (style.isStatic) {
+ if (style.hasInstance) { bc.invokespecial (jowner, jname, mdescr) }
+ else { bc.invokestatic (jowner, jname, mdescr) }
+ }
+ else if (style.isDynamic) {
+ if (needsInterfaceCall(receiver)) { bc.invokeinterface(jowner, jname, mdescr) }
+ else { bc.invokevirtual (jowner, jname, mdescr) }
+ }
+ else {
+ assert(style.isSuper, s"Unknown InvokeStyle: $style")
+ bc.invokespecial(jowner, jname, mdescr)
+ initModule()
+ }
+
+ } // end of genCallMethod()
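+
+ /* A rough summary of the dispatch above, in terms of the InvokeStyle tests it performs:
+ * isStatic && !hasInstance -> invokestatic
+ * isStatic && hasInstance -> invokespecial (typically constructors and private members)
+ * isDynamic, interface receiver -> invokeinterface
+ * isDynamic, class receiver -> invokevirtual
+ * isSuper -> invokespecial, followed by initModule() so that a static module's
+ * MODULE$ field is assigned right after the super constructor call.
+ */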
+
+ /* Generate the scala ## method. */
+ def genScalaHash(tree: Tree): BType = {
+ genLoadModule(ScalaRunTimeModule) // TODO why load ScalaRunTimeModule if ## has InvokeStyle of Static(false) ?
+ genLoad(tree, ObjectReference)
+ genCallMethod(hashMethodSym, icodes.opcodes.Static(onInstance = false))
+
+ INT
+ }
+
+ /*
+ * Returns the list of trees to be concatenated, from left to right.
+ * It flattens a chained call like "a".+("b").+("c") into a list of its arguments.
+ */
+ def liftStringConcat(tree: Tree): List[Tree] = tree match {
+ case Apply(fun @ Select(larg, method), rarg) =>
+ if (isPrimitive(fun.symbol) &&
+ scalaPrimitives.getPrimitive(fun.symbol) == scalaPrimitives.CONCAT)
+ liftStringConcat(larg) ::: rarg
+ else
+ tree :: Nil
+ case _ =>
+ tree :: Nil
+ }
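+
+ /* Example (a sketch): for the tree corresponding to "a" + "b" + "c", i.e. "a".+("b").+("c"),
+ * liftStringConcat returns List(Literal("a"), Literal("b"), Literal("c")); for any tree whose
+ * outermost call is not the string CONCAT primitive it returns that tree as a single-element list.
+ */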
+
+ /* Some useful equality helpers. */
+ def isNull(t: Tree) = {
+ t match {
+ case Literal(Constant(null)) => true
+ case _ => false
+ }
+ }
+
+ /* If l or r is the constant null, returns the other; otherwise null. */
+ def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
+
+ /* Emit code to compare the two top-most stack values using the 'op' operator. */
+ private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
+ if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ bc.emitIF_ICMP(op, success)
+ } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ bc.emitIF_ACMP(op, success)
+ } else {
+ (tk: @unchecked) match {
+ case LONG => emit(asm.Opcodes.LCMP)
+ case FLOAT =>
+ if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.FCMPG)
+ else emit(asm.Opcodes.FCMPL)
+ case DOUBLE =>
+ if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.DCMPG)
+ else emit(asm.Opcodes.DCMPL)
+ }
+ bc.emitIF(op, success)
+ }
+ bc goTo failure
+ }
+
+ /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */
+ private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
+ if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ bc.emitIF(op, success)
+ } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ // @unchecked because references aren't compared with GT, GE, LT, LE.
+ (op : @unchecked) match {
+ case icodes.EQ => bc emitIFNULL success
+ case icodes.NE => bc emitIFNONNULL success
+ }
+ } else {
+ (tk: @unchecked) match {
+ case LONG =>
+ emit(asm.Opcodes.LCONST_0)
+ emit(asm.Opcodes.LCMP)
+ case FLOAT =>
+ emit(asm.Opcodes.FCONST_0)
+ if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.FCMPG)
+ else emit(asm.Opcodes.FCMPL)
+ case DOUBLE =>
+ emit(asm.Opcodes.DCONST_0)
+ if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.DCMPG)
+ else emit(asm.Opcodes.DCMPL)
+ }
+ bc.emitIF(op, success)
+ }
+ bc goTo failure
+ }
+
+ val testOpForPrimitive: Array[TestOp] = Array(
+ icodes.EQ, icodes.NE, icodes.EQ, icodes.NE, icodes.LT, icodes.LE, icodes.GE, icodes.GT
+ )
+
+ /*
+ * Generate code for conditional expressions.
+ * The jump targets success/failure of the test are `then-target` and `else-target` resp.
+ */
+ private def genCond(tree: Tree, success: asm.Label, failure: asm.Label) {
+
+ def genComparisonOp(l: Tree, r: Tree, code: Int) {
+ val op: TestOp = testOpForPrimitive(code - scalaPrimitives.ID)
+ // special-case reference (in)equality test for null (null eq x, x eq null)
+ var nonNullSide: Tree = null
+ if (scalaPrimitives.isReferenceEqualityOp(code) &&
+ { nonNullSide = ifOneIsNull(l, r); nonNullSide != null }
+ ) {
+ genLoad(nonNullSide, ObjectReference)
+ genCZJUMP(success, failure, op, ObjectReference)
+ }
+ else {
+ val tk = maxType(tpeTK(l), tpeTK(r))
+ genLoad(l, tk)
+ genLoad(r, tk)
+ genCJUMP(success, failure, op, tk)
+ }
+ }
+
+ def default() = {
+ genLoad(tree, BOOL)
+ genCZJUMP(success, failure, icodes.NE, BOOL)
+ }
+
+ lineNumber(tree)
+ tree match {
+
+ case Apply(fun, args) if isPrimitive(fun.symbol) =>
+ import scalaPrimitives.{ ZNOT, ZAND, ZOR, EQ, getPrimitive }
+
+ // lhs and rhs of test
+ lazy val Select(lhs, _) = fun
+ val rhs = if (args.isEmpty) EmptyTree else args.head; // args.isEmpty only for ZNOT
+
+ def genZandOrZor(and: Boolean) { // TODO WRONG
+ // reaching "keepGoing" indicates the rhs should be evaluated too (ie not short-circuited).
+ val keepGoing = new asm.Label
+
+ if (and) genCond(lhs, keepGoing, failure)
+ else genCond(lhs, success, keepGoing)
+
+ markProgramPoint(keepGoing)
+ genCond(rhs, success, failure)
+ }
+
+ getPrimitive(fun.symbol) match {
+ case ZNOT => genCond(lhs, failure, success)
+ case ZAND => genZandOrZor(and = true)
+ case ZOR => genZandOrZor(and = false)
+ case code =>
+ // TODO !!!!!!!!!! isReferenceType, in the sense of TypeKind? (ie non-array, non-boxed, non-nothing, may be null)
+ if (scalaPrimitives.isUniversalEqualityOp(code) && tpeTK(lhs).hasObjectSort) {
+ // `lhs` has reference type
+ if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure)
+ else genEqEqPrimitive(lhs, rhs, failure, success)
+ }
+ else if (scalaPrimitives.isComparisonOp(code))
+ genComparisonOp(lhs, rhs, code)
+ else
+ default
+ }
+
+ case _ => default
+ }
+
+ } // end of genCond()
+
+ /*
+ * Generate the "==" code for object references. It is equivalent to
+ * if (l eq null) r eq null else l.equals(r);
+ *
+ * @param l left-hand-side of the '=='
+ * @param r right-hand-side of the '=='
+ */
+ def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label) {
+
+ /* True if the equality comparison is between values that require the use of the rich equality
+ * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
+ * comparison might have a run-time type that is a subtype of java.lang.Number or java.lang.Character.
+ * When it is statically known that both sides have the same final type and are subtypes of Number or Character,
+ * the rich equality check can be skipped (their own equals method will do).
+ */
+ val mustUseAnyComparator: Boolean = {
+ val areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
+
+ !areSameFinals && platform.isMaybeBoxed(l.tpe.typeSymbol) && platform.isMaybeBoxed(r.tpe.typeSymbol)
+ }
+
+ if (mustUseAnyComparator) {
+ val equalsMethod: Symbol = {
+ if (l.tpe <:< BoxedNumberClass.tpe) {
+ if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
+ else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
+ else platform.externalEqualsNumObject
+ } else platform.externalEquals
+ }
+ genLoad(l, ObjectReference)
+ genLoad(r, ObjectReference)
+ genCallMethod(equalsMethod, icodes.opcodes.Static(onInstance = false))
+ genCZJUMP(success, failure, icodes.NE, BOOL)
+ }
+ else {
+ if (isNull(l)) {
+ // null == expr -> expr eq null
+ genLoad(r, ObjectReference)
+ genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+ } else if (isNull(r)) {
+ // expr == null -> expr eq null
+ genLoad(l, ObjectReference)
+ genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+ } else {
+ // l == r -> if (l eq null) r eq null else l.equals(r)
+ val eqEqTempLocal = locals.makeLocal(AnyRefReference, nme.EQEQ_LOCAL_VAR.toString)
+ val lNull = new asm.Label
+ val lNonNull = new asm.Label
+
+ genLoad(l, ObjectReference)
+ genLoad(r, ObjectReference)
+ locals.store(eqEqTempLocal)
+ bc dup ObjectReference
+ genCZJUMP(lNull, lNonNull, icodes.EQ, ObjectReference)
+
+ markProgramPoint(lNull)
+ bc drop ObjectReference
+ locals.load(eqEqTempLocal)
+ genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+
+ markProgramPoint(lNonNull)
+ locals.load(eqEqTempLocal)
+ genCallMethod(Object_equals, icodes.opcodes.Dynamic)
+ genCZJUMP(success, failure, icodes.NE, BOOL)
+ }
+ }
+ }
+
+
+ def genSynchronized(tree: Apply, expectedType: BType): BType
+ def genLoadTry(tree: Try): BType
+
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala
new file mode 100644
index 0000000000..cc3265c5f9
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala
@@ -0,0 +1,716 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.annotation.switch
+import scala.collection.{ immutable, mutable }
+
+/*
+ * Immutable representations of bytecode-level types.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ * @version 1.0
+ *
+ */
+abstract class BCodeGlue extends SubComponent {
+
+ import global._
+
+ object BType {
+
+ import global.chrs
+
+ // ------------- sorts -------------
+
+ val VOID : Int = 0
+ val BOOLEAN: Int = 1
+ val CHAR : Int = 2
+ val BYTE : Int = 3
+ val SHORT : Int = 4
+ val INT : Int = 5
+ val FLOAT : Int = 6
+ val LONG : Int = 7
+ val DOUBLE : Int = 8
+ val ARRAY : Int = 9
+ val OBJECT : Int = 10
+ val METHOD : Int = 11
+
+ // ------------- primitive types -------------
+
+ val VOID_TYPE = new BType(VOID, ('V' << 24) | (5 << 16) | (0 << 8) | 0, 1)
+ val BOOLEAN_TYPE = new BType(BOOLEAN, ('Z' << 24) | (0 << 16) | (5 << 8) | 1, 1)
+ val CHAR_TYPE = new BType(CHAR, ('C' << 24) | (0 << 16) | (6 << 8) | 1, 1)
+ val BYTE_TYPE = new BType(BYTE, ('B' << 24) | (0 << 16) | (5 << 8) | 1, 1)
+ val SHORT_TYPE = new BType(SHORT, ('S' << 24) | (0 << 16) | (7 << 8) | 1, 1)
+ val INT_TYPE = new BType(INT, ('I' << 24) | (0 << 16) | (0 << 8) | 1, 1)
+ val FLOAT_TYPE = new BType(FLOAT, ('F' << 24) | (2 << 16) | (2 << 8) | 1, 1)
+ val LONG_TYPE = new BType(LONG, ('J' << 24) | (1 << 16) | (1 << 8) | 2, 1)
+ val DOUBLE_TYPE = new BType(DOUBLE, ('D' << 24) | (3 << 16) | (3 << 8) | 2, 1)
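+
+ /* For reference, the Int passed as `off` for a primitive packs four bytes, mirroring asm.Type
+ * (see getDescriptor, getSize and getOpcode below, which unpack them):
+ * bits 24-31: the descriptor character ('V', 'Z', 'C', ...)
+ * bits 16-23: the opcode offset for non-array instructions (ILOAD, IRETURN, ...)
+ * bits 8-15: the opcode offset for IALOAD / IASTORE
+ * bits 0-7: the size of the value (0 for void, 2 for long/double, 1 otherwise)
+ */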
+
+ /*
+ * Returns the Java type corresponding to the given type descriptor.
+ *
+ * @param off the offset of this descriptor in the chrs buffer.
+ * @return the Java type corresponding to the given type descriptor.
+ *
+ * can-multi-thread
+ */
+ def getType(off: Int): BType = {
+ var len = 0
+ chrs(off) match {
+ case 'V' => VOID_TYPE
+ case 'Z' => BOOLEAN_TYPE
+ case 'C' => CHAR_TYPE
+ case 'B' => BYTE_TYPE
+ case 'S' => SHORT_TYPE
+ case 'I' => INT_TYPE
+ case 'F' => FLOAT_TYPE
+ case 'J' => LONG_TYPE
+ case 'D' => DOUBLE_TYPE
+ case '[' =>
+ len = 1
+ while (chrs(off + len) == '[') {
+ len += 1
+ }
+ if (chrs(off + len) == 'L') {
+ len += 1
+ while (chrs(off + len) != ';') {
+ len += 1
+ }
+ }
+ new BType(ARRAY, off, len + 1)
+ case 'L' =>
+ len = 1
+ while (chrs(off + len) != ';') {
+ len += 1
+ }
+ new BType(OBJECT, off + 1, len - 1)
+ // case '(':
+ case _ =>
+ assert(chrs(off) == '(')
+ var resPos = off + 1
+ while (chrs(resPos) != ')') { resPos += 1 }
+ val resType = getType(resPos + 1)
+ val len = resPos - off + 1 + resType.len;
+ new BType(
+ METHOD,
+ off,
+ if (resType.hasObjectSort) {
+ len + 2 // "+ 2" accounts for the "L ... ;" in a descriptor for a non-array reference.
+ } else {
+ len
+ }
+ )
+ }
+ }
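+
+ /* Example (a sketch, assuming `off` points at the first character of a descriptor in chrs):
+ * "I" yields INT_TYPE;
+ * "Ljava/lang/String;" yields an OBJECT BType whose internal name is java/lang/String;
+ * "[[I" yields an ARRAY BType of length 3 covering the whole descriptor;
+ * "(ILjava/lang/Object;)V" falls through to the METHOD case.
+ */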
+
+ /* Params denote an internal name.
+ * can-multi-thread
+ */
+ def getObjectType(index: Int, length: Int): BType = {
+ val sort = if (chrs(index) == '[') ARRAY else OBJECT;
+ new BType(sort, index, length)
+ }
+
+ /*
+ * @param methodDescriptor a method descriptor.
+ *
+ * must-single-thread
+ */
+ def getMethodType(methodDescriptor: String): BType = {
+ val n = global.newTypeName(methodDescriptor)
+ new BType(BType.METHOD, n.start, n.length) // TODO assert isValidMethodDescriptor
+ }
+
+ /*
+ * Returns the Java method type corresponding to the given argument and return types.
+ *
+ * @param returnType the return type of the method.
+ * @param argumentTypes the argument types of the method.
+ * @return the Java type corresponding to the given argument and return types.
+ *
+ * must-single-thread
+ */
+ def getMethodType(returnType: BType, argumentTypes: Array[BType]): BType = {
+ val n = global.newTypeName(getMethodDescriptor(returnType, argumentTypes))
+ new BType(BType.METHOD, n.start, n.length)
+ }
+
+ /*
+ * Returns the Java types corresponding to the argument types of method descriptor whose first argument starts at idx0.
+ *
+ * @param idx0 index into chrs of the first argument.
+ * @return the Java types corresponding to the argument types of the given method descriptor.
+ *
+ * can-multi-thread
+ */
+ private def getArgumentTypes(idx0: Int): Array[BType] = {
+ assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.")
+ val args = new Array[BType](getArgumentCount(idx0))
+ var off = idx0
+ var size = 0
+ while (chrs(off) != ')') {
+ args(size) = getType(off)
+ off += args(size).len
+ if (args(size).sort == OBJECT) { off += 2 }
+ // debug: assert("LVZBSCIJFD[)".contains(chrs(off)))
+ size += 1
+ }
+ // debug: var check = 0; while (check < args.length) { assert(args(check) != null); check += 1 }
+ args
+ }
+
+ /*
+ * Returns the number of argument types of this method type, whose first argument starts at idx0.
+ *
+ * @param idx0 index into chrs of the first argument.
+ * @return the number of argument types of this method type.
+ *
+ * can-multi-thread
+ */
+ private def getArgumentCount(idx0: Int): Int = {
+ assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.")
+ var off = idx0
+ var size = 0
+ var keepGoing = true
+ while (keepGoing) {
+ val car = chrs(off)
+ off += 1
+ if (car == ')') {
+ keepGoing = false
+ } else if (car == 'L') {
+ while (chrs(off) != ';') { off += 1 }
+ off += 1
+ size += 1
+ } else if (car != '[') {
+ size += 1
+ }
+ }
+
+ size
+ }
+
+ /*
+ * Returns the Java type corresponding to the return type of the given
+ * method descriptor.
+ *
+ * @param methodDescriptor a method descriptor.
+ * @return the Java type corresponding to the return type of the given method descriptor.
+ *
+ * must-single-thread
+ */
+ def getReturnType(methodDescriptor: String): BType = {
+ val n = global.newTypeName(methodDescriptor)
+ val delta = n.pos(')') // `delta` is relative to the Name's zero-based start position, not a valid index into chrs.
+ assert(delta < n.length, s"not a valid method descriptor: $methodDescriptor")
+ getType(n.start + delta + 1)
+ }
+
+ /*
+ * Returns the descriptor corresponding to the given argument and return types.
+ * Note: no BType is created here for the resulting method descriptor,
+ * if that's desired the invoker is responsible for that.
+ *
+ * @param returnType the return type of the method.
+ * @param argumentTypes the argument types of the method.
+ * @return the descriptor corresponding to the given argument and return types.
+ *
+ * can-multi-thread
+ */
+ def getMethodDescriptor(
+ returnType: BType,
+ argumentTypes: Array[BType]): String =
+ {
+ val buf = new StringBuffer()
+ buf.append('(')
+ var i = 0
+ while (i < argumentTypes.length) {
+ argumentTypes(i).getDescriptor(buf)
+ i += 1
+ }
+ buf.append(')')
+ returnType.getDescriptor(buf)
+ buf.toString()
+ }
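+
+ /* Example (a sketch): for a method taking an Int and a String and returning Boolean,
+ * getMethodDescriptor(BOOLEAN_TYPE, Array(INT_TYPE, brefType("java/lang/String"))) builds
+ * "(ILjava/lang/String;)Z", i.e. '(' + the argument descriptors in order + ')' + the return descriptor.
+ */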
+
+ } // end of object BType
+
+ /*
+ * Based on ASM's Type class. Namer's chrs is used in this class for the same purposes as the `buf` char array in asm.Type.
+ *
+ * All methods of this class can-multi-thread
+ */
+ final class BType(val sort: Int, val off: Int, val len: Int) {
+
+ import global.chrs
+
+ /*
+ * can-multi-thread
+ */
+ def toASMType: scala.tools.asm.Type = {
+ import scala.tools.asm
+ // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+ (sort: @switch) match {
+ case asm.Type.VOID => asm.Type.VOID_TYPE
+ case asm.Type.BOOLEAN => asm.Type.BOOLEAN_TYPE
+ case asm.Type.CHAR => asm.Type.CHAR_TYPE
+ case asm.Type.BYTE => asm.Type.BYTE_TYPE
+ case asm.Type.SHORT => asm.Type.SHORT_TYPE
+ case asm.Type.INT => asm.Type.INT_TYPE
+ case asm.Type.FLOAT => asm.Type.FLOAT_TYPE
+ case asm.Type.LONG => asm.Type.LONG_TYPE
+ case asm.Type.DOUBLE => asm.Type.DOUBLE_TYPE
+ case asm.Type.ARRAY |
+ asm.Type.OBJECT => asm.Type.getObjectType(getInternalName)
+ case asm.Type.METHOD => asm.Type.getMethodType(getDescriptor)
+ }
+ }
+
+ /*
+ * Unlike for ICode's REFERENCE, isBoxedType(t) implies isReferenceType(t).
+ * Also, `isReferenceType(RT_NOTHING) == true`, and similarly for RT_NULL.
+ * Use isNullType() and isNothingType() to detect Nothing and Null.
+ *
+ * can-multi-thread
+ */
+ def hasObjectSort = (sort == BType.OBJECT)
+
+ /*
+ * Returns the number of dimensions of this array type. This method should
+ * only be used for an array type.
+ *
+ * @return the number of dimensions of this array type.
+ *
+ * can-multi-thread
+ */
+ def getDimensions: Int = {
+ var i = 1
+ while (chrs(off + i) == '[') {
+ i += 1
+ }
+ i
+ }
+
+ /*
+ * Returns the (ultimate) element type of this array type.
+ * This method should only be used for an array type.
+ *
+ * @return Returns the type of the elements of this array type.
+ *
+ * can-multi-thread
+ */
+ def getElementType: BType = {
+ assert(isArray, s"Asked for the element type of a non-array type: $this")
+ BType.getType(off + getDimensions)
+ }
+
+ /*
+ * Returns the internal name of the class corresponding to this object or
+ * array type. The internal name of a class is its fully qualified name (as
+ * returned by Class.getName(), where '.' are replaced by '/'. This method
+ * should only be used for an object or array type.
+ *
+ * @return the internal name of the class corresponding to this object type.
+ *
+ * can-multi-thread
+ */
+ def getInternalName: String = {
+ new String(chrs, off, len)
+ }
+
+ /*
+ * @return the part of the internal name after the last '/' (if any); the internal name otherwise.
+ *
+ * can-multi-thread
+ */
+ def getSimpleName: String = {
+ assert(hasObjectSort, s"not of object sort: $toString")
+ val iname = getInternalName
+ val idx = iname.lastIndexOf('/')
+ if (idx == -1) iname
+ else iname.substring(idx + 1)
+ }
+
+ /*
+ * Returns the argument types of methods of this type.
+ * This method should only be used for method types.
+ *
+ * @return the argument types of methods of this type.
+ *
+ * can-multi-thread
+ */
+ def getArgumentTypes: Array[BType] = {
+ BType.getArgumentTypes(off + 1)
+ }
+
+ /*
+ * Returns the return type of methods of this type.
+ * This method should only be used for method types.
+ *
+ * @return the return type of methods of this type.
+ *
+ * can-multi-thread
+ */
+ def getReturnType: BType = {
+ assert(chrs(off) == '(', s"doesn't look like a method descriptor: $toString")
+ var resPos = off + 1
+ while (chrs(resPos) != ')') { resPos += 1 }
+ BType.getType(resPos + 1)
+ }
+
+ // ------------------------------------------------------------------------
+ // Inspector methods
+ // ------------------------------------------------------------------------
+
+ def isPrimitiveOrVoid = (sort < BType.ARRAY) // can-multi-thread
+ def isValueType = (sort < BType.ARRAY) // can-multi-thread
+ def isArray = (sort == BType.ARRAY) // can-multi-thread
+ def isUnitType = (sort == BType.VOID) // can-multi-thread
+
+ def isRefOrArrayType = { hasObjectSort || isArray } // can-multi-thread
+ def isNonUnitValueType = { isValueType && !isUnitType } // can-multi-thread
+
+ def isNonSpecial = { !isValueType && !isArray && !isPhantomType } // can-multi-thread
+ def isNothingType = { (this == RT_NOTHING) || (this == CT_NOTHING) } // can-multi-thread
+ def isNullType = { (this == RT_NULL) || (this == CT_NULL) } // can-multi-thread
+ def isPhantomType = { isNothingType || isNullType } // can-multi-thread
+
+ /*
+ * can-multi-thread
+ */
+ def isBoxed = {
+ this match {
+ case BOXED_UNIT | BOXED_BOOLEAN | BOXED_CHAR |
+ BOXED_BYTE | BOXED_SHORT | BOXED_INT |
+ BOXED_FLOAT | BOXED_LONG | BOXED_DOUBLE
+ => true
+ case _
+ => false
+ }
+ }
+
+ /* On the JVM,
+ * BOOL, BYTE, CHAR, SHORT, and INT
+ * are like Ints for the purpose of lub calculation.
+ *
+ * can-multi-thread
+ */
+ def isIntSizedType = {
+ (sort : @switch) match {
+ case BType.BOOLEAN | BType.CHAR |
+ BType.BYTE | BType.SHORT | BType.INT
+ => true
+ case _
+ => false
+ }
+ }
+
+ /* On the JVM, similar to isIntSizedType except that BOOL isn't integral while LONG is.
+ *
+ * can-multi-thread
+ */
+ def isIntegralType = {
+ (sort : @switch) match {
+ case BType.CHAR |
+ BType.BYTE | BType.SHORT | BType.INT |
+ BType.LONG
+ => true
+ case _
+ => false
+ }
+ }
+
+ /* On the JVM, FLOAT and DOUBLE.
+ *
+ * can-multi-thread
+ */
+ def isRealType = { (sort == BType.FLOAT ) || (sort == BType.DOUBLE) }
+
+ def isNumericType = (isIntegralType || isRealType) // can-multi-thread
+
+ /* Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?)
+ *
+ * can-multi-thread
+ */
+ def isWideType = (getSize == 2)
+
+ /*
+ * Element vs. Component type of an array:
+ * Quoting from the JVMS, Sec. 2.4 "Reference Types and Values"
+ *
+ * An array type consists of a component type with a single dimension (whose
+ * length is not given by the type). The component type of an array type may itself be
+ * an array type. If, starting from any array type, one considers its component type,
+ * and then (if that is also an array type) the component type of that type, and so on,
+ * eventually one must reach a component type that is not an array type; this is called
+ * the element type of the array type. The element type of an array type is necessarily
+ * either a primitive type, or a class type, or an interface type.
+ *
+ */
+
+ /* The type of items this array holds.
+ *
+ * can-multi-thread
+ */
+ def getComponentType: BType = {
+ assert(isArray, s"Asked for the component type of a non-array type: $this")
+ BType.getType(off + 1)
+ }
+
+ // ------------------------------------------------------------------------
+ // Conversion to type descriptors
+ // ------------------------------------------------------------------------
+
+ /*
+ * @return the descriptor corresponding to this Java type.
+ *
+ * can-multi-thread
+ */
+ def getDescriptor: String = {
+ val buf = new StringBuffer()
+ getDescriptor(buf)
+ buf.toString()
+ }
+
+ /*
+ * Appends the descriptor corresponding to this Java type to the given string buffer.
+ *
+ * @param buf the string buffer to which the descriptor must be appended.
+ *
+ * can-multi-thread
+ */
+ private def getDescriptor(buf: StringBuffer) {
+ if (isPrimitiveOrVoid) {
+ // descriptor is in byte 3 of 'off' for primitive types (buf == null)
+ buf.append(((off & 0xFF000000) >>> 24).asInstanceOf[Char])
+ } else if (sort == BType.OBJECT) {
+ buf.append('L')
+ buf.append(chrs, off, len)
+ buf.append(';')
+ } else { // sort == ARRAY || sort == METHOD
+ buf.append(chrs, off, len)
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Corresponding size and opcodes
+ // ------------------------------------------------------------------------
+
+ /*
+ * Returns the size of values of this type.
+ * This method must not be used for method types.
+ *
+ * @return the size of values of this type, i.e., 2 for <tt>long</tt> and
+ * <tt>double</tt>, 0 for <tt>void</tt> and 1 otherwise.
+ *
+ * can-multi-thread
+ */
+ def getSize: Int = {
+ // the size is in byte 0 of 'off' for primitive types (buf == null)
+ if (isPrimitiveOrVoid) (off & 0xFF) else 1
+ }
+
+ /*
+ * Returns a JVM instruction opcode adapted to this Java type. This method
+ * must not be used for method types.
+ *
+ * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
+ * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
+ * ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
+ * @return an opcode that is similar to the given opcode, but adapted to
+ * this Java type. For example, if this type is <tt>float</tt> and
+ * <tt>opcode</tt> is IRETURN, this method returns FRETURN.
+ *
+ * can-multi-thread
+ */
+ def getOpcode(opcode: Int): Int = {
+ import scala.tools.asm.Opcodes
+ if (opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE) {
+ // the offset for IALOAD or IASTORE is in byte 1 of 'off' for
+ // primitive types (buf == null)
+ opcode + (if (isPrimitiveOrVoid) (off & 0xFF00) >> 8 else 4)
+ } else {
+ // the offset for other instructions is in byte 2 of 'off' for
+ // primitive types (buf == null)
+ opcode + (if (isPrimitiveOrVoid) (off & 0xFF0000) >> 16 else 4)
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Equals, hashCode and toString
+ // ------------------------------------------------------------------------
+
+ /*
+ * Tests if the given object is equal to this type.
+ *
+ * @param o the object to be compared to this type.
+ * @return <tt>true</tt> if the given object is equal to this type.
+ *
+ * can-multi-thread
+ */
+ override def equals(o: Any): Boolean = {
+ if (!(o.isInstanceOf[BType])) {
+ return false
+ }
+ val t = o.asInstanceOf[BType]
+ if (this eq t) {
+ return true
+ }
+ if (sort != t.sort) {
+ return false
+ }
+ if (sort >= BType.ARRAY) {
+ if (len != t.len) {
+ return false
+ }
+ // sort checked already
+ if (off == t.off) {
+ return true
+ }
+ var i = 0
+ while (i < len) {
+ if (chrs(off + i) != chrs(t.off + i)) {
+ return false
+ }
+ i += 1
+ }
+ // If we reach here, we could update the largest of (this.off, t.off) to match the other, so as to simplify future == comparisons.
+ // But that would require a var rather than val.
+ }
+ true
+ }
+
+ /*
+ * @return a hash code value for this type.
+ *
+ * can-multi-thread
+ */
+ override def hashCode(): Int = {
+ var hc = 13 * sort;
+ if (sort >= BType.ARRAY) {
+ var i = off
+ val end = i + len
+ while (i < end) {
+ hc = 17 * (hc + chrs(i))
+ i += 1
+ }
+ }
+ hc
+ }
+
+ /*
+ * @return the descriptor of this type.
+ *
+ * can-multi-thread
+ */
+ override def toString: String = { getDescriptor }
+
+ }
+
+ /*
+ * Creates a TypeName and the BType token for it.
+ * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
+ *
+ * must-single-thread
+ */
+ def brefType(iname: String): BType = { brefType(newTypeName(iname.toCharArray(), 0, iname.length())) }
+
+ /*
+ * Creates a BType token for the TypeName received as argument.
+ * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
+ *
+ * can-multi-thread
+ */
+ def brefType(iname: TypeName): BType = { BType.getObjectType(iname.start, iname.length) }
+
+ // due to keyboard economy only
+ val UNIT = BType.VOID_TYPE
+ val BOOL = BType.BOOLEAN_TYPE
+ val CHAR = BType.CHAR_TYPE
+ val BYTE = BType.BYTE_TYPE
+ val SHORT = BType.SHORT_TYPE
+ val INT = BType.INT_TYPE
+ val LONG = BType.LONG_TYPE
+ val FLOAT = BType.FLOAT_TYPE
+ val DOUBLE = BType.DOUBLE_TYPE
+
+ val BOXED_UNIT = brefType("java/lang/Void")
+ val BOXED_BOOLEAN = brefType("java/lang/Boolean")
+ val BOXED_BYTE = brefType("java/lang/Byte")
+ val BOXED_SHORT = brefType("java/lang/Short")
+ val BOXED_CHAR = brefType("java/lang/Character")
+ val BOXED_INT = brefType("java/lang/Integer")
+ val BOXED_LONG = brefType("java/lang/Long")
+ val BOXED_FLOAT = brefType("java/lang/Float")
+ val BOXED_DOUBLE = brefType("java/lang/Double")
+
+ /*
+ * RT_NOTHING and RT_NULL exist at run-time only.
+ * They are the bytecode-level manifestation (in method signatures only) of what shows up as NothingClass resp. NullClass in Scala ASTs.
+ * Therefore, when RT_NOTHING or RT_NULL are to be emitted,
+ * a mapping is needed: the internal names of NothingClass and NullClass can't be emitted as-is.
+ */
+ val RT_NOTHING = brefType("scala/runtime/Nothing$")
+ val RT_NULL = brefType("scala/runtime/Null$")
+ val CT_NOTHING = brefType("scala/Nothing") // TODO needed?
+ val CT_NULL = brefType("scala/Null") // TODO needed?
+
+ val srBooleanRef = brefType("scala/runtime/BooleanRef")
+ val srByteRef = brefType("scala/runtime/ByteRef")
+ val srCharRef = brefType("scala/runtime/CharRef")
+ val srIntRef = brefType("scala/runtime/IntRef")
+ val srLongRef = brefType("scala/runtime/LongRef")
+ val srFloatRef = brefType("scala/runtime/FloatRef")
+ val srDoubleRef = brefType("scala/runtime/DoubleRef")
+
+ /* Map from type kinds to the Java reference types.
+ * Useful when pushing class literals onto the operand stack (ldc instruction taking a class literal).
+ * @see Predef.classOf
+ * @see genConstant()
+ */
+ val classLiteral = immutable.Map[BType, BType](
+ UNIT -> BOXED_UNIT,
+ BOOL -> BOXED_BOOLEAN,
+ BYTE -> BOXED_BYTE,
+ SHORT -> BOXED_SHORT,
+ CHAR -> BOXED_CHAR,
+ INT -> BOXED_INT,
+ LONG -> BOXED_LONG,
+ FLOAT -> BOXED_FLOAT,
+ DOUBLE -> BOXED_DOUBLE
+ )
+
+ case class MethodNameAndType(mname: String, mdesc: String)
+
+ val asmBoxTo: Map[BType, MethodNameAndType] = {
+ Map(
+ BOOL -> MethodNameAndType("boxToBoolean", "(Z)Ljava/lang/Boolean;" ) ,
+ BYTE -> MethodNameAndType("boxToByte", "(B)Ljava/lang/Byte;" ) ,
+ CHAR -> MethodNameAndType("boxToCharacter", "(C)Ljava/lang/Character;") ,
+ SHORT -> MethodNameAndType("boxToShort", "(S)Ljava/lang/Short;" ) ,
+ INT -> MethodNameAndType("boxToInteger", "(I)Ljava/lang/Integer;" ) ,
+ LONG -> MethodNameAndType("boxToLong", "(J)Ljava/lang/Long;" ) ,
+ FLOAT -> MethodNameAndType("boxToFloat", "(F)Ljava/lang/Float;" ) ,
+ DOUBLE -> MethodNameAndType("boxToDouble", "(D)Ljava/lang/Double;" )
+ )
+ }
+
+ val asmUnboxTo: Map[BType, MethodNameAndType] = {
+ Map(
+ BOOL -> MethodNameAndType("unboxToBoolean", "(Ljava/lang/Object;)Z") ,
+ BYTE -> MethodNameAndType("unboxToByte", "(Ljava/lang/Object;)B") ,
+ CHAR -> MethodNameAndType("unboxToChar", "(Ljava/lang/Object;)C") ,
+ SHORT -> MethodNameAndType("unboxToShort", "(Ljava/lang/Object;)S") ,
+ INT -> MethodNameAndType("unboxToInt", "(Ljava/lang/Object;)I") ,
+ LONG -> MethodNameAndType("unboxToLong", "(Ljava/lang/Object;)J") ,
+ FLOAT -> MethodNameAndType("unboxToFloat", "(Ljava/lang/Object;)F") ,
+ DOUBLE -> MethodNameAndType("unboxToDouble", "(Ljava/lang/Object;)D")
+ )
+ }
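+
+ /* Usage sketch: these maps supply only the name and descriptor of the box/unbox helpers; the
+ * emitter is expected to pair them with the owner of those helpers (scala.runtime.BoxesRunTime
+ * in the standard library), e.g. boxing an Int means INVOKESTATIC boxToInteger (I)Ljava/lang/Integer;
+ * and unboxing it back means INVOKESTATIC unboxToInt (Ljava/lang/Object;)I.
+ */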
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
new file mode 100644
index 0000000000..6b1bb5b220
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
@@ -0,0 +1,1323 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.annotation.switch
+import scala.collection.{ immutable, mutable }
+import scala.tools.nsc.io.AbstractFile
+
+/*
+ * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ * @version 1.0
+ *
+ */
+abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
+
+ import global._
+
+ /*
+ * must-single-thread
+ */
+ def getFileForClassfile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
+ getFile(base, clsName, suffix)
+ }
+
+ /*
+ * must-single-thread
+ */
+ def getOutFolder(csym: Symbol, cName: String, cunit: CompilationUnit): _root_.scala.tools.nsc.io.AbstractFile = {
+ try {
+ outputDirectory(csym)
+ } catch {
+ case ex: Throwable =>
+ cunit.error(cunit.body.pos, s"Couldn't create file for class $cName\n${ex.getMessage}")
+ null
+ }
+ }
+
+ var pickledBytes = 0 // statistics
+
+ // -----------------------------------------------------------------------------------------
+ // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM)
+ // Background:
+ // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
+ // http://comments.gmane.org/gmane.comp.java.vm.languages/2293
+ // https://issues.scala-lang.org/browse/SI-3872
+ // -----------------------------------------------------------------------------------------
+
+ /*
+ * can-multi-thread
+ */
+ def firstCommonSuffix(as: List[Tracked], bs: List[Tracked]): BType = {
+ var chainA = as
+ var chainB = bs
+ var fcs: Tracked = null
+ do {
+ if (chainB contains chainA.head) fcs = chainA.head
+ else if (chainA contains chainB.head) fcs = chainB.head
+ else {
+ chainA = chainA.tail
+ chainB = chainB.tail
+ }
+ } while (fcs == null)
+ fcs.c
+ }
+
+ /* An `asm.ClassWriter` that uses `jvmWiseLUB()` in `getCommonSuperClass()` to compute
+ * the internal name of the least common ancestor of the types given by inameA and inameB,
+ * which is what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow
+ */
+ final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) {
+
+ /*
+ * This method is thread re-entrant because chrs never grows during its operation (that's because all TypeNames being looked up have already been entered).
+ * To stress this point, rather than using `newTypeName()` we use `lookupTypeName()`
+ *
+ * can-multi-thread
+ */
+ override def getCommonSuperClass(inameA: String, inameB: String): String = {
+ val a = brefType(lookupTypeName(inameA.toCharArray))
+ val b = brefType(lookupTypeName(inameB.toCharArray))
+ val lca = jvmWiseLUB(a, b)
+ val lcaName = lca.getInternalName // don't call javaName because that side-effects innerClassBuffer.
+ assert(lcaName != "scala/Any")
+
+ lcaName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things.
+ }
+
+ }
+
+ /*
+ * Finding the least upper bound in agreement with the bytecode verifier (given two internal names handed out by ASM)
+ * Background:
+ * http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
+ * http://comments.gmane.org/gmane.comp.java.vm.languages/2293
+ * https://issues.scala-lang.org/browse/SI-3872
+ *
+ * can-multi-thread
+ */
+ def jvmWiseLUB(a: BType, b: BType): BType = {
+
+ assert(a.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $a")
+ assert(b.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $b")
+
+ val ta = exemplars.get(a)
+ val tb = exemplars.get(b)
+
+ val res = (ta.isInterface, tb.isInterface) match {
+ case (true, true) =>
+ // exercised by test/files/run/t4761.scala
+ if (tb.isSubtypeOf(ta.c)) ta.c
+ else if (ta.isSubtypeOf(tb.c)) tb.c
+ else ObjectReference
+ case (true, false) =>
+ if (tb.isSubtypeOf(a)) a else ObjectReference
+ case (false, true) =>
+ if (ta.isSubtypeOf(b)) b else ObjectReference
+ case _ =>
+ firstCommonSuffix(ta :: ta.superClasses, tb :: tb.superClasses)
+ }
+ assert(res.isNonSpecial, "jvmWiseLUB() returned a non-plain-class.")
+ res
+ }
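+
+ /* Example (a sketch): for two unrelated non-interface classes, say java/lang/String and
+ * java/lang/Integer, the last case walks both superclass chains via firstCommonSuffix and
+ * ends up at java/lang/Object; when one side is an interface, the result is either the other
+ * side (if it is a subtype) or ObjectReference, which matches what the bytecode verifier expects.
+ */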
+
+ /*
+ * must-single-thread
+ */
+ object isJavaEntryPoint {
+
+ /*
+ * must-single-thread
+ */
+ def apply(sym: Symbol, csymCompUnit: CompilationUnit): Boolean = {
+ def fail(msg: String, pos: Position = sym.pos) = {
+ csymCompUnit.warning(sym.pos,
+ sym.name +
+ s" has a main method with parameter type Array[String], but ${sym.fullName('.')} will not be a runnable program.\n Reason: $msg"
+ // TODO: make this next claim true, if possible
+ // by generating valid main methods as static in module classes
+ // not sure what the jvm allows here
+ // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead."
+ )
+ false
+ }
+ def failNoForwarder(msg: String) = {
+ fail(s"$msg, which means no static forwarder can be generated.\n")
+ }
+ val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
+ val hasApproximate = possibles exists { m =>
+ m.info match {
+ case MethodType(p :: Nil, _) => p.tpe.typeSymbol == definitions.ArrayClass
+ case _ => false
+ }
+ }
+ // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
+ hasApproximate && {
+ // Before erasure so we can identify generic mains.
+ enteringErasure {
+ val companion = sym.linkedClassOfClass
+
+ if (definitions.hasJavaMainMethod(companion))
+ failNoForwarder("companion contains its own main method")
+ else if (companion.tpe.member(nme.main) != NoSymbol)
+ // this is only because forwarders aren't smart enough yet
+ failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
+ else if (companion.isTrait)
+ failNoForwarder("companion is a trait")
+ // Now either succeed, or issue some additional warnings for things which look like
+ // attempts to be java main methods.
+ else (possibles exists definitions.isJavaMainMethod) || {
+ possibles exists { m =>
+ m.info match {
+ case PolyType(_, _) =>
+ fail("main methods cannot be generic.")
+ case MethodType(params, res) =>
+ if (res.typeSymbol :: params exists (_.isAbstractType))
+ fail("main methods cannot refer to type parameters or abstract types.", m.pos)
+ else
+ definitions.isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
+ case tp =>
+ fail(s"don't know what this is: $tp", m.pos)
+ }
+ }
+ }
+ }
+ }
+ }
+
+ }
+
+ /*
+ * must-single-thread
+ */
+ def initBytecodeWriter(entryPoints: List[Symbol]): BytecodeWriter = {
+ settings.outputDirs.getSingleOutput match {
+ case Some(f) if f hasExtension "jar" =>
+ // If no main class was specified, see if there's only one
+ // entry point among the classes going into the jar.
+ if (settings.mainClass.isDefault) {
+ entryPoints map (_.fullName('.')) match {
+ case Nil =>
+ log("No Main-Class designated or discovered.")
+ case name :: Nil =>
+ log(s"Unique entry point: setting Main-Class to $name")
+ settings.mainClass.value = name
+ case names =>
+ log(s"No Main-Class due to multiple entry points:\n ${names.mkString("\n ")}")
+ }
+ }
+ else log(s"Main-Class was specified: ${settings.mainClass.value}")
+
+ new DirectToJarfileWriter(f.file)
+
+ case _ => factoryNonJarBytecodeWriter()
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def fieldSymbols(cls: Symbol): List[Symbol] = {
+ for (f <- cls.info.decls.toList if !f.isMethod && f.isTerm && !f.isModule)
+ yield f
+ }
+
+ /*
+ * can-multi-thread
+ */
+ def methodSymbols(cd: ClassDef): List[Symbol] = {
+ cd.impl.body collect { case dd: DefDef => dd.symbol }
+ }
+
+ /*
+ * Populates the InnerClasses JVM attribute with `refedInnerClasses`.
+ * In addition to inner classes mentioned somewhere in `jclass` (where `jclass` is a class file being emitted)
+ * `refedInnerClasses` should contain those inner classes defined as direct member classes of `jclass`
+ * but otherwise not mentioned in `jclass`.
+ *
+ * `refedInnerClasses` may contain duplicates,
+ * and need not contain the enclosing inner classes of each inner class it lists (those are looked up for consistency).
+ *
+ * This method serializes the InnerClasses JVM attribute entries in an appropriate order,
+ * not necessarily that given by `refedInnerClasses`.
+ *
+ * can-multi-thread
+ */
+ final def addInnerClassesASM(jclass: asm.ClassVisitor, refedInnerClasses: Iterable[BType]) {
+ // used to detect duplicates.
+ val seen = mutable.Map.empty[String, String]
+ // result without duplicates, not yet sorted.
+ val result = mutable.Set.empty[InnerClassEntry]
+
+ for(s: BType <- refedInnerClasses;
+ e: InnerClassEntry <- exemplars.get(s).innersChain) {
+
+ assert(e.name != null, "saveInnerClassesFor() is broken.") // documentation
+ val doAdd = seen.get(e.name) match {
+ // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+ case Some(prevOName) =>
+ // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+ // i.e. for them it must be the case that oname == java/lang/Thread
+ assert(prevOName == e.outerName, "duplicate")
+ false
+ case None => true
+ }
+
+ if (doAdd) {
+ seen += (e.name -> e.outerName)
+ result += e
+ }
+
+ }
+ // sorting ensures inner classes are listed after their enclosing class thus satisfying the Eclipse Java compiler
+ for(e <- result.toList sortBy (_.name.toString)) {
+ jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.access)
+ }
+
+ } // end of method addInnerClassesASM()
+
+ /*
+ * The custom attribute (JVMS 4.7.1) "ScalaSig" is used as a marker only,
+ * i.e., the pickle itself is contained in a custom annotation, see:
+ * (1) `addAnnotations()`,
+ * (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10
+ * (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5
+ * That annotation in turn is not related to the "java-generic-signature" (JVMS 4.7.9)
+ * other than both ending up encoded as attributes (JVMS 4.7)
+ * (with the caveat that the "ScalaSig" attribute is associated to some classes,
+ * while the "Signature" attribute can be associated to classes, methods, and fields.)
+ *
+ */
+ trait BCPickles {
+
+ import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
+
+ val versionPickle = {
+ val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
+ assert(vp.writeIndex == 0, vp)
+ vp writeNat PickleFormat.MajorVersion
+ vp writeNat PickleFormat.MinorVersion
+ vp writeNat 0
+ vp
+ }
+
+ /*
+ * can-multi-thread
+ */
+ def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
+ val dest = new Array[Byte](len);
+ System.arraycopy(b, offset, dest, 0, len);
+ new asm.CustomAttr(name, dest)
+ }
+
+ /*
+ * can-multi-thread
+ */
+ def pickleMarkerLocal = {
+ createJAttribute(tpnme.ScalaSignatureATTR.toString, versionPickle.bytes, 0, versionPickle.writeIndex)
+ }
+
+ /*
+ * can-multi-thread
+ */
+ def pickleMarkerForeign = {
+ createJAttribute(tpnme.ScalaATTR.toString, new Array[Byte](0), 0, 0)
+ }
+
+ /* Returns a ScalaSignature annotation if it must be added to this class, none otherwise.
+ * This annotation must be added to the class' annotations list when generating them.
+ *
+ * Depending on whether the returned option is defined, it adds to `jclass` one of:
+ * (a) the ScalaSig marker attribute
+ * (indicating that a scala-signature-annotation aka pickle is present in this class); or
+ * (b) the Scala marker attribute
+ * (indicating that a scala-signature-annotation aka pickle is to be found in another file).
+ *
+ *
+ * @param jclassName The class file that is being readied.
+ * @param sym The symbol for which the signature has been entered in the symData map.
+ * This is different from the symbol
+ * that is being generated in the case of a mirror class.
+ * @return An option that is:
+ * - defined and contains an AnnotationInfo of the ScalaSignature type,
+ * instantiated with the pickle signature for sym.
+ * - empty if the jclass/sym pair must not contain a pickle.
+ *
+ * must-single-thread
+ */
+ def getAnnotPickle(jclassName: String, sym: Symbol): Option[AnnotationInfo] = {
+ currentRun.symData get sym match {
+ case Some(pickle) if !nme.isModuleName(newTermName(jclassName)) =>
+ val scalaAnnot = {
+ val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
+ AnnotationInfo(sigBytes.sigAnnot, Nil, (nme.bytes, sigBytes) :: Nil)
+ }
+ pickledBytes += pickle.writeIndex
+ currentRun.symData -= sym
+ currentRun.symData -= sym.companionSymbol
+ Some(scalaAnnot)
+ case _ =>
+ None
+ }
+ }
+
+ } // end of trait BCPickles
+
+ trait BCInnerClassGen {
+
+ def debugLevel = settings.debuginfo.indexOfChoice
+
+ val emitSource = debugLevel >= 1
+ val emitLines = debugLevel >= 2
+ val emitVars = debugLevel >= 3
+
+ /*
+ * Contains class-symbols that:
+ * (a) are known to denote inner classes
+ * (b) are mentioned somewhere in the class being generated.
+ *
+ * In other words, the lifetime of `innerClassBufferASM` is associated to "the class being generated".
+ */
+ val innerClassBufferASM = mutable.Set.empty[BType]
+
+ /*
+ * Tracks (if needed) the inner class given by `sym`.
+ *
+ * must-single-thread
+ */
+ final def internalName(sym: Symbol): String = { asmClassType(sym).getInternalName }
+
+ /*
+ * Tracks (if needed) the inner class given by `sym`.
+ *
+ * must-single-thread
+ */
+ final def asmClassType(sym: Symbol): BType = {
+ assert(
+ hasInternalName(sym),
+ {
+ val msg0 = if (sym.isAbstractType) "An AbstractTypeSymbol (SI-7122) " else "A symbol ";
+ msg0 + s"has reached the bytecode emitter, for which no JVM-level internal name can be found: ${sym.fullName}"
+ }
+ )
+ val phantOpt = phantomTypeMap.get(sym)
+ if (phantOpt.isDefined) {
+ return phantOpt.get
+ }
+ val tracked = exemplar(sym)
+ val tk = tracked.c
+ if (tracked.isInnerClass) {
+ innerClassBufferASM += tk
+ }
+
+ tk
+ }
+
+ /*
+ * Returns the BType for the given type.
+ * Tracks (if needed) the inner class given by `t`.
+ *
+ * must-single-thread
+ */
+ final def toTypeKind(t: Type): BType = {
+
+ /* Interfaces have to be handled delicately to avoid introducing spurious errors,
+ * but if we treat them all as AnyRef we lose too much information.
+ */
+ def newReference(sym0: Symbol): BType = {
+ assert(!primitiveTypeMap.contains(sym0), "Use primitiveTypeMap instead.")
+ assert(sym0 != definitions.ArrayClass, "Use arrayOf() instead.")
+
+ if (sym0 == definitions.NullClass) return RT_NULL;
+ if (sym0 == definitions.NothingClass) return RT_NOTHING;
+
+ val sym = (
+ if (!sym0.isPackageClass) sym0
+ else sym0.info.member(nme.PACKAGE) match {
+ case NoSymbol => abort(s"SI-5604: Cannot use package as value: ${sym0.fullName}")
+ case s => abort(s"SI-5604: found package class where package object expected: $s")
+ }
+ )
+
+ // Can't call .toInterface (at this phase) or we trip an assertion.
+ // See PackratParser#grow for a method which fails with an apparent mismatch
+ // between "object PackratParsers$class" and "trait PackratParsers"
+ if (sym.isImplClass) {
+ // pos/spec-List.scala is the sole failure if we don't check for NoSymbol
+ val traitSym = sym.owner.info.decl(tpnme.interfaceName(sym.name))
+ if (traitSym != NoSymbol) {
+ // this tracks the inner class in innerClassBufferASM, if needed.
+ return asmClassType(traitSym)
+ }
+ }
+
+ assert(hasInternalName(sym), s"Invoked for a symbol lacking JVM internal name: ${sym.fullName}")
+ assert(!phantomTypeMap.contains(sym), "phantom types not supposed to reach here.")
+
+ val tracked = exemplar(sym)
+ val tk = tracked.c
+ if (tracked.isInnerClass) {
+ innerClassBufferASM += tk
+ }
+
+ tk
+ }
+
+ def primitiveOrRefType(sym: Symbol): BType = {
+ assert(sym != definitions.ArrayClass, "Use primitiveOrArrayOrRefType() instead.")
+
+ primitiveTypeMap.getOrElse(sym, newReference(sym))
+ }
+
+ def primitiveOrRefType2(sym: Symbol): BType = {
+ primitiveTypeMap.get(sym) match {
+ case Some(pt) => pt
+ case None =>
+ sym match {
+ case definitions.NullClass => RT_NULL
+ case definitions.NothingClass => RT_NOTHING
+ case _ if sym.isClass => newReference(sym)
+ case _ =>
+ assert(sym.isType, sym) // it must be compiling Array[a]
+ ObjectReference
+ }
+ }
+ }
+
+ import definitions.ArrayClass
+
+ // Call to .normalize fixes #3003 (follow type aliases). Otherwise, primitiveOrArrayOrRefType() would return ObjectReference.
+ t.normalize match {
+
+ case ThisType(sym) =>
+ if (sym == ArrayClass) ObjectReference
+ else phantomTypeMap.getOrElse(sym, exemplar(sym).c)
+
+ case SingleType(_, sym) => primitiveOrRefType(sym)
+
+ case _: ConstantType => toTypeKind(t.underlying)
+
+ case TypeRef(_, sym, args) =>
+ if (sym == ArrayClass) arrayOf(toTypeKind(args.head))
+ else primitiveOrRefType2(sym)
+
+ case ClassInfoType(_, _, sym) =>
+ assert(sym != ArrayClass, "ClassInfoType to ArrayClass!")
+ primitiveOrRefType(sym)
+
+ // !!! Iulian says types which make no sense after erasure should not reach here, which includes the ExistentialType, AnnotatedType, RefinedType.
+ case ExistentialType(_, t) => toTypeKind(t) // TODO shouldn't get here but the following does: akka-actor/src/main/scala/akka/util/WildcardTree.scala
+ case AnnotatedType(_, w, _) => toTypeKind(w) // TODO test/files/jvm/annotations.scala causes an AnnotatedType to reach here.
+ case RefinedType(parents, _) => parents map toTypeKind reduceLeft jvmWiseLUB
+
+ // For sure WildcardTypes shouldn't reach here either, but when debugging such situations this may come in handy.
+ // case WildcardType => REFERENCE(ObjectClass)
+ case norm => abort(
+ s"Unknown type: $t, $norm [${t.getClass}, ${norm.getClass}] TypeRef? ${t.isInstanceOf[TypeRef]}"
+ )
+ }
+
+ } // end of method toTypeKind()
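+
+ /* Example (a sketch): for the Scala type Int this presumably resolves through primitiveTypeMap
+ * to INT; for String it goes through newReference and yields the object BType for
+ * java/lang/String; for Array[Int] it hits the TypeRef case with ArrayClass and yields
+ * arrayOf(INT), i.e. the "[I" array type.
+ */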
+
+ /*
+ * must-single-thread
+ */
+ def asmMethodType(msym: Symbol): BType = {
+ assert(msym.isMethod, s"not a method-symbol: $msym")
+ val resT: BType =
+ if (msym.isClassConstructor || msym.isConstructor) BType.VOID_TYPE
+ else toTypeKind(msym.tpe.resultType);
+ BType.getMethodType( resT, mkArray(msym.tpe.paramTypes map toTypeKind) )
+ }
+
+ /*
+ * Returns all direct member inner classes of `csym`,
+ * thus making sure they get entries in the InnerClasses JVM attribute
+ * even if otherwise not mentioned in the class being built.
+ *
+ * must-single-thread
+ */
+ final def trackMemberClasses(csym: Symbol, lateClosuresBTs: List[BType]): List[BType] = {
+ val lateInnerClasses = exitingErasure {
+ for (sym <- List(csym, csym.linkedClassOfClass); memberc <- sym.info.decls.map(innerClassSymbolFor) if memberc.isClass)
+ yield memberc
+ }
+ // as a precaution, do the following outside the above `exitingErasure` otherwise funny internal names might be computed.
+ val result = for(memberc <- lateInnerClasses) yield {
+ val tracked = exemplar(memberc)
+ val memberCTK = tracked.c
+ assert(tracked.isInnerClass, s"saveInnerClassesFor() says this was no inner-class after all: ${memberc.fullName}")
+
+ memberCTK
+ }
+
+ exemplar(csym).directMemberClasses = result
+
+ result
+ }
+
+ /*
+ * Tracks (if needed) the inner class given by `t`.
+ *
+ * must-single-thread
+ */
+ final def descriptor(t: Type): String = { toTypeKind(t).getDescriptor }
+
+ /*
+ * Tracks (if needed) the inner class given by `sym`.
+ *
+ * must-single-thread
+ */
+ final def descriptor(sym: Symbol): String = { asmClassType(sym).getDescriptor }
+
+ } // end of trait BCInnerClassGen
+
+ trait BCAnnotGen extends BCInnerClassGen {
+
+ /*
+ * can-multi-thread
+ */
+ def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
+ val ca = new Array[Char](bytes.length)
+ var idx = 0
+ while (idx < bytes.length) {
+ val b: Byte = bytes(idx)
+ assert((b & ~0x7f) == 0)
+ ca(idx) = b.asInstanceOf[Char]
+ idx += 1
+ }
+
+ ca
+ }
+
+ /*
+ * can-multi-thread
+ */
+ private def arrEncode(sb: ScalaSigBytes): Array[String] = {
+ var strs: List[String] = Nil
+ val bSeven: Array[Byte] = sb.sevenBitsMayBeZero
+ // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure)
+ var prevOffset = 0
+ var offset = 0
+ var encLength = 0
+ while (offset < bSeven.size) {
+ val deltaEncLength = (if (bSeven(offset) == 0) 2 else 1)
+ val newEncLength = encLength.toLong + deltaEncLength
+ if (newEncLength >= 65535) {
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ encLength = 0
+ prevOffset = offset
+ } else {
+ encLength += deltaEncLength
+ offset += 1
+ }
+ }
+ if (prevOffset < offset) {
+ assert(offset == bSeven.length)
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ }
+ assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict?
+ mkArrayReverse(strs)
+ }
+
+ /*
+ * can-multi-thread
+ */
+ private def strEncode(sb: ScalaSigBytes): String = {
+ val ca = ubytesToCharArray(sb.sevenBitsMayBeZero)
+ new java.lang.String(ca)
+ // debug val bvA = new asm.ByteVector; bvA.putUTF8(s)
+ // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes)
+ // debug assert(enc(idx) == bvA.getByte(idx + 2))
+ // debug assert(bvA.getLength == enc.size + 2)
+ }
+
+ /*
+ * For arg a LiteralAnnotArg(constt) with const.tag in {ClazzTag, EnumTag}
+ * as well as for arg a NestedAnnotArg
+ * must-single-thread
+ * Otherwise it's safe to call from multiple threads.
+ */
+ def emitArgument(av: asm.AnnotationVisitor,
+ name: String,
+ arg: ClassfileAnnotArg) {
+ (arg: @unchecked) match {
+
+ case LiteralAnnotArg(const) =>
+ if (const.isNonUnitAnyVal) { av.visit(name, const.value) }
+ else {
+ const.tag match {
+ case StringTag =>
+ assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+ av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag
+ case ClazzTag => av.visit(name, toTypeKind(const.typeValue).toASMType)
+ case EnumTag =>
+ val edesc = descriptor(const.tpe) // the class descriptor of the enumeration class.
+ val evalue = const.symbolValue.name.toString // the actual enumeration value.
+ av.visitEnum(name, edesc, evalue)
+ }
+ }
+
+ case sb @ ScalaSigBytes(bytes) =>
+ // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files)
+ // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure.
+ if (sb.fitsInOneString) {
+ av.visit(name, strEncode(sb))
+ } else {
+ val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+ for(arg <- arrEncode(sb)) { arrAnnotV.visit(name, arg) }
+ arrAnnotV.visitEnd()
+ } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape.
+
+ case ArrayAnnotArg(args) =>
+ val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+ for(arg <- args) { emitArgument(arrAnnotV, null, arg) }
+ arrAnnotV.visitEnd()
+
+ case NestedAnnotArg(annInfo) =>
+ val AnnotationInfo(typ, args, assocs) = annInfo
+ assert(args.isEmpty, args)
+ val desc = descriptor(typ) // the class descriptor of the nested annotation class
+ val nestedVisitor = av.visitAnnotation(name, desc)
+ emitAssocs(nestedVisitor, assocs)
+ }
+ }
+
+ /* Whether an annotation should be emitted as a Java annotation
+ * .initialize: if 'annot' is read from pickle, atp might be un-initialized
+ *
+ * must-single-thread
+ */
+ private def shouldEmitAnnotation(annot: AnnotationInfo) =
+ annot.symbol.initialize.isJavaDefined &&
+ annot.matches(definitions.ClassfileAnnotationClass) &&
+ annot.args.isEmpty &&
+ !annot.matches(definitions.DeprecatedAttr)
+
+ /*
+ * In general,
+ * must-single-thread
+ * but not necessarily always.
+ */
+ def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, ClassfileAnnotArg)]) {
+ for ((name, value) <- assocs) {
+ emitArgument(av, name.toString(), value)
+ }
+ av.visitEnd()
+ }
+
+ /*
+ * must-single-thread
+ */
+ def emitAnnotations(cw: asm.ClassVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = cw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def emitAnnotations(mw: asm.MethodVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = mw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def emitAnnotations(fw: asm.FieldVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = fw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]) {
+ val annotationss = pannotss map (_ filter shouldEmitAnnotation)
+ if (annotationss forall (_.isEmpty)) return
+ for ((annots, idx) <- annotationss.zipWithIndex;
+ annot <- annots) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true)
+ emitAssocs(pannVisitor, assocs)
+ }
+ }
+
+ } // end of trait BCAnnotGen
+
+ trait BCJGenSigGen {
+
+ // @M don't generate java generics sigs for (members of) implementation
+ // classes, as they are monomorphic (TODO: ok?)
+ /*
+ * must-single-thread
+ */
+ private def needsGenericSignature(sym: Symbol) = !(
+ // PP: This condition used to include sym.hasExpandedName, but this leads
+ // to the total loss of generic information if a private member is
+ // accessed from a closure: both the field and the accessor were generated
+ // without it. This is particularly bad because the availability of
+ // generic information could disappear as a consequence of a seemingly
+ // unrelated change.
+ settings.Ynogenericsig
+ || sym.isArtifact
+ || sym.isLiftedMethod
+ || sym.isBridge
+ || (sym.ownerChain exists (_.isImplClass))
+ )
+
+ def getCurrentCUnit(): CompilationUnit
+
+ /* @return
+ * - `null` if no Java signature is to be added (`null` is what ASM expects in these cases).
+ * - otherwise the signature in question
+ *
+ * must-single-thread
+ */
+ def getGenericSignature(sym: Symbol, owner: Symbol): String = {
+
+ if (!needsGenericSignature(sym)) { return null }
+
+ val memberTpe = enteringErasure(owner.thisType.memberInfo(sym))
+
+ val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
+ if (jsOpt.isEmpty) { return null }
+
+ val sig = jsOpt.get
+ log(sig) // This seems useful enough in the general case.
+
+ def wrap(op: => Unit) = {
+ try { op; true }
+ catch { case _: Throwable => false }
+ }
+
+ if (settings.Xverify) {
+ // Run the signature parser to catch bogus signatures.
+ val isValidSignature = wrap {
+ // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
+ import scala.tools.asm.util.CheckClassAdapter
+ if (sym.isMethod) { CheckClassAdapter checkMethodSignature sig }
+ else if (sym.isTerm) { CheckClassAdapter checkFieldSignature sig }
+ else { CheckClassAdapter checkClassSignature sig }
+ }
+
+ if (!isValidSignature) {
+ getCurrentCUnit().warning(sym.pos,
+ """|compiler bug: created invalid generic signature for %s in %s
+ |signature: %s
+ |if this is reproducible, please report bug at https://issues.scala-lang.org/
+ """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
+ return null
+ }
+ }
+
+ if ((settings.check containsName phaseName)) {
+ val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe))
+ val bytecodeTpe = owner.thisType.memberInfo(sym)
+ if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
+ getCurrentCUnit().warning(sym.pos,
+ """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
+ |signature: %s
+ |original type: %s
+ |normalized type: %s
+ |erasure type: %s
+              |if this is reproducible, please report bug at https://issues.scala-lang.org/
+ """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
+ return null
+ }
+ }
+
+ sig
+ }
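+    // Rough, illustrative example of what `erasure.javaSig` yields: for a member like
+    //   def head[A](xs: List[A]): A
+    // the signature is along the lines of
+    //   <A:Ljava/lang/Object;>(Lscala/collection/immutable/List<TA;>;)TA;
+    // which ASM stores verbatim in the Signature attribute; `null` means "emit no attribute".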
+
+ } // end of trait BCJGenSigGen
+
+ trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen {
+
+ // -----------------------------------------------------------------------------------------
+ // Static forwarders (related to mirror classes but also present in
+ // a plain class lacking companion module, for details see `isCandidateForForwarders`).
+ // -----------------------------------------------------------------------------------------
+
+ val ExcludedForwarderFlags = genASM.ExcludedForwarderFlags
+
+ /* Adds a @remote annotation, actual use unknown.
+ *
+ * Invoked from genMethod() and addForwarder().
+ *
+ * must-single-thread
+ */
+ def addRemoteExceptionAnnot(isRemoteClass: Boolean, isJMethodPublic: Boolean, meth: Symbol) {
+ val needsAnnotation = (
+ ( isRemoteClass ||
+ isRemote(meth) && isJMethodPublic
+ ) && !(meth.throwsAnnotations contains definitions.RemoteExceptionClass)
+ )
+ if (needsAnnotation) {
+ val c = Constant(definitions.RemoteExceptionClass.tpe)
+ val arg = Literal(c) setType c.tpe
+ meth.addAnnotation(appliedType(definitions.ThrowsClass, c.tpe), arg)
+ }
+ }
+
+ /* Add a forwarder for method m. Used only from addForwarders().
+ *
+ * must-single-thread
+ */
+ private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, module: Symbol, m: Symbol) {
+ val moduleName = internalName(module)
+ val methodInfo = module.thisType.memberInfo(m)
+ val paramJavaTypes: List[BType] = methodInfo.paramTypes map toTypeKind
+ // val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
+
+ /* Forwarders must not be marked final,
+ * as the JVM will not allow redefinition of a final static method,
+ * and we don't know what classes might be subclassing the companion class. See SI-4827.
+ */
+ // TODO: evaluate the other flags we might be dropping on the floor here.
+ // TODO: ACC_SYNTHETIC ?
+ val flags = PublicStatic | (
+ if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0
+ )
+
+ // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize }
+ val jgensig = if (m.isDeferred) null else getGenericSignature(m, module); // only add generic signature if method concrete; bug #1745
+ addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m)
+ val (throws, others) = m.annotations partition (_.symbol == definitions.ThrowsClass)
+ val thrownExceptions: List[String] = getExceptions(throws)
+
+ val jReturnType = toTypeKind(methodInfo.resultType)
+ val mdesc = BType.getMethodType(jReturnType, mkArray(paramJavaTypes)).getDescriptor
+ val mirrorMethodName = m.javaSimpleName.toString
+ val mirrorMethod: asm.MethodVisitor = jclass.visitMethod(
+ flags,
+ mirrorMethodName,
+ mdesc,
+ jgensig,
+ mkArray(thrownExceptions)
+ )
+
+ emitAnnotations(mirrorMethod, others)
+ emitParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
+
+ mirrorMethod.visitCode()
+
+ mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, descriptor(module))
+
+ var index = 0
+ for(jparamType <- paramJavaTypes) {
+ mirrorMethod.visitVarInsn(jparamType.getOpcode(asm.Opcodes.ILOAD), index)
+ assert(jparamType.sort != BType.METHOD, jparamType)
+ index += jparamType.getSize
+ }
+
+ mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).getDescriptor)
+ mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN))
+
+ mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ mirrorMethod.visitEnd()
+
+ }
+
+ /* Add forwarders for all methods defined in `module` that don't conflict
+ * with methods in the companion class of `module`. A conflict arises when
+     * a method with the same name is defined in both the class and its companion object;
+     * method signatures are not taken into account.

+ *
+ * must-single-thread
+ */
+ def addForwarders(isRemoteClass: Boolean, jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol) {
+ assert(moduleClass.isModuleClass, moduleClass)
+ debuglog(s"Dumping mirror class for object: $moduleClass")
+
+ val linkedClass = moduleClass.companionClass
+ lazy val conflictingNames: Set[Name] = {
+ (linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet
+ }
+ debuglog(s"Potentially conflicting names for forwarders: $conflictingNames")
+
+ for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, symtab.Flags.METHOD)) {
+ if (m.isType || m.isDeferred || (m.owner eq definitions.ObjectClass) || m.isConstructor)
+ debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
+ else if (conflictingNames(m.name))
+ log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}")
+ else if (m.hasAccessBoundary)
+ log(s"No forwarder for non-public member $m")
+ else {
+ log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'")
+ addForwarder(isRemoteClass, jclass, moduleClass, m)
+ }
+ }
+ }
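+    // Illustrative example: for
+    //   class A; object A { def f(x: Int): Int = x }
+    // the companion class A receives a static forwarder roughly equivalent to
+    //   public static int f(int x) { return A$.MODULE$.f(x); }
+    // whereas a `f` already declared in class A suppresses the forwarder (name conflict, see above).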
+
+ /*
+ * Quoting from JVMS 4.7.5 The Exceptions Attribute
+ * "The Exceptions attribute indicates which checked exceptions a method may throw.
+ * There may be at most one Exceptions attribute in each method_info structure."
+ *
+ * The contents of that attribute are determined by the `String[] exceptions` argument to ASM's ClassVisitor.visitMethod()
+ * This method returns such list of internal names.
+ *
+ * must-single-thread
+ */
+ def getExceptions(excs: List[AnnotationInfo]): List[String] = {
+ for (ThrownException(exc) <- excs.distinct)
+ yield internalName(exc)
+ }
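+    // Illustrative example: a method annotated with @throws[java.io.IOException] yields
+    // List("java/io/IOException") here, which visitMethod() turns into the Exceptions attribute.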
+
+ } // end of trait BCForwardersGen
+
+ trait BCClassGen extends BCInnerClassGen {
+
+ // Used as threshold above which a tableswitch bytecode instruction is preferred over a lookupswitch.
+ // There's a space tradeoff between these multi-branch instructions (details in the JVM spec).
+ // The particular value in use for `MIN_SWITCH_DENSITY` reflects a heuristic.
+ val MIN_SWITCH_DENSITY = 0.7
+
+ /*
+ * must-single-thread
+ */
+ def serialVUID(csym: Symbol): Option[Long] = csym getAnnotation definitions.SerialVersionUIDAttr collect {
+ case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
+ }
+
+ /*
+ * Add public static final field serialVersionUID with value `id`
+ *
+ * can-multi-thread
+ */
+ def addSerialVUID(id: Long, jclass: asm.ClassVisitor) {
+ // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)`
+ jclass.visitField(
+ PublicStaticFinal,
+ "serialVersionUID",
+ "J",
+ null, // no java-generic-signature
+ new java.lang.Long(id)
+ ).visitEnd()
+ }
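+    // Illustrative example: for  @SerialVersionUID(42L) class Foo , serialVUID returns Some(42L)
+    // and the class file ends up with  public static final long serialVersionUID = 42L .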
+
+ /*
+ * @param owner internal name of the enclosing class of the class.
+ *
+     * @param name the name of the method that contains the class.
+     *
+     * @param methodType the JVM method type of the method that contains the class.
+ */
+ case class EnclMethodEntry(owner: String, name: String, methodType: BType)
+
+ /*
+ * @return null if the current class is not internal to a method
+ *
+ * Quoting from JVMS 4.7.7 The EnclosingMethod Attribute
+ * A class must have an EnclosingMethod attribute if and only if it is a local class or an anonymous class.
+ * A class may have no more than one EnclosingMethod attribute.
+ *
+ * must-single-thread
+ */
+ def getEnclosingMethodAttribute(clazz: Symbol): EnclMethodEntry = { // JVMS 4.7.7
+
+ def newEEE(eClass: Symbol, m: Symbol) = {
+ EnclMethodEntry(
+ internalName(eClass),
+ m.javaSimpleName.toString,
+ asmMethodType(m)
+ )
+ }
+
+ var res: EnclMethodEntry = null
+ val sym = clazz.originalEnclosingMethod
+ if (sym.isMethod) {
+ debuglog(s"enclosing method for $clazz is $sym (in ${sym.enclClass})")
+ res = newEEE(sym.enclClass, sym)
+ } else if (clazz.isAnonymousClass) {
+ val enclClass = clazz.rawowner
+ assert(enclClass.isClass, enclClass)
+ val sym = enclClass.primaryConstructor
+ if (sym == NoSymbol) {
+ log(s"Ran out of room looking for an enclosing method for $clazz: no constructor here: $enclClass.")
+ } else {
+ debuglog(s"enclosing method for $clazz is $sym (in $enclClass)")
+ res = newEEE(enclClass, sym)
+ }
+ }
+
+ res
+ }
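+    // Illustrative example: an anonymous class created inside  class Outer { def run() = new Runnable { ... } }
+    // yields roughly EnclMethodEntry("Outer", "run", <method type of run>), matching JVMS 4.7.7.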
+
+ } // end of trait BCClassGen
+
+ /* basic functionality for class file building of plain, mirror, and beaninfo classes. */
+ abstract class JBuilder extends BCInnerClassGen {
+
+ } // end of class JBuilder
+
+ /* functionality for building plain and mirror classes */
+ abstract class JCommonBuilder
+ extends JBuilder
+ with BCAnnotGen
+ with BCForwardersGen
+ with BCPickles { }
+
+ /* builder of mirror classes */
+ class JMirrorBuilder extends JCommonBuilder {
+
+ private var cunit: CompilationUnit = _
+    def getCurrentCUnit(): CompilationUnit = cunit
+
+ /* Generate a mirror class for a top-level module. A mirror class is a class
+ * containing only static methods that forward to the corresponding method
+ * on the MODULE instance of the given Scala object. It will only be
+ * generated if there is no companion class: if there is, an attempt will
+ * instead be made to add the forwarder methods to the companion class.
+ *
+ * must-single-thread
+ */
+ def genMirrorClass(modsym: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = {
+ assert(modsym.companionClass == NoSymbol, modsym)
+ innerClassBufferASM.clear()
+ this.cunit = cunit
+ val moduleName = internalName(modsym) // + "$"
+ val mirrorName = moduleName.substring(0, moduleName.length() - 1)
+
+ val flags = (asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL)
+ val mirrorClass = new asm.tree.ClassNode
+ mirrorClass.visit(
+ classfileVersion,
+ flags,
+ mirrorName,
+ null /* no java-generic-signature */,
+ JAVA_LANG_OBJECT.getInternalName,
+ EMPTY_STRING_ARRAY
+ )
+
+ if (emitSource) {
+ mirrorClass.visitSource("" + cunit.source,
+ null /* SourceDebugExtension */)
+ }
+
+ val ssa = getAnnotPickle(mirrorName, modsym.companionSymbol)
+ mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+ emitAnnotations(mirrorClass, modsym.annotations ++ ssa)
+
+ addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym)
+
+ innerClassBufferASM ++= trackMemberClasses(modsym, Nil /* TODO what about Late-Closure-Classes */ )
+ addInnerClassesASM(mirrorClass, innerClassBufferASM.toList)
+
+ mirrorClass.visitEnd()
+
+ ("" + modsym.name) // this side-effect is necessary, really.
+
+ mirrorClass
+ }
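+    // Illustrative example: for a top-level  object Main { def main(args: Array[String]): Unit = ... }
+    // with no companion class, the module class Main$ is accompanied by a mirror class Main whose
+    //   public static void main(String[])
+    // forwards to Main$.MODULE$.main (added via addForwarders above).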
+
+ } // end of class JMirrorBuilder
+
+ /* builder of bean info classes */
+ class JBeanInfoBuilder extends JBuilder {
+
+ /*
+ * Generate a bean info class that describes the given class.
+ *
+ * @author Ross Judson (ross.judson@soletta.com)
+ *
+ * must-single-thread
+ */
+ def genBeanInfoClass(cls: Symbol, cunit: CompilationUnit, fieldSymbols: List[Symbol], methodSymbols: List[Symbol]): asm.tree.ClassNode = {
+
+ def javaSimpleName(s: Symbol): String = { s.javaSimpleName.toString }
+
+ innerClassBufferASM.clear()
+
+ val flags = mkFlags(
+ javaFlags(cls),
+ if (isDeprecated(cls)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val beanInfoName = (internalName(cls) + "BeanInfo")
+ val beanInfoClass = new asm.tree.ClassNode
+ beanInfoClass.visit(
+ classfileVersion,
+ flags,
+ beanInfoName,
+ null, // no java-generic-signature
+ "scala/beans/ScalaBeanInfo",
+ EMPTY_STRING_ARRAY
+ )
+
+ beanInfoClass.visitSource(
+ cunit.source.toString,
+ null /* SourceDebugExtension */
+ )
+
+ var fieldList = List[String]()
+
+ for (f <- fieldSymbols if f.hasGetter;
+ g = f.getter(cls);
+ s = f.setter(cls);
+ if g.isPublic && !(f.name startsWith "$")
+ ) {
+ // inserting $outer breaks the bean
+ fieldList = javaSimpleName(f) :: javaSimpleName(g) :: (if (s != NoSymbol) javaSimpleName(s) else null) :: fieldList
+ }
+
+ val methodList: List[String] =
+ for (m <- methodSymbols
+ if !m.isConstructor &&
+ m.isPublic &&
+ !(m.name startsWith "$") &&
+ !m.isGetter &&
+ !m.isSetter)
+ yield javaSimpleName(m)
+
+ val constructor = beanInfoClass.visitMethod(
+ asm.Opcodes.ACC_PUBLIC,
+ INSTANCE_CONSTRUCTOR_NAME,
+ "()V",
+ null, // no java-generic-signature
+ EMPTY_STRING_ARRAY // no throwable exceptions
+ )
+
+ val stringArrayJType: BType = arrayOf(JAVA_LANG_STRING)
+ val conJType: BType =
+ BType.getMethodType(
+ BType.VOID_TYPE,
+ Array(exemplar(definitions.ClassClass).c, stringArrayJType, stringArrayJType)
+ )
+
+ def push(lst: List[String]) {
+ var fi = 0
+ for (f <- lst) {
+ constructor.visitInsn(asm.Opcodes.DUP)
+ constructor.visitLdcInsn(new java.lang.Integer(fi))
+ if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) }
+ else { constructor.visitLdcInsn(f) }
+ constructor.visitInsn(JAVA_LANG_STRING.getOpcode(asm.Opcodes.IASTORE))
+ fi += 1
+ }
+ }
+
+ constructor.visitCode()
+
+ constructor.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ // push the class
+ constructor.visitLdcInsn(exemplar(cls).c)
+
+ // push the string array of field information
+ constructor.visitLdcInsn(new java.lang.Integer(fieldList.length))
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ push(fieldList)
+
+ // push the string array of method information
+ constructor.visitLdcInsn(new java.lang.Integer(methodList.length))
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ push(methodList)
+
+ // invoke the superclass constructor, which will do the
+ // necessary java reflection and create Method objects.
+ constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor)
+ constructor.visitInsn(asm.Opcodes.RETURN)
+
+ constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ constructor.visitEnd()
+
+ innerClassBufferASM ++= trackMemberClasses(cls, Nil /* TODO what about Late-Closure-Classes */ )
+ addInnerClassesASM(beanInfoClass, innerClassBufferASM.toList)
+
+ beanInfoClass.visitEnd()
+
+ beanInfoClass
+ }
+
+ } // end of class JBeanInfoBuilder
+
+ trait JAndroidBuilder {
+ self: BCInnerClassGen =>
+
+ /* From the reference documentation of the Android SDK:
+ * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
+ * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
+ * which is an object implementing the `Parcelable.Creator` interface.
+ */
+ val androidFieldName = newTermName("CREATOR")
+
+ /*
+ * must-single-thread
+ */
+ def isAndroidParcelableClass(sym: Symbol) =
+ (AndroidParcelableInterface != NoSymbol) &&
+ (sym.parentSymbols contains AndroidParcelableInterface)
+
+ /*
+ * must-single-thread
+ */
+ def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String) {
+ // this tracks the inner class in innerClassBufferASM, if needed.
+ val androidCreatorType = asmClassType(AndroidCreatorClass)
+ val tdesc_creator = androidCreatorType.getDescriptor
+
+ cnode.visitField(
+ PublicStaticFinal,
+ "CREATOR",
+ tdesc_creator,
+ null, // no java-generic-signature
+ null // no initial value
+ ).visitEnd()
+
+ val moduleName = (thisName + "$")
+
+ // GETSTATIC `moduleName`.MODULE$ : `moduleName`;
+ clinit.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ moduleName,
+ strMODULE_INSTANCE_FIELD,
+ "L" + moduleName + ";"
+ )
+
+ // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator;
+ val bt = BType.getMethodType(androidCreatorType, Array.empty[BType])
+ clinit.visitMethodInsn(
+ asm.Opcodes.INVOKEVIRTUAL,
+ moduleName,
+ "CREATOR",
+ bt.getDescriptor
+ )
+
+ // PUTSTATIC `thisName`.CREATOR;
+ clinit.visitFieldInsn(
+ asm.Opcodes.PUTSTATIC,
+ thisName,
+ "CREATOR",
+ tdesc_creator
+ )
+ }
+
+ } // end of trait JAndroidBuilder
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
new file mode 100644
index 0000000000..c3492b79a9
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
@@ -0,0 +1,725 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.annotation.switch
+import scala.collection.{ immutable, mutable }
+import collection.convert.Wrappers.JListWrapper
+
+/*
+ * A high-level facade to the ASM API for bytecode generation.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ * @version 1.0
+ *
+ */
+abstract class BCodeIdiomatic extends BCodeGlue {
+
+ import global._
+
+ val classfileVersion: Int = settings.target.value match {
+ case "jvm-1.5" => asm.Opcodes.V1_5
+ case "jvm-1.6" => asm.Opcodes.V1_6
+ case "jvm-1.7" => asm.Opcodes.V1_7
+ }
+
+ val majorVersion: Int = (classfileVersion & 0xFF)
+ val emitStackMapFrame = (majorVersion >= 50)
+
+ def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
+
+ val extraProc: Int = mkFlags(
+ asm.ClassWriter.COMPUTE_MAXS,
+ if (emitStackMapFrame) asm.ClassWriter.COMPUTE_FRAMES else 0
+ )
+
+ val StringBuilderClassName = "scala/collection/mutable/StringBuilder"
+
+ val CLASS_CONSTRUCTOR_NAME = "<clinit>"
+ val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+
+ val ObjectReference = brefType("java/lang/Object")
+ val AnyRefReference = ObjectReference
+ val objArrayReference = arrayOf(ObjectReference)
+
+ val JAVA_LANG_OBJECT = ObjectReference
+ val JAVA_LANG_STRING = brefType("java/lang/String")
+
+ var StringBuilderReference: BType = null
+
+ val EMPTY_STRING_ARRAY = Array.empty[String]
+ val EMPTY_INT_ARRAY = Array.empty[Int]
+ val EMPTY_LABEL_ARRAY = Array.empty[asm.Label]
+ val EMPTY_BTYPE_ARRAY = Array.empty[BType]
+
+ /* can-multi-thread */
+ final def mkArray(xs: List[BType]): Array[BType] = {
+ if (xs.isEmpty) { return EMPTY_BTYPE_ARRAY }
+ val a = new Array[BType](xs.size); xs.copyToArray(a); a
+ }
+ /* can-multi-thread */
+ final def mkArray(xs: List[String]): Array[String] = {
+ if (xs.isEmpty) { return EMPTY_STRING_ARRAY }
+ val a = new Array[String](xs.size); xs.copyToArray(a); a
+ }
+ /* can-multi-thread */
+ final def mkArray(xs: List[asm.Label]): Array[asm.Label] = {
+ if (xs.isEmpty) { return EMPTY_LABEL_ARRAY }
+ val a = new Array[asm.Label](xs.size); xs.copyToArray(a); a
+ }
+
+ /*
+ * can-multi-thread
+ */
+ final def mkArrayReverse(xs: List[String]): Array[String] = {
+ val len = xs.size
+ if (len == 0) { return EMPTY_STRING_ARRAY }
+ val a = new Array[String](len)
+ var i = len - 1
+ var rest = xs
+ while (!rest.isEmpty) {
+ a(i) = rest.head
+ rest = rest.tail
+ i -= 1
+ }
+ a
+ }
+
+ /*
+ * can-multi-thread
+ */
+ final def mkArrayReverse(xs: List[Int]): Array[Int] = {
+ val len = xs.size
+ if (len == 0) { return EMPTY_INT_ARRAY }
+ val a = new Array[Int](len)
+ var i = len - 1
+ var rest = xs
+ while (!rest.isEmpty) {
+ a(i) = rest.head
+ rest = rest.tail
+ i -= 1
+ }
+ a
+ }
+
+ /*
+ * The type of 1-dimensional arrays of `elem` type.
+ * The invoker is responsible for tracking (if needed) the inner class given by the elem BType.
+ *
+ * must-single-thread
+ */
+ final def arrayOf(elem: BType): BType = {
+ assert(!(elem.isUnitType), s"The element type of an array can't be: $elem")
+ brefType("[" + elem.getDescriptor)
+ }
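+  // For example, arrayOf(INT) denotes descriptor "[I" and arrayOf(ObjectReference) denotes "[Ljava/lang/Object;".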
+
+ /* Just a namespace for utilities that encapsulate MethodVisitor idioms.
+ * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role,
+ * but the methods here allow choosing when to transition from ICode to ASM types
+ * (including not at all, e.g. for performance).
+ */
+ abstract class JCodeMethodN {
+
+ def jmethod: asm.MethodVisitor
+
+ import asm.Opcodes;
+ import icodes.opcodes.{ Static, Dynamic, SuperCall }
+
+ final def emit(opc: Int) { jmethod.visitInsn(opc) }
+
+ /*
+ * can-multi-thread
+ */
+ final def genPrimitiveArithmetic(op: icodes.ArithmeticOp, kind: BType) {
+
+ import icodes.{ ADD, SUB, MUL, DIV, REM, NOT }
+
+ op match {
+
+ case ADD => add(kind)
+ case SUB => sub(kind)
+ case MUL => mul(kind)
+ case DIV => div(kind)
+ case REM => rem(kind)
+
+ case NOT =>
+ if (kind.isIntSizedType) {
+ emit(Opcodes.ICONST_M1)
+ emit(Opcodes.IXOR)
+ } else if (kind == LONG) {
+ jmethod.visitLdcInsn(new java.lang.Long(-1))
+ jmethod.visitInsn(Opcodes.LXOR)
+ } else {
+ abort(s"Impossible to negate an $kind")
+ }
+
+ case _ =>
+ abort(s"Unknown arithmetic primitive $op")
+ }
+
+ } // end of method genPrimitiveArithmetic()
+
+ /*
+ * can-multi-thread
+ */
+ final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType) {
+
+ import scalaPrimitives.{ AND, OR, XOR }
+
+ ((op, kind): @unchecked) match {
+ case (AND, LONG) => emit(Opcodes.LAND)
+ case (AND, INT) => emit(Opcodes.IAND)
+ case (AND, _) =>
+ emit(Opcodes.IAND)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+
+ case (OR, LONG) => emit(Opcodes.LOR)
+ case (OR, INT) => emit(Opcodes.IOR)
+ case (OR, _) =>
+ emit(Opcodes.IOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+
+ case (XOR, LONG) => emit(Opcodes.LXOR)
+ case (XOR, INT) => emit(Opcodes.IXOR)
+ case (XOR, _) =>
+ emit(Opcodes.IXOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+ }
+
+ } // end of method genPrimitiveLogical()
+
+ /*
+ * can-multi-thread
+ */
+ final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType) {
+
+ import scalaPrimitives.{ LSL, ASR, LSR }
+
+ ((op, kind): @unchecked) match {
+ case (LSL, LONG) => emit(Opcodes.LSHL)
+ case (LSL, INT) => emit(Opcodes.ISHL)
+ case (LSL, _) =>
+ emit(Opcodes.ISHL)
+ emitT2T(INT, kind)
+
+ case (ASR, LONG) => emit(Opcodes.LSHR)
+ case (ASR, INT) => emit(Opcodes.ISHR)
+ case (ASR, _) =>
+ emit(Opcodes.ISHR)
+ emitT2T(INT, kind)
+
+ case (LSR, LONG) => emit(Opcodes.LUSHR)
+ case (LSR, INT) => emit(Opcodes.IUSHR)
+ case (LSR, _) =>
+ emit(Opcodes.IUSHR)
+ emitT2T(INT, kind)
+ }
+
+ } // end of method genPrimitiveShift()
+
+ /*
+ * can-multi-thread
+ */
+ final def genStartConcat {
+ jmethod.visitTypeInsn(Opcodes.NEW, StringBuilderClassName)
+ jmethod.visitInsn(Opcodes.DUP)
+ invokespecial(
+ StringBuilderClassName,
+ INSTANCE_CONSTRUCTOR_NAME,
+ "()V"
+ )
+ }
+
+ /*
+ * can-multi-thread
+ */
+ final def genStringConcat(el: BType) {
+
+ val jtype =
+ if (el.isArray || el.hasObjectSort) JAVA_LANG_OBJECT
+ else el;
+
+ val bt = BType.getMethodType(StringBuilderReference, Array(jtype))
+
+ invokevirtual(StringBuilderClassName, "append", bt.getDescriptor)
+ }
+
+ /*
+ * can-multi-thread
+ */
+ final def genEndConcat {
+ invokevirtual(StringBuilderClassName, "toString", "()Ljava/lang/String;")
+ }
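+    // Sketch of a typical use by a caller (illustrative): lowering  "x = " + i  (with i: Int) amounts to
+    //   genStartConcat; <push "x = ">; genStringConcat(JAVA_LANG_STRING); <load i>; genStringConcat(INT); genEndConcat
+    // where reference elements go through append(Object) and primitives keep their dedicated append overload.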
+
+ /*
+ * Emits one or more conversion instructions based on the types given as arguments.
+ *
+ * @param from The type of the value to be converted into another type.
+ * @param to The type the value will be converted into.
+ *
+ * can-multi-thread
+ */
+ final def emitT2T(from: BType, to: BType) {
+
+ assert(
+ from.isNonUnitValueType && to.isNonUnitValueType,
+ s"Cannot emit primitive conversion from $from to $to"
+ )
+
+ def pickOne(opcs: Array[Int]) { // TODO index on to.sort
+ val chosen = (to: @unchecked) match {
+ case BYTE => opcs(0)
+ case SHORT => opcs(1)
+ case CHAR => opcs(2)
+ case INT => opcs(3)
+ case LONG => opcs(4)
+ case FLOAT => opcs(5)
+ case DOUBLE => opcs(6)
+ }
+ if (chosen != -1) { emit(chosen) }
+ }
+
+ if (from == to) { return }
+ // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
+ assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
+
+ // We're done with BOOL already
+ (from.sort: @switch) match {
+
+ // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+
+ case asm.Type.BYTE => pickOne(JCodeMethodN.fromByteT2T)
+ case asm.Type.SHORT => pickOne(JCodeMethodN.fromShortT2T)
+ case asm.Type.CHAR => pickOne(JCodeMethodN.fromCharT2T)
+ case asm.Type.INT => pickOne(JCodeMethodN.fromIntT2T)
+
+ case asm.Type.FLOAT =>
+ import asm.Opcodes.{ F2L, F2D, F2I }
+ (to.sort: @switch) match {
+ case asm.Type.LONG => emit(F2L)
+ case asm.Type.DOUBLE => emit(F2D)
+ case _ => emit(F2I); emitT2T(INT, to)
+ }
+
+ case asm.Type.LONG =>
+ import asm.Opcodes.{ L2F, L2D, L2I }
+ (to.sort: @switch) match {
+ case asm.Type.FLOAT => emit(L2F)
+ case asm.Type.DOUBLE => emit(L2D)
+ case _ => emit(L2I); emitT2T(INT, to)
+ }
+
+ case asm.Type.DOUBLE =>
+ import asm.Opcodes.{ D2L, D2F, D2I }
+ (to.sort: @switch) match {
+ case asm.Type.FLOAT => emit(D2F)
+ case asm.Type.LONG => emit(D2L)
+ case _ => emit(D2I); emitT2T(INT, to)
+ }
+ }
+ } // end of emitT2T()
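+    // A few concrete cases (illustrative): emitT2T(INT, LONG) emits I2L; emitT2T(FLOAT, SHORT)
+    // emits F2I followed by I2S; emitT2T(BYTE, INT) emits nothing (the value is already int-sized on the stack).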
+
+ // can-multi-thread
+ final def boolconst(b: Boolean) { iconst(if (b) 1 else 0) }
+
+ // can-multi-thread
+ final def iconst(cst: Int) {
+ if (cst >= -1 && cst <= 5) {
+ emit(Opcodes.ICONST_0 + cst)
+ } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) {
+ jmethod.visitIntInsn(Opcodes.BIPUSH, cst)
+ } else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) {
+ jmethod.visitIntInsn(Opcodes.SIPUSH, cst)
+ } else {
+ jmethod.visitLdcInsn(new Integer(cst))
+ }
+ }
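+    // For example: iconst(5) emits ICONST_5, iconst(100) emits BIPUSH 100,
+    // iconst(1000) emits SIPUSH 1000, and iconst(100000) falls back to LDC.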
+
+ // can-multi-thread
+ final def lconst(cst: Long) {
+ if (cst == 0L || cst == 1L) {
+ emit(Opcodes.LCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Long(cst))
+ }
+ }
+
+ // can-multi-thread
+ final def fconst(cst: Float) {
+ val bits: Int = java.lang.Float.floatToIntBits(cst)
+ if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2
+ emit(Opcodes.FCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Float(cst))
+ }
+ }
+
+ // can-multi-thread
+ final def dconst(cst: Double) {
+ val bits: Long = java.lang.Double.doubleToLongBits(cst)
+ if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d
+ emit(Opcodes.DCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Double(cst))
+ }
+ }
+
+ // can-multi-thread
+ final def newarray(elem: BType) {
+ if (elem.isRefOrArrayType || elem.isPhantomType ) {
+ /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which hasObjectSort. */
+ jmethod.visitTypeInsn(Opcodes.ANEWARRAY, elem.getInternalName)
+ } else {
+ val rand = {
+ // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+ (elem.sort: @switch) match {
+ case asm.Type.BOOLEAN => Opcodes.T_BOOLEAN
+ case asm.Type.BYTE => Opcodes.T_BYTE
+ case asm.Type.SHORT => Opcodes.T_SHORT
+ case asm.Type.CHAR => Opcodes.T_CHAR
+ case asm.Type.INT => Opcodes.T_INT
+ case asm.Type.LONG => Opcodes.T_LONG
+ case asm.Type.FLOAT => Opcodes.T_FLOAT
+ case asm.Type.DOUBLE => Opcodes.T_DOUBLE
+ }
+ }
+ jmethod.visitIntInsn(Opcodes.NEWARRAY, rand)
+ }
+ }
+
+
+ final def load( idx: Int, tk: BType) { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread
+ final def store(idx: Int, tk: BType) { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread
+
+ final def aload( tk: BType) { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread
+ final def astore(tk: BType) { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread
+
+ final def neg(tk: BType) { emitPrimitive(JCodeMethodN.negOpcodes, tk) } // can-multi-thread
+ final def add(tk: BType) { emitPrimitive(JCodeMethodN.addOpcodes, tk) } // can-multi-thread
+ final def sub(tk: BType) { emitPrimitive(JCodeMethodN.subOpcodes, tk) } // can-multi-thread
+ final def mul(tk: BType) { emitPrimitive(JCodeMethodN.mulOpcodes, tk) } // can-multi-thread
+ final def div(tk: BType) { emitPrimitive(JCodeMethodN.divOpcodes, tk) } // can-multi-thread
+ final def rem(tk: BType) { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread
+
+ // can-multi-thread
+ final def invokespecial(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc)
+ }
+ // can-multi-thread
+ final def invokestatic(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc)
+ }
+ // can-multi-thread
+ final def invokeinterface(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc)
+ }
+ // can-multi-thread
+ final def invokevirtual(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc)
+ }
+
+ // can-multi-thread
+ final def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
+ // can-multi-thread
+ final def emitIF(cond: icodes.TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
+ // can-multi-thread
+ final def emitIF_ICMP(cond: icodes.TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+ // can-multi-thread
+ final def emitIF_ACMP(cond: icodes.TestOp, label: asm.Label) {
+ assert((cond == icodes.EQ) || (cond == icodes.NE), cond)
+ val opc = (if (cond == icodes.EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
+ jmethod.visitJumpInsn(opc, label)
+ }
+ // can-multi-thread
+ final def emitIFNONNULL(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) }
+ // can-multi-thread
+ final def emitIFNULL (label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNULL, label) }
+
+ // can-multi-thread
+ final def emitRETURN(tk: BType) {
+ if (tk == UNIT) { emit(Opcodes.RETURN) }
+ else { emitTypeBased(JCodeMethodN.returnOpcodes, tk) }
+ }
+
+    /* Emits either a tableswitch or a lookupswitch.
+ *
+ * can-multi-thread
+ */
+ final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double) {
+ assert(keys.length == branches.length)
+
+ // For empty keys, it makes sense emitting LOOKUPSWITCH with defaultBranch only.
+ // Similar to what javac emits for a switch statement consisting only of a default case.
+ if (keys.length == 0) {
+ jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+ return
+ }
+
+ // sort `keys` by increasing key, keeping `branches` in sync. TODO FIXME use quicksort
+ var i = 1
+ while (i < keys.length) {
+ var j = 1
+ while (j <= keys.length - i) {
+ if (keys(j) < keys(j - 1)) {
+ val tmp = keys(j)
+ keys(j) = keys(j - 1)
+ keys(j - 1) = tmp
+ val tmpL = branches(j)
+ branches(j) = branches(j - 1)
+ branches(j - 1) = tmpL
+ }
+ j += 1
+ }
+ i += 1
+ }
+
+ // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011)
+ i = 1
+ while (i < keys.length) {
+ if (keys(i-1) == keys(i)) {
+ abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.")
+ }
+ i += 1
+ }
+
+ val keyMin = keys(0)
+ val keyMax = keys(keys.length - 1)
+
+ val isDenseEnough: Boolean = {
+        /* Calculate in Long to guard against Int overflow: (keyMax - keyMin + 1) can exceed Int.MaxValue when the keys span most of the Int range. */
+ val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
+ val klenD: Double = keys.length
+ val kdensity: Double = (klenD / keyRangeD)
+
+ kdensity >= minDensity
+ }
+
+ if (isDenseEnough) {
+ // use a table in which holes are filled with defaultBranch.
+ val keyRange = (keyMax - keyMin + 1)
+ val newBranches = new Array[asm.Label](keyRange)
+ var oldPos = 0
+ var i = 0
+ while (i < keyRange) {
+ val key = keyMin + i;
+ if (keys(oldPos) == key) {
+ newBranches(i) = branches(oldPos)
+ oldPos += 1
+ } else {
+ newBranches(i) = defaultBranch
+ }
+ i += 1
+ }
+ assert(oldPos == keys.length, "emitSWITCH")
+ jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*)
+ } else {
+ jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+ }
+ }
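+    // Worked density example (illustrative, assuming minDensity = MIN_SWITCH_DENSITY = 0.7):
+    // keys {1, 2, 4, 5} span a range of 5, so density = 4/5 = 0.8 >= 0.7 and a TABLESWITCH is emitted,
+    // with the hole at key 3 bound to defaultBranch; keys {1, 3, 6, 10} span 10, density 0.4, so a
+    // LOOKUPSWITCH is emitted instead.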
+
+ // internal helpers -- not part of the public API of `jcode`
+ // don't make private otherwise inlining will suffer
+
+ // can-multi-thread
+ final def emitVarInsn(opc: Int, idx: Int, tk: BType) {
+ assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc)
+ jmethod.visitVarInsn(tk.getOpcode(opc), idx)
+ }
+
+ // ---------------- array load and store ----------------
+
+ // can-multi-thread
+ final def emitTypeBased(opcs: Array[Int], tk: BType) {
+ assert(tk != UNIT, tk)
+ val opc = {
+ if (tk.isRefOrArrayType) { opcs(0) }
+ else if (tk.isIntSizedType) {
+ (tk: @unchecked) match {
+ case BOOL | BYTE => opcs(1)
+ case SHORT => opcs(2)
+ case CHAR => opcs(3)
+ case INT => opcs(4)
+ }
+ } else {
+ (tk: @unchecked) match {
+ case LONG => opcs(5)
+ case FLOAT => opcs(6)
+ case DOUBLE => opcs(7)
+ }
+ }
+ }
+ emit(opc)
+ }
+
+ // ---------------- primitive operations ----------------
+
+ // can-multi-thread
+ final def emitPrimitive(opcs: Array[Int], tk: BType) {
+ val opc = {
+ // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+ (tk.sort: @switch) match {
+ case asm.Type.LONG => opcs(1)
+ case asm.Type.FLOAT => opcs(2)
+ case asm.Type.DOUBLE => opcs(3)
+ case _ => opcs(0)
+ }
+ }
+ emit(opc)
+ }
+
+ // can-multi-thread
+ final def drop(tk: BType) { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) }
+
+ // can-multi-thread
+ final def dup(tk: BType) { emit(if (tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) }
+
+ // ---------------- type checks and casts ----------------
+
+ // can-multi-thread
+ final def isInstance(tk: BType) {
+ jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.getInternalName)
+ }
+
+ // can-multi-thread
+ final def checkCast(tk: BType) {
+ assert(tk.isRefOrArrayType, s"checkcast on primitive type: $tk")
+ // TODO ICode also requires: but that's too much, right? assert(!isBoxedType(tk), "checkcast on boxed type: " + tk)
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.getInternalName)
+ }
+
+ } // end of class JCodeMethodN
+
+ /* Constant-valued val-members of JCodeMethodN at the companion object, so as to avoid re-initializing them multiple times. */
+ object JCodeMethodN {
+
+ import asm.Opcodes._
+
+ // ---------------- conversions ----------------
+
+ val fromByteT2T = { Array( -1, -1, I2C, -1, I2L, I2F, I2D) } // do nothing for (BYTE -> SHORT) and for (BYTE -> INT)
+ val fromCharT2T = { Array(I2B, I2S, -1, -1, I2L, I2F, I2D) } // for (CHAR -> INT) do nothing
+ val fromShortT2T = { Array(I2B, -1, I2C, -1, I2L, I2F, I2D) } // for (SHORT -> INT) do nothing
+ val fromIntT2T = { Array(I2B, I2S, I2C, -1, I2L, I2F, I2D) }
+
+ // ---------------- array load and store ----------------
+
+ val aloadOpcodes = { Array(AALOAD, BALOAD, SALOAD, CALOAD, IALOAD, LALOAD, FALOAD, DALOAD) }
+ val astoreOpcodes = { Array(AASTORE, BASTORE, SASTORE, CASTORE, IASTORE, LASTORE, FASTORE, DASTORE) }
+ val returnOpcodes = { Array(ARETURN, IRETURN, IRETURN, IRETURN, IRETURN, LRETURN, FRETURN, DRETURN) }
+
+ // ---------------- primitive operations ----------------
+
+ val negOpcodes: Array[Int] = { Array(INEG, LNEG, FNEG, DNEG) }
+ val addOpcodes: Array[Int] = { Array(IADD, LADD, FADD, DADD) }
+ val subOpcodes: Array[Int] = { Array(ISUB, LSUB, FSUB, DSUB) }
+ val mulOpcodes: Array[Int] = { Array(IMUL, LMUL, FMUL, DMUL) }
+ val divOpcodes: Array[Int] = { Array(IDIV, LDIV, FDIV, DDIV) }
+ val remOpcodes: Array[Int] = { Array(IREM, LREM, FREM, DREM) }
+
+ } // end of object JCodeMethodN
+
+ // ---------------- adapted from scalaPrimitives ----------------
+
+ /* Given `code` reports the src TypeKind of the coercion indicated by `code`.
+ * To find the dst TypeKind, `ScalaPrimitives.generatedKind(code)` can be used.
+ *
+ * can-multi-thread
+ */
+ final def coercionFrom(code: Int): BType = {
+ import scalaPrimitives._
+ (code: @switch) match {
+ case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE
+ case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT
+ case C2B | C2S | C2C | C2I | C2L | C2F | C2D => CHAR
+ case I2B | I2S | I2C | I2I | I2L | I2F | I2D => INT
+ case L2B | L2S | L2C | L2I | L2L | L2F | L2D => LONG
+ case F2B | F2S | F2C | F2I | F2L | F2F | F2D => FLOAT
+ case D2B | D2S | D2C | D2I | D2L | D2F | D2D => DOUBLE
+ }
+ }
+
+ /* If code is a coercion primitive, the result type.
+ *
+ * can-multi-thread
+ */
+ final def coercionTo(code: Int): BType = {
+ import scalaPrimitives._
+ (code: @scala.annotation.switch) match {
+ case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE
+ case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR
+ case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT
+ case B2I | C2I | S2I | I2I | L2I | F2I | D2I => INT
+ case B2L | C2L | S2L | I2L | L2L | F2L | D2L => LONG
+ case B2F | C2F | S2F | I2F | L2F | F2F | D2F => FLOAT
+ case B2D | C2D | S2D | I2D | L2D | F2D | D2D => DOUBLE
+ }
+ }
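+  // For instance, for scalaPrimitives.I2L the source kind (coercionFrom) is INT and the target kind (coercionTo) is LONG.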
+
+ final val typeOfArrayOp: Map[Int, BType] = {
+ import scalaPrimitives._
+ Map(
+ (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
+ (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
+ (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++
+ (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++
+ (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++
+ (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++
+ (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++
+ (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
+ (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectReference)) : _*
+ )
+ }
+
+ /*
+ * Collects (in `result`) all LabelDef nodes enclosed (directly or not) by each node it visits.
+ *
+ * In other words, this traverser prepares a map giving
+ * all labelDefs (the entry-value) having a Tree node (the entry-key) as ancestor.
+ * The entry-value for a LabelDef entry-key always contains the entry-key.
+ *
+ */
+ class LabelDefsFinder extends Traverser {
+ val result = mutable.Map.empty[Tree, List[LabelDef]]
+ var acc: List[LabelDef] = Nil
+
+ /*
+ * can-multi-thread
+ */
+ override def traverse(tree: Tree) {
+ val saved = acc
+ acc = Nil
+ super.traverse(tree)
+ // acc contains all LabelDefs found under (but not at) `tree`
+ tree match {
+ case lblDf: LabelDef => acc ::= lblDf
+ case _ => ()
+ }
+ if (acc.isEmpty) {
+ acc = saved
+ } else {
+ result += (tree -> acc)
+ acc = acc ::: saved
+ }
+ }
+ }
+
+ implicit class InsnIterMethodNode(mnode: asm.tree.MethodNode) {
+ @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit) { mnode.instructions.foreachInsn(f) }
+ }
+
+ implicit class InsnIterInsnList(lst: asm.tree.InsnList) {
+
+ @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit) {
+ val insnIter = lst.iterator()
+ while (insnIter.hasNext) {
+ f(insnIter.next())
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
new file mode 100644
index 0000000000..360ce58ecc
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
@@ -0,0 +1,724 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.tools.nsc.symtab._
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ * @version 1.0
+ *
+ */
+abstract class BCodeSkelBuilder extends BCodeHelpers {
+ import global._
+
+ /*
+ * There's a dedicated PlainClassBuilder for each CompilationUnit,
+ * which simplifies the initialization of per-class data structures in `genPlainClass()` which in turn delegates to `initJClass()`
+ *
+ * The entry-point to emitting bytecode instructions is `genDefDef()` where the per-method data structures are initialized,
+ * including `resetMethodBookkeeping()` and `initJMethod()`.
+ * Once that's been done, and assuming the method being visited isn't abstract, `emitNormalMethodBody()` populates
+ * the ASM MethodNode instance with ASM AbstractInsnNodes.
+ *
+ * Given that CleanUp delivers trees that produce values on the stack,
+ * the entry-point to all-things instruction-emit is `genLoad()`.
+   * There, an operation taking N arguments results in recursively emitting instructions to load each of them,
+ * followed by emitting instructions to process those arguments (to be found at run-time on the operand-stack).
+ *
+ * In a few cases the above recipe deserves more details, as provided in the documentation for:
+ * - `genLoadTry()`
+   *    - `genSynchronized()`
+ * - `jumpDest` , `cleanups` , `labelDefsAtOrUnder`
+ */
+ abstract class PlainSkelBuilder(cunit: CompilationUnit)
+ extends BCClassGen
+ with BCAnnotGen
+ with BCInnerClassGen
+ with JAndroidBuilder
+ with BCForwardersGen
+ with BCPickles
+ with BCJGenSigGen {
+
+    // The JVM caps a method at 255 parameter slots (JVMS 4.3.3); one slot is reserved for `this`,
+    // which leaves 254 for declared parameters.
+    final val MaximumJvmParameters = 254
+
+ // current class
+ var cnode: asm.tree.ClassNode = null
+ var thisName: String = null // the internal name of the class being emitted
+
+ var claszSymbol: Symbol = null
+ var isCZParcelable = false
+ var isCZStaticModule = false
+ var isCZRemote = false
+
+ /* ---------------- idiomatic way to ask questions to typer ---------------- */
+
+ def paramTKs(app: Apply): List[BType] = {
+ val Apply(fun, _) = app
+ val funSym = fun.symbol
+ (funSym.info.paramTypes map toTypeKind) // this tracks mentioned inner classes (in innerClassBufferASM)
+ }
+
+ def symInfoTK(sym: Symbol): BType = {
+ toTypeKind(sym.info) // this tracks mentioned inner classes (in innerClassBufferASM)
+ }
+
+ def tpeTK(tree: Tree): BType = { toTypeKind(tree.tpe) }
+
+ def log(msg: => AnyRef) {
+ global synchronized { global.log(msg) }
+ }
+
+ override def getCurrentCUnit(): CompilationUnit = { cunit }
+
+    /* ---------------- helper utils for generating classes and fields ---------------- */
+
+ def genPlainClass(cd: ClassDef) {
+      assert(cnode == null, "GenBCode detected nested classes.")
+ innerClassBufferASM.clear()
+
+ claszSymbol = cd.symbol
+ isCZParcelable = isAndroidParcelableClass(claszSymbol)
+ isCZStaticModule = isStaticModule(claszSymbol)
+ isCZRemote = isRemote(claszSymbol)
+ thisName = internalName(claszSymbol)
+
+ cnode = new asm.tree.ClassNode()
+
+ initJClass(cnode)
+
+ val hasStaticCtor = methodSymbols(cd) exists (_.isStaticConstructor)
+ if (!hasStaticCtor) {
+ // but needs one ...
+ if (isCZStaticModule || isCZParcelable) {
+ fabricateStaticInit()
+ }
+ }
+
+ val optSerial: Option[Long] = serialVUID(claszSymbol)
+ if (optSerial.isDefined) { addSerialVUID(optSerial.get, cnode)}
+
+ addClassFields()
+
+ innerClassBufferASM ++= trackMemberClasses(claszSymbol, Nil)
+
+ gen(cd.impl)
+
+ assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().")
+
+ } // end of method genPlainClass()
+
+ /*
+ * must-single-thread
+ */
+ private def initJClass(jclass: asm.ClassVisitor) {
+
+ val ps = claszSymbol.info.parents
+ val superClass: String = if (ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else internalName(ps.head.typeSymbol);
+ val ifaces: Array[String] = {
+ val arrIfacesTr: Array[Tracked] = exemplar(claszSymbol).ifaces
+ val arrIfaces = new Array[String](arrIfacesTr.length)
+ var i = 0
+ while (i < arrIfacesTr.length) {
+ val ifaceTr = arrIfacesTr(i)
+ val bt = ifaceTr.c
+ if (ifaceTr.isInnerClass) { innerClassBufferASM += bt }
+ arrIfaces(i) = bt.getInternalName
+ i += 1
+ }
+ arrIfaces
+ }
+ // `internalName()` tracks inner classes.
+
+ val flags = mkFlags(
+ javaFlags(claszSymbol),
+ if (isDeprecated(claszSymbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner)
+ cnode.visit(classfileVersion, flags,
+ thisName, thisSignature,
+ superClass, ifaces)
+
+ if (emitSource) {
+ cnode.visitSource(cunit.source.toString, null /* SourceDebugExtension */)
+ }
+
+ val enclM = getEnclosingMethodAttribute(claszSymbol)
+ if (enclM != null) {
+ val EnclMethodEntry(className, methodName, methodType) = enclM
+ cnode.visitOuterClass(className, methodName, methodType.getDescriptor)
+ }
+
+ val ssa = getAnnotPickle(thisName, claszSymbol)
+ cnode.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+ emitAnnotations(cnode, claszSymbol.annotations ++ ssa)
+
+ if (isCZStaticModule || isCZParcelable) {
+
+ if (isCZStaticModule) { addModuleInstanceField() }
+
+ } else {
+
+ val skipStaticForwarders = (claszSymbol.isInterface || settings.noForwarders)
+ if (!skipStaticForwarders) {
+ val lmoc = claszSymbol.companionModule
+ // add static forwarders if there are no name conflicts; see bugs #363 and #1735
+ if (lmoc != NoSymbol) {
+ // it must be a top level class (name contains no $s)
+ val isCandidateForForwarders = {
+ exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+ }
+ if (isCandidateForForwarders) {
+ log(s"Adding static forwarders from '$claszSymbol' to implementations in '$lmoc'")
+ addForwarders(isRemote(claszSymbol), cnode, thisName, lmoc.moduleClass)
+ }
+ }
+ }
+
+ }
+
+ // the invoker is responsible for adding a class-static constructor.
+
+ } // end of method initJClass
+
+ /*
+ * can-multi-thread
+ */
+ private def addModuleInstanceField() {
+ val fv =
+ cnode.visitField(PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ strMODULE_INSTANCE_FIELD,
+ "L" + thisName + ";",
+ null, // no java-generic-signature
+ null // no initial value
+ )
+
+ fv.visitEnd()
+ }
+
+ /*
+ * must-single-thread
+ */
+ private def fabricateStaticInit() {
+
+ val clinit: asm.MethodVisitor = cnode.visitMethod(
+ PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ CLASS_CONSTRUCTOR_NAME,
+ "()V",
+ null, // no java-generic-signature
+ null // no throwable exceptions
+ )
+ clinit.visitCode()
+
+ /* "legacy static initialization" */
+ if (isCZStaticModule) {
+ clinit.visitTypeInsn(asm.Opcodes.NEW, thisName)
+ clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL,
+ thisName, INSTANCE_CONSTRUCTOR_NAME, "()V")
+ }
+ if (isCZParcelable) { legacyAddCreatorCode(clinit, cnode, thisName) }
+ clinit.visitInsn(asm.Opcodes.RETURN)
+
+ clinit.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ clinit.visitEnd()
+ }
+
+ def addClassFields() {
+ /* Non-method term members are fields, except for module members. Module
+ * members can only happen on .NET (no flatten) for inner traits. There,
+ * a module symbol is generated (transformInfo in mixin) which is used
+ * as owner for the members of the implementation class (so that the
+ * backend emits them as static).
+ * No code is needed for this module symbol.
+ */
+ for (f <- fieldSymbols(claszSymbol)) {
+ val javagensig = getGenericSignature(f, claszSymbol)
+ val flags = mkFlags(
+ javaFieldFlags(f),
+ if (isDeprecated(f)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val jfield = new asm.tree.FieldNode(
+ flags,
+ f.javaSimpleName.toString,
+ symInfoTK(f).getDescriptor,
+ javagensig,
+ null // no initial value
+ )
+ cnode.fields.add(jfield)
+ emitAnnotations(jfield, f.annotations)
+ }
+
+ } // end of method addClassFields()
+
+ // current method
+ var mnode: asm.tree.MethodNode = null
+ var jMethodName: String = null
+ var isMethSymStaticCtor = false
+ var returnType: BType = null
+ var methSymbol: Symbol = null
+ // in GenASM this is local to genCode(), ie should get false whenever a new method is emitted (including fabricated ones eg addStaticInit())
+ var isModuleInitialized = false
+ // used by genLoadTry() and genSynchronized()
+ var earlyReturnVar: Symbol = null
+ var shouldEmitCleanup = false
+ var insideCleanupBlock = false
+ // line numbers
+ var lastEmittedLineNr = -1
+
+ object bc extends JCodeMethodN {
+ override def jmethod = PlainSkelBuilder.this.mnode
+ }
+
+ /* ---------------- Part 1 of program points, ie Labels in the ASM world ---------------- */
+
+ /*
+ * A jump is represented as an Apply node whose symbol denotes a LabelDef, the target of the jump.
+ * The `jumpDest` map is used to:
+ * (a) find the asm.Label for the target, given an Apply node's symbol;
+ * (b) anchor an asm.Label in the instruction stream, given a LabelDef node.
+ * In other words, (a) is necessary when visiting a jump-source, and (b) when visiting a jump-target.
+ * A related map is `labelDef`: it has the same keys as `jumpDest` but its values are LabelDef nodes not asm.Labels.
+ *
+ */
+ var jumpDest: immutable.Map[ /* LabelDef */ Symbol, asm.Label ] = null
+ def programPoint(labelSym: Symbol): asm.Label = {
+ assert(labelSym.isLabel, s"trying to map a non-label symbol to an asm.Label, at: ${labelSym.pos}")
+ jumpDest.getOrElse(labelSym, {
+ val pp = new asm.Label
+ jumpDest += (labelSym -> pp)
+ pp
+ })
+ }
+
+ /*
+ * A program point may be lexically nested (at some depth)
+ * (a) in the try-clause of a try-with-finally expression
+ * (b) in a synchronized block.
+ * Each of the constructs above establishes a "cleanup block" to execute upon
+ * both normal-exit, early-return, and abrupt-termination of the instructions it encloses.
+ *
+ * The `cleanups` LIFO queue represents the nesting of active (for the current program point)
+ * pending cleanups. For each such cleanup an asm.Label indicates the start of its cleanup-block.
+ * At any given time during traversal of the method body,
+ * the head of `cleanups` denotes the cleanup-block for the closest enclosing try-with-finally or synchronized-expression.
+ *
+ * `cleanups` is used:
+ *
+ * (1) upon visiting a Return statement.
+ * In case of pending cleanups, we can't just emit a RETURN instruction, but must instead:
+ * - store the result (if any) in `earlyReturnVar`, and
+ * - jump to the next pending cleanup.
+ * See `genReturn()`
+ *
+ * (2) upon emitting a try-with-finally or a synchronized-expr,
+ * In these cases, the targets of the above jumps are emitted,
+ * provided an early exit was actually encountered somewhere in the protected clauses.
+ * See `genLoadTry()` and `genSynchronized()`
+ *
+ * The code thus emitted for jumps and targets covers the early-return case.
+ * The case of abrupt (ie exceptional) termination is covered by exception handlers
+ * emitted for that purpose as described in `genLoadTry()` and `genSynchronized()`.
+ */
+ var cleanups: List[asm.Label] = Nil
+ def registerCleanup(finCleanup: asm.Label) {
+ if (finCleanup != null) { cleanups = finCleanup :: cleanups }
+ }
+ def unregisterCleanup(finCleanup: asm.Label) {
+ if (finCleanup != null) {
+ assert(cleanups.head eq finCleanup,
+ s"Bad nesting of cleanup operations: $cleanups trying to unregister: $finCleanup")
+ cleanups = cleanups.tail
+ }
+ }
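+    // Roughly, for  try { try { return 1 } finally { A } } finally { B } , while emitting the inner
+    // `return` the pending cleanups are List(labelA, labelB): the result is stashed in earlyReturnVar
+    // and control jumps to labelA, whose cleanup code in turn proceeds to labelB (see `genReturn()`).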
+
+ /* ---------------- local variables and params ---------------- */
+
+ case class Local(tk: BType, name: String, idx: Int, isSynth: Boolean)
+
+ /*
+ * Bookkeeping for method-local vars and method-params.
+ */
+ object locals {
+
+ private val slots = mutable.Map.empty[Symbol, Local] // (local-or-param-sym -> Local(BType, name, idx, isSynth))
+
+ private var nxtIdx = -1 // next available index for local-var
+
+ def reset(isStaticMethod: Boolean) {
+ slots.clear()
+ nxtIdx = if (isStaticMethod) 0 else 1
+ }
+
+ def contains(locSym: Symbol): Boolean = { slots.contains(locSym) }
+
+ def apply(locSym: Symbol): Local = { slots.apply(locSym) }
+
+ /* Make a fresh local variable, ensuring a unique name.
+ * The invoker must make sure inner classes are tracked for the sym's tpe.
+ */
+ def makeLocal(tk: BType, name: String): Symbol = {
+ val locSym = methSymbol.newVariable(cunit.freshTermName(name), NoPosition, Flags.SYNTHETIC) // setInfo tpe
+ makeLocal(locSym, tk)
+ locSym
+ }
+
+ def makeLocal(locSym: Symbol): Local = {
+ makeLocal(locSym, symInfoTK(locSym))
+ }
+
+ def getOrMakeLocal(locSym: Symbol): Local = {
+        // `getOrElse` below has the same effect as `getOrElseUpdate` because `makeLocal()` adds an entry to the `slots` map.
+ slots.getOrElse(locSym, makeLocal(locSym))
+ }
+
+ private def makeLocal(sym: Symbol, tk: BType): Local = {
+ assert(!slots.contains(sym), "attempt to create duplicate local var.")
+ assert(nxtIdx != -1, "not a valid start index")
+ val loc = Local(tk, sym.javaSimpleName.toString, nxtIdx, sym.isSynthetic)
+ slots += (sym -> loc)
+ assert(tk.getSize > 0, "makeLocal called for a symbol whose type is Unit.")
+ nxtIdx += tk.getSize
+ loc
+ }
+
+ // not to be confused with `fieldStore` and `fieldLoad` which also take a symbol but a field-symbol.
+ def store(locSym: Symbol) {
+ val Local(tk, _, idx, _) = slots(locSym)
+ bc.store(idx, tk)
+ }
+
+ def load(locSym: Symbol) {
+ val Local(tk, _, idx, _) = slots(locSym)
+ bc.load(idx, tk)
+ }
+
+ }
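+    // Slot layout example (illustrative): in an instance method  def f(i: Int, d: Double, s: String) ,
+    // index 0 holds `this`, i gets index 1, d gets index 2 (two slots, being a wide type), and s gets index 4.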
+
+ /* ---------------- Part 2 of program points, ie Labels in the ASM world ---------------- */
+
+ /*
+ * The semantics of try-with-finally and synchronized-expr require their cleanup code
+ * to be present in three forms in the emitted bytecode:
+ * (a) as normal-exit code, reached via fall-through from the last program point being protected,
+ * (b) as code reached upon early-return from an enclosed return statement.
+ * The only difference between (a) and (b) is their next program-point:
+ * the former must continue with fall-through while
+ * the latter must continue to the next early-return cleanup (if any, otherwise return from the method).
+ * Otherwise they are identical.
+ * (c) as exception-handler, reached via exceptional control flow,
+ * which rethrows the caught exception once it's done with the cleanup code.
+ *
+ * A particular cleanup may in general contain LabelDefs. Care is needed when duplicating such jump-targets,
+     * so as to preserve agreement with the (also duplicated) jump-sources.
+ * This is achieved based on the bookkeeping provided by two maps:
+ * - `labelDefsAtOrUnder` lists all LabelDefs enclosed by a given Tree node (the key)
+ * - `labelDef` provides the LabelDef node whose symbol is used as key.
+ * As a sidenote, a related map is `jumpDest`: it has the same keys as `labelDef` but its values are asm.Labels not LabelDef nodes.
+ *
+ * Details in `emitFinalizer()`, which is invoked from `genLoadTry()` and `genSynchronized()`.
+ */
+ var labelDefsAtOrUnder: scala.collection.Map[Tree, List[LabelDef]] = null
+    var labelDef: scala.collection.Map[Symbol, LabelDef] = null // (LabelDef-sym -> LabelDef)
+
+ // bookkeeping the scopes of non-synthetic local vars, to emit debug info (`emitVars`).
+ var varsInScope: List[Tuple2[Symbol, asm.Label]] = null // (local-var-sym -> start-of-scope)
+
+ // helpers around program-points.
+ def lastInsn: asm.tree.AbstractInsnNode = {
+ mnode.instructions.getLast
+ }
+ def currProgramPoint(): asm.Label = {
+ lastInsn match {
+ case labnode: asm.tree.LabelNode => labnode.getLabel
+ case _ =>
+ val pp = new asm.Label
+ mnode visitLabel pp
+ pp
+ }
+ }
+ def markProgramPoint(lbl: asm.Label) {
+ val skip = (lbl == null) || isAtProgramPoint(lbl)
+ if (!skip) { mnode visitLabel lbl }
+ }
+ def isAtProgramPoint(lbl: asm.Label): Boolean = {
+ (lastInsn match { case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); case _ => false } )
+ }
+ def lineNumber(tree: Tree) {
+ if (!emitLines || !tree.pos.isDefined) return;
+ val nr = tree.pos.finalPosition.line
+ if (nr != lastEmittedLineNr) {
+ lastEmittedLineNr = nr
+ lastInsn match {
+ case lnn: asm.tree.LineNumberNode =>
+ // overwrite previous landmark as no instructions have been emitted for it
+ lnn.line = nr
+ case _ =>
+ mnode.visitLineNumber(nr, currProgramPoint())
+ }
+ }
+ }
+
+ // on entering a method
+ def resetMethodBookkeeping(dd: DefDef) {
+ locals.reset(isStaticMethod = methSymbol.isStaticMember)
+ jumpDest = immutable.Map.empty[ /* LabelDef */ Symbol, asm.Label ]
+ // populate labelDefsAtOrUnder
+ val ldf = new LabelDefsFinder
+ ldf.traverse(dd.rhs)
+ labelDefsAtOrUnder = ldf.result.withDefaultValue(Nil)
+ labelDef = labelDefsAtOrUnder(dd.rhs).map(ld => (ld.symbol -> ld)).toMap
+ // check previous invocation of genDefDef exited as many varsInScope as it entered.
+ assert(varsInScope == null, "Unbalanced entering/exiting of GenBCode's genBlock().")
+ // check previous invocation of genDefDef unregistered as many cleanups as it registered.
+ assert(cleanups == Nil, "Previous invocation of genDefDef didn't unregister as many cleanups as it registered.")
+ isModuleInitialized = false
+ earlyReturnVar = null
+ shouldEmitCleanup = false
+
+ lastEmittedLineNr = -1
+ }
+
+ /* ---------------- top-down traversal invoking ASM Tree API along the way ---------------- */
+
+ def gen(tree: Tree) {
+ tree match {
+ case EmptyTree => ()
+
+ case _: ModuleDef => abort(s"Modules should have been eliminated by refchecks: $tree")
+
+ case ValDef(mods, name, tpt, rhs) => () // fields are added in `genPlainClass()`, via `addClassFields()`
+
+ case dd : DefDef => genDefDef(dd)
+
+ case Template(_, _, body) => body foreach gen
+
+ case _ => abort(s"Illegal tree in gen: $tree")
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def initJMethod(flags: Int, paramAnnotations: List[List[AnnotationInfo]]) {
+
+ val jgensig = getGenericSignature(methSymbol, claszSymbol)
+ addRemoteExceptionAnnot(isCZRemote, hasPublicBitSet(flags), methSymbol)
+ val (excs, others) = methSymbol.annotations partition (_.symbol == definitions.ThrowsClass)
+ val thrownExceptions: List[String] = getExceptions(excs)
+
+ val bytecodeName =
+ if (isMethSymStaticCtor) CLASS_CONSTRUCTOR_NAME
+ else jMethodName
+
+ val mdesc = asmMethodType(methSymbol).getDescriptor
+ mnode = cnode.visitMethod(
+ flags,
+ bytecodeName,
+ mdesc,
+ jgensig,
+ mkArray(thrownExceptions)
+ ).asInstanceOf[asm.tree.MethodNode]
+
+ // TODO param names: (m.params map (p => javaName(p.sym)))
+
+ emitAnnotations(mnode, others)
+ emitParamAnnotations(mnode, paramAnnotations)
+
+ } // end of method initJMethod
+
+
+ def genDefDef(dd: DefDef) {
+ // the only method whose implementation is not emitted: getClass()
+ if (definitions.isGetClass(dd.symbol)) { return }
+ assert(mnode == null, "GenBCode detected nested method.")
+
+ methSymbol = dd.symbol
+ jMethodName = methSymbol.javaSimpleName.toString
+ returnType = asmMethodType(dd.symbol).getReturnType
+ isMethSymStaticCtor = methSymbol.isStaticConstructor
+
+ resetMethodBookkeeping(dd)
+
+ // add method-local vars for params
+ val DefDef(_, _, _, vparamss, _, rhs) = dd
+ assert(vparamss.isEmpty || vparamss.tail.isEmpty, s"Malformed parameter list: $vparamss")
+ val params = if (vparamss.isEmpty) Nil else vparamss.head
+ for (p <- params) { locals.makeLocal(p.symbol) }
+ // debug assert((params.map(p => locals(p.symbol).tk)) == asmMethodType(methSymbol).getArgumentTypes.toList, "debug")
+
+ if (params.size > MaximumJvmParameters) {
+ // SI-7324
+ cunit.error(methSymbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+ return
+ }
+
+ val isNative = methSymbol.hasAnnotation(definitions.NativeAttr)
+ val isAbstractMethod = (methSymbol.isDeferred || methSymbol.owner.isInterface)
+ val flags = mkFlags(
+ javaFlags(methSymbol),
+ if (claszSymbol.isInterface) asm.Opcodes.ACC_ABSTRACT else 0,
+ if (methSymbol.isStrictFP) asm.Opcodes.ACC_STRICT else 0,
+ if (isNative) asm.Opcodes.ACC_NATIVE else 0, // native methods of objects are generated in mirror classes
+ if (isDeprecated(methSymbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ // TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize }
+ initJMethod(flags, params.map(p => p.symbol.annotations))
+
+ /* Add method-local vars for LabelDef-params.
+ *
+ * This makes sure that:
+ * (1) upon visiting any "forward-jumping" Apply (ie visited before its target LabelDef), and after
+ * (2) grabbing the corresponding param symbols,
+ * those param-symbols can be used to access method-local vars.
+ *
+ * When duplicating a finally-contained LabelDef, another program-point is needed for the copy (each such copy has its own asm.Label),
+ * but the same vars (given by the LabelDef's params) can be reused,
+ * because no LabelDef ends up nested within itself after such duplication.
+ */
+ for(ld <- labelDefsAtOrUnder(dd.rhs); ldp <- ld.params; if !locals.contains(ldp.symbol)) {
+ // the tail-calls transform results in symbols shared between method-params and labelDef-params, hence the guard above.
+ locals.makeLocal(ldp.symbol)
+ }
+
+ if (!isAbstractMethod && !isNative) {
+
+ def emitNormalMethodBody() {
+ val veryFirstProgramPoint = currProgramPoint()
+ genLoad(rhs, returnType)
+
+ rhs match {
+ case Block(_, Return(_)) => ()
+ case Return(_) => ()
+ case EmptyTree =>
+ globalError("Concrete method has no definition: " + dd + (
+ if (settings.debug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")"
+ else "")
+ )
+ case _ =>
+ bc emitRETURN returnType
+ }
+ if (emitVars) {
+ // add entries to LocalVariableTable JVM attribute
+ val onePastLastProgramPoint = currProgramPoint()
+ val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0)
+ if (!hasStaticBitSet) {
+ mnode.visitLocalVariable(
+ "this",
+ "L" + thisName + ";",
+ null,
+ veryFirstProgramPoint,
+ onePastLastProgramPoint,
+ 0
+ )
+ }
+ for (p <- params) { emitLocalVarScope(p.symbol, veryFirstProgramPoint, onePastLastProgramPoint, force = true) }
+ }
+
+ if (isMethSymStaticCtor) { appendToStaticCtor(dd) }
+ } // end of emitNormalMethodBody()
+
+ lineNumber(rhs)
+ emitNormalMethodBody()
+
+ // Note we don't invoke visitMaxs, thus there are no FrameNodes among mnode.instructions.
+ // The only non-instruction nodes to be found are LabelNode and LineNumberNode.
+ }
+ mnode = null
+ } // end of method genDefDef()
+
+ /*
+ * must-single-thread
+ *
+ * TODO document, explain interplay with `fabricateStaticInit()`
+ */
+ private def appendToStaticCtor(dd: DefDef) {
+
+ def insertBefore(
+ location: asm.tree.AbstractInsnNode,
+ i0: asm.tree.AbstractInsnNode,
+ i1: asm.tree.AbstractInsnNode) {
+ if (i0 != null) {
+ mnode.instructions.insertBefore(location, i0.clone(null))
+ mnode.instructions.insertBefore(location, i1.clone(null))
+ }
+ }
+
+ // collect all return instructions
+ var rets: List[asm.tree.AbstractInsnNode] = Nil
+ mnode foreachInsn { i => if (i.getOpcode() == asm.Opcodes.RETURN) { rets ::= i } }
+ if (rets.isEmpty) { return }
+
+ var insnModA: asm.tree.AbstractInsnNode = null
+ var insnModB: asm.tree.AbstractInsnNode = null
+ // call object's private ctor from static ctor
+ if (isCZStaticModule) {
+ // NEW `moduleName`
+ val className = internalName(methSymbol.enclClass)
+ insnModA = new asm.tree.TypeInsnNode(asm.Opcodes.NEW, className)
+ // INVOKESPECIAL <init>
+ val callee = methSymbol.enclClass.primaryConstructor
+ val jname = callee.javaSimpleName.toString
+ val jowner = internalName(callee.owner)
+ val jtype = asmMethodType(callee).getDescriptor
+ insnModB = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESPECIAL, jowner, jname, jtype)
+ }
+
+ var insnParcA: asm.tree.AbstractInsnNode = null
+ var insnParcB: asm.tree.AbstractInsnNode = null
+ // android creator code
+ if (isCZParcelable) {
+ // add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator
+ val andrFieldDescr = asmClassType(AndroidCreatorClass).getDescriptor
+ cnode.visitField(
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL,
+ "CREATOR",
+ andrFieldDescr,
+ null,
+ null
+ )
+ // INVOKESTATIC CREATOR(): android.os.Parcelable$Creator; -- TODO where does this Android method come from?
+ val callee = definitions.getMember(claszSymbol.companionModule, androidFieldName)
+ val jowner = internalName(callee.owner)
+ val jname = callee.javaSimpleName.toString
+ val jtype = asmMethodType(callee).getDescriptor
+ insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype)
+ // PUTSTATIC `thisName`.CREATOR;
+ insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr)
+ }
+
+ // insert a few instructions for initialization before each return instruction
+ for(r <- rets) {
+ insertBefore(r, insnModA, insnModB)
+ insertBefore(r, insnParcA, insnParcB)
+ }
+
+ }
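+
+ /*
+ * Sketch (illustrative, symbol names hypothetical) of the effect of `appendToStaticCtor()`:
+ * for a static module class `Foo$`, every RETURN in its <clinit> is preceded by
+ *
+ * NEW Foo$
+ * INVOKESPECIAL Foo$.<init>
+ *
+ * and, when the class is Parcelable, additionally by
+ *
+ * INVOKESTATIC <companion>.CREATOR : android.os.Parcelable$Creator
+ * PUTSTATIC <thisName>.CREATOR
+ */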
+
+ def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false) {
+ val Local(tk, name, idx, isSynth) = locals(sym)
+ if (force || !isSynth) {
+ mnode.visitLocalVariable(name, tk.getDescriptor, null, start, end, idx)
+ }
+ }
+
+ def genLoad(tree: Tree, expectedType: BType)
+
+ } // end of class PlainSkelBuilder
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
new file mode 100644
index 0000000000..9ddb7a3ce8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
@@ -0,0 +1,395 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ * @version 1.0
+ *
+ */
+abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
+ import global._
+
+
+ /*
+ * Functionality to lower `synchronized` and `try` expressions.
+ */
+ abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit) {
+
+ def genSynchronized(tree: Apply, expectedType: BType): BType = {
+ val Apply(fun, args) = tree
+ val monitor = locals.makeLocal(ObjectReference, "monitor")
+ val monCleanup = new asm.Label
+
+ // if the synchronized block returns a result, store it in a local variable.
+ // Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks).
+ val hasResult = (expectedType != UNIT)
+ val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult") else null;
+
+ /* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */
+ genLoadQualifier(fun)
+ bc dup ObjectReference
+ locals.store(monitor)
+ emit(asm.Opcodes.MONITORENTER)
+
+ /* ------ (2) Synchronized block.
+ * Reached by fall-through from (1).
+ * Protected by:
+ * (2.a) the EH-version of the monitor-exit, and
+ * (2.b) whatever protects the whole synchronized expression.
+ * ------
+ */
+ val startProtected = currProgramPoint()
+ registerCleanup(monCleanup)
+ genLoad(args.head, expectedType /* toTypeKind(tree.tpe.resultType) */)
+ unregisterCleanup(monCleanup)
+ if (hasResult) { locals.store(monitorResult) }
+ nopIfNeeded(startProtected)
+ val endProtected = currProgramPoint()
+
+ /* ------ (3) monitor-exit after normal, non-early-return, termination of (2).
+ * Reached by fall-through from (2).
+ * Protected by whatever protects the whole synchronized expression.
+ * ------
+ */
+ locals.load(monitor)
+ emit(asm.Opcodes.MONITOREXIT)
+ if (hasResult) { locals.load(monitorResult) }
+ val postHandler = new asm.Label
+ bc goTo postHandler
+
+ /* ------ (4) exception-handler version of monitor-exit code.
+ * Reached upon abrupt termination of (2).
+ * Protected by whatever protects the whole synchronized expression.
+ * ------
+ */
+ protect(startProtected, endProtected, currProgramPoint(), ThrowableReference)
+ locals.load(monitor)
+ emit(asm.Opcodes.MONITOREXIT)
+ emit(asm.Opcodes.ATHROW)
+
+ /* ------ (5) cleanup version of monitor-exit code.
+ * Reached upon early-return from (2).
+ * Protected by whatever protects the whole synchronized expression.
+ * ------
+ */
+ if (shouldEmitCleanup) {
+ markProgramPoint(monCleanup)
+ locals.load(monitor)
+ emit(asm.Opcodes.MONITOREXIT)
+ pendingCleanups()
+ }
+
+ /* ------ (6) normal exit of the synchronized expression.
+ * Reached after normal, non-early-return, termination of (3).
+ * Protected by whatever protects the whole synchronized expression.
+ * ------
+ */
+ mnode visitLabel postHandler
+
+ lineNumber(tree)
+
+ expectedType
+ }
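+
+ /*
+ * Rough layout sketch (following the numbered sections above) of what `genSynchronized()`
+ * emits for `monitorObj.synchronized { body }` (labels are illustrative):
+ *
+ * load monitorObj; DUP; store monitor; MONITORENTER // (1)
+ * start: body; [store monitorResult] // (2), protected by `handler`
+ * load monitor; MONITOREXIT; [load monitorResult]; GOTO post // (3)
+ * handler: load monitor; MONITOREXIT; ATHROW // (4)
+ * [cleanup: load monitor; MONITOREXIT; pendingCleanups()] // (5), only if shouldEmitCleanup
+ * post: ... // (6)
+ */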
+
+ /*
+ * Detects whether no instructions have been emitted since label `lbl` and if so emits a NOP.
+ * Useful to avoid emitting an empty try-block protected by exception handlers,
+ * which would result in "java.lang.ClassFormatError: Illegal exception table range". See SI-6102.
+ */
+ def nopIfNeeded(lbl: asm.Label) {
+ val noInstructionEmitted = isAtProgramPoint(lbl)
+ if (noInstructionEmitted) { emit(asm.Opcodes.NOP) }
+ }
+
+ /*
+ * Emitting try-catch is easy, emitting try-catch-finally not quite so.
+ * A finally-block (which always has type Unit, thus leaving the operand stack unchanged)
+ * affects control-transfer from protected regions, as follows:
+ *
+ * (a) `return` statement:
+ *
+ * First, the value to return (if any) is evaluated.
+ * Afterwards, all enclosing finally-blocks are run, from innermost to outermost.
+ * Only then is the return value (if any) returned.
+ *
+ * Some terminology:
+ * (a.1) Executing a return statement that is protected
+ * by one or more finally-blocks is called "early return"
+ * (a.2) the chain of code sections (a code section for each enclosing finally-block)
+ * to run upon early returns is called "cleanup chain"
+ *
+ * As an additional spin, consider a return statement in a finally-block.
+ * In this case, the value to return depends on how control arrived at that statement:
+ * in case it arrived via a previous return, the previous return enjoys priority:
+ * the value to return is given by that statement.
+ *
+ * (b) A finally-block protects both the try-clause and the catch-clauses.
+ *
+ * Sidenote:
+ * A try-clause may contain an empty block. On CLR, a finally-block has special semantics
+ * regarding Abort interruptions; but on the JVM it's safe to elide an exception-handler
+ * that protects an "empty" range ("empty" as in "containing NOPs only",
+ * see `asm.optimiz.DanglingExcHandlers` and SI-6720).
+ *
+ * This means a finally-block indicates instructions that can be reached:
+ * (b.1) Upon normal (non-early-returning) completion of the try-clause or a catch-clause
+ * In this case, the next-program-point is that following the try-catch-finally expression.
+ * (b.2) Upon early-return initiated in the try-clause or a catch-clause
+ * In this case, the next-program-point is the enclosing cleanup section (if any), otherwise return.
+ * (b.3) Upon abrupt termination (due to unhandled exception) of the try-clause or a catch-clause
+ * In this case, the unhandled exception must be re-thrown after running the finally-block.
+ *
+ * (c) finally-blocks are implicit to `synchronized` (a finally-block is added just to release the lock);
+ * that's why `genSynchronized()` also emits cleanup-sections.
+ *
+ * A number of code patterns can be emitted to realize the intended semantics.
+ *
+ * A popular alternative (GenICode, javac) is to duplicate the cleanup-chain at each early-return position.
+ * The principle at work is that once control is transferred to a cleanup-section,
+ * control will always stay within the cleanup-chain.
+ * That is, unless an exception is thrown from within a cleanup-section, in which case the enclosing try-block
+ * (reached via abrupt termination) takes over.
+ *
+ * The observations above hint at another code layout, less verbose, for the cleanup-chain.
+ *
+ * The code layout that GenBCode emits takes into account that once a cleanup section has been reached,
+ * jumping to the next cleanup-section (and so on, until the outermost one) realizes the correct semantics.
+ *
+ * There is still code duplication in that two cleanup-chains are needed (but this is unavoidable, anyway):
+ * one for normal control flow and another chain consisting of exception handlers.
+ * The in-line comments below refer to them as
+ * - "early-return-cleanups" and
+ * - "exception-handler-version-of-finally-block" respectively.
+ *
+ */
+ def genLoadTry(tree: Try): BType = {
+
+ val Try(block, catches, finalizer) = tree
+ val kind = tpeTK(tree)
+
+ val caseHandlers: List[EHClause] =
+ for (CaseDef(pat, _, caseBody) <- catches) yield {
+ pat match {
+ case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt), caseBody)
+ case Ident(nme.WILDCARD) => NamelessEH(ThrowableReference, caseBody)
+ case Bind(_, _) => BoundEH (pat.symbol, caseBody)
+ }
+ }
+
+ // ------ (0) locals used later ------
+
+ /*
+ * `postHandlers` is a program point denoting:
+ * (a) the finally-clause conceptually reached via fall-through from try-catch-finally
+ * (in case a finally-block is present); or
+ * (b) the program point right after the try-catch
+ * (in case there's no finally-block).
+ * The name choice emphasizes that the code section lies "after all exception handlers",
+ * where "all exception handlers" includes those derived from catch-clauses as well as from finally-blocks.
+ */
+ val postHandlers = new asm.Label
+
+ val hasFinally = (finalizer != EmptyTree)
+
+ /*
+ * used in the finally-clause reached via fall-through from try-catch, if any.
+ */
+ val guardResult = hasFinally && (kind != UNIT) && mayCleanStack(finalizer)
+
+ /*
+ * Note that `tmp` has type tree.tpe, while `earlyReturnVar` has the method's return type.
+ * Because those two types can be different, dedicated vars are needed.
+ */
+ val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp") else null;
+
+ /*
+ * upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause)
+ * AND hasFinally, a cleanup is needed.
+ */
+ val finCleanup = if (hasFinally) new asm.Label else null
+
+ /* ------ (1) try-block, protected by:
+ * (1.a) the EHs due to case-clauses, emitted in (2),
+ * (1.b) the EH due to finally-clause, emitted in (3.A)
+ * (1.c) whatever protects the whole try-catch-finally expression.
+ * ------
+ */
+
+ val startTryBody = currProgramPoint()
+ registerCleanup(finCleanup)
+ genLoad(block, kind)
+ unregisterCleanup(finCleanup)
+ nopIfNeeded(startTryBody)
+ val endTryBody = currProgramPoint()
+ bc goTo postHandlers
+
+ /* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause)
+ * An EH in (2) is reached upon abrupt termination of (1).
+ * An EH in (2) is protected by:
+ * (2.a) the EH-version of the finally-clause, if any.
+ * (2.b) whatever protects the whole try-catch-finally expression.
+ * ------
+ */
+
+ for (ch <- caseHandlers) {
+
+ // (2.a) emit case clause proper
+ val startHandler = currProgramPoint()
+ var endHandler: asm.Label = null
+ var excType: BType = null
+ registerCleanup(finCleanup)
+ ch match {
+ case NamelessEH(typeToDrop, caseBody) =>
+ bc drop typeToDrop
+ genLoad(caseBody, kind) // adapts caseBody to `kind`, thus it can be stored, if `guardResult`, in `tmp`.
+ nopIfNeeded(startHandler)
+ endHandler = currProgramPoint()
+ excType = typeToDrop
+
+ case BoundEH (patSymbol, caseBody) =>
+ // see test/files/run/contrib674.scala: a local-var already exists for patSymbol.
+ // Rather than creating it on first access, we do it right away so that debug-info can be emitted for the created local var.
+ val Local(patTK, _, patIdx, _) = locals.getOrMakeLocal(patSymbol)
+ bc.store(patIdx, patTK)
+ genLoad(caseBody, kind)
+ nopIfNeeded(startHandler)
+ endHandler = currProgramPoint()
+ emitLocalVarScope(patSymbol, startHandler, endHandler)
+ excType = patTK
+ }
+ unregisterCleanup(finCleanup)
+ // (2.b) mark the try-body as protected by this case clause.
+ protect(startTryBody, endTryBody, startHandler, excType)
+ // (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given.
+ bc goTo postHandlers
+
+ }
+
+ /* ------ (3.A) The exception-handler-version of the finally-clause.
+ * Reached upon abrupt termination of (1) or one of the EHs in (2).
+ * Protected only by whatever protects the whole try-catch-finally expression.
+ * ------
+ */
+
+ // a note on terminology: this is not "postHandlers", despite appearances.
+ // "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts.
+ if (hasFinally) {
+ nopIfNeeded(startTryBody)
+ val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception.
+ protect(startTryBody, finalHandler, finalHandler, null)
+ val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(ThrowableReference, "exc"))
+ bc.store(eIdx, eTK)
+ emitFinalizer(finalizer, null, isDuplicate = true)
+ bc.load(eIdx, eTK)
+ emit(asm.Opcodes.ATHROW)
+ }
+
+ /* ------ (3.B) Cleanup-version of the finally-clause.
+ * Reached upon early RETURN from (1) or upon early RETURN from one of the EHs in (2)
+ * (and only from there, ie reached only upon early RETURN from
+ * program regions bracketed by registerCleanup/unregisterCleanup).
+ * Protected only by whatever protects the whole try-catch-finally expression.
+ *
+ * Given that control arrives to a cleanup section only upon early RETURN,
+ * the value to return (if any) is always available. Therefore, a further RETURN
+ * found in a cleanup section is always ignored (a warning is displayed, @see `genReturn()`).
+ * In order for `genReturn()` to know whether the return statement is enclosed in a cleanup section,
+ * the variable `insideCleanupBlock` is used.
+ * ------
+ */
+
+ // this is not "postHandlers" either.
+ // `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause.
+ // In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid.
+ if (hasFinally && shouldEmitCleanup) {
+ val savedInsideCleanup = insideCleanupBlock
+ insideCleanupBlock = true
+ markProgramPoint(finCleanup)
+ // regarding the return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` is inserted.
+ emitFinalizer(finalizer, null, isDuplicate = true)
+ pendingCleanups()
+ insideCleanupBlock = savedInsideCleanup
+ }
+
+ /* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit
+ * Reached upon normal, non-early-return termination of (1) or of an EH in (2).
+ * Protected only by whatever protects the whole try-catch-finally expression.
+ * TODO explain what happens upon RETURN contained in (4)
+ * ------
+ */
+
+ markProgramPoint(postHandlers)
+ if (hasFinally) {
+ emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false`
+ }
+
+ kind
+ } // end of genLoadTry()
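+
+ /*
+ * A source-level sketch (hypothetical example) of the layout produced by `genLoadTry()`:
+ *
+ * def f(cond: Boolean): Int =
+ * try { if (cond) return 1; 2 } // early return from the protected region
+ * finally { println("cleanup") }
+ *
+ * Here the finalizer is emitted three times:
+ * - as the exception-handler version (3.A), which rethrows the caught Throwable,
+ * - as the cleanup version (3.B), reached from the `return 1` path,
+ * - as the finally-clause-for-normal-exit (4), reached by fall-through after `2`.
+ */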
+
+ /* If there are no more pending cleanups, all that remains is to return. Otherwise jump to the next (outer) pending cleanup. */
+ private def pendingCleanups() {
+ cleanups match {
+ case Nil =>
+ if (earlyReturnVar != null) {
+ locals.load(earlyReturnVar)
+ bc.emitRETURN(locals(earlyReturnVar).tk)
+ } else {
+ bc emitRETURN UNIT
+ }
+ shouldEmitCleanup = false
+
+ case nextCleanup :: _ =>
+ bc goTo nextCleanup
+ }
+ }
+
+ def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: BType) {
+ val excInternalName: String =
+ if (excType == null) null
+ else excType.getInternalName
+ assert(start != end, "protecting a range of zero instructions leads to illegal class format. Solution: add a NOP to that range.")
+ mnode.visitTryCatchBlock(start, end, handler, excInternalName)
+ }
+
+ /* `tmp` (if non-null) is the symbol of the local-var used to preserve the result of the try-body, see `guardResult` */
+ def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean) {
+ var saved: immutable.Map[ /* LabelDef */ Symbol, asm.Label ] = null
+ if (isDuplicate) {
+ saved = jumpDest
+ for(ldef <- labelDefsAtOrUnder(finalizer)) {
+ jumpDest -= ldef.symbol
+ }
+ }
+ // when duplicating, the above guarantees new asm.Labels are used for LabelDefs contained in the finalizer (their vars are reused, that's ok)
+ if (tmp != null) { locals.store(tmp) }
+ genLoad(finalizer, UNIT)
+ if (tmp != null) { locals.load(tmp) }
+ if (isDuplicate) {
+ jumpDest = saved
+ }
+ }
+
+ /* Does this tree have a try-catch block? */
+ def mayCleanStack(tree: Tree): Boolean = tree exists { t => t.isInstanceOf[Try] }
+
+ trait EHClause
+ case class NamelessEH(typeToDrop: BType, caseBody: Tree) extends EHClause
+ case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause
+
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
new file mode 100644
index 0000000000..1eca69936a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
@@ -0,0 +1,880 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.collection.{ immutable, mutable }
+
+/*
+ * Utilities to mediate between types as represented in Scala ASTs and ASM trees.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ * @version 1.0
+ *
+ */
+abstract class BCodeTypes extends BCodeIdiomatic {
+
+ import global._
+
+ // when compiling the Scala library, some assertions don't hold (e.g., scala.Boolean has null superClass although it's not an interface)
+ val isCompilingStdLib = !(settings.sourcepath.isDefault)
+
+ val srBoxedUnit = brefType("scala/runtime/BoxedUnit")
+
+ // special names
+ var StringReference : BType = null
+ var ThrowableReference : BType = null
+ var jlCloneableReference : BType = null // java/lang/Cloneable
+ var jlNPEReference : BType = null // java/lang/NullPointerException
+ var jioSerializableReference : BType = null // java/io/Serializable
+ var scalaSerializableReference : BType = null // scala/Serializable
+ var classCastExceptionReference : BType = null // java/lang/ClassCastException
+
+ /* A map from scala primitive type-symbols to BTypes */
+ var primitiveTypeMap: Map[Symbol, BType] = null
+ /* A map from scala type-symbols for Nothing and Null to (runtime version) BTypes */
+ var phantomTypeMap: Map[Symbol, BType] = null
+ /* Maps the method symbol for a box method to the boxed type of the result.
+ * For example, the method symbol for `Byte.box()` is mapped to the BType `Ljava/lang/Byte;`. */
+ var boxResultType: Map[Symbol, BType] = null
+ /* Maps the method symbol for an unbox method to the primitive type of the result.
+ * For example, the method symbol for `Byte.unbox()` is mapped to the BType BYTE. */
+ var unboxResultType: Map[Symbol, BType] = null
+
+ var hashMethodSym: Symbol = null // scala.runtime.ScalaRunTime.hash
+
+ var AndroidParcelableInterface: Symbol = null
+ var AndroidCreatorClass : Symbol = null // this is an inner class, use asmType() to get hold of its BType while tracking in innerClassBufferASM
+
+ var BeanInfoAttr: Symbol = null
+
+ /* The Object => String overload. */
+ var String_valueOf: Symbol = null
+
+ var ArrayInterfaces: Set[Tracked] = null
+
+ // scala.FunctionX and scala.runtime.AbstractFunctionX
+ val FunctionReference = new Array[Tracked](definitions.MaxFunctionArity + 1)
+ val AbstractFunctionReference = new Array[Tracked](definitions.MaxFunctionArity + 1)
+ val abstractFunctionArityMap = mutable.Map.empty[BType, Int]
+
+ var PartialFunctionReference: BType = null // scala.PartialFunction
+ var AbstractPartialFunctionReference: BType = null // scala.runtime.AbstractPartialFunction
+
+ var BoxesRunTime: BType = null
+
+ /*
+ * must-single-thread
+ */
+ def initBCodeTypes() {
+ import definitions._
+
+ primitiveTypeMap =
+ Map(
+ UnitClass -> UNIT,
+ BooleanClass -> BOOL,
+ CharClass -> CHAR,
+ ByteClass -> BYTE,
+ ShortClass -> SHORT,
+ IntClass -> INT,
+ LongClass -> LONG,
+ FloatClass -> FLOAT,
+ DoubleClass -> DOUBLE
+ )
+
+ phantomTypeMap =
+ Map(
+ NothingClass -> RT_NOTHING,
+ NullClass -> RT_NULL,
+ RuntimeNothingClass -> RT_NOTHING, // we map on purpose to RT_NOTHING, getting rid of the distinction compile-time vs. runtime for NothingClass.
+ RuntimeNullClass -> RT_NULL // ditto, for NullClass.
+ )
+
+ boxResultType =
+ for((csym, msym) <- currentRun.runDefinitions.boxMethod)
+ yield (msym -> classLiteral(primitiveTypeMap(csym)))
+
+ unboxResultType =
+ for((csym, msym) <- currentRun.runDefinitions.unboxMethod)
+ yield (msym -> primitiveTypeMap(csym))
+
+ // boxed classes are looked up in the `exemplars` map by jvmWiseLUB().
+ // Other than that, they aren't needed there (e.g., `isSubtypeOf()` special-cases boxed classes, similarly for others).
+ val boxedClasses = List(BoxedBooleanClass, BoxedCharacterClass, BoxedByteClass, BoxedShortClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass)
+ for(csym <- boxedClasses) {
+ val key = brefType(csym.javaBinaryName.toTypeName)
+ val tr = buildExemplar(key, csym)
+ symExemplars.put(csym, tr)
+ exemplars.put(tr.c, tr)
+ }
+
+ // reversePrimitiveMap = (primitiveTypeMap map { case (s, pt) => (s.tpe, pt) } map (_.swap)).toMap
+
+ hashMethodSym = getMember(ScalaRunTimeModule, nme.hash_)
+
+ // TODO avoid going through missingHook for every line in the REPL: https://github.com/scala/scala/commit/8d962ed4ddd310cc784121c426a2e3f56a112540
+ AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
+ AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
+
+ // the following couldn't be eager vals in Phase constructors:
+ // that might cause cycles before Global has finished initialization.
+ BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
+
+ String_valueOf = {
+ getMember(StringModule, nme.valueOf) filter (sym =>
+ sym.info.paramTypes match {
+ case List(pt) => pt.typeSymbol == ObjectClass
+ case _ => false
+ }
+ )
+ }
+
+ exemplar(JavaCloneableClass)
+ exemplar(JavaSerializableClass)
+ exemplar(SerializableClass)
+
+ StringReference = exemplar(StringClass).c
+ StringBuilderReference = exemplar(StringBuilderClass).c
+ ThrowableReference = exemplar(ThrowableClass).c
+ jlCloneableReference = exemplar(JavaCloneableClass).c
+ jlNPEReference = exemplar(NullPointerExceptionClass).c
+ jioSerializableReference = exemplar(JavaSerializableClass).c
+ scalaSerializableReference = exemplar(SerializableClass).c
+ classCastExceptionReference = exemplar(ClassCastExceptionClass).c
+
+ /*
+ * The bytecode emitter special-cases String concatenation, in that three methods of `JCodeMethodN`
+ * ( `genStartConcat()` , `genStringConcat()` , and `genEndConcat()` )
+ * don't obtain the method descriptor of the callee via `asmMethodType()` (as normally done)
+ * but directly emit callsites on StringBuilder using literal constants for method descriptors.
+ * In order to make sure those method descriptors are available as BTypes, they are initialized here.
+ */
+ BType.getMethodType("()V") // necessary for JCodeMethodN.genStartConcat
+ BType.getMethodType("()Ljava/lang/String;") // necessary for JCodeMethodN.genEndConcat
+
+ PartialFunctionReference = exemplar(PartialFunctionClass).c
+ AbstractPartialFunctionReference = exemplar(AbstractPartialFunctionClass).c
+ for(idx <- 0 to definitions.MaxFunctionArity) {
+ FunctionReference(idx) = exemplar(FunctionClass(idx))
+ AbstractFunctionReference(idx) = exemplar(AbstractFunctionClass(idx))
+ abstractFunctionArityMap += (AbstractFunctionReference(idx).c -> idx)
+ }
+
+ // later, a few analyses (e.g. refreshInnerClasses) will look up BTypes based on descriptors in instructions;
+ // we make sure those BTypes can be found via lookup rather than being created on the fly.
+ BoxesRunTime = brefType("scala/runtime/BoxesRunTime")
+ asmBoxTo.values foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) }
+ asmUnboxTo.values foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) }
+
+ }
+
+ /*
+ * must-single-thread
+ */
+ def clearBCodeTypes() {
+ symExemplars.clear()
+ exemplars.clear()
+ }
+
+ val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
+ val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
+
+ val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString
+
+ // ------------------------------------------------
+ // accessory maps tracking the isInterface, innerClasses, superClass, and supportedInterfaces relations,
+ // allowing answering `conforms()` without resorting to typer.
+ // ------------------------------------------------
+
+ val exemplars = new java.util.concurrent.ConcurrentHashMap[BType, Tracked]
+ val symExemplars = new java.util.concurrent.ConcurrentHashMap[Symbol, Tracked]
+
+ /*
+ * Typically, a question about a BType can be answered only by using the BType as lookup key in one or more maps.
+ * A `Tracked` object saves time by holding together information required to answer those questions:
+ *
+ * - `sc` denotes the bytecode-level superclass if any, null otherwise
+ *
+ * - `ifaces` denotes the interfaces explicitly declared.
+ * Not included are those transitively supported, but the utility method `allLeafIfaces()` can be used for that.
+ *
+ * - `innersChain` denotes the containing classes for a non-package-level class `c`, null otherwise.
+ * Note: the optimizer may inline anonymous closures, thus eliding those inner classes
+ * (no physical class file is emitted for elided classes).
+ * Before committing `innersChain` to bytecode, cross-check with the list of elided classes (SI-6546).
+ *
+ * All methods of this class can-multi-thread
+ */
+ case class Tracked(c: BType, flags: Int, sc: Tracked, ifaces: Array[Tracked], innersChain: Array[InnerClassEntry]) {
+
+ // not a case-field because we initialize it only for JVM classes we emit.
+ private var _directMemberClasses: List[BType] = null
+
+ def directMemberClasses: List[BType] = {
+ assert(_directMemberClasses != null, s"getter directMemberClasses() invoked too early for $c")
+ _directMemberClasses
+ }
+
+ def directMemberClasses_=(bs: List[BType]) {
+ if (_directMemberClasses != null) {
+ // TODO we enter here when both mirror class and plain class are emitted for the same ModuleClassSymbol.
+ assert(_directMemberClasses == bs.sortBy(_.off))
+ }
+ _directMemberClasses = bs.sortBy(_.off)
+ }
+
+ /* `isCompilingStdLib` saves the day when compiling:
+ * (1) scala.Nothing (the test `c.isNonSpecial` fails for it)
+ * (2) scala.Boolean (it has null superClass and is not an interface)
+ */
+ assert(c.isNonSpecial || isCompilingStdLib /*(1)*/, s"non well-formed plain-type: $this")
+ assert(
+ if (sc == null) { (c == ObjectReference) || isInterface || isCompilingStdLib /*(2)*/ }
+ else { (c != ObjectReference) && !sc.isInterface }
+ , "non well-formed plain-type: " + this
+ )
+ assert(ifaces.forall(i => i.c.isNonSpecial && i.isInterface), s"non well-formed plain-type: $this")
+
+ import asm.Opcodes._
+ def hasFlags(mask: Int) = (flags & mask) != 0
+ def isInterface = hasFlags(ACC_INTERFACE)
+ def isFinal = hasFlags(ACC_FINAL)
+ def isInnerClass = { innersChain != null }
+ def isLambda = {
+ // ie isLCC || isTraditionalClosureClass
+ isFinal && (c.getSimpleName.contains(tpnme.ANON_FUN_NAME.toString)) && isFunctionType(c)
+ }
+
+ /* can-multi-thread */
+ def superClasses: List[Tracked] = {
+ if (sc == null) Nil else sc :: sc.superClasses
+ }
+
+ /* can-multi-thread */
+ def isSubtypeOf(other: BType): Boolean = {
+ assert(other.isNonSpecial, "so-called special cases have to be handled in BCodeTypes.conforms()")
+
+ if (c == other) return true;
+
+ val otherIsIface = exemplars.get(other).isInterface
+
+ if (this.isInterface) {
+ if (other == ObjectReference) return true;
+ if (!otherIsIface) return false;
+ }
+ else {
+ if (sc != null && sc.isSubtypeOf(other)) return true;
+ if (!otherIsIface) return false;
+ }
+
+ var idx = 0
+ while (idx < ifaces.length) {
+ if (ifaces(idx).isSubtypeOf(other)) return true;
+ idx += 1
+ }
+
+ false
+ }
+
+ /*
+ * The `ifaces` field lists only those interfaces declared by `c`
+ * From the set of all supported interfaces, this method discards those which are supertypes of others in the set.
+ */
+ def allLeafIfaces: Set[Tracked] = {
+ if (sc == null) { ifaces.toSet }
+ else { minimizeInterfaces(ifaces.toSet ++ sc.allLeafIfaces) }
+ }
+
+ /*
+ * This type may not support the argument's interface in its entirety; however, it may support some of its super-interfaces.
+ * We visualize each such supported subset of the argument's functionality as a "branch". This method returns all such branches.
+ *
+ * In other words, let Ri be a branch supported by `ib`,
+ * this method returns all Ri such that this <:< Ri, where each Ri is maximally deep.
+ */
+ def supportedBranches(ib: Tracked): Set[Tracked] = {
+ assert(ib.isInterface, s"Non-interface argument: $ib")
+
+ val result: Set[Tracked] =
+ if (this.isSubtypeOf(ib.c)) { Set(ib) }
+ else { ib.ifaces.toSet[Tracked].flatMap( bi => supportedBranches(bi) ) }
+
+ checkAllInterfaces(result)
+
+ result
+ }
+
+ override def toString = { c.toString }
+
+ }
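+
+ /*
+ * Typical use of the cache above (illustrative): subtype questions during code emission
+ * go through `exemplars`, e.g. `exemplars.get(aRef).isSubtypeOf(otherRef)`,
+ * as done by `conforms()` and `isFunctionType()` below, without calling back into the typer.
+ */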
+
+ /* must-single-thread */
+ final def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+
+ /* must-single-thread */
+ final def hasInternalName(sym: Symbol) = { sym.isClass || (sym.isModule && !sym.isMethod) }
+
+ /* must-single-thread */
+ def getSuperInterfaces(csym: Symbol): List[Symbol] = {
+
+ // Additional interface parents based on annotations and other cues
+ def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match {
+ case definitions.RemoteAttr => definitions.RemoteInterfaceClass
+ case _ => NoSymbol
+ }
+
+ /* Drop redundant interfaces (which are implemented by some other parent) from the immediate parents.
+ * In other words, no two interfaces in the result are related by subtyping.
+ * This method works on Symbols; a similar one (not a duplicate) works on Tracked instances.
+ */
+ def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
+ var rest = lstIfaces
+ var leaves = List.empty[Symbol]
+ while (!rest.isEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
+ if (!nonLeaf) {
+ leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
+ }
+ rest = rest.tail
+ }
+
+ leaves
+ }
+
+ val superInterfaces0: List[Symbol] = csym.mixinClasses
+ val superInterfaces = existingSymbols(superInterfaces0 ++ csym.annotations.map(newParentForAttr)).distinct
+
+ assert(!superInterfaces.contains(NoSymbol), s"found NoSymbol among: ${superInterfaces.mkString}")
+ assert(superInterfaces.forall(s => s.isInterface || s.isTrait), s"found non-interface among: ${superInterfaces.mkString}")
+
+ minimizeInterfaces(superInterfaces)
+ }
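+
+ /*
+ * Illustrative example (hypothetical symbols): given `trait A; trait B extends A`,
+ * `minimizeInterfaces(List(A, B))` above returns `List(B)`, because B already implies A.
+ */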
+
+ /*
+ * Records the superClass and supportedInterfaces relations,
+ * so that afterwards queries can be answered without resorting to typer.
+ * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
+ * On the other hand, this method does record the inner-class status of the argument, via `buildExemplar()`.
+ *
+ * must-single-thread
+ */
+ final def exemplar(csym0: Symbol): Tracked = {
+ assert(csym0 != NoSymbol, "NoSymbol can't be tracked")
+
+ val csym = {
+ if (csym0.isJavaDefined && csym0.isModuleClass) csym0.linkedClassOfClass
+ else if (csym0.isModule) csym0.moduleClass
+ else csym0 // we track only module-classes and plain-classes
+ }
+
+ assert(!primitiveTypeMap.contains(csym) || isCompilingStdLib, s"primitive types not tracked here: ${csym.fullName}")
+ assert(!phantomTypeMap.contains(csym), s"phantom types not tracked here: ${csym.fullName}")
+
+ val opt = symExemplars.get(csym)
+ if (opt != null) {
+ return opt
+ }
+
+ val key = brefType(csym.javaBinaryName.toTypeName)
+ assert(key.isNonSpecial || isCompilingStdLib, s"Not a class to track: ${csym.fullName}")
+
+ // TODO accommodate the fix for SI-5031 of https://github.com/scala/scala/commit/0527b2549bcada2fda2201daa630369b377d0877
+ // TODO Weaken this assertion? buildExemplar() needs to be updated, too. In the meantime, pos/t5031_3 has been moved to test/disabled/pos.
+ val whatWasInExemplars = exemplars.get(key)
+ assert(whatWasInExemplars == null, "Maps `symExemplars` and `exemplars` got out of sync.")
+ val tr = buildExemplar(key, csym)
+ symExemplars.put(csym, tr)
+ if (csym != csym0) { symExemplars.put(csym0, tr) }
+ exemplars.put(tr.c, tr) // tr.c is the hash-consed, internalized, canonical representative for csym's key.
+ tr
+ }
+
+ val EMPTY_TRACKED_ARRAY = Array.empty[Tracked]
+
+ /*
+ * must-single-thread
+ */
+ private def buildExemplar(key: BType, csym: Symbol): Tracked = {
+ val sc =
+ if (csym.isImplClass) definitions.ObjectClass
+ else csym.superClass
+ assert(
+ if (csym == definitions.ObjectClass)
+ sc == NoSymbol
+ else if (csym.isInterface)
+ sc == definitions.ObjectClass
+ else
+ ((sc != NoSymbol) && !sc.isInterface) || isCompilingStdLib,
+ "superClass out of order"
+ )
+ val ifaces = getSuperInterfaces(csym) map exemplar;
+ val ifacesArr =
+ if (ifaces.isEmpty) EMPTY_TRACKED_ARRAY
+ else {
+ val arr = new Array[Tracked](ifaces.size)
+ ifaces.copyToArray(arr)
+ arr
+ }
+
+ val flags = mkFlags(
+ javaFlags(csym),
+ if (isDeprecated(csym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val tsc = if (sc == NoSymbol) null else exemplar(sc)
+
+ val innersChain = saveInnerClassesFor(csym, key)
+
+ Tracked(key, flags, tsc, ifacesArr, innersChain)
+ }
+
+ // ---------------- utilities around interfaces represented by Tracked instances. ----------------
+
+ /* Drop redundant interfaces (those which are implemented by some other).
+ * In other words, no two interfaces in the result are related by subtyping.
+ * This method works on Tracked elements; a similar one (not a duplicate) works on Symbols.
+ */
+ def minimizeInterfaces(lstIfaces: Set[Tracked]): Set[Tracked] = {
+ checkAllInterfaces(lstIfaces)
+ var rest = lstIfaces.toList
+ var leaves = List.empty[Tracked]
+ while (!rest.isEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { leaf => leaf.isSubtypeOf(candidate.c) }
+ if (!nonLeaf) {
+ leaves = candidate :: (leaves filterNot { leaf => candidate.isSubtypeOf(leaf.c) })
+ }
+ rest = rest.tail
+ }
+
+ leaves.toSet
+ }
+
+ def allInterfaces(is: Iterable[Tracked]): Boolean = { is forall { i => i.isInterface } }
+ def nonInterfaces(is: Iterable[Tracked]): Iterable[Tracked] = { is filterNot { i => i.isInterface } }
+
+ def checkAllInterfaces(ifaces: Iterable[Tracked]) {
+ assert(allInterfaces(ifaces), s"Non-interfaces: ${nonInterfaces(ifaces).mkString}")
+ }
+
+ /*
+ * Subtype check `a <:< b` on BTypes that takes into account the JVM built-in numeric promotions (e.g. BYTE to INT).
+ * Its operation can be visualized more easily in terms of the Java bytecode type hierarchy.
+ * This method used to be called, in the ICode world, TypeKind.<:<()
+ *
+ * can-multi-thread
+ */
+ final def conforms(a: BType, b: BType): Boolean = {
+ if (a.isArray) { // may be null
+ /* Array subtyping is covariant here, as in Java bytecode. Also necessary for Java interop. */
+ if ((b == jlCloneableReference) ||
+ (b == jioSerializableReference) ||
+ (b == AnyRefReference)) { true }
+ else if (b.isArray) { conforms(a.getComponentType, b.getComponentType) }
+ else { false }
+ }
+ else if (a.isBoxed) { // may be null
+ if (b.isBoxed) { a == b }
+ else if (b == AnyRefReference) { true }
+ else if (!(b.hasObjectSort)) { false }
+ else { exemplars.get(a).isSubtypeOf(b) } // e.g., java/lang/Double conforms to java/lang/Number
+ }
+ else if (a.isNullType) { // known to be null
+ if (b.isNothingType) { false }
+ else if (b.isValueType) { false }
+ else { true }
+ }
+ else if (a.isNothingType) { // known to be Nothing
+ true
+ }
+ else if (a.isUnitType) {
+ b.isUnitType
+ }
+ else if (a.hasObjectSort) { // may be null
+ if (a.isNothingType) { true }
+ else if (b.hasObjectSort) { exemplars.get(a).isSubtypeOf(b) }
+ else if (b.isArray) { a.isNullType } // documentation only, because `if(a.isNullType)` (above) covers this case already.
+ else { false }
+ }
+ else {
+
+ def msg = s"(a: $a, b: $b)"
+
+ assert(a.isNonUnitValueType, s"a isn't a non-Unit value type. $msg")
+ assert(b.isValueType, s"b isn't a value type. $msg")
+
+ (a eq b) || (a match {
+ case BOOL | BYTE | SHORT | CHAR => b == INT || b == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt().
+ case _ => a == b
+ })
+ }
+ }
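+
+ /*
+ * A few illustrative consequences of the cases above (not exhaustive):
+ * - conforms(BYTE, INT) is true (JVM numeric promotion),
+ * - conforms(BOOL, LONG) is also true, although the TODO above notes this is too lenient,
+ * - any array BType conforms to jlCloneableReference, jioSerializableReference and AnyRefReference.
+ */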
+
+ /* The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative values of Byte and Short. See ticket #2087.
+ *
+ * can-multi-thread
+ */
+ def maxValueType(a: BType, other: BType): BType = {
+ assert(a.isValueType, "maxValueType() is defined only for 1st arg valuetypes (2nd arg doesn't matter).")
+
+ def uncomparable: Nothing = {
+ abort(s"Uncomparable BTypes: $a with $other")
+ }
+
+ if (a.isNothingType) return other;
+ if (other.isNothingType) return a;
+ if (a == other) return a;
+
+ a match {
+
+ case UNIT => uncomparable
+ case BOOL => uncomparable
+
+ case BYTE =>
+ if (other == CHAR) INT
+ else if (other.isNumericType) other
+ else uncomparable
+
+ case SHORT =>
+ other match {
+ case BYTE => SHORT
+ case CHAR => INT
+ case INT | LONG | FLOAT | DOUBLE => other
+ case _ => uncomparable
+ }
+
+ case CHAR =>
+ other match {
+ case BYTE | SHORT => INT
+ case INT | LONG | FLOAT | DOUBLE => other
+ case _ => uncomparable
+ }
+
+ case INT =>
+ other match {
+ case BYTE | SHORT | CHAR => INT
+ case LONG | FLOAT | DOUBLE => other
+ case _ => uncomparable
+ }
+
+ case LONG =>
+ if (other.isIntegralType) LONG
+ else if (other.isRealType) DOUBLE
+ else uncomparable
+
+ case FLOAT =>
+ if (other == DOUBLE) DOUBLE
+ else if (other.isNumericType) FLOAT
+ else uncomparable
+
+ case DOUBLE =>
+ if (other.isNumericType) DOUBLE
+ else uncomparable
+
+ case _ => uncomparable
+ }
+ }
+
+ /* Takes promotions of numeric primitives into account.
+ *
+ * can-multi-thread
+ */
+ final def maxType(a: BType, other: BType): BType = {
+ if (a.isValueType) { maxValueType(a, other) }
+ else {
+ if (a.isNothingType) return other;
+ if (other.isNothingType) return a;
+ if (a == other) return a;
+ // Approximate `lub`. The common type of two references is always AnyRef.
+ // For the 'real' least upper bound wrt subclassing, use method 'lub'.
+ assert(a.isArray || a.isBoxed || a.hasObjectSort, s"This is not a valuetype and it's not something else, what is it? $a")
+ // TODO For some reason, ICode thinks `REFERENCE(...).maxType(BOXED(whatever))` is `uncomparable`. Here, that has maxType AnyRefReference.
+ // BTW, when swapping arguments, ICode says BOXED(whatever).maxType(REFERENCE(...)) == AnyRefReference, so I guess the above was an oversight in REFERENCE.maxType()
+ if (other.isRefOrArrayType) { AnyRefReference }
+ else { abort(s"Uncomparable BTypes: $a with $other") }
+ }
+ }
+
+ /*
+ * Whether the argument is a subtype of
+ * scala.PartialFunction[-A, +B] extends (A => B)
+ * N.B.: this method returns true for a scala.runtime.AbstractPartialFunction
+ *
+ * can-multi-thread
+ */
+ def isPartialFunctionType(t: BType): Boolean = {
+ (t.hasObjectSort) && exemplars.get(t).isSubtypeOf(PartialFunctionReference)
+ }
+
+ /*
+ * Whether the argument is a subtype of scala.FunctionX where 0 <= X <= definitions.MaxFunctionArity
+ *
+ * can-multi-thread
+ */
+ def isFunctionType(t: BType): Boolean = {
+ if (!t.hasObjectSort) return false
+ var idx = 0
+ val et: Tracked = exemplars.get(t)
+ while (idx <= definitions.MaxFunctionArity) {
+ if (et.isSubtypeOf(FunctionReference(idx).c)) {
+ return true
+ }
+ idx += 1
+ }
+ false
+ }
+
+ /*
+ * must-single-thread
+ */
+ def isTopLevelModule(sym: Symbol): Boolean = {
+ exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def isStaticModule(sym: Symbol): Boolean = {
+ sym.isModuleClass && !sym.isImplClass && !sym.isLifted
+ }
+
+ // ---------------------------------------------------------------------
+ // ---------------- InnerClasses attribute (JVMS 4.7.6) ----------------
+ // ---------------------------------------------------------------------
+
+ val INNER_CLASSES_FLAGS =
+ (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_FINAL)
+
+ /*
+ * @param name the internal name of an inner class.
+ * @param outerName the internal name of the class to which the inner class belongs.
+ * May be `null` for non-member inner classes (ie for a Java local class or a Java anonymous class).
+ * @param innerName the (simple) name of the inner class inside its enclosing class. It's `null` for anonymous inner classes.
+ * @param access the access flags of the inner class as originally declared in the enclosing class.
+ */
+ case class InnerClassEntry(name: String, outerName: String, innerName: String, access: Int) {
+ assert(name != null, "Null isn't good as class name in an InnerClassEntry.")
+ }
+
+ /* For a given symbol, return the symbol corresponding to the class that should be declared as an inner class.
+ *
+ * For example:
+ * class A {
+ * class B
+ * object C
+ * }
+ *
+ * then this method will return:
+ * NoSymbol for A,
+ * the same symbol for A.B (corresponding to the A$B class), and
+ * the A$C$ symbol for A.C.
+ *
+ * must-single-thread
+ */
+ def innerClassSymbolFor(s: Symbol): Symbol =
+ if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
+
+ /*
+ * Computes the chain of inner classes (over the is-member-of relation) for the given argument.
+ * The resulting chain will be cached in `exemplars`.
+ *
+ * The chain thus cached is valid during this compiler run, see in contrast
+ * `innerClassBufferASM` for a cache that is valid only for the class being emitted.
+ *
+ * The argument can be any symbol, but given that this method is invoked only from `buildExemplar()`,
+ * in practice it has been vetted to be a class-symbol.
+ *
+ * Returns:
+ *
+ * - a non-empty array of entries for an inner-class argument.
+ * The array's first element is the outermost top-level class,
+ * the array's last element corresponds to csym.
+ *
+ * - null otherwise.
+ *
+ * This method does not add to `innerClassBufferASM`, use instead `exemplar()` for that.
+ *
+ * must-single-thread
+ */
+ final def saveInnerClassesFor(csym: Symbol, csymTK: BType): Array[InnerClassEntry] = {
+
+ val ics = innerClassSymbolFor(csym)
+ if (ics == NoSymbol) {
+ return null
+ }
+ assert(ics == csym, s"Disagreement between innerClassSymbolFor() and exemplar()'s tracked symbol for the same input: ${csym.fullName}")
+
+ var chain: List[Symbol] = Nil
+ var x = ics
+ while (x ne NoSymbol) {
+ assert(x.isClass, s"not a class symbol: ${x.fullName}")
+ val isInner = !x.rawowner.isPackageClass
+ if (isInner) {
+ chain ::= x
+ x = innerClassSymbolFor(x.rawowner)
+ } else {
+ x = NoSymbol
+ }
+ }
+
+ // now that we have all of `ics`, `csym`, and soon the inner-classes-chain, it's too tempting not to cache.
+ if (chain.isEmpty) { null }
+ else {
+ val arr = new Array[InnerClassEntry](chain.size)
+ (chain map toInnerClassEntry).copyToArray(arr)
+
+ arr
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ private def toInnerClassEntry(innerSym: Symbol): InnerClassEntry = {
+
+ /* The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ */
+ def outerName(innerSym: Symbol): Name = {
+ if (innerSym.originalEnclosingMethod != NoSymbol)
+ null
+ else {
+ val outerName = innerSym.rawowner.javaBinaryName
+ if (isTopLevelModule(innerSym.rawowner)) nme.stripModuleSuffix(outerName)
+ else outerName
+ }
+ }
+
+ def innerName(innerSym: Symbol): String = {
+ if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+ null
+ else
+ innerSym.rawname + innerSym.moduleSuffix
+ }
+
+ val flagsWithFinal: Int = mkFlags(
+ if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ javaFlags(innerSym),
+ if (isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+ ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding.
+
+ val jname = innerSym.javaBinaryName.toString // never null
+ val oname = { // null when method-enclosed
+ val on = outerName(innerSym)
+ if (on == null) null else on.toString
+ }
+ val iname = { // null for anonymous inner class
+ val in = innerName(innerSym)
+ if (in == null) null else in.toString
+ }
+
+ InnerClassEntry(jname, oname, iname, flags)
+ }
+
+ // --------------------------------------------
+ // ---------------- Java flags ----------------
+ // --------------------------------------------
+
+ /*
+ * can-multi-thread
+ */
+ final def hasPublicBitSet(flags: Int) = ((flags & asm.Opcodes.ACC_PUBLIC) != 0)
+
+ /*
+ * must-single-thread
+ */
+ final def isRemote(s: Symbol) = (s hasAnnotation definitions.RemoteAttr)
+
+ /*
+ * Return the Java modifiers for the given symbol.
+ * Java modifiers for classes:
+ * - public, abstract, final, strictfp (not used)
+ * for interfaces:
+ * - the same as for classes, without 'final'
+ * for fields:
+ * - public, private (*)
+ * - static, final
+ * for methods:
+ * - the same as for fields, plus:
+ * - abstract, synchronized (not used), strictfp (not used), native (not used)
+ *
+ * (*) protected cannot be used, since inner classes 'see' protected members,
+ * and they would fail verification after being lifted.
+ *
+ * must-single-thread
+ */
+ def javaFlags(sym: Symbol): Int = {
+ // constructors of module classes should be private
+ // PP: why are they only being marked private at this stage and not earlier?
+ val privateFlag =
+ sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
+
+ // Final: the only fields which can receive ACC_FINAL are eager vals.
+ // Neither vars nor lazy vals can, because:
+ //
+ // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
+ // "Another problem is that the specification allows aggressive
+ // optimization of final fields. Within a thread, it is permissible to
+ // reorder reads of a final field with those modifications of a final
+ // field that do not take place in the constructor."
+ //
+ // A var or lazy val which is marked final still has meaning to the
+ // scala compiler. The word final is heavily overloaded unfortunately;
+ // for us it means "not overridable". At present you can't override
+ // vars regardless; this may change.
+ //
+ // The logic does not check .isFinal (which checks flags for the FINAL flag,
+ // and includes symbols marked lateFINAL); instead it inspects rawflags, so
+ // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
+ // avoid breaking proxy software which depends on subclassing, we do not
+ // emit ACC_FINAL.
+ // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
+
+ val finalFlag = (
+ (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModule(sym))
+ && !sym.enclClass.isInterface
+ && !sym.isClassConstructor
+ && !sym.isMutable // lazy vals and vars both
+ )
+
+ // Primitives are "abstract final" to prohibit instantiation
+ // without having to provide any implementations, but that is an
+ // illegal combination of modifiers at the bytecode level so
+ // suppress final if abstract is present.
+ import asm.Opcodes._
+ mkFlags(
+ if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
+ if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
+ if (sym.isInterface) ACC_INTERFACE else 0,
+ if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
+ if (sym.isStaticMember) ACC_STATIC else 0,
+ if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
+ if (sym.isArtifact) ACC_SYNTHETIC else 0,
+ if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+ if (sym.hasEnumFlag) ACC_ENUM else 0,
+ if (sym.isVarargsMethod) ACC_VARARGS else 0,
+ if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
+ )
+ }
+
+ /*
+ * must-single-thread
+ */
+ def javaFieldFlags(sym: Symbol) = {
+ javaFlags(sym) | mkFlags(
+ if (sym hasAnnotation definitions.TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0,
+ if (sym hasAnnotation definitions.VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0,
+ if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL
+ )
+ }
+
+} // end of class BCodeTypes
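The flag helpers above (hasPublicBitSet, javaFlags, and the mkFlags helper that appears further down in GenASM.scala) all work by OR-ing JVM access-flag bits together and testing them against a mask. A minimal, self-contained sketch of that composition; the opcode bit values are hard-coded from the JVM spec so the sketch needs no scala.tools.asm on the classpath, and the object name FlagDemo is purely illustrative:

object FlagDemo {
  // JVMS access-flag bits (subset), hard-coded so this sketch has no ASM dependency.
  final val ACC_PUBLIC   = 0x0001
  final val ACC_FINAL    = 0x0010
  final val ACC_ABSTRACT = 0x0400

  // Same shape as GenASM's mkFlags: OR the requested bits together.
  def mkFlags(args: Int*): Int = args.foldLeft(0)(_ | _)

  def main(args: Array[String]): Unit = {
    val isAbstract = true
    val flags = mkFlags(
      ACC_PUBLIC,
      if (isAbstract) ACC_ABSTRACT else 0,
      // "abstract final" is illegal at the bytecode level, so ACC_FINAL is
      // suppressed whenever the symbol is abstract, mirroring javaFlags above.
      if (!isAbstract) ACC_FINAL else 0
    )
    println(f"flags = 0x$flags%04x, public bit set = ${(flags & ACC_PUBLIC) != 0}")
  }
}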
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index fb1f45fa40..8e6c09213f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -6,14 +6,14 @@
package scala.tools.nsc
package backend.jvm
-import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile }
+import java.io.{ DataOutputStream, FileOutputStream, IOException, OutputStream, File => JFile }
import scala.tools.nsc.io._
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.util.JavapClass
-import java.util.jar.{ JarEntry, JarOutputStream, Attributes }
-import Attributes.Name
+import java.util.jar.Attributes.Name
import scala.language.postfixOps
+/** Can't output a file due to the state of the file system. */
+class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg)
+
/** For the last mile: turning generated bytecode in memory into
* something you can use. Has implementations for writing to class
* files, jars, and disassembled/javap output.
@@ -22,22 +22,37 @@ trait BytecodeWriters {
val global: Global
import global._
- private def outputDirectory(sym: Symbol): AbstractFile = (
- settings.outputDirs.outputDirFor(beforeFlatten(sym.sourceFile))
- )
- private def getFile(base: AbstractFile, /*cls.getName()*/ clsName: String, suffix: String): AbstractFile = {
+ def outputDirectory(sym: Symbol): AbstractFile =
+ settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile)
+
+ /**
+ * @param clsName cls.getName
+ */
+ def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
+ def ensureDirectory(dir: AbstractFile): AbstractFile =
+ if (dir.isDirectory) dir
+ else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir)
var dir = base
val pathParts = clsName.split("[./]").toList
- for (part <- pathParts.init) {
- dir = dir.subdirectoryNamed(part)
- }
- dir.fileNamed(pathParts.last + suffix)
+ for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part
+ ensureDirectory(dir) fileNamed pathParts.last + suffix
}
- private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
+ def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
getFile(outputDirectory(sym), clsName, suffix)
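The clsName-to-path mapping inside getFile is just a split on '.' and '/': every segment but the last becomes a subdirectory (checked by ensureDirectory), and the last segment plus the suffix becomes the file name. A tiny sketch of that mapping in isolation, with an illustrative class name:

val clsName = "com.example.Foo"            // dotted or internal ('/') form splits the same way
val parts   = clsName.split("[./]").toList
val dirPath = parts.init.mkString("/")     // "com/example"
val file    = parts.last + ".class"        // "Foo.class"
println(s"$dirPath/$file")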
+ def factoryNonJarBytecodeWriter(): BytecodeWriter = {
+ val emitAsmp = settings.Ygenasmp.isSetByUser
+ val doDump = settings.Ydumpclasses.isSetByUser
+ (emitAsmp, doDump) match {
+ case (false, false) => new ClassBytecodeWriter { }
+ case (false, true ) => new ClassBytecodeWriter with DumpBytecodeWriter { }
+ case (true, false) => new ClassBytecodeWriter with AsmpBytecodeWriter
+ case (true, true ) => new ClassBytecodeWriter with AsmpBytecodeWriter with DumpBytecodeWriter { }
+ }
+ }
+
trait BytecodeWriter {
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol): Unit
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit
def close(): Unit = ()
}
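factoryNonJarBytecodeWriter relies on Scala's stackable-trait pattern: each decorator declares writeClass as abstract override and calls super.writeClass, so mixing AsmpBytecodeWriter or DumpBytecodeWriter into ClassBytecodeWriter layers extra output on top of the plain classfile write. A self-contained sketch of the pattern, using illustrative names rather than the compiler's types:

trait Writer {
  def write(name: String, bytes: Array[Byte]): Unit
}
class BaseWriter extends Writer {
  def write(name: String, bytes: Array[Byte]): Unit =
    println(s"writing ${bytes.length} bytes for $name")
}
trait DumpingWriter extends Writer {
  abstract override def write(name: String, bytes: Array[Byte]): Unit = {
    super.write(name, bytes)        // the writer below in the linearization runs first
    println(s"also dumping $name")  // then the mixed-in extra behaviour
  }
}
object WriterDemo extends App {
  val w: Writer = new BaseWriter with DumpingWriter
  w.write("Foo", Array[Byte](1, 2, 3))
}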
@@ -48,7 +63,9 @@ trait BytecodeWriters {
)
val writer = new Jar(jfile).jarWriter(jarMainAttrs: _*)
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ assert(outfile == null,
+ "The outfile formal param is there just because ClassBytecodeWriter overrides this method and uses it.")
val path = jclassName + ".class"
val out = writer.newOutputStream(path)
@@ -60,33 +77,47 @@ trait BytecodeWriters {
override def close() = writer.close()
}
- trait JavapBytecodeWriter extends BytecodeWriter {
- val baseDir = Directory(settings.Ygenjavap.value).createDirectory()
-
- def emitJavap(bytes: Array[Byte], javapFile: io.File) {
- val pw = javapFile.printWriter()
- val javap = new JavapClass(ScalaClassLoader.appLoader, pw) {
- override def findBytes(path: String): Array[Byte] = bytes
+ /*
+ * The ASM textual representation for bytecode overcomes disadvantages of javap output in three areas:
+ * (a) pickle dingbats undecipherable to the naked eye;
+ * (b) two constant pools, while having identical contents, are displayed differently due to physical layout;
+ * (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap,
+ * their expansion by ASM is more readable.
+ *
+ */
+ trait AsmpBytecodeWriter extends BytecodeWriter {
+ import scala.tools.asm
+
+ private val baseDir = Directory(settings.Ygenasmp.value).createDirectory()
+
+ private def emitAsmp(jclassBytes: Array[Byte], asmpFile: io.File) {
+ val pw = asmpFile.printWriter()
+ try {
+ val cnode = new asm.tree.ClassNode()
+ val cr = new asm.ClassReader(jclassBytes)
+ cr.accept(cnode, 0)
+ val trace = new scala.tools.asm.util.TraceClassVisitor(new java.io.PrintWriter(new java.io.StringWriter()))
+ cnode.accept(trace)
+ trace.p.print(pw)
}
-
- try javap(Seq("-verbose", "dummy")) foreach (_.show())
finally pw.close()
}
- abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- super.writeClass(label, jclassName, jclassBytes, sym)
- val bytes = getFile(sym, jclassName, ".class").toByteArray
- val segments = jclassName.split("[./]")
- val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile;
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ super.writeClass(label, jclassName, jclassBytes, outfile)
+
+ val segments = jclassName.split("[./]")
+ val asmpFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "asmp" toFile;
- javapFile.parent.createDirectory()
- emitJavap(bytes, javapFile)
+ asmpFile.parent.createDirectory()
+ emitAsmp(jclassBytes, asmpFile)
}
}
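emitAsmp above drives the shaded copy of ASM that ships inside the compiler; the same textual dump can be produced with the plain org.objectweb.asm artifacts, which this sketch assumes are on the classpath (asm plus asm-util):

import java.io.{PrintWriter, StringWriter}
import org.objectweb.asm.ClassReader
import org.objectweb.asm.util.TraceClassVisitor

// Render classfile bytes in ASM's textual form (what the ".asmp" files contain).
def disassemble(classBytes: Array[Byte]): String = {
  val out = new StringWriter()
  new ClassReader(classBytes).accept(new TraceClassVisitor(new PrintWriter(out)), 0)
  out.toString
}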
trait ClassBytecodeWriter extends BytecodeWriter {
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- val outfile = getFile(sym, jclassName, ".class")
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ assert(outfile != null,
+ "Precisely this override requires its invoker to hand out a non-null AbstractFile.")
val outstream = new DataOutputStream(outfile.bufferedOutput)
try outstream.write(jclassBytes, 0, jclassBytes.length)
@@ -98,11 +129,11 @@ trait BytecodeWriters {
trait DumpBytecodeWriter extends BytecodeWriter {
val baseDir = Directory(settings.Ydumpclasses.value).createDirectory()
- abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- super.writeClass(label, jclassName, jclassBytes, sym)
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ super.writeClass(label, jclassName, jclassBytes, outfile)
val pathName = jclassName
- var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
+ val dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
dumpFile.parent.createDirectory()
val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path))
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 19cdcd2590..eb40e1dbde 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -3,17 +3,16 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.jvm
-import java.nio.ByteBuffer
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
import scala.tools.nsc.symtab._
-import scala.tools.nsc.io.AbstractFile
-
import scala.tools.asm
import asm.Label
+import scala.annotation.tailrec
/**
* @author Iulian Dragos (version 1.0, FJBG-based implementation)
@@ -27,11 +26,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
import icodes.opcodes._
import definitions._
+ // Strangely I can't find this in the asm code
+ // 255, but reserving 1 for "this"
+ final val MaximumJvmParameters = 254
+
val phaseName = "jvm"
/** Create a new phase */
override def newPhase(p: Phase): Phase = new AsmPhase(p)
+ /** From the reference documentation of the Android SDK:
+ * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
+ * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
+ * which is an object implementing the `Parcelable.Creator` interface.
+ */
+ private val androidFieldName = newTermName("CREATOR")
+
+ private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
+ private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
+
/** JVM code generation phase
*/
class AsmPhase(prev: Phase) extends ICodePhase(prev) {
@@ -39,7 +52,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
override def erasedTypes = true
def apply(cls: IClass) = sys.error("no implementation")
- val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
+ // An AsmPhase starts and ends within a Run, thus the caches in question will get populated and cleared within a Run, too (SI-7422).
+ javaNameCache.clear()
+ javaNameCache ++= List(
+ NothingClass -> binarynme.RuntimeNothing,
+ RuntimeNothingClass -> binarynme.RuntimeNothing,
+ NullClass -> binarynme.RuntimeNull,
+ RuntimeNullClass -> binarynme.RuntimeNull
+ )
+
+ // unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
+ reverseJavaName.clear()
+ reverseJavaName ++= List(
+ binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
+ binarynme.RuntimeNull.toString() -> RuntimeNullClass
+ )
+
+ // Lazy val; can't have eager vals in Phase constructors which may
+ // cause cycles before Global has finished initialization.
+ lazy val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
private def initBytecodeWriter(entryPoints: List[IClass]): BytecodeWriter = {
settings.outputDirs.getSingleOutput match {
@@ -61,29 +92,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
new DirectToJarfileWriter(f.file)
- case _ =>
- if (settings.Ygenjavap.isDefault) {
- if(settings.Ydumpclasses.isDefault)
- new ClassBytecodeWriter { }
- else
- new ClassBytecodeWriter with DumpBytecodeWriter { }
- }
- else new ClassBytecodeWriter with JavapBytecodeWriter { }
-
- // TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point.
- // Three areas where javap ouput is less than ideal (e.g. when comparing versions of the same classfile) are:
- // (a) unreadable pickle;
- // (b) two constant pools, while having identical contents, are displayed differently due to physical layout.
- // (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap, their expansion makes more sense instead.
+ case _ => factoryNonJarBytecodeWriter()
}
}
override def run() {
- if (settings.debug.value)
+ if (settings.debug)
inform("[running phase " + name + " on icode]")
- if (settings.Xdce.value)
+ if (settings.Xdce)
for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
log(s"Optimizer eliminated ${sym.fullNameString}")
deadCode.elidedClosures += sym
@@ -100,41 +118,41 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
"Such classes will overwrite one another on case-insensitive filesystems.")
}
- debuglog("Created new bytecode generator for " + classes.size + " classes.")
+ debuglog(s"Created new bytecode generator for ${classes.size} classes.")
val bytecodeWriter = initBytecodeWriter(sortedClasses filter isJavaEntryPoint)
- val plainCodeGen = new JPlainBuilder(bytecodeWriter)
- val mirrorCodeGen = new JMirrorBuilder(bytecodeWriter)
- val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter)
-
- while(!sortedClasses.isEmpty) {
- val c = sortedClasses.head
+ val needsOutfile = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+ val plainCodeGen = new JPlainBuilder( bytecodeWriter, needsOutfile)
+ val mirrorCodeGen = new JMirrorBuilder( bytecodeWriter, needsOutfile)
+ val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter, needsOutfile)
+ def emitFor(c: IClass) {
if (isStaticModule(c.symbol) && isTopLevelModule(c.symbol)) {
- if (c.symbol.companionClass == NoSymbol) {
- mirrorCodeGen.genMirrorClass(c.symbol, c.cunit)
- } else {
- log("No mirror class for module with linked class: " + c.symbol.fullName)
- }
+ if (c.symbol.companionClass == NoSymbol)
+ mirrorCodeGen genMirrorClass (c.symbol, c.cunit)
+ else
+ log(s"No mirror class for module with linked class: ${c.symbol.fullName}")
}
+ plainCodeGen genClass c
+ if (c.symbol hasAnnotation BeanInfoAttr) beanInfoCodeGen genBeanInfoClass c
+ }
- plainCodeGen.genClass(c)
-
- if (c.symbol hasAnnotation BeanInfoAttr) {
- beanInfoCodeGen.genBeanInfoClass(c)
+ while (!sortedClasses.isEmpty) {
+ val c = sortedClasses.head
+ try emitFor(c)
+ catch {
+ case e: FileConflictException =>
+ c.cunit.error(c.symbol.pos, s"error writing ${c.symbol}: ${e.getMessage}")
}
-
sortedClasses = sortedClasses.tail
classes -= c.symbol // GC opportunity
}
bytecodeWriter.close()
- classes.clear()
- reverseJavaName.clear()
/* don't javaNameCache.clear() because that causes the following tests to fail:
* test/files/run/macro-repl-dontexpand.scala
* test/files/jvm/interpreter.scala
- * TODO but why? what use could javaNameCache possibly see once GenJVM is over?
+ * TODO but why? what use could javaNameCache possibly see once GenASM is over?
*/
/* TODO After emitting all class files (e.g., in a separate compiler phase) ASM can perform bytecode verification:
@@ -153,19 +171,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
var pickledBytes = 0 // statistics
- // Don't put this in per run caches. Contains entries for classes as well as members.
- val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
- NothingClass -> binarynme.RuntimeNothing,
- RuntimeNothingClass -> binarynme.RuntimeNothing,
- NullClass -> binarynme.RuntimeNull,
- RuntimeNullClass -> binarynme.RuntimeNull
- )
+ val javaNameCache = perRunCaches.newMap[Symbol, Name]()
// unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
- val reverseJavaName = mutable.Map.empty[String, Symbol] ++= List(
- binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
- binarynme.RuntimeNull.toString() -> RuntimeNullClass
- )
+ val reverseJavaName = perRunCaches.newMap[String, Symbol]()
private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
private def hasPublicBitSet(flags: Int) = (flags & asm.Opcodes.ACC_PUBLIC) != 0
@@ -235,6 +244,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
if (sym.isArtifact) ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+ if (sym.hasEnumFlag) ACC_ENUM else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0,
if (sym.hasFlag(Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
)
@@ -249,7 +259,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def isTopLevelModule(sym: Symbol): Boolean =
- afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
+ exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
def isStaticModule(sym: Symbol): Boolean = {
sym.isModuleClass && !sym.isImplClass && !sym.isLifted
@@ -284,7 +294,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def inameToSymbol(iname: String): Symbol = {
val name = global.newTypeName(iname)
val res0 =
- if (nme.isModuleName(name)) rootMirror.getModule(nme.stripModuleSuffix(name))
+ if (nme.isModuleName(name)) rootMirror.getModuleByName(name.dropModule)
else rootMirror.getClassByName(name.replace('/', '.')) // TODO fails for inner classes (but this hasn't been tested).
assert(res0 != NoSymbol)
val res = jsymbol(res0)
@@ -326,7 +336,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
assert(a.isClass)
assert(b.isClass)
- val res = Pair(a.isInterface, b.isInterface) match {
+ val res = (a.isInterface, b.isInterface) match {
case (true, true) =>
global.lub(List(a.tpe, b.tpe)).typeSymbol // TODO assert == firstCommonSuffix of resp. parents
case (true, false) =>
@@ -369,7 +379,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
private val classfileVersion: Int = settings.target.value match {
case "jvm-1.5" => asm.Opcodes.V1_5
- case "jvm-1.5-asm" => asm.Opcodes.V1_5
case "jvm-1.6" => asm.Opcodes.V1_6
case "jvm-1.7" => asm.Opcodes.V1_7
}
@@ -397,9 +406,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
/** basic functionality for class file building */
- abstract class JBuilder(bytecodeWriter: BytecodeWriter) {
+ abstract class JBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) {
- val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type]
val EMPTY_STRING_ARRAY = Array.empty[String]
val mdesc_arglessvoid = "()V"
@@ -409,7 +417,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val INNER_CLASSES_FLAGS =
(asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_FINAL)
// -----------------------------------------------------------------------------------------
// factory methods
@@ -444,8 +452,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
- val dest = new Array[Byte](len);
- System.arraycopy(b, offset, dest, 0, len);
+ val dest = new Array[Byte](len)
+ System.arraycopy(b, offset, dest, 0, len)
new asm.CustomAttr(name, dest)
}
@@ -456,7 +464,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) {
try {
val arr = jclass.toByteArray()
- bytecodeWriter.writeClass(label, jclassName, arr, sym)
+ val outF: scala.tools.nsc.io.AbstractFile = {
+ if(needsOutfile) getFile(sym, jclassName, ".class") else null
+ }
+ bytecodeWriter.writeClass(label, jclassName, arr, outF)
} catch {
case e: java.lang.RuntimeException if e != null && (e.getMessage contains "too large!") =>
reporter.error(sym.pos,
@@ -467,7 +478,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** Specialized array conversion to prevent calling
* java.lang.reflect.Array.newInstance via TraversableOnce.toArray
*/
- def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a }
def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
// -----------------------------------------------------------------------------------------
@@ -510,14 +520,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
*/
def javaName(sym: Symbol): String = {
- /**
+ /*
* Checks if the given symbol corresponds to an inner class/object and adds it to innerClassBuffer.
*
* Note: This method is called recursively, thus making sure that we add the complete chain
* of inner classes, all the way up to the root class.
*/
def collectInnerClass(s: Symbol): Unit = {
- // TODO: some beforeFlatten { ... } which accounts for
+ // TODO: some enteringFlatten { ... } which accounts for
// being nested in parameterized classes (if we're going to selectively flatten.)
val x = innerClassSymbolFor(s)
if(x ne NoSymbol) {
@@ -532,7 +542,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
collectInnerClass(sym)
- var hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod))
+ val hasInternalName = sym.isClass || sym.isModuleNotMethod
val cachedJN = javaNameCache.getOrElseUpdate(sym, {
if (hasInternalName) { sym.javaBinaryName }
else { sym.javaSimpleName }
@@ -542,12 +552,18 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val internalName = cachedJN.toString()
val trackedSym = jsymbol(sym)
reverseJavaName.get(internalName) match {
- case None =>
+ case Some(oldsym) if oldsym.exists && trackedSym.exists =>
+ assert(
+ // In contrast, neither NothingClass nor NullClass show up bytecode-level.
+ (oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)),
+ s"""|Different class symbols have the same bytecode-level internal name:
+ | name: $internalName
+ | oldsym: ${oldsym.fullNameString}
+ | tracked: ${trackedSym.fullNameString}
+ """.stripMargin
+ )
+ case _ =>
reverseJavaName.put(internalName, trackedSym)
- case Some(oldsym) =>
- assert((oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) ||
- (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)), // In contrast, neither NothingClass nor NullClass show up bytecode-level.
- "how can getCommonSuperclass() do its job if different class symbols get the same bytecode-level internal name: " + internalName)
}
}
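For context on the assertion above: a JVM internal name is the binary name with '.' replaced by '/', and it is this string, not the Symbol, that must be unique at the classfile level. A one-line illustration:

val binaryName   = "scala.collection.immutable.List"
val internalName = binaryName.replace('.', '/')   // "scala/collection/immutable/List"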
@@ -589,7 +605,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def javaType(s: Symbol): asm.Type = {
if (s.isMethod) {
- val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType);
+ val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType)
asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _*)
} else { javaType(s.tpe) }
}
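javaType for methods assembles an ASM method type from the erased result and parameter types. Against the plain ASM API (an assumption for the sketch; the compiler itself uses its shaded scala.tools.asm copy) the same construction looks like this:

import org.objectweb.asm.Type

// Descriptor for e.g.  def f(x: Int, s: String): Boolean
val mt = Type.getMethodType(Type.BOOLEAN_TYPE, Type.INT_TYPE, Type.getObjectType("java/lang/String"))
println(mt.getDescriptor)   // prints (ILjava/lang/String;)Z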
@@ -599,9 +615,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ /* The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
*/
def outerName(innerSym: Symbol): String = {
if (innerSym.originalEnclosingMethod != NoSymbol)
@@ -620,7 +636,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
innerSym.rawname + innerSym.moduleSuffix
// add inner classes which might not have been referenced yet
- afterErasure {
+ exitingErasure {
for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
innerClassBuffer += m
}
@@ -635,11 +651,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
- val flags = mkFlags(
+ val flagsWithFinal: Int = mkFlags(
if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
javaFlags(innerSym),
if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding.
val jname = javaName(innerSym) // never null
val oname = outerName(innerSym) // null when method-enclosed
val iname = innerName(innerSym) // null for anonymous inner class
@@ -683,7 +700,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** functionality for building plain and mirror classes */
- abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+ abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
def debugLevel = settings.debuginfo.indexOfChoice
@@ -795,7 +812,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// without it. This is particularly bad because the availability of
// generic information could disappear as a consequence of a seemingly
// unrelated change.
- settings.Ynogenericsig.value
+ settings.Ynogenericsig
|| sym.isArtifact
|| sym.isLiftedMethod
|| sym.isBridge
@@ -812,7 +829,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (!needsGenericSignature(sym)) { return null }
- val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
+ val memberTpe = enteringErasure(owner.thisType.memberInfo(sym))
val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
if (jsOpt.isEmpty) { return null }
@@ -825,14 +842,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
catch { case _: Throwable => false }
}
- if (settings.Xverify.value) {
+ if (settings.Xverify) {
// Run the signature parser to catch bogus signatures.
val isValidSignature = wrap {
// Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
- import scala.tools.asm.util.SignatureChecker
- if (sym.isMethod) { SignatureChecker checkMethodSignature sig } // requires asm-util.jar
- else if (sym.isTerm) { SignatureChecker checkFieldSignature sig }
- else { SignatureChecker checkClassSignature sig }
+ import scala.tools.asm.util.CheckClassAdapter
+ if (sym.isMethod) { CheckClassAdapter checkMethodSignature sig } // requires asm-util.jar
+ else if (sym.isTerm) { CheckClassAdapter checkFieldSignature sig }
+ else { CheckClassAdapter checkClassSignature sig }
}
if(!isValidSignature) {
@@ -846,7 +863,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
if ((settings.check containsName phaseName)) {
- val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
+ val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe))
val bytecodeTpe = owner.thisType.memberInfo(sym)
if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
getCurrentCUnit().warning(sym.pos,
@@ -865,9 +882,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
- val ca = new Array[Char](bytes.size)
+ val ca = new Array[Char](bytes.length)
var idx = 0
- while(idx < bytes.size) {
+ while(idx < bytes.length) {
val b: Byte = bytes(idx)
assert((b & ~0x7f) == 0)
ca(idx) = b.asInstanceOf[Char]
@@ -884,7 +901,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
var prevOffset = 0
var offset = 0
var encLength = 0
- while(offset < bSeven.size) {
+ while(offset < bSeven.length) {
val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1)
val newEncLength = encLength.toLong + deltaEncLength
if(newEncLength >= 65535) {
@@ -918,7 +935,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def emitArgument(av: asm.AnnotationVisitor,
name: String,
arg: ClassfileAnnotArg) {
- arg match {
+ (arg: @unchecked) match {
case LiteralAnnotArg(const) =>
if(const.isNonUnitAnyVal) { av.visit(name, const.value) }
@@ -998,7 +1015,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]) {
val annotationss = pannotss map (_ filter shouldEmitAnnotation)
if (annotationss forall (_.isEmpty)) return
- for (Pair(annots, idx) <- annotationss.zipWithIndex;
+ for ((annots, idx) <- annotationss.zipWithIndex;
annot <- annots) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
@@ -1036,9 +1053,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val paramJavaTypes: List[asm.Type] = methodInfo.paramTypes map javaType
// val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
- /** Forwarders must not be marked final,
- * as the JVM will not allow redefinition of a final static method,
- * and we don't know what classes might be subclassing the companion class. See SI-4827.
+ /* Forwarders must not be marked final,
+ * as the JVM will not allow redefinition of a final static method,
+ * and we don't know what classes might be subclassing the companion class. See SI-4827.
*/
// TODO: evaluate the other flags we might be dropping on the floor here.
// TODO: ACC_SYNTHETIC ?
@@ -1101,7 +1118,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
debuglog("Dumping mirror class for object: " + moduleClass)
val linkedClass = moduleClass.companionClass
- val linkedModule = linkedClass.companionSymbol
lazy val conflictingNames: Set[Name] = {
(linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet
}
@@ -1109,13 +1125,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
- debuglog("No forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+ debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
else if (conflictingNames(m.name))
- log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
+ log(s"No forwarder for $m due to conflict with " + linkedClass.info.member(m.name))
else if (m.hasAccessBoundary)
log(s"No forwarder for non-public member $m")
else {
- log("Adding static forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+ debuglog(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'")
addForwarder(isRemoteClass, jclass, moduleClass, m)
}
}
@@ -1127,16 +1143,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
trait JAndroidBuilder {
self: JPlainBuilder =>
- /** From the reference documentation of the Android SDK:
- * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
- * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
- * which is an object implementing the `Parcelable.Creator` interface.
- */
- private val androidFieldName = newTermName("CREATOR")
-
- private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
- private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
def isAndroidParcelableClass(sym: Symbol) =
(AndroidParcelableInterface != NoSymbol) &&
(sym.parentSymbols contains AndroidParcelableInterface)
@@ -1144,13 +1150,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/* Typestate: should be called before emitting fields (because it adds an IField to the current IClass). */
def addCreatorCode(block: BasicBlock) {
val fieldSymbol = (
- clasz.symbol.newValue(newTermName(androidFieldName), NoPosition, Flags.STATIC | Flags.FINAL)
+ clasz.symbol.newValue(androidFieldName, NoPosition, Flags.STATIC | Flags.FINAL)
setInfo AndroidCreatorClass.tpe
)
val methodSymbol = definitions.getMember(clasz.symbol.companionModule, androidFieldName)
clasz addField new IField(fieldSymbol)
- block emit CALL_METHOD(methodSymbol, Static(false))
- block emit STORE_FIELD(fieldSymbol, true)
+ block emit CALL_METHOD(methodSymbol, Static(onInstance = false))
+ block emit STORE_FIELD(fieldSymbol, isStatic = true)
}
def legacyAddCreatorCode(clinit: asm.MethodVisitor) {
@@ -1159,7 +1165,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jclass.visitField(
PublicStaticFinal,
- androidFieldName,
+ androidFieldName.toString,
tdesc_creator,
null, // no java-generic-signature
null // no initial value
@@ -1179,7 +1185,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
clinit.visitMethodInsn(
asm.Opcodes.INVOKEVIRTUAL,
moduleName,
- androidFieldName,
+ androidFieldName.toString,
asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*)
)
@@ -1187,7 +1193,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
clinit.visitFieldInsn(
asm.Opcodes.PUTSTATIC,
thisName,
- androidFieldName,
+ androidFieldName.toString,
tdesc_creator
)
}
@@ -1244,8 +1250,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case class BlockInteval(start: BasicBlock, end: BasicBlock)
/** builder of plain classes */
- class JPlainBuilder(bytecodeWriter: BytecodeWriter)
- extends JCommonBuilder(bytecodeWriter)
+ class JPlainBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean)
+ extends JCommonBuilder(bytecodeWriter, needsOutfile)
with JAndroidBuilder {
val MIN_SWITCH_DENSITY = 0.7
@@ -1268,15 +1274,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
private def getSuperInterfaces(c: IClass): Array[String] = {
// Additional interface parents based on annotations and other cues
- def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
- case SerializableAttr => Some(SerializableClass)
- case CloneableAttr => Some(CloneableClass)
- case RemoteAttr => Some(RemoteInterfaceClass)
- case _ => None
+ def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match {
+ case RemoteAttr => RemoteInterfaceClass
+ case _ => NoSymbol
}
- /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
- * This is important on Android because there is otherwise an interface explosion.
+ /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
+ * This is important on Android because there is otherwise an interface explosion.
*/
def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
var rest = lstIfaces
@@ -1294,8 +1298,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
val ps = c.symbol.info.parents
- val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
- val superInterfaces = (superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol))).distinct
+ val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses
+ val superInterfaces = existingSymbols(superInterfaces0 ++ c.symbol.annotations.map(newParentForAttr)).distinct
if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
else mkArray(minimizeInterfaces(superInterfaces) map javaName)
@@ -1319,7 +1323,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
thisName = javaName(c.symbol) // the internal name of the class being emitted
val ps = c.symbol.info.parents
- val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol);
+ val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol)
val ifaces = getSuperInterfaces(c)
@@ -1366,14 +1370,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (constructor <- c.lookupStaticCtor) {
addStaticInit(Some(constructor))
}
- val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders.value)
+ val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders)
if (!skipStaticForwarders) {
val lmoc = c.symbol.companionModule
// add static forwarders if there are no name conflicts; see bugs #363 and #1735
if (lmoc != NoSymbol) {
// it must be a top level class (name contains no $s)
val isCandidateForForwarders = {
- afterPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+ exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
}
if (isCandidateForForwarders) {
log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
@@ -1433,7 +1437,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
assert(enclClass.isClass, enclClass)
val sym = enclClass.primaryConstructor
if (sym == NoSymbol) {
- log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass, clazz))
+ log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass))
} else {
debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
res = EnclMethodEntry(javaName(enclClass), javaName(sym), javaType(sym))
@@ -1478,12 +1482,17 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
sym.owner.isSynthetic &&
sym.owner.tpe.parents.exists { t =>
val TypeRef(_, sym, _) = t
- FunctionClass contains sym
+ FunctionClass.seq contains sym
}
}
if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
+ if (m.params.size > MaximumJvmParameters) {
+ getCurrentCUnit().error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+ return
+ }
+
debuglog("Generating method " + m.symbol.fullName)
method = m
computeLocalVarsIndex(m)
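The MaximumJvmParameters check above is a conservative count of parameters; the underlying JVM restriction is 255 argument slots per method descriptor, where the receiver of an instance method occupies one slot and long/double parameters occupy two. A hypothetical helper that counts slots the way the spec does (name and signature are illustrative, not compiler API):

def argSlots(paramDescriptors: Seq[String], isStatic: Boolean): Int = {
  val receiver = if (isStatic) 0 else 1
  receiver + paramDescriptors.map {
    case "J" | "D" => 2   // long and double occupy two slots each
    case _         => 1
  }.sum
}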
@@ -1608,19 +1617,20 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (isStaticModule(clasz.symbol)) {
// call object's private ctor from static ctor
lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
- lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
+ lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(onInstance = true))
}
if (isParcelableClass) { addCreatorCode(lastBlock) }
lastBlock emit RETURN(UNIT)
- lastBlock.close
+ lastBlock.close()
- method = m
+ method = m
jmethod = clinitMethod
jMethodName = CLASS_CONSTRUCTOR_NAME
jmethod.visitCode()
- genCode(m, false, true)
+ computeLocalVarsIndex(m)
+ genCode(m, emitVars = false, isStatic = true)
jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
jmethod.visitEnd()
@@ -1655,8 +1665,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case BooleanTag => jcode.boolconst(const.booleanValue)
- case ByteTag => jcode.iconst(const.byteValue)
- case ShortTag => jcode.iconst(const.shortValue)
+ case ByteTag => jcode.iconst(const.byteValue.toInt)
+ case ShortTag => jcode.iconst(const.shortValue.toInt)
case CharTag => jcode.iconst(const.charValue)
case IntTag => jcode.iconst(const.intValue)
@@ -1676,7 +1686,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val kind = toTypeKind(const.typeValue)
val toPush: asm.Type =
if (kind.isValueType) classLiteral(kind)
- else javaType(kind);
+ else javaType(kind)
mv.visitLdcInsn(toPush)
case EnumTag =>
@@ -1699,15 +1709,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
*/
object jcode {
- import asm.Opcodes;
-
- def aconst(cst: AnyRef) {
- if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) }
- else { jmethod.visitLdcInsn(cst) }
- }
+ import asm.Opcodes
final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
+ def iconst(cst: Char) { iconst(cst.toInt) }
def iconst(cst: Int) {
if (cst >= -1 && cst <= 5) {
jmethod.visitInsn(Opcodes.ICONST_0 + cst)
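The hunk cuts jcode.iconst off after the ICONST_n fast path. The usual shape of the rest of the decision, picking the smallest instruction that can push a given int, is sketched below as an assumption about the omitted remainder rather than a quote of it:

def pushIntInstr(cst: Int): String =
  if (cst >= -1 && cst <= 5)                               s"ICONST_$cst"
  else if (cst >= Byte.MinValue && cst <= Byte.MaxValue)   "BIPUSH"
  else if (cst >= Short.MinValue && cst <= Short.MaxValue) "SIPUSH"
  else                                                     "LDC"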
@@ -1799,8 +1805,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
- def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
- def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+ def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF(), label) }
+ def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) }
def emitIF_ACMP(cond: TestOp, label: asm.Label) {
assert((cond == EQ) || (cond == NE), cond)
val opc = (if(cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
@@ -1856,9 +1862,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val keyMax = keys(keys.length - 1)
val isDenseEnough: Boolean = {
- /** Calculate in long to guard against overflow. TODO what overflow??? */
+ /* Calculate in long to guard against overflow. TODO what overflow??? */
val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
- val klenD: Double = keys.length
+ val klenD: Double = keys.length.toDouble
val kdensity: Double = (klenD / keyRangeD)
kdensity >= minDensity
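isDenseEnough chooses between a tableswitch (dense key range, with holes jumping to the default label) and a lookupswitch; the density is keys.length divided by the span (keyMax - keyMin + 1), computed in Long to avoid overflow. A compact restatement of that decision as a sketch:

def isDenseEnough(sortedKeys: Array[Int], minDensity: Double): Boolean = {
  val keyRange = sortedKeys.last.toLong - sortedKeys.head + 1   // span computed in Long
  sortedKeys.length.toDouble / keyRange >= minDensity
}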
@@ -1868,10 +1874,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// use a table in which holes are filled with defaultBranch.
val keyRange = (keyMax - keyMin + 1)
val newBranches = new Array[asm.Label](keyRange)
- var oldPos = 0;
+ var oldPos = 0
var i = 0
while(i < keyRange) {
- val key = keyMin + i;
+ val key = keyMin + i
if (keys(oldPos) == key) {
newBranches(i) = branches(oldPos)
oldPos += 1
@@ -1991,7 +1997,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// Part 2 of genCode(): demarcating exception handler boundaries (visitTryCatchBlock() must be invoked before visitLabel() in genBlock())
// ------------------------------------------------------------------------------------------------------------
- /**Generate exception handlers for the current method.
+ /* Generate exception handlers for the current method.
*
* Quoting from the JVMS 4.7.3 The Code Attribute
* The items of the Code_attribute structure are as follows:
@@ -2014,16 +2020,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
*/
def genExceptionHandlers() {
- /** Return a list of pairs of intervals where the handler is active.
- * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end].
- * Preconditions:
- * - e.covered non-empty
- * Postconditions for the result:
- * - always non-empty
- * - intervals are sorted as per `linearization`
- * - the argument's `covered` blocks have been grouped into maximally contiguous intervals,
- * ie. between any two intervals in the result there is a non-empty gap.
- * - each of the `covered` blocks in the argument is contained in some interval in the result
+ /* Return a list of pairs of intervals where the handler is active.
+ * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end].
+ * Preconditions:
+ * - e.covered non-empty
+ * Postconditions for the result:
+ * - always non-empty
+ * - intervals are sorted as per `linearization`
+ * - the argument's `covered` blocks have been grouped into maximally contiguous intervals,
+ * ie. between any two intervals in the result there is a non-empty gap.
+ * - each of the `covered` blocks in the argument is contained in some interval in the result
*/
def intervals(e: ExceptionHandler): List[BlockInteval] = {
assert(e.covered.nonEmpty, e)
@@ -2070,7 +2076,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// TODO in that case, ExceptionHandler.cls doesn't go through javaName(). What if cls is an inner class?
for (e <- this.method.exh ; if e.covered.nonEmpty ; p <- intervals(e)) {
debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p.start + " to: " + p.end + " catching: " + e.cls);
+ " from: " + p.start + " to: " + p.end + " catching: " + e.cls)
val cls: String = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
else javaName(e.cls)
jmethod.visitTryCatchBlock(labels(p.start), linNext(p.end), labels(e.startBlock), cls)
@@ -2094,8 +2100,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def overlaps(that: Interval): Boolean = { !(this.precedes(that) || that.precedes(this)) }
def mergeWith(that: Interval): Interval = {
- val newStart = if(this.start <= that.start) this.lstart else that.lstart;
- val newEnd = if(this.end <= that.end) that.lend else this.lend;
+ val newStart = if(this.start <= that.start) this.lstart else that.lstart
+ val newEnd = if(this.end <= that.end) that.lend else this.lend
Interval(newStart, newEnd)
}
@@ -2151,7 +2157,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def getMerged(): scala.collection.Map[Local, List[Interval]] = {
// TODO should but isn't: unbalanced start(s) of scope(s)
- val shouldBeEmpty = pending filter { p => val Pair(k, st) = p; st.nonEmpty };
+ val shouldBeEmpty = pending filter { p => val (_, st) = p; st.nonEmpty }
val merged = mutable.Map[Local, List[Interval]]()
def addToMerged(lv: Local, start: Label, end: Label) {
val intv = Interval(start, end)
@@ -2164,15 +2170,15 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
(b) take the latest end (onePastLast if none available)
(c) merge the thus made-up interval
*/
- for(Pair(k, st) <- shouldBeEmpty) {
+ for((k, st) <- shouldBeEmpty) {
var start = st.toList.sortBy(_.getOffset).head
if(merged.isDefinedAt(k)) {
val balancedStart = merged(k).head.lstart
if(balancedStart.getOffset < start.getOffset) {
- start = balancedStart;
+ start = balancedStart
}
}
- val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend;
+ val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend
val end = endOpt.getOrElse(onePastLast)
addToMerged(k, start, end)
}
@@ -2201,25 +2207,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
// adding non-param locals
var anonCounter = 0
- var fltnd: List[Triple[String, Local, Interval]] = Nil
- for(Pair(local, ranges) <- scoping.getMerged()) {
+ var fltnd: List[Tuple3[String, Local, Interval]] = Nil
+ for((local, ranges) <- scoping.getMerged()) {
var name = javaName(local.sym)
if (name == null) {
- anonCounter += 1;
+ anonCounter += 1
name = "<anon" + anonCounter + ">"
}
for(intrvl <- ranges) {
- fltnd ::= Triple(name, local, intrvl)
+ fltnd ::= (name, local, intrvl)
}
}
// quest for deterministic output that Map.toList doesn't provide (so that ant test.stability doesn't complain).
val srtd = fltnd.sortBy { kr =>
- val Triple(name: String, local: Local, intrvl: Interval) = kr
+ val (name: String, _, intrvl: Interval) = kr
- Triple(intrvl.start, intrvl.end - intrvl.start, name) // ie sort by (start, length, name)
+ (intrvl.start, intrvl.end - intrvl.start, name) // ie sort by (start, length, name)
}
- for(Triple(name, local, Interval(start, end)) <- srtd) {
+ for((name, local, Interval(start, end)) <- srtd) {
jmethod.visitLocalVariable(name, descriptor(local.kind), null, start, end, indexOf(local))
}
// "There may be no more than one LocalVariableTable attribute per local variable in the Code attribute"
@@ -2245,13 +2251,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
}
- def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
- target.isPublic || target.isProtected && {
- (site.enclClass isSubClass target.enclClass) ||
- (site.enclosingPackage == target.privateWithin)
- }
- } // end of genCode()'s isAccessibleFrom()
-
def genCallMethod(call: CALL_METHOD) {
val CALL_METHOD(method, style) = call
val siteSymbol = clasz.symbol
@@ -2320,7 +2319,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
lastLineNr = currentLineNr
val lineLab = new asm.Label
jmethod.visitLabel(lineLab)
- lnEntries ::= LineNumberEntry(currentLineNr, lineLab)
+ lnEntries ::= LineNumberEntry(iPos.finalPosition.line, lineLab)
}
}
@@ -2334,6 +2333,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
import asm.Opcodes
(instr.category: @scala.annotation.switch) match {
+
case icodes.localsCat =>
def genLocalInstr() = (instr: @unchecked) match {
case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
@@ -2365,15 +2365,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
scoping.popScope(lv, end, instr.pos)
}
}
- genLocalInstr
+ genLocalInstr()
case icodes.stackCat =>
def genStackInstr() = (instr: @unchecked) match {
case LOAD_MODULE(module) =>
// assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
- if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString)
+ def inStaticMethod = this.method != null && this.method.symbol.isStaticMember
+ if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString && !inStaticMethod) {
jmethod.visitVarInsn(Opcodes.ALOAD, 0)
} else {
jmethod.visitFieldInsn(
@@ -2389,7 +2390,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case LOAD_EXCEPTION(_) => ()
}
- genStackInstr
+ genStackInstr()
case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
@@ -2423,11 +2424,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- genCastInstr
+ genCastInstr()
case icodes.objsCat =>
def genObjsInstr() = (instr: @unchecked) match {
-
case BOX(kind) =>
val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
jcode.invokestatic(BoxesRunTime, mname, mdesc)
@@ -2443,14 +2443,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
}
- genObjsInstr
+ genObjsInstr()
case icodes.fldsCat =>
def genFldsInstr() = (instr: @unchecked) match {
case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
+ val owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner + " flags: " + field.owner.flagString)
val fieldJName = javaName(field)
val fieldDescr = descriptor(field)
val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
@@ -2464,12 +2464,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
}
- genFldsInstr
+ genFldsInstr()
case icodes.mthdsCat =>
def genMethodsInstr() = (instr: @unchecked) match {
- /** Special handling to access native Array.clone() */
+ /* Special handling to access native Array.clone() */
case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
val target: String = javaType(call.targetTypeKind).getInternalName
jcode.invokevirtual(target, "clone", mdesc_arrayClone)
@@ -2477,7 +2477,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case call @ CALL_METHOD(method, style) => genCallMethod(call)
}
- genMethodsInstr
+ genMethodsInstr()
case icodes.arraysCat =>
def genArraysInstr() = (instr: @unchecked) match {
@@ -2486,7 +2486,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case CREATE_ARRAY(elem, 1) => jcode newarray elem
case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
}
- genArraysInstr
+ genArraysInstr()
case icodes.jumpsCat =>
def genJumpInstr() = (instr: @unchecked) match {
@@ -2503,7 +2503,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
while (restTagss.nonEmpty) {
val currLabel = labels(restBranches.head)
for (cTag <- restTagss.head) {
- flatKeys(k) = cTag;
+ flatKeys(k) = cTag
flatBranches(k) = currLabel
k += 1
}
@@ -2516,27 +2516,19 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
case JUMP(whereto) =>
- if (nextBlock != whereto) {
+ if (nextBlock != whereto)
jcode goTo labels(whereto)
- } else if (m.exh.exists(eh => eh.covers(b))) {
// SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
// If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
- val isSthgLeft = b.toList.exists {
- case _: LOAD_EXCEPTION => false
- case _: SCOPE_ENTER => false
- case _: SCOPE_EXIT => false
- case _: JUMP => false
- case _ => true
- }
- if (!isSthgLeft) {
- emit(asm.Opcodes.NOP)
- }
+ else if (newNormal.isJumpOnly(b) && m.exh.exists(eh => eh.covers(b))) {
+ debugwarn("Had a jump only block that wasn't collapsed")
+ emit(asm.Opcodes.NOP)
}
case CJUMP(success, failure, cond, kind) =>
if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF_ICMP(cond.negate, labels(failure))
+ jcode.emitIF_ICMP(cond.negate(), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ICMP(cond, labels(success))
@@ -2544,7 +2536,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
} else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
if (nextBlock == success) {
- jcode.emitIF_ACMP(cond.negate, labels(failure))
+ jcode.emitIF_ACMP(cond.negate(), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ACMP(cond, labels(success))
@@ -2561,7 +2553,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
else emit(Opcodes.DCMPL)
}
if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
+ jcode.emitIF(cond.negate(), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF(cond, labels(success))
@@ -2572,7 +2564,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case CZJUMP(success, failure, cond, kind) =>
if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
+ jcode.emitIF(cond.negate(), labels(failure))
} else {
jcode.emitIF(cond, labels(success))
if (nextBlock != failure) { jcode goTo labels(failure) }
@@ -2608,7 +2600,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
else emit(Opcodes.DCMPL)
}
if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
+ jcode.emitIF(cond.negate(), labels(failure))
} else {
jcode.emitIF(cond, labels(success))
if (nextBlock != failure) { jcode goTo labels(failure) }
@@ -2616,26 +2608,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- genJumpInstr
+ genJumpInstr()
case icodes.retCat =>
def genRetInstr() = (instr: @unchecked) match {
case RETURN(kind) => jcode emitRETURN kind
case THROW(_) => emit(Opcodes.ATHROW)
}
- genRetInstr
+ genRetInstr()
}
}
- /**
+ /*
* Emits one or more conversion instructions based on the types given as arguments.
*
* @param from The type of the value to be converted into another type.
* @param to The type the value will be converted into.
*/
def emitT2T(from: TypeKind, to: TypeKind) {
- assert(isNonUnitValueTK(from), from)
- assert(isNonUnitValueTK(to), to)
+ assert(isNonUnitValueTK(from) && isNonUnitValueTK(to), s"Cannot emit primitive conversion from $from to $to")
def pickOne(opcs: Array[Int]) {
val chosen = (to: @unchecked) match {
@@ -2651,10 +2642,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
if(from == to) { return }
- if((from == BOOL) || (to == BOOL)) {
- // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
- throw new Error("inconvertible types : " + from.toString() + " -> " + to.toString())
- }
+ // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
+ assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
if(from.isIntSizedType) { // BYTE, CHAR, SHORT, and INT. (we're done with BOOL already)
@@ -2702,7 +2691,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def genPrimitive(primitive: Primitive, pos: Position) {
- import asm.Opcodes;
+ import asm.Opcodes
primitive match {
@@ -2733,7 +2722,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
abort("Unknown arithmetic primitive " + primitive)
}
}
- genArith
+ genArith()
// TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, as its comments appear to convey)
// TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead.
@@ -2765,7 +2754,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (kind != BOOL) { emitT2T(INT, kind) }
}
}
- genLogical
+ genLogical()
case Shift(op, kind) =>
def genShift() = op match {
@@ -2794,7 +2783,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
emitT2T(INT, kind)
}
}
- genShift
+ genShift()
case Comparison(op, kind) =>
def genCompare() = op match {
@@ -2814,12 +2803,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- genCompare
+ genCompare()
case Conversion(src, dst) =>
debuglog("Converting from: " + src + " to: " + dst)
- if (dst == BOOL) { println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line) }
- else { emitT2T(src, dst) }
+ emitT2T(src, dst)
case ArrayLength(_) => emit(Opcodes.ARRAYLENGTH)
@@ -2868,15 +2856,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
////////////////////// local vars ///////////////////////
- // def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
-
def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
- // def indexOf(m: IMethod, sym: Symbol): Int = {
- // val Some(local) = m lookupLocal sym
- // indexOf(local)
- // }
-
final def indexOf(local: Local): Int = {
assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
local.index
@@ -2887,7 +2868,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* *Does not assume the parameters come first!*
*/
def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1;
+ var idx = if (m.symbol.isStaticMember) 0 else 1
for (l <- m.params) {
debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
@@ -2906,10 +2887,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** builder of mirror classes */
- class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) {
+ class JMirrorBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JCommonBuilder(bytecodeWriter, needsOutfile) {
private var cunit: CompilationUnit = _
- def getCurrentCUnit(): CompilationUnit = cunit;
+ def getCurrentCUnit(): CompilationUnit = cunit
/** Generate a mirror class for a top-level module. A mirror class is a class
* containing only static methods that forward to the corresponding method
@@ -2931,7 +2912,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
JAVA_LANG_OBJECT.getInternalName,
EMPTY_STRING_ARRAY)
- log("Dumping mirror class for '%s'".format(mirrorName))
+ log(s"Dumping mirror class for '$mirrorName'")
// typestate: entering mode with valid call sequences:
// [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
@@ -2954,13 +2935,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
mirrorClass.visitEnd()
writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym)
}
-
-
} // end of class JMirrorBuilder
/** builder of bean info classes */
- class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+ class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
/**
* Generate a bean info class that describes the given class.
@@ -3002,8 +2981,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (f <- clasz.fields if f.symbol.hasGetter;
g = f.symbol.getter(clasz.symbol);
- s = f.symbol.setter(clasz.symbol);
- if g.isPublic && !(f.symbol.name startsWith "$")
+ s = f.symbol.setter(clasz.symbol)
+ if g.isPublic && !(f.symbol.name startsWith "$")
) {
// inserting $outer breaks the bean
fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
@@ -3092,111 +3071,50 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them).
*/
object newNormal {
-
- def startsWithJump(b: BasicBlock): Boolean = { assert(b.nonEmpty, "empty block"); b.firstInstruction.isInstanceOf[JUMP] }
-
- /** Prune from an exception handler those covered blocks which are jump-only. */
- private def coverWhatCountsOnly(m: IMethod): Boolean = {
- assert(m.hasCode, "code-less method")
-
- var wasReduced = false
- for(h <- m.exh) {
- val shouldntCover = (h.covered filter startsWithJump)
- if(shouldntCover.nonEmpty) {
- wasReduced = true
- h.covered --= shouldntCover // not removing any block on purpose.
- }
- }
-
- wasReduced
+ /**
+ * True if a block is "jump only", i.e. it consists of zero or more
+ * instructions that won't make it to the JVM, followed by a JUMP.
+ */
+ def isJumpOnly(b: BasicBlock): Boolean = {
+ val nonICode = firstNonIcodeOnlyInstructions(b)
+ // by definition a block has to have a jump, conditional jump, return, or throw
+ assert(nonICode.hasNext, "empty block")
+ nonICode.next.isInstanceOf[JUMP]
}
- /** An exception handler is pruned provided any of the following holds:
- * (1) it covers nothing (for example, this may result after removing unreachable blocks)
- * (2) each block it covers is of the form: JUMP(_)
- * Return true iff one or more ExceptionHandlers were removed.
- *
- * A caveat: removing an exception handler, for whatever reason, means that its handler code (even if unreachable)
- * won't be able to cause a class-loading-exception. As a result, behavior can be different.
+ /**
+ * Returns an iterator over the instructions in a block that follow all ICode-only instructions,
+ * where an ICode-only instruction is one that won't make it to the JVM.
*/
- private def elimNonCoveringExh(m: IMethod): Boolean = {
- assert(m.hasCode, "code-less method")
-
- def isRedundant(eh: ExceptionHandler): Boolean = {
- (eh.cls != NoSymbol) && ( // TODO `eh.isFinallyBlock` more readable than `eh.cls != NoSymbol`
- eh.covered.isEmpty
- || (eh.covered forall startsWithJump)
- )
- }
-
- var wasReduced = false
- val toPrune = (m.exh.toSet filter isRedundant)
- if(toPrune.nonEmpty) {
- wasReduced = true
- for(h <- toPrune; r <- h.blocks) { m.code.removeBlock(r) } // TODO m.code.removeExh(h)
- m.exh = (m.exh filterNot toPrune)
- }
-
- wasReduced
+ private def firstNonIcodeOnlyInstructions(b: BasicBlock): Iterator[Instruction] = {
+ def isICodeOnlyInstruction(i: Instruction) = i match {
+ case LOAD_EXCEPTION(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) => true
+ case _ => false
+ }
+ b.iterator dropWhile isICodeOnlyInstruction
}
- private def isJumpOnly(b: BasicBlock): Option[BasicBlock] = {
- b.toList match {
- case JUMP(whereto) :: rest =>
- assert(rest.isEmpty, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
+ /**
+ * Returns the target of a block that is "jump only", i.e. a block that consists
+ * of zero or more instructions that won't make it to the JVM, followed by a JUMP.
+ *
+ * @param b The basic block to examine
+ * @return Some(target) if b is a "jump only" block or None if it's not
+ */
+ private def getJumpOnlyTarget(b: BasicBlock): Option[BasicBlock] = {
+ val nonICode = firstNonIcodeOnlyInstructions(b)
+ // by definition a block has to have a jump, conditional jump, return, or throw
+ assert(nonICode.nonEmpty, "empty block")
+ nonICode.next match {
+ case JUMP(whereto) =>
+ assert(!nonICode.hasNext, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
Some(whereto)
case _ => None
}
}
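The two helpers above amount to a dropWhile over a block's instructions followed by a pattern match on the first "real" one. A minimal standalone sketch of the same idea, using toy instruction types (Instr, ScopeEnter, Jump and Other below are illustrative stand-ins, not the compiler's ICode opcodes):

object JumpOnlySketch {
  sealed trait Instr
  case object ScopeEnter extends Instr          // stand-in for SCOPE_ENTER / SCOPE_EXIT / LOAD_EXCEPTION
  final case class Jump(target: Int) extends Instr
  final case class Other(name: String) extends Instr

  // an instruction that never makes it to the JVM
  private def isICodeOnly(i: Instr): Boolean = i == ScopeEnter

  // the first instruction that will actually be emitted, if any
  private def firstRealInstr(block: List[Instr]): Option[Instr] =
    block.dropWhile(isICodeOnly).headOption

  // Some(target) when the block is "jump only", None otherwise
  def jumpOnlyTarget(block: List[Instr]): Option[Int] =
    firstRealInstr(block) collect { case Jump(t) => t }

  def main(args: Array[String]): Unit = {
    println(jumpOnlyTarget(List(ScopeEnter, Jump(3))))    // Some(3)
    println(jumpOnlyTarget(List(Other("load"), Jump(3)))) // None: the block does real work first
  }
}

The real getJumpOnlyTarget additionally asserts that nothing follows the JUMP.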
- private def directSuccStar(b: BasicBlock): List[BasicBlock] = { directSuccStar(List(b)) }
-
- /** Transitive closure of successors potentially reachable due to normal (non-exceptional) control flow.
- Those BBs in the argument are also included in the result */
- private def directSuccStar(starters: Traversable[BasicBlock]): List[BasicBlock] = {
- val result = new mutable.ListBuffer[BasicBlock]
- var toVisit: List[BasicBlock] = starters.toList.distinct
- while(toVisit.nonEmpty) {
- val h = toVisit.head
- toVisit = toVisit.tail
- result += h
- for(p <- h.directSuccessors; if !result.contains(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
- }
- result.toList
- }
-
- /** Returns:
- * for single-block self-loops, the pair (start, Nil)
- * for other cycles, the pair (backedge-target, basic-blocks-in-the-cycle-except-backedge-target)
- * otherwise a pair consisting of:
- * (a) the endpoint of a (single or multi-hop) chain of JUMPs
- * (such endpoint does not start with a JUMP and therefore is not part of the chain); and
- * (b) the chain (ie blocks to be removed when collapsing the chain of jumps).
- * Precondition: the BasicBlock given as argument starts with an unconditional JUMP.
- */
- private def finalDestination(start: BasicBlock): (BasicBlock, List[BasicBlock]) = {
- assert(startsWithJump(start), "not the start of a (single or multi-hop) chain of JUMPs.")
- var hops: List[BasicBlock] = Nil
- var prev = start
- var done = false
- do {
- done = isJumpOnly(prev) match {
- case Some(dest) =>
- if (dest == start) { return (start, hops) } // leave infinite-loops in place
- hops ::= prev
- if (hops.contains(dest)) {
- // leave infinite-loops in place
- return (dest, hops filterNot (dest eq _))
- }
- prev = dest;
- false
- case None => true
- }
- } while(!done)
-
- (prev, hops)
- }
-
/**
* Collapse a chain of "jump-only" blocks such as:
*
@@ -3212,7 +3130,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* In more detail:
* Starting at each of the entry points (m.startBlock, the start block of each exception handler)
* rephrase those control-flow instructions targeting a jump-only block (which jumps to a final destination D) to target D.
- * The blocks thus skipped are also removed from IMethod.blocks.
+ * The blocks thus skipped become eligible to be removed by the reachability analyzer.
*
* Rationale for this normalization:
* test/files/run/private-inline.scala after -optimize is chock full of
@@ -3223,106 +3141,164 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* and thus ranges with identical (start, end) (i.e, identical after GenJVM omitted the JUMPs in question)
* could be weeded out to avoid "java.lang.ClassFormatError: Illegal exception table range"
* Now that visitTryCatchBlock() must be called before Labels are resolved,
- * this method gets rid of the BasicBlocks described above (to recap, consisting of just a JUMP).
+ * this method renders the BasicBlocks described above (to recap, consisting of just a JUMP) unreachable.
*/
- private def collapseJumpOnlyBlocks(m: IMethod): Boolean = {
+ private def collapseJumpOnlyBlocks(m: IMethod) {
assert(m.hasCode, "code-less method")
- /* "start" is relative in a cycle, but we call this helper with the "first" entry-point we found. */
- def realTarget(jumpStart: BasicBlock): Map[BasicBlock, BasicBlock] = {
- assert(startsWithJump(jumpStart), "not part of a jump-chain")
- val Pair(dest, redundants) = finalDestination(jumpStart)
- (for(skipOver <- redundants) yield Pair(skipOver, dest)).toMap
- }
+ def rephraseGotos(detour: mutable.Map[BasicBlock, BasicBlock]) {
+ def lookup(b: BasicBlock) = detour.getOrElse(b, b)
- def rephraseGotos(detour: Map[BasicBlock, BasicBlock]) {
- for(Pair(oldTarget, newTarget) <- detour.iterator) {
- if(m.startBlock == oldTarget) {
- m.code.startBlock = newTarget
- }
- for(eh <- m.exh; if eh.startBlock == oldTarget) {
- eh.setStartBlock(newTarget)
- }
- for(b <- m.blocks; if !detour.isDefinedAt(b)) {
- val idxLast = (b.size - 1)
- b.lastInstruction match {
- case JUMP(whereto) =>
- if (whereto == oldTarget) {
- b.replaceInstruction(idxLast, JUMP(newTarget))
- }
- case CJUMP(succ, fail, cond, kind) =>
- if ((succ == oldTarget) || (fail == oldTarget)) {
- b.replaceInstruction(idxLast, CJUMP(detour.getOrElse(succ, succ),
- detour.getOrElse(fail, fail),
- cond, kind))
- }
- case CZJUMP(succ, fail, cond, kind) =>
- if ((succ == oldTarget) || (fail == oldTarget)) {
- b.replaceInstruction(idxLast, CZJUMP(detour.getOrElse(succ, succ),
- detour.getOrElse(fail, fail),
- cond, kind))
- }
- case SWITCH(tags, labels) =>
- if(labels exists (detour.isDefinedAt(_))) {
- val newLabels = (labels map { lab => detour.getOrElse(lab, lab) })
- b.replaceInstruction(idxLast, SWITCH(tags, newLabels))
- }
- case _ => ()
- }
- }
+ m.code.startBlock = lookup(m.code.startBlock)
+
+ for(eh <- m.exh)
+ eh.setStartBlock(lookup(eh.startBlock))
+
+ for (b <- m.blocks) {
+ def replaceLastInstruction(i: Instruction) = {
+ if (b.lastInstruction != i) {
+ val idxLast = b.size - 1
+ debuglog(s"In block $b, replacing last instruction ${b.lastInstruction} with ${i}")
+ b.replaceInstruction(idxLast, i)
}
}
- /* remove from all containers that may contain a reference to */
- def elide(redu: BasicBlock) {
- assert(m.startBlock != redu, "startBlock should have been re-wired by now")
- m.code.removeBlock(redu);
+ b.lastInstruction match {
+ case JUMP(whereto) =>
+ replaceLastInstruction(JUMP(lookup(whereto)))
+ case CJUMP(succ, fail, cond, kind) =>
+ replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind))
+ case CZJUMP(succ, fail, cond, kind) =>
+ replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind))
+ case SWITCH(tags, labels) =>
+ val newLabels = (labels map lookup)
+ replaceLastInstruction(SWITCH(tags, newLabels))
+ case _ => ()
}
+ }
+ }
- var wasReduced = false
- val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock));
-
- var elided = mutable.Set.empty[BasicBlock] // debug
- var newTargets = mutable.Set.empty[BasicBlock] // debug
-
- for (ep <- entryPoints) {
- var reachable = directSuccStar(ep) // this list may contain blocks belonging to jump-chains that we'll skip over
- while(reachable.nonEmpty) {
- val h = reachable.head
- reachable = reachable.tail
- if(startsWithJump(h)) {
- val detour = realTarget(h)
- if(detour.nonEmpty) {
- wasReduced = true
- reachable = (reachable filterNot (detour.keySet.contains(_)))
- rephraseGotos(detour)
- detour.keySet foreach elide
- elided ++= detour.keySet
- newTargets ++= detour.values
- }
+ /*
+ * Computes a mapping from each jump-only block to its
+ * final destination, which is either a non-jump-only
+ * block or, if the block is part of a jump-only cycle,
+ * the block itself.
+ */
+ def computeDetour: mutable.Map[BasicBlock, BasicBlock] = {
+ // fetch the jump only blocks and their immediate destinations
+ val pairs = for {
+ block <- m.blocks.toIterator
+ target <- getJumpOnlyTarget(block)
+ } yield(block, target)
+
+ // mapping from a jump-only block to our current knowledge of its
+ // final destination. Initially each jump-only block maps to its
+ // immediate jump target
+ val detour = mutable.Map[BasicBlock, BasicBlock](pairs.toSeq:_*)
+
+ // for each jump-only block find its final destination
+ // taking advantage of the destinations we found for previous
+ // blocks
+ for (key <- detour.keySet) {
+ // we use Robert Floyd's classic tortoise-and-hare cycle-detection algorithm
+ @tailrec
+ def findDestination(tortoise: BasicBlock, hare: BasicBlock): BasicBlock = {
+ if (tortoise == hare)
+ // cycle detected, map key to key
+ key
+ else if (detour contains hare) {
+ // advance hare once
+ val hare1 = detour(hare)
+ // make sure we can advance hare a second time
+ if (detour contains hare1)
+ // advance tortoise once and hare a second time
+ findDestination(detour(tortoise), detour(hare1))
+ else
+ // hare1 is not in the map so it's not a jump-only block, it's the destination
+ hare1
+ } else
+ // hare is not in the map so it's not a jump-only block, it's the destination
+ hare
}
+ // update the mapping for key based on its final destination
+ detour(key) = findDestination(key, detour(key))
}
+ detour
}
- assert(newTargets.intersect(elided).isEmpty, "contradiction: we just elided the final destionation of a jump-chain")
- wasReduced
+ val detour = computeDetour
+ rephraseGotos(detour)
+
+ if (settings.debug) {
+ val (remappings, cycles) = detour partition {case (source, target) => source != target}
+ for ((source, target) <- remappings) {
+ debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.")
+ if (m.startBlock == source) debugwarn("startBlock should have been re-wired by now")
+ }
+ val sources = remappings.keySet
+ val targets = remappings.values.toSet
+ val intersection = sources intersect targets
+
+ if (intersection.nonEmpty) debugwarn(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}")
+
+ for ((source, _) <- cycles) {
+ debuglog(s"Block $source is in a do-nothing infinite loop. Did the user write 'while(true){}'?")
+ }
+ }
+ }
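The detour computation above is cycle-safe pointer chasing over the jump-only blocks. A self-contained sketch of the same tortoise-and-hare scheme over a plain Map[Int, Int], using block ids instead of BasicBlocks (names and example values are illustrative):

import scala.annotation.tailrec
import scala.collection.mutable

object DetourSketch {
  /** For every jump-only block (a key of `immediate`), find its final non-jump-only
   *  destination, or map the block to itself if it sits in a jump-only cycle. */
  def computeDetour(immediate: Map[Int, Int]): Map[Int, Int] = {
    val detour = mutable.Map(immediate.toSeq: _*)
    for (key <- detour.keySet) {
      @tailrec
      def find(tortoise: Int, hare: Int): Int =
        if (tortoise == hare) key                  // cycle detected: the block keeps pointing at itself
        else if (!(detour contains hare)) hare     // hare left the jump-only chain: that's the destination
        else {
          val hare1 = detour(hare)
          if (detour contains hare1) find(detour(tortoise), detour(hare1))
          else hare1
        }
      detour(key) = find(key, detour(key))
    }
    detour.toMap
  }

  def main(args: Array[String]): Unit = {
    // 1 -> 2 -> 3 -> 7 where 7 is not jump-only; 4 -> 5 -> 4 is a jump-only cycle
    println(computeDetour(Map(1 -> 2, 2 -> 3, 3 -> 7, 4 -> 5, 5 -> 4)))
    // maps 1, 2 and 3 to 7, and the cycle members 4 and 5 to themselves
  }
}

Blocks whose final destination differs from themselves are exactly the ones rephraseGotos routes around.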
+
+ /**
+ * Removes all blocks that are unreachable in a method using a standard reachability analysis.
+ */
+ def elimUnreachableBlocks(m: IMethod) {
+ assert(m.hasCode, "code-less method")
+
+ // assume nothing is reachable until we prove it can be reached
+ val reachable = mutable.Set[BasicBlock]()
+
+ // the set of blocks that we know are reachable but have
+ // yet to be marked reachable, initially only the start block
+ val worklist = mutable.Set(m.startBlock)
+
+ while (worklist.nonEmpty) {
+ val block = worklist.head
+ worklist remove block
+ // we know that one is reachable
+ reachable add block
+ // so are its successors, so go back around and add the ones we still
+ // think are unreachable
+ worklist ++= (block.successors filterNot reachable)
+ }
+
+ // exception handlers need to be told not to cover unreachable blocks
+ // and exception handlers that no longer cover any blocks need to be
+ // removed entirely
+ val unusedExceptionHandlers = mutable.Set[ExceptionHandler]()
+ for (exh <- m.exh) {
+ exh.covered = exh.covered filter reachable
+ if (exh.covered.isEmpty) {
+ unusedExceptionHandlers += exh
+ }
+ }
+
+ // remove the unused exception handler references
+ if (settings.debug)
+ for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks")
+ m.exh = m.exh filterNot unusedExceptionHandlers
+
+ // everything not in the reachable set is unreachable, unused, and unloved. buh bye
+ for (b <- m.blocks filterNot reachable) {
+ debuglog(s"eliding block $b because it is unreachable")
+ m.code removeBlock b
+ }
}
def normalize(m: IMethod) {
if(!m.hasCode) { return }
collapseJumpOnlyBlocks(m)
- var wasReduced = false;
- do {
- wasReduced = false
- // Prune from an exception handler those covered blocks which are jump-only.
- wasReduced |= coverWhatCountsOnly(m); icodes.checkValid(m) // TODO should be unnecessary now that collapseJumpOnlyBlocks(m) is in place
- // Prune exception handlers covering nothing.
- wasReduced |= elimNonCoveringExh(m); icodes.checkValid(m)
-
- // TODO see note in genExceptionHandlers about an ExceptionHandler.covered containing dead blocks (newNormal should remove them, but, where do those blocks come from?)
- } while (wasReduced)
-
- // TODO this would be a good time to remove synthetic local vars seeing no use, don't forget to call computeLocalVarsIndex() afterwards.
+ if (settings.optimise)
+ elimUnreachableBlocks(m)
+ icodes checkValid m
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
deleted file mode 100644
index 72b7e35408..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-
-package scala.tools.nsc
-package backend.jvm
-
-import ch.epfl.lamp.fjbg._
-import symtab.Flags
-
-trait GenAndroid {
- self: GenJVM =>
-
- import global._
- import icodes._
- import opcodes._
-
- /** From the reference documentation of the Android SDK:
- * The `Parcelable` interface identifies classes whose instances can be
- * written to and restored from a `Parcel`. Classes implementing the
- * `Parcelable` interface must also have a static field called `CREATOR`,
- * which is an object implementing the `Parcelable.Creator` interface.
- */
- private val fieldName = newTermName("CREATOR")
-
- private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
- private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
- def isAndroidParcelableClass(sym: Symbol) =
- (AndroidParcelableInterface != NoSymbol) &&
- (sym.parentSymbols contains AndroidParcelableInterface)
-
- def addCreatorCode(codegen: BytecodeGenerator, block: BasicBlock) {
- import codegen._
- val fieldSymbol = (
- clasz.symbol.newValue(newTermName(fieldName), NoPosition, Flags.STATIC | Flags.FINAL)
- setInfo AndroidCreatorClass.tpe
- )
- val methodSymbol = definitions.getMember(clasz.symbol.companionModule, fieldName)
- clasz addField new IField(fieldSymbol)
- block emit CALL_METHOD(methodSymbol, Static(false))
- block emit STORE_FIELD(fieldSymbol, true)
- }
-
- def legacyAddCreatorCode(codegen: BytecodeGenerator, clinit: JExtendedCode) {
- import codegen._
- val creatorType = javaType(AndroidCreatorClass)
- jclass.addNewField(PublicStaticFinal,
- fieldName,
- creatorType)
- val moduleName = javaName(clasz.symbol)+"$"
- clinit.emitGETSTATIC(moduleName,
- nme.MODULE_INSTANCE_FIELD.toString,
- new JObjectType(moduleName))
- clinit.emitINVOKEVIRTUAL(moduleName, fieldName,
- new JMethodType(creatorType, Array()))
- clinit.emitPUTSTATIC(jclass.getName(), fieldName, creatorType)
- }
-
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
new file mode 100644
index 0000000000..193100474c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -0,0 +1,381 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ * Prepare in-memory representations of classfiles using the ASM Tree API, and serialize them to disk.
+ *
+ * Three pipelines are at work, each taking work items from a queue dedicated to that pipeline:
+ *
+ * (There's another pipeline so to speak, the one that populates queue-1 by traversing a CompilationUnit until ClassDefs are found,
+ * but the "interesting" pipelines are the ones described below)
+ *
+ * (1) In the first queue, an item consists of a ClassDef along with its arrival position.
+ * This position is needed at the time classfiles are serialized to disk,
+ * so as to emit classfiles in the same order CleanUp handed them over.
+ * As a result, two runs of the compiler on the same files produce jars that are identical on a byte basis.
+ * See `ant test.stability`
+ *
+ * (2) The second queue contains items where a ClassDef has been lowered into:
+ * (a) an optional mirror class,
+ * (b) a plain class, and
+ * (c) an optional bean class.
+ *
+ * (3) The third queue contains items ready for serialization.
+ * It's a priority queue that follows the original arrival order,
+ * so as to emit identical jars on repeated compilation of the same sources.
+ *
+ * Plain, mirror, and bean classes are built respectively by PlainClassBuilder, JMirrorBuilder, and JBeanInfoBuilder.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ * @version 1.0
+ *
+ */
+abstract class GenBCode extends BCodeSyncAndTry {
+ import global._
+
+ val phaseName = "jvm"
+
+ override def newPhase(prev: Phase) = new BCodePhase(prev)
+
+ final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit)
+
+ class BCodePhase(prev: Phase) extends StdPhase(prev) {
+
+ override def name = phaseName
+ override def description = "Generate bytecode from ASTs using the ASM library"
+ override def erasedTypes = true
+
+ private var bytecodeWriter : BytecodeWriter = null
+ private var mirrorCodeGen : JMirrorBuilder = null
+ private var beanInfoCodeGen : JBeanInfoBuilder = null
+
+ /* ---------------- q1 ---------------- */
+
+ case class Item1(arrivalPos: Int, cd: ClassDef, cunit: CompilationUnit) {
+ def isPoison = { arrivalPos == Int.MaxValue }
+ }
+ private val poison1 = Item1(Int.MaxValue, null, null)
+ private val q1 = new java.util.LinkedList[Item1]
+
+ /* ---------------- q2 ---------------- */
+
+ case class Item2(arrivalPos: Int,
+ mirror: asm.tree.ClassNode,
+ plain: asm.tree.ClassNode,
+ bean: asm.tree.ClassNode,
+ outFolder: scala.tools.nsc.io.AbstractFile) {
+ def isPoison = { arrivalPos == Int.MaxValue }
+ }
+
+ private val poison2 = Item2(Int.MaxValue, null, null, null, null)
+ private val q2 = new _root_.java.util.LinkedList[Item2]
+
+ /* ---------------- q3 ---------------- */
+
+ /*
+ * An item of queue-3 (the last queue before serializing to disk) contains three of these
+ * (one for each of mirror, plain, and bean classes).
+ *
+ * @param jclassName internal name of the class
+ * @param jclassBytes bytecode emitted for the class SubItem3 represents
+ */
+ case class SubItem3(
+ jclassName: String,
+ jclassBytes: Array[Byte]
+ )
+
+ case class Item3(arrivalPos: Int,
+ mirror: SubItem3,
+ plain: SubItem3,
+ bean: SubItem3,
+ outFolder: scala.tools.nsc.io.AbstractFile) {
+
+ def isPoison = { arrivalPos == Int.MaxValue }
+ }
+ private val i3comparator = new java.util.Comparator[Item3] {
+ override def compare(a: Item3, b: Item3) = {
+ if (a.arrivalPos < b.arrivalPos) -1
+ else if (a.arrivalPos == b.arrivalPos) 0
+ else 1
+ }
+ }
+ private val poison3 = Item3(Int.MaxValue, null, null, null, null)
+ private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator)
+
+ /*
+ * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, and places them on queue-2
+ */
+ class Worker1(needsOutFolder: Boolean) {
+
+ val caseInsensitively = mutable.Map.empty[String, Symbol]
+
+ def run() {
+ while (true) {
+ val item = q1.poll
+ if (item.isPoison) {
+ q2 add poison2
+ return
+ }
+ else {
+ try { visit(item) }
+ catch {
+ case ex: Throwable =>
+ ex.printStackTrace()
+ error(s"Error while emitting ${item.cunit.source}\n${ex.getMessage}")
+ }
+ }
+ }
+ }
+
+ /*
+ * Checks for duplicate internal names case-insensitively,
+ * builds ASM ClassNodes for mirror, plain, and bean classes;
+ * enqueues them in queue-2.
+ *
+ */
+ def visit(item: Item1) {
+ val Item1(arrivalPos, cd, cunit) = item
+ val claszSymbol = cd.symbol
+
+ // GenASM checks this before classfiles are emitted, https://github.com/scala/scala/commit/e4d1d930693ac75d8eb64c2c3c69f2fc22bec739
+ val lowercaseJavaClassName = claszSymbol.javaClassName.toLowerCase
+ caseInsensitively.get(lowercaseJavaClassName) match {
+ case None =>
+ caseInsensitively.put(lowercaseJavaClassName, claszSymbol)
+ case Some(dupClassSym) =>
+ item.cunit.warning(
+ claszSymbol.pos,
+ s"Class ${claszSymbol.javaClassName} differs only in case from ${dupClassSym.javaClassName}. " +
+ "Such classes will overwrite one another on case-insensitive filesystems."
+ )
+ }
+
+ // -------------- mirror class, if needed --------------
+ val mirrorC =
+ if (isStaticModule(claszSymbol) && isTopLevelModule(claszSymbol)) {
+ if (claszSymbol.companionClass == NoSymbol) {
+ mirrorCodeGen.genMirrorClass(claszSymbol, cunit)
+ } else {
+ log(s"No mirror class for module with linked class: ${claszSymbol.fullName}")
+ null
+ }
+ } else null
+
+ // -------------- "plain" class --------------
+ val pcb = new PlainClassBuilder(cunit)
+ pcb.genPlainClass(cd)
+ val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName, cunit) else null;
+ val plainC = pcb.cnode
+
+ // -------------- bean info class, if needed --------------
+ val beanC =
+ if (claszSymbol hasAnnotation BeanInfoAttr) {
+ beanInfoCodeGen.genBeanInfoClass(
+ claszSymbol, cunit,
+ fieldSymbols(claszSymbol),
+ methodSymbols(cd)
+ )
+ } else null
+
+ // ----------- hand over to pipeline-2
+
+ val item2 =
+ Item2(arrivalPos,
+ mirrorC, plainC, beanC,
+ outF)
+
+ q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done.
+
+ } // end of method visit(Item1)
+
+ } // end of class BCodePhase.Worker1
+
+ /*
+ * Pipeline that takes ClassNodes from queue-2. The unit of work depends on the optimization level:
+ *
+ * (a) no optimization involves:
+ * - converting the plain ClassNode to byte array and placing it on queue-3
+ */
+ class Worker2 {
+
+ def run() {
+ while (true) {
+ val item = q2.poll
+ if (item.isPoison) {
+ q3 add poison3
+ return
+ }
+ else {
+ try { addToQ3(item) }
+ catch {
+ case ex: Throwable =>
+ ex.printStackTrace()
+ error(s"Error while emitting ${item.plain.name}\n${ex.getMessage}")
+ }
+ }
+ }
+ }
+
+ private def addToQ3(item: Item2) {
+
+ def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = {
+ val cw = new CClassWriter(extraProc)
+ cn.accept(cw)
+ cw.toByteArray
+ }
+
+ val Item2(arrivalPos, mirror, plain, bean, outFolder) = item
+
+ val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror))
+ val plainC = SubItem3(plain.name, getByteArray(plain))
+ val beanC = if (bean == null) null else SubItem3(bean.name, getByteArray(bean))
+
+ q3 add Item3(arrivalPos, mirrorC, plainC, beanC, outFolder)
+
+ }
+
+ } // end of class BCodePhase.Worker2
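Worker2's addToQ3 is essentially the stock ASM tree-to-bytes step. A minimal sketch using the standard ASM distribution, assuming org.objectweb.asm is on the classpath (the compiler itself uses its shaded scala.tools.asm copy, and CClassWriter/extraProc are compiler-specific; a plain ClassWriter with COMPUTE_MAXS stands in for them here):

import org.objectweb.asm.{ ClassWriter, Opcodes }
import org.objectweb.asm.tree.ClassNode

object ClassNodeToBytes {
  /** Serialize an ASM tree ClassNode to a classfile byte array. */
  def toBytes(cn: ClassNode): Array[Byte] = {
    // COMPUTE_MAXS recomputes max stack/locals; the compiler's CClassWriter also
    // customises getCommonSuperClass, which this sketch does not reproduce.
    val cw = new ClassWriter(ClassWriter.COMPUTE_MAXS)
    cn.accept(cw)
    cw.toByteArray
  }

  def main(args: Array[String]): Unit = {
    val cn = new ClassNode()
    cn.version   = Opcodes.V1_6
    cn.access    = Opcodes.ACC_PUBLIC
    cn.name      = "demo/Empty"          // hypothetical internal name
    cn.superName = "java/lang/Object"
    println(s"emitted ${toBytes(cn).length} bytes for ${cn.name}")
  }
}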
+
+ var arrivalPos = 0
+
+ /*
+ * A run of the BCodePhase phase comprises:
+ *
+ * (a) set-up steps (most notably supporting maps in `BCodeTypes`,
+ * but also "the" writer where class files in byte-array form go)
+ *
+ * (b) building of ASM ClassNodes, their optimization and serialization.
+ *
+ * (c) tear down (closing the classfile-writer and clearing maps)
+ *
+ */
+ override def run() {
+
+ arrivalPos = 0 // just in case
+ scalaPrimitives.init
+ initBCodeTypes()
+
+ // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated.
+ bytecodeWriter = initBytecodeWriter(cleanup.getEntryPoints)
+ mirrorCodeGen = new JMirrorBuilder
+ beanInfoCodeGen = new JBeanInfoBuilder
+
+ val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+ buildAndSendToDisk(needsOutfileForSymbol)
+
+ // closing output files.
+ bytecodeWriter.close()
+
+ /* TODO Bytecode can be verified (now that all classfiles have been written to disk)
+ *
+ * (1) asm.util.CheckAdapter.verify()
+ * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw)
+ * passing a custom ClassLoader to verify inter-dependent classes.
+ * Alternatively,
+ * - an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool).
+ * - -Xverify:all
+ *
+ * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()`
+ *
+ */
+
+ // clearing maps
+ clearBCodeTypes()
+ }
+
+ /*
+ * Sequentially:
+ * (a) place all ClassDefs in queue-1
+ * (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2
+ * (c) dequeue one at a time from queue-2, convert it to byte-array, place in queue-3
+ * (d) serialize to disk by draining queue-3.
+ */
+ private def buildAndSendToDisk(needsOutFolder: Boolean) {
+
+ feedPipeline1()
+ (new Worker1(needsOutFolder)).run()
+ (new Worker2).run()
+ drainQ3()
+
+ }
+
+ /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */
+ private def feedPipeline1() {
+ super.run()
+ q1 add poison1
+ }
+
+ /* Pipeline that writes classfile representations to disk. */
+ private def drainQ3() {
+
+ def sendToDisk(cfr: SubItem3, outFolder: scala.tools.nsc.io.AbstractFile) {
+ if (cfr != null){
+ val SubItem3(jclassName, jclassBytes) = cfr
+ try {
+ val outFile =
+ if (outFolder == null) null
+ else getFileForClassfile(outFolder, jclassName, ".class")
+ bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, outFile)
+ }
+ catch {
+ case e: FileConflictException =>
+ error(s"error writing $jclassName: ${e.getMessage}")
+ }
+ }
+ }
+
+ var moreComing = true
+ // `expected` denotes the arrivalPos whose Item3 should be serialized next
+ var expected = 0
+
+ while (moreComing) {
+ val incoming = q3.poll
+ moreComing = !incoming.isPoison
+ if (moreComing) {
+ val item = incoming
+ val outFolder = item.outFolder
+ sendToDisk(item.mirror, outFolder)
+ sendToDisk(item.plain, outFolder)
+ sendToDisk(item.bean, outFolder)
+ expected += 1
+ }
+ }
+
+ // we're done
+ assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1")
+ assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2")
+ assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3")
+
+ }
+
+ override def apply(cunit: CompilationUnit): Unit = {
+
+ def gen(tree: Tree) {
+ tree match {
+ case EmptyTree => ()
+ case PackageDef(_, stats) => stats foreach gen
+ case cd: ClassDef =>
+ q1 add Item1(arrivalPos, cd, cunit)
+ arrivalPos += 1
+ }
+ }
+
+ gen(cunit.body)
+ }
+
+ } // end of class BCodePhase
+
+} // end of class GenBCode
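The byte-for-byte reproducibility promised in the file's header comment rests on q3 being a priority queue ordered by arrivalPos. A compact, single-threaded sketch of that ordering idea (the Item type and values below are illustrative, not GenBCode's Item3):

import java.util.{ Comparator, PriorityQueue }

object ArrivalOrderSketch {
  final case class Item(arrivalPos: Int, className: String)

  def main(args: Array[String]): Unit = {
    val byArrival = new Comparator[Item] {
      override def compare(a: Item, b: Item) = Integer.compare(a.arrivalPos, b.arrivalPos)
    }
    val q3 = new PriorityQueue[Item](16, byArrival)

    // earlier pipeline stages may finish classes out of order...
    q3 add Item(2, "C")
    q3 add Item(0, "A")
    q3 add Item(1, "B")

    // ...but draining the priority queue emits them in arrival order,
    // which is what keeps repeated compilations byte-identical.
    while (!q3.isEmpty) {
      val item = q3.poll()
      println(s"writing ${item.className} (arrivalPos=${item.arrivalPos})")
    }
  }
}

GenBCode additionally threads poison items through its queues to signal completion; the sketch omits that.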
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
deleted file mode 100644
index 36b294b289..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ /dev/null
@@ -1,1921 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-package scala.tools.nsc
-package backend.jvm
-
-import java.io.{ByteArrayOutputStream, DataOutputStream, OutputStream }
-import java.nio.ByteBuffer
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
-import scala.tools.nsc.symtab._
-import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
-import scala.reflect.internal.ClassfileConstants._
-import ch.epfl.lamp.fjbg._
-import JAccessFlags._
-import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT }
-import java.util.jar.{ JarEntry, JarOutputStream }
-import scala.tools.nsc.io.AbstractFile
-import scala.language.postfixOps
-
-/** This class ...
- *
- * @author Iulian Dragos
- * @version 1.0
- *
- */
-abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with BytecodeWriters with GenJVMASM {
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions._
-
- val phaseName = "jvm"
-
- /** Create a new phase */
- override def newPhase(p: Phase): Phase = new JvmPhase(p)
-
- /** JVM code generation phase
- */
- class JvmPhase(prev: Phase) extends ICodePhase(prev) {
- def name = phaseName
- override def erasedTypes = true
- def apply(cls: IClass) = sys.error("no implementation")
-
- override def run() {
- // we reinstantiate the bytecode generator at each run, to allow the GC
- // to collect everything
- if (settings.debug.value)
- inform("[running phase " + name + " on icode]")
-
- if (settings.Xdce.value)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
- log(s"Optimizer eliminated ${sym.fullNameString}")
- icodes.classes -= sym
- }
-
- // For predictably ordered error messages.
- val sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
- val entryPoints = sortedClasses filter isJavaEntryPoint
-
- val bytecodeWriter = settings.outputDirs.getSingleOutput match {
- case Some(f) if f hasExtension "jar" =>
- // If no main class was specified, see if there's only one
- // entry point among the classes going into the jar.
- if (settings.mainClass.isDefault) {
- entryPoints map (_.symbol fullName '.') match {
- case Nil =>
- log("No Main-Class designated or discovered.")
- case name :: Nil =>
- log("Unique entry point: setting Main-Class to " + name)
- settings.mainClass.value = name
- case names =>
- log("No Main-Class due to multiple entry points:\n " + names.mkString("\n "))
- }
- }
- else log("Main-Class was specified: " + settings.mainClass.value)
-
- new DirectToJarfileWriter(f.file)
-
- case _ =>
- if (settings.Ygenjavap.isDefault) {
- if(settings.Ydumpclasses.isDefault)
- new ClassBytecodeWriter { }
- else
- new ClassBytecodeWriter with DumpBytecodeWriter { }
- }
- else new ClassBytecodeWriter with JavapBytecodeWriter { }
- }
-
- val codeGenerator = new BytecodeGenerator(bytecodeWriter)
- debuglog("Created new bytecode generator for " + classes.size + " classes.")
-
- sortedClasses foreach { c =>
- try codeGenerator.genClass(c)
- catch {
- case e: JCode.CodeSizeTooBigException =>
- log("Skipped class %s because it has methods that are too long.".format(c))
- }
- }
-
- bytecodeWriter.close()
- classes.clear()
- }
- }
-
- var pickledBytes = 0 // statistics
-
- /**
- * Java bytecode generator.
- *
- */
- class BytecodeGenerator(bytecodeWriter: BytecodeWriter) extends BytecodeUtil {
- def this() = this(new ClassBytecodeWriter { })
- def debugLevel = settings.debuginfo.indexOfChoice
- import bytecodeWriter.writeClass
-
- val MIN_SWITCH_DENSITY = 0.7
- val INNER_CLASSES_FLAGS =
- (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_INTERFACE | ACC_ABSTRACT)
-
- val PublicStatic = ACC_PUBLIC | ACC_STATIC
- val PublicStaticFinal = ACC_PUBLIC | ACC_STATIC | ACC_FINAL
-
- val StringBuilderClassName = javaName(definitions.StringBuilderClass)
- val BoxesRunTime = "scala.runtime.BoxesRunTime"
-
- val StringBuilderType = new JObjectType(StringBuilderClassName) // TODO use ASMType.getObjectType
- val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY) // TODO use ASMType.getMethodType
- val arrayCloneType = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY)
- val MethodTypeType = new JObjectType("java.dyn.MethodType")
- val JavaLangClassType = new JObjectType("java.lang.Class")
- val MethodHandleType = new JObjectType("java.dyn.MethodHandle")
-
- // Scala attributes
- val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
- val BeanInfoSkipAttr = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip")
- val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName")
- val BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription")
-
- // Additional interface parents based on annotations and other cues
- def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
- case SerializableAttr => Some(SerializableClass)
- case CloneableAttr => Some(JavaCloneableClass)
- case RemoteAttr => Some(RemoteInterfaceClass)
- case _ => None
- }
-
- val versionPickle = {
- val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
- assert(vp.writeIndex == 0, vp)
- vp writeNat PickleFormat.MajorVersion
- vp writeNat PickleFormat.MinorVersion
- vp writeNat 0
- vp
- }
-
- private def helperBoxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
- val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
- val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
-
- Pair("boxTo" + boxedType.decodedName, mtype)
- }
-
- private val jBoxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
- BOOL -> helperBoxTo(BOOL) ,
- BYTE -> helperBoxTo(BYTE) ,
- CHAR -> helperBoxTo(CHAR) ,
- SHORT -> helperBoxTo(SHORT) ,
- INT -> helperBoxTo(INT) ,
- LONG -> helperBoxTo(LONG) ,
- FLOAT -> helperBoxTo(FLOAT) ,
- DOUBLE -> helperBoxTo(DOUBLE)
- )
-
- private def helperUnboxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
- val mtype = new JMethodType(javaType(kind), Array(JAVA_LANG_OBJECT))
- val mname = "unboxTo" + kind.toType.typeSymbol.decodedName
-
- Pair(mname, mtype)
- }
-
- private val jUnboxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
- BOOL -> helperUnboxTo(BOOL) ,
- BYTE -> helperUnboxTo(BYTE) ,
- CHAR -> helperUnboxTo(CHAR) ,
- SHORT -> helperUnboxTo(SHORT) ,
- INT -> helperUnboxTo(INT) ,
- LONG -> helperUnboxTo(LONG) ,
- FLOAT -> helperUnboxTo(FLOAT) ,
- DOUBLE -> helperUnboxTo(DOUBLE)
- )
-
- var clasz: IClass = _
- var method: IMethod = _
- var jclass: JClass = _
- var jmethod: JMethod = _
- // var jcode: JExtendedCode = _
-
- def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
- def isRemoteClass = clasz.symbol hasAnnotation RemoteAttr
- def serialVUID = clasz.symbol getAnnotation SerialVersionUIDAttr collect {
- case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
- }
-
- val fjbgContext = new FJBGContext(49, 0)
-
- val emitSource = debugLevel >= 1
- val emitLines = debugLevel >= 2
- val emitVars = debugLevel >= 3
-
- // bug had phase with wrong name; leaving enabled for brief pseudo deprecation
- private val checkSignatures = (
- (settings.check containsName phaseName)
- || (settings.check.value contains "genjvm") && {
- global.warning("This option will be removed: please use -Ycheck:%s, not -Ycheck:genjvm." format phaseName)
- true
- }
- )
-
- /** For given symbol return a symbol corresponding to a class that should be declared as inner class.
- *
- * For example:
- * class A {
- * class B
- * object C
- * }
- *
- * then method will return NoSymbol for A, the same symbol for A.B (corresponding to A$B class) and A$C$ symbol
- * for A.C.
- */
- private def innerClassSymbolFor(s: Symbol): Symbol =
- if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
-
- override def javaName(sym: Symbol): String = { // TODO Miguel says: check whether a single pass over `icodes.classes` can populate `innerClassBuffer` faster.
- /**
- * Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer
- *
- * Note: This method is called recursively thus making sure that we add complete chain
- * of inner class all until root class.
- */
- def collectInnerClass(s: Symbol): Unit = {
- // TODO: some beforeFlatten { ... } which accounts for
- // being nested in parameterized classes (if we're going to selectively flatten.)
- val x = innerClassSymbolFor(s)
- if(x ne NoSymbol) {
- assert(x.isClass, "not an inner-class symbol")
- val isInner = !x.rawowner.isPackageClass
- if (isInner) {
- innerClassBuffer += x
- collectInnerClass(x.rawowner)
- }
- }
- }
- collectInnerClass(sym)
-
- super.javaName(sym)
- }
-
- /** Write a class to disk, adding the Scala signature (pickled type
- * information) and inner classes.
- *
- * @param jclass The FJBG class, where code was emitted
- * @param sym The corresponding symbol, used for looking up pickled information
- */
- def emitClass(jclass: JClass, sym: Symbol) {
- addInnerClasses(jclass)
- writeClass("" + sym.name, jclass.getName(), toByteArray(jclass), sym)
- }
-
- /** Returns the ScalaSignature annotation if it must be added to this class,
- * none otherwise; furthermore, it adds to `jclass` the ScalaSig marker
- * attribute (marking that a scala signature annotation is present) or the
- * Scala marker attribute (marking that the signature for this class is in
- * another file). The annotation that is returned by this method must be
- * added to the class' annotations list when generating them.
- *
- * @param jclass The class file that is being readied.
- * @param sym The symbol for which the signature has been entered in
- * the symData map. This is different than the symbol
- * that is being generated in the case of a mirror class.
- * @return An option that is:
- * - defined and contains an annotation info of the
- * ScalaSignature type, instantiated with the pickle
- * signature for sym (a ScalaSig marker attribute has
- * been written);
- * - undefined if the jclass/sym couple must not contain a
- * signature (a Scala marker attribute has been written).
- */
- def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] =
- currentRun.symData get sym match {
- case Some(pickle) if !nme.isModuleName(newTermName(jclass.getName)) =>
- val scalaAttr =
- fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaSignatureATTR.toString,
- versionPickle.bytes, versionPickle.writeIndex)
- jclass addAttribute scalaAttr
- val scalaAnnot = {
- val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
- AnnotationInfo(sigBytes.sigAnnot, Nil, List((nme.bytes, sigBytes)))
- }
- pickledBytes += pickle.writeIndex
- currentRun.symData -= sym
- currentRun.symData -= sym.companionSymbol
- Some(scalaAnnot)
- case _ =>
- val markerAttr =
- fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaATTR.toString, new Array[Byte](0), 0)
- jclass addAttribute markerAttr
- None
- }
-
- private var innerClassBuffer = mutable.LinkedHashSet[Symbol]()
-
- /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
- * This is important on Android because there is otherwise an interface explosion.
- */
- private def minimizeInterfaces(interfaces: List[Symbol]): List[Symbol] = {
- var rest = interfaces
- var leaves = List.empty[Symbol]
- while(!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
- if(!nonLeaf) {
- leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
- }
- rest = rest.tail
- }
-
- leaves
- }
-
- def genClass(c: IClass) {
- clasz = c
- innerClassBuffer.clear()
-
- val name = javaName(c.symbol)
-
- val ps = c.symbol.info.parents
-
- val superClass: Symbol = if(ps.isEmpty) ObjectClass else ps.head.typeSymbol;
-
- val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
- val superInterfaces = superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol)) distinct
-
- val ifaces =
- if(superInterfaces.isEmpty) JClass.NO_INTERFACES
- else mkArray(minimizeInterfaces(superInterfaces) map javaName)
-
- jclass = fjbgContext.JClass(javaFlags(c.symbol),
- name,
- javaName(superClass),
- ifaces,
- c.cunit.source.toString)
-
- if (isStaticModule(c.symbol) || serialVUID != None || isParcelableClass) {
- if (isStaticModule(c.symbol))
- addModuleInstanceField
- addStaticInit(jclass, c.lookupStaticCtor)
-
- if (isTopLevelModule(c.symbol)) {
- if (c.symbol.companionClass == NoSymbol)
- generateMirrorClass(c.symbol, c.cunit.source)
- else
- log("No mirror class for module with linked class: " +
- c.symbol.fullName)
- }
- }
- else {
- c.lookupStaticCtor foreach (constructor => addStaticInit(jclass, Some(constructor)))
-
- // it must be a top level class (name contains no $s)
- def isCandidateForForwarders(sym: Symbol): Boolean =
- afterPickler {
- !(sym.name.toString contains '$') && sym.hasModuleFlag && !sym.isImplClass && !sym.isNestedClass
- }
-
- // At some point this started throwing lots of exceptions as a compile was finishing.
- // error: java.lang.AssertionError:
- // assertion failed: List(object package$CompositeThrowable, object package$CompositeThrowable)
- // ...is the one I've seen repeatedly. Suppressing.
- val lmoc = (
- try c.symbol.companionModule
- catch { case x: AssertionError =>
- Console.println("Suppressing failed assert: " + x)
- NoSymbol
- }
- )
- // add static forwarders if there are no name conflicts; see bugs #363 and #1735
- if (lmoc != NoSymbol && !c.symbol.isInterface) {
- if (isCandidateForForwarders(lmoc) && !settings.noForwarders.value) {
- log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
- addForwarders(jclass, lmoc.moduleClass)
- }
- }
- }
-
- clasz.fields foreach genField
- clasz.methods foreach genMethod
-
- val ssa = scalaSignatureAddingMarker(jclass, c.symbol)
- addGenericSignature(jclass, c.symbol, c.symbol.owner)
- addAnnotations(jclass, c.symbol.annotations ++ ssa)
- addEnclosingMethodAttribute(jclass, c.symbol)
- emitClass(jclass, c.symbol)
-
- if (c.symbol hasAnnotation BeanInfoAttr)
- genBeanInfoClass(c)
- }
-
- private def addEnclosingMethodAttribute(jclass: JClass, clazz: Symbol) {
- val sym = clazz.originalEnclosingMethod
- if (sym.isMethod) {
- debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
- jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
- jclass,
- javaName(sym.enclClass),
- javaName(sym),
- javaType(sym)
- )
- } else if (clazz.isAnonymousClass) {
- val enclClass = clazz.rawowner
- assert(enclClass.isClass, enclClass)
- val sym = enclClass.primaryConstructor
- if (sym == NoSymbol)
- log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(
- enclClass, clazz)
- )
- else {
- debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
- jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
- jclass,
- javaName(enclClass),
- javaName(sym),
- javaType(sym).asInstanceOf[JMethodType]
- )
- }
- }
- }
-
- private def toByteArray(jc: JClass): Array[Byte] = {
- val bos = new java.io.ByteArrayOutputStream()
- val dos = new java.io.DataOutputStream(bos)
- jc.writeTo(dos)
- dos.close()
- bos.toByteArray
- }
-
- /**
- * Generate a bean info class that describes the given class.
- *
- * @author Ross Judson (ross.judson@soletta.com)
- */
- def genBeanInfoClass(c: IClass) {
- val description = c.symbol getAnnotation BeanDescriptionAttr
- // informProgress(description.toString)
-
- val beanInfoClass = fjbgContext.JClass(javaFlags(c.symbol),
- javaName(c.symbol) + "BeanInfo",
- "scala/beans/ScalaBeanInfo",
- JClass.NO_INTERFACES,
- c.cunit.source.toString)
-
- var fieldList = List[String]()
- for (f <- clasz.fields if f.symbol.hasGetter;
- g = f.symbol.getter(c.symbol);
- s = f.symbol.setter(c.symbol);
- if g.isPublic && !(f.symbol.name startsWith "$")) // inserting $outer breaks the bean
- fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
- val methodList =
- for (m <- clasz.methods
- if !m.symbol.isConstructor &&
- m.symbol.isPublic &&
- !(m.symbol.name startsWith "$") &&
- !m.symbol.isGetter &&
- !m.symbol.isSetter) yield javaName(m.symbol)
-
- val constructor = beanInfoClass.addNewMethod(ACC_PUBLIC, "<init>", JType.VOID, new Array[JType](0), new Array[String](0))
- val jcode = constructor.getCode().asInstanceOf[JExtendedCode]
- val strKind = new JObjectType(javaName(StringClass))
- val stringArrayKind = new JArrayType(strKind)
- val conType = new JMethodType(JType.VOID, Array(javaType(ClassClass), stringArrayKind, stringArrayKind))
-
- def push(lst:Seq[String]) {
- var fi = 0
- for (f <- lst) {
- jcode.emitDUP()
- jcode emitPUSH fi
- if (f != null)
- jcode emitPUSH f
- else
- jcode.emitACONST_NULL()
- jcode emitASTORE strKind
- fi += 1
- }
- }
-
- jcode.emitALOAD_0()
- // push the class
- jcode emitPUSH javaType(c.symbol).asInstanceOf[JReferenceType]
-
- // push the string array of field information
- jcode emitPUSH fieldList.length
- jcode emitANEWARRAY strKind
- push(fieldList)
-
- // push the string array of method information
- jcode emitPUSH methodList.length
- jcode emitANEWARRAY strKind
- push(methodList)
-
- // invoke the superclass constructor, which will do the
- // necessary java reflection and create Method objects.
- jcode.emitINVOKESPECIAL("scala/beans/ScalaBeanInfo", "<init>", conType)
- jcode.emitRETURN()
-
- // write the bean information class file.
- writeClass("BeanInfo ", beanInfoClass.getName(), toByteArray(beanInfoClass), c.symbol)
- }
-
- /** Add the given 'throws' attributes to jmethod */
- def addExceptionsAttribute(jmethod: JMethod, excs: List[AnnotationInfo]) {
- if (excs.isEmpty) return
-
- val cpool = jmethod.getConstantPool
- val buf: ByteBuffer = ByteBuffer.allocate(512)
- var nattr = 0
-
- // put some random value; the actual number is determined at the end
- buf putShort 0xbaba.toShort
-
- for (ThrownException(exc) <- excs.distinct) {
- buf.putShort(
- cpool.addClass(
- javaName(exc)).shortValue)
- nattr += 1
- }
-
- assert(nattr > 0, nattr)
- buf.putShort(0, nattr.toShort)
- addAttribute(jmethod, tpnme.ExceptionsATTR, buf)
- }
-
- /** Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annot' is read from pickle, atp might be un-initialized
- */
- private def shouldEmitAnnotation(annot: AnnotationInfo) =
- annot.symbol.initialize.isJavaDefined &&
- annot.matches(ClassfileAnnotationClass) &&
- annot.args.isEmpty
-
- private def emitJavaAnnotations(cpool: JConstantPool, buf: ByteBuffer, annotations: List[AnnotationInfo]): Int = {
- def emitArgument(arg: ClassfileAnnotArg): Unit = arg match {
- case LiteralAnnotArg(const) =>
- const.tag match {
- case BooleanTag =>
- buf put 'Z'.toByte
- buf putShort cpool.addInteger(if(const.booleanValue) 1 else 0).toShort
- case ByteTag =>
- buf put 'B'.toByte
- buf putShort cpool.addInteger(const.byteValue).toShort
- case ShortTag =>
- buf put 'S'.toByte
- buf putShort cpool.addInteger(const.shortValue).toShort
- case CharTag =>
- buf put 'C'.toByte
- buf putShort cpool.addInteger(const.charValue).toShort
- case IntTag =>
- buf put 'I'.toByte
- buf putShort cpool.addInteger(const.intValue).toShort
- case LongTag =>
- buf put 'J'.toByte
- buf putShort cpool.addLong(const.longValue).toShort
- case FloatTag =>
- buf put 'F'.toByte
- buf putShort cpool.addFloat(const.floatValue).toShort
- case DoubleTag =>
- buf put 'D'.toByte
- buf putShort cpool.addDouble(const.doubleValue).toShort
- case StringTag =>
- buf put 's'.toByte
- buf putShort cpool.addUtf8(const.stringValue).toShort
- case ClazzTag =>
- buf put 'c'.toByte
- buf putShort cpool.addUtf8(javaType(const.typeValue).getSignature()).toShort
- case EnumTag =>
- buf put 'e'.toByte
- buf putShort cpool.addUtf8(javaType(const.tpe).getSignature()).toShort
- buf putShort cpool.addUtf8(const.symbolValue.name.toString).toShort
- }
-
- case sb@ScalaSigBytes(bytes) if !sb.isLong =>
- buf put 's'.toByte
- buf putShort cpool.addUtf8(sb.encodedBytes).toShort
-
- case sb@ScalaSigBytes(bytes) if sb.isLong =>
- buf put '['.toByte
- val stringCount = (sb.encodedBytes.length / 65534) + 1
- buf putShort stringCount.toShort
- for (i <- 0 until stringCount) {
- buf put 's'.toByte
- val j = i * 65535
- val string = sb.encodedBytes.slice(j, j + 65535)
- buf putShort cpool.addUtf8(string).toShort
- }
-
- case ArrayAnnotArg(args) =>
- buf put '['.toByte
- buf putShort args.length.toShort
- args foreach emitArgument
-
- case NestedAnnotArg(annInfo) =>
- buf put '@'.toByte
- emitAnnotation(annInfo)
- }
-
- def emitAnnotation(annotInfo: AnnotationInfo) {
- val AnnotationInfo(typ, args, assocs) = annotInfo
- val jtype = javaType(typ)
- buf putShort cpool.addUtf8(jtype.getSignature()).toShort
- assert(args.isEmpty, args)
- buf putShort assocs.length.toShort
- for ((name, value) <- assocs) {
- buf putShort cpool.addUtf8(name.toString).toShort
- emitArgument(value)
- }
- }
-
- var nannots = 0
- val pos = buf.position()
-
- // put some random value; the actual number of annotations is determined at the end
- buf putShort 0xbaba.toShort
-
- for (annot <- annotations if shouldEmitAnnotation(annot)) {
- nannots += 1
- emitAnnotation(annot)
- }
-
- // save the number of annotations
- buf.putShort(pos, nannots.toShort)
- nannots
- }
-
- // @M don't generate java generics sigs for (members of) implementation
- // classes, as they are monomorphic (TODO: ok?)
- private def needsGenericSignature(sym: Symbol) = !(
- // PP: This condition used to include sym.hasExpandedName, but this leads
- // to the total loss of generic information if a private member is
- // accessed from a closure: both the field and the accessor were generated
- // without it. This is particularly bad because the availability of
- // generic information could disappear as a consequence of a seemingly
- // unrelated change.
- settings.Ynogenericsig.value
- || sym.isArtifact
- || sym.isLiftedMethod
- || sym.isBridge
- || (sym.ownerChain exists (_.isImplClass))
- )
- def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) {
- if (needsGenericSignature(sym)) {
- val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
-
- erasure.javaSig(sym, memberTpe) foreach { sig =>
- // This seems useful enough in the general case.
- log(sig)
- if (checkSignatures) {
- val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
- val bytecodeTpe = owner.thisType.memberInfo(sym)
- if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
- clasz.cunit.warning(sym.pos,
- """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
- |signature: %s
- |original type: %s
- |normalized type: %s
- |erasure type: %s
- |if this is reproducible, please report bug at https://issues.scala-lang.org/
- """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
- return
- }
- }
- val index = jmember.getConstantPool.addUtf8(sig).toShort
- if (opt.verboseDebug)
- beforeErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index))
-
- val buf = ByteBuffer.allocate(2)
- buf putShort index
- addAttribute(jmember, tpnme.SignatureATTR, buf)
- }
- }
- }
-
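For orientation, a hypothetical polymorphic member and the kind of strings involved; the signature shown is what a JVM Signature attribute for such a member conventionally looks like, not captured compiler output:

    // def id[T](x: T): T = x
    //
    // erased descriptor:   (Ljava/lang/Object;)Ljava/lang/Object;
    // Signature attribute: <T:Ljava/lang/Object;>(TT;)TT;
    //
    // addGenericSignature stores the second string in the constant pool and
    // attaches it through a 2-byte SignatureATTR payload (the index written above).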
- def addAnnotations(jmember: JMember, annotations: List[AnnotationInfo]) {
- if (annotations exists (_ matches definitions.DeprecatedAttr)) {
- val attr = jmember.getContext().JOtherAttribute(
- jmember.getJClass(), jmember, tpnme.DeprecatedATTR.toString,
- new Array[Byte](0), 0)
- jmember addAttribute attr
- }
-
- val toEmit = annotations filter shouldEmitAnnotation
- if (toEmit.isEmpty) return
-
- val buf: ByteBuffer = ByteBuffer.allocate(2048)
- emitJavaAnnotations(jmember.getConstantPool, buf, toEmit)
- addAttribute(jmember, tpnme.RuntimeAnnotationATTR, buf)
- }
-
- def addParamAnnotations(jmethod: JMethod, pannotss: List[List[AnnotationInfo]]) {
- val annotations = pannotss map (_ filter shouldEmitAnnotation)
- if (annotations forall (_.isEmpty)) return
-
- val buf: ByteBuffer = ByteBuffer.allocate(2048)
-
- // number of parameters
- buf.put(annotations.length.toByte)
- for (annots <- annotations)
- emitJavaAnnotations(jmethod.getConstantPool, buf, annots)
-
- addAttribute(jmethod, tpnme.RuntimeParamAnnotationATTR, buf)
- }
-
- def addAttribute(jmember: JMember, name: Name, buf: ByteBuffer) {
- if (buf.position() < 2)
- return
-
- val length = buf.position()
- val arr = buf.array().slice(0, length)
-
- val attr = jmember.getContext().JOtherAttribute(jmember.getJClass(),
- jmember,
- name.toString,
- arr,
- length)
- jmember addAttribute attr
- }
-
- def addInnerClasses(jclass: JClass) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): String = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
- null
- else {
- val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String =
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
-
- // add inner classes which might not have been referenced yet
- afterErasure {
- for (sym <- List(clasz.symbol, clasz.symbol.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
- innerClassBuffer += m
- }
-
- val allInners = innerClassBuffer.toList
- if (allInners.nonEmpty) {
- debuglog(clasz.symbol.fullName('.') + " contains " + allInners.size + " inner classes.")
- val innerClassesAttr = jclass.getInnerClasses()
- // sort them so inner classes succeed their enclosing class
- // to satisfy the Eclipse Java compiler
- for (innerSym <- allInners sortBy (_.name.length)) {
- val flags = {
- val staticFlag = if (innerSym.rawowner.hasModuleFlag) ACC_STATIC else 0
- (javaFlags(innerSym) | staticFlag) & INNER_CLASSES_FLAGS
- }
- val jname = javaName(innerSym)
- val oname = outerName(innerSym)
- val iname = innerName(innerSym)
-
- // Mimicking javap inner class output
- debuglog(
- if (oname == null || iname == null) "//class " + jname
- else "//%s=class %s of class %s".format(iname, jname, oname)
- )
-
- innerClassesAttr.addEntry(jname, oname, iname, flags)
- }
- }
- }
-
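Sorting by name length is enough to list an enclosing class before any class nested in it, because a nested class's binary name always extends its owner's. A tiny sketch with hypothetical binary names:

    object InnerClassOrderDemo {
      def main(args: Array[String]): Unit = {
        val binaryNames = List("A$B$C", "A", "A$B")   // made-up names
        // shorter (enclosing) names come first, as the Eclipse compiler expects
        println(binaryNames.sortBy(_.length))         // List(A, A$B, A$B$C)
      }
    }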
- def genField(f: IField) {
- debuglog("Adding field: " + f.symbol.fullName)
-
- val jfield = jclass.addNewField(
- javaFieldFlags(f.symbol),
- javaName(f.symbol),
- javaType(f.symbol.tpe)
- )
-
- addGenericSignature(jfield, f.symbol, clasz.symbol)
- addAnnotations(jfield, f.symbol.annotations)
- }
-
- def genMethod(m: IMethod) {
- if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
-
- debuglog("Generating method " + m.symbol.fullName)
- method = m
- endPC.clear
- computeLocalVarsIndex(m)
-
- var resTpe = javaType(m.symbol.tpe.resultType)
- if (m.symbol.isClassConstructor)
- resTpe = JType.VOID
-
- var flags = javaFlags(m.symbol)
- if (jclass.isInterface)
- flags |= ACC_ABSTRACT
-
- if (m.symbol.isStrictFP)
- flags |= ACC_STRICT
-
- // native methods of objects are generated in mirror classes
- if (method.native)
- flags |= ACC_NATIVE
-
- jmethod = jclass.addNewMethod(flags,
- javaName(m.symbol),
- resTpe,
- mkArray(m.params map (p => javaType(p.kind))),
- mkArray(m.params map (p => javaName(p.sym))))
-
- addRemoteException(jmethod, m.symbol)
-
- if (!jmethod.isAbstract() && !method.native) {
- val jcode = jmethod.getCode().asInstanceOf[JExtendedCode]
-
- // add a fake local for debugging purposes
- if (emitVars && isClosureApply(method.symbol)) {
- val outerField = clasz.symbol.info.decl(nme.OUTER_LOCAL)
- if (outerField != NoSymbol) {
- log("Adding fake local to represent outer 'this' for closure " + clasz)
- val _this = new Local(
- method.symbol.newVariable(nme.FAKE_LOCAL_THIS), toTypeKind(outerField.tpe), false)
- m.locals = m.locals ::: List(_this)
- computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes
-
- jcode.emitALOAD_0()
- jcode.emitGETFIELD(javaName(clasz.symbol),
- javaName(outerField),
- javaType(outerField))
- jcode.emitSTORE(indexOf(_this), javaType(_this.kind))
- }
- }
-
- for (local <- m.locals if ! m.params.contains(local)) {
- debuglog("add local var: " + local)
- jmethod.addNewLocalVariable(javaType(local.kind), javaName(local.sym))
- }
-
- genCode(m)
- if (emitVars)
- genLocalVariableTable(m, jcode)
- }
-
- addGenericSignature(jmethod, m.symbol, clasz.symbol)
- val (excs, others) = m.symbol.annotations partition (_.symbol == ThrowsClass)
- addExceptionsAttribute(jmethod, excs)
- addAnnotations(jmethod, others)
- addParamAnnotations(jmethod, m.params.map(_.sym.annotations))
-
- // check for code size
- try jmethod.freeze()
- catch {
- case e: JCode.CodeSizeTooBigException =>
- clasz.cunit.error(m.symbol.pos, "Code size exceeds JVM limits: %d".format(e.codeSize))
- throw e
- }
- }
-
- /** Adds a @remote annotation, actual use unknown.
- */
- private def addRemoteException(jmethod: JMethod, meth: Symbol) {
- val needsAnnotation = (
- (isRemoteClass || (meth hasAnnotation RemoteAttr) && jmethod.isPublic)
- && !(meth.throwsAnnotations contains RemoteExceptionClass)
- )
- if (needsAnnotation) {
- val c = Constant(RemoteExceptionClass.tpe)
- val arg = Literal(c) setType c.tpe
- meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
- }
- }
-
- private def isClosureApply(sym: Symbol): Boolean = {
- (sym.name == nme.apply) &&
- sym.owner.isSynthetic &&
- sym.owner.tpe.parents.exists { t =>
- val TypeRef(_, sym, _) = t
- FunctionClass contains sym
- }
- }
-
- def addModuleInstanceField() {
- jclass.addNewField(PublicStaticFinal,
- nme.MODULE_INSTANCE_FIELD.toString,
- jclass.getType())
- }
-
- def addStaticInit(cls: JClass, mopt: Option[IMethod]) {
- val clinitMethod = cls.addNewMethod(PublicStatic,
- "<clinit>",
- JType.VOID,
- JType.EMPTY_ARRAY,
- new Array[String](0))
- val clinit = clinitMethod.getCode().asInstanceOf[JExtendedCode]
-
- mopt match {
- case Some(m) =>
- val oldLastBlock = m.lastBlock
- val lastBlock = m.newBlock()
- oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
-
- if (isStaticModule(clasz.symbol)) {
- // call object's private ctor from static ctor
- lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
- lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
- }
-
- // add serialVUID code
- serialVUID foreach { value =>
- import Flags._, definitions._
- val fieldName = "serialVersionUID"
- val fieldSymbol = clasz.symbol.newValue(newTermName(fieldName), NoPosition, STATIC | FINAL) setInfo LongClass.tpe
- clasz addField new IField(fieldSymbol)
- lastBlock emit CONSTANT(Constant(value))
- lastBlock emit STORE_FIELD(fieldSymbol, true)
- }
-
- if (isParcelableClass)
- addCreatorCode(BytecodeGenerator.this, lastBlock)
-
- lastBlock emit RETURN(UNIT)
- lastBlock.close
-
- method = m
- jmethod = clinitMethod
- genCode(m)
- case None =>
- legacyStaticInitializer(cls, clinit)
- }
- }
-
- private def legacyStaticInitializer(cls: JClass, clinit: JExtendedCode) {
- if (isStaticModule(clasz.symbol)) {
- clinit emitNEW cls.getName()
- clinit.emitINVOKESPECIAL(cls.getName(),
- JMethod.INSTANCE_CONSTRUCTOR_NAME,
- JMethodType.ARGLESS_VOID_FUNCTION)
- }
-
- serialVUID foreach { value =>
- val fieldName = "serialVersionUID"
- jclass.addNewField(PublicStaticFinal, fieldName, JType.LONG)
- clinit emitPUSH value
- clinit.emitPUTSTATIC(jclass.getName(), fieldName, JType.LONG)
- }
-
- if (isParcelableClass)
- legacyAddCreatorCode(BytecodeGenerator.this, clinit)
-
- clinit.emitRETURN()
- }
-
- /** Add a forwarder for method m */
- def addForwarder(jclass: JClass, module: Symbol, m: Symbol) {
- val moduleName = javaName(module)
- val methodInfo = module.thisType.memberInfo(m)
- val paramJavaTypes = methodInfo.paramTypes map javaType
- val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
- // TODO: evaluate the other flags we might be dropping on the floor here.
- val flags = PublicStatic | (
- if (m.isVarargsMethod) ACC_VARARGS else 0
- )
-
- /** Forwarders must not be marked final, as the JVM will not allow
- * redefinition of a final static method, and we don't know what classes
- * might be subclassing the companion class. See SI-4827.
- */
- val mirrorMethod = jclass.addNewMethod(
- flags,
- javaName(m),
- javaType(methodInfo.resultType),
- mkArray(paramJavaTypes),
- mkArray(paramNames))
- val mirrorCode = mirrorMethod.getCode().asInstanceOf[JExtendedCode]
- mirrorCode.emitGETSTATIC(moduleName,
- nme.MODULE_INSTANCE_FIELD.toString,
- new JObjectType(moduleName))
-
- var i = 0
- var index = 0
- var argTypes = mirrorMethod.getArgumentTypes()
- while (i < argTypes.length) {
- mirrorCode.emitLOAD(index, argTypes(i))
- index += argTypes(i).getSize()
- i += 1
- }
-
- mirrorCode.emitINVOKEVIRTUAL(moduleName, mirrorMethod.getName, javaType(m).asInstanceOf[JMethodType])
- mirrorCode emitRETURN mirrorMethod.getReturnType()
-
- addRemoteException(mirrorMethod, m)
- // only add generic signature if the method is concrete; bug #1745
- if (!m.isDeferred)
- addGenericSignature(mirrorMethod, m, module)
-
- val (throws, others) = m.annotations partition (_.symbol == ThrowsClass)
- addExceptionsAttribute(mirrorMethod, throws)
- addAnnotations(mirrorMethod, others)
- addParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
- }
-
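In source-level terms, a forwarder generated here behaves roughly like the static method sketched in the comment below; Echo is a hypothetical object and the Java rendering is only an approximation of the emitted bytecode:

    object Echo { def twice(s: String): String = s + s }

    // The companion/mirror class for Echo then gets, approximately:
    //
    //   public static String twice(String x_0) {
    //     return Echo$.MODULE$.twice(x_0);  // GETSTATIC MODULE$, load args, INVOKEVIRTUAL
    //   }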
- /** Add forwarders for all methods defined in `module` that don't conflict
- * with methods in the companion class of `module`. A conflict arises when
- * a method with the same name is defined both in a class and its companion
- * object: method signature is not taken into account.
- */
- def addForwarders(jclass: JClass, moduleClass: Symbol) {
- assert(moduleClass.isModuleClass, moduleClass)
- debuglog("Dumping mirror class for object: " + moduleClass)
-
- val className = jclass.getName
- val linkedClass = moduleClass.companionClass
- val linkedModule = linkedClass.companionSymbol
- lazy val conflictingNames: Set[Name] = {
- linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet
- }
- debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
-
- for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
- if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
- debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- else if (conflictingNames(m.name))
- log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
- else {
- log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- addForwarder(jclass, moduleClass, m)
- }
- }
- }
-
- /** Generate a mirror class for a top-level module. A mirror class is a class
- * containing only static methods that forward to the corresponding method
- * on the MODULE instance of the given Scala object. It will only be
- * generated if there is no companion class: if there is, an attempt will
- * instead be made to add the forwarder methods to the companion class.
- */
- def generateMirrorClass(clasz: Symbol, sourceFile: SourceFile) {
- import JAccessFlags._
- /* We need to save inner classes buffer and create a new one to make sure
- * that we do not confuse inner classes of the mirror class with inner
- * classes of the class we are mirroring. These two sets can be different
- * as seen in this case:
- *
- * class A {
- * class B
- * def b: B = new B
- * }
- * object C extends A
- *
- * Here mirror class of C has a static forwarder for (inherited) method `b`
- * therefore it refers to class `B` and needs InnerClasses entry. However,
- * the real class for `C` (named `C$`) is empty and does not refer to `B`
- * and thus does not need an InnerClasses entry for it.
- *
- * NOTE: This logic has been refactored in GenASM and everything is
- * implemented in a much cleaner way by having two separate buffers.
- */
- val savedInnerClasses = innerClassBuffer
- innerClassBuffer = mutable.LinkedHashSet[Symbol]()
- val moduleName = javaName(clasz) // + "$"
- val mirrorName = moduleName.substring(0, moduleName.length() - 1)
- val mirrorClass = fjbgContext.JClass(ACC_SUPER | ACC_PUBLIC | ACC_FINAL,
- mirrorName,
- JAVA_LANG_OBJECT.getName,
- JClass.NO_INTERFACES,
- "" + sourceFile)
-
- log("Dumping mirror class for '%s'".format(mirrorClass.getName))
- addForwarders(mirrorClass, clasz)
- val ssa = scalaSignatureAddingMarker(mirrorClass, clasz.companionSymbol)
- addAnnotations(mirrorClass, clasz.annotations ++ ssa)
- emitClass(mirrorClass, clasz)
- innerClassBuffer = savedInnerClasses
- }
-
- var linearization: List[BasicBlock] = Nil
- var isModuleInitialized = false
-
- /**
- * @param m ...
- */
- def genCode(m: IMethod) {
- val jcode = jmethod.getCode.asInstanceOf[JExtendedCode]
-
- def makeLabels(bs: List[BasicBlock]) = {
- debuglog("Making labels for: " + method)
-
- mutable.HashMap(bs map (_ -> jcode.newLabel) : _*)
- }
-
- isModuleInitialized = false
-
- linearization = linearizer.linearize(m)
- val labels = makeLabels(linearization)
-
- var nextBlock: BasicBlock = linearization.head
-
- def genBlocks(l: List[BasicBlock]): Unit = l match {
- case Nil => ()
- case x :: Nil => nextBlock = null; genBlock(x)
- case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
- }
-
- /** Generate exception handlers for the current method. */
- def genExceptionHandlers() {
-
- /** Return the list of intervals (as (start, end) pairs) in which the handler is active.
- * The intervals in the list have to be inclusive in the beginning and
- * exclusive in the end: [start, end).
- */
- def ranges(e: ExceptionHandler): List[(Int, Int)] = {
- var covered = e.covered
- var ranges: List[(Int, Int)] = Nil
- var start = -1
- var end = -1
-
- linearization foreach { b =>
- if (! (covered contains b) ) {
- if (start >= 0) { // we're inside a handler range
- end = labels(b).getAnchor()
- ranges ::= ((start, end))
- start = -1
- }
- } else {
- if (start < 0) // we're not inside a handler range
- start = labels(b).getAnchor()
-
- end = endPC(b)
- covered -= b
- }
- }
-
- /* Add the last interval. Note that since the intervals are
- * open-ended to the right, we have to give a number past the actual
- * code!
- */
- if (start >= 0) {
- ranges ::= ((start, jcode.getPC()))
- }
-
- if (!covered.isEmpty)
- debuglog("Some covered blocks were not found in method: " + method +
- " covered: " + covered + " not in " + linearization)
- ranges
- }
-
- for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
- if (p._1 < p._2) {
- debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
- val cls = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
- else javaName(e.cls)
- jcode.addExceptionHandler(p._1, p._2,
- labels(e.startBlock).getAnchor(),
- cls)
- } else
- log("Empty exception range: " + p)
- }
- }
-
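The interval computation in ranges boils down to run-length encoding the covered blocks into half-open [start, end) pairs. A standalone sketch, assuming contiguous blocks given as (startPC, covered) markers plus the method's end PC:

    object HandlerRangesDemo {
      def ranges(blocks: List[(Int, Boolean)], endPC: Int): List[(Int, Int)] = {
        var out: List[(Int, Int)] = Nil
        var start = -1
        for ((pc, covered) <- blocks) {
          if (covered) { if (start < 0) start = pc }                      // open an interval
          else if (start >= 0) { out = (start, pc) :: out; start = -1 }   // close it
        }
        if (start >= 0) out = (start, endPC) :: out                       // close the last one
        out.reverse
      }

      def main(args: Array[String]): Unit =
        // blocks at PCs 0, 4, 9, 15; only the middle two are covered
        println(ranges(List((0, false), (4, true), (9, true), (15, false)), 20))
        // prints List((4,15))
    }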
- def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
- target.isPublic || target.isProtected && {
- (site.enclClass isSubClass target.enclClass) ||
- (site.enclosingPackage == target.privateWithin)
- }
- }
-
- def genCallMethod(call: CALL_METHOD) {
- val CALL_METHOD(method, style) = call
- val siteSymbol = clasz.symbol
- val hostSymbol = call.hostClass
- val methodOwner = method.owner
- // info calls so that types are up to date; erasure may add lateINTERFACE to traits
- hostSymbol.info ; methodOwner.info
-
- def needsInterfaceCall(sym: Symbol) = (
- sym.isInterface
- || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
- )
- // whether to reference the type of the receiver or
- // the type of the method owner
- val useMethodOwner = (
- style != Dynamic
- || hostSymbol.isBottomClass
- || methodOwner == ObjectClass
- )
- val receiver = if (useMethodOwner) methodOwner else hostSymbol
- val jowner = javaName(receiver)
- val jname = javaName(method)
- val jtype = javaType(method).asInstanceOf[JMethodType]
-
- def dbg(invoke: String) {
- debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
- }
-
- def initModule() {
- // we initialize the MODULE$ field immediately after the super ctor
- if (isStaticModule(siteSymbol) && !isModuleInitialized &&
- jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
- jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
- isModuleInitialized = true
- jcode.emitALOAD_0()
- jcode.emitPUTSTATIC(jclass.getName(),
- nme.MODULE_INSTANCE_FIELD.toString,
- jclass.getType())
- }
- }
-
- style match {
- case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype)
- case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype)
- case Dynamic if needsInterfaceCall(receiver) => dbg("invokeinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
- case Dynamic => dbg("invokevirtual"); jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
- case SuperCall(_) =>
- dbg("invokespecial")
- jcode.emitINVOKESPECIAL(jowner, jname, jtype)
- initModule()
- }
- }
-
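The style dispatch above amounts to a small decision table; the sketch below mirrors it with a simplified Style type standing in for the ICode opcodes (the names here are made up):

    object InvokeStyleDemo {
      sealed trait Style
      case object StaticInstance extends Style  // Static(true): ctors and this-based calls
      case object StaticClass    extends Style  // Static(false): static members
      case object DynamicIface   extends Style  // Dynamic on an interface receiver
      case object DynamicClass   extends Style  // Dynamic on a class receiver
      case object Super          extends Style  // SuperCall(_)

      def opcode(style: Style): String = style match {
        case StaticInstance => "invokespecial"
        case StaticClass    => "invokestatic"
        case DynamicIface   => "invokeinterface"
        case DynamicClass   => "invokevirtual"
        case Super          => "invokespecial"  // followed by MODULE$ init in a module ctor
      }

      def main(args: Array[String]): Unit =
        List(StaticInstance, StaticClass, DynamicIface, DynamicClass, Super)
          .foreach(s => println(s"$s -> ${opcode(s)}"))
    }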
- def genBlock(b: BasicBlock) {
- labels(b).anchorToNext()
-
- debuglog("Generating code for block: " + b + " at pc: " + labels(b).getAnchor())
- var lastMappedPC = 0
- var lastLineNr = 0
- var crtPC = 0
-
- /** local variables whose scope appears in this block. */
- val varsInBlock: mutable.Set[Local] = new mutable.HashSet
- val lastInstr = b.lastInstruction
-
- for (instr <- b) {
- instr match {
- case THIS(clasz) => jcode.emitALOAD_0()
-
- case CONSTANT(const) => genConstant(jcode, const)
-
- case LOAD_ARRAY_ITEM(kind) =>
- if(kind.isRefOrArrayType) { jcode.emitAALOAD() }
- else {
- (kind: @unchecked) match {
- case UNIT => throw new IllegalArgumentException("invalid type for aload " + kind)
- case BOOL | BYTE => jcode.emitBALOAD()
- case SHORT => jcode.emitSALOAD()
- case CHAR => jcode.emitCALOAD()
- case INT => jcode.emitIALOAD()
- case LONG => jcode.emitLALOAD()
- case FLOAT => jcode.emitFALOAD()
- case DOUBLE => jcode.emitDALOAD()
- }
- }
-
- case LOAD_LOCAL(local) => jcode.emitLOAD(indexOf(local), javaType(local.kind))
-
- case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner +
- " flags: " + Flags.flagsToString(field.owner.flags))
- val fieldJName = javaName(field)
- val fieldJType = javaType(field)
- if (isStatic) jcode.emitGETSTATIC(owner, fieldJName, fieldJType)
- else jcode.emitGETFIELD( owner, fieldJName, fieldJType)
-
- case LOAD_MODULE(module) =>
- // assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
- if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
- jcode.emitALOAD_0()
- else
- jcode.emitGETSTATIC(javaName(module) /* + "$" */ ,
- nme.MODULE_INSTANCE_FIELD.toString,
- javaType(module))
-
- case STORE_ARRAY_ITEM(kind) =>
- if(kind.isRefOrArrayType) { jcode.emitAASTORE() }
- else {
- (kind: @unchecked) match {
- case UNIT => throw new IllegalArgumentException("invalid type for astore " + kind)
- case BOOL | BYTE => jcode.emitBASTORE()
- case SHORT => jcode.emitSASTORE()
- case CHAR => jcode.emitCASTORE()
- case INT => jcode.emitIASTORE()
- case LONG => jcode.emitLASTORE()
- case FLOAT => jcode.emitFASTORE()
- case DOUBLE => jcode.emitDASTORE()
- }
- }
-
- case STORE_LOCAL(local) =>
- jcode.emitSTORE(indexOf(local), javaType(local.kind))
-
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- jcode.emitASTORE_0()
-
- case STORE_FIELD(field, isStatic) =>
- val owner = javaName(field.owner)
- val fieldJName = javaName(field)
- val fieldJType = javaType(field)
- if (isStatic) jcode.emitPUTSTATIC(owner, fieldJName, fieldJType)
- else jcode.emitPUTFIELD( owner, fieldJName, fieldJType)
-
- case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos)
-
- /** Special handling to access native Array.clone() */
- case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
- val target: String = javaType(call.targetTypeKind).getSignature()
- jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType)
-
- case call @ CALL_METHOD(method, style) => genCallMethod(call)
-
- case BOX(kind) =>
- val Pair(mname, mtype) = jBoxTo(kind)
- jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
-
- case UNBOX(kind) =>
- val Pair(mname, mtype) = jUnboxTo(kind)
- jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
-
- case NEW(REFERENCE(cls)) =>
- val className = javaName(cls)
- jcode emitNEW className
-
- case CREATE_ARRAY(elem, 1) =>
- if(elem.isRefOrArrayType) { jcode emitANEWARRAY javaType(elem).asInstanceOf[JReferenceType] }
- else { jcode emitNEWARRAY javaType(elem) }
-
- case CREATE_ARRAY(elem, dims) =>
- jcode.emitMULTIANEWARRAY(javaType(ArrayN(elem, dims)).asInstanceOf[JReferenceType], dims)
-
- case IS_INSTANCE(tpe) =>
- tpe match {
- case REFERENCE(cls) => jcode emitINSTANCEOF new JObjectType(javaName(cls))
- case ARRAY(elem) => jcode emitINSTANCEOF new JArrayType(javaType(elem))
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
-
- case CHECK_CAST(tpe) =>
- tpe match {
- case REFERENCE(cls) => if (cls != ObjectClass) { jcode emitCHECKCAST new JObjectType(javaName(cls)) } // No need to checkcast for Objects
- case ARRAY(elem) => jcode emitCHECKCAST new JArrayType(javaType(elem))
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
-
- case SWITCH(tags, branches) =>
- val tagArray = new Array[Array[Int]](tags.length)
- var caze = tags
- var i = 0
-
- while (i < tagArray.length) {
- tagArray(i) = new Array[Int](caze.head.length)
- caze.head.copyToArray(tagArray(i), 0)
- i += 1
- caze = caze.tail
- }
- val branchArray = jcode.newLabels(tagArray.length)
- i = 0
- while (i < branchArray.length) {
- branchArray(i) = labels(branches(i))
- i += 1
- }
- debuglog("Emitting SWITCH:\ntags: " + tags + "\nbranches: " + branches)
- jcode.emitSWITCH(tagArray,
- branchArray,
- labels(branches.last),
- MIN_SWITCH_DENSITY)
- ()
-
- case JUMP(whereto) =>
- if (nextBlock != whereto)
- jcode.emitGOTO_maybe_W(labels(whereto), false) // default to short jumps
-
- case CJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF_ICMP(conds(cond.negate()), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ICMP(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- if (nextBlock == success) {
- jcode.emitIF_ACMP(conds(cond.negate()), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ACMP(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- } else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLCMP()
- case FLOAT =>
- if (cond == LT || cond == LE) jcode.emitFCMPG()
- else jcode.emitFCMPL()
- case DOUBLE =>
- if (cond == LT || cond == LE) jcode.emitDCMPG()
- else jcode.emitDCMPL()
- }
- if (nextBlock == success) {
- jcode.emitIF(conds(cond.negate()), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF(conds(cond), labels(success));
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- }
-
- case CZJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF(conds(cond.negate()), labels(failure))
- } else {
- jcode.emitIF(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- val Success = success
- val Failure = failure
- (cond, nextBlock) match {
- case (EQ, Success) => jcode emitIFNONNULL labels(failure)
- case (NE, Failure) => jcode emitIFNONNULL labels(success)
- case (EQ, Failure) => jcode emitIFNULL labels(success)
- case (NE, Success) => jcode emitIFNULL labels(failure)
- case (EQ, _) =>
- jcode emitIFNULL labels(success)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- case (NE, _) =>
- jcode emitIFNONNULL labels(success)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- case _ =>
- }
- } else {
- (kind: @unchecked) match {
- case LONG =>
- jcode.emitLCONST_0()
- jcode.emitLCMP()
- case FLOAT =>
- jcode.emitFCONST_0()
- if (cond == LT || cond == LE) jcode.emitFCMPG()
- else jcode.emitFCMPL()
- case DOUBLE =>
- jcode.emitDCONST_0()
- if (cond == LT || cond == LE) jcode.emitDCMPG()
- else jcode.emitDCMPL()
- }
- if (nextBlock == success) {
- jcode.emitIF(conds(cond.negate()), labels(failure))
- } else {
- jcode.emitIF(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- }
-
- case RETURN(kind) => jcode emitRETURN javaType(kind)
-
- case THROW(_) => jcode.emitATHROW()
-
- case DROP(kind) =>
- if(kind.isWideType) jcode.emitPOP2()
- else jcode.emitPOP()
-
- case DUP(kind) =>
- if(kind.isWideType) jcode.emitDUP2()
- else jcode.emitDUP()
-
- case MONITOR_ENTER() => jcode.emitMONITORENTER()
-
- case MONITOR_EXIT() => jcode.emitMONITOREXIT()
-
- case SCOPE_ENTER(lv) =>
- varsInBlock += lv
- lv.start = jcode.getPC()
-
- case SCOPE_EXIT(lv) =>
- if (varsInBlock(lv)) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- varsInBlock -= lv
- }
- else if (b.varsInScope(lv)) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
- b.varsInScope -= lv
- }
- else dumpMethodAndAbort(method, "Illegal local var nesting")
-
- case LOAD_EXCEPTION(_) =>
- ()
- }
-
- crtPC = jcode.getPC()
-
- // assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match")
- // val crtLine = instr.pos.line.get(lastLineNr);
-
- val crtLine = try {
- if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception
- } catch {
- case _: UnsupportedOperationException =>
- log("Warning: wrong position in: " + method)
- lastLineNr
- }
-
- if (instr eq lastInstr) { endPC(b) = jcode.getPC() }
-
- //System.err.println("CRTLINE: " + instr.pos + " " +
- // /* (if (instr.pos < clasz.cunit.source.content.length) clasz.cunit.source.content(instr.pos) else '*') + */ " " + crtLine);
-
- if (crtPC > lastMappedPC) {
- jcode.completeLineNumber(lastMappedPC, crtPC, crtLine)
- lastMappedPC = crtPC
- lastLineNr = crtLine
- }
- }
-
- // local vars that survived this basic block
- for (lv <- varsInBlock) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- }
- for (lv <- b.varsInScope) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
- }
- }
-
-
- /**
- * @param primitive ...
- * @param pos ...
- */
- def genPrimitive(primitive: Primitive, pos: Position) {
- primitive match {
- case Negation(kind) =>
- if(kind.isIntSizedType) { jcode.emitINEG() }
- else {
- kind match {
- case LONG => jcode.emitLNEG()
- case FLOAT => jcode.emitFNEG()
- case DOUBLE => jcode.emitDNEG()
- case _ => abort("Impossible to negate a " + kind)
- }
- }
-
- case Arithmetic(op, kind) =>
- op match {
- case ADD =>
- if(kind.isIntSizedType) { jcode.emitIADD() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLADD()
- case FLOAT => jcode.emitFADD()
- case DOUBLE => jcode.emitDADD()
- }
- }
-
- case SUB =>
- if(kind.isIntSizedType) { jcode.emitISUB() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLSUB()
- case FLOAT => jcode.emitFSUB()
- case DOUBLE => jcode.emitDSUB()
- }
- }
-
- case MUL =>
- if(kind.isIntSizedType) { jcode.emitIMUL() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLMUL()
- case FLOAT => jcode.emitFMUL()
- case DOUBLE => jcode.emitDMUL()
- }
- }
-
- case DIV =>
- if(kind.isIntSizedType) { jcode.emitIDIV() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLDIV()
- case FLOAT => jcode.emitFDIV()
- case DOUBLE => jcode.emitDDIV()
- }
- }
-
- case REM =>
- if(kind.isIntSizedType) { jcode.emitIREM() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLREM()
- case FLOAT => jcode.emitFREM()
- case DOUBLE => jcode.emitDREM()
- }
- }
-
- case NOT =>
- if(kind.isIntSizedType) {
- jcode.emitPUSH(-1)
- jcode.emitIXOR()
- } else if(kind == LONG) {
- jcode.emitPUSH(-1l)
- jcode.emitLXOR()
- } else {
- abort("Impossible to negate an " + kind)
- }
-
- case _ =>
- abort("Unknown arithmetic primitive " + primitive)
- }
-
- case Logical(op, kind) => ((op, kind): @unchecked) match {
- case (AND, LONG) => jcode.emitLAND()
- case (AND, INT) => jcode.emitIAND()
- case (AND, _) =>
- jcode.emitIAND()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (OR, LONG) => jcode.emitLOR()
- case (OR, INT) => jcode.emitIOR()
- case (OR, _) =>
- jcode.emitIOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (XOR, LONG) => jcode.emitLXOR()
- case (XOR, INT) => jcode.emitIXOR()
- case (XOR, _) =>
- jcode.emitIXOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
- }
-
- case Shift(op, kind) => ((op, kind): @unchecked) match {
- case (LSL, LONG) => jcode.emitLSHL()
- case (LSL, INT) => jcode.emitISHL()
- case (LSL, _) =>
- jcode.emitISHL()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (ASR, LONG) => jcode.emitLSHR()
- case (ASR, INT) => jcode.emitISHR()
- case (ASR, _) =>
- jcode.emitISHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (LSR, LONG) => jcode.emitLUSHR()
- case (LSR, INT) => jcode.emitIUSHR()
- case (LSR, _) =>
- jcode.emitIUSHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
- }
-
- case Comparison(op, kind) => ((op, kind): @unchecked) match {
- case (CMP, LONG) => jcode.emitLCMP()
- case (CMPL, FLOAT) => jcode.emitFCMPL()
- case (CMPG, FLOAT) => jcode.emitFCMPG()
- case (CMPL, DOUBLE) => jcode.emitDCMPL()
- case (CMPG, DOUBLE) => jcode.emitDCMPL()
- }
-
- case Conversion(src, dst) =>
- debuglog("Converting from: " + src + " to: " + dst)
- if (dst == BOOL) {
- println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line)
- } else
- jcode.emitT2T(javaType(src), javaType(dst))
-
- case ArrayLength(_) =>
- jcode.emitARRAYLENGTH()
-
- case StartConcat =>
- jcode emitNEW StringBuilderClassName
- jcode.emitDUP()
- jcode.emitINVOKESPECIAL(StringBuilderClassName,
- JMethod.INSTANCE_CONSTRUCTOR_NAME,
- JMethodType.ARGLESS_VOID_FUNCTION)
-
- case StringConcat(el) =>
- val jtype = el match {
- case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
- case _ => javaType(el)
- }
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "append",
- new JMethodType(StringBuilderType,
- Array(jtype)))
- case EndConcat =>
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "toString",
- toStringType)
-
- case _ =>
- abort("Unimplemented primitive " + primitive)
- }
- }
-
- // genCode starts here
- genBlocks(linearization)
-
- if (this.method.exh != Nil)
- genExceptionHandlers;
- }
-
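One detail worth calling out from the NOT case in genPrimitive: the JVM has no bitwise-not instruction, so ~x is emitted as x ^ -1 (IXOR or LXOR against a pushed -1). A quick check of the identity:

    object NotViaXorDemo {
      def main(args: Array[String]): Unit = {
        val xs = List(0, 1, -7, 123456789)
        // the JVM has no INOT/LNOT instruction, so ~x is compiled as x ^ -1
        assert(xs.forall(x => (x ^ -1) == ~x))
        assert(xs.map(_.toLong).forall(x => (x ^ -1L) == ~x))
        println("x ^ -1 == ~x holds for the samples")
      }
    }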
-
- /** Emit a Local variable table for debugging purposes.
- * Synthetic locals are skipped. All variables are method-scoped.
- */
- private def genLocalVariableTable(m: IMethod, jcode: JCode) {
- val vars = m.locals filterNot (_.sym.isSynthetic)
- if (vars.isEmpty) return
-
- val pool = jclass.getConstantPool
- val pc = jcode.getPC()
- var anonCounter = 0
- var entries = 0
- vars.foreach { lv =>
- lv.ranges = mergeEntries(lv.ranges.reverse);
- entries += lv.ranges.length
- }
- if (!jmethod.isStatic()) entries += 1
-
- val lvTab = ByteBuffer.allocate(2 + 10 * entries)
- def emitEntry(name: String, signature: String, idx: Short, start: Short, end: Short) {
- lvTab putShort start
- lvTab putShort end
- lvTab putShort pool.addUtf8(name).toShort
- lvTab putShort pool.addUtf8(signature).toShort
- lvTab putShort idx
- }
-
- lvTab.putShort(entries.toShort)
-
- if (!jmethod.isStatic()) {
- emitEntry("this", jclass.getType().getSignature(), 0, 0.toShort, pc.toShort)
- }
-
- for (lv <- vars) {
- val name = if (javaName(lv.sym) eq null) {
- anonCounter += 1
- "<anon" + anonCounter + ">"
- } else javaName(lv.sym)
-
- val index = indexOf(lv).toShort
- val tpe = javaType(lv.kind).getSignature()
- for ((start, end) <- lv.ranges) {
- emitEntry(name, tpe, index, start.toShort, (end - start).toShort)
- }
- }
- val attr =
- fjbgContext.JOtherAttribute(jclass,
- jcode,
- tpnme.LocalVariableTableATTR.toString,
- lvTab.array())
- jcode addAttribute attr
- }
-
-
- /** For each basic block, the first PC address following it. */
- val endPC = new mutable.HashMap[BasicBlock, Int]
-
- ////////////////////// local vars ///////////////////////
-
- def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
-
- def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
-
- def indexOf(m: IMethod, sym: Symbol): Int = {
- val Some(local) = m lookupLocal sym
- indexOf(local)
- }
-
- def indexOf(local: Local): Int = {
- assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
- local.index
- }
-
- /**
- * Compute the indexes of each local variable of the given
- * method. *Does not assume the parameters come first!*
- */
- def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1;
-
- for (l <- m.params) {
- debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
- l.index = idx
- idx += sizeOf(l.kind)
- }
-
- for (l <- m.locals if !(m.params contains l)) {
- debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
- l.index = idx
- idx += sizeOf(l.kind)
- }
- }
-
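The slot assignment can be reproduced with a scan: slot 0 is `this` for instance methods, and wide kinds (LONG, DOUBLE) occupy two slots. A sketch over hypothetical kinds:

    object LocalIndexDemo {
      sealed trait Kind { def size: Int }
      case object IntK    extends Kind { val size = 1 }
      case object LongK   extends Kind { val size = 2 }
      case object DoubleK extends Kind { val size = 2 }
      case object RefK    extends Kind { val size = 1 }

      def indices(params: List[Kind], isStatic: Boolean): List[Int] =
        params.scanLeft(if (isStatic) 0 else 1)((idx, k) => idx + k.size).init

      def main(args: Array[String]): Unit =
        // e.g. def m(i: Int, l: Long, s: String) on an instance: slots 1, 2, 4
        println(indices(List(IntK, LongK, RefK), isStatic = false)) // List(1, 2, 4)
    }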
- ////////////////////// Utilities ////////////////////////
-
- /** Merge adjacent ranges. */
- private def mergeEntries(ranges: List[(Int, Int)]): List[(Int, Int)] =
- (ranges.foldLeft(Nil: List[(Int, Int)]) { (collapsed: List[(Int, Int)], p: (Int, Int)) => (collapsed, p) match {
- case (Nil, _) => List(p)
- case ((s1, e1) :: rest, (s2, e2)) if (e1 == s2) => (s1, e2) :: rest
- case _ => p :: collapsed
- }}).reverse
- }
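mergeEntries only fuses ranges that touch exactly (the end of one equals the start of the next); gaps survive. Since the helper is private, the behavior is easiest to show with a standalone copy:

    object MergeRangesDemo {
      def mergeEntries(ranges: List[(Int, Int)]): List[(Int, Int)] =
        ranges.foldLeft(Nil: List[(Int, Int)]) {
          case ((s1, e1) :: rest, (s2, e2)) if e1 == s2 => (s1, e2) :: rest
          case (collapsed, p)                           => p :: collapsed
        }.reverse

      def main(args: Array[String]): Unit =
        println(mergeEntries(List((0, 5), (5, 9), (12, 14)))) // List((0,9), (12,14))
    }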
-
- private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
-
- /**
- * Return the Java modifiers for the given symbol.
- * Java modifiers for classes:
- * - public, abstract, final, strictfp (not used)
- * for interfaces:
- * - the same as for classes, without 'final'
- * for fields:
- * - public, private (*)
- * - static, final
- * for methods:
- * - the same as for fields, plus:
- * - abstract, synchronized (not used), strictfp (not used), native (not used)
- *
- * (*) protected cannot be used, since inner classes 'see' protected members,
- * and they would fail verification after being lifted.
- */
- def javaFlags(sym: Symbol): Int = {
- // constructors of module classes should be private
- // PP: why are they only being marked private at this stage and not earlier?
- val privateFlag =
- sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
-
- // Final: the only fields which can receive ACC_FINAL are eager vals.
- // Neither vars nor lazy vals can, because:
- //
- // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
- // "Another problem is that the specification allows aggressive
- // optimization of final fields. Within a thread, it is permissible to
- // reorder reads of a final field with those modifications of a final
- // field that do not take place in the constructor."
- //
- // A var or lazy val which is marked final still has meaning to the
- // scala compiler. The word final is heavily overloaded unfortunately;
- // for us it means "not overridable". At present you can't override
- // vars regardless; this may change.
- //
- // The logic does not check .isFinal (which checks flags for the FINAL flag,
- // and includes symbols marked lateFINAL); instead it inspects rawflags so
- // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
- // avoid breaking proxy software which depends on subclassing, we do not
- // emit ACC_FINAL.
- // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
-
- val finalFlag = (
- (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym))
- && !sym.enclClass.isInterface
- && !sym.isClassConstructor
- && !sym.isMutable // lazy vals and vars both
- )
-
- // Primitives are "abstract final" to prohibit instantiation
- // without having to provide any implementations, but that is an
- // illegal combination of modifiers at the bytecode level so
- // suppress final if abstract if present.
- mkFlags(
- if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
- if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
- if (sym.isInterface) ACC_INTERFACE else 0,
- if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
- if (sym.isStaticMember) ACC_STATIC else 0,
- if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
- if (sym.isArtifact) ACC_SYNTHETIC else 0,
- if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
- if (sym.isVarargsMethod) ACC_VARARGS else 0,
- if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0
- )
- }
- def javaFieldFlags(sym: Symbol) = (
- javaFlags(sym) | mkFlags(
- if (sym hasAnnotation TransientAttr) ACC_TRANSIENT else 0,
- if (sym hasAnnotation VolatileAttr) ACC_VOLATILE else 0,
- if (sym.isMutable) 0 else ACC_FINAL
- )
- )
-
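mkFlags is just a bitwise OR fold, so conditions that do not apply contribute 0 and drop out. A tiny check using the standard class-file flag values (restated here from the JVMS tables):

    object MkFlagsDemo {
      // JVM access flag values from the class file spec
      val ACC_PUBLIC = 0x0001
      val ACC_STATIC = 0x0008
      val ACC_FINAL  = 0x0010

      private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)

      def main(args: Array[String]): Unit = {
        // zeros contributed by non-applicable conditions simply vanish in the OR
        val flags = mkFlags(ACC_PUBLIC, 0, ACC_FINAL, 0, ACC_STATIC)
        println(f"0x$flags%04x") // 0x0019
      }
    }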
- def isTopLevelModule(sym: Symbol): Boolean =
- afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
-
- def isStaticModule(sym: Symbol): Boolean = {
- sym.isModuleClass && !sym.isImplClass && !sym.isLifted
- }
-
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
index 540935fd57..01c4ff5a52 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
@@ -5,38 +5,23 @@
package scala.tools.nsc
package backend.jvm
-import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.symtab._
-/** Code shared between the legacy backend [[scala.tools.nsc.backend.jvm.GenJVM]]
- * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
- * more here, but for now I'm starting with the refactorings that are either
- * straightforward to review or necessary for maintenance.
- */
+/** Code shared between the erstwhile legacy backend (aka GenJVM)
+ * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
+ * more here, but for now I'm starting with the refactorings that are either
+ * straightforward to review or necessary for maintenance.
+ */
trait GenJVMASM {
val global: Global
import global._
import icodes._
import definitions._
- protected def outputDirectory(sym: Symbol): AbstractFile =
- settings.outputDirs outputDirFor beforeFlatten(sym.sourceFile)
-
- protected def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
- var dir = base
- val pathParts = clsName.split("[./]").toList
- for (part <- pathParts.init) {
- dir = dir.subdirectoryNamed(part)
- }
- dir.fileNamed(pathParts.last + suffix)
- }
- protected def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
- getFile(outputDirectory(sym), clsName, suffix)
-
- protected val ExcludedForwarderFlags = {
+ val ExcludedForwarderFlags = {
import Flags._
// Should include DEFERRED but this breaks findMember.
- ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
+ ( SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
}
protected def isJavaEntryPoint(icls: IClass) = {
@@ -65,9 +50,8 @@ trait GenJVMASM {
// At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
hasApproximate && {
// Before erasure so we can identify generic mains.
- beforeErasure {
+ enteringErasure {
val companion = sym.linkedClassOfClass
- val companionMain = companion.tpe.member(nme.main)
if (hasJavaMainMethod(companion))
failNoForwarder("companion contains its own main method")
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
deleted file mode 100644
index e002a614bd..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-package scala.tools.nsc
-package backend.jvm
-
-import scala.collection.{ mutable, immutable }
-import ch.epfl.lamp.fjbg._
-
-trait GenJVMUtil {
- self: GenJVM =>
-
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions._
-
- /** Map from type kinds to the Java reference types. It is used for
- * loading class constants. @see Predef.classOf.
- */
- val classLiteral = immutable.Map[TypeKind, JObjectType](
- UNIT -> new JObjectType("java.lang.Void"),
- BOOL -> new JObjectType("java.lang.Boolean"),
- BYTE -> new JObjectType("java.lang.Byte"),
- SHORT -> new JObjectType("java.lang.Short"),
- CHAR -> new JObjectType("java.lang.Character"),
- INT -> new JObjectType("java.lang.Integer"),
- LONG -> new JObjectType("java.lang.Long"),
- FLOAT -> new JObjectType("java.lang.Float"),
- DOUBLE -> new JObjectType("java.lang.Double")
- )
-
- // Don't put this in per run caches.
- private val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
- NothingClass -> binarynme.RuntimeNothing,
- RuntimeNothingClass -> binarynme.RuntimeNothing,
- NullClass -> binarynme.RuntimeNull,
- RuntimeNullClass -> binarynme.RuntimeNull
- )
-
- /** This trait may be used by tools that need access to
- * utility methods like javaName and javaType. (for instance,
- * the Eclipse plugin uses it).
- */
- trait BytecodeUtil {
-
- val conds = immutable.Map[TestOp, Int](
- EQ -> JExtendedCode.COND_EQ,
- NE -> JExtendedCode.COND_NE,
- LT -> JExtendedCode.COND_LT,
- GT -> JExtendedCode.COND_GT,
- LE -> JExtendedCode.COND_LE,
- GE -> JExtendedCode.COND_GE
- )
-
- /** Specialized array conversion to prevent calling
- * java.lang.reflect.Array.newInstance via TraversableOnce.toArray
- */
-
- def mkArray(xs: Traversable[JType]): Array[JType] = { val a = new Array[JType](xs.size); xs.copyToArray(a); a }
- def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
-
- /** Return the name of this symbol that can be used on the Java
- * platform. It removes spaces from names.
- *
- * Special handling:
- * scala.Nothing erases to scala.runtime.Nothing$
- * scala.Null erases to scala.runtime.Null$
- *
- * This is needed because they are not real classes, and they mean
- * 'abrupt termination upon evaluation of that expression' or null respectively.
- * This handling is done already in GenICode, but here we need to remove
- * references from method signatures to these types, because such classes can
- * not exist in the classpath: the type checker will be very confused.
- */
- def javaName(sym: Symbol): String =
- javaNameCache.getOrElseUpdate(sym, {
- if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.javaBinaryName
- else
- sym.javaSimpleName
- }).toString
-
- def javaType(t: TypeKind): JType = (t: @unchecked) match {
- case UNIT => JType.VOID
- case BOOL => JType.BOOLEAN
- case BYTE => JType.BYTE
- case SHORT => JType.SHORT
- case CHAR => JType.CHAR
- case INT => JType.INT
- case LONG => JType.LONG
- case FLOAT => JType.FLOAT
- case DOUBLE => JType.DOUBLE
- case REFERENCE(cls) => new JObjectType(javaName(cls))
- case ARRAY(elem) => new JArrayType(javaType(elem))
- }
-
- def javaType(t: Type): JType = javaType(toTypeKind(t))
-
- def javaType(s: Symbol): JType =
- if (s.isMethod)
- new JMethodType(
- if (s.isClassConstructor) JType.VOID else javaType(s.tpe.resultType),
- mkArray(s.tpe.paramTypes map javaType)
- )
- else
- javaType(s.tpe)
-
- protected def genConstant(jcode: JExtendedCode, const: Constant) {
- const.tag match {
- case UnitTag => ()
- case BooleanTag => jcode emitPUSH const.booleanValue
- case ByteTag => jcode emitPUSH const.byteValue
- case ShortTag => jcode emitPUSH const.shortValue
- case CharTag => jcode emitPUSH const.charValue
- case IntTag => jcode emitPUSH const.intValue
- case LongTag => jcode emitPUSH const.longValue
- case FloatTag => jcode emitPUSH const.floatValue
- case DoubleTag => jcode emitPUSH const.doubleValue
- case StringTag => jcode emitPUSH const.stringValue
- case NullTag => jcode.emitACONST_NULL()
- case ClazzTag =>
- val kind = toTypeKind(const.typeValue)
- val toPush =
- if (kind.isValueType) classLiteral(kind)
- else javaType(kind).asInstanceOf[JReferenceType]
-
- jcode emitPUSH toPush
-
- case EnumTag =>
- val sym = const.symbolValue
- jcode.emitGETSTATIC(javaName(sym.owner),
- javaName(sym),
- javaType(sym.tpe.underlying))
- case _ =>
- abort("Unknown constant value: " + const)
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
deleted file mode 100644
index aaffaa84d8..0000000000
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ /dev/null
@@ -1,2358 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Nikolay Mihaylov
- */
-
-
-package scala.tools.nsc
-package backend.msil
-
-import java.io.{File, IOException}
-import java.nio.{ByteBuffer, ByteOrder}
-import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.symtab._
-
-import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
-import ch.epfl.lamp.compiler.msil.emit._
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-import scala.language.postfixOps
-
-abstract class GenMSIL extends SubComponent {
- import global._
- import loaders.clrTypes
- import clrTypes.{types, constructors, methods, fields}
- import icodes._
- import icodes.opcodes._
-
- val x = loaders
-
- /** Create a new phase */
- override def newPhase(p: Phase) = new MsilPhase(p)
-
- val phaseName = "msil"
- /** MSIL code generation phase
- */
- class MsilPhase(prev: Phase) extends GlobalPhase(prev) {
- def name = phaseName
- override def newFlags = phaseNewFlags
-
- override def erasedTypes = true
-
- override def run() {
- if (settings.debug.value) inform("[running phase " + name + " on icode]")
-
- val codeGenerator = new BytecodeGenerator
-
- //classes is ICodes.classes, a HashMap[Symbol, IClass]
- classes.values foreach codeGenerator.findEntryPoint
- if( opt.showClass.isDefined && (codeGenerator.entryPoint == null) ) { // TODO introduce dedicated setting instead
- val entryclass = opt.showClass.get.toString
- warning("Couldn't find entry class " + entryclass)
- }
-
- codeGenerator.initAssembly
-
- val classesSorted = classes.values.toList.sortBy(c => c.symbol.id) // simplifies comparing cross-compiler vs. .exe output
- classesSorted foreach codeGenerator.createTypeBuilder
- classesSorted foreach codeGenerator.createClassMembers
-
- try {
- classesSorted foreach codeGenerator.genClass
- } finally {
- codeGenerator.writeAssembly
- }
- }
-
- override def apply(unit: CompilationUnit) {
- abort("MSIL works on icode classes, not on compilation units!")
- }
- }
-
- /**
- * MSIL bytecode generator.
- *
- */
- class BytecodeGenerator {
-
- val MODULE_INSTANCE_NAME = "MODULE$"
-
- import clrTypes.{VOID => MVOID, BOOLEAN => MBOOL, BYTE => MBYTE, SHORT => MSHORT,
- CHAR => MCHAR, INT => MINT, LONG => MLONG, FLOAT => MFLOAT,
- DOUBLE => MDOUBLE, OBJECT => MOBJECT, STRING => MSTRING,
- STRING_ARRAY => MSTRING_ARRAY,
- SYMTAB_CONSTR => SYMTAB_ATTRIBUTE_CONSTRUCTOR,
- SYMTAB_DEFAULT_CONSTR => SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR}
-
- val EXCEPTION = clrTypes.getType("System.Exception")
- val MBYTE_ARRAY = clrTypes.mkArrayType(MBYTE)
-
- val ICLONEABLE = clrTypes.getType("System.ICloneable")
- val MEMBERWISE_CLONE = MOBJECT.GetMethod("MemberwiseClone", MsilType.EmptyTypes)
-
- val MMONITOR = clrTypes.getType("System.Threading.Monitor")
- val MMONITOR_ENTER = MMONITOR.GetMethod("Enter", Array(MOBJECT))
- val MMONITOR_EXIT = MMONITOR.GetMethod("Exit", Array(MOBJECT))
-
- val MSTRING_BUILDER = clrTypes.getType("System.Text.StringBuilder")
- val MSTRING_BUILDER_CONSTR = MSTRING_BUILDER.GetConstructor(MsilType.EmptyTypes)
- val MSTRING_BUILDER_TOSTRING = MSTRING_BUILDER.GetMethod("ToString",
- MsilType.EmptyTypes)
-
- val TYPE_FROM_HANDLE =
- clrTypes.getType("System.Type").GetMethod("GetTypeFromHandle", Array(clrTypes.getType("System.RuntimeTypeHandle")))
-
- val INT_PTR = clrTypes.getType("System.IntPtr")
-
- val JOBJECT = definitions.ObjectClass
- val JSTRING = definitions.StringClass
-
- val SystemConvert = clrTypes.getType("System.Convert")
-
- val objParam = Array(MOBJECT)
-
- val toBool: MethodInfo = SystemConvert.GetMethod("ToBoolean", objParam) // see comment in emitUnbox
- val toSByte: MethodInfo = SystemConvert.GetMethod("ToSByte", objParam)
- val toShort: MethodInfo = SystemConvert.GetMethod("ToInt16", objParam)
- val toChar: MethodInfo = SystemConvert.GetMethod("ToChar", objParam)
- val toInt: MethodInfo = SystemConvert.GetMethod("ToInt32", objParam)
- val toLong: MethodInfo = SystemConvert.GetMethod("ToInt64", objParam)
- val toFloat: MethodInfo = SystemConvert.GetMethod("ToSingle", objParam)
- val toDouble: MethodInfo = SystemConvert.GetMethod("ToDouble", objParam)
-
- //val boxedUnit: FieldInfo = msilType(definitions.BoxedUnitModule.info).GetField("UNIT")
- val boxedUnit: FieldInfo = fields(definitions.BoxedUnit_UNIT)
-
- // Scala attributes
- // symtab.Definitions -> object (singleton..)
- val SerializableAttr = definitions.SerializableAttr.tpe
- val CloneableAttr = definitions.CloneableAttr.tpe
- val TransientAtt = definitions.TransientAttr.tpe
- // remoting: the architectures are too different, no mapping (no portable code
- // possible)
-
- // java instance methods that are mapped to static methods in .net
- // these will need to be called with OpCodes.Call (not Callvirt)
- val dynToStatMapped = mutable.HashSet[Symbol]()
-
- initMappings()
-
- /** Create the mappings between java and .net classes and methods */
- private def initMappings() {
- mapType(definitions.AnyClass, MOBJECT)
- mapType(definitions.AnyRefClass, MOBJECT)
- //mapType(definitions.NullClass, clrTypes.getType("scala.AllRef$"))
- //mapType(definitions.NothingClass, clrTypes.getType("scala.All$"))
- // FIXME: for some reason the upper two lines map to null
- mapType(definitions.NullClass, EXCEPTION)
- mapType(definitions.NothingClass, EXCEPTION)
-
- mapType(definitions.BooleanClass, MBOOL)
- mapType(definitions.ByteClass, MBYTE)
- mapType(definitions.ShortClass, MSHORT)
- mapType(definitions.CharClass, MCHAR)
- mapType(definitions.IntClass, MINT)
- mapType(definitions.LongClass, MLONG)
- mapType(definitions.FloatClass, MFLOAT)
- mapType(definitions.DoubleClass, MDOUBLE)
- }
-
- var clasz: IClass = _
- var method: IMethod = _
-
- var massembly: AssemblyBuilder = _
- var mmodule: ModuleBuilder = _
- var mcode: ILGenerator = _
-
- var assemName: String = _
- var firstSourceName = ""
- var outDir: File = _
- var srcPath: File = _
- var moduleName: String = _
-
- def initAssembly() {
-
- assemName = settings.assemname.value
-
- if (assemName == "") {
- if (entryPoint != null) {
- assemName = msilName(entryPoint.enclClass)
- // remove the $ at the end (from module-name)
- assemName = assemName.substring(0, assemName.length() - 1)
- } else {
- // assuming filename of first source file
- assert(firstSourceName.endsWith(".scala"), firstSourceName)
- assemName = firstSourceName.substring(0, firstSourceName.length() - 6)
- }
- } else {
- if (assemName.endsWith(".msil"))
- assemName = assemName.substring(0, assemName.length()-5)
- if (assemName.endsWith(".il"))
- assemName = assemName.substring(0, assemName.length()-3)
- val f: File = new File(assemName)
- assemName = f.getName()
- }
-
- outDir = new File(settings.outdir.value)
-
- srcPath = new File(settings.sourcedir.value)
-
- val assemblyName = new AssemblyName()
- assemblyName.Name = assemName
- massembly = AssemblyBuilderFactory.DefineDynamicAssembly(assemblyName)
-
- moduleName = assemName // + (if (entryPoint == null) ".dll" else ".exe")
- // filename here: .dll or .exe (in both parameters), second: give absolute-path
- mmodule = massembly.DefineDynamicModule(moduleName,
- new File(outDir, moduleName).getAbsolutePath())
- assert (mmodule != null)
- }
-
-
- /**
- * Form of the custom Attribute parameter (Ecma-335.pdf)
- * - p. 163 for CustomAttrib Form,
- * - p. 164 for FixedArg Form (Array and Element) (if array or not is known!)
- * !! least significant byte first if values longer than one byte !!
- *
- * 1: Prolog (unsigned int16, value 0x0001) -> symtab[0] = 0x01, symtab[1] = 0x00
- * 2: FixedArgs (directly the data, get number and types from related constructor)
- * 2.1: length of the array (unsigned int32, 4 bytes, least significant first)
- * 2.2: the byte array data
- * 3: NumNamed (unsigned int16, number of named fields and properties, 0x0000)
- */
- def addSymtabAttribute(sym: Symbol, tBuilder: TypeBuilder) {
- def addMarker() {
- val markerSymtab = new Array[Byte](4)
- markerSymtab(0) = 1.toByte
- tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR, markerSymtab)
- }
-
- // both conditions are needed (why exactly..?)
- if (tBuilder.Name.endsWith("$") || sym.isModuleClass) {
- addMarker()
- } else {
- currentRun.symData.get(sym) match {
- case Some(pickle) =>
- var size = pickle.writeIndex
- val symtab = new Array[Byte](size + 8)
- symtab(0) = 1.toByte
- for (i <- 2 until 6) {
- symtab(i) = (size & 0xff).toByte
- size = size >> 8
- }
- java.lang.System.arraycopy(pickle.bytes, 0, symtab, 6, pickle.writeIndex)
-
- tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_CONSTRUCTOR, symtab)
-
- currentRun.symData -= sym
- currentRun.symData -= sym.companionSymbol
-
- case _ =>
- addMarker()
- }
- }
- }
-
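The blob assembled above follows the layout described in the comment: prolog 0x0001 (little-endian), a 4-byte little-endian payload length, the pickle bytes, then NumNamed = 0x0000. A sketch without the msil types:

    object SymtabBlobDemo {
      // prolog 0x0001, 4-byte LE length, payload, trailing NumNamed = 0x0000
      def blob(payload: Array[Byte]): Array[Byte] = {
        val out = new Array[Byte](payload.length + 8)
        out(0) = 1                                   // prolog, low byte; out(1) stays 0
        var size = payload.length
        for (i <- 2 until 6) { out(i) = (size & 0xff).toByte; size >>= 8 }
        System.arraycopy(payload, 0, out, 6, payload.length)
        out                                          // last two bytes stay 0 (NumNamed)
      }

      def main(args: Array[String]): Unit =
        println(blob(Array[Byte](42, 43)).mkString(" ")) // 1 0 2 0 0 0 42 43 0 0
    }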
- /**
- * Mutates `member` adding CLR attributes (if any) based on sym.annotations.
- * Please notice that CLR custom modifiers are a different beast (see customModifiers below)
- * and thus shouldn't be added by this method.
- */
- def addAttributes(member: ICustomAttributeSetter, annotations: List[AnnotationInfo]) {
- val attributes = annotations.map(_.atp.typeSymbol).collect {
- case definitions.TransientAttr => null // TODO this is just an example
- }
- return // TODO: implement at some point
- }
-
- /**
- * What's a CLR custom modifier? Intro available as source comments in compiler.msil.CustomModifier.
- * It's basically a marker associated with a location (think of FieldInfo, ParameterInfo, and PropertyInfo)
- * and thus that marker (be it optional or required) becomes part of the signature of that location.
- * Some annotations will become CLR attributes (see addAttributes above), others custom modifiers (this method).
- */
- def customModifiers(annotations: List[AnnotationInfo]): Array[CustomModifier] = {
- annotations.map(_.atp.typeSymbol).collect {
- case definitions.VolatileAttr => new CustomModifier(true, CustomModifier.VolatileMarker)
- } toArray
- }
-
-
-
- /*
- debuglog("creating annotations: " + annotations + " for member : " + member)
- for (annot@ AnnotationInfo(typ, annArgs, nvPairs) <- annotations ;
- if annot.isConstant)
- //!typ.typeSymbol.isJavaDefined
- {
-// assert(consts.length <= 1,
-// "too many constant arguments for annotations; "+consts.toString())
-
-       // Problem / TODO: having the symbol of the annotation's type would be nicer
-       // (hopefully type.typeSymbol is the same as the one in types2create)
-       // AND: this will crash if the annotation's Type is already compiled (-> not a TypeBuilder)
- // when this is solved, types2create will be the same as icodes.classes, thus superfluous
- val annType: TypeBuilder = getType(typ.typeSymbol).asInstanceOf[TypeBuilder]
-// val annType: MsilType = getType(typ.typeSymbol)
-
-       // Problem / TODO: I have no idea which constructor is used. This
- // information should be available in AnnotationInfo.
- annType.CreateType() // else, GetConstructors can't be used
- val constr: ConstructorInfo = annType.GetConstructors()(0)
- // prevent a second call of CreateType, only needed because there's no
- // other way than GetConstructors()(0) to get the constructor, if there's
- // no constructor symbol available.
-
- val args: Array[Byte] =
- getAttributeArgs(
- annArgs map (_.constant.get),
- (for((n,v) <- nvPairs) yield (n, v.constant.get)))
- member.SetCustomAttribute(constr, args)
- }
- } */
-
-/* def getAttributeArgs(consts: List[Constant], nvPairs: List[(Name, Constant)]): Array[Byte] = {
- val buf = ByteBuffer.allocate(2048) // FIXME: this may be not enough!
- buf.order(ByteOrder.LITTLE_ENDIAN)
- buf.putShort(1.toShort) // signature
-
- def emitSerString(str: String) = {
- // this is wrong, it has to be the length of the UTF-8 byte array, which
- // may be longer (see clr-book on page 302)
-// val length: Int = str.length
- val strBytes: Array[Byte] = try {
- str.getBytes("UTF-8")
- } catch {
- case _: Error => abort("could not get byte-array for string: " + str)
- }
- val length: Int = strBytes.length //this length is stored big-endian
- if (length < 128)
- buf.put(length.toByte)
- else if (length < (1<<14)) {
- buf.put(((length >> 8) | 0x80).toByte) // the bits 14 and 15 of length are '0'
-         buf.put((length & 0xff).toByte)
- } else if (length < (1 << 29)) {
- buf.put(((length >> 24) | 0xc0).toByte)
- buf.put(((length >> 16) & 0xff).toByte)
- buf.put(((length >> 8) & 0xff).toByte)
- buf.put(((length ) & 0xff).toByte)
- } else
- abort("string too long for attribute parameter: " + length)
- buf.put(strBytes)
- }
-
- def emitConst(const: Constant): Unit = const.tag match {
- case BooleanTag => buf.put((if (const.booleanValue) 1 else 0).toByte)
- case ByteTag => buf.put(const.byteValue)
- case ShortTag => buf.putShort(const.shortValue)
- case CharTag => buf.putChar(const.charValue)
- case IntTag => buf.putInt(const.intValue)
- case LongTag => buf.putLong(const.longValue)
- case FloatTag => buf.putFloat(const.floatValue)
- case DoubleTag => buf.putDouble(const.doubleValue)
- case StringTag =>
- val str: String = const.stringValue
- if (str == null) {
- buf.put(0xff.toByte)
- } else {
- emitSerString(str)
- }
- case ArrayTag =>
- val arr: Array[Constant] = const.arrayValue
- if (arr == null) {
- buf.putInt(0xffffffff)
- } else {
- buf.putInt(arr.length)
- arr.foreach(emitConst)
- }
-
- // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag, ArrayTag ???
-
- case _ => abort("could not handle attribute argument: " + const)
- }
-
- consts foreach emitConst
- buf.putShort(nvPairs.length.toShort)
- def emitNamedArg(nvPair: (Name, Constant)) {
- // the named argument is a property of the attribute (it can't be a field, since
- // all fields in scala are private)
- buf.put(0x54.toByte)
-
- def emitType(c: Constant) = c.tag match { // type of the constant, Ecma-335.pdf, page 151
- case BooleanTag => buf.put(0x02.toByte)
- case ByteTag => buf.put(0x05.toByte)
- case ShortTag => buf.put(0x06.toByte)
- case CharTag => buf.put(0x07.toByte)
- case IntTag => buf.put(0x08.toByte)
- case LongTag => buf.put(0x0a.toByte)
- case FloatTag => buf.put(0x0c.toByte)
- case DoubleTag => buf.put(0x0d.toByte)
- case StringTag => buf.put(0x0e.toByte)
-
- // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag ???
-
- // ArrayTag falls in here
- case _ => abort("could not handle attribute argument: " + c)
- }
-
- val cnst: Constant = nvPair._2
- if (cnst.tag == ArrayTag) {
- buf.put(0x1d.toByte)
- emitType(cnst.arrayValue(0)) // FIXME: will crash if array length = 0
- } else if (cnst.tag == EnumTag) {
- buf.put(0x55.toByte)
- // TODO: put a SerString (don't know what exactly, names of the enums somehow..)
- } else {
- buf.put(0x51.toByte)
- emitType(cnst)
- }
-
- emitSerString(nvPair._1.toString)
- emitConst(nvPair._2)
- }
-
- val length = buf.position()
- buf.array().slice(0, length)
- } */
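-   // Editorial illustration, not part of the original source: the compressed length prefix
-   // used by emitSerString above (ECMA-335, II.23.2) as a standalone helper, assuming `buf`
-   // is a little-endian java.nio.ByteBuffer (the name `putCompressedLength` is hypothetical).
-   /* def putCompressedLength(buf: java.nio.ByteBuffer, length: Int): Unit =
-        if (length < 0x80) buf.put(length.toByte)          // 1 byte:  0bbbbbbb
-        else if (length < 0x4000) {                        // 2 bytes: 10bbbbbb bbbbbbbb
-          buf.put(((length >> 8) | 0x80).toByte)
-          buf.put((length & 0xff).toByte)
-        } else if (length < 0x20000000) {                  // 4 bytes: 110bbbbb plus three more bytes
-          buf.put(((length >> 24) | 0xc0).toByte)
-          buf.put(((length >> 16) & 0xff).toByte)
-          buf.put(((length >> 8) & 0xff).toByte)
-          buf.put((length & 0xff).toByte)
-        } else sys.error("length too large for a compressed integer: " + length) */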
-
- def writeAssembly() {
- if (entryPoint != null) {
- assert(entryPoint.enclClass.isModuleClass, entryPoint.enclClass)
- val mainMethod = methods(entryPoint)
- val stringArrayTypes: Array[MsilType] = Array(MSTRING_ARRAY)
- val globalMain = mmodule.DefineGlobalMethod(
- "Main", MethodAttributes.Public | MethodAttributes.Static,
- MVOID, stringArrayTypes)
- globalMain.DefineParameter(0, ParameterAttributes.None, "args")
- massembly.SetEntryPoint(globalMain)
- val code = globalMain.GetILGenerator()
- val moduleField = getModuleInstanceField(entryPoint.enclClass)
- code.Emit(OpCodes.Ldsfld, moduleField)
- code.Emit(OpCodes.Ldarg_0)
- code.Emit(OpCodes.Callvirt, mainMethod)
- code.Emit(OpCodes.Ret)
- }
- createTypes()
- var outDirName: String = null
- try {
- if (settings.Ygenjavap.isDefault) { // we reuse the JVM-sounding setting because it's conceptually similar
- outDirName = outDir.getPath()
- massembly.Save(outDirName + "\\" + assemName + ".msil") /* use SingleFileILPrinterVisitor */
- } else {
- outDirName = srcPath.getPath()
- massembly.Save(settings.Ygenjavap.value, outDirName) /* use MultipleFilesILPrinterVisitor */
- }
- } catch {
- case e:IOException => abort("Could not write to " + outDirName + ": " + e.getMessage())
- }
- }
-
- private def createTypes() {
- for (sym <- classes.keys) {
- val iclass = classes(sym)
- val tBuilder = types(sym).asInstanceOf[TypeBuilder]
-
-       debuglog("Calling CreateType for " + sym + ", " + tBuilder.toString)
-
- tBuilder.CreateType()
- tBuilder.setSourceFilepath(iclass.cunit.source.file.path)
- }
- }
-
- private[GenMSIL] def ilasmFileName(iclass: IClass) : String = {
- // method.sourceFile contains just the filename
- iclass.cunit.source.file.toString.replace("\\", "\\\\")
- }
-
- private[GenMSIL] def genClass(iclass: IClass) {
- val sym = iclass.symbol
- debuglog("Generating class " + sym + " flags: " + Flags.flagsToString(sym.flags))
- clasz = iclass
-
- val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
- if (isCloneable(sym)) {
-       // FIXME: why is there no nme.clone_ ?
- // "Clone": if the code is non-portable, "Clone" is defined, not "clone"
- // TODO: improve condition (should override AnyRef.clone)
- if (iclass.methods.forall(m => {
- !((m.symbol.name.toString != "clone" || m.symbol.name.toString != "Clone") &&
- m.symbol.tpe.paramTypes.length != 0)
- })) {
- debuglog("auto-generating cloneable method for " + sym)
- val attrs: Short = (MethodAttributes.Public | MethodAttributes.Virtual |
- MethodAttributes.HideBySig).toShort
- val cloneMethod = tBuilder.DefineMethod("Clone", attrs, MOBJECT,
- MsilType.EmptyTypes)
- val clCode = cloneMethod.GetILGenerator()
- clCode.Emit(OpCodes.Ldarg_0)
- clCode.Emit(OpCodes.Call, MEMBERWISE_CLONE)
- clCode.Emit(OpCodes.Ret)
- }
- }
-
- val line = sym.pos.line
- tBuilder.setPosition(line, ilasmFileName(iclass))
-
- if (isTopLevelModule(sym)) {
- if (sym.companionClass == NoSymbol)
- generateMirrorClass(sym)
- else
- log("No mirror class for module with linked class: " +
- sym.fullName)
- }
-
- addSymtabAttribute(sym, tBuilder)
- addAttributes(tBuilder, sym.annotations)
-
- if (iclass.symbol != definitions.ArrayClass)
- iclass.methods foreach genMethod
-
- } //genClass
-
-
- private def genMethod(m: IMethod) {
- debuglog("Generating method " + m.symbol + " flags: " + Flags.flagsToString(m.symbol.flags) +
- " owner: " + m.symbol.owner)
- method = m
- localBuilders.clear
- computeLocalVarsIndex(m)
-
- if (m.symbol.isClassConstructor) {
- mcode = constructors(m.symbol).asInstanceOf[ConstructorBuilder].GetILGenerator()
- } else {
- val mBuilder = methods(m.symbol).asInstanceOf[MethodBuilder]
- if (!mBuilder.IsAbstract())
- try {
- mcode = mBuilder.GetILGenerator()
- } catch {
- case e: Exception =>
- java.lang.System.out.println("m.symbol = " + Flags.flagsToString(m.symbol.flags) + " " + m.symbol)
- java.lang.System.out.println("m.symbol.owner = " + Flags.flagsToString(m.symbol.owner.flags) + " " + m.symbol.owner)
- java.lang.System.out.println("mBuilder = " + mBuilder)
- java.lang.System.out.println("mBuilder.DeclaringType = " +
- TypeAttributes.toString(mBuilder.DeclaringType.Attributes) +
- "::" + mBuilder.DeclaringType)
- throw e
- }
- else
- mcode = null
- }
-
- if (mcode != null) {
- for (local <- m.locals ; if !(m.params contains local)) {
- debuglog("add local var: " + local + ", of kind " + local.kind)
- val t: MsilType = msilType(local.kind)
- val localBuilder = mcode.DeclareLocal(t)
- localBuilder.SetLocalSymInfo(msilName(local.sym))
- localBuilders(local) = localBuilder
- }
- genCode(m)
- }
-
- }
-
- /** Special linearizer for methods with at least one exception handler. This
- * linearizer brings all basic blocks in the right order so that nested
- * try-catch and try-finally blocks can be emitted.
- */
- val msilLinearizer = new MSILLinearizer()
-
- val labels = mutable.HashMap[BasicBlock, Label]()
-
- /* when emitting .line, it's enough to include the full filename just once per method, thus reducing filesize.
- * this scheme relies on the fact that the entry block is emitted first. */
- var dbFilenameSeen = false
-
- def genCode(m: IMethod) {
-
- def makeLabels(blocks: List[BasicBlock]) = {
- debuglog("Making labels for: " + method)
- for (bb <- blocks) labels(bb) = mcode.DefineLabel()
- }
-
- labels.clear
-
- var linearization = if(!m.exh.isEmpty) msilLinearizer.linearize(m)
- else linearizer.linearize(m)
-
- if (!m.exh.isEmpty)
- linearization = computeExceptionMaps(linearization, m)
-
- makeLabels(linearization)
-
- // debug val blocksInM = m.code.blocks.toList.sortBy(bb => bb.label)
- // debug val blocksInL = linearization.sortBy(bb => bb.label)
- // debug val MButNotL = (blocksInM.toSet) diff (blocksInL.toSet) // if non-empty, a jump to B fails to find a label for B (case CJUMP, case CZJUMP)
- // debug if(!MButNotL.isEmpty) { }
-
- dbFilenameSeen = false
- genBlocks(linearization)
-
- // RETURN inside exception blocks are replaced by Leave. The target of the
- // leave is a `Ret` outside any exception block (generated here).
- if (handlerReturnMethod == m) {
- mcode.MarkLabel(handlerReturnLabel)
- if (handlerReturnKind != UNIT)
- mcode.Emit(OpCodes.Ldloc, handlerReturnLocal)
- mcode.Emit(OpCodes.Ret)
- }
-
- beginExBlock.clear()
- beginCatchBlock.clear()
- endExBlock.clear()
- endFinallyLabels.clear()
- }
-
- def genBlocks(blocks: List[BasicBlock], previous: BasicBlock = null) {
- blocks match {
- case Nil => ()
- case x :: Nil => genBlock(x, prev = previous, next = null)
- case x :: y :: ys => genBlock(x, prev = previous, next = y); genBlocks(y :: ys, previous = x)
- }
- }
-
- // the try blocks starting at a certain BasicBlock
- val beginExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
-
-     // the catch blocks starting / ending at a certain BasicBlock
- val beginCatchBlock = mutable.HashMap[BasicBlock, ExceptionHandler]()
- val endExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
-
- /** When emitting the code (genBlock), the number of currently active try / catch
- * blocks. When seeing a `RETURN` inside a try / catch, we need to
- * - store the result in a local (if it's not UNIT)
- * - emit `Leave handlerReturnLabel` instead of the Return
- * - emit code at the end: load the local and return its value
- */
- var currentHandlers = new mutable.Stack[ExceptionHandler]
- // The IMethod the Local/Label/Kind below belong to
- var handlerReturnMethod: IMethod = _
- // Stores the result when returning inside an exception block
- var handlerReturnLocal: LocalBuilder = _
- // Label for a return instruction outside any exception block
- var handlerReturnLabel: Label = _
- // The result kind.
- var handlerReturnKind: TypeKind = _
- def returnFromHandler(kind: TypeKind): (LocalBuilder, Label) = {
- if (handlerReturnMethod != method) {
- handlerReturnMethod = method
- if (kind != UNIT) {
- handlerReturnLocal = mcode.DeclareLocal(msilType(kind))
- handlerReturnLocal.SetLocalSymInfo("$handlerReturn")
- }
- handlerReturnLabel = mcode.DefineLabel()
- handlerReturnKind = kind
- }
- (handlerReturnLocal, handlerReturnLabel)
- }
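-   // Editorial illustration, not part of the original source: for a non-UNIT `return v`
-   // inside a try/catch, the code emitted via returnFromHandler has roughly this IL shape:
-   //
-   //   .try {
-   //     ...                      // compute v
-   //     stloc  $handlerReturn    // RETURN inside a handler: store the result in the local
-   //     leave  L_ret             // instead of `ret`, which is not allowed inside .try
-   //   } catch ... { ... }
-   //   L_ret:
-   //     ldloc  $handlerReturn    // emitted at the end of genCode for this method
-   //     ret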
-
- /** For try/catch nested inside a finally, we can't use `Leave OutsideFinally`, the
- * Leave target has to be inside the finally (and it has to be the `endfinally` instruction).
- * So for every finalizer, we have a label which marks the place of the `endfinally`,
- * nested try/catch blocks will leave there.
- */
- val endFinallyLabels = mutable.HashMap[ExceptionHandler, Label]()
-
- /** Computes which blocks are the beginning / end of a try or catch block */
- private def computeExceptionMaps(blocks: List[BasicBlock], m: IMethod): List[BasicBlock] = {
- val visitedBlocks = new mutable.HashSet[BasicBlock]()
-
- // handlers which have not been introduced so far
- var openHandlers = m.exh
-
-
- /** Example
- * try {
- * try {
- * // *1*
- * } catch {
- * case h1 =>
- * }
- * } catch {
- * case h2 =>
- * case h3 =>
- * try {
- *
- * } catch {
- * case h4 => // *2*
- * case h5 =>
- * }
- * }
- */
-
-     // Stack of nested try blocks. Each block has a List of ExceptionHandler (multiple
-     // catch statements). Example *1*: Stack(List(h2, h3), List(h1))
- val currentTryHandlers = new mutable.Stack[List[ExceptionHandler]]()
-
- // Stack of nested catch blocks. The head of the list is the current catch block. The
- // tail is all following catch blocks. Example *2*: Stack(List(h3), List(h4, h5))
- val currentCatchHandlers = new mutable.Stack[List[ExceptionHandler]]()
-
- for (b <- blocks) {
-
- // are we past the current catch blocks?
- def endHandlers(): List[ExceptionHandler] = {
- var res: List[ExceptionHandler] = Nil
- if (!currentCatchHandlers.isEmpty) {
- val handler = currentCatchHandlers.top.head
- if (!handler.blocks.contains(b)) {
- // all blocks of the handler are either visited, or not part of the linearization (i.e. dead)
- assert(handler.blocks.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
- "Bad linearization of basic blocks inside catch. Found block not part of the handler\n"+
- b.fullString +"\nwhile in catch-part of\n"+ handler)
-
- val rest = currentCatchHandlers.pop.tail
- if (rest.isEmpty) {
- // all catch blocks of that exception handler are covered
- res = handler :: endHandlers()
- } else {
-             // there are more catch blocks for that try (handlers covering the same try)
- currentCatchHandlers.push(rest)
- beginCatchBlock(b) = rest.head
- }
- }
- }
- res
- }
- val end = endHandlers()
- if (!end.isEmpty) endExBlock(b) = end
-
- // are we past the current try block?
- if (!currentTryHandlers.isEmpty) {
- val handler = currentTryHandlers.top.head
- if (!handler.covers(b)) {
- // all of the covered blocks are visited, or not part of the linearization
- assert(handler.covered.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
- "Bad linearization of basic blocks inside try. Found non-covered block\n"+
- b.fullString +"\nwhile in try-part of\n"+ handler)
-
- assert(handler.startBlock == b,
- "Bad linearization of basic blocks. The entry block of a catch does not directly follow the try\n"+
- b.fullString +"\n"+ handler)
-
- val handlers = currentTryHandlers.pop
- currentCatchHandlers.push(handlers)
- beginCatchBlock(b) = handler
- }
- }
-
- // are there try blocks starting at b?
- val (newHandlers, stillOpen) = openHandlers.partition(_.covers(b))
- openHandlers = stillOpen
-
- val newHandlersBySize = newHandlers.groupBy(_.covered.size)
- // big handlers first, smaller ones are nested inside the try of the big one
- // (checked by the assertions below)
- val sizes = newHandlersBySize.keys.toList.sortWith(_ > _)
-
- val beginHandlers = new mutable.ListBuffer[ExceptionHandler]
- for (s <- sizes) {
- val sHandlers = newHandlersBySize(s)
- for (h <- sHandlers) {
- assert(h.covered == sHandlers.head.covered,
- "bad nesting of exception handlers. same size, but not covering same blocks\n"+
- h +"\n"+ sHandlers.head)
- assert(h.resultKind == sHandlers.head.resultKind,
-                  "bad nesting of exception handlers. same size, but not the same resultKind\n"+
- h +"\n"+ sHandlers.head)
- }
- for (bigger <- beginHandlers; h <- sHandlers) {
- assert(h.covered.subsetOf(bigger.covered),
- "bad nesting of exception handlers. try blocks of smaller handler are not nested in bigger one.\n"+
- h +"\n"+ bigger)
- assert(h.blocks.toSet.subsetOf(bigger.covered),
- "bad nesting of exception handlers. catch blocks of smaller handler are not nested in bigger one.\n"+
- h +"\n"+ bigger)
- }
- beginHandlers += sHandlers.head
- currentTryHandlers.push(sHandlers)
- }
- beginExBlock(b) = beginHandlers.toList
- visitedBlocks += b
- }
-
-     // if there are handlers left (i.e. handlers covering nothing, or a
-     // non-existent (dead) block), remove their catch-blocks.
- val liveBlocks = if (openHandlers.isEmpty) blocks else {
- blocks.filter(b => openHandlers.forall(h => !h.blocks.contains(b)))
- }
-
-     /** There might be open handlers, but no more blocks. This happens when a try/catch
-      * ends with `throw` or `return`, e.g.
- * def foo() { try { .. throw } catch { _ => .. throw } }
- *
- * In this case we need some code after the catch block for the auto-generated
- * `leave` instruction. So we're adding a (dead) `throw new Exception`.
- */
- val rest = currentCatchHandlers.map(handlers => {
- assert(handlers.length == 1, handlers)
- handlers.head
- }).toList
-
- if (rest.isEmpty) {
- liveBlocks
- } else {
- val b = m.code.newBlock
- b.emit(Seq(
- NEW(REFERENCE(definitions.ThrowableClass)),
- DUP(REFERENCE(definitions.ObjectClass)),
- CALL_METHOD(definitions.ThrowableClass.primaryConstructor, Static(true)),
- THROW(definitions.ThrowableClass)
- ))
- b.close
- endExBlock(b) = rest
- liveBlocks ::: List(b)
- }
- }
-
- /**
- * @param block the BasicBlock to emit code for
- * @param next the following BasicBlock, `null` if `block` is the last one
- */
- def genBlock(block: BasicBlock, prev: BasicBlock, next: BasicBlock) {
-
- def loadLocalOrAddress(local: Local, msg : String , loadAddr : Boolean) {
- debuglog(msg + " for " + local)
- val isArg = local.arg
- val i = local.index
- if (isArg)
- loadArg(mcode, loadAddr)(i)
- else
- loadLocal(i, local, mcode, loadAddr)
- }
-
- def loadFieldOrAddress(field: Symbol, isStatic: Boolean, msg: String, loadAddr : Boolean) {
- debuglog(msg + " with owner: " + field.owner +
- " flags: " + Flags.flagsToString(field.owner.flags))
- var fieldInfo = fields.get(field) match {
- case Some(fInfo) => fInfo
- case None =>
- val fInfo = getType(field.owner).GetField(msilName(field))
- fields(field) = fInfo
- fInfo
- }
- if (fieldInfo.IsVolatile) {
- mcode.Emit(OpCodes.Volatile)
- }
- if (!fieldInfo.IsLiteral) {
- if (loadAddr) {
- mcode.Emit(if (isStatic) OpCodes.Ldsflda else OpCodes.Ldflda, fieldInfo)
- } else {
- mcode.Emit(if (isStatic) OpCodes.Ldsfld else OpCodes.Ldfld, fieldInfo)
- }
- } else {
- assert(!loadAddr, "can't take AddressOf a literal field (not even with readonly. prefix) because no memory was allocated to such field ...")
- // TODO the above can be overcome by loading the value, boxing, and finally unboxing. An address to a copy of the raw value will be on the stack.
- /* We perform `field inlining' as required by CLR.
- * Emit as for a CONSTANT ICode stmt, with the twist that the constant value is available
- * as a java.lang.Object and its .NET type allows constant initialization in CLR, i.e. that type
- * is one of I1, I2, I4, I8, R4, R8, CHAR, BOOLEAN, STRING, or CLASS (in this last case,
- * only accepting nullref as value). See Table 9-1 in Lidin's book on ILAsm. */
- val value = fieldInfo.getValue()
- if (value == null) {
- mcode.Emit(OpCodes.Ldnull)
- } else {
- val typ = if (fieldInfo.FieldType.IsEnum) fieldInfo.FieldType.getUnderlyingType
- else fieldInfo.FieldType
- if (typ == clrTypes.STRING) {
- mcode.Emit(OpCodes.Ldstr, value.asInstanceOf[String])
- } else if (typ == clrTypes.BOOLEAN) {
- mcode.Emit(if (value.asInstanceOf[Boolean]) OpCodes.Ldc_I4_1
- else OpCodes.Ldc_I4_0)
- } else if (typ == clrTypes.BYTE || typ == clrTypes.UBYTE) {
- loadI4(value.asInstanceOf[Byte], mcode)
- } else if (typ == clrTypes.SHORT || typ == clrTypes.USHORT) {
- loadI4(value.asInstanceOf[Int], mcode)
- } else if (typ == clrTypes.CHAR) {
- loadI4(value.asInstanceOf[Char], mcode)
- } else if (typ == clrTypes.INT || typ == clrTypes.UINT) {
- loadI4(value.asInstanceOf[Int], mcode)
- } else if (typ == clrTypes.LONG || typ == clrTypes.ULONG) {
- mcode.Emit(OpCodes.Ldc_I8, value.asInstanceOf[Long])
- } else if (typ == clrTypes.FLOAT) {
- mcode.Emit(OpCodes.Ldc_R4, value.asInstanceOf[Float])
- } else if (typ == clrTypes.DOUBLE) {
- mcode.Emit(OpCodes.Ldc_R8, value.asInstanceOf[Double])
- } else {
- /* TODO one more case is described in Partition II, 16.2: bytearray(...) */
- abort("Unknown type for static literal field: " + fieldInfo)
- }
- }
- }
- }
-
- /** Creating objects works differently on .NET. On the JVM
- * - NEW(type) => reference on Stack
- * - DUP, load arguments, CALL_METHOD(constructor)
- *
- * On .NET, the NEW and DUP are ignored, but we emit a special method call
- * - load arguments
- * - NewObj(constructor) => reference on stack
- *
- * This variable tells whether the previous instruction was a NEW,
- * we expect a DUP which is not emitted. */
- var previousWasNEW = false
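-     // Editorial illustration, not part of the original source: for `new C(x)` the ICode
-     // sequence and the CIL emitted here differ roughly as follows:
-     //   ICode (JVM-shaped): NEW C; DUP; <load x>; CALL_METHOD(<init>, Static(true))
-     //   CIL emitted here:   <load x>; newobj instance void C::.ctor(...)
-     // NEW only sets previousWasNEW, the following DUP is swallowed, and the constructor
-     // CALL_METHOD becomes `newobj` (see the CALL_METHOD case below).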
-
- var lastLineNr: Int = 0
- var lastPos: Position = NoPosition
-
-
- // EndExceptionBlock must happen before MarkLabel because it adds the
- // Leave instruction. Otherwise, labels(block) points to the Leave
- // (inside the catch) instead of the instruction afterwards.
- for (handlers <- endExBlock.get(block); exh <- handlers) {
- currentHandlers.pop()
- for (l <- endFinallyLabels.get(exh))
- mcode.MarkLabel(l)
- mcode.EndExceptionBlock()
- }
-
- mcode.MarkLabel(labels(block))
- debuglog("Generating code for block: " + block)
-
- for (handler <- beginCatchBlock.get(block)) {
- if (!currentHandlers.isEmpty && currentHandlers.top.covered == handler.covered) {
- currentHandlers.pop()
- currentHandlers.push(handler)
- }
- if (handler.cls == NoSymbol) {
- // `finally` blocks are represented the same as `catch`, but with no catch-type
- mcode.BeginFinallyBlock()
- } else {
- val t = getType(handler.cls)
- mcode.BeginCatchBlock(t)
- }
- }
- for (handlers <- beginExBlock.get(block); exh <- handlers) {
- currentHandlers.push(exh)
- mcode.BeginExceptionBlock()
- }
-
- for (instr <- block) {
- try {
- val currentLineNr = instr.pos.line
- val skip = if(instr.pos.isRange) instr.pos.sameRange(lastPos) else (currentLineNr == lastLineNr);
- if(!skip || !dbFilenameSeen) {
- val fileName = if(dbFilenameSeen) "" else {dbFilenameSeen = true; ilasmFileName(clasz)};
- if(instr.pos.isRange) {
- val startLine = instr.pos.focusStart.line
- val endLine = instr.pos.focusEnd.line
- val startCol = instr.pos.focusStart.column
- val endCol = instr.pos.focusEnd.column
- mcode.setPosition(startLine, endLine, startCol, endCol, fileName)
- } else {
- mcode.setPosition(instr.pos.line, fileName)
- }
- lastLineNr = currentLineNr
- lastPos = instr.pos
- }
- } catch { case _: UnsupportedOperationException => () }
-
- if (previousWasNEW)
- assert(instr.isInstanceOf[DUP], block)
-
- instr match {
- case THIS(clasz) =>
- mcode.Emit(OpCodes.Ldarg_0)
-
- case CONSTANT(const) =>
- const.tag match {
- case UnitTag => ()
- case BooleanTag => mcode.Emit(if (const.booleanValue) OpCodes.Ldc_I4_1
- else OpCodes.Ldc_I4_0)
- case ByteTag => loadI4(const.byteValue, mcode)
- case ShortTag => loadI4(const.shortValue, mcode)
- case CharTag => loadI4(const.charValue, mcode)
- case IntTag => loadI4(const.intValue, mcode)
- case LongTag => mcode.Emit(OpCodes.Ldc_I8, const.longValue)
- case FloatTag => mcode.Emit(OpCodes.Ldc_R4, const.floatValue)
- case DoubleTag => mcode.Emit(OpCodes.Ldc_R8, const.doubleValue)
- case StringTag => mcode.Emit(OpCodes.Ldstr, const.stringValue)
- case NullTag => mcode.Emit(OpCodes.Ldnull)
- case ClazzTag =>
- mcode.Emit(OpCodes.Ldtoken, msilType(const.typeValue))
- mcode.Emit(OpCodes.Call, TYPE_FROM_HANDLE)
- case _ => abort("Unknown constant value: " + const)
- }
-
- case LOAD_ARRAY_ITEM(kind) =>
- (kind: @unchecked) match {
- case BOOL => mcode.Emit(OpCodes.Ldelem_I1)
- case BYTE => mcode.Emit(OpCodes.Ldelem_I1) // I1 for System.SByte, i.e. a scala.Byte
- case SHORT => mcode.Emit(OpCodes.Ldelem_I2)
- case CHAR => mcode.Emit(OpCodes.Ldelem_U2)
- case INT => mcode.Emit(OpCodes.Ldelem_I4)
- case LONG => mcode.Emit(OpCodes.Ldelem_I8)
- case FLOAT => mcode.Emit(OpCodes.Ldelem_R4)
- case DOUBLE => mcode.Emit(OpCodes.Ldelem_R8)
- case REFERENCE(cls) => mcode.Emit(OpCodes.Ldelem_Ref)
- case ARRAY(elem) => mcode.Emit(OpCodes.Ldelem_Ref)
-
- // case UNIT is not possible: an Array[Unit] will be an
- // Array[scala.runtime.BoxedUnit] (-> case REFERENCE)
- }
-
- case LOAD_LOCAL(local) => loadLocalOrAddress(local, "load_local", false)
-
- case CIL_LOAD_LOCAL_ADDRESS(local) => loadLocalOrAddress(local, "cil_load_local_address", true)
-
- case LOAD_FIELD(field, isStatic) => loadFieldOrAddress(field, isStatic, "load_field", false)
-
- case CIL_LOAD_FIELD_ADDRESS(field, isStatic) => loadFieldOrAddress(field, isStatic, "cil_load_field_address", true)
-
- case CIL_LOAD_ARRAY_ITEM_ADDRESS(kind) => mcode.Emit(OpCodes.Ldelema, msilType(kind))
-
- case CIL_NEWOBJ(msym) =>
- assert(msym.isClassConstructor)
- val constructorInfo: ConstructorInfo = getConstructor(msym)
- mcode.Emit(OpCodes.Newobj, constructorInfo)
-
- case LOAD_MODULE(module) =>
- debuglog("Generating LOAD_MODULE for: " + showsym(module))
- mcode.Emit(OpCodes.Ldsfld, getModuleInstanceField(module))
-
- case STORE_ARRAY_ITEM(kind) =>
- (kind: @unchecked) match {
- case BOOL => mcode.Emit(OpCodes.Stelem_I1)
- case BYTE => mcode.Emit(OpCodes.Stelem_I1)
- case SHORT => mcode.Emit(OpCodes.Stelem_I2)
- case CHAR => mcode.Emit(OpCodes.Stelem_I2)
- case INT => mcode.Emit(OpCodes.Stelem_I4)
- case LONG => mcode.Emit(OpCodes.Stelem_I8)
- case FLOAT => mcode.Emit(OpCodes.Stelem_R4)
- case DOUBLE => mcode.Emit(OpCodes.Stelem_R8)
- case REFERENCE(cls) => mcode.Emit(OpCodes.Stelem_Ref)
- case ARRAY(elem) => mcode.Emit(OpCodes.Stelem_Ref) // @TODO: test this! (occurs when calling a Array[Object]* vararg param method)
-
- // case UNIT not possible (see comment at LOAD_ARRAY_ITEM)
- }
-
- case STORE_LOCAL(local) =>
- val isArg = local.arg
- val i = local.index
- debuglog("store_local for " + local + ", index " + i)
-
-           // there are some locals defined by the compiler that
-           // are isArg and need to be stored.
- if (isArg) {
- if (i >= -128 && i <= 127)
- mcode.Emit(OpCodes.Starg_S, i)
- else
- mcode.Emit(OpCodes.Starg, i)
- } else {
- i match {
- case 0 => mcode.Emit(OpCodes.Stloc_0)
- case 1 => mcode.Emit(OpCodes.Stloc_1)
- case 2 => mcode.Emit(OpCodes.Stloc_2)
- case 3 => mcode.Emit(OpCodes.Stloc_3)
- case _ =>
- if (i >= -128 && i <= 127)
- mcode.Emit(OpCodes.Stloc_S, localBuilders(local))
- else
- mcode.Emit(OpCodes.Stloc, localBuilders(local))
- }
- }
-
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- mcode.Emit(OpCodes.Starg_S, 0)
-
- case STORE_FIELD(field, isStatic) =>
- val fieldInfo = fields.get(field) match {
- case Some(fInfo) => fInfo
- case None =>
- val fInfo = getType(field.owner).GetField(msilName(field))
- fields(field) = fInfo
- fInfo
- }
- mcode.Emit(if (isStatic) OpCodes.Stsfld else OpCodes.Stfld, fieldInfo)
-
- case CALL_PRIMITIVE(primitive) =>
- genPrimitive(primitive, instr.pos)
-
- case CALL_METHOD(msym, style) =>
- if (msym.isClassConstructor) {
- val constructorInfo: ConstructorInfo = getConstructor(msym)
- (style: @unchecked) match {
- // normal constructor calls are Static..
- case Static(_) =>
- if (method.symbol.isClassConstructor && method.symbol.owner == msym.owner)
- // we're generating a constructor (method: IMethod is a constructor), and we're
- // calling another constructor of the same class.
-
- // @LUC TODO: this can probably break, namely when having: class A { def this() { new A() } }
-                 // instead, we should instruct the CALL_METHOD with additional information, so that we know
-                 // whether it's an instance-creation constructor call or not.
- mcode.Emit(OpCodes.Call, constructorInfo)
- else
- mcode.Emit(OpCodes.Newobj, constructorInfo)
- case SuperCall(_) =>
- mcode.Emit(OpCodes.Call, constructorInfo)
- if (isStaticModule(clasz.symbol) &&
- notInitializedModules.contains(clasz.symbol) &&
- method.symbol.isClassConstructor)
- {
- notInitializedModules -= clasz.symbol
- mcode.Emit(OpCodes.Ldarg_0)
- mcode.Emit(OpCodes.Stsfld, getModuleInstanceField(clasz.symbol))
- }
- }
-
- } else {
-
- var doEmit = true
- getTypeOpt(msym.owner) match {
- case Some(typ) if (typ.IsEnum) => {
- def negBool() = {
- mcode.Emit(OpCodes.Ldc_I4_0)
- mcode.Emit(OpCodes.Ceq)
- }
- doEmit = false
- val name = msym.name
- if (name eq nme.EQ) { mcode.Emit(OpCodes.Ceq) }
- else if (name eq nme.NE) { mcode.Emit(OpCodes.Ceq); negBool }
- else if (name eq nme.LT) { mcode.Emit(OpCodes.Clt) }
- else if (name eq nme.LE) { mcode.Emit(OpCodes.Cgt); negBool }
- else if (name eq nme.GT) { mcode.Emit(OpCodes.Cgt) }
- else if (name eq nme.GE) { mcode.Emit(OpCodes.Clt); negBool }
- else if (name eq nme.OR) { mcode.Emit(OpCodes.Or) }
- else if (name eq nme.AND) { mcode.Emit(OpCodes.And) }
- else if (name eq nme.XOR) { mcode.Emit(OpCodes.Xor) }
- else
- doEmit = true
- }
- case _ => ()
- }
-
- // method: implicit view(FunctionX[PType0, PType1, ...,PTypeN, ResType]):DelegateType
- val (isDelegateView, paramType, resType) = beforeTyper {
- msym.tpe match {
- case MethodType(params, resultType)
- if (params.length == 1 && msym.name == nme.view_) =>
- val paramType = params(0).tpe
- val isDel = definitions.isCorrespondingDelegate(resultType, paramType)
- (isDel, paramType, resultType)
- case _ => (false, null, null)
- }
- }
- if (doEmit && isDelegateView) {
- doEmit = false
- createDelegateCaller(paramType, resType)
- }
-
- if (doEmit &&
- (msym.name == nme.PLUS || msym.name == nme.MINUS)
- && clrTypes.isDelegateType(msilType(msym.owner.tpe)))
- {
- doEmit = false
- val methodInfo: MethodInfo = getMethod(msym)
- // call it as a static method, even if the compiler (symbol) thinks it's virtual
- mcode.Emit(OpCodes.Call, methodInfo)
- mcode.Emit(OpCodes.Castclass, msilType(msym.owner.tpe))
- }
-
- if (doEmit && definitions.Delegate_scalaCallers.contains(msym)) {
- doEmit = false
- val methodSym: Symbol = definitions.Delegate_scalaCallerTargets(msym)
- val delegateType: Type = msym.tpe match {
- case MethodType(_, retType) => retType
- case _ => abort("not a method type: " + msym.tpe)
- }
- val methodInfo: MethodInfo = getMethod(methodSym)
- val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR))
- if (methodSym.isStatic) {
- mcode.Emit(OpCodes.Ldftn, methodInfo)
- } else {
- mcode.Emit(OpCodes.Dup)
- mcode.Emit(OpCodes.Ldvirtftn, methodInfo)
- }
- mcode.Emit(OpCodes.Newobj, delegCtor)
- }
-
- if (doEmit) {
- val methodInfo: MethodInfo = getMethod(msym)
- (style: @unchecked) match {
- case SuperCall(_) =>
- mcode.Emit(OpCodes.Call, methodInfo)
- case Dynamic =>
- // methodInfo.DeclaringType is null for global methods
- val isValuetypeMethod = (methodInfo.DeclaringType ne null) && (methodInfo.DeclaringType.IsValueType)
- val isValuetypeVirtualMethod = isValuetypeMethod && (methodInfo.IsVirtual)
- if (dynToStatMapped(msym)) {
- mcode.Emit(OpCodes.Call, methodInfo)
- } else if (isValuetypeVirtualMethod) {
- mcode.Emit(OpCodes.Constrained, methodInfo.DeclaringType)
- mcode.Emit(OpCodes.Callvirt, methodInfo)
- } else if (isValuetypeMethod) {
- // otherwise error "Callvirt on a value type method" ensues
- mcode.Emit(OpCodes.Call, methodInfo)
- } else {
- mcode.Emit(OpCodes.Callvirt, methodInfo)
- }
- case Static(_) =>
- if(methodInfo.IsVirtual && !mcode.Ldarg0WasJustEmitted) {
- mcode.Emit(OpCodes.Callvirt, methodInfo)
- } else mcode.Emit(OpCodes.Call, methodInfo)
- }
- }
- }
-
- case BOX(boxType) =>
- emitBox(mcode, boxType)
-
- case UNBOX(boxType) =>
- emitUnbox(mcode, boxType)
-
- case CIL_UNBOX(boxType) =>
- mcode.Emit(OpCodes.Unbox, msilType(boxType))
-
- case CIL_INITOBJ(valueType) =>
- mcode.Emit(OpCodes.Initobj, msilType(valueType))
-
- case NEW(REFERENCE(cls)) =>
- // the next instruction must be a DUP, see comment on `var previousWasNEW`
- previousWasNEW = true
-
- // works also for arrays and reference-types
- case CREATE_ARRAY(elem, dims) =>
- // TODO: handle multi dimensional arrays
- assert(dims == 1, "Can't handle multi dimensional arrays")
- mcode.Emit(OpCodes.Newarr, msilType(elem))
-
- // works for arrays and reference-types
- case IS_INSTANCE(tpe) =>
- mcode.Emit(OpCodes.Isinst, msilType(tpe))
- mcode.Emit(OpCodes.Ldnull)
- mcode.Emit(OpCodes.Ceq)
- mcode.Emit(OpCodes.Ldc_I4_0)
- mcode.Emit(OpCodes.Ceq)
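-             // Editorial note, not part of the original source: the isinst/ldnull/ceq/ldc.i4.0/ceq
-             // sequence turns the isinst result (object-or-null) into a 0/1 Boolean: the first
-             // ceq tests for null, the second one negates that test.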
-
- // works for arrays and reference-types
- // part from the scala reference: "S <: T does not imply
-           // quote from the Scala reference: "S <: T does not imply
- // to cast an array of S to an array of T if such a cast
- // is permitted in the host environment."
- case CHECK_CAST(tpknd) =>
- val tMSIL = msilType(tpknd)
- mcode.Emit(OpCodes.Castclass, tMSIL)
-
- // no SWITCH is generated when there's
- // - a default case ("case _ => ...") in the matching expr
- // - OR is used ("case 1 | 2 => ...")
- case SWITCH(tags, branches) =>
- // tags is List[List[Int]]; a list of integers for every label.
- // if the int on stack is 4, and 4 is in the second list => jump
- // to second label
- // branches is List[BasicBlock]
- // the labels to jump to (the last one is the default one)
-
- val switchLocal = mcode.DeclareLocal(MINT)
-             // several switch variables will appear with the same name in the
-             // assembly code, but this causes no trouble
- switchLocal.SetLocalSymInfo("$switch_var")
-
- mcode.Emit(OpCodes.Stloc, switchLocal)
- var i = 0
- for (l <- tags) {
- var targetLabel = labels(branches(i))
- for (i <- l) {
- mcode.Emit(OpCodes.Ldloc, switchLocal)
- loadI4(i, mcode)
- mcode.Emit(OpCodes.Beq, targetLabel)
- }
- i += 1
- }
- val defaultTarget = labels(branches(i))
- if (next != branches(i))
- mcode.Emit(OpCodes.Br, defaultTarget)
-
- case JUMP(whereto) =>
- val (leaveHandler, leaveFinally, lfTarget) = leavesHandler(block, whereto)
- if (leaveHandler) {
- if (leaveFinally) {
- if (lfTarget.isDefined) mcode.Emit(OpCodes.Leave, lfTarget.get)
- else mcode.Emit(OpCodes.Endfinally)
- } else
- mcode.Emit(OpCodes.Leave, labels(whereto))
- } else if (next != whereto)
- mcode.Emit(OpCodes.Br, labels(whereto))
-
- case CJUMP(success, failure, cond, kind) =>
- // cond is TestOp (see Primitives.scala), and can take
- // values EQ, NE, LT, GE LE, GT
- // kind is TypeKind
- val isFloat = kind == FLOAT || kind == DOUBLE
- val emit = (c: TestOp, l: Label) => emitBr(c, l, isFloat)
- emitCondBr(block, cond, success, failure, next, emit)
-
- case CZJUMP(success, failure, cond, kind) =>
- emitCondBr(block, cond, success, failure, next, emitBrBool(_, _))
-
- case RETURN(kind) =>
- if (currentHandlers.isEmpty)
- mcode.Emit(OpCodes.Ret)
- else {
- val (local, label) = returnFromHandler(kind)
- if (kind != UNIT)
- mcode.Emit(OpCodes.Stloc, local)
- mcode.Emit(OpCodes.Leave, label)
- }
-
- case THROW(_) =>
- mcode.Emit(OpCodes.Throw)
-
- case DROP(kind) =>
- mcode.Emit(OpCodes.Pop)
-
- case DUP(kind) =>
- // see comment on `var previousWasNEW`
- if (!previousWasNEW)
- mcode.Emit(OpCodes.Dup)
- else
- previousWasNEW = false
-
- case MONITOR_ENTER() =>
- mcode.Emit(OpCodes.Call, MMONITOR_ENTER)
-
- case MONITOR_EXIT() =>
- mcode.Emit(OpCodes.Call, MMONITOR_EXIT)
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) | LOAD_EXCEPTION(_) =>
- ()
- }
-
- } // end for (instr <- b) { .. }
- } // end genBlock
-
- def genPrimitive(primitive: Primitive, pos: Position) {
- primitive match {
- case Negation(kind) =>
- kind match {
-           // CHECK: is it possible to get this for BOOL? If so, verify.
- case BOOL | BYTE | CHAR | SHORT | INT | LONG | FLOAT | DOUBLE =>
- mcode.Emit(OpCodes.Neg)
-
- case _ => abort("Impossible to negate a " + kind)
- }
-
- case Arithmetic(op, kind) =>
- op match {
- case ADD => mcode.Emit(OpCodes.Add)
- case SUB => mcode.Emit(OpCodes.Sub)
- case MUL => mcode.Emit(OpCodes.Mul)
- case DIV => mcode.Emit(OpCodes.Div)
- case REM => mcode.Emit(OpCodes.Rem)
- case NOT => mcode.Emit(OpCodes.Not) //bitwise complement (one's complement)
- case _ => abort("Unknown arithmetic primitive " + primitive )
- }
-
- case Logical(op, kind) => op match {
- case AND => mcode.Emit(OpCodes.And)
- case OR => mcode.Emit(OpCodes.Or)
- case XOR => mcode.Emit(OpCodes.Xor)
- }
-
- case Shift(op, kind) => op match {
- case LSL => mcode.Emit(OpCodes.Shl)
- case ASR => mcode.Emit(OpCodes.Shr)
- case LSR => mcode.Emit(OpCodes.Shr_Un)
- }
-
- case Conversion(src, dst) =>
- debuglog("Converting from: " + src + " to: " + dst)
-
- dst match {
- case BYTE => mcode.Emit(OpCodes.Conv_I1) // I1 for System.SByte, i.e. a scala.Byte
- case SHORT => mcode.Emit(OpCodes.Conv_I2)
- case CHAR => mcode.Emit(OpCodes.Conv_U2)
- case INT => mcode.Emit(OpCodes.Conv_I4)
- case LONG => mcode.Emit(OpCodes.Conv_I8)
- case FLOAT => mcode.Emit(OpCodes.Conv_R4)
- case DOUBLE => mcode.Emit(OpCodes.Conv_R8)
- case _ =>
- Console.println("Illegal conversion at: " + clasz +
- " at: " + pos.source + ":" + pos.line)
- }
-
- case ArrayLength(_) =>
- mcode.Emit(OpCodes.Ldlen)
-
- case StartConcat =>
- mcode.Emit(OpCodes.Newobj, MSTRING_BUILDER_CONSTR)
-
-
- case StringConcat(el) =>
- val elemType : MsilType = el match {
- case REFERENCE(_) | ARRAY(_) => MOBJECT
- case _ => msilType(el)
- }
-
- val argTypes:Array[MsilType] = Array(elemType)
- val stringBuilderAppend = MSTRING_BUILDER.GetMethod("Append", argTypes )
- mcode.Emit(OpCodes.Callvirt, stringBuilderAppend)
-
- case EndConcat =>
- mcode.Emit(OpCodes.Callvirt, MSTRING_BUILDER_TOSTRING)
-
- case _ =>
- abort("Unimplemented primitive " + primitive)
- }
- } // end genPrimitive
-
-
- ////////////////////// loading ///////////////////////
-
- def loadI4(value: Int, code: ILGenerator): Unit = value match {
- case -1 => code.Emit(OpCodes.Ldc_I4_M1)
- case 0 => code.Emit(OpCodes.Ldc_I4_0)
- case 1 => code.Emit(OpCodes.Ldc_I4_1)
- case 2 => code.Emit(OpCodes.Ldc_I4_2)
- case 3 => code.Emit(OpCodes.Ldc_I4_3)
- case 4 => code.Emit(OpCodes.Ldc_I4_4)
- case 5 => code.Emit(OpCodes.Ldc_I4_5)
- case 6 => code.Emit(OpCodes.Ldc_I4_6)
- case 7 => code.Emit(OpCodes.Ldc_I4_7)
- case 8 => code.Emit(OpCodes.Ldc_I4_8)
- case _ =>
- if (value >= -128 && value <= 127)
- code.Emit(OpCodes.Ldc_I4_S, value)
- else
- code.Emit(OpCodes.Ldc_I4, value)
- }
-
- def loadArg(code: ILGenerator, loadAddr: Boolean)(i: Int) =
- if (loadAddr) {
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldarga_S, i)
- else
- code.Emit(OpCodes.Ldarga, i)
- } else {
- i match {
- case 0 => code.Emit(OpCodes.Ldarg_0)
- case 1 => code.Emit(OpCodes.Ldarg_1)
- case 2 => code.Emit(OpCodes.Ldarg_2)
- case 3 => code.Emit(OpCodes.Ldarg_3)
- case _ =>
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldarg_S, i)
- else
- code.Emit(OpCodes.Ldarg, i)
- }
- }
-
- def loadLocal(i: Int, local: Local, code: ILGenerator, loadAddr: Boolean) =
- if (loadAddr) {
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldloca_S, localBuilders(local))
- else
- code.Emit(OpCodes.Ldloca, localBuilders(local))
- } else {
- i match {
- case 0 => code.Emit(OpCodes.Ldloc_0)
- case 1 => code.Emit(OpCodes.Ldloc_1)
- case 2 => code.Emit(OpCodes.Ldloc_2)
- case 3 => code.Emit(OpCodes.Ldloc_3)
- case _ =>
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldloc_S, localBuilders(local))
- else
- code.Emit(OpCodes.Ldloc, localBuilders(local))
- }
- }
-
- ////////////////////// branches ///////////////////////
-
- /** Returns a Triple (Boolean, Boolean, Option[Label])
- * - whether the jump leaves some exception block (try / catch / finally)
-    *  - whether it leaves a finally handler (finally block, but not its try / catch)
-    *  - a label to jump to when leaving the finally handler
- * . None to leave directly using `endfinally`
- * . Some(label) to emit `leave label` (for try / catch inside a finally handler)
- */
- def leavesHandler(from: BasicBlock, to: BasicBlock): (Boolean, Boolean, Option[Label]) =
- if (currentHandlers.isEmpty) (false, false, None)
- else {
- val h = currentHandlers.head
- val leaveHead = { h.covers(from) != h.covers(to) ||
- h.blocks.contains(from) != h.blocks.contains(to) }
- if (leaveHead) {
- // we leave the innermost exception block.
-         // find out if we also leave some `finally` handler
- currentHandlers.find(e => {
- e.cls == NoSymbol && e.blocks.contains(from) != e.blocks.contains(to)
- }) match {
- case Some(finallyHandler) =>
- if (h == finallyHandler) {
- // the finally handler is the innermost, so we can emit `endfinally` directly
- (true, true, None)
- } else {
- // we need to `Leave` to the `endfinally` of the next outer finally handler
- val l = endFinallyLabels.getOrElseUpdate(finallyHandler, mcode.DefineLabel())
- (true, true, Some(l))
- }
- case None =>
- (true, false, None)
- }
- } else (false, false, None)
- }
-
- def emitCondBr(block: BasicBlock, cond: TestOp, success: BasicBlock, failure: BasicBlock,
- next: BasicBlock, emitBrFun: (TestOp, Label) => Unit) {
- val (sLeaveHandler, sLeaveFinally, slfTarget) = leavesHandler(block, success)
- val (fLeaveHandler, fLeaveFinally, flfTarget) = leavesHandler(block, failure)
-
- if (sLeaveHandler || fLeaveHandler) {
- val sLabelOpt = if (sLeaveHandler) {
- val leaveSLabel = mcode.DefineLabel()
- emitBrFun(cond, leaveSLabel)
- Some(leaveSLabel)
- } else {
- emitBrFun(cond, labels(success))
- None
- }
-
- if (fLeaveHandler) {
- if (fLeaveFinally) {
- if (flfTarget.isDefined) mcode.Emit(OpCodes.Leave, flfTarget.get)
- else mcode.Emit(OpCodes.Endfinally)
- } else
- mcode.Emit(OpCodes.Leave, labels(failure))
- } else
- mcode.Emit(OpCodes.Br, labels(failure))
-
- sLabelOpt.map(l => {
- mcode.MarkLabel(l)
- if (sLeaveFinally) {
- if (slfTarget.isDefined) mcode.Emit(OpCodes.Leave, slfTarget.get)
- else mcode.Emit(OpCodes.Endfinally)
- } else
- mcode.Emit(OpCodes.Leave, labels(success))
- })
- } else {
- if (next == success) {
- emitBrFun(cond.negate, labels(failure))
- } else {
- emitBrFun(cond, labels(success))
- if (next != failure) {
- mcode.Emit(OpCodes.Br, labels(failure))
- }
- }
- }
- }
-
- def emitBr(condition: TestOp, dest: Label, isFloat: Boolean) {
- condition match {
- case EQ => mcode.Emit(OpCodes.Beq, dest)
- case NE => mcode.Emit(OpCodes.Bne_Un, dest)
- case LT => mcode.Emit(if (isFloat) OpCodes.Blt_Un else OpCodes.Blt, dest)
- case GE => mcode.Emit(if (isFloat) OpCodes.Bge_Un else OpCodes.Bge, dest)
- case LE => mcode.Emit(if (isFloat) OpCodes.Ble_Un else OpCodes.Ble, dest)
- case GT => mcode.Emit(if (isFloat) OpCodes.Bgt_Un else OpCodes.Bgt, dest)
- }
- }
-
- def emitBrBool(cond: TestOp, dest: Label) {
- (cond: @unchecked) match {
- // EQ -> Brfalse, NE -> Brtrue; this is because we come from
- // a CZJUMP. If the value on the stack is 0 (e.g. a boolean
- // method returned false), and we are in the case EQ, then
- // we need to emit Brfalse (EQ Zero means false). vice versa
- case EQ => mcode.Emit(OpCodes.Brfalse, dest)
- case NE => mcode.Emit(OpCodes.Brtrue, dest)
- }
- }
-
- ////////////////////// local vars ///////////////////////
-
- /**
- * Compute the indexes of each local variable of the given
- * method.
- */
- def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1
-
- val params = m.params
- for (l <- params) {
- debuglog("Index value for parameter " + l + ": " + idx)
- l.index = idx
- idx += 1 // sizeOf(l.kind)
- }
-
- val locvars = m.locals filterNot (params contains)
- idx = 0
-
- for (l <- locvars) {
- debuglog("Index value for local variable " + l + ": " + idx)
- l.index = idx
- idx += 1 // sizeOf(l.kind)
- }
-
- }
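-   // Editorial illustration, not part of the original source: for an instance method
-   // `def f(a: Int, b: String)` with one extra local `tmp`, the indices assigned above
-   // are: argument 0 = `this`, a.index = 1, b.index = 2, and tmp.index = 0 (in CIL,
-   // locals are numbered independently of arguments).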
-
- ////////////////////// Utilities ////////////////////////
-
-   /** Return a name for this symbol that can be used on the .NET
-    * platform. It removes spaces from names.
-    *
-    * Special handling: scala.Nothing and scala.Null are 'erased' to
-    * scala.runtime.Nothing$ and scala.runtime.Null$. This is needed because they are
-    * not real classes, and they mean 'abrupt termination upon evaluation
-    * of that expression' or 'null' respectively. This handling is
-    * already done in GenICode, but here we need to remove references
-    * to these types from method signatures, because such classes cannot
-    * exist in the classpath: the type checker would be very confused.
- */
- def msilName(sym: Symbol): String = {
- val suffix = sym.moduleSuffix
- // Flags.JAVA: "symbol was not defined by a scala-class" (java, or .net-class)
-
- if (sym == definitions.NothingClass)
- return "scala.runtime.Nothing$"
- else if (sym == definitions.NullClass)
- return "scala.runtime.Null$"
-
- (if (sym.isClass || (sym.isModule && !sym.isMethod)) {
- if (sym.isNestedClass) sym.simpleName
- else sym.fullName
- } else
- sym.simpleName.toString.trim()) + suffix
- }
-
-
- ////////////////////// flags ///////////////////////
-
- def msilTypeFlags(sym: Symbol): Int = {
- var mf: Int = TypeAttributes.AutoLayout | TypeAttributes.AnsiClass
-
- if(sym.isNestedClass) {
- mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NestedPrivate else TypeAttributes.NestedPublic)
- } else {
- mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NotPublic else TypeAttributes.Public)
- }
- mf = mf | (if (sym hasFlag Flags.ABSTRACT) TypeAttributes.Abstract else 0)
- mf = mf | (if (sym.isTrait && !sym.isImplClass) TypeAttributes.Interface else TypeAttributes.Class)
- mf = mf | (if (sym isFinal) TypeAttributes.Sealed else 0)
-
- sym.annotations foreach { a => a match {
- case AnnotationInfo(SerializableAttr, _, _) =>
- // TODO: add the Serializable TypeAttribute also if the annotation
- // System.SerializableAttribute is present (.net annotation, not scala)
- // Best way to do it: compare with
- // definitions.getClass("System.SerializableAttribute").tpe
- // when frontend available
- mf = mf | TypeAttributes.Serializable
- case _ => ()
- }}
-
- mf
- // static: not possible (or?)
- }
-
- def msilMethodFlags(sym: Symbol): Short = {
- var mf: Int = MethodAttributes.HideBySig |
- (if (sym hasFlag Flags.PRIVATE) MethodAttributes.Private
- else MethodAttributes.Public)
-
- if (!sym.isClassConstructor) {
- if (sym.isStaticMember)
- mf = mf | FieldAttributes.Static // coincidentally, same value as for MethodAttributes.Static ...
- else {
- mf = mf | MethodAttributes.Virtual
- if (sym.isFinal && !getType(sym.owner).IsInterface)
- mf = mf | MethodAttributes.Final
- if (sym.isDeferred || getType(sym.owner).IsInterface)
- mf = mf | MethodAttributes.Abstract
- }
- }
-
- if (sym.isStaticMember) {
- mf = mf | MethodAttributes.Static
- }
-
- // constructors of module classes should be private
- if (sym.isPrimaryConstructor && isTopLevelModule(sym.owner)) {
- mf |= MethodAttributes.Private
- mf &= ~(MethodAttributes.Public)
- }
-
- mf.toShort
- }
-
- def msilFieldFlags(sym: Symbol): Short = {
- var mf: Int =
- if (sym hasFlag Flags.PRIVATE) FieldAttributes.Private
- else if (sym hasFlag Flags.PROTECTED) FieldAttributes.FamORAssem
- else FieldAttributes.Public
-
- if (sym hasFlag Flags.FINAL)
- mf = mf | FieldAttributes.InitOnly
-
- if (sym.isStaticMember)
- mf = mf | FieldAttributes.Static
-
- // TRANSIENT: "not serialized", VOLATILE: doesn't exist on .net
-     // TODO: add this attribute also if the field has the custom attribute
-     // System.NonSerializedAttribute
- sym.annotations.foreach( a => a match {
- case AnnotationInfo(TransientAtt, _, _) =>
- mf = mf | FieldAttributes.NotSerialized
- case _ => ()
- })
-
- mf.toShort
- }
-
- ////////////////////// builders, types ///////////////////////
-
- var entryPoint: Symbol = _
-
- val notInitializedModules = mutable.HashSet[Symbol]()
-
- // TODO: create fields also in def createType, and not in genClass,
-     // add a getField method (it only works as it is because fields are never
-     // accessed from outside a class)
-
- val localBuilders = mutable.HashMap[Local, LocalBuilder]()
-
- private[GenMSIL] def findEntryPoint(cls: IClass) {
-
- def isEntryPoint(sym: Symbol):Boolean = {
- if (isStaticModule(sym.owner) && msilName(sym) == "main")
- if (sym.tpe.paramTypes.length == 1) {
- toTypeKind(sym.tpe.paramTypes(0)) match {
- case ARRAY(elem) =>
- if (elem.toType.typeSymbol == definitions.StringClass) {
- return true
- }
- case _ => ()
- }
- }
- false
- }
-
- if((entryPoint == null) && opt.showClass.isDefined) { // TODO introduce dedicated setting instead
- val entryclass = opt.showClass.get.toString
- val cfn = cls.symbol.fullName
- if(cfn == entryclass) {
- for (m <- cls.methods; if isEntryPoint(m.symbol)) { entryPoint = m.symbol }
- if(entryPoint == null) { warning("Couldn't find main method in class " + cfn) }
- }
- }
-
- if (firstSourceName == "")
- if (cls.symbol.sourceFile != null) // is null for nested classes
- firstSourceName = cls.symbol.sourceFile.name
- }
-
- // #####################################################################
- // get and create types
-
- private def msilType(t: TypeKind): MsilType = (t: @unchecked) match {
- case UNIT => MVOID
- case BOOL => MBOOL
- case BYTE => MBYTE
- case SHORT => MSHORT
- case CHAR => MCHAR
- case INT => MINT
- case LONG => MLONG
- case FLOAT => MFLOAT
- case DOUBLE => MDOUBLE
- case REFERENCE(cls) => getType(cls)
- case ARRAY(elem) =>
- msilType(elem) match {
- // For type builders, cannot call "clrTypes.mkArrayType" because this looks up
- // the type "tp" in the assembly (not in the HashMap "types" of the backend).
- // This can fail for nested types because the builders are not complete yet.
- case tb: TypeBuilder => tb.MakeArrayType()
- case tp: MsilType => clrTypes.mkArrayType(tp)
- }
- }
-
- private def msilType(tpe: Type): MsilType = msilType(toTypeKind(tpe))
-
- private def msilParamTypes(sym: Symbol): Array[MsilType] = {
- sym.tpe.paramTypes.map(msilType).toArray
- }
-
- def getType(sym: Symbol) = getTypeOpt(sym).getOrElse(abort(showsym(sym)))
-
- /**
-    * Get an MSIL type from a symbol. First look in the backend's `types` map, then
-    * look up the name using clrTypes.getType.
- */
- def getTypeOpt(sym: Symbol): Option[MsilType] = {
- val tmp = types.get(sym)
- tmp match {
- case typ @ Some(_) => typ
- case None =>
- def typeString(sym: Symbol): String = {
- val s = if (sym.isNestedClass) typeString(sym.owner) +"+"+ sym.simpleName
- else sym.fullName
- if (sym.isModuleClass && !sym.isTrait) s + "$" else s
- }
- val name = typeString(sym)
- val typ = clrTypes.getType(name)
- if (typ == null)
- None
- else {
- types(sym) = typ
- Some(typ)
- }
- }
- }
-
- def mapType(sym: Symbol, mType: MsilType) {
- assert(mType != null, showsym(sym))
- types(sym) = mType
- }
-
- def createTypeBuilder(iclass: IClass) {
- /**
-      * First look in the backend's `types` map; if that fails, check whether it's a class being compiled;
-      * otherwise look up by name (clrTypes.getType calls the static method msil.Type.GetType(fullname)).
- */
- def msilTypeFromSym(sym: Symbol): MsilType = {
- types.get(sym).getOrElse {
- classes.get(sym) match {
- case Some(iclass) =>
- msilTypeBuilderFromSym(sym)
- case None =>
- getType(sym)
- }
- }
- }
-
- def msilTypeBuilderFromSym(sym: Symbol): TypeBuilder = {
- if(!(types.contains(sym) && types(sym).isInstanceOf[TypeBuilder])){
- val iclass = classes(sym)
- assert(iclass != null)
- createTypeBuilder(iclass)
- }
- types(sym).asInstanceOf[TypeBuilder]
- }
-
- val sym = iclass.symbol
- if (types.contains(sym) && types(sym).isInstanceOf[TypeBuilder])
- return
-
- def isInterface(s: Symbol) = s.isTrait && !s.isImplClass
- val parents: List[Type] =
- if (sym.info.parents.isEmpty) List(definitions.ObjectClass.tpe)
- else sym.info.parents.distinct
-
- val superType : MsilType = if (isInterface(sym)) null else msilTypeFromSym(parents.head.typeSymbol)
- debuglog("super type: " + parents(0).typeSymbol + ", msil type: " + superType)
-
- val interfaces: Array[MsilType] =
- parents.tail.map(p => msilTypeFromSym(p.typeSymbol)).toArray
- if (parents.length > 1) {
- if (settings.debug.value) {
- log("interfaces:")
- for (i <- 0.until(interfaces.length)) {
- log(" type: " + parents(i + 1).typeSymbol + ", msil type: " + interfaces(i))
- }
- }
- }
-
- val tBuilder = if (sym.isNestedClass) {
- val ownerT = msilTypeBuilderFromSym(sym.owner).asInstanceOf[TypeBuilder]
- ownerT.DefineNestedType(msilName(sym), msilTypeFlags(sym), superType, interfaces)
- } else {
- mmodule.DefineType(msilName(sym), msilTypeFlags(sym), superType, interfaces)
- }
- mapType(sym, tBuilder)
- } // createTypeBuilder
-
- def createClassMembers(iclass: IClass) {
- try {
- createClassMembers0(iclass)
- }
- catch {
- case e: Throwable =>
- java.lang.System.err.println(showsym(iclass.symbol))
- java.lang.System.err.println("with methods = " + iclass.methods)
- throw e
- }
- }
-
- def createClassMembers0(iclass: IClass) {
-
- val mtype = getType(iclass.symbol).asInstanceOf[TypeBuilder]
-
- for (ifield <- iclass.fields) {
- val sym = ifield.symbol
- debuglog("Adding field: " + sym.fullName)
-
- var attributes = msilFieldFlags(sym)
- val fieldTypeWithCustomMods =
- new PECustomMod(msilType(sym.tpe),
- customModifiers(sym.annotations))
- val fBuilder = mtype.DefineField(msilName(sym),
- fieldTypeWithCustomMods,
- attributes)
- fields(sym) = fBuilder
- addAttributes(fBuilder, sym.annotations)
- } // all iclass.fields iterated over
-
- if (isStaticModule(iclass.symbol)) {
- val sc = iclass.lookupStaticCtor
- if (sc.isDefined) {
- val m = sc.get
- val oldLastBlock = m.lastBlock
- val lastBlock = m.newBlock()
- oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
- // call object's private ctor from static ctor
- lastBlock.emit(CIL_NEWOBJ(iclass.symbol.primaryConstructor))
- lastBlock.emit(DROP(toTypeKind(iclass.symbol.tpe)))
- lastBlock emit RETURN(UNIT)
- lastBlock.close
- }
- }
-
- if (iclass.symbol != definitions.ArrayClass) {
- for (m: IMethod <- iclass.methods) {
- val sym = m.symbol
- debuglog("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " +
- sym.owner.fullName + "::" + sym.name)
-
- val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder]
- assert(mtype == ownerType, "mtype = " + mtype + "; ownerType = " + ownerType)
- var paramTypes = msilParamTypes(sym)
- val attr = msilMethodFlags(sym)
-
- if (m.symbol.isClassConstructor) {
- val constr =
- ownerType.DefineConstructor(attr, CallingConventions.Standard, paramTypes)
- for (i <- 0.until(paramTypes.length)) {
- constr.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
- }
- mapConstructor(sym, constr)
- addAttributes(constr, sym.annotations)
- } else {
- var resType = msilType(m.returnType)
- val method =
- ownerType.DefineMethod(msilName(sym), attr, resType, paramTypes)
- for (i <- 0.until(paramTypes.length)) {
- method.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
- }
- if (!methods.contains(sym))
- mapMethod(sym, method)
- addAttributes(method, sym.annotations)
- debuglog("\t created MethodBuilder " + method)
- }
- }
- } // method builders created for non-array iclass
-
- if (isStaticModule(iclass.symbol)) {
- addModuleInstanceField(iclass.symbol)
- notInitializedModules += iclass.symbol
- if (iclass.lookupStaticCtor.isEmpty) {
- addStaticInit(iclass.symbol)
- }
- }
-
- } // createClassMembers0
-
- private def isTopLevelModule(sym: Symbol): Boolean =
- beforeRefchecks {
- sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
- }
-
- // if the module is lifted it does not need to be initialized in
- // its static constructor, and the MODULE$ field is not required.
- // the outer class will care about it.
- private def isStaticModule(sym: Symbol): Boolean = {
- // .net inner classes: removed '!sym.hasFlag(Flags.LIFTED)', added
- // 'sym.isStatic'. -> no longer compatible without skipping flatten!
- sym.isModuleClass && sym.isStatic && !sym.isImplClass
- }
-
- private def isCloneable(sym: Symbol): Boolean = {
- !sym.annotations.forall( a => a match {
- case AnnotationInfo(CloneableAttr, _, _) => false
- case _ => true
- })
- }
-
- private def addModuleInstanceField(sym: Symbol) {
- debuglog("Adding Module-Instance Field for " + showsym(sym))
- val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
- val fb = tBuilder.DefineField(MODULE_INSTANCE_NAME,
- tBuilder,
- (FieldAttributes.Public |
- //FieldAttributes.InitOnly |
- FieldAttributes.Static).toShort)
- fields(sym) = fb
- }
-
-
- // the symbol may be an object-symbol (module-symbol), or a module-class-symbol
- private def getModuleInstanceField(sym: Symbol): FieldInfo = {
- assert(sym.isModule || sym.isModuleClass, "Expected module: " + showsym(sym))
-
- // when called by LOAD_MODULE, the corresponding type may not
- // exist yet -> make a getType
- val moduleClassSym = if (sym.isModule) sym.moduleClass else sym
-
- // TODO: get module field for modules not defined in the
- // source currently compiling (e.g. Console)
-
- fields get moduleClassSym match {
- case Some(sym) => sym
- case None =>
- //val mclass = types(moduleClassSym)
- val nameInMetadata = nestingAwareFullClassname(moduleClassSym)
- val mClass = clrTypes.getType(nameInMetadata)
- val mfield = mClass.GetField("MODULE$")
- assert(mfield ne null, "module not found " + showsym(moduleClassSym))
- fields(moduleClassSym) = mfield
- mfield
- }
-
- //fields(moduleClassSym)
- }
-
- def nestingAwareFullClassname(csym: Symbol) : String = {
- val suffix = csym.moduleSuffix
- val res = if (csym.isNestedClass)
- nestingAwareFullClassname(csym.owner) + "+" + csym.encodedName
- else
- csym.fullName
- res + suffix
- }
-
- /** Adds a static initializer which creates an instance of the module
- * class (calls the primary constructor). A special primary constructor
- * will be generated (notInitializedModules) which stores the new instance
- * in the MODULE$ field right after the super call.
- */
- private def addStaticInit(sym: Symbol) {
- val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
-
- val staticInit = tBuilder.DefineConstructor(
- (MethodAttributes.Static | MethodAttributes.Public).toShort,
- CallingConventions.Standard,
- MsilType.EmptyTypes)
-
- val sicode = staticInit.GetILGenerator()
-
- val instanceConstructor = constructors(sym.primaryConstructor)
-
- // there are no constructor parameters. assuming the constructor takes no parameters
- // is fine: we call (in the static constructor) the constructor of the module class,
- // which takes no arguments - an object definition cannot take constructor arguments.
- sicode.Emit(OpCodes.Newobj, instanceConstructor)
- // the stsfld is done in the instance constructor, just after the super call.
- sicode.Emit(OpCodes.Pop)
-
- sicode.Emit(OpCodes.Ret)
- }
-
- private def generateMirrorClass(sym: Symbol) {
- val tBuilder = getType(sym)
- assert(sym.isModuleClass, "Can't generate Mirror-Class for the Non-Module class " + sym)
- debuglog("Dumping mirror class for object: " + sym)
- val moduleName = msilName(sym)
- val mirrorName = moduleName.substring(0, moduleName.length() - 1)
- val mirrorTypeBuilder = mmodule.DefineType(mirrorName,
- TypeAttributes.Class |
- TypeAttributes.Public |
- TypeAttributes.Sealed,
- MOBJECT,
- MsilType.EmptyTypes)
-
- val iclass = classes(sym)
-
- for (m <- sym.tpe.nonPrivateMembers
- if m.owner != definitions.ObjectClass && !m.isProtected &&
- m.isMethod && !m.isClassConstructor && !m.isStaticMember && !m.isCase &&
- !m.isDeferred)
- {
- debuglog(" Mirroring method: " + m)
- val paramTypes = msilParamTypes(m)
- val paramNames: Array[String] = new Array[String](paramTypes.length)
- for (i <- 0 until paramTypes.length)
- paramNames(i) = "x_" + i
-
- // CHECK: verify if getMethodName is better than msilName
- val mirrorMethod = mirrorTypeBuilder.DefineMethod(msilName(m),
- (MethodAttributes.Public |
- MethodAttributes.Static).toShort,
- msilType(m.tpe.resultType),
- paramTypes)
-
- var i = 0
- while (i < paramTypes.length) {
- mirrorMethod.DefineParameter(i, ParameterAttributes.None, paramNames(i))
- i += 1
- }
-
- val mirrorCode = mirrorMethod.GetILGenerator()
- mirrorCode.Emit(OpCodes.Ldsfld, getModuleInstanceField(sym))
- val mInfo = getMethod(m)
- for (paramidx <- 0.until(paramTypes.length)) {
- val mInfoParams = mInfo.GetParameters
- val loadAddr = mInfoParams(paramidx).ParameterType.IsByRef
- loadArg(mirrorCode, loadAddr)(paramidx)
- }
-
- mirrorCode.Emit(OpCodes.Callvirt, getMethod(m))
- mirrorCode.Emit(OpCodes.Ret)
- }
-
- addSymtabAttribute(sym.sourceModule, mirrorTypeBuilder)
-
- mirrorTypeBuilder.CreateType()
- mirrorTypeBuilder.setSourceFilepath(iclass.cunit.source.file.path)
- }
-
-
- // #####################################################################
- // delegate callers
-
- var delegateCallers: TypeBuilder = _
- var nbDelegateCallers: Int = 0
-
- private def initDelegateCallers() = {
- delegateCallers = mmodule.DefineType("$DelegateCallers", TypeAttributes.Public |
- TypeAttributes.Sealed)
- }
-
- private def createDelegateCaller(functionType: Type, delegateType: Type) = {
- if (delegateCallers == null)
- initDelegateCallers()
- // create a field and store the function-object
- val mFunctionType: MsilType = msilType(functionType)
- val anonfunField: FieldBuilder = delegateCallers.DefineField(
- "$anonfunField$$" + nbDelegateCallers, mFunctionType,
- (FieldAttributes.InitOnly | FieldAttributes.Public | FieldAttributes.Static).toShort)
- mcode.Emit(OpCodes.Stsfld, anonfunField)
-
-
- // create the static caller method and the delegate object
- val (params, returnType) = delegateType.member(nme.apply).tpe match {
- case MethodType(delParams, delReturn) => (delParams, delReturn)
- case _ => abort("not a delegate type: " + delegateType)
- }
- val caller: MethodBuilder = delegateCallers.DefineMethod(
- "$delegateCaller$$" + nbDelegateCallers,
- (MethodAttributes.Final | MethodAttributes.Public | MethodAttributes.Static).toShort,
- msilType(returnType), (params map (_.tpe)).map(msilType).toArray)
- for (i <- 0 until params.length)
- caller.DefineParameter(i, ParameterAttributes.None, "arg" + i) // FIXME: use name of parameter symbol
- val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR))
- mcode.Emit(OpCodes.Ldnull)
- mcode.Emit(OpCodes.Ldftn, caller)
- mcode.Emit(OpCodes.Newobj, delegCtor)
-
-
- // create the static caller method body
- val functionApply: MethodInfo = getMethod(functionType.member(nme.apply))
- val dcode: ILGenerator = caller.GetILGenerator()
- dcode.Emit(OpCodes.Ldsfld, anonfunField)
- for (i <- 0 until params.length) {
- loadArg(dcode, false /* TODO confirm whether passing actual as-is to formal is correct wrt the ByRef attribute of the param */)(i)
- emitBox(dcode, toTypeKind(params(i).tpe))
- }
- dcode.Emit(OpCodes.Callvirt, functionApply)
- emitUnbox(dcode, toTypeKind(returnType))
- dcode.Emit(OpCodes.Ret)
-
- nbDelegateCallers = nbDelegateCallers + 1
-
- } //def createDelegateCaller
-
- def emitBox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match {
- // doesn't make sense, unit as parameter..
- case UNIT => code.Emit(OpCodes.Ldsfld, boxedUnit)
- case BOOL | BYTE | SHORT | CHAR | INT | LONG | FLOAT | DOUBLE =>
- code.Emit(OpCodes.Box, msilType(boxType))
- case REFERENCE(cls) if clrTypes.isValueType(cls) =>
- code.Emit(OpCodes.Box, (msilType(boxType)))
- case REFERENCE(_) | ARRAY(_) =>
- warning("Tried to BOX a non-valuetype.")
- ()
- }
-
- def emitUnbox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match {
- case UNIT => code.Emit(OpCodes.Pop)
- /* (1) it's essential to keep the code emitted here (as of now plain calls to System.Convert.ToBlaBla methods)
- behaviorally equivalent w.r.t. BoxesRunTime.unboxToBlaBla methods
- (case null: that's easy, case boxed: track changes to unboxBlaBla)
- (2) See also: asInstanceOf to cast from Any to number,
- tracked in http://lampsvn.epfl.ch/trac/scala/ticket/4437 */
- case BOOL => code.Emit(OpCodes.Call, toBool)
- case BYTE => code.Emit(OpCodes.Call, toSByte)
- case SHORT => code.Emit(OpCodes.Call, toShort)
- case CHAR => code.Emit(OpCodes.Call, toChar)
- case INT => code.Emit(OpCodes.Call, toInt)
- case LONG => code.Emit(OpCodes.Call, toLong)
- case FLOAT => code.Emit(OpCodes.Call, toFloat)
- case DOUBLE => code.Emit(OpCodes.Call, toDouble)
- case REFERENCE(cls) if clrTypes.isValueType(cls) =>
- code.Emit(OpCodes.Unbox, msilType(boxType))
- code.Emit(OpCodes.Ldobj, msilType(boxType))
- case REFERENCE(_) | ARRAY(_) =>
- warning("Tried to UNBOX a non-valuetype.")
- ()
- }
-
- // #####################################################################
- // get and create methods / constructors
-
- def getConstructor(sym: Symbol): ConstructorInfo = constructors.get(sym) match {
- case Some(constr) => constr
- case None =>
- val mClass = getType(sym.owner)
- val constr = mClass.GetConstructor(msilParamTypes(sym))
- if (constr eq null) {
- java.lang.System.out.println("Cannot find constructor " + sym.owner + "::" + sym.name)
- java.lang.System.out.println("scope = " + sym.owner.tpe.decls)
- abort(sym.fullName)
- }
- else {
- mapConstructor(sym, constr)
- constr
- }
- }
-
- def mapConstructor(sym: Symbol, cInfo: ConstructorInfo) = {
- constructors(sym) = cInfo
- }
-
- private def getMethod(sym: Symbol): MethodInfo = {
-
- methods.get(sym) match {
- case Some(method) => method
- case None =>
- val mClass = getType(sym.owner)
- try {
- val method = mClass.GetMethod(msilName(sym), msilParamTypes(sym),
- msilType(sym.tpe.resultType))
- if (method eq null) {
- java.lang.System.out.println("Cannot find method " + sym.owner + "::" + msilName(sym))
- java.lang.System.out.println("scope = " + sym.owner.tpe.decls)
- abort(sym.fullName)
- }
- else {
- mapMethod(sym, method)
- method
- }
- }
- catch {
- case e: Exception =>
- Console.println("While looking up " + mClass + "::" + sym.nameString)
- Console.println("\t" + showsym(sym))
- throw e
- }
- }
- }
-
- /*
- * add a mapping between sym and mInfo
- */
- private def mapMethod(sym: Symbol, mInfo: MethodInfo) {
- assert (mInfo != null, mInfo)
- methods(sym) = mInfo
- }
-
- /*
- * add mapping between sym and method with newName, paramTypes of newClass
- */
- private def mapMethod(sym: Symbol, newClass: MsilType, newName: String, paramTypes: Array[MsilType]) {
- val methodInfo = newClass.GetMethod(newName, paramTypes)
- assert(methodInfo != null, "Can't find mapping for " + sym + " -> " +
- newName + "(" + paramTypes + ")")
- mapMethod(sym, methodInfo)
- if (methodInfo.IsStatic)
- dynToStatMapped += sym
- }
-
- /*
- * add mapping between method with name and paramTypes of clazz to
- * method with newName and newParamTypes of newClass (used for instance
- * for "wait")
- */
- private def mapMethod(
- clazz: Symbol, name: Name, paramTypes: Array[Type],
- newClass: MsilType, newName: String, newParamTypes: Array[MsilType]) {
- val methodSym = lookupMethod(clazz, name, paramTypes)
- assert(methodSym != null, "cannot find method " + name + "(" +
- paramTypes + ")" + " in class " + clazz)
- mapMethod(methodSym, newClass, newName, newParamTypes)
- }
-
- /*
- * add mapping for member with name and paramTypes to member
- * newName of newClass (same parameters)
- */
- private def mapMethod(
- clazz: Symbol, name: Name, paramTypes: Array[Type],
- newClass: MsilType, newName: String) {
- mapMethod(clazz, name, paramTypes, newClass, newName, paramTypes map msilType)
- }
-
- /*
- * add mapping for all methods with name of clazz to the corresponding
- * method (same parameters) with newName of newClass
- */
- private def mapMethod(
- clazz: Symbol, name: Name,
- newClass: MsilType, newName: String) {
- val memberSym: Symbol = clazz.tpe.member(name)
- memberSym.tpe match {
- // alternatives: List[Symbol]
- case OverloadedType(_, alternatives) =>
- alternatives.foreach(s => mapMethod(s, newClass, newName, msilParamTypes(s)))
-
- // paramTypes: List[Type], resType: Type
- case MethodType(params, resType) =>
- mapMethod(memberSym, newClass, newName, msilParamTypes(memberSym))
-
- case _ =>
- abort("member not found: " + clazz + ", " + name)
- }
- }
-
-
- /*
- * find the method in clazz with name and paramTypes
- */
- private def lookupMethod(clazz: Symbol, name: Name, paramTypes: Array[Type]): Symbol = {
- val memberSym = clazz.tpe.member(name)
- memberSym.tpe match {
- case OverloadedType(_, alternatives) =>
- alternatives.find(s => {
- var i: Int = 0
- var typesOK: Boolean = true
- if (paramTypes.length == s.tpe.paramTypes.length) {
- while(i < paramTypes.length) {
- if (paramTypes(i) != s.tpe.paramTypes(i))
- typesOK = false
- i += 1
- }
- } else {
- typesOK = false
- }
- typesOK
- }) match {
- case Some(sym) => sym
- case None => abort("member of " + clazz + ", " + name + "(" +
- paramTypes + ") not found")
- }
-
- case MethodType(_, _) => memberSym
-
- case _ => abort("member not found: " + name + " of " + clazz)
- }
- }
-
- private def showsym(sym: Symbol): String = (sym.toString +
- "\n symbol = " + Flags.flagsToString(sym.flags) + " " + sym +
- "\n owner = " + Flags.flagsToString(sym.owner.flags) + " " + sym.owner
- )
-
- } // class BytecodeGenerator
-
-} // class GenMSIL
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 23f932b5b4..c49f23852f 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package backend.opt
import scala.tools.nsc.backend.icode.analysis.LubException
-import scala.tools.nsc.symtab._
/**
* @author Iulian Dragos
@@ -19,6 +18,8 @@ abstract class ClosureElimination extends SubComponent {
val phaseName = "closelim"
+ override val enabled: Boolean = settings.Xcloselim
+
/** Create a new phase */
override def newPhase(p: Phase) = new ClosureEliminationPhase(p)
@@ -72,8 +73,10 @@ abstract class ClosureElimination extends SubComponent {
def name = phaseName
val closser = new ClosureElim
- override def apply(c: IClass): Unit =
- closser analyzeClass c
+ override def apply(c: IClass): Unit = {
+ if (closser ne null)
+ closser analyzeClass c
+ }
}
/**
@@ -83,7 +86,7 @@ abstract class ClosureElimination extends SubComponent {
*
*/
class ClosureElim {
- def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim.value) {
+ def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim) {
log(s"Analyzing ${cls.methods.size} methods in $cls.")
cls.methods foreach { m =>
analyzeMethod(m)
@@ -97,7 +100,7 @@ abstract class ClosureElimination extends SubComponent {
/* Some embryonic copy propagation. */
def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
cpp.init(m)
- cpp.run
+ cpp.run()
m.linearizedBlocks() foreach { bb =>
var info = cpp.in(bb)
@@ -109,7 +112,7 @@ abstract class ClosureElimination extends SubComponent {
val t = info.getBinding(l)
t match {
case Deref(This) | Const(_) =>
- bb.replaceInstruction(i, valueToInstruction(t));
+ bb.replaceInstruction(i, valueToInstruction(t))
debuglog(s"replaced $i with $t")
case _ =>
@@ -120,7 +123,7 @@ abstract class ClosureElimination extends SubComponent {
case LOAD_FIELD(f, false) /* if accessible(f, m.symbol) */ =>
def replaceFieldAccess(r: Record) {
- val Record(cls, bindings) = r
+ val Record(cls, _) = r
info.getFieldNonRecordValue(r, f) foreach { v =>
bb.replaceInstruction(i, DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil)
debuglog(s"replaced $i with $v")
@@ -188,28 +191,20 @@ abstract class ClosureElimination extends SubComponent {
case Boxed(LocalVar(v)) =>
LOAD_LOCAL(v)
}
-
- /** is field 'f' accessible from method 'm'? */
- def accessible(f: Symbol, m: Symbol): Boolean =
- f.isPublic || (f.isProtected && (f.enclosingPackageClass == m.enclosingPackageClass))
} /* class ClosureElim */
/** Peephole optimization. */
abstract class PeepholeOpt {
-
- private var method: IMethod = NoIMethod
-
/** Concrete implementations will perform their optimizations here */
def peep(bb: BasicBlock, i1: Instruction, i2: Instruction): Option[List[Instruction]]
var liveness: global.icodes.liveness.LivenessAnalysis = null
def apply(m: IMethod): Unit = if (m.hasCode) {
- method = m
liveness = new global.icodes.liveness.LivenessAnalysis
liveness.init(m)
- liveness.run
+ liveness.run()
m foreachBlock transformBlock
}
@@ -235,7 +230,7 @@ abstract class ClosureElimination extends SubComponent {
h = t.head
t = t.tail
}
- } while (redo);
+ } while (redo)
b fromList newInstructions
}
}
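
As context for the closelim changes above: the rewrite in analyzeMethod replaces a LOAD_LOCAL whose reaching binding is a known constant (or `this`) with a direct push of that value. Below is a minimal standalone sketch of that copy-propagation idea over straight-line code, using a made-up simplified instruction set rather than the real icode types; every name in it is illustrative only.

object CopyPropSketch {
  // Hypothetical, simplified instruction set -- not the compiler's icode ADT.
  sealed trait Instr
  case class ConstPush(value: Int)    extends Instr // push a literal
  case class StoreLocal(name: String) extends Instr // pop the top into a local
  case class LoadLocal(name: String)  extends Instr // push a local
  case class Call(method: String)     extends Instr // leaves an unknown value on top

  /** Replace loads of locals known to hold a constant with a direct push. */
  def propagate(block: List[Instr]): List[Instr] = {
    var known = Map.empty[String, Int]  // constants currently bound to locals
    var top: Option[Int] = None         // constant on top of the simulated stack, if known
    block.map {
      case i @ ConstPush(v) =>
        top = Some(v); i
      case i @ StoreLocal(n) =>
        known = top.fold(known - n)(v => known.updated(n, v)); top = None; i
      case LoadLocal(n) if known.contains(n) =>
        val v = known(n); top = Some(v); ConstPush(v) // the actual rewrite
      case i @ LoadLocal(n) =>
        top = known.get(n); i
      case i @ Call(_) =>
        top = None; i // result of a call is unknown
    }
  }

  def main(args: Array[String]): Unit = {
    val before = List(ConstPush(1), StoreLocal("x"), Call("f"), LoadLocal("x"))
    println(propagate(before)) // LoadLocal("x") becomes ConstPush(1)
  }
}

The real phase additionally tracks field and `this` bindings (the Record/LOAD_FIELD case above) and runs per basic block with dataflow input states; the sketch only handles one straight-line block.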
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
new file mode 100644
index 0000000000..64a0727440
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
@@ -0,0 +1,622 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author James Iry
+ */
+
+package scala
+package tools.nsc
+package backend.opt
+
+import scala.tools.nsc.backend.icode.analysis.LubException
+import scala.annotation.tailrec
+
+/**
+ * ConstantOptimization uses abstract interpretation to approximate for
+ * each instruction what constants a variable or stack slot might hold
+ * or cannot hold. From this it will eliminate conditionals where only one
+ * branch is reachable, e.g. to eliminate unnecessary null checks.
+ *
+ * With some more work it could be extended to
+ * - cache stable values (final fields, modules) in locals
+ * - replace the copy propagation in ClosureElimination
+ * - fold constants
+ * - eliminate unnecessary stores and loads
+ * - propagate knowledge gathered from conditionals for further optimization
+ */
+abstract class ConstantOptimization extends SubComponent {
+ import global._
+ import icodes._
+ import icodes.opcodes._
+
+ val phaseName = "constopt"
+
+ /** Create a new phase */
+ override def newPhase(p: Phase) = new ConstantOptimizationPhase(p)
+
+ override val enabled: Boolean = settings.YconstOptimization
+
+ /**
+ * The constant optimization phase.
+ */
+ class ConstantOptimizationPhase(prev: Phase) extends ICodePhase(prev) {
+
+ def name = phaseName
+
+ override def apply(c: IClass) {
+ if (settings.YconstOptimization) {
+ val analyzer = new ConstantOptimizer
+ analyzer optimizeClass c
+ }
+ }
+ }
+
+ class ConstantOptimizer {
+ def optimizeClass(cls: IClass) {
+ log(s"Analyzing ${cls.methods.size} methods in $cls.")
+ cls.methods foreach { m =>
+ optimizeMethod(m)
+ }
+ }
+
+ def optimizeMethod(m: IMethod) {
+ if (m.hasCode) {
+ log(s"Analyzing ${m.symbol}")
+ val replacementInstructions = interpretMethod(m)
+ for (block <- m.blocks) {
+ if (replacementInstructions contains block) {
+ val instructions = replacementInstructions(block)
+ block.replaceInstruction(block.lastInstruction, instructions)
+ }
+ }
+ }
+ }
+
+ /**
+ * A single possible (or impossible) datum that can be held in Contents
+ */
+ private sealed abstract class Datum
+ /**
+ * A constant datum
+ */
+ private case class Const(c: Constant) extends Datum {
+ def isIntAssignable = c.tag >= BooleanTag && c.tag <= IntTag
+ def toInt = c.tag match {
+ case BooleanTag => if (c.booleanValue) 1 else 0
+ case _ => c.intValue
+ }
+
+ /**
+ * True if this constant would compare to other as true under primitive eq
+ */
+ override def equals(other: Any) = other match {
+ case oc @ Const(o) => (this eq oc) || (if (this.isIntAssignable && oc.isIntAssignable) this.toInt == oc.toInt else c.value == o.value)
+ case _ => false
+ }
+
+ /**
+ * Hash code consistent with equals
+ */
+ override def hashCode = if (this.isIntAssignable) this.toInt else c.hashCode
+
+ }
+ /**
+ * A datum that has been Boxed via a BOX instruction
+ */
+ private case class Boxed(c: Datum) extends Datum
+
+ /**
+ * The knowledge we have about the abstract state of one location in terms
+ * of what constants it might or cannot hold. Forms a lower
+ * lattice where lower elements in the lattice indicate less knowledge.
+ *
+ * With the following partial ordering (where '>' indicates more precise knowledge)
+ *
+ * Possible(xs) > Possible(xs + y)
+ * Possible(xs) > Impossible(ys)
+ * Impossible(xs + y) > Impossible(xs)
+ *
+ * and the following merges, which indicate merging knowledge from two paths through
+ * the code,
+ *
+ * // left must be 1 or 2, right must be 2 or 3, so the merge must be 1, 2 or 3
+ * Possible(xs) merge Possible(ys) => Possible(xs union ys)
+ *
+ * // Left says can't be 2 or 3, right says can't be 3 or 4
+ * // then it's not 3 (it could be 2 from the right or 4 from the left)
+ * Impossible(xs) merge Impossible(ys) => Impossible(xs intersect ys)
+ *
+ * // Left says it can't be 2 or 3, right says it must be 3 or 4, then
+ * // it can't be 2 (left rules out 4 and right says 3 is possible)
+ * Impossible(xs) merge Possible(ys) => Impossible(xs -- ys)
+ *
+ * Intuitively, Possible(empty) says that a location can't hold anything;
+ * it's uninitialized. However, Possible(empty) never appears in the code.
+ *
+ * Conversely, Impossible(empty) says nothing is impossible; it could be
+ * anything. Impossible(empty) is given a synonym UNKNOWN and is used
+ * for, e.g., the result of an arbitrary method call.
+ */
+ private sealed abstract class Contents {
+ /**
+ * Join this Contents with another coming from another path. Join enforces
+ * the lattice structure. It is symmetrical and never moves upward in the
+ * lattice
+ */
+ final def merge(other: Contents): Contents = if (this eq other) this else (this, other) match {
+ case (Possible(possible1), Possible(possible2)) =>
+ Possible(possible1 union possible2)
+ case (Impossible(impossible1), Impossible(impossible2)) =>
+ Impossible(impossible1 intersect impossible2)
+ case (Impossible(impossible), Possible(possible)) =>
+ Impossible(impossible -- possible)
+ case (Possible(possible), Impossible(impossible)) =>
+ Impossible(impossible -- possible)
+ }
+ // TODO we could have more fine-grained knowledge, e.g. know that 0 < x < 3. But for now equality/inequality is a good start.
+ def mightEqual(other: Contents): Boolean
+ def mightNotEqual(other: Contents): Boolean
+ }
+ private def SingleImpossible(x: Datum) = new Impossible(Set(x))
+
+ /**
+ * The location is known to have one of a set of values.
+ */
+ private case class Possible(possible: Set[Datum]) extends Contents {
+ assert(possible.nonEmpty, "Contradiction: had an empty possible set indicating an uninitialized location")
+ def mightEqual(other: Contents): Boolean = (this eq other) || (other match {
+ // two Possibles might be equal if they have any possible members in common
+ case Possible(possible2) => (possible intersect possible2).nonEmpty
+ // a possible can be equal to an impossible if the impossible doesn't rule
+ // out all the possibilities
+ case Impossible(possible2) => (possible -- possible2).nonEmpty
+ })
+ def mightNotEqual(other: Contents): Boolean = (this ne other) && (other match {
+ // two Possibles might not be equal if either has possible members that the other doesn't
+ case Possible(possible2) => (possible -- possible2).nonEmpty || (possible2 -- possible).nonEmpty
+ case Impossible(_) => true
+ })
+ }
+ private def SinglePossible(x: Datum) = new Possible(Set(x))
+
+ /**
+ * The location is known to not have any of a set of values (e.g. null).
+ */
+ private case class Impossible(impossible: Set[Datum]) extends Contents {
+ def mightEqual(other: Contents): Boolean = (this eq other) || (other match {
+ case Possible(_) => other mightEqual this
+ case _ => true
+ })
+ def mightNotEqual(other: Contents): Boolean = (this eq other) || (other match {
+ case Possible(_) => other mightNotEqual this
+ case _ => true
+ })
+ }
+
+ /**
+ * Our entire knowledge about the contents of all variables and the stack. It forms
+ * a lattice primarily driven by the lattice structure of Contents.
+ *
+ * In addition to the rules of Contents, State has the following properties:
+ * - The merge of two sets of locals holds the merges of locals found in the intersection
+ * of the two sets of locals. Locals not found in a
+ * locals map are thus possibly uninitialized and attempting to load them results
+ * in an error.
+ * - The stack heights of two states must match, otherwise it's an error to merge them
+ *
+ * State is immutable in order to aid in structure sharing of local maps and stacks
+ */
+ private case class State(locals: Map[Local, Contents], stack: List[Contents]) {
+ def mergeLocals(olocals: Map[Local, Contents]): Map[Local, Contents] = if (locals eq olocals) locals else Map((for {
+ key <- (locals.keySet intersect olocals.keySet).toSeq
+ } yield (key, locals(key) merge olocals(key))): _*)
+
+ def merge(other: State): State = if (this eq other) this else {
+ @tailrec def mergeStacks(l: List[Contents], r: List[Contents], out: List[Contents]): List[Contents] = (l, r) match {
+ case (Nil, Nil) => out.reverse
+ case (l, r) if l eq r => out.reverse ++ l
+ case (lhead :: ltail, rhead :: rtail) => mergeStacks(ltail, rtail, (lhead merge rhead) :: out)
+ case _ => sys.error("Mismatched stack heights")
+ }
+
+ val newLocals = mergeLocals(other.locals)
+
+ val newStack = if (stack eq other.stack) stack else mergeStacks(stack, other.stack, Nil)
+ State(newLocals, newStack)
+ }
+
+ /**
+ * Peek at the element n slots below the top of the stack (peek(0) is the top) without modifying it. An error if the stack is not that deep
+ */
+ def peek(n: Int): Contents = stack(n)
+ /**
+ * Push contents onto a stack
+ */
+ def push(contents: Contents): State = this copy (stack = contents :: stack)
+ /**
+ * Drop n elements from the stack
+ */
+ def drop(number: Int): State = this copy (stack = stack drop number)
+ /**
+ * Store the top of the stack into the specified local. An error if the stack
+ * is empty
+ */
+ def store(variable: Local): State = {
+ val contents = stack.head
+ val newVariables = locals + ((variable, contents))
+ new State(newVariables, stack.tail)
+ }
+ /**
+ * Load the specified local onto the top of the stack. An error if the local is uninitialized.
+ */
+ def load(variable: Local): State = {
+ val contents: Contents = locals.getOrElse(variable, sys.error(s"$variable is not initialized"))
+ push(contents)
+ }
+ /**
+ * A copy of this State with an empty stack
+ */
+ def cleanStack: State = if (stack.isEmpty) this else this copy (stack = Nil)
+ }
+
+ // some precomputed constants
+ private val NULL = Const(Constant(null: Any))
+ private val UNKNOWN = Impossible(Set.empty)
+ private val NOT_NULL = SingleImpossible(NULL)
+ private val CONST_UNIT = SinglePossible(Const(Constant(())))
+ private val CONST_FALSE = SinglePossible(Const(Constant(false)))
+ private val CONST_ZERO_BYTE = SinglePossible(Const(Constant(0: Byte)))
+ private val CONST_ZERO_SHORT = SinglePossible(Const(Constant(0: Short)))
+ private val CONST_ZERO_CHAR = SinglePossible(Const(Constant(0: Char)))
+ private val CONST_ZERO_INT = SinglePossible(Const(Constant(0: Int)))
+ private val CONST_ZERO_LONG = SinglePossible(Const(Constant(0: Long)))
+ private val CONST_ZERO_FLOAT = SinglePossible(Const(Constant(0.0f)))
+ private val CONST_ZERO_DOUBLE = SinglePossible(Const(Constant(0.0d)))
+ private val CONST_NULL = SinglePossible(NULL)
+
+ /**
+ * Given a TypeKind, figure out what '0' for it means in order to interpret CZJUMP
+ */
+ private def getZeroOf(k: TypeKind): Contents = k match {
+ case UNIT => CONST_UNIT
+ case BOOL => CONST_FALSE
+ case BYTE => CONST_ZERO_BYTE
+ case SHORT => CONST_ZERO_SHORT
+ case CHAR => CONST_ZERO_CHAR
+ case INT => CONST_ZERO_INT
+ case LONG => CONST_ZERO_LONG
+ case FLOAT => CONST_ZERO_FLOAT
+ case DOUBLE => CONST_ZERO_DOUBLE
+ case REFERENCE(_) => CONST_NULL
+ case ARRAY(_) => CONST_NULL
+ case BOXED(_) => CONST_NULL
+ case ConcatClass => abort("no zero of ConcatClass")
+ }
+
+ // normal locals can't be null, so we use null to mean the magic 'this' local
+ private val THIS_LOCAL: Local = null
+
+ /**
+ * interpret a single instruction to find its impact on the abstract state
+ */
+ private def interpretInst(in: State, inst: Instruction): State = {
+ // pop the consumed number of values off the `in` state's stack, producing a new state
+ def dropConsumed: State = in drop inst.consumed
+
+ inst match {
+ case THIS(_) =>
+ in load THIS_LOCAL
+
+ case CONSTANT(k) =>
+ // treat NaN as UNKNOWN because NaN must never equal NaN
+ val const = if (k.isNaN) UNKNOWN
+ else SinglePossible(Const(k))
+ in push const
+
+ case LOAD_ARRAY_ITEM(_) | LOAD_FIELD(_, _) | CALL_PRIMITIVE(_) =>
+ dropConsumed push UNKNOWN
+
+ case LOAD_LOCAL(local) =>
+ // TODO if a local is known to hold a constant then we can replace this instruction with a push of that constant
+ in load local
+
+ case STORE_LOCAL(local) =>
+ in store local
+
+ case STORE_THIS(_) =>
+ // if a local is already known to have a constant and we're replacing with the same constant then we can
+ // replace this with a drop
+ in store THIS_LOCAL
+
+ case CALL_METHOD(_, _) =>
+ // TODO we could special case implementations of equals that are known, e.g. String#equals
+ // We could turn Possible(string constants).equals(Possible(string constants) into an eq check
+ // We could turn nonConstantString.equals(constantString) into constantString.equals(nonConstantString)
+ // and eliminate the null check that likely precedes this call
+ val initial = dropConsumed
+ (0 until inst.produced).foldLeft(initial) { case (know, _) => know push UNKNOWN }
+
+ case BOX(_) =>
+ val value = in peek 0
+ // we simulate boxing by, um, boxing the possible/impossible contents
+ // so if we have Possible(1,2) originally then we'll end up with
+ // a Possible(Boxed(1), Boxed(2))
+ // Similarly, if we know the input is not a 0 then we'll know the
+ // output is not a Boxed(0)
+ val newValue = value match {
+ case Possible(values) => Possible(values map Boxed)
+ case Impossible(values) => Impossible(values map Boxed)
+ }
+ dropConsumed push newValue
+
+ case UNBOX(_) =>
+ val value = in peek 0
+ val newValue = value match {
+ // if we have a Possible, then all the possibilities
+ // should themselves be Boxes. In that
+ // case we can merge them to figure out what the UNBOX will produce
+ case Possible(inners) =>
+ assert(inners.nonEmpty, "Empty possible set indicating an uninitialized location")
+ val sanitized: Set[Contents] = (inners map {
+ case Boxed(content) => SinglePossible(content)
+ case _ => UNKNOWN
+ })
+ sanitized reduce (_ merge _)
+ // if we have an impossible then the thing that's impossible
+ // should be a box. We'll unbox that to see what we get
+ case unknown@Impossible(inners) =>
+ if (inners.isEmpty) {
+ unknown
+ } else {
+ val sanitized: Set[Contents] = (inners map {
+ case Boxed(content) => SingleImpossible(content)
+ case _ => UNKNOWN
+ })
+ sanitized reduce (_ merge _)
+ }
+ }
+ dropConsumed push newValue
+
+ case LOAD_MODULE(_) | NEW(_) | LOAD_EXCEPTION(_) =>
+ in push NOT_NULL
+
+ case CREATE_ARRAY(_, _) =>
+ dropConsumed push NOT_NULL
+
+ case IS_INSTANCE(_) =>
+ // TODO IS_INSTANCE is going to be followed by a C(Z)JUMP
+ // and in the branch the C(Z)JUMP takes when the check is "true" we can
+ // know that whatever was checked was not null
+ // see the TODO on CJUMP for more information about propagating null
+ // information
+ // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and
+ // replace with a constant false, but how often is a knowable null checked for instanceof?
+ // TODO we could track type information and statically know to eliminate IS_INSTANCE
+ // which might be a nice win under specialization
+ dropConsumed push UNKNOWN // it's actually a Possible(true, false) but since the following instruction
+ // will be a conditional jump comparing to true or false there is
+ // nothing to be gained by being more precise
+
+ case CHECK_CAST(_) =>
+ // TODO we could track type information and statically know to eliminate CHECK_CAST
+ // but that's probably not a huge win
+ in
+
+ case DUP(_) =>
+ val value = in peek 0
+ in push value
+
+ case DROP(_) | MONITOR_ENTER() | MONITOR_EXIT() | STORE_ARRAY_ITEM(_) | STORE_FIELD(_, _) =>
+ dropConsumed
+
+ case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
+ in
+
+ case JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | RETURN(_) | THROW(_) | SWITCH(_, _) =>
+ dumpClassesAndAbort("Unexpected block ending instruction: " + inst)
+ }
+ }
+ /**
+ * interpret the last instruction of a block, which will be a jump, a conditional branch, a switch, a throw, or a return.
+ * It will result in a map from target blocks to the input state computed for that block. It
+ * also computes a replacement list of instructions
+ */
+ private def interpretLast(in: State, inst: Instruction): (Map[BasicBlock, State], List[Instruction]) = {
+ def canSwitch(in1: Contents, tagSet: List[Int]) = {
+ in1 mightEqual Possible(tagSet.toSet map { tag: Int => Const(Constant(tag)) })
+ }
+
+ /* common code for interpreting CJUMP and CZJUMP */
+ def interpretConditional(kind: TypeKind, val1: Contents, val2: Contents, success: BasicBlock, failure: BasicBlock, cond: TestOp): (Map[BasicBlock, State], List[Instruction]) = {
+ // TODO use reaching analysis to update the state in the two branches
+ // e.g. if the comparison was checking null equality on local x
+ // then in the success branch we know x is null and
+ // on the failure branch we know it is not
+ // in fact, with copy propagation we could propagate that knowledge
+ // back through a chain of locations
+ //
+ // TODO if we do all that we need to be careful in the
+ // case that success and failure are the same target block
+ // because we're using a Map and don't want one possible state to clobber the other
+ // alternatively, maybe we should just replace the conditional with a jump if both targets are the same
+
+ def mightEqual = val1 mightEqual val2
+ def mightNotEqual = val1 mightNotEqual val2
+ def guaranteedEqual = mightEqual && !mightNotEqual
+
+ def succPossible = cond match {
+ case EQ => mightEqual
+ case NE => mightNotEqual
+ case LT | GT => !guaranteedEqual // if the two are guaranteed to be equal then they can't be LT/GT
+ case LE | GE => true
+ }
+
+ def failPossible = cond match {
+ case EQ => mightNotEqual
+ case NE => mightEqual
+ case LT | GT => true
+ case LE | GE => !guaranteedEqual // if the two are guaranteed to be equal then they must be LE/GE
+ }
+
+ val out = in drop inst.consumed
+
+ var result = Map[BasicBlock, State]()
+ if (succPossible) {
+ result += ((success, out))
+ }
+
+ if (failPossible) {
+ result += ((failure, out))
+ }
+
+ val replacements = if (result.size == 1) List.fill(inst.consumed)(DROP(kind)) :+ JUMP(result.keySet.head)
+ else inst :: Nil
+
+ (result, replacements)
+ }
+
+ inst match {
+ case JUMP(whereto) =>
+ (Map((whereto, in)), inst :: Nil)
+
+ case CJUMP(success, failure, cond, kind) =>
+ val in1 = in peek 0
+ val in2 = in peek 1
+ interpretConditional(kind, in1, in2, success, failure, cond)
+
+ case CZJUMP(success, failure, cond, kind) =>
+ val in1 = in peek 0
+ val in2 = getZeroOf(kind)
+ interpretConditional(kind, in1, in2, success, failure, cond)
+
+ case SWITCH(tags, labels) =>
+ val in1 = in peek 0
+ val reachableNormalLabels = tags zip labels collect { case (tagSet, label) if canSwitch(in1, tagSet) => label }
+ val reachableLabels = if (labels.lengthCompare(tags.length) > 0) {
+ // if we've got an extra label then it's the default
+ val defaultLabel = labels.last
+ // see if the default is reachable by seeing if the input might be out of the set
+ // of all tags
+ val allTags = Possible(tags.flatten.toSet map { tag: Int => Const(Constant(tag)) })
+ if (in1 mightNotEqual allTags) {
+ reachableNormalLabels :+ defaultLabel
+ } else {
+ reachableNormalLabels
+ }
+ } else {
+ reachableNormalLabels
+ }
+ // TODO similar to the comment in interpretConditional, we should update the State going into each
+ // branch based on which tag is being matched. Also, just like interpretConditional, if target blocks
+ // are the same we need to merge State rather than clobber
+
+ // alternatively, maybe we should simplify the SWITCH to not have the same target labels
+ val newState = in drop inst.consumed
+ val result = Map(reachableLabels map { label => (label, newState) }: _*)
+ if (reachableLabels.size == 1) (result, DROP(INT) :: JUMP(reachableLabels.head) :: Nil)
+ else (result, inst :: Nil)
+
+ // these instructions don't have target blocks
+ // (exceptions are assumed to be reachable from all instructions)
+ case RETURN(_) | THROW(_) =>
+ (Map.empty, inst :: Nil)
+
+ case _ =>
+ dumpClassesAndAbort("Unexpected non-block ending instruction: " + inst)
+ }
+ }
+
+ /**
+ * Analyze a single block to find how it transforms an input state into states for its successor blocks
+ * Also computes a list of instructions to be used to replace its last instruction
+ */
+ private def interpretBlock(in: State, block: BasicBlock): (Map[BasicBlock, State], Map[BasicBlock, State], List[Instruction]) = {
+ debuglog(s"interpreting block $block")
+ // number of instructions excluding the last one
+ val normalCount = block.size - 1
+
+ var exceptionState = in.cleanStack
+ var normalExitState = in
+ var idx = 0
+ while (idx < normalCount) {
+ val inst = block(idx)
+ normalExitState = interpretInst(normalExitState, inst)
+ if (normalExitState.locals ne exceptionState.locals)
+ exceptionState = exceptionState.copy(locals = exceptionState mergeLocals normalExitState.locals)
+ idx += 1
+ }
+
+ val pairs = block.exceptionSuccessors map { b => (b, exceptionState) }
+ val exceptionMap = Map(pairs: _*)
+
+ val (normalExitMap, newInstructions) = interpretLast(normalExitState, block.lastInstruction)
+
+ (normalExitMap, exceptionMap, newInstructions)
+ }
+
+ /**
+ * Analyze a single method to find replacement instructions
+ */
+ private def interpretMethod(m: IMethod): Map[BasicBlock, List[Instruction]] = {
+ import scala.collection.mutable.{ Set => MSet, Map => MMap }
+
+ debuglog(s"interpreting method $m")
+ var iterations = 0
+
+ // initially we know that 'this' is not null and the params are initialized to some unknown value
+ val initThis: Iterator[(Local, Contents)] = if (m.isStatic) Iterator.empty else Iterator.single((THIS_LOCAL, NOT_NULL))
+ val initOtherLocals: Iterator[(Local, Contents)] = m.params.iterator map { param => (param, UNKNOWN) }
+ val initialLocals: Map[Local, Contents] = Map((initThis ++ initOtherLocals).toSeq: _*)
+ val initialState = State(initialLocals, Nil)
+
+ // worklist of basic blocks to process, initially the start block
+ val worklist = MSet(m.startBlock)
+ // worklist of exception basic blocks. They're kept in a separate set so they can be
+ // processed after normal flow basic blocks. That's because exception basic blocks
+ // are more likely to have multiple predecessors and queueing them for later
+ // increases the chances that they'll only need to be interpreted once
+ val exceptionlist = MSet[BasicBlock]()
+ // our current best guess at what the input state is for each block
+ // initially we only know about the start block
+ val inputState = MMap[BasicBlock, State]((m.startBlock, initialState))
+
+ // update the inputState map based on new information from interpreting a block
+ // When the input state of a block changes, add it back to the work list to be
+ // reinterpreted
+ def updateInputStates(outputStates: Map[BasicBlock, State], worklist: MSet[BasicBlock]) {
+ for ((block, newState) <- outputStates) {
+ val oldState = inputState get block
+ val updatedState = oldState map (x => x merge newState) getOrElse newState
+ if (oldState != Some(updatedState)) {
+ worklist add block
+ inputState(block) = updatedState
+ }
+ }
+ }
+
+ // the instructions to be used as the last instructions on each block
+ val replacements = MMap[BasicBlock, List[Instruction]]()
+
+ while (worklist.nonEmpty || exceptionlist.nonEmpty) {
+ if (worklist.isEmpty) {
+ // once the worklist is empty, start processing exception blocks
+ val block = exceptionlist.head
+ exceptionlist remove block
+ worklist add block
+ } else {
+ iterations += 1
+ val block = worklist.head
+ worklist remove block
+ val (normalExitMap, exceptionMap, newInstructions) = interpretBlock(inputState(block), block)
+
+ updateInputStates(normalExitMap, worklist)
+ updateInputStates(exceptionMap, exceptionlist)
+ replacements(block) = newInstructions
+ }
+ }
+
+ debuglog(s"method $m with ${m.blocks.size} reached fixpoint in $iterations iterations")
+ replacements.toMap
+ }
+ }
+}
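
For reference, the Possible/Impossible merge rules documented in the Contents comment of the new file can be exercised on their own. Here is a minimal sketch using plain Int sets in place of the phase's Datum/Constant values; the object and names below are illustrative simplifications, not part of the patch.

object ContentsLatticeSketch {
  // Simplified model of the Contents lattice described above: a location either
  // definitely holds one of a set of values, or definitely holds none of them.
  sealed trait Contents {
    def merge(other: Contents): Contents = (this, other) match {
      case (Possible(a), Possible(b))     => Possible(a union b)
      case (Impossible(a), Impossible(b)) => Impossible(a intersect b)
      case (Impossible(a), Possible(b))   => Impossible(a -- b)
      case (Possible(a), Impossible(b))   => Impossible(b -- a)
    }
  }
  case class Possible(values: Set[Int])   extends Contents
  case class Impossible(values: Set[Int]) extends Contents

  def main(args: Array[String]): Unit = {
    // left branch: must be 1 or 2; right branch: must be 2 or 3 => 1, 2 or 3
    println(Possible(Set(1, 2)) merge Possible(Set(2, 3)))     // Possible(Set(1, 2, 3))
    // left: can't be 2 or 3; right: can't be 3 or 4 => only 3 is ruled out
    println(Impossible(Set(2, 3)) merge Impossible(Set(3, 4))) // Impossible(Set(3))
    // left: can't be 2 or 3; right: must be 3 or 4 => can't be 2
    println(Impossible(Set(2, 3)) merge Possible(Set(3, 4)))   // Impossible(Set(2))
  }
}

Each printed result corresponds to one of the three merge rules spelled out in the doc comment.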
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index db56f61f16..0f317422ac 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
package backend.opt
import scala.collection.{ mutable, immutable }
-import symtab._
/**
*/
@@ -23,6 +22,8 @@ abstract class DeadCodeElimination extends SubComponent {
val phaseName = "dce"
+ override val enabled: Boolean = settings.Xdce
+
/** Create a new phase */
override def newPhase(p: Phase) = new DeadCodeEliminationPhase(p)
@@ -34,7 +35,7 @@ abstract class DeadCodeElimination extends SubComponent {
val dce = new DeadCode()
override def apply(c: IClass) {
- if (settings.Xdce.value)
+ if (settings.Xdce && (dce ne null))
dce.analyzeClass(c)
}
}
@@ -61,7 +62,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
+ val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
@@ -89,7 +90,7 @@ abstract class DeadCodeElimination extends SubComponent {
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
- debuglog("dead code elimination on " + m);
+ debuglog("dead code elimination on " + m)
dropOf.clear()
localStores.clear()
clobbers.clear()
@@ -111,17 +112,17 @@ abstract class DeadCodeElimination extends SubComponent {
/** collect reaching definitions and initial useful instructions for this method. */
def collectRDef(m: IMethod): Unit = if (m.hasCode) {
- defs = immutable.HashMap.empty; worklist.clear(); useful.clear();
- rdef.init(m);
- rdef.run;
+ defs = immutable.HashMap.empty; worklist.clear(); useful.clear()
+ rdef.init(m)
+ rdef.run()
m foreachBlock { bb =>
useful(bb) = new mutable.BitSet(bb.size)
- var rd = rdef.in(bb);
- for (Pair(i, idx) <- bb.toList.zipWithIndex) {
+ var rd = rdef.in(bb)
+ for ((i, idx) <- bb.toList.zipWithIndex) {
// utility for adding to worklist
- def moveToWorkList() = moveToWorkListIf(true)
+ def moveToWorkList() = moveToWorkListIf(cond = true)
// utility for (conditionally) adding to worklist
def moveToWorkListIf(cond: Boolean) =
@@ -136,8 +137,8 @@ abstract class DeadCodeElimination extends SubComponent {
i match {
case LOAD_LOCAL(_) =>
- defs = defs + Pair(((bb, idx)), rd.vars)
- moveToWorkListIf(false)
+ defs = defs + (((bb, idx), rd.vars))
+ moveToWorkListIf(cond = false)
case STORE_LOCAL(l) =>
/* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
@@ -166,7 +167,7 @@ abstract class DeadCodeElimination extends SubComponent {
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
- LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() =>
+ LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() | CHECK_CAST(_) =>
moveToWorkList()
case CALL_METHOD(m1, _) if isSideEffecting(m1) =>
@@ -188,8 +189,10 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
moveToWorkListIf(necessary)
+ case LOAD_MODULE(sym) if isLoadNeeded(sym) =>
+ moveToWorkList() // SI-4859 Module initialization might side-effect.
case _ => ()
- moveToWorkListIf(false)
+ moveToWorkListIf(cond = false)
}
rd = rdef.interpret(bb, idx, rd)
}
@@ -223,7 +226,7 @@ abstract class DeadCodeElimination extends SubComponent {
// worklist so we also mark their reaching defs as useful - see SI-7060
if (!useful(bb)(idx)) {
useful(bb) += idx
- dropOf.get(bb, idx) foreach {
+ dropOf.get((bb, idx)) foreach {
for ((bb1, idx1) <- _) {
/*
* SI-7060: A drop that we now mark as useful can be reached via several paths,
@@ -345,13 +348,13 @@ abstract class DeadCodeElimination extends SubComponent {
m foreachBlock { bb =>
debuglog(bb + ":")
val oldInstr = bb.toList
- bb.open
- bb.clear
- for (Pair(i, idx) <- oldInstr.zipWithIndex) {
+ bb.open()
+ bb.clear()
+ for ((i, idx) <- oldInstr.zipWithIndex) {
if (useful(bb)(idx)) {
debuglog(" * " + i + " is useful")
bb.emit(i, i.pos)
- compensations.get(bb, idx) match {
+ compensations.get((bb, idx)) match {
case Some(is) => is foreach bb.emit
case None => ()
}
@@ -379,7 +382,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- if (bb.nonEmpty) bb.close
+ if (bb.nonEmpty) bb.close()
else log(s"empty block encountered in $m")
}
}
@@ -418,13 +421,6 @@ abstract class DeadCodeElimination extends SubComponent {
compensations
}
- private def withClosed[a](bb: BasicBlock)(f: => a): a = {
- if (bb.nonEmpty) bb.close
- val res = f
- if (bb.nonEmpty) bb.open
- res
- }
-
private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
for (b <- linearizer.linearizeAt(method, bb)) {
val idx = b.toList indexWhere (_ eq i)
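
The dce phase above works mark-and-sweep style: collectRDef seeds a worklist with instructions that are useful in their own right (returns, stores to fields, side-effecting calls, ...), marking then transitively adds the reaching definitions those instructions consume, and the sweep drops whatever was never marked. A minimal sketch of that marking loop over a hypothetical dependency map follows; the instruction numbering and the map are made up for illustration and are not the phase's real data structures.

import scala.collection.mutable

object MarkSketch {
  /**
   * Given the instructions that are useful on their own (roots) and, for each
   * instruction, the instructions whose results it consumes, return the set
   * of all instructions that must be kept.
   */
  def mark(roots: Set[Int], dependsOn: Map[Int, Set[Int]]): Set[Int] = {
    val useful   = mutable.Set.empty[Int]
    val worklist = mutable.Queue(roots.toSeq: _*)
    while (worklist.nonEmpty) {
      val i = worklist.dequeue()
      if (useful.add(i))                             // newly marked
        worklist ++= dependsOn.getOrElse(i, Set.empty)
    }
    useful.toSet
  }

  def main(args: Array[String]): Unit = {
    // 0: CONSTANT, 1: STORE_LOCAL x, 2: LOAD_LOCAL x, 3: RETURN, 4: dead CONSTANT
    val deps = Map(1 -> Set(0), 2 -> Set(1), 3 -> Set(2))
    val kept = mark(roots = Set(3), dependsOn = deps)
    println(kept.toList.sorted) // List(0, 1, 2, 3); instruction 4 is swept
  }
}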
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index ab238af239..235e954f88 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -4,7 +4,8 @@
package scala.tools.nsc
package backend.opt
-import scala.util.control.Breaks._
+
+import java.util.concurrent.TimeUnit
/**
* This optimization phase inlines the exception handlers so that further phases can optimize the code better
@@ -53,11 +54,13 @@ abstract class InlineExceptionHandlers extends SubComponent {
import icodes._
import icodes.opcodes._
- val phaseName = "inlineExceptionHandlers"
+ val phaseName = "inlinehandlers"
/** Create a new phase */
override def newPhase(p: Phase) = new InlineExceptionHandlersPhase(p)
+ override def enabled = settings.inlineHandlers
+
/**
* Inlining Exception Handlers
*/
@@ -70,9 +73,9 @@ abstract class InlineExceptionHandlers extends SubComponent {
* -some exception handler duplicates expect the exception on the stack while others expect it in a local
* => Option[Local]
*/
- private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]
+ private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]()
/* This map is the inverse of handlerCopies, used to compute the stack of duplicate blocks */
- private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]
+ private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]()
private def handlerLocal(bb: BasicBlock): Option[Local] =
for (v <- handlerCopies get bb ; (local, block) <- v ; l <- local) yield l
@@ -89,13 +92,13 @@ abstract class InlineExceptionHandlers extends SubComponent {
/** Apply exception handler inlining to a class */
override def apply(c: IClass): Unit =
- if (settings.inlineHandlers.value) {
- val startTime = System.currentTimeMillis
+ if (settings.inlineHandlers) {
+ val startTime = System.nanoTime()
currentClass = c
debuglog("Starting InlineExceptionHandlers on " + c)
c.methods foreach applyMethod
- debuglog("Finished InlineExceptionHandlers on " + c + "... " + (System.currentTimeMillis - startTime) + "ms")
+ debuglog("Finished InlineExceptionHandlers on " + c + "... " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime) + "ms")
currentClass = null
}
@@ -263,7 +266,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
if (analyzedMethod eq NoIMethod) {
analyzedMethod = bblock.method
tfa.init(bblock.method)
- tfa.run
+ tfa.run()
log(" performed tfa on method: " + bblock.method)
for (block <- bblock.method.blocks.sortBy(_.label))
@@ -358,7 +361,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
}
val caughtException = toTypeKind(caughtClass.tpe)
// copy the exception handler code once again, dropping the LOAD_EXCEPTION
- val copy = handler.code.newBlock
+ val copy = handler.code.newBlock()
copy.emitOnly((handler.iterator drop dropCount).toSeq: _*)
// extend the handlers of the handler to the copy
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 498db78636..fa424584b2 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -49,7 +49,10 @@ abstract class Inliners extends SubComponent {
val phaseName = "inliner"
+ override val enabled: Boolean = settings.inline
+
/** Debug - for timing the inliner. */
+ /****
private def timed[T](s: String, body: => T): T = {
val t1 = System.currentTimeMillis()
val res = body
@@ -60,6 +63,7 @@ abstract class Inliners extends SubComponent {
res
}
+ ****/
/** Look up implementation of method 'sym in 'clazz'.
*/
@@ -76,10 +80,10 @@ abstract class Inliners extends SubComponent {
assert(clazz != NoSymbol, "Walked up past Object.superClass looking for " + sym +
", most likely this reveals the TFA at fault (receiver and callee don't match).")
if (sym.owner == clazz || isBottomType(clazz)) sym
- else sym.overridingSymbol(clazz) match {
- case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
- case imp => imp
- }
+ else sym.overridingSymbol(clazz) orElse (
+ if (sym.owner.isTrait) sym
+ else lookup(clazz.superClass)
+ )
}
if (needsLookup) {
val concreteMethod = lookup(clazz)
@@ -193,7 +197,7 @@ abstract class Inliners extends SubComponent {
private var currentIClazz: IClass = _
private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
- private def ownedName(sym: Symbol): String = afterUncurry {
+ private def ownedName(sym: Symbol): String = exitingUncurry {
val count = (
if (!sym.isMethod) 1
else if (sym.owner.isAnonymousFunction) 3
@@ -230,7 +234,7 @@ abstract class Inliners extends SubComponent {
val hasRETURN = containsRETURN(incm.code.blocksList) || (incm.exh exists { eh => containsRETURN(eh.blocks) })
var a: analysis.MethodTFA = null
- if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run }
+ if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run() }
if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
@@ -240,7 +244,7 @@ abstract class Inliners extends SubComponent {
def clearCaches() {
// methods
NonPublicRefs.usesNonPublics.clear()
- recentTFAs.clear
+ recentTFAs.clear()
tfa.knownUnsafe.clear()
tfa.knownSafe.clear()
tfa.knownNever.clear()
@@ -263,7 +267,7 @@ abstract class Inliners extends SubComponent {
}
def analyzeClass(cls: IClass): Unit =
- if (settings.inline.value) {
+ if (settings.inline) {
inlineLog("class", s"${cls.symbol.decodedName}", s"analyzing ${cls.methods.size} methods in $cls")
this.currentIClazz = cls
@@ -279,7 +283,7 @@ abstract class Inliners extends SubComponent {
}
val tfa = new analysis.MTFAGrowable()
- tfa.stat = global.opt.printStats
+ tfa.stat = global.settings.Ystatistics.value
val staleOut = new mutable.ListBuffer[BasicBlock]
val splicedBlocks = mutable.Set.empty[BasicBlock]
val staleIn = mutable.Set.empty[BasicBlock]
@@ -317,11 +321,11 @@ abstract class Inliners extends SubComponent {
* */
def analyzeMethod(m: IMethod): Unit = {
// m.normalize
- if (settings.debug.value)
+ if (settings.debug)
inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName)
- var sizeBeforeInlining = m.code.blockCount
- var instrBeforeInlining = m.code.instructionCount
+ val sizeBeforeInlining = m.code.blockCount
+ val instrBeforeInlining = m.code.instructionCount
var retry = false
var count = 0
@@ -340,7 +344,7 @@ abstract class Inliners extends SubComponent {
inlineWithoutTFA(inputBlocks, callsites)
}
- /**
+ /*
* Inline straightforward callsites (those that can be inlined without a TFA).
*
* To perform inlining, all we need to know is listed as formal params in `analyzeInc()`:
@@ -361,7 +365,7 @@ abstract class Inliners extends SubComponent {
assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal)
if(analyzeInc(ocm, x, ocm.method.owner, -1, ocm.method)) {
inlineCount += 1
- break
+ break()
}
}
}
@@ -370,7 +374,7 @@ abstract class Inliners extends SubComponent {
inlineCount
}
- /**
+ /*
* Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
* at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
*
@@ -380,8 +384,8 @@ abstract class Inliners extends SubComponent {
val shouldWarn = hasInline(i.method)
def warnNoInline(reason: String): Boolean = {
- def msg = "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason)
- if (settings.debug.value)
+ def msg = "Could not inline required method %s because %s.".format(i.method.unexpandedName.decode, reason)
+ if (settings.debug)
inlineLog("fail", i.method.fullName, reason)
if (shouldWarn)
warn(i.pos, msg)
@@ -439,7 +443,6 @@ abstract class Inliners extends SubComponent {
case DontInlineHere(msg) => warnNoInline(msg)
case NeverSafeToInline => false
case InlineableAtThisCaller => true
- case inl @ FeasibleInline(_, _) if !inl.isSafe => false
case FeasibleInline(required, toPublicize) =>
for (f <- toPublicize) {
inlineLog("access", f, "making public")
@@ -477,9 +480,9 @@ abstract class Inliners extends SubComponent {
* As a whole, both `preInline()` invocations amount to priming the inlining process,
* so that the first TFA that is run afterwards is able to gain more information as compared to a cold-start.
*/
- val totalPreInlines = {
- val firstRound = preInline(true)
- if(firstRound == 0) 0 else (firstRound + preInline(false))
+ /*val totalPreInlines = */ { // Val name commented out to emphasize it is never used
+ val firstRound = preInline(isFirstRound = true)
+ if(firstRound == 0) 0 else (firstRound + preInline(isFirstRound = false))
}
staleOut.clear()
splicedBlocks.clear()
@@ -511,7 +514,7 @@ abstract class Inliners extends SubComponent {
for (cm <- cms; if tfa.remainingCALLs.isDefinedAt(cm)) {
val analysis.CallsiteInfo(_, receiver, stackLength, concreteMethod) = tfa.remainingCALLs(cm)
if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) {
- break
+ break()
}
}
}
@@ -563,13 +566,12 @@ abstract class Inliners extends SubComponent {
while (retry && count < MAX_INLINE_RETRY)
for(inlFail <- tfa.warnIfInlineFails) {
- warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.originalName.decode)
+ warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.unexpandedName.decode)
}
- m.normalize
+ m.normalize()
if (sizeBeforeInlining > 0) {
val instrAfterInlining = m.code.instructionCount
- val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) "!!" else ""
val inlinings = caller.inlinedCalls
if (inlinings > 0) {
val s1 = s"instructions $instrBeforeInlining -> $instrAfterInlining"
@@ -584,7 +586,7 @@ abstract class Inliners extends SubComponent {
private def isHigherOrderMethod(sym: Symbol) = (
sym.isMethod
- && beforeExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
+ && enteringExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
)
/** Should method 'sym' being called in 'receiver' be loaded from disk? */
@@ -601,7 +603,6 @@ abstract class Inliners extends SubComponent {
override def toString = m.toString
val sym = m.symbol
- val name = sym.name
def owner = sym.owner
def paramTypes = sym.info.paramTypes
def minimumStack = paramTypes.length + 1
@@ -617,13 +618,11 @@ abstract class Inliners extends SubComponent {
def length = blocks.length
def openBlocks = blocks filterNot (_.closed)
def instructions = m.code.instructions
- // def linearized = linearizer linearize m
def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
def isLarge = length > MAX_INLINE_SIZE
def isRecursive = m.recursive
def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs
- def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp))
def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED)
def hasNonFinalizerHandler = handlers exists {
@@ -681,9 +680,18 @@ abstract class Inliners extends SubComponent {
}
*/
- def checkField(f: Symbol) = check(f, f.isPrivate && !canMakePublic(f))
- def checkSuper(n: Symbol) = check(n, n.isPrivate || !n.isClassConstructor)
- def checkMethod(n: Symbol) = check(n, n.isPrivate)
+
+ def isPrivateForInlining(sym: Symbol): Boolean = {
+ if (sym.isJavaDefined) {
+ def check(sym: Symbol) = !(sym.isPublic || sym.isProtected)
+ check(sym) || check(sym.owner) // SI-7582 Must check the enclosing class *and* the symbol for Java.
+ }
+ else sym.isPrivate // Scala never emits package-private bytecode
+ }
+
+ def checkField(f: Symbol) = check(f, isPrivateForInlining(f) && !canMakePublic(f))
+ def checkSuper(n: Symbol) = check(n, isPrivateForInlining(n) || !n.isClassConstructor)
+ def checkMethod(n: Symbol) = check(n, isPrivateForInlining(n))
def getAccess(i: Instruction) = i match {
case CALL_METHOD(n, SuperCall(_)) => checkSuper(n)
@@ -729,17 +737,11 @@ abstract class Inliners extends SubComponent {
* - either log the reason for failure --- case (b) ---,
* - or perform inlining --- case (a) ---.
*/
- sealed abstract class InlineSafetyInfo {
- def isSafe = false
- def isUnsafe = !isSafe
- }
+ sealed abstract class InlineSafetyInfo
case object NeverSafeToInline extends InlineSafetyInfo
- case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true }
+ case object InlineableAtThisCaller extends InlineSafetyInfo
case class DontInlineHere(msg: String) extends InlineSafetyInfo
- case class FeasibleInline(accessNeeded: NonPublicRefs.Value,
- toBecomePublic: List[Symbol]) extends InlineSafetyInfo {
- override def isSafe = true
- }
+ case class FeasibleInline(accessNeeded: NonPublicRefs.Value, toBecomePublic: List[Symbol]) extends InlineSafetyInfo
case class AccessReq(
accessNeeded: NonPublicRefs.Value,
@@ -791,7 +793,7 @@ abstract class Inliners extends SubComponent {
val varsInScope = mutable.HashSet[Local]() ++= block.varsInScope
- /** Side effects varsInScope when it sees SCOPE_ENTERs. */
+ /* Side effects varsInScope when it sees SCOPE_ENTERs. */
def instrBeforeFilter(i: Instruction): Boolean = {
i match { case SCOPE_ENTER(l) => varsInScope += l ; case _ => () }
i ne instr
@@ -804,7 +806,7 @@ abstract class Inliners extends SubComponent {
// store the '$this' into the special local
val inlinedThis = newLocal("$inlThis", REFERENCE(ObjectClass))
- /** buffer for the returned value */
+ /* buffer for the returned value */
val retVal = inc.m.returnType match {
case UNIT => null
case x => newLocal("$retVal", x)
@@ -812,9 +814,9 @@ abstract class Inliners extends SubComponent {
val inlinedLocals = mutable.HashMap.empty[Local, Local]
- /** Add a new block in the current context. */
+ /* Add a new block in the current context. */
def newBlock() = {
- val b = caller.m.code.newBlock
+ val b = caller.m.code.newBlock()
activeHandlers foreach (_ addCoveredBlock b)
if (retVal ne null) b.varsInScope += retVal
b.varsInScope += inlinedThis
@@ -829,7 +831,7 @@ abstract class Inliners extends SubComponent {
handler
}
- /** alpha-rename `l` in caller's context. */
+ /* alpha-rename `l` in caller's context. */
def dupLocal(l: Local): Local = {
val sym = caller.sym.newVariable(freshName(l.sym.name.toString), l.sym.pos)
// sym.setInfo(l.sym.tpe)
@@ -840,10 +842,10 @@ abstract class Inliners extends SubComponent {
val afterBlock = newBlock()
- /** Map from nw.init instructions to their matching NEW call */
+ /* Map from nw.init instructions to their matching NEW call */
val pending: mutable.Map[Instruction, NEW] = new mutable.HashMap
- /** Map an instruction from the callee to one suitable for the caller. */
+ /* Map an instruction from the callee to one suitable for the caller. */
def map(i: Instruction): Instruction = {
def assertLocal(l: Local) = {
assert(caller.locals contains l, "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
@@ -872,7 +874,7 @@ abstract class Inliners extends SubComponent {
r
case CALL_METHOD(meth, Static(true)) if meth.isClassConstructor =>
- CALL_METHOD(meth, Static(true))
+ CALL_METHOD(meth, Static(onInstance = true))
case _ => i.clone()
}
@@ -893,8 +895,8 @@ abstract class Inliners extends SubComponent {
}
// re-emit the instructions before the call
- block.open
- block.clear
+ block.open()
+ block.clear()
block emit instrBefore
// store the arguments into special locals
@@ -903,7 +905,7 @@ abstract class Inliners extends SubComponent {
// jump to the start block of the callee
blockEmit(JUMP(inlinedBlock(inc.m.startBlock)))
- block.close
+ block.close()
// duplicate the other blocks in the callee
val calleeLin = inc.m.linearizedBlocks()
@@ -926,11 +928,11 @@ abstract class Inliners extends SubComponent {
emitInlined(map(i))
info = if(hasRETURN) a.interpret(info, i) else null
}
- inlinedBlock(bb).close
+ inlinedBlock(bb).close()
}
afterBlock emit instrAfter
- afterBlock.close
+ afterBlock.close()
staleIn += afterBlock
splicedBlocks ++= (calleeLin map inlinedBlock)
@@ -938,7 +940,7 @@ abstract class Inliners extends SubComponent {
// add exception handlers of the callee
caller addHandlers (inc.handlers map translateExh)
assert(pending.isEmpty, "Pending NEW elements: " + pending)
- if (settings.debug.value) icodes.checkValid(caller.m)
+ if (settings.debug) icodes.checkValid(caller.m)
}
def isStampedForInlining(stackLength: Int): InlineSafetyInfo = {
@@ -973,7 +975,7 @@ abstract class Inliners extends SubComponent {
}
if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right?
- tfa.knownUnsafe += inc.sym;
+ tfa.knownUnsafe += inc.sym
return DontInlineHere("sameSymbols (ie caller == callee)")
}
@@ -1032,7 +1034,6 @@ abstract class Inliners extends SubComponent {
case Public => true
}
private def sameSymbols = caller.sym == inc.sym
- private def sameOwner = caller.owner == inc.owner
/** Gives green light for inlining (which may still be vetoed later). Heuristics:
* - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
@@ -1048,9 +1049,9 @@ abstract class Inliners extends SubComponent {
if (caller.isInClosure) score -= 2
else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
- if (inc.isSmall) score += 1;
+ if (inc.isSmall) score += 1
// if (inc.hasClosureParam) score += 2
- if (inc.isLarge) score -= 1;
+ if (inc.isLarge) score -= 1
if (caller.isSmall && isLargeSum) {
score -= 1
debuglog(s"inliner score decreased to $score because small caller $caller would become large")
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
deleted file mode 100644
index 7f5f412a20..0000000000
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ /dev/null
@@ -1,227 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import symtab.Flags
-
-import scala.collection._
-
-/** A component that describes the possible changes between successive
- * compilations of a class.
- */
-abstract class Changes {
-
- /** A compiler instance used to compile files on demand. */
- val compiler: Global
-
- import compiler._
- import symtab.Flags._
-
- abstract class Change
-
- private lazy val annotationsChecked =
- List(definitions.SpecializedClass) // Any others that should be checked?
-
- private val flagsToCheck = IMPLICIT | FINAL | PRIVATE | PROTECTED | SEALED |
- OVERRIDE | CASE | ABSTRACT | DEFERRED | METHOD |
- MODULE | INTERFACE | PARAM | BYNAMEPARAM | CONTRAVARIANT |
- DEFAULTPARAM | ACCESSOR | LAZY | SPECIALIZED
-
- /** Are the new modifiers more restrictive than the old ones? */
- private def moreRestrictive(from: Long, to: Long): Boolean =
- ((((to & PRIVATE) != 0L) && (from & PRIVATE) == 0L)
- || (((to & PROTECTED) != 0L) && (from & PROTECTED) == 0L))
-
- /** Check if flags have changed **/
- private def modifiedFlags(from: Long, to: Long): Boolean =
- (from & IMPLICIT) != (to & IMPLICIT)
-
- /** An entity in source code, either a class or a member definition.
- * Name is fully-qualified.
- */
- abstract class Entity
- case class Class(name: String) extends Entity
- case class Definition(name: String) extends Entity
-
- case class Added(e: Entity) extends Change
- case class Removed(e: Entity) extends Change
- case class Changed(e: Entity)(implicit val reason: String) extends Change {
- override def toString = "Changed(" + e + ")[" + reason + "]"
- }
- case class ParentChanged(e: Entity) extends Change
-
- private val changedTypeParams = new mutable.HashSet[String]
-
- private def sameParameterSymbolNames(sym1: Symbol, sym2: Symbol): Boolean =
- sameSymbol(sym1, sym2, true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140
- private def sameSymbol(sym1: Symbol, sym2: Symbol, simple: Boolean = false): Boolean =
- if (simple) sym1.encodedName == sym2.encodedName else sym1.fullName == sym2.fullName
- private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean =
- (sym1.flags & flagsToCheck) == (sym2.flags & flagsToCheck)
- private def sameAnnotations(sym1: Symbol, sym2: Symbol): Boolean =
- annotationsChecked.forall(a =>
- (sym1.hasAnnotation(a) == sym2.hasAnnotation(a)))
-
- private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = {
- def typeOf(tp: Type): String = tp.toString + "[" + tp.getClass + "]"
- val res = sameType0(tp1, tp2)
- //if (!res) println("\t different types: " + typeOf(tp1) + " : " + typeOf(tp2))
- res
- }
-
- private def sameType0(tp1: Type, tp2: Type)(implicit strict: Boolean): Boolean = ((tp1, tp2) match {
- /*case (ErrorType, _) => false
- case (WildcardType, _) => false
- case (_, ErrorType) => false
- case (_, WildcardType) => false
- */
- case (NoType, _) => false
- case (NoPrefix, NoPrefix) => true
- case (_, NoType) => false
- case (_, NoPrefix) => false
-
- case (ThisType(sym1), ThisType(sym2))
- if sameSymbol(sym1, sym2) => true
-
- case (SingleType(pre1, sym1), SingleType(pre2, sym2))
- if sameType(pre1, pre2) && sameSymbol(sym1, sym2) => true
- case (ConstantType(value1), ConstantType(value2)) =>
- value1 == value2
- case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
- val testSymbols =
- if (!sameSymbol(sym1, sym2)) {
- val v = (!strict && sym1.isType && sym2.isType && sameType(sym1.info, sym2.info))
- if (v) changedTypeParams += sym1.fullName
- v
- } else
- !sym1.isTypeParameter || !changedTypeParams.contains(sym1.fullName)
-
- testSymbols && sameType(pre1, pre2) &&
- (sym1.variance == sym2.variance) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- sameTypes(args1, args2))
- // @M! normalize reduces higher-kinded case to PolyType's
-
- case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = sameType(e1.sym.info, sym2.info)
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- sameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1)
- case (mt1 @ MethodType(params1, res1), mt2 @ MethodType(params2, res2)) =>
- // new dependent types: probably fix this, use substSym as done for PolyType
- sameTypes(tp1.paramTypes, tp2.paramTypes) &&
- (tp1.params corresponds tp2.params)((t1, t2) => sameParameterSymbolNames(t1, t2) && sameFlags(t1, t2)) &&
- sameType(res1, res2) &&
- mt1.isImplicit == mt2.isImplicit
- case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- sameTypeParams(tparams1, tparams2) && sameType(res1, res2)
- case (NullaryMethodType(res1), NullaryMethodType(res2)) =>
- sameType(res1, res2)
- case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- sameTypeParams(tparams1, tparams2)(false) && sameType(res1, res2)(false)
- case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
- sameType(lo1, lo2) && sameType(hi1, hi2)
- case (BoundedWildcardType(bounds), _) =>
- bounds containsType tp2
- case (_, BoundedWildcardType(bounds)) =>
- bounds containsType tp1
- case (AnnotatedType(_,_,_), _) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) &&
- sameType(tp1.withoutAnnotations, tp2.withoutAnnotations)
- case (_, AnnotatedType(_,_,_)) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) &&
- sameType(tp1.withoutAnnotations, tp2.withoutAnnotations)
- case (_: SingletonType, _: SingletonType) =>
- var origin1 = tp1
- while (origin1.underlying.isInstanceOf[SingletonType]) {
- assert(origin1 ne origin1.underlying, origin1)
- origin1 = origin1.underlying
- }
- var origin2 = tp2
- while (origin2.underlying.isInstanceOf[SingletonType]) {
- assert(origin2 ne origin2.underlying, origin2)
- origin2 = origin2.underlying
- }
- ((origin1 ne tp1) || (origin2 ne tp2)) && sameType(origin1, origin2)
- case _ =>
- false
- }) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && sameType(tp1n, tp2n)
- }
-
- private def sameTypeParams(tparams1: List[Symbol], tparams2: List[Symbol])(implicit strict: Boolean) =
- sameTypes(tparams1 map (_.info), tparams2 map (_.info)) &&
- sameTypes(tparams1 map (_.tpe), tparams2 map (_.tpe)) &&
- (tparams1 corresponds tparams2)((t1, t2) => sameAnnotations(t1, t2))
-
- private def sameTypes(tps1: List[Type], tps2: List[Type])(implicit strict: Boolean) =
- (tps1 corresponds tps2)(sameType(_, _))
-
- /** Return the list of changes between 'from' and 'toSym.info'.
- */
- def changeSet(from: Type, toSym: Symbol): List[Change] = {
- implicit val defaultReason = "types"
- implicit val defaultStrictTypeRefTest = true
-
- val to = toSym.info
- changedTypeParams.clear
- def omitSymbols(s: Symbol): Boolean = !s.hasFlag(LOCAL | LIFTED | PRIVATE | SYNTHETIC)
- val cs = new mutable.ListBuffer[Change]
-
- if ((from.parents zip to.parents) exists { case (t1, t2) => !sameType(t1, t2) })
- cs += Changed(toEntity(toSym))(from.parents.zip(to.parents).toString)
- if (!sameTypeParams(from.typeParams, to.typeParams)(false))
- cs += Changed(toEntity(toSym))(" tparams: " + from.typeParams.zip(to.typeParams))
-
- // new members not yet visited
- val newMembers = mutable.HashSet[Symbol]()
- newMembers ++= to.decls.iterator filter omitSymbols
-
- for (o <- from.decls.iterator filter omitSymbols) {
- val n = to.decl(o.name)
- newMembers -= n
-
- if (o.isClass)
- cs ++= changeSet(o.info, n)
- else if (n == NoSymbol)
- cs += Removed(toEntity(o))
- else {
- val newSym =
- o match {
- case _:TypeSymbol if o.isAliasType =>
- n.suchThat(ov => sameType(ov.info, o.info))
- case _ =>
- n.suchThat(ov => sameType(ov.tpe, o.tpe))
- }
- if (newSym == NoSymbol || moreRestrictive(o.flags, newSym.flags) || modifiedFlags(o.flags, newSym.flags))
- cs += Changed(toEntity(o))(n + " changed from " + o.tpe + " to " + n.tpe + " flags: " + Flags.flagsToString(o.flags))
- else if (newSym.isGetter && (o.accessed(from).hasFlag(MUTABLE) != newSym.accessed.hasFlag(MUTABLE)))
- // o.owner is already updated to newSym.owner
- // so o.accessed will return the accessed for the new owner
- cs += Changed(toEntity(o))(o.accessed(from) + " changed to " + newSym.accessed)
- else
- newMembers -= newSym
- }
- }: Unit // Give the type explicitly until #2281 is fixed
-
- cs ++= (newMembers map (Added compose toEntity))
- cs.toList
- }
- def removeChangeSet(sym: Symbol): Change = Removed(toEntity(sym))
- def changeChangeSet(sym: Symbol, msg: String): Change = Changed(toEntity(sym))(msg)
- def parentChangeSet(sym: Symbol): Change = ParentChanged(toEntity(sym))
-
- private def toEntity(sym: Symbol): Entity =
- if (sym.isClass) Class(sym.fullName)
- else Definition(sym.fullName)
-}
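The Changes component deleted above classified the differences between two successive compilations of a class; its cheapest test, moreRestrictive, flags a member whose visibility tightened (for example public to private), since dependent sources may no longer compile against it. A small self-contained sketch of that flag test over plain bit masks (the constants below are illustrative; the real values live in scala.reflect.internal.Flags):

    object FlagCheck {
      // Illustrative bit positions, not the compiler's actual flag values.
      val PRIVATE: Long   = 1L << 2
      val PROTECTED: Long = 1L << 0

      // True when the new flags gained PRIVATE or PROTECTED that the old ones lacked.
      def moreRestrictive(from: Long, to: Long): Boolean =
        (((to & PRIVATE) != 0L) && ((from & PRIVATE) == 0L)) ||
        (((to & PROTECTED) != 0L) && ((from & PROTECTED) == 0L))
    }

    // Example: a formerly public member became private, so dependents must be rechecked.
    // FlagCheck.moreRestrictive(from = 0L, to = FlagCheck.PRIVATE) == true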
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
deleted file mode 100644
index cdde768274..0000000000
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ /dev/null
@@ -1,254 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import io.Path
-import scala.collection._
-import symtab.Flags
-import scala.tools.nsc.io.AbstractFile
-import scala.reflect.internal.util.SourceFile
-
-trait DependencyAnalysis extends SubComponent with Files {
- import global._
-
- val phaseName = "dependencyAnalysis"
-
- def off = settings.make.isDefault || settings.make.value == "all"
- def shouldCheckClasspath = settings.make.value != "transitivenocp"
-
- def newPhase(prev: Phase) = new AnalysisPhase(prev)
-
- private def depPath = Path(settings.dependenciesFile.value)
- def loadDependencyAnalysis(): Boolean = (
- depPath.path != "none" && depPath.isFile && loadFrom(
- AbstractFile.getFile(depPath),
- path => AbstractFile.getFile(depPath.parent resolve Path(path))
- )
- )
- def saveDependencyAnalysis(): Unit = {
- if (!depPath.exists)
- dependenciesFile = AbstractFile.getFile(depPath.createFile())
-
- /** The directory where file lookup should start */
- val rootPath = depPath.parent.normalize
- saveDependencies(
- file => rootPath.relativize(Path(file.file).normalize).path
- )
- }
-
- lazy val maxDepth = settings.make.value match {
- case "changed" => 0
- case "immediate" => 1
- case _ => Int.MaxValue
- }
-
- // todo: order-insensitive checking, and also timestamp checking?
- def validateClasspath(cp1: String, cp2: String): Boolean = cp1 == cp2
-
- def nameToFile(src: AbstractFile, name: String) =
- settings.outputDirs.outputDirFor(src)
- .lookupPathUnchecked(name.toString.replace(".", java.io.File.separator) + ".class", false)
-
- private var depFile: Option[AbstractFile] = None
-
- def dependenciesFile_=(file: AbstractFile) {
- assert(file ne null)
- depFile = Some(file)
- }
-
- def dependenciesFile: Option[AbstractFile] = depFile
-
- def classpath = settings.classpath.value
- def newDeps = new FileDependencies(classpath)
-
- var dependencies = newDeps
-
- def managedFiles = dependencies.dependencies.keySet
-
- /** Top level definitions per source file. */
- val definitions: mutable.Map[AbstractFile, List[Symbol]] =
- new mutable.HashMap[AbstractFile, List[Symbol]] {
- override def default(f: AbstractFile) = Nil
- }
-
- /** External references used by source file. */
- val references: mutable.Map[AbstractFile, immutable.Set[String]] =
- new mutable.HashMap[AbstractFile, immutable.Set[String]] {
- override def default(f: AbstractFile) = immutable.Set()
- }
-
- /** External references for inherited members used in the source file */
- val inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] =
- new mutable.HashMap[AbstractFile, immutable.Set[Inherited]] {
- override def default(f: AbstractFile) = immutable.Set()
- }
-
- /** Write dependencies to the current file. */
- def saveDependencies(fromFile: AbstractFile => String) =
- if(dependenciesFile.isDefined)
- dependencies.writeTo(dependenciesFile.get, fromFile)
-
- /** Load dependencies from the given file and save the file reference for
- * future saves.
- */
- def loadFrom(f: AbstractFile, toFile: String => AbstractFile): Boolean = {
- dependenciesFile = f
- FileDependencies.readFrom(f, toFile) match {
- case Some(fd) =>
- val success = if (shouldCheckClasspath) validateClasspath(fd.classpath, classpath) else true
- dependencies = if (success) fd else {
- if (settings.debug.value)
- println("Classpath has changed. Nuking dependencies")
- newDeps
- }
-
- success
- case None => false
- }
- }
-
- def calculateFiles(files: List[SourceFile]): List[SourceFile] =
- if (off) files
- else if (dependencies.isEmpty) {
- println("No known dependencies. Compiling " +
- (if (settings.debug.value) files.mkString(", ") else "everything"))
- files
- } else {
- val (direct, indirect) = dependencies.invalidatedFiles(maxDepth);
- val filtered = files.filter(x => {
- val f = x.file.absolute
- direct(f) || indirect(f) || !dependencies.containsFile(f);
- })
- filtered match {
- case Nil => println("No changes to recompile");
- case x => println("Recompiling " + (
- if(settings.debug.value) x.mkString(", ") else x.length + " files")
- )
- }
- filtered
- }
-
- case class Inherited(qualifier: String, member: Name)
-
- class AnalysisPhase(prev: Phase) extends StdPhase(prev) {
-
- override def cancelled(unit: CompilationUnit) =
- super.cancelled(unit) && !unit.isJava
-
- def apply(unit : global.CompilationUnit) {
- val f = unit.source.file.file
- // When we're passed strings by the interpreter
- // they have no source file. We simply ignore this case
- // as irrelevant to dependency analysis.
- if (f != null){
- val source: AbstractFile = unit.source.file;
- for (d <- unit.icode){
- val name = d.toString
- d.symbol match {
- case s : ModuleClassSymbol =>
- val isTopLevelModule = afterPickler { !s.isImplClass && !s.isNestedClass }
-
- if (isTopLevelModule && (s.companionModule != NoSymbol)) {
- dependencies.emits(source, nameToFile(unit.source.file, name))
- }
- dependencies.emits(source, nameToFile(unit.source.file, name + "$"))
- case _ =>
- dependencies.emits(source, nameToFile(unit.source.file, name))
- }
- }
-
- dependencies.reset(source)
- for (d <- unit.depends; if (d.sourceFile != null)){
- dependencies.depends(source, d.sourceFile)
- }
- }
-
- // find all external references in this compilation unit
- val file = unit.source.file
- references += file -> immutable.Set.empty[String]
- inherited += file -> immutable.Set.empty[Inherited]
-
- val buf = new mutable.ListBuffer[Symbol]
-
- (new Traverser {
- override def traverse(tree: Tree) {
- if ((tree.symbol ne null)
- && (tree.symbol != NoSymbol)
- && (!tree.symbol.isPackage)
- && (!tree.symbol.isJavaDefined)
- && (!tree.symbol.tpe.isError)
- && ((tree.symbol.sourceFile eq null)
- || (tree.symbol.sourceFile.path != file.path))
- && (!tree.symbol.isClassConstructor)) {
- updateReferences(tree.symbol.fullName)
- // was "at uncurryPhase.prev", which is actually non-deterministic
- // because the continuations plugin may or may not supply uncurry's
- // immediately preceding phase.
- beforeRefchecks(checkType(tree.symbol.tpe))
- }
-
- tree match {
- case cdef: ClassDef if !cdef.symbol.hasPackageFlag &&
- !cdef.symbol.isAnonymousFunction =>
- if (cdef.symbol != NoSymbol) buf += cdef.symbol
- // was "at erasurePhase.prev"
- beforeExplicitOuter {
- for (s <- cdef.symbol.info.decls)
- s match {
- case ts: TypeSymbol if !ts.isClass =>
- checkType(s.tpe)
- case _ =>
- }
- }
- super.traverse(tree)
-
- case ddef: DefDef =>
- // was "at typer.prev"
- beforeTyper { checkType(ddef.symbol.tpe) }
- super.traverse(tree)
- case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556
- if (!a.symbol.isConstructor &&
- !a.symbol.owner.isPackageClass &&
- !isSameType(q.tpe, a.symbol.owner.tpe))
- inherited += file ->
- (inherited(file) + Inherited(q.symbol.tpe.resultType.safeToString, n))
- super.traverse(tree)
- case _ =>
- super.traverse(tree)
- }
- }
-
- def checkType(tpe: Type): Unit =
- tpe match {
- case t: MethodType =>
- checkType(t.resultType)
- for (s <- t.params) checkType(s.tpe)
-
- case t: TypeRef =>
- if (t.sym.isAliasType) {
- updateReferences(t.typeSymbolDirect.fullName)
- checkType(t.typeSymbolDirect.info)
- }
- updateReferences(t.typeSymbol.fullName)
- for (tp <- t.args) checkType(tp)
-
- case t: PolyType =>
- checkType(t.resultType)
- updateReferences(t.typeSymbol.fullName)
-
- case t: NullaryMethodType =>
- checkType(t.resultType)
- updateReferences(t.typeSymbol.fullName)
-
- case t =>
- updateReferences(t.typeSymbol.fullName)
- }
-
- def updateReferences(s: String): Unit =
- references += file -> (references(file) + s)
-
- }).apply(unit.body)
-
- definitions(unit.source.file) = buf.toList
- }
- }
-}
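The DependencyAnalysis phase deleted above decided what to recompile by expanding the set of directly changed files through the dependency graph, up to maxDepth rounds ("changed" = 0, "immediate" = 1, otherwise effectively unbounded). A self-contained sketch of that bounded closure over plain strings, assuming a simple deps map from each file to the files it depends on (names and signature are illustrative, not the removed API):

    import scala.collection.mutable

    // deps(f) = the files f depends on; result = files that transitively depend on `changed`.
    def dependentFiles(depth: Int, changed: Set[String], deps: Map[String, Set[String]]): Set[String] = {
      val indirect = mutable.HashSet[String]()
      def invalid(f: String) = indirect(f) || changed(f)

      def go(i: Int): Unit = if (i > 0) {
        val newlyInvalid = for {
          (target, dependsOn) <- deps.toSet
          if !invalid(target) && dependsOn.exists(invalid)
        } yield target
        if (newlyInvalid.nonEmpty) { indirect ++= newlyInvalid; go(i - 1) }
      }

      go(depth)
      indirect.toSet -- changed
    }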
diff --git a/src/compiler/scala/tools/nsc/dependencies/Files.scala b/src/compiler/scala/tools/nsc/dependencies/Files.scala
deleted file mode 100644
index 194351a13f..0000000000
--- a/src/compiler/scala/tools/nsc/dependencies/Files.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import java.io.{InputStream, OutputStream, PrintStream, InputStreamReader, BufferedReader}
-import io.{AbstractFile, PlainFile, VirtualFile}
-
-import scala.collection._
-
-
-trait Files { self : SubComponent =>
-
- class FileDependencies(val classpath: String) {
- import FileDependencies._
-
- class Tracker extends mutable.OpenHashMap[AbstractFile, mutable.Set[AbstractFile]] {
- override def default(key: AbstractFile) = {
- this(key) = new mutable.HashSet[AbstractFile]
- this(key)
- }
- }
-
- val dependencies = new Tracker
- val targets = new Tracker
-
- def isEmpty = dependencies.isEmpty && targets.isEmpty
-
- def emits(source: AbstractFile, result: AbstractFile) =
- targets(source) += result
- def depends(from: AbstractFile, on: AbstractFile) =
- dependencies(from) += on
-
- def reset(file: AbstractFile) = dependencies -= file
-
- def cleanEmpty = {
- dependencies foreach {case (_, value) =>
- value retain (x => x.exists && (x ne removedFile))}
- dependencies retain ((key, value) => key.exists && !value.isEmpty)
- targets foreach {case (_, value) => value retain (_.exists)}
- targets retain ((key, value) => key.exists && !value.isEmpty)
- }
-
- def containsFile(f: AbstractFile) = targets.contains(f.absolute)
-
- def invalidatedFiles(maxDepth: Int) = {
- val direct = new mutable.HashSet[AbstractFile]
-
- for ((file, products) <- targets) {
- // This looks a bit odd. It may seem like one should invalidate a file
- // if *any* of its dependencies are older than it. The forall is there
- // to deal with the fact that a) Some results might have been orphaned
- // and b) Some files might not need changing.
- direct(file) ||= products.forall(d => d.lastModified < file.lastModified)
- }
-
- val indirect = dependentFiles(maxDepth, direct)
-
- for ((source, targets) <- targets
- if direct(source) || indirect(source) || (source eq removedFile)) {
- targets foreach (_.delete)
- targets -= source
- }
-
- (direct, indirect)
- }
-
- /** Return the set of files that depend on the given changed files.
- * It computes the transitive closure up to the given depth.
- */
- def dependentFiles(depth: Int, changed: Set[AbstractFile]): Set[AbstractFile] = {
- val indirect = new mutable.HashSet[AbstractFile]
- val newInvalidations = new mutable.HashSet[AbstractFile]
-
- def invalid(file: AbstractFile) =
- indirect(file) || changed(file) || (file eq removedFile)
-
- def go(i: Int) : Unit = if(i > 0) {
- newInvalidations.clear
- for((target, depends) <- dependencies if !invalid(target);
- d <- depends)
- newInvalidations(target) ||= invalid(d)
-
- indirect ++= newInvalidations
- if (!newInvalidations.isEmpty) go(i - 1)
- }
-
- go(depth)
-
- indirect --= changed
- }
-
- def writeTo(file: AbstractFile, fromFile: AbstractFile => String): Unit =
- writeToFile(file)(out => writeTo(new PrintStream(out), fromFile))
-
- def writeTo(print: PrintStream, fromFile: AbstractFile => String): Unit = {
- def emit(tracker: Tracker) =
- for ((f, ds) <- tracker; d <- ds) print.println(fromFile(f) + arrow + fromFile(d))
-
- cleanEmpty
- print.println(classpath)
- print.println(separator)
- emit(dependencies)
- print.println(separator)
- emit(targets)
- }
- }
-
- object FileDependencies {
- private val separator:String = "-------"
- private val arrow = " -> "
- private val removedFile = new VirtualFile("removed")
-
- private def validLine(l: String) = (l != null) && (l != separator)
-
- def readFrom(file: AbstractFile, toFile: String => AbstractFile): Option[FileDependencies] =
- readFromFile(file) { in =>
- val reader = new BufferedReader(new InputStreamReader(in))
- val it = new FileDependencies(reader.readLine)
-
- def readLines(valid: Boolean)(f: (AbstractFile, AbstractFile) => Unit): Boolean = {
- var continue = valid
- var line: String = null
- while (continue && {line = reader.readLine; validLine(line)}) {
- line.split(arrow) match {
- case Array(from, on) => f(toFile(from), toFile(on))
- case _ =>
- global.inform("Parse error: Unrecognised string " + line)
- continue = false
- }
- }
- continue
- }
-
- reader.readLine
-
- val dResult = readLines(true)(
- (_, _) match {
- case (null, _) => // fromFile is removed, it's ok
- case (fromFile, null) =>
- // onFile is removed, should recompile fromFile
- it.depends(fromFile, removedFile)
- case (fromFile, onFile) => it.depends(fromFile, onFile)
- })
-
- readLines(dResult)(
- (_, _) match {
- case (null, null) =>
- // source and target are all removed, it's ok
- case (null, targetFile) =>
- // source is removed, should remove relative target later
- it.emits(removedFile, targetFile)
- case (_, null) =>
- // it may have been cleaned outside, or removed during the last phase
- case (sourceFile, targetFile) => it.emits(sourceFile, targetFile)
- })
-
- Some(it)
- }
- }
-
- def writeToFile[T](file: AbstractFile)(f: OutputStream => T) : T = {
- val out = file.bufferedOutput
- try {
- f(out)
- } finally {
- out.close
- }
- }
-
- def readFromFile[T](file: AbstractFile)(f: InputStream => T) : T = {
- val in = file.input
- try{
- f(in)
- } finally {
- in.close
- }
- }
-}
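For reference, the dependency file that the deleted Files component serialized is line-oriented: the classpath on the first line, a '-------' separator, the 'from -> on' dependency pairs, another separator, and finally the 'source -> target' pairs. With hypothetical paths, a saved file would look roughly like:

    lib/scala-library.jar:classes
    -------
    src/A.scala -> src/B.scala
    -------
    src/A.scala -> classes/A.class
    src/A.scala -> classes/A$.class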
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
deleted file mode 100644
index a091b04993..0000000000
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-
-import scala.util.control.ControlThrowable
-import reporters.Reporter
-import scala.reflect.internal.util.{ NoPosition, BatchSourceFile}
-import io.{ File, Directory }
-import DocParser.Parsed
-
-/** A documentation processor controls the process of generating Scala
- * documentation, which is as follows.
- *
- * * A simplified compiler instance (with only the front-end phases enabled)
- * * is created, and additional ''sourceless'' comments are registered.
- * * Documentable files are compiled, thereby filling the compiler's symbol table.
- * * A documentation model is extracted from the post-compilation symbol table.
- * * A generator is used to transform the model into the correct final format (HTML).
- *
- * A processor contains a single compiler instantiated from the processor's
- * `settings`. Each call to `document` uses the same compiler instance with
- * the same symbol table. In particular, this implies that the scaladoc site
- * obtained from a call to `run` will contain documentation about files compiled
- * during previous calls to the same processor's `run` method.
- *
- * @param reporter The reporter to which both documentation and compilation errors will be reported.
- * @param settings The settings to be used by the documenter and compiler for generating documentation.
- *
- * @author Gilles Dubochet */
-class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor =>
- /** The unique compiler instance used by this processor and constructed from its `settings`. */
- object compiler extends Global(settings, reporter) with interactive.RangePositions {
- override protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer
- phasesSet += analyzer.namerFactory
- phasesSet += analyzer.packageObjects
- phasesSet += analyzer.typerFactory
- }
- override def forScaladoc = true
- }
-
- /** Creates a scaladoc site for all symbols defined in this call's `source`,
- * as well as those defined in `sources` of previous calls to the same processor.
- * @param source The list of paths (relative to the compiler's source path,
- * or absolute) of files to document or the source code. */
- def makeUniverse(source: Either[List[String], String]): Option[Universe] = {
- assert(settings.docformat.value == "html")
- source match {
- case Left(files) =>
- new compiler.Run() compile files
- case Right(sourceCode) =>
- new compiler.Run() compileSources List(new BatchSourceFile("newSource", sourceCode))
- }
-
- if (reporter.hasErrors)
- return None
-
- val extraTemplatesToDocument: Set[compiler.Symbol] = {
- if (settings.docUncompilable.isDefault) Set()
- else {
- val uncompilable = new {
- val global: compiler.type = compiler
- val settings = processor.settings
- } with Uncompilable { }
-
- compiler.docComments ++= uncompilable.comments
- docdbg("" + uncompilable)
-
- uncompilable.templates
- }
- }
-
- val modelFactory = (
- new { override val global: compiler.type = compiler }
- with model.ModelFactory(compiler, settings)
- with model.ModelFactoryImplicitSupport
- with model.ModelFactoryTypeSupport
- with model.diagram.DiagramFactory
- with model.CommentFactory
- with model.TreeFactory
- with model.MemberLookup {
- override def templateShouldDocument(sym: compiler.Symbol, inTpl: DocTemplateImpl) =
- extraTemplatesToDocument(sym) || super.templateShouldDocument(sym, inTpl)
- }
- )
-
- modelFactory.makeModel match {
- case Some(madeModel) =>
- if (!settings.scaladocQuietRun)
- println("model contains " + modelFactory.templatesCount + " documentable templates")
- Some(madeModel)
- case None =>
- if (!settings.scaladocQuietRun)
- println("no documentable class found in compilation units")
- None
- }
- }
-
- object NoCompilerRunException extends ControlThrowable { }
-
- val documentError: PartialFunction[Throwable, Unit] = {
- case NoCompilerRunException =>
- reporter.info(null, "No documentation generated with unsuccessful compiler run", false)
- case _: ClassNotFoundException =>
- ()
- }
-
- /** Generate document(s) for all `files` containing scaladoc documentation.
- * @param files The list of paths (relative to the compiler's source path, or absolute) of files to document. */
- def document(files: List[String]) {
- def generate() = {
- import doclet._
- val docletClass = Class.forName(settings.docgenerator.value) // default is html.Doclet
- val docletInstance = docletClass.newInstance().asInstanceOf[Generator]
-
- docletInstance match {
- case universer: Universer =>
- val universe = makeUniverse(Left(files)) getOrElse { throw NoCompilerRunException }
- universer setUniverse universe
-
- docletInstance match {
- case indexer: Indexer => indexer setIndex model.IndexModelFactory.makeIndex(universe)
- case _ => ()
- }
- case _ => ()
- }
- docletInstance.generate
- }
-
- try generate()
- catch documentError
- }
-
- private[doc] def docdbg(msg: String) {
- if (settings.Ydocdebug.value)
- println(msg)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/compiler/scala/tools/nsc/doc/DocParser.scala
deleted file mode 100644
index 27c995e1c3..0000000000
--- a/src/compiler/scala/tools/nsc/doc/DocParser.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package nsc
-package doc
-
-import reporters._
-import scala.reflect.internal.util._
-import interactive.RangePositions
-import DocParser.Parsed
-
-/** A very minimal global customized for extracting `DocDefs`. It stops
- * right after parsing so it can read `DocDefs` from source code which would
- * otherwise cause the compiler to go haywire.
- */
-class DocParser(settings: nsc.Settings, reporter: Reporter)
- extends Global(settings, reporter)
- with RangePositions {
-
- def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
- def this() = this(new Settings(Console println _))
-
- // the usual global initialization
- locally { new Run() }
-
- override def forScaladoc = true
- override protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer
- }
-
- /** Returns a list of `DocParser.Parseds`, which hold the DocDefs found
- * in the given code along with the surrounding trees.
- */
- def docDefs(code: String) = {
- def loop(enclosing: List[Tree], tree: Tree): List[Parsed] = tree match {
- case x: PackageDef => x.stats flatMap (t => loop(enclosing :+ x, t))
- case x: DocDef => new Parsed(enclosing, x) :: loop(enclosing :+ x.definition, x.definition)
- case x => x.children flatMap (t => loop(enclosing, t))
- }
- loop(Nil, docUnit(code))
- }
-
- /** A compilation unit containing parsed source.
- */
- def docUnit(code: String) = {
- val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
- val scanner = new syntaxAnalyzer.UnitParser(unit)
-
- scanner.compilationUnit()
- }
-}
-
-/** Since the DocParser's whole reason for existing involves trashing a
- * global, it is designed to bottle up general `Global#Tree` types rather
- * than path dependent ones. The recipient will have to deal.
- */
-object DocParser {
- type Tree = Global#Tree
- type DefTree = Global#DefTree
- type DocDef = Global#DocDef
- type Name = Global#Name
-
- class Parsed(val enclosing: List[Tree], val docDef: DocDef) {
- def nameChain: List[Name] = (enclosing :+ docDef.definition) collect { case x: DefTree => x.name }
- def raw: String = docDef.comment.raw
-
- override def toString = (
- nameChain.init.map(x => if (x.isTypeName) x + "#" else x + ".").mkString + nameChain.last
- )
- }
-}
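Before its removal, the DocParser above could be driven on its own: it stops right after parsing, so DocDefs can be pulled out of sources that would never typecheck. A hedged usage sketch (the error handler and the sample source string are illustrative):

    import scala.tools.nsc.doc.{ DocParser, Settings }

    val settings = new Settings(msg => Console.err.println(msg))
    val parser   = new DocParser(settings)

    // Each Parsed pairs a DocDef with its enclosing trees; nameChain and raw
    // expose the qualified name and the raw comment text.
    val parsed = parser.docDefs("/** Greets. */ class Hello { /** Does nothing. */ def greet(): Unit = () }")
    parsed foreach (p => println(p.nameChain.mkString(".") + ": " + p.raw))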
diff --git a/src/compiler/scala/tools/nsc/doc/Index.scala b/src/compiler/scala/tools/nsc/doc/Index.scala
deleted file mode 100644
index f9b9eecdb3..0000000000
--- a/src/compiler/scala/tools/nsc/doc/Index.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc.doc
-
-import scala.collection._
-
-
-trait Index {
-
- type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]]
-
- def firstLetterIndex: Map[Char, SymbolMap]
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
deleted file mode 100644
index 8c0628c073..0000000000
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ /dev/null
@@ -1,365 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package doc
-
-import java.io.File
-import scala.language.postfixOps
-
-/** An extended version of compiler settings, with additional Scaladoc-specific options.
- * @param error A function that prints a string to the appropriate error stream
- * @param print A function that prints the string, without any extra boilerplate of error */
-class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) {
-
- /** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
- * `html`. */
- val docformat = ChoiceSetting (
- "-doc-format",
- "format",
- "Selects in which format documentation is rendered",
- List("html"),
- "html"
- )
-
- /** A setting that defines the overall title of the documentation, typically the name of the library being
- * documented. ''Note:'' This setting is currently not used. */
- val doctitle = StringSetting (
- "-doc-title",
- "title",
- "The overall name of the Scaladoc site",
- ""
- )
-
- /** A setting that defines the overall version number of the documentation, typically the version of the library being
- * documented. ''Note:'' This setting is currently not used. */
- val docversion = StringSetting (
- "-doc-version",
- "version",
- "An optional version number, to be appended to the title",
- ""
- )
-
- val docfooter = StringSetting (
- "-doc-footer",
- "footer",
- "A footer on every ScalaDoc page, by default the EPFL/Typesafe copyright notice. Can be overridden with a custom footer.",
- ""
- )
-
- val docUncompilable = StringSetting (
- "-doc-no-compile",
- "path",
- "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)",
- ""
- )
-
- lazy val uncompilableFiles = docUncompilable.value match {
- case "" => Nil
- case path => io.Directory(path).deepFiles filter (_ hasExtension "scala") toList
- }
-
- /** A setting that defines a URL to be concatenated with source locations and show a link to source files.
- * If needed the sourcepath option can be used to exclude undesired initial part of the link to sources */
- val docsourceurl = StringSetting (
- "-doc-source-url",
- "url",
- "A URL pattern used to build links to template sources; use variables, for example: ?{TPL_NAME} ('Seq'), ?{TPL_OWNER} ('scala.collection'), ?{FILE_PATH} ('scala/collection/Seq')",
- ""
- )
-
- val docExternalDoc = MultiStringSetting (
- "-doc-external-doc",
- "external-doc",
- "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."
- )
-
- val useStupidTypes = BooleanSetting (
- "-Yuse-stupid-types",
- "Print the types of inherited members as seen from their original definition context. Hint: you don't want to do that!"
- )
-
- val docgenerator = StringSetting (
- "-doc-generator",
- "class-name",
- "The fully qualified name of a doclet class, which will be used to generate the documentation",
- "scala.tools.nsc.doc.html.Doclet"
- )
-
- val docRootContent = PathSetting (
- "-doc-root-content",
- "The file from which the root package documentation should be imported.",
- ""
- )
-
- val docImplicits = BooleanSetting (
- "-implicits",
- "Document members inherited by implicit conversions."
- )
-
- val docImplicitsDebug = BooleanSetting (
- "-implicits-debug",
- "Show debugging information for members inherited by implicit conversions."
- )
-
- val docImplicitsShowAll = BooleanSetting (
- "-implicits-show-all",
- "Show members inherited by implicit conversions that are impossible in the default scope. " +
- "(for example conversions that require Numeric[String] to be in scope)"
- )
-
- val docImplicitsSoundShadowing = BooleanSetting (
- "-implicits-sound-shadowing",
- "Use a sound implicit shadowing calculation. Note: this interacts badly with usecases, so " +
- "only use it if you haven't defined usecase for implicitly inherited members."
- )
-
- val docImplicitsHide = MultiStringSetting (
- "-implicits-hide",
- "implicit(s)",
- "Hide the members inherited by the given comma separated, fully qualified implicit conversions. Add dot (.) to include default conversions."
- )
-
- val docDiagrams = BooleanSetting (
- "-diagrams",
- "Create inheritance diagrams for classes, traits and packages."
- )
-
- val docDiagramsDebug = BooleanSetting (
- "-diagrams-debug",
- "Show debugging information for the diagram creation process."
- )
-
- val docDiagramsDotPath = PathSetting (
- "-diagrams-dot-path",
- "The path to the dot executable used to generate the inheritance diagrams. Eg: /usr/bin/dot",
- "dot" // by default, just pick up the system-wide dot
- )
-
- /** The maximum number of normal classes to show in the diagram */
- val docDiagramsMaxNormalClasses = IntSetting(
- "-diagrams-max-classes",
- "The maximum number of superclasses or subclasses to show in a diagram",
- 15,
- None,
- _ => None
- )
-
- /** The maximum number of implicit classes to show in the diagram */
- val docDiagramsMaxImplicitClasses = IntSetting(
- "-diagrams-max-implicits",
- "The maximum number of implicitly converted classes to show in a diagram",
- 10,
- None,
- _ => None
- )
-
- val docDiagramsDotTimeout = IntSetting(
- "-diagrams-dot-timeout",
- "The timeout before the graphviz dot util is forcefully closed, in seconds (default: 10)",
- 10,
- None,
- _ => None
- )
-
- val docDiagramsDotRestart = IntSetting(
- "-diagrams-dot-restart",
- "The number of times to restart a malfunctioning dot process before disabling diagrams (default: 5)",
- 5,
- None,
- _ => None
- )
-
- val docRawOutput = BooleanSetting (
- "-raw-output",
- "For each html file, create another .html.raw file containing only the text. (can be used for quickly diffing two scaladoc outputs)"
- )
-
- val docNoPrefixes = BooleanSetting (
- "-no-prefixes",
- "Prevents generating prefixes in types, possibly creating ambiguous references, but significantly speeding up scaladoc."
- )
-
- val docNoLinkWarnings = BooleanSetting (
- "-no-link-warnings",
- "Avoid warnings for ambiguous and incorrect links."
- )
-
- val docSkipPackages = StringSetting (
- "-skip-packages",
- "<package1>:...:<packageN>",
- "A colon-delimited list of fully qualified package names that will be skipped from scaladoc.",
- ""
- )
-
- val docExpandAllTypes = BooleanSetting (
- "-expand-all-types",
- "Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)"
- )
-
- val docExternalUrls = MultiStringSetting (
- "-external-urls",
- "externalUrl(s)",
- "(deprecated) comma-separated list of package_names=doc_URL for external dependencies, where package names are ':'-separated"
- )
-
- val docGroups = BooleanSetting (
- "-groups",
- "Group similar functions together (based on the @group annotation)"
- )
-
- // Somewhere slightly before r18708 scaladoc stopped building unless the
- // self-type check was suppressed. I hijacked the slotted-for-removal-anyway
- // suppress-vt-warnings option and renamed it for this purpose.
- noSelfCheck.value = true
-
- // For improved help output.
- def scaladocSpecific = Set[Settings#Setting](
- docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
- docDiagrams, docDiagramsDebug, docDiagramsDotPath,
- docDiagramsDotTimeout, docDiagramsDotRestart,
- docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide,
- docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses,
- docNoPrefixes, docNoLinkWarnings, docRawOutput, docSkipPackages,
- docExpandAllTypes, docGroups
- )
- val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
-
- override def isScaladoc = true
-
- // set by the testsuite, when checking test output
- var scaladocQuietRun = false
-
- lazy val skipPackageNames =
- if (docSkipPackages.value == "")
- Set[String]()
- else
- docSkipPackages.value.toLowerCase.split(':').toSet
-
- def skipPackage(qname: String) =
- skipPackageNames(qname.toLowerCase)
-
- lazy val hiddenImplicits: Set[String] = {
- if (docImplicitsHide.value.isEmpty) hardcoded.commonConversionTargets
- else docImplicitsHide.value.toSet flatMap { name: String =>
- if(name == ".") hardcoded.commonConversionTargets
- else Set(name)
- }
- }
-
- def appendIndex(url: String): String = url.stripSuffix("index.html").stripSuffix("/") + "/index.html"
-
- // Deprecated together with 'docExternalUrls' option.
- lazy val extUrlPackageMapping: Map[String, String] = (Map.empty[String, String] /: docExternalUrls.value) {
- case (map, binding) =>
- val idx = binding indexOf "="
- val pkgs = binding substring (0, idx) split ":"
- val url = appendIndex(binding substring (idx + 1))
- map ++ (pkgs map (_ -> url))
- }
-
- lazy val extUrlMapping: Map[String, String] = docExternalDoc.value flatMap { s =>
- val idx = s.indexOf("#")
- if (idx > 0) {
- val (first, last) = s.splitAt(idx)
- Some(new File(first).getAbsolutePath -> appendIndex(last.substring(1)))
- } else {
- error(s"Illegal -doc-external-doc option; expected a pair with '#' separator, found: '$s'")
- None
- }
- } toMap
-
- /**
- * This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty,
- * but ultimately scaladoc has to be useful. :)
- */
- object hardcoded {
-
- /** The common context bounds and some human-readable explanations. Feel free to add more explanations
- * `<root>.scala.package.Numeric` is the type class
- * `tparam` is the name of the type parameter it gets (this only describes type classes with 1 type param)
- * the function result should be a humanly-understandable description of the type class
- */
- val knownTypeClasses: Map[String, String => String] = Map() +
- ("scala.math.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
- ("scala.math.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
- ("scala.math.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
- ("scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
- ("scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
- ("scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
- ("scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
- ("scala.reflect.api.TypeTags.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by an WeakTypeTag, which is a runtime representation of its type that survives erasure")) +
- ("scala.reflect.api.TypeTags.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure"))
-
- /**
- * Set of classes to exclude from index and diagrams
- * TODO: Should be configurable
- */
- def isExcluded(qname: String) = {
- ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
- qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
- ) && !(
- qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
- qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
- qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
- qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
- qname == "scala.runtime.AbstractFunction2"
- )
- )
- }
-
- /** Common conversion targets that affect any class in Scala */
- val commonConversionTargets = Set(
- "scala.Predef.any2stringfmt",
- "scala.Predef.any2stringadd",
- "scala.Predef.any2ArrowAssoc",
- "scala.Predef.any2Ensuring",
- "scala.collection.TraversableOnce.alternateImplicit")
-
- /** There's a reason all these are specialized by hand but documenting each of them is beyond the point */
- val arraySkipConversions = List(
- "scala.Predef.refArrayOps",
- "scala.Predef.intArrayOps",
- "scala.Predef.doubleArrayOps",
- "scala.Predef.longArrayOps",
- "scala.Predef.floatArrayOps",
- "scala.Predef.charArrayOps",
- "scala.Predef.byteArrayOps",
- "scala.Predef.shortArrayOps",
- "scala.Predef.booleanArrayOps",
- "scala.Predef.unitArrayOps",
- "scala.LowPriorityImplicits.wrapRefArray",
- "scala.LowPriorityImplicits.wrapIntArray",
- "scala.LowPriorityImplicits.wrapDoubleArray",
- "scala.LowPriorityImplicits.wrapLongArray",
- "scala.LowPriorityImplicits.wrapFloatArray",
- "scala.LowPriorityImplicits.wrapCharArray",
- "scala.LowPriorityImplicits.wrapByteArray",
- "scala.LowPriorityImplicits.wrapShortArray",
- "scala.LowPriorityImplicits.wrapBooleanArray",
- "scala.LowPriorityImplicits.wrapUnitArray",
- "scala.LowPriorityImplicits.genericWrapArray")
-
- // included as names as here we don't have access to a Global with Definitions :(
- def valueClassList = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double")
- def valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef")
-
- /** Dirty, dirty, dirty hack: the value params conversions can all kick in -- and they are disambiguated by priority
- * but showing priority in scaladoc would make no sense -- so we have to manually remove the conversions that we
- * know will never get a chance to kick in. Anyway, DIRTY DIRTY DIRTY! */
- def valueClassFilter(value: String, conversionName: String): Boolean = {
- val valueName = value.toLowerCase
- val otherValues = valueClassList.filterNot(_ == valueName)
-
- for (prefix <- valueClassFilterPrefixes)
- if (conversionName.startsWith(prefix))
- for (otherValue <- otherValues)
- if (conversionName.startsWith(prefix + "." + otherValue))
- return false
-
- true
- }
- }
-}
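Among the Scaladoc options deleted above, -doc-external-doc entries of the form classpath_entry_path#doc_URL were split at the first '#', the left side resolved to an absolute path and the right side normalized with appendIndex so that it always ends in /index.html. A small standalone sketch of that parsing (the function name and sample entry are illustrative):

    def appendIndex(url: String): String =
      url.stripSuffix("index.html").stripSuffix("/") + "/index.html"

    def parseExternalDoc(entry: String): Option[(String, String)] = {
      val idx = entry.indexOf("#")
      if (idx > 0) {
        val (classpathEntry, url) = entry.splitAt(idx)
        Some(new java.io.File(classpathEntry).getAbsolutePath -> appendIndex(url.substring(1)))
      } else None // malformed: expected a pair with a '#' separator
    }

    // parseExternalDoc("scala-library.jar#http://www.scala-lang.org/api/current")
    //   yields Some((<absolute path of scala-library.jar>, "http://www.scala-lang.org/api/current/index.html"))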
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
deleted file mode 100644
index d3e5c869e0..0000000000
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package doc
-import scala.language.implicitConversions
-import scala.language.postfixOps
-
-/** Some glue between DocParser (which reads source files which can't be compiled)
- * and the scaladoc model.
- */
-trait Uncompilable {
- val global: Global
- val settings: Settings
-
- import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment, NoSymbol }
- import global.definitions.AnyRefClass
- import global.rootMirror.RootClass
-
- private implicit def translateName(name: Global#Name) =
- if (name.isTypeName) newTypeName("" + name) else newTermName("" + name)
-
- def docSymbol(p: DocParser.Parsed) = p.nameChain.foldLeft(RootClass: Symbol)(_.tpe member _)
- def docDefs(code: String) = new DocParser(settings, reporter) docDefs code
- def docPairs(code: String) = docDefs(code) map (p => (docSymbol(p), new DocComment(p.raw)))
-
- lazy val pairs = files flatMap { f =>
- val comments = docPairs(f.slurp())
- if (settings.verbose.value)
- inform("Found %d doc comments in parse-only file %s: %s".format(comments.size, f, comments.map(_._1).mkString(", ")))
-
- comments
- }
- def files = settings.uncompilableFiles
- def symbols = pairs map (_._1)
- def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet
- def comments = {
- if (settings.debug.value || settings.verbose.value)
- inform("Found %d uncompilable files: %s".format(files.size, files mkString ", "))
-
- if (pairs.isEmpty)
- warning("no doc comments read from " + settings.docUncompilable.value)
-
- pairs
- }
- override def toString = pairs.size + " uncompilable symbols:\n" + (
- symbols filterNot (_ == NoSymbol) map (x => " " + x.owner.fullName + " " + x.defString) mkString "\n"
- )
-}
diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/compiler/scala/tools/nsc/doc/Universe.scala
deleted file mode 100644
index 11520c810e..0000000000
--- a/src/compiler/scala/tools/nsc/doc/Universe.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc.doc
-
-/**
- * Class to hold common dependencies across Scaladoc classes.
- * @author Pedro Furlanetto
- * @author Gilles Dubochet
- */
-trait Universe {
- def settings: Settings
- def rootPackage: model.Package
-}
diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
deleted file mode 100755
index f509c63ba0..0000000000
--- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ /dev/null
@@ -1,955 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2012 LAMP/EPFL
- * @author Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package base
-
-import base.comment._
-import reporters.Reporter
-import scala.collection._
-import scala.util.matching.Regex
-import scala.annotation.switch
-import scala.reflect.internal.util.{NoPosition, Position}
-import scala.language.postfixOps
-
-/** The comment parser transforms raw comment strings into `Comment` objects.
- * Call `parse` to run the parser. Note that the parser is stateless and
- * should only be built once for a given Scaladoc run.
- *
- * @param reporter The reporter on which user messages (error, warnings) should be printed.
- *
- * @author Manohar Jonnalagedda
- * @author Gilles Dubochet */
-trait CommentFactoryBase { this: MemberLookupBase =>
-
- val global: Global
- import global.{ reporter, definitions, Symbol }
-
- /* Creates comments with necessary arguments */
- def createComment (
- body0: Option[Body] = None,
- authors0: List[Body] = List.empty,
- see0: List[Body] = List.empty,
- result0: Option[Body] = None,
- throws0: Map[String,Body] = Map.empty,
- valueParams0: Map[String,Body] = Map.empty,
- typeParams0: Map[String,Body] = Map.empty,
- version0: Option[Body] = None,
- since0: Option[Body] = None,
- todo0: List[Body] = List.empty,
- deprecated0: Option[Body] = None,
- note0: List[Body] = List.empty,
- example0: List[Body] = List.empty,
- constructor0: Option[Body] = None,
- source0: Option[String] = None,
- inheritDiagram0: List[String] = List.empty,
- contentDiagram0: List[String] = List.empty,
- group0: Option[Body] = None,
- groupDesc0: Map[String,Body] = Map.empty,
- groupNames0: Map[String,Body] = Map.empty,
- groupPrio0: Map[String,Body] = Map.empty
- ) : Comment = new Comment{
- val body = if(body0 isDefined) body0.get else Body(Seq.empty)
- val authors = authors0
- val see = see0
- val result = result0
- val throws = throws0
- val valueParams = valueParams0
- val typeParams = typeParams0
- val version = version0
- val since = since0
- val todo = todo0
- val deprecated = deprecated0
- val note = note0
- val example = example0
- val constructor = constructor0
- val source = source0
- val inheritDiagram = inheritDiagram0
- val contentDiagram = contentDiagram0
- val groupDesc = groupDesc0
- val group =
- group0 match {
- case Some(Body(List(Paragraph(Chain(List(Summary(Text(groupId)))))))) => Some(groupId.toString.trim)
- case _ => None
- }
- val groupPrio = groupPrio0 flatMap {
- case (group, body) =>
- try {
- body match {
- case Body(List(Paragraph(Chain(List(Summary(Text(prio))))))) => List(group -> prio.trim.toInt)
- case _ => List()
- }
- } catch {
- case _: java.lang.NumberFormatException => List()
- }
- }
- val groupNames = groupNames0 flatMap {
- case (group, body) =>
- try {
- body match {
- case Body(List(Paragraph(Chain(List(Summary(Text(name))))))) if (!name.trim.contains("\n")) => List(group -> (name.trim))
- case _ => List()
- }
- } catch {
- case _: java.lang.NumberFormatException => List()
- }
- }
-
- }
-
- private val endOfText = '\u0003'
- private val endOfLine = '\u000A'
-
- /** Something that should not have happened, happened, and Scaladoc should exit. */
- private def oops(msg: String): Nothing =
- throw FatalError("program logic: " + msg)
-
- /** The body of a line, dropping the (optional) start star-marker,
- * one leading whitespace and all trailing whitespace. */
- private val CleanCommentLine =
- new Regex("""(?:\s*\*\s?)?(.*)""")
-
- /** Dangerous HTML tags that should be replaced by something safer,
- * such as wiki syntax, or that should be dropped. */
- private val DangerousTags =
- new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
-
- /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string
- * if it cannot be salvaged. */
- private def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
- case "p" | "div" => "\n\n"
- case "h1" => "\n= "
- case "/h1" => " =\n"
- case "h2" => "\n== "
- case "/h2" => " ==\n"
- case "h3" => "\n=== "
- case "/h3" => " ===\n"
- case "h4" | "h5" | "h6" => "\n==== "
- case "/h4" | "/h5" | "/h6" => " ====\n"
- case "li" => "\n * - "
- case _ => ""
- }
-
- /** Javadoc tags that should be replaced by something useful, such as wiki
- * syntax, or that should be dropped. */
- private val JavadocTags =
- new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
-
- /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
- private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
- case "code" => "`" + mtch.group(2) + "`"
- case "docRoot" => ""
- case "inheritDoc" => ""
- case "link" => "`" + mtch.group(2) + "`"
- case "linkplain" => "`" + mtch.group(2) + "`"
- case "literal" => mtch.group(2)
- case "value" => "`" + mtch.group(2) + "`"
- case _ => ""
- }
-
- /** Safe HTML tags that can be kept. */
- private val SafeTags =
- new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
-
- private val safeTagMarker = '\u000E'
-
- /** A Scaladoc tag not linked to a symbol and not followed by text */
- private val SingleTagRegex =
- new Regex("""\s*@(\S+)\s*""")
-
- /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
- private val SimpleTagRegex =
- new Regex("""\s*@(\S+)\s+(.*)""")
-
- /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name
- * of the symbol, and the rest of the line. */
- private val SymbolTagRegex =
- new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""")
-
- /** The start of a scaladoc code block */
- private val CodeBlockStartRegex =
- new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
-
- /** The end of a scaladoc code block */
- private val CodeBlockEndRegex =
- new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
-
- /** A key used for a tag map. The key is built from the name of the tag and
- * from the linked symbol if the tag has one.
- * Equality on tag keys is structural. */
- private sealed abstract class TagKey {
- def name: String
- }
-
- private final case class SimpleTagKey(name: String) extends TagKey
- private final case class SymbolTagKey(name: String, symbol: String) extends TagKey
-
- /** Parses a raw comment string into a `Comment` object.
- * @param comment The expanded comment string (including start and end markers) to be parsed.
- * @param src The raw comment source string.
- * @param pos The position of the comment in source. */
- protected def parseAtSymbol(comment: String, src: String, pos: Position, siteOpt: Option[Symbol] = None): Comment = {
- /** The cleaned raw comment as a list of lines. Cleaning removes comment
- * start and end markers, line start markers and unnecessary whitespace. */
- def clean(comment: String): List[String] = {
- def cleanLine(line: String): String = {
- //replaceAll removes trailing whitespaces
- line.replaceAll("""\s+$""", "") match {
- case CleanCommentLine(ctl) => ctl
- case tl => tl
- }
- }
- val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/")
- val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) })
- val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) })
- val markedTagComment =
- SafeTags.replaceAllIn(javadoclessComment, { mtch =>
- java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker)
- })
- markedTagComment.lines.toList map (cleanLine(_))
- }
-
- /** Parses a comment (in the form of a list of lines) to a `Comment`
- * instance, recursively on lines. To do so, it splits the whole comment
- * into main body and tag bodies, then runs the `WikiParser` on each body
- * before creating the comment instance.
- *
- * @param docBody The body of the comment parsed until now.
- * @param tags All tags parsed until now.
- * @param lastTagKey The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged
- * are part of the previous tag or, if none exists, of the body.
- * @param remaining The lines that must still recursively be parsed.
- * @param inCodeBlock Whether the next line is part of a code block (in which no tags must be read). */
- def parse0 (
- docBody: StringBuilder,
- tags: Map[TagKey, List[String]],
- lastTagKey: Option[TagKey],
- remaining: List[String],
- inCodeBlock: Boolean
- ): Comment = remaining match {
-
- case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
- if (!before.trim.isEmpty && !after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false)
- else if (!before.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: ls, false)
- else if (!after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, marker :: after :: ls, true)
- else lastTagKey match {
- case Some(key) =>
- val value =
- ((tags get key): @unchecked) match {
- case Some(b :: bs) => (b + endOfLine + marker) :: bs
- case None => oops("lastTagKey set when no tag exists for key")
- }
- parse0(docBody, tags + (key -> value), lastTagKey, ls, true)
- case None =>
- parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true)
- }
-
- case CodeBlockEndRegex(before, marker, after) :: ls =>
- if (!before.trim.isEmpty && !after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true)
- else if (!before.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: ls, true)
- else if (!after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, marker :: after :: ls, false)
- else lastTagKey match {
- case Some(key) =>
- val value =
- ((tags get key): @unchecked) match {
- case Some(b :: bs) => (b + endOfLine + marker) :: bs
- case None => oops("lastTagKey set when no tag exists for key")
- }
- parse0(docBody, tags + (key -> value), lastTagKey, ls, false)
- case None =>
- parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false)
- }
-
- case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) =>
- val key = SymbolTagKey(name, sym)
- val value = body :: tags.getOrElse(key, Nil)
- parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
-
- case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) =>
- val key = SimpleTagKey(name)
- val value = body :: tags.getOrElse(key, Nil)
- parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
-
- case SingleTagRegex(name) :: ls if (!inCodeBlock) =>
- val key = SimpleTagKey(name)
- val value = "" :: tags.getOrElse(key, Nil)
- parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
-
- case line :: ls if (lastTagKey.isDefined) =>
- val key = lastTagKey.get
- val value =
- ((tags get key): @unchecked) match {
- case Some(b :: bs) => (b + endOfLine + line) :: bs
- case None => oops("lastTagKey set when no tag exists for key")
- }
- parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock)
-
- case line :: ls =>
- if (docBody.length > 0) docBody append endOfLine
- docBody append line
- parse0(docBody, tags, lastTagKey, ls, inCodeBlock)
-
- case Nil =>
- // Take the {inheritance, content} diagram keys aside, as they don't need any parsing
- val inheritDiagramTag = SimpleTagKey("inheritanceDiagram")
- val contentDiagramTag = SimpleTagKey("contentDiagram")
-
- val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match {
- case Some(list) => list
- case None => List.empty
- }
-
- val contentDiagramText: List[String] = tags.get(contentDiagramTag) match {
- case Some(list) => list
- case None => List.empty
- }
-
- val stripTags=List(inheritDiagramTag, contentDiagramTag, SimpleTagKey("template"), SimpleTagKey("documentable"))
- val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1))
-
- val bodyTags: mutable.Map[TagKey, List[Body]] =
- mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, siteOpt))} toSeq: _*)
-
- def oneTag(key: SimpleTagKey): Option[Body] =
- ((bodyTags remove key): @unchecked) match {
- case Some(r :: rs) =>
- if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed")
- Some(r)
- case None => None
- }
-
- def allTags(key: SimpleTagKey): List[Body] =
- (bodyTags remove key) getOrElse Nil
-
- def allSymsOneTag(key: TagKey): Map[String, Body] = {
- val keys: Seq[SymbolTagKey] =
- bodyTags.keys.toSeq flatMap {
- case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
- case stk: SimpleTagKey if (stk.name == key.name) =>
- reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name")
- None
- case _ => None
- }
- val pairs: Seq[(String, Body)] =
- for (key <- keys) yield {
- val bs = (bodyTags remove key).get
- if (bs.length > 1)
- reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed")
- (key.symbol, bs.head)
- }
- Map.empty[String, Body] ++ pairs
- }
-
- val com = createComment (
- body0 = Some(parseWikiAtSymbol(docBody.toString, pos, siteOpt)),
- authors0 = allTags(SimpleTagKey("author")),
- see0 = allTags(SimpleTagKey("see")),
- result0 = oneTag(SimpleTagKey("return")),
- throws0 = allSymsOneTag(SimpleTagKey("throws")),
- valueParams0 = allSymsOneTag(SimpleTagKey("param")),
- typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
- version0 = oneTag(SimpleTagKey("version")),
- since0 = oneTag(SimpleTagKey("since")),
- todo0 = allTags(SimpleTagKey("todo")),
- deprecated0 = oneTag(SimpleTagKey("deprecated")),
- note0 = allTags(SimpleTagKey("note")),
- example0 = allTags(SimpleTagKey("example")),
- constructor0 = oneTag(SimpleTagKey("constructor")),
- source0 = Some(clean(src).mkString("\n")),
- inheritDiagram0 = inheritDiagramText,
- contentDiagram0 = contentDiagramText,
- group0 = oneTag(SimpleTagKey("group")),
- groupDesc0 = allSymsOneTag(SimpleTagKey("groupdesc")),
- groupNames0 = allSymsOneTag(SimpleTagKey("groupname")),
- groupPrio0 = allSymsOneTag(SimpleTagKey("groupprio"))
- )
-
- for ((key, _) <- bodyTags)
- reporter.warning(pos, "Tag '@" + key.name + "' is not recognised")
-
- com
-
- }
-
- parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), false)
-
- }
-
- /** Parses a string containing wiki syntax into a `Body` object.
- * Note that the string is assumed to be clean:
- * - Removed Scaladoc start and end markers.
- * - Removed start-of-line star and one whitespace afterwards (if present).
- * - Removed all end-of-line whitespace.
- * - Only `endOfLine` is used to mark line endings. */
- def parseWikiAtSymbol(string: String, pos: Position, siteOpt: Option[Symbol]): Body = new WikiParser(string, pos, siteOpt).document()
-
- /** TODO
- *
- * @author Ingo Maier
- * @author Manohar Jonnalagedda
- * @author Gilles Dubochet */
- protected final class WikiParser(val buffer: String, pos: Position, siteOpt: Option[Symbol]) extends CharReader(buffer) { wiki =>
- var summaryParsed = false
-
- def document(): Body = {
- val blocks = new mutable.ListBuffer[Block]
- while (char != endOfText)
- blocks += block()
- Body(blocks.toList)
- }
-
- /* BLOCKS */
-
- /** {{{ block ::= code | title | hrule | para }}} */
- def block(): Block = {
- if (checkSkipInitWhitespace("{{{"))
- code()
- else if (checkSkipInitWhitespace('='))
- title()
- else if (checkSkipInitWhitespace("----"))
- hrule()
- else if (checkList)
- listBlock
- else {
- para()
- }
- }
-
- /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
- * Characters used to build lists and their constructors */
- protected val listStyles = Map[String, (Seq[Block] => Block)]( // TODO Should this be defined at some list companion?
- "- " -> ( UnorderedList(_) ),
- "1. " -> ( OrderedList(_,"decimal") ),
- "I. " -> ( OrderedList(_,"upperRoman") ),
- "i. " -> ( OrderedList(_,"lowerRoman") ),
- "A. " -> ( OrderedList(_,"upperAlpha") ),
- "a. " -> ( OrderedList(_,"lowerAlpha") )
- )
-
- /** Checks if the current line is formed with at least one space and one of the listStyles */
- def checkList =
- (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) })
-
- /** {{{
- * nListBlock ::= nLine { mListBlock }
- * nLine ::= nSpc listStyle para '\n'
- * }}}
- * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */
- def listBlock: Block = {
-
- /** Consumes one list item block and returns it, or None if the block is
- * not a list item or belongs to a different list. */
- def listLine(indent: Int, style: String): Option[Block] =
- if (countWhitespace > indent && checkList)
- Some(listBlock)
- else if (countWhitespace != indent || !checkSkipInitWhitespace(style))
- None
- else {
- jumpWhitespace()
- jump(style)
- val p = Paragraph(inline(false))
- blockEnded("end of list line ")
- Some(p)
- }
-
- /** Consumes all list item blocks (possibly with nested lists) of the
- * same list and returns the list block. */
- def listLevel(indent: Int, style: String): Block = {
- val lines = mutable.ListBuffer.empty[Block]
- var line: Option[Block] = listLine(indent, style)
- while (line.isDefined) {
- lines += line.get
- line = listLine(indent, style)
- }
- val constructor = listStyles(style)
- constructor(lines)
- }
-
- val indent = countWhitespace
- val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head)
- listLevel(indent, style)
- }
-
- def code(): Block = {
- jumpWhitespace()
- jump("{{{")
- val str = readUntil("}}}")
- if (char == endOfText)
- reportError(pos, "unclosed code block")
- else
- jump("}}}")
- blockEnded("code block")
- Code(normalizeIndentation(str))
- }
-
- /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
- def title(): Block = {
- jumpWhitespace()
- val inLevel = repeatJump('=')
- val text = inline(check("=" * inLevel))
- val outLevel = repeatJump('=', inLevel)
- if (inLevel != outLevel)
- reportError(pos, "unbalanced or unclosed heading")
- blockEnded("heading")
- Title(text, inLevel)
- }
-
- /** {{{ hrule ::= "----" { '-' } '\n' }}} */
- def hrule(): Block = {
- jumpWhitespace()
- repeatJump('-')
- blockEnded("horizontal rule")
- HorizontalRule()
- }
-
- /** {{{ para ::= inline '\n' }}} */
- def para(): Block = {
- val p =
- if (summaryParsed)
- Paragraph(inline(false))
- else {
- val s = summary()
- val r =
- if (checkParaEnded) List(s) else List(s, inline(false))
- summaryParsed = true
- Paragraph(Chain(r))
- }
- while (char == endOfLine && char != endOfText)
- nextChar()
- p
- }
-
- /* INLINES */
-
- val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r
- val CLOSE_TAG = "^</([A-Za-z]+)>$".r
- private def readHTMLFrom(begin: HtmlTag): String = {
- val list = mutable.ListBuffer.empty[String]
- val stack = mutable.ListBuffer.empty[String]
-
- begin.close match {
- case Some(HtmlTag(CLOSE_TAG(s))) =>
- stack += s
- case _ =>
- return ""
- }
-
- do {
- val str = readUntil { char == safeTagMarker || char == endOfText }
- nextChar()
-
- list += str
-
- str match {
- case OPEN_TAG(s, _, standalone) => {
- if (standalone != "/") {
- stack += s
- }
- }
- case CLOSE_TAG(s) => {
- if (s == stack.last) {
- stack.remove(stack.length-1)
- }
- }
- case _ => ;
- }
- } while (stack.length > 0 && char != endOfText)
-
- list mkString ""
- }
-
- def inline(isInlineEnd: => Boolean): Inline = {
-
- def inline0(): Inline = {
- if (char == safeTagMarker) {
- val tag = htmlTag()
- HtmlTag(tag.data + readHTMLFrom(tag))
- }
- else if (check("'''")) bold()
- else if (check("''")) italic()
- else if (check("`")) monospace()
- else if (check("__")) underline()
- else if (check("^")) superscript()
- else if (check(",,")) subscript()
- else if (check("[[")) link()
- else {
- val str = readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine }
- Text(str)
- }
- }
-
- val inlines: List[Inline] = {
- val iss = mutable.ListBuffer.empty[Inline]
- iss += inline0()
- while (!isInlineEnd && !checkParaEnded) {
- val skipEndOfLine = if (char == endOfLine) {
- nextChar()
- true
- } else {
- false
- }
-
- val current = inline0()
- (iss.last, current) match {
- case (Text(t1), Text(t2)) if skipEndOfLine =>
- iss.update(iss.length - 1, Text(t1 + endOfLine + t2))
- case (i1, i2) if skipEndOfLine =>
- iss ++= List(Text(endOfLine.toString), i2)
- case _ => iss += current
- }
- }
- iss.toList
- }
-
- inlines match {
- case Nil => Text("")
- case i :: Nil => i
- case is => Chain(is)
- }
-
- }
-
- def htmlTag(): HtmlTag = {
- jump(safeTagMarker)
- val read = readUntil(safeTagMarker)
- if (char != endOfText) jump(safeTagMarker)
- HtmlTag(read)
- }
-
- def bold(): Inline = {
- jump("'''")
- val i = inline(check("'''"))
- jump("'''")
- Bold(i)
- }
-
- def italic(): Inline = {
- jump("''")
- val i = inline(check("''"))
- jump("''")
- Italic(i)
- }
-
- def monospace(): Inline = {
- jump("`")
- val i = inline(check("`"))
- jump("`")
- Monospace(i)
- }
-
- def underline(): Inline = {
- jump("__")
- val i = inline(check("__"))
- jump("__")
- Underline(i)
- }
-
- def superscript(): Inline = {
- jump("^")
- val i = inline(check("^"))
- if (jump("^")) {
- Superscript(i)
- } else {
- Chain(Seq(Text("^"), i))
- }
- }
-
- def subscript(): Inline = {
- jump(",,")
- val i = inline(check(",,"))
- jump(",,")
- Subscript(i)
- }
-
- def summary(): Inline = {
- val i = inline(check("."))
- Summary(
- if (jump("."))
- Chain(List(i, Text(".")))
- else
- i
- )
- }
-
- def link(): Inline = {
- val SchemeUri = """([a-z]+:.*)""".r
- jump("[[")
- var parens = 2 + repeatJump('[')
- val start = "[" * parens
- val stop = "]" * parens
- //println("link with " + parens + " matching parens")
- val target = readUntil { check(stop) || check(" ") }
- val title =
- if (!check(stop)) Some({
- jump(" ")
- inline(check(stop))
- })
- else None
- jump(stop)
-
- (target, title) match {
- case (SchemeUri(uri), optTitle) =>
- Link(uri, optTitle getOrElse Text(uri))
- case (qualName, optTitle) =>
- makeEntityLink(optTitle getOrElse Text(target), pos, target, siteOpt)
- }
- }
-
- /* UTILITY */
-
- /** {{{ eol ::= { whitespace } '\n' }}} */
- def blockEnded(blockType: String): Unit = {
- if (char != endOfLine && char != endOfText) {
- reportError(pos, "no additional content on same line after " + blockType)
- jumpUntil(endOfLine)
- }
- while (char == endOfLine)
- nextChar()
- }
-
- /**
- * Eliminates the (common) leading spaces in all lines, based on the first line
- * For indented pieces of code, it reduces the indent to the least whitespace prefix:
- * {{{
- * indented example
- * another indented line
- * if (condition)
- * then do something;
- * ^ this is the least whitespace prefix
- * }}}
- */
- def normalizeIndentation(_code: String): String = {
-
- var code = _code.trim
- var maxSkip = Integer.MAX_VALUE
- var crtSkip = 0
- var wsArea = true
- var index = 0
- var firstLine = true
- var emptyLine = true
-
- while (index < code.length) {
- code(index) match {
- case ' ' =>
- if (wsArea)
- crtSkip += 1
- case c =>
- wsArea = (c == '\n')
- maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
- crtSkip = if (c == '\n') 0 else crtSkip
- firstLine = if (c == '\n') false else firstLine
- emptyLine = if (c == '\n') true else false
- }
- index += 1
- }
-
- if (maxSkip == 0)
- code
- else {
- index = 0
- val builder = new StringBuilder
- while (index < code.length) {
- builder.append(code(index))
- if (code(index) == '\n') {
- // we want to skip as many spaces as are available; if there are fewer spaces (as on empty lines), do not
- // over-consume them
- index += 1
- val limit = index + maxSkip
- while ((index < code.length) && (code(index) == ' ') && index < limit)
- index += 1
- }
- else
- index += 1
- }
- builder.toString
- }
- }
-
- def checkParaEnded(): Boolean = {
- (char == endOfText) ||
- ((char == endOfLine) && {
- val poff = offset
- nextChar() // read EOL
- val ok = {
- checkSkipInitWhitespace(endOfLine) ||
- checkSkipInitWhitespace('=') ||
- checkSkipInitWhitespace("{{{") ||
- checkList ||
- checkSkipInitWhitespace('\u003D')
- }
- offset = poff
- ok
- })
- }
-
- def reportError(pos: Position, message: String) {
- reporter.warning(pos, message)
- }
- }
-
- protected sealed class CharReader(buffer: String) { reader =>
-
- var offset: Int = 0
- def char: Char =
- if (offset >= buffer.length) endOfText else buffer charAt offset
-
- final def nextChar() {
- offset += 1
- }
-
- final def check(chars: String): Boolean = {
- val poff = offset
- val ok = jump(chars)
- offset = poff
- ok
- }
-
- def checkSkipInitWhitespace(c: Char): Boolean = {
- val poff = offset
- jumpWhitespace()
- val ok = jump(c)
- offset = poff
- ok
- }
-
- def checkSkipInitWhitespace(chars: String): Boolean = {
- val poff = offset
- jumpWhitespace()
- val (ok0, chars0) =
- if (chars.charAt(0) == ' ')
- (offset > poff, chars substring 1)
- else
- (true, chars)
- val ok = ok0 && jump(chars0)
- offset = poff
- ok
- }
-
- def countWhitespace: Int = {
- var count = 0
- val poff = offset
- while (isWhitespace(char) && char != endOfText) {
- nextChar()
- count += 1
- }
- offset = poff
- count
- }
-
- /* JUMPERS */
-
- /** jumps a character and consumes it
- * @return true only if the correct character has been jumped */
- final def jump(ch: Char): Boolean = {
- if (char == ch) {
- nextChar()
- true
- }
- else false
- }
-
- /** jumps all the characters in chars, consuming them in the process.
- * @return true only if the correct characters have been jumped */
- final def jump(chars: String): Boolean = {
- var index = 0
- while (index < chars.length && char == chars.charAt(index) && char != endOfText) {
- nextChar()
- index += 1
- }
- index == chars.length
- }
-
- final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = {
- var count = 0
- while (jump(c) && count < max)
- count += 1
- count
- }
-
- final def jumpUntil(ch: Char): Int = {
- var count = 0
- while (char != ch && char != endOfText) {
- nextChar()
- count += 1
- }
- count
- }
-
- final def jumpUntil(chars: String): Int = {
- assert(chars.length > 0)
- var count = 0
- val c = chars.charAt(0)
- while (!check(chars) && char != endOfText) {
- nextChar()
- while (char != c && char != endOfText) {
- nextChar()
- count += 1
- }
- }
- count
- }
-
- final def jumpUntil(pred: => Boolean): Int = {
- var count = 0
- while (!pred && char != endOfText) {
- nextChar()
- count += 1
- }
- count
- }
-
- def jumpWhitespace() = jumpUntil(!isWhitespace(char))
-
- /* READERS */
-
- final def readUntil(c: Char): String = {
- withRead {
- while (char != c && char != endOfText) {
- nextChar()
- }
- }
- }
-
- final def readUntil(chars: String): String = {
- assert(chars.length > 0)
- withRead {
- val c = chars.charAt(0)
- while (!check(chars) && char != endOfText) {
- nextChar()
- while (char != c && char != endOfText)
- nextChar()
- }
- }
- }
-
- final def readUntil(pred: => Boolean): String = {
- withRead {
- while (char != endOfText && !pred) {
- nextChar()
- }
- }
- }
-
- private def withRead(read: => Unit): String = {
- val start = offset
- read
- buffer.substring(start, offset)
- }
-
-
- /* CHARS CLASSES */
-
- def isWhitespace(c: Char) = c == ' ' || c == '\t'
-
- }
-
-}
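
As a hedged illustration of what the deleted CommentFactoryBase/WikiParser handled (not taken from this diff; the method and names below are invented), a Scaladoc comment in the supported wiki syntax looks like this: a summary sentence ending at the first period, '=' headings, '''bold''' and `monospace` inlines, a '- ' list, a {{{...}}} code block, and '@' tags that end up as SimpleTagKey/SymbolTagKey entries.

/** Parses things. The summary ends at this first period.
  *
  * = Usage =
  * Some '''bold''' text, `monospace` text and a list:
  *  - first item
  *  - second item
  *
  * {{{
  * val xs = List(1, 2, 3)   // code blocks are passed through verbatim
  * }}}
  *
  * @param n collected as SymbolTagKey("param", "n")
  * @return collected as SimpleTagKey("return")
  */
def example(n: Int): Int = n
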
diff --git a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala b/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala
deleted file mode 100755
index c11179800c..0000000000
--- a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package doc
-package base
-
-import scala.collection._
-
-sealed trait LinkTo
-final case class LinkToMember[Mbr, Tpl](mbr: Mbr, tpl: Tpl) extends LinkTo
-final case class LinkToTpl[Tpl](tpl: Tpl) extends LinkTo
-final case class LinkToExternal(name: String, url: String) extends LinkTo
-final case class Tooltip(name: String) extends LinkTo
diff --git a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
deleted file mode 100755
index cdcfeaae81..0000000000
--- a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ /dev/null
@@ -1,206 +0,0 @@
-package scala.tools.nsc
-package doc
-package base
-
-import comment._
-
-/** This trait extracts all required information for documentation from compilation units.
- * The base trait has been extracted to allow getting light-weight documentation
- * for a particular symbol in the IDE.*/
-trait MemberLookupBase {
-
- val global: Global
- import global._
-
- def internalLink(sym: Symbol, site: Symbol): Option[LinkTo]
- def chooseLink(links: List[LinkTo]): LinkTo
- def toString(link: LinkTo): String
- def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal]
- def warnNoLink: Boolean
-
- import global._
- import rootMirror.{RootPackage, EmptyPackage}
-
- private def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
-
- def makeEntityLink(title: Inline, pos: Position, query: String, siteOpt: Option[Symbol]) =
- new EntityLink(title) { lazy val link = memberLookup(pos, query, siteOpt) }
-
- private var showExplanation = true
- private def explanation: String =
- if (showExplanation) {
- showExplanation = false
- """
- |Quick crash course on using Scaladoc links
- |==========================================
- |Disambiguating terms and types: Prefix terms with '$' and types with '!' in case both names are in use:
- | - [[scala.collection.immutable.List!.apply class List's apply method]] and
- | - [[scala.collection.immutable.List$.apply object List's apply method]]
- |Disambiguating overloaded members: If a term is overloaded, you can indicate the first part of its signature followed by *:
- | - [[[scala.collection.immutable.List$.fill[A](Int)(⇒A):List[A]* Fill with a single parameter]]]
- | - [[[scala.collection.immutable.List$.fill[A](Int,Int)(⇒A):List[List[A]]* Fill with two parameters]]]
- |Notes:
- | - you can use any number of matching square brackets to avoid interference with the signature
- | - you can use \\. to escape dots in prefixes (don't forget to use * at the end to match the signature!)
- | - you can use \\# to escape hashes, otherwise they will be considered as delimiters, like dots.""".stripMargin
- } else ""
-
- def memberLookup(pos: Position, query: String, siteOpt: Option[Symbol]): LinkTo = {
- var members = breakMembers(query)
-
- // (1) First look in the root package, as most of the links are qualified
- val fromRoot = lookupInRootPackage(pos, members)
-
- // (2) Or recursively go into each containing template.
- val fromParents = siteOpt.fold(Stream.empty[Symbol]) { s =>
- Stream.iterate(s)(_.owner)
- }.takeWhile (!isRoot(_)).map {
- lookupInTemplate(pos, members, _)
- }
-
- val syms = (fromRoot +: fromParents) find (!_.isEmpty) getOrElse Nil
-
- val links = syms flatMap { case (sym, site) => internalLink(sym, site) } match {
- case Nil =>
- // (3) Look at external links
- syms.flatMap { case (sym, owner) =>
- // reconstruct the original link
- def linkName(sym: Symbol) = {
- def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "")
- val packageSuffix = if (sym.isPackage) ".package" else ""
-
- sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix
- }
-
- if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage)
- findExternalLink(sym, linkName(sym))
- else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage)
- findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym))
- else
- None
- }
- case links => links
- }
- links match {
- case Nil =>
- if (warnNoLink)
- reporter.warning(pos, "Could not find any member to link for \"" + query + "\".")
- // (4) if we still haven't found anything, create a tooltip
- Tooltip(query)
- case List(l) => l
- case links =>
- val chosen = chooseLink(links)
- def linkToString(link: LinkTo) = {
- val chosenInfo =
- if (link == chosen) " [chosen]" else ""
- toString(link) + chosenInfo + "\n"
- }
- if (warnNoLink) {
- val allLinks = links.map(linkToString).mkString
- reporter.warning(pos,
- s"""The link target \"$query\" is ambiguous. Several members fit the target:
- |$allLinks
- |$explanation""".stripMargin)
- }
- chosen
- }
- }
-
- private sealed trait SearchStrategy
- private case object BothTypeAndTerm extends SearchStrategy
- private case object OnlyType extends SearchStrategy
- private case object OnlyTerm extends SearchStrategy
-
- private def lookupInRootPackage(pos: Position, members: List[String]) =
- lookupInTemplate(pos, members, EmptyPackage) ::: lookupInTemplate(pos, members, RootPackage)
-
- private def lookupInTemplate(pos: Position, members: List[String], container: Symbol): List[(Symbol, Symbol)] = {
- // Maintaining compatibility with previous links is a bit tricky here:
- // we have a preference for term names for all terms except for the last, where we prefer a class:
- // How to do this:
- // - at each step we do a DFS search with the prefered strategy
- // - if the search doesn't return any members, we backtrack on the last decision
- // * we look for terms with the last member's name
- // * we look for types with the same name, all the way up
- val result = members match {
- case Nil => Nil
- case mbrName::Nil =>
- var syms = lookupInTemplate(pos, mbrName, container, OnlyType) map ((_, container))
- if (syms.isEmpty)
- syms = lookupInTemplate(pos, mbrName, container, OnlyTerm) map ((_, container))
- syms
-
- case tplName::rest =>
- def completeSearch(syms: List[Symbol]) =
- syms flatMap (lookupInTemplate(pos, rest, _))
-
- completeSearch(lookupInTemplate(pos, tplName, container, OnlyTerm)) match {
- case Nil => completeSearch(lookupInTemplate(pos, tplName, container, OnlyType))
- case syms => syms
- }
- }
- //println("lookupInTemplate(" + members + ", " + container + ") => " + result)
- result
- }
-
- private def lookupInTemplate(pos: Position, member: String, container: Symbol, strategy: SearchStrategy): List[Symbol] = {
- val name = member.stripSuffix("$").stripSuffix("!").stripSuffix("*")
- def signatureMatch(sym: Symbol): Boolean = externalSignature(sym).startsWith(name)
-
- // We need to cleanup the bogus classes created by the .class file parser. For example, [[scala.Predef]] resolves
- // to (bogus) class scala.Predef loaded by the class loader -- which we need to eliminate by looking at the info
- // and removing NoType classes
- def cleanupBogusClasses(syms: List[Symbol]) = { syms.filter(_.info != NoType) }
-
- def syms(name: Name) = container.info.nonPrivateMember(name.encodedName).alternatives
- def termSyms = cleanupBogusClasses(syms(newTermName(name)))
- def typeSyms = cleanupBogusClasses(syms(newTypeName(name)))
-
- val result = if (member.endsWith("$"))
- termSyms
- else if (member.endsWith("!"))
- typeSyms
- else if (member.endsWith("*"))
- cleanupBogusClasses(container.info.nonPrivateDecls) filter signatureMatch
- else
- strategy match {
- case BothTypeAndTerm => termSyms ::: typeSyms
- case OnlyType => typeSyms
- case OnlyTerm => termSyms
- }
-
- //println("lookupInTemplate(" + member + ", " + container + ") => " + result)
- result
- }
-
- private def breakMembers(query: String): List[String] = {
- // Okay, how does this work? Well: you split on . but you don't want to split on \. => thus the ugly regex
- // query.split((?<=[^\\\\])\\.).map(_.replaceAll("\\."))
- // The same code, just faster:
- var members = List[String]()
- var index = 0
- var last_index = 0
- val length = query.length
- while (index < length) {
- if ((query.charAt(index) == '.' || query.charAt(index) == '#') &&
- ((index == 0) || (query.charAt(index-1) != '\\'))) {
-
- val member = query.substring(last_index, index).replaceAll("\\\\([#\\.])", "$1")
- // we want to allow javadoc-style links [[#member]] -- which requires us to remove empty members from the first
- // element in the list
- if ((member != "") || (!members.isEmpty))
- members ::= member
- last_index = index + 1
- }
- index += 1
- }
- if (last_index < length)
- members ::= query.substring(last_index, length).replaceAll("\\\\\\.", ".")
- members.reverse
- }
-
- def externalSignature(sym: Symbol) = {
- sym.info // force it, otherwise we see lazy types
- (sym.nameString + sym.signatureString).replaceAll("\\s", "")
- }
-}
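
As a hedged illustration of the queries that the deleted memberLookup resolved (not part of this diff; the member names are invented), Scaladoc links can be fully qualified, disambiguated with a trailing '$' (term) or '!' (type), or anchored to the enclosing template with a javadoc-style leading '#':

/** See [[scala.collection.immutable.List]] for the class,
  * [[scala.collection.immutable.List$ the companion object]] (trailing '$' selects the term),
  * and [[#localHelper a member of the enclosing template]] (leading '#').
  */
def localHelper: Int = 0
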
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
deleted file mode 100755
index eb0d751f3e..0000000000
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package base
-package comment
-
-import scala.collection._
-
-import java.net.URL
-
-/** A body of text. A comment has a single body, which is composed of
- * at least one block. Inside every body is exactly one summary (see
- * [[scala.tools.nsc.doc.model.comment.Summary]]). */
-final case class Body(blocks: Seq[Block]) {
-
- /** The summary text of the comment body. */
- lazy val summary: Option[Inline] = {
- def summaryInBlock(block: Block): Seq[Inline] = block match {
- case Title(text, _) => summaryInInline(text)
- case Paragraph(text) => summaryInInline(text)
- case UnorderedList(items) => items flatMap summaryInBlock
- case OrderedList(items, _) => items flatMap summaryInBlock
- case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock
- case _ => Nil
- }
- def summaryInInline(text: Inline): Seq[Inline] = text match {
- case Summary(text) => List(text)
- case Chain(items) => items flatMap summaryInInline
- case Italic(text) => summaryInInline(text)
- case Bold(text) => summaryInInline(text)
- case Underline(text) => summaryInInline(text)
- case Superscript(text) => summaryInInline(text)
- case Subscript(text) => summaryInInline(text)
- case Link(_, title) => summaryInInline(title)
- case _ => Nil
- }
- (blocks flatMap { summaryInBlock(_) }).toList match {
- case Nil => None
- case inline :: Nil => Some(inline)
- case inlines => Some(Chain(inlines))
- }
- }
-}
-
-/** A block-level element of text, such as a paragraph or code block. */
-sealed abstract class Block
-
-final case class Title(text: Inline, level: Int) extends Block
-final case class Paragraph(text: Inline) extends Block
-final case class Code(data: String) extends Block
-final case class UnorderedList(items: Seq[Block]) extends Block
-final case class OrderedList(items: Seq[Block], style: String) extends Block
-final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
-final case class HorizontalRule() extends Block
-
-/** A section of text inside a block, possibly with formatting. */
-sealed abstract class Inline
-
-final case class Chain(items: Seq[Inline]) extends Inline
-final case class Italic(text: Inline) extends Inline
-final case class Bold(text: Inline) extends Inline
-final case class Underline(text: Inline) extends Inline
-final case class Superscript(text: Inline) extends Inline
-final case class Subscript(text: Inline) extends Inline
-final case class Link(target: String, title: Inline) extends Inline
-final case class Monospace(text: Inline) extends Inline
-final case class Text(text: String) extends Inline
-abstract class EntityLink(val title: Inline) extends Inline { def link: LinkTo }
-object EntityLink {
- def apply(title: Inline, linkTo: LinkTo) = new EntityLink(title) { def link: LinkTo = linkTo }
- def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
-}
-final case class HtmlTag(data: String) extends Inline {
- private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
- private val (isEnd, tagName) = data match {
- case Pattern(s1, s2) =>
- (! s1.isEmpty, Some(s2.toLowerCase))
- case _ =>
- (false, None)
- }
-
- def canClose(open: HtmlTag) = {
- isEnd && tagName == open.tagName
- }
-
- private val TagsNotToClose = Set("br", "img")
- def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
-}
-
-/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
-final case class Summary(text: Inline) extends Inline
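
A hedged sketch of how the case classes above compose and how Body.summary extracts the first Summary node (not part of this diff; the values are invented and the comment package is assumed to be on the classpath):

import scala.tools.nsc.doc.base.comment._

val body = Body(Seq(
  Paragraph(Chain(Seq(
    Summary(Text("Parses comments.")),
    Text(" Further detail follows the summary.")))),
  Code("val x = 1")))

body.summary // Some(Text("Parses comments."))
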
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala
deleted file mode 100644
index 2b28164ca4..0000000000
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala
+++ /dev/null
@@ -1,134 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package base
-package comment
-
-import scala.collection._
-
-/** A Scaladoc comment and all its tags.
- *
- * '''Note:''' the only instantiation site of this class is in [[CommentFactory]].
- *
- * @author Manohar Jonnalagedda
- * @author Gilles Dubochet */
-abstract class Comment {
-
- /** The main body of the comment that describes what the entity does and is. */
- def body: Body
-
- private def closeHtmlTags(inline: Inline) = {
- val stack = mutable.ListBuffer.empty[HtmlTag]
- def scan(i: Inline) {
- i match {
- case Chain(list) =>
- list foreach scan
- case tag: HtmlTag => {
- if (stack.length > 0 && tag.canClose(stack.last)) {
- stack.remove(stack.length-1)
- } else {
- tag.close match {
- case Some(t) =>
- stack += t
- case None =>
- ;
- }
- }
- }
- case _ =>
- ;
- }
- }
- scan(inline)
- Chain(List(inline) ++ stack.reverse)
- }
-
- /** A shorter version of the body. Usually, this is the first sentence of the body. */
- def short: Inline = {
- body.summary match {
- case Some(s) =>
- closeHtmlTags(s)
- case _ =>
- Text("")
- }
- }
-
- /** A list of authors. The empty list is used when no author is defined. */
- def authors: List[Body]
-
- /** A list of other resources to see, including links to other entities or
- * to external documentation. The empty list is used when no other resource
- * is mentioned. */
- def see: List[Body]
-
- /** A description of the result of the entity. Typically, this provides additional
- * information on the domain of the result, contractual post-conditions, etc. */
- def result: Option[Body]
-
- /** A map of exceptions that the entity can throw when accessed, and a
- * description of what they mean. */
- def throws: Map[String, Body]
-
- /** A map of value parameters, and a description of what they are. Typically,
- * this provides additional information on the domain of the parameters,
- * contractual pre-conditions, etc. */
- def valueParams: Map[String, Body]
-
- /** A map of type parameters, and a description of what they are. Typically,
- * this provides additional information on the domain of the parameters. */
- def typeParams: Map[String, Body]
-
- /** The version number of the entity. There is no formatting or further
- * meaning attached to this value. */
- def version: Option[Body]
-
- /** A version number of a containing entity where this member-entity was introduced. */
- def since: Option[Body]
-
- /** An annotation as to expected changes on this entity. */
- def todo: List[Body]
-
- /** Whether the entity is deprecated. Using the `@deprecated` Scala attribute
- * is preferable to using this Scaladoc tag. */
- def deprecated: Option[Body]
-
- /** An additional note concerning the contract of the entity. */
- def note: List[Body]
-
- /** A usage example related to the entity. */
- def example: List[Body]
-
- /** The comment as it appears in the source text. */
- def source: Option[String]
-
- /** A description for the primary constructor */
- def constructor: Option[Body]
-
- /** A set of diagram directives for the inheritance diagram */
- def inheritDiagram: List[String]
-
- /** A set of diagram directives for the content diagram */
- def contentDiagram: List[String]
-
- /** The group this member is part of */
- def group: Option[String]
-
- /** Member group descriptions */
- def groupDesc: Map[String,Body]
-
- /** Member group names (overriding the short tag) */
- def groupNames: Map[String,String]
-
- /** Member group priorities */
- def groupPrio: Map[String,Int]
-
- override def toString =
- body.toString + "\n" +
- (authors map ("@author " + _.toString)).mkString("\n") +
- (result map ("@return " + _.toString)).mkString("\n") +
- (version map ("@version " + _.toString)).mkString
-}
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala b/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala
deleted file mode 100644
index 735b79c336..0000000000
--- a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package scala.tools.nsc.doc
-package doclet
-
-import scala.collection._
-
-/** Custom Scaladoc generators must implement the `Generator` class. A custom generator can be selected in Scaladoc
- * using the `-doc-generator` command line option.
- * The `Generator` class does not provide data about the documented code. A number of data provider traits can be used
- * to configure what data is actually available to the generator:
- * - A `Universer` provides a `Universe` data structure representing the interfaces and comments of the documented
- * program.
- * - An `Indexer` provides precalculated indexing information about a universe.
- * Implementing this class only requires defining the method `generateImpl`. */
-abstract class Generator {
-
- /** A series of tests that must be true before generation can be done. This is used by data provider traits to
- * confirm that they have been correctly initialised before allowing generation to proceed. */
- protected val checks: mutable.Set[()=>Boolean] =
- mutable.Set.empty[()=>Boolean]
-
- /** Outputs documentation (as a side effect). */
- def generate(): Unit = {
- assert(checks forall { check => check() })
- generateImpl
- }
-
- /** Outputs documentation (as a side effect). This method is called only if all `checks` are true. */
- protected def generateImpl(): Unit
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala b/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala
deleted file mode 100644
index 0cdd47182f..0000000000
--- a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package scala.tools.nsc
-package doc
-package doclet
-
-/** A `Generator` may implement the `Indexer` trait to gain access to pre-calculated indexing information */
-trait Indexer extends Generator with Universer {
-
- protected var indexField: Index = null
-
- def index: Index = indexField
-
- def setIndex(i: Index) {
- assert(indexField == null)
- indexField = i
- }
-
- checks += { () =>
- indexField != null
- }
-
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala b/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala
deleted file mode 100644
index ee8b7809e5..0000000000
--- a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package scala.tools.nsc
-package doc
-package doclet
-
-/** A `Generator` may implement the `Universer` trait to gain access to a model of the documented program */
-trait Universer extends Generator {
-
- protected var universeField: Universe = null
-
- def universe: Universe = universeField
-
- def setUniverse(u: Universe) {
- assert(universeField == null)
- universeField = u
- }
-
- checks += { () =>
- universeField != null
- }
-
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala b/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
deleted file mode 100644
index 3aa3e87554..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc.doc
-package html
-
-import doclet._
-
-/** The default doclet used by the scaladoc command line tool
- * when no custom doclet is provided. */
-class Doclet extends Generator with Universer with Indexer {
-
- def generateImpl() {
- new html.HtmlFactory(universe, index).generate
- }
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
deleted file mode 100644
index 4630c3dda8..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ /dev/null
@@ -1,152 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-
-import model._
-import java.io.{ File => JFile }
-import io.{ Streamable, Directory }
-import scala.collection._
-import page.diagram._
-
-import html.page.diagram.DiagramGenerator
-
-/** A class that can generate Scaladoc sites to some fixed root folder.
- * @author David Bernard
- * @author Gilles Dubochet */
-class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
-
- /** The character encoding to be used for generated Scaladoc sites.
- * This value is currently always UTF-8. */
- def encoding: String = "UTF-8"
-
- def siteRoot: JFile = new JFile(universe.settings.outdir.value)
-
- def libResources = List(
- "index.js",
- "jquery-ui.js",
- "jquery.js",
- "jquery.layout.js",
- "scheduler.js",
- "diagrams.js",
- "template.js",
- "tools.tooltip.js",
- "modernizr.custom.js",
-
- "index.css",
- "ref-index.css",
- "template.css",
- "diagrams.css",
-
- "class.png",
- "class_big.png",
- "class_diagram.png",
- "object.png",
- "object_big.png",
- "object_diagram.png",
- "package.png",
- "package_big.png",
- "trait.png",
- "trait_big.png",
- "trait_diagram.png",
- "type.png",
- "type_big.png",
- "type_diagram.png",
-
- "class_to_object_big.png",
- "object_to_class_big.png",
- "trait_to_object_big.png",
- "object_to_trait_big.png",
- "type_to_object_big.png",
- "object_to_type_big.png",
-
- "arrow-down.png",
- "arrow-right.png",
- "filter_box_left.png",
- "filter_box_left2.gif",
- "filter_box_right.png",
- "filterbg.gif",
- "filterboxbarbg.gif",
- "filterboxbg.gif",
-
- "constructorsbg.gif",
- "defbg-blue.gif",
- "defbg-green.gif",
- "filterboxbarbg.png",
- "fullcommenttopbg.gif",
- "ownderbg2.gif",
- "ownerbg.gif",
- "ownerbg2.gif",
- "packagesbg.gif",
- "signaturebg.gif",
- "signaturebg2.gif",
- "typebg.gif",
- "conversionbg.gif",
- "valuemembersbg.gif",
-
- "navigation-li-a.png",
- "navigation-li.png",
- "remove.png",
- "selected-right.png",
- "selected.png",
- "selected2-right.png",
- "selected2.png",
- "selected-right-implicits.png",
- "selected-implicits.png",
- "unselected.png"
- )
-
- /** Generates the Scaladoc site for a model into the site root.
- * A scaladoc site is a set of HTML and related files
- * that document a model extracted from a compiler run.
- * @param model The model to generate in the form of a sequence of packages. */
- def generate() {
-
- def copyResource(subPath: String) {
- val bytes = new Streamable.Bytes {
- val p = "/scala/tools/nsc/doc/html/resource/" + subPath
- val inputStream = getClass.getResourceAsStream(p)
- assert(inputStream != null, p)
- }.toByteArray
- val dest = Directory(siteRoot) / subPath
- dest.parent.createDirectory()
- val out = dest.toFile.bufferedOutput()
- try out.write(bytes, 0, bytes.length)
- finally out.close()
- }
-
- DiagramGenerator.initialize(universe.settings)
-
- libResources foreach (s => copyResource("lib/" + s))
-
- new page.Index(universe, index) writeFor this
- new page.IndexScript(universe, index) writeFor this
-
- writeTemplates(_ writeFor this)
-
- for (letter <- index.firstLetterIndex) {
- new html.page.ReferenceIndex(letter._1, index, universe) writeFor this
- }
-
- DiagramGenerator.cleanup()
- }
-
- def writeTemplates(writeForThis: HtmlPage => Unit) {
- val written = mutable.HashSet.empty[DocTemplateEntity]
- val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings)
-
- def writeTemplate(tpl: DocTemplateEntity) {
- if (!(written contains tpl)) {
- writeForThis(new page.Template(universe, diagramGenerator, tpl))
- written += tpl
- tpl.templates collect { case d: DocTemplateEntity => d } map writeTemplate
- }
- }
-
- writeTemplate(universe.rootPackage)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
deleted file mode 100644
index 69da322418..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ /dev/null
@@ -1,224 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-
-import base._
-import base.comment._
-import model._
-
-import scala.xml.{XML, NodeSeq}
-import scala.xml.dtd.{DocType, PublicID}
-import scala.collection._
-import java.io.Writer
-
-/** An html page that is part of a Scaladoc site.
- * @author David Bernard
- * @author Gilles Dubochet */
-abstract class HtmlPage extends Page { thisPage =>
- /** The title of this page. */
- protected def title: String
-
- /** The page description */
- protected def description: String =
- // unless overwritten, will display the title in a spaced format, keeping - and .
- title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ")
-
- /** The page keywords */
- protected def keywords: String =
- // unless overwritten, same as description, minus the " - "
- description.replaceAll(" - ", " ")
-
- /** Additional header elements (links, scripts, meta tags, etc.) required for this page. */
- protected def headers: NodeSeq
-
- /** The body of this page. */
- def body: NodeSeq
-
- def writeFor(site: HtmlFactory) {
- val doctype =
- DocType("html", PublicID("-//W3C//DTD XHTML 1.1//EN", "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"), Nil)
- val html =
- <html>
- <head>
- <title>{ title }</title>
- <meta name="description" content={ description }/>
- <meta name="keywords" content={ keywords }/>
- <meta http-equiv="content-type" content={ "text/html; charset=" + site.encoding }/>
- { headers }
- </head>
- { body }
- </html>
-
- writeFile(site) { (w: Writer) =>
- w.write("<?xml version='1.0' encoding='" + site.encoding + "'?>\n")
- w.write(doctype.toString + "\n")
- w.write(xml.Xhtml.toXhtml(html))
- }
-
- if (site.universe.settings.docRawOutput.value)
- writeFile(site, ".raw") {
- // we're only interested in the body, as this will go into the diff
- _.write(body.text)
- }
-
- //XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype)
- }
-
- /** Transforms an optional comment into a styled HTML tree representing its body if it is defined, or into an empty
- * node sequence if it is not. */
- def commentToHtml(comment: Option[Comment]): NodeSeq =
- (comment map (commentToHtml(_))) getOrElse NodeSeq.Empty
-
- /** Transforms a comment into a styled HTML tree representing its body. */
- def commentToHtml(comment: Comment): NodeSeq =
- bodyToHtml(comment.body)
-
- def bodyToHtml(body: Body): NodeSeq =
- body.blocks flatMap (blockToHtml(_))
-
- def blockToHtml(block: Block): NodeSeq = block match {
- case Title(in, 1) => <h3>{ inlineToHtml(in) }</h3>
- case Title(in, 2) => <h4>{ inlineToHtml(in) }</h4>
- case Title(in, 3) => <h5>{ inlineToHtml(in) }</h5>
- case Title(in, _) => <h6>{ inlineToHtml(in) }</h6>
- case Paragraph(in) => <p>{ inlineToHtml(in) }</p>
- case Code(data) =>
- <pre>{ SyntaxHigh(data) }</pre> //<pre>{ scala.xml.Text(data) }</pre>
- case UnorderedList(items) =>
- <ul>{ listItemsToHtml(items) }</ul>
- case OrderedList(items, listStyle) =>
- <ol class={ listStyle }>{ listItemsToHtml(items) }</ol>
- case DefinitionList(items) =>
- <dl>{items map { case (t, d) => <dt>{ inlineToHtml(t) }</dt><dd>{ blockToHtml(d) }</dd> } }</dl>
- case HorizontalRule() =>
- <hr/>
- }
-
- def listItemsToHtml(items: Seq[Block]) =
- items.foldLeft(xml.NodeSeq.Empty){ (xmlList, item) =>
- item match {
- case OrderedList(_, _) | UnorderedList(_) => // html requires sub ULs to be put into the last LI
- xmlList.init ++ <li>{ xmlList.last.child ++ blockToHtml(item) }</li>
- case Paragraph(inline) =>
- xmlList :+ <li>{ inlineToHtml(inline) }</li> // LIs are blocks, no need to use Ps
- case block =>
- xmlList :+ <li>{ blockToHtml(block) }</li>
- }
- }
-
- def inlineToHtml(inl: Inline): NodeSeq = inl match {
- case Chain(items) => items flatMap (inlineToHtml(_))
- case Italic(in) => <i>{ inlineToHtml(in) }</i>
- case Bold(in) => <b>{ inlineToHtml(in) }</b>
- case Underline(in) => <u>{ inlineToHtml(in) }</u>
- case Superscript(in) => <sup>{ inlineToHtml(in) }</sup>
- case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
- case Link(raw, title) => <a href={ raw } target="_blank">{ inlineToHtml(title) }</a>
- case Monospace(in) => <code>{ inlineToHtml(in) }</code>
- case Text(text) => scala.xml.Text(text)
- case Summary(in) => inlineToHtml(in)
- case HtmlTag(tag) => scala.xml.Unparsed(tag)
- case EntityLink(target, link) => linkToHtml(target, link, true)
- }
-
- def linkToHtml(text: Inline, link: LinkTo, hasLinks: Boolean) = link match {
- case LinkToTpl(dtpl: TemplateEntity) =>
- if (hasLinks)
- <a href={ relativeLinkTo(dtpl) } class="extype" name={ dtpl.qualifiedName }>{ inlineToHtml(text) }</a>
- else
- <span class="extype" name={ dtpl.qualifiedName }>{ inlineToHtml(text) }</span>
- case LinkToMember(mbr: MemberEntity, inTpl: TemplateEntity) =>
- if (hasLinks)
- <a href={ relativeLinkTo(inTpl) + "#" + mbr.signature } class="extmbr" name={ mbr.qualifiedName }>{ inlineToHtml(text) }</a>
- else
- <span class="extmbr" name={ mbr.qualifiedName }>{ inlineToHtml(text) }</span>
- case Tooltip(tooltip) =>
- <span class="extype" name={ tooltip }>{ inlineToHtml(text) }</span>
- case LinkToExternal(name, url) =>
- <a href={ url } class="extype" target="_top">{ inlineToHtml(text) }</a>
- case _ =>
- inlineToHtml(text)
- }
-
- def typeToHtml(tpes: List[model.TypeEntity], hasLinks: Boolean): NodeSeq = tpes match {
- case Nil =>
- NodeSeq.Empty
- case List(tpe) =>
- typeToHtml(tpe, hasLinks)
- case tpe :: rest =>
- typeToHtml(tpe, hasLinks) ++ scala.xml.Text(" with ") ++ typeToHtml(rest, hasLinks)
- }
-
- def typeToHtml(tpe: model.TypeEntity, hasLinks: Boolean): NodeSeq = {
- val string = tpe.name
- def toLinksOut(inPos: Int, starts: List[Int]): NodeSeq = {
- if (starts.isEmpty && (inPos == string.length))
- NodeSeq.Empty
- else if (starts.isEmpty)
- scala.xml.Text(string.slice(inPos, string.length))
- else if (inPos == starts.head)
- toLinksIn(inPos, starts)
- else {
- scala.xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
- }
- }
- def toLinksIn(inPos: Int, starts: List[Int]): NodeSeq = {
- val (link, width) = tpe.refEntity(inPos)
- val text = comment.Text(string.slice(inPos, inPos + width))
- linkToHtml(text, link, hasLinks) ++ toLinksOut(inPos + width, starts.tail)
- }
- if (hasLinks)
- toLinksOut(0, tpe.refEntity.keySet.toList)
- else
- scala.xml.Text(string)
- }
-
- def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match {
- case Nil => NodeSeq.Empty
- case tpe :: Nil => typeToHtml(tpe, hasLinks)
- case tpe :: tpes => typeToHtml(tpe, hasLinks) ++ sep ++ typesToHtml(tpes, hasLinks, sep)
- }
-
- def hasPage(e: DocTemplateEntity) = {
- e.isPackage || e.isTrait || e.isClass || e.isObject || e.isCaseClass
- }
-
- /** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
- def templateToHtml(tpl: TemplateEntity, name: String = null) = tpl match {
- case dTpl: DocTemplateEntity =>
- if (hasPage(dTpl)) {
- <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ if (name eq null) dTpl.name else name }</a>
- } else {
- scala.xml.Text(if (name eq null) dTpl.name else name)
- }
- case ndTpl: NoDocTemplate =>
- scala.xml.Text(if (name eq null) ndTpl.name else name)
- }
-
- /** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. */
- def templatesToHtml(tplss: List[TemplateEntity], sep: NodeSeq): NodeSeq = tplss match {
- case Nil => NodeSeq.Empty
- case tpl :: Nil => templateToHtml(tpl)
- case tpl :: tpls => templateToHtml(tpl) ++ sep ++ templatesToHtml(tpls, sep)
- }
-
- /** Returns the _big image name corresponding to the DocTemplate Entity (upper left icon) */
- def docEntityKindToBigImage(ety: DocTemplateEntity) =
- if (ety.isTrait && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "trait_to_object_big.png"
- else if (ety.isTrait) "trait_big.png"
- else if (ety.isClass && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "class_to_object_big.png"
- else if (ety.isClass) "class_big.png"
- else if ((ety.isAbstractType || ety.isAliasType) && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "type_to_object_big.png"
- else if ((ety.isAbstractType || ety.isAliasType)) "type_big.png"
- else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isClass) "object_to_class_big.png"
- else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png"
- else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && (ety.companion.get.isAbstractType || ety.companion.get.isAliasType)) "object_to_trait_big.png"
- else if (ety.isObject) "object_big.png"
- else if (ety.isPackage) "package_big.png"
- else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
-}
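
Aside: `typeToHtml` above interleaves plain text and links by walking `refEntity`, a map from character offset to a (link, width) pair over the type's printed name. A standalone sketch of the same offset-splicing idea, reduced to plain strings and a made-up offset map (not part of the removed file):

    object OffsetSplice {
      // For each start offset: the length of the linked span and the URL to use.
      // The offsets here are hypothetical; in Scaladoc they come from TypeEntity.refEntity.
      def splice(name: String, refs: Map[Int, (Int, String)]): String = {
        val starts = refs.keySet.toList.sorted
        def out(pos: Int, rest: List[Int]): String = rest match {
          case Nil                => name.substring(pos)
          case s :: _ if pos == s => in(pos, rest)
          case s :: _             => name.substring(pos, s) + in(s, rest)
        }
        def in(pos: Int, rest: List[Int]): String = {
          val (width, url) = refs(pos)
          val text = name.substring(pos, pos + width)
          s"""<a href="$url">$text</a>""" + out(pos + width, rest.tail)
        }
        out(0, starts)
      }

      def main(args: Array[String]): Unit = {
        val tpe  = "Map[String, Int]"
        val refs = Map(0 -> (3, "Map.html"), 4 -> (6, "String.html"), 12 -> (3, "Int.html"))
        println(splice(tpe, refs))
        // <a href="Map.html">Map</a>[<a href="String.html">String</a>, <a href="Int.html">Int</a>]
      }
    }
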
diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala
deleted file mode 100644
index 62166f7def..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/Page.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc.doc.html
-
-import scala.tools.nsc.doc.model._
-import java.io.{FileOutputStream, File}
-import scala.reflect.NameTransformer
-import java.nio.channels.Channels
-import java.io.Writer
-
-abstract class Page {
- thisPage =>
-
- /** The path of this page, relative to the API site. `path.tail` is a list
- * of folder names leading to this page (from closest package to
- * one-above-root package), `path.head` is the file name of this page.
- * Note that `path` has a length of at least one. */
- def path: List[String]
-
- def absoluteLinkTo(path: List[String]) = path.reverse.mkString("/")
-
- def createFileOutputStream(site: HtmlFactory, suffix: String = "") = {
- val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path) + suffix)
- val folder = file.getParentFile
- if (! folder.exists) {
- folder.mkdirs
- }
- new FileOutputStream(file.getPath)
- }
-
- def writeFile(site: HtmlFactory, suffix: String = "")(fn: Writer => Unit) = {
- val fos = createFileOutputStream(site, suffix)
- val w = Channels.newWriter(fos.getChannel, site.encoding)
- try {
- fn(w)
- }
- finally {
- w.close()
- fos.close()
- }
- }
-
- /** Writes this page as a file. The file's location is relative to the
- * generator's site root, and the encoding is also defined by the generator.
- * @param generator The generator that is writing this page. */
- def writeFor(site: HtmlFactory): Unit
-
- def kindToString(mbr: MemberEntity) =
- mbr match {
- case c: Class => if (c.isCaseClass) "case class" else "class"
- case _: Trait => "trait"
- case _: Package => "package"
- case _: Object => "object"
- case _: AbstractType => "type"
- case _: AliasType => "type"
- case _: Constructor => "new"
- case v: Def => "def"
- case v: Val if (v.isLazyVal) => "lazy val"
- case v: Val if (v.isVal) => "val"
- case v: Val if (v.isVar) => "var"
- case _ => sys.error("Cannot create kind for: " + mbr + " of class " + mbr.getClass)
- }
-
- def templateToPath(tpl: TemplateEntity): List[String] = {
- def doName(tpl: TemplateEntity): String =
- (if (tpl.inPackageObject) "package$$" else "") + NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
- def downPacks(pack: Package): List[String] =
- if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate))
- def downInner(nme: String, tpl: TemplateEntity): (String, Package) = {
- tpl.inTemplate match {
- case inPkg: Package => (nme + ".html", inPkg)
- case inTpl => downInner(doName(inTpl) + "$" + nme, inTpl)
- }
- }
- val (file, pack) =
- tpl match {
- case p: Package => ("package.html", p)
- case _ => downInner(doName(tpl), tpl)
- }
- file :: downPacks(pack)
- }
-
- /** A relative link from this page to some destination class entity.
- * @param destEntity The class or object entity that the link will point to. */
- def relativeLinkTo(destClass: TemplateEntity): String =
- relativeLinkTo(templateToPath(destClass))
-
- /** A relative link from this page to some destination page in the Scaladoc site.
- * @param destPage The page that the link will point to. */
- def relativeLinkTo(destPage: HtmlPage): String = {
- relativeLinkTo(destPage.path)
- }
-
- /** A relative link from this page to some destination path.
- * @param destPath The path that the link will point to. */
- def relativeLinkTo(destPath: List[String]): String = {
- def relativize(from: List[String], to: List[String]): List[String] = (from, to) match {
- case (f :: fs, t :: ts) if (f == t) => // both paths are identical to that point
- relativize(fs, ts)
- case (fss, tss) =>
- List.fill(fss.length - 1)("..") ::: tss
- }
- relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
- }
-}
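
Aside: `relativeLinkTo` above drops the common prefix of the two paths and then climbs out of the remaining source folders with `..`; the `- 1` accounts for the page's own file name. A standalone sketch of the same computation on root-to-leaf path lists (the example paths are made up):

    object RelativeLinks {
      // from and to are root-to-leaf path segments, ending in a file name,
      // e.g. List("scala", "collection", "Seq.html").
      def relativize(from: List[String], to: List[String]): String = {
        def go(f: List[String], t: List[String]): List[String] = (f, t) match {
          case (fh :: ft, th :: tt) if fh == th => go(ft, tt)                    // shared prefix
          case (fs, ts)                         => List.fill(fs.length - 1)("..") ::: ts // climb, then descend
        }
        go(from, to).mkString("/")
      }

      def main(args: Array[String]): Unit = {
        println(relativize(List("scala", "collection", "Seq.html"),
                           List("scala", "util", "Try.html")))                    // ../util/Try.html
        println(relativize(List("index.html"), List("scala", "package.html")))   // scala/package.html
      }
    }
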
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
deleted file mode 100644
index 6fdaaed75f..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ /dev/null
@@ -1,286 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2010-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-package scala.tools.nsc.doc.html
-
-import scala.xml.NodeSeq
-import scala.annotation.tailrec
-
-/** Highlight the syntax of Scala code appearing in a `{{{` wiki block
- * (see method `HtmlPage.blockToHtml`).
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-private[html] object SyntaxHigh {
-
- /** Reserved words, sorted alphabetically
- * (see [[scala.reflect.internal.StdNames]]) */
- val reserved = Array(
- "abstract", "case", "catch", "class", "def",
- "do", "else", "extends", "false", "final", "finally",
- "for", "if", "implicit", "import", "lazy", "match",
- "new", "null", "object", "override", "package",
- "private", "protected", "return", "sealed", "super",
- "this", "throw", "trait", "true", "try", "type",
- "val", "var", "while", "with", "yield")
-
- /** Annotations, sorted alphabetically */
- val annotations = Array(
- "BeanProperty", "SerialVersionUID",
- "beanGetter", "beanSetter", "bridge", "cloneable",
- "deprecated", "deprecatedName",
- "elidable", "field", "getter", "inline",
- "migration", "native", "noinline", "param",
- "remote", "setter", "specialized", "strictfp", "switch",
- "tailrec", "throws", "transient",
- "unchecked", "uncheckedStable", "uncheckedVariance",
- "varargs", "volatile")
-
- /** Standard library classes/objects, sorted alphabetically */
- val standards = Array (
- "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
- "Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest",
- "Console", "Double", "Enumeration", "Float", "Function", "Int",
- "List", "Long", "Manifest", "Map",
- "NoManifest", "None", "Nothing", "Null", "Object", "Option", "OptManifest",
- "Pair", "Predef",
- "Seq", "Set", "Short", "Some", "String", "Symbol",
- "Triple", "TypeTag", "Unit")
-
- def apply(data: String): NodeSeq = {
- val buf = data.getBytes
- val out = new StringBuilder
-
- def compare(offset: Int, key: String): Int = {
- var i = offset
- var j = 0
- val l = key.length
- while (i < buf.length && j < l) {
- val bch = buf(i).toChar
- val kch = key charAt j
- if (bch < kch) return -1
- else if (bch > kch) return 1
- i += 1
- j += 1
- }
- if (j < l) -1
- else if (i < buf.length &&
- ('A' <= buf(i) && buf(i) <= 'Z' ||
- 'a' <= buf(i) && buf(i) <= 'z' ||
- '0' <= buf(i) && buf(i) <= '9' ||
- buf(i) == '_')) 1
- else 0
- }
-
- def lookup(a: Array[String], i: Int): Int = {
- var lo = 0
- var hi = a.length - 1
- while (lo <= hi) {
- val m = (hi + lo) / 2
- val d = compare(i, a(m))
- if (d < 0) hi = m - 1
- else if (d > 0) lo = m + 1
- else return m
- }
- -1
- }
-
- def comment(i: Int): String = {
- val out = new StringBuilder("/")
- def line(i: Int): Int =
- if (i == buf.length || buf(i) == '\n') i
- else {
- out append buf(i).toChar
- line(i+1)
- }
- var level = 0
- def multiline(i: Int, star: Boolean): Int = {
- if (i == buf.length) return i
- val ch = buf(i).toChar
- out append ch
- ch match {
- case '*' =>
- if (star) level += 1
- multiline(i+1, !star)
- case '/' =>
- if (star) {
- if (level > 0) level -= 1
- if (level == 0) i else multiline(i+1, true)
- } else
- multiline(i+1, false)
- case _ =>
- multiline(i+1, false)
- }
- }
- if (buf(i) == '/') line(i) else multiline(i, true)
- out.toString
- }
-
- /* e.g. `val endOfLine = '\u000A'`*/
- def charlit(j: Int): String = {
- val out = new StringBuilder("'")
- def charlit0(i: Int, bslash: Boolean): Int = {
- if (i == buf.length) i
- else if (i > j+6) { out setLength 0; j }
- else {
- val ch = buf(i).toChar
- out append ch
- ch match {
- case '\\' =>
- charlit0(i+1, true)
- case '\'' if !bslash =>
- i
- case _ =>
- if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, true)
- else charlit0(i+1, false)
- }
- }
- }
- charlit0(j, false)
- out.toString
- }
-
- def strlit(i: Int): String = {
- val out = new StringBuilder("\"")
- def strlit0(i: Int, bslash: Boolean): Int = {
- if (i == buf.length) return i
- val ch = buf(i).toChar
- out append ch
- ch match {
- case '\\' =>
- strlit0(i+1, true)
- case '"' if !bslash =>
- i
- case _ =>
- strlit0(i+1, false)
- }
- }
- strlit0(i, false)
- out.toString
- }
-
- def numlit(i: Int): String = {
- val out = new StringBuilder
- def intg(i: Int): Int = {
- if (i == buf.length) return i
- val ch = buf(i).toChar
- ch match {
- case '.' =>
- out append ch
- frac(i+1)
- case _ =>
- if (Character.isDigit(ch)) {
- out append ch
- intg(i+1)
- } else i
- }
- }
- def frac(i: Int): Int = {
- if (i == buf.length) return i
- val ch = buf(i).toChar
- ch match {
- case 'e' | 'E' =>
- out append ch
- expo(i+1, false)
- case _ =>
- if (Character.isDigit(ch)) {
- out append ch
- frac(i+1)
- } else i
- }
- }
- def expo(i: Int, signed: Boolean): Int = {
- if (i == buf.length) return i
- val ch = buf(i).toChar
- ch match {
- case '+' | '-' if !signed =>
- out append ch
- expo(i+1, true)
- case _ =>
- if (Character.isDigit(ch)) {
- out append ch
- expo(i+1, signed)
- } else i
- }
- }
- intg(i)
- out.toString
- }
-
- @tailrec def parse(pre: String, i: Int): Unit = {
- out append pre
- if (i == buf.length) return
- buf(i) match {
- case '\n' =>
- parse("\n", i+1)
- case ' ' =>
- parse(" ", i+1)
- case '&' =>
- parse("&amp;", i+1)
- case '<' if i+1 < buf.length =>
- val ch = buf(i+1).toChar
- if (ch == '-' || ch == ':' || ch == '%')
- parse("<span class=\"kw\">&lt;"+ch+"</span>", i+2)
- else
- parse("&lt;", i+1)
- case '>' =>
- if (i+1 < buf.length && buf(i+1) == ':')
- parse("<span class=\"kw\">&gt;:</span>", i+2)
- else
- parse("&gt;", i+1)
- case '=' =>
- if (i+1 < buf.length && buf(i+1) == '>')
- parse("<span class=\"kw\">=&gt;</span>", i+2)
- else
- parse(buf(i).toChar.toString, i+1)
- case '/' =>
- if (i+1 < buf.length && (buf(i+1) == '/' || buf(i+1) == '*')) {
- val c = comment(i+1)
- parse("<span class=\"cmt\">"+c+"</span>", i+c.length)
- } else
- parse(buf(i).toChar.toString, i+1)
- case '\'' =>
- val s = charlit(i+1)
- if (s.length > 0)
- parse("<span class=\"lit\">"+s+"</span>", i+s.length)
- else
- parse(buf(i).toChar.toString, i+1)
- case '"' =>
- val s = strlit(i+1)
- parse("<span class=\"lit\">"+s+"</span>", i+s.length)
- case '@' =>
- val k = lookup(annotations, i+1)
- if (k >= 0)
- parse("<span class=\"ano\">@"+annotations(k)+"</span>", i+annotations(k).length+1)
- else
- parse(buf(i).toChar.toString, i+1)
- case _ =>
- if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) {
- if (Character.isDigit(buf(i)) ||
- (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1)))) {
- val s = numlit(i)
- parse("<span class=\"num\">"+s+"</span>", i+s.length)
- } else {
- val k = lookup(reserved, i)
- if (k >= 0)
- parse("<span class=\"kw\">"+reserved(k)+"</span>", i+reserved(k).length)
- else {
- val k = lookup(standards, i)
- if (k >= 0)
- parse("<span class=\"std\">"+standards(k)+"</span>", i+standards(k).length)
- else
- parse(buf(i).toChar.toString, i+1)
- }
- }
- } else
- parse(buf(i).toChar.toString, i+1)
- }
- }
-
- parse("", 0)
- scala.xml.Unparsed(out.toString)
- }
-}
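
Aside: the highlighter above classifies identifiers by binary-searching the sorted `reserved`, `annotations` and `standards` arrays with `compare`, which matches the keyword directly against the byte buffer and rejects the match when the next character continues an identifier. A simplified standalone sketch of that lookup over a `String`, with only a handful of keywords and `isLetterOrDigit` standing in for the explicit ASCII ranges:

    object KeywordLookup {
      val reserved = Array("case", "class", "def", "val", "var") // must stay sorted

      // Compare the keyword against the text starting at `offset`; a match only
      // counts if the next character is not part of an identifier.
      def compare(text: String, offset: Int, key: String): Int = {
        var i = offset; var j = 0
        while (i < text.length && j < key.length) {
          if (text(i) < key(j)) return -1
          if (text(i) > key(j)) return 1
          i += 1; j += 1
        }
        if (j < key.length) -1
        else if (i < text.length && (text(i).isLetterOrDigit || text(i) == '_')) 1
        else 0
      }

      def lookup(text: String, offset: Int): Option[String] = {
        var lo = 0; var hi = reserved.length - 1
        while (lo <= hi) {
          val m = (lo + hi) / 2
          val d = compare(text, offset, reserved(m))
          if (d < 0) hi = m - 1 else if (d > 0) lo = m + 1 else return Some(reserved(m))
        }
        None
      }

      def main(args: Array[String]): Unit = {
        println(lookup("val x = 1", 0)) // Some(val)
        println(lookup("value = 1", 0)) // None: 'u' continues an identifier
      }
    }
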
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
deleted file mode 100644
index 8802d7c35c..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-
-import model._
-
-import scala.collection._
-import scala.xml._
-import scala.util.parsing.json.{JSONObject, JSONArray}
-
-class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
-
- def path = List("index.html")
-
- def title = {
- val s = universe.settings
- ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
- ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
- }
-
- val headers =
- <xml:group>
- <link href={ relativeLinkTo{List("index.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
- </xml:group>
-
- private val scripts = {
- val sources =
- (List("jquery.js", "jquery-ui.js", "jquery.layout.js", "scheduler.js", "index.js").map {
- x => relativeLinkTo(List(x, "lib"))
- }) :+ "index.js"
-
- sources map {
- src => <script defer="defer" type="text/javascript" src={src}></script>
- }
- }
-
- val body =
- <body>
- <div id="library">
- <img class='class icon' src={ relativeLinkTo{List("class.png", "lib")} }/>
- <img class='trait icon' src={ relativeLinkTo{List("trait.png", "lib")} }/>
- <img class='object icon' src={ relativeLinkTo{List("object.png", "lib")} }/>
- <img class='package icon' src={ relativeLinkTo{List("package.png", "lib")} }/>
- </div>
- { browser }
- <div id="content" class="ui-layout-center">
- <iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
- </div>
- { scripts }
- </body>
-
- def letters: NodeSeq =
- '_' +: ('a' to 'z') map {
- char => {
- val label = if (char == '_') '#' else char.toUpper
-
- index.firstLetterIndex.get(char) match {
- case Some(_) =>
- <a target="template" href={ "index/index-" + char + ".html" }>{
- label
- }</a>
- case None => <span>{ label }</span>
- }
- }
- }
-
- def browser =
- <div id="browser" class="ui-layout-west">
- <div class="ui-west-center">
- <div id="filter">
- <div id="textfilter"></div>
- <div id="letters">{ letters }</div>
- </div>
- <div class="pack" id="tpl">{
- def packageElem(pack: model.Package): NodeSeq = {
- <xml:group>
- { if (!pack.isRootPackage)
- <a class="tplshow" href={ relativeLinkTo(pack) } target="template">{ pack.qualifiedName }</a>
- else NodeSeq.Empty
- }
- <ol class="templates">{
- val tpls: Map[String, Seq[DocTemplateEntity]] =
- (pack.templates collect {
- case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
- }) groupBy (_.name)
-
- val placeholderSeq: NodeSeq = <div class="placeholder"></div>
-
- def createLink(entity: DocTemplateEntity, includePlaceholder: Boolean, includeText: Boolean) = {
- val entityType = kindToString(entity)
- val linkContent = (
- { if (includePlaceholder) placeholderSeq else NodeSeq.Empty }
- ++
- { if (includeText) <span class="tplLink">{ Text(packageQualifiedName(entity)) }</span> else NodeSeq.Empty }
- )
- <a class="tplshow" href={ relativeLinkTo(entity) } target="template"><span class={ entityType }>({ Text(entityType) })</span>{ linkContent }</a>
- }
-
- for (tn <- tpls.keySet.toSeq sortBy (_.toLowerCase)) yield {
- val entities = tpls(tn)
- val row = (entities find (e => e.isPackage || e.isObject), entities find (e => e.isTrait || e.isClass))
-
- val itemContents = row match {
- case (Some(obj), None) => createLink(obj, includePlaceholder = true, includeText = true)
-
- case (maybeObj, Some(template)) =>
- val firstLink = maybeObj match {
- case Some(obj) => createLink(obj, includePlaceholder = false, includeText = false)
- case None => placeholderSeq
- }
-
- firstLink ++ createLink(template, includePlaceholder = false, includeText = true)
-
- case _ => // FIXME: this default case should not be necessary. For some reason AnyRef is not a package, object, trait, or class
- val entry = entities.head
- placeholderSeq ++ createLink(entry, includePlaceholder = false, includeText = true)
- }
-
- <li title={ entities.head.qualifiedName }>{ itemContents }</li>
- }
- }</ol>
- <ol class="packages"> {
- for (sp <- pack.packages sortBy (_.name.toLowerCase)) yield
- <li class="pack" title={ sp.qualifiedName }>{ packageElem(sp) }</li>
- }</ol>
- </xml:group>
- }
- packageElem(universe.rootPackage)
- }</div></div>
- </div>
-
- def packageQualifiedName(ety: DocTemplateEntity): String =
- if (ety.inTemplate.isPackage) ety.name
- else (packageQualifiedName(ety.inTemplate) + "." + ety.name)
-
-}
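
Aside: the package browser above groups a package's templates by name and pairs an object/package entry with a class/trait entry of the same name, so companions share a single row. A standalone sketch of that pairing over a toy entity model (the `Tpl` type and the sample data are made up):

    object CompanionRows {
      // A toy stand-in for DocTemplateEntity: just a name and a kind.
      final case class Tpl(name: String, kind: String) // kind: "object", "class", "trait", "package"

      // For each name, pick at most one object/package and one class/trait,
      // mirroring how the browser lays out companions on one row.
      def rows(tpls: Seq[Tpl]): Seq[(String, Option[Tpl], Option[Tpl])] =
        tpls.groupBy(_.name).toSeq.sortBy(_._1.toLowerCase).map { case (name, entities) =>
          val obj = entities.find(e => e.kind == "object" || e.kind == "package")
          val cls = entities.find(e => e.kind == "class" || e.kind == "trait")
          (name, obj, cls)
        }

      def main(args: Array[String]): Unit = {
        val tpls = Seq(Tpl("List", "object"), Tpl("List", "class"), Tpl("Ordering", "trait"))
        rows(tpls) foreach println
        // (List,Some(Tpl(List,object)),Some(Tpl(List,class)))
        // (Ordering,None,Some(Tpl(Ordering,trait)))
      }
    }
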
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
deleted file mode 100644
index a205e02533..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc.doc.html.page
-
-import scala.tools.nsc.doc
-import scala.tools.nsc.doc.model.{Package, DocTemplateEntity}
-import scala.tools.nsc.doc.html.{Page, HtmlFactory}
-import java.nio.channels.Channels
-import scala.util.parsing.json.{JSONObject, JSONArray}
-
-class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
- def path = List("index.js")
-
- override def writeFor(site: HtmlFactory) {
- writeFile(site) {
- _.write("Index.PACKAGES = " + packages.toString() + ";")
- }
- }
-
- val packages = {
- val pairs = allPackagesWithTemplates.toIterable.map(_ match {
- case (pack, templates) => {
- val merged = mergeByQualifiedName(templates)
-
- val ary = merged.keys.toList.sortBy(_.toLowerCase).map(key => {
- val pairs = merged(key).map(
- t => kindToString(t) -> relativeLinkTo(t)
- ) :+ ("name" -> key)
-
- JSONObject(scala.collection.immutable.Map(pairs : _*))
- })
-
- pack.qualifiedName -> JSONArray(ary)
- }
- }).toSeq
-
- JSONObject(scala.collection.immutable.Map(pairs : _*))
- }
-
- def mergeByQualifiedName(source: List[DocTemplateEntity]) = {
- var result = Map[String, List[DocTemplateEntity]]()
-
- for (t <- source) {
- val k = t.qualifiedName
- result += k -> (result.getOrElse(k, List()) :+ t)
- }
-
- result
- }
-
- def allPackages = {
- def f(parent: Package): List[Package] = {
- parent.packages.flatMap(
- p => f(p) :+ p
- )
- }
- f(universe.rootPackage).sortBy(_.toString)
- }
-
- def allPackagesWithTemplates = {
- Map(allPackages.map((key) => {
- key -> key.templates.collect {
- case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
- }
- }) : _*)
- }
-}
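
Aside: `IndexScript` writes `Index.PACKAGES` as a JSON object mapping each package's qualified name to an array of entries, where each entry carries one kind-to-relative-link pair per merged template plus a `name` field. A standalone sketch of the emitted shape for hypothetical entries (the paths below are made up):

    object IndexShape {
      // One JSON entry per merged template: a "kind": "relative link" pair per
      // occurrence plus a "name" field, mirroring the pairs built above.
      def entry(name: String, kinds: Map[String, String]): String =
        (kinds.map { case (k, link) => s"\"$k\": \"$link\"" } ++ Seq(s"\"name\": \"$name\"")).mkString("{", ", ", "}")

      def main(args: Array[String]): Unit = {
        val pkg = "scala.collection"
        val entries = Seq(entry("scala.collection.Seq",
          Map("trait" -> "scala/collection/Seq.html", "object" -> "scala/collection/Seq$.html")))
        println(s"""Index.PACKAGES = {"$pkg": [${entries.mkString(", ")}]};""")
      }
    }
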
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
deleted file mode 100755
index a74c2eedbd..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Pedro Furlanetto
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-import doc.model._
-
-class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends HtmlPage {
-
- def path = List("index-"+letter+".html", "index")
-
- def title = {
- val s = universe.settings
- ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
- ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
- }
-
- def headers =
- <xml:group>
- <link href={ relativeLinkTo(List("ref-index.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- </xml:group>
-
-
- private def entry(name: String, methods: Iterable[MemberEntity]) = {
- val occurrences = methods.map(method => {
- val html = templateToHtml(method.inDefinitionTemplates.head)
- if (method.deprecation.isDefined) {
- <strike>{ html }</strike>
- } else {
- html
- }
- }).toList.distinct
-
- <div class="entry">
- <div class="name">{
- if (methods.find { ! _.deprecation.isDefined } != None)
- name
- else
- <strike>{ name }</strike>
- }</div>
- <div class="occurrences">{
- for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
- }</div>
- </div>
- }
-
- def body =
- <body>{
- for(groups <- index.firstLetterIndex(letter)) yield
- entry(groups._1, groups._2.view)
- }</body>
-
-}
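
Aside: `entry` above strikes through the index name only when every occurrence of that member is deprecated (the `methods.find { ! _.deprecation.isDefined } != None` test asks whether some non-deprecated occurrence exists). A standalone sketch of that rule over a toy member type:

    object DeprecationMark {
      // A toy member: just a name and whether it is deprecated.
      final case class Member(name: String, deprecated: Boolean)

      // The name is struck through only when *every* occurrence is deprecated.
      def renderName(name: String, occurrences: Seq[Member]): String =
        if (occurrences.exists(!_.deprecated)) name else s"<strike>$name</strike>"

      def main(args: Array[String]): Unit = {
        println(renderName("map", Seq(Member("map", false), Member("map", true)))) // map
        println(renderName("elements", Seq(Member("elements", true))))             // <strike>elements</strike>
      }
    }
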
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
deleted file mode 100644
index 68289b7474..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
+++ /dev/null
@@ -1,128 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-
-import model._
-import scala.xml.{NodeSeq, Unparsed}
-import java.io.File
-
-class Source(sourceFile: File) extends HtmlPage {
-
- val path = List("source.html")
-
- val title = "Scaladoc: page source"
-
- val headers =
- NodeSeq.Empty
-
- val body =
- <body>
- <h1>Page source is not implemented yet</h1>
- </body>
-
- /*
-
-
- def readTextFromSrcDir(subPath: String) :Option[String] = {
- readTextFromFile(new File(sourceDir, subPath))
- }
-
- def readTextFromFile(f : File) :Option[String] = {
- if (f.exists) {
- Some(Source.fromFile(f)(Codec.default).getLines().mkString(""))
- } else {
- None
- }
- }
-
-
- def writeTextToFile(f : File, txt : String, header: Option[String], footer: Option[String]) {
- val out = new FileOutputStream(f)
- try {
- val enc = "UTF-8"
- header.foreach(s => out.write(s.getBytes(enc)))
- out.write(txt.getBytes(enc))
- footer.foreach(s => out.write(s.getBytes(enc)))
- } finally {
- try {
- out.close()
- } catch {
- case _ => //ignore
- }
- }
- }
-
- trait SourceHtmlizer {
- def scalaToHtml(src :File) : Option[File]
- }
-
- lazy val sourceHtmlizer : SourceHtmlizer = {
- if (cfg.htmlizeSource) {
- new SourceHtmlizer {
-
- val inDir: File = cfg.sourcedir
- val outDir: File = cfg.outputdir
-
- private def relativize(uri: URI, from: URI) = linkHelper.relativize(uri, from).getOrElse("__notFound__" + uri.getPath)
-
- def header(dest: URI) = Some("""
- <html>
- <head>
- <link href='""" + relativize(new URI("site:/_highlighter/SyntaxHighlighter.css"), dest) + """' rel='stylesheet' type='text/css'/>
- <script language='javascript' src='""" + relativize(new URI("site:/_highlighter/shAll.js"), dest) + """'></script>
- </head>
- <body>
- <pre name="code" class="scala" style="width:100%">
- """)
-
- def footer(dest: URI) = Some("""</pre>
- <script language='javascript'>
- dp.SyntaxHighlighter.ClipboardSwf = '""" + relativize(new URI("site:/_highlighter/clipboard.swf"), dest) + """';
- dp.SyntaxHighlighter.HighlightAll('code');
- </script>
- </body>
- </html>
- """)
-
- //TODO: escape the source code
- def scalaToHtml(src :File) = {
- val dest = new File(outDir, fileHelper.relativePathUnderDir(src, inDir) + ".html")
- if (!dest.exists || dest.lastModified < src.lastModified) {
-
- //we need to verify whether the directory we are trying to write to has already been created or not
- if(!dest.getParentFile.exists) dest.getParentFile.mkdirs
-
- val uri = linkHelper.uriFor(dest).get
- var txt = fileHelper.readTextFromFile(src).getOrElse("")
- txt = txt.replace("<", "&lt;")
- fileHelper.writeTextToFile(dest, txt, header(uri), footer(uri))
- }
- Some(dest)
- }
-
- def copyResources() {
- val loader = this.getClass().getClassLoader()
- val buf = new Array[Byte](1024)
- def copyResource(name: String) = fileHelper.copyResource("/scala/tools/nsc/doc/html/resource/", name, outDir, loader, buf)
- copyResource("_highlighter/clipboard.swf")
- copyResource("_highlighter/shAll.js")
- copyResource("_highlighter/SyntaxHighlighter.css")
- }
-
- copyResources()
- }
- } else {
- new SourceHtmlizer {
- def scalaToHtml(src :File) = None
- }
- }
- }
- */
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
deleted file mode 100644
index 63c77e7bb3..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ /dev/null
@@ -1,977 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-
-import base._
-import base.comment._
-
-import model._
-import model.diagram._
-import diagram._
-
-import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
-import scala.language.postfixOps
-
-import model._
-import model.diagram._
-import diagram._
-
-class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemplateEntity) extends HtmlPage {
-
- val path =
- templateToPath(tpl)
-
- def title = {
- val s = universe.settings
-
- tpl.name +
- ( if (!s.doctitle.isDefault) " - " + s.doctitle.value else "" ) +
- ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" ) +
- " - " + tpl.qualifiedName
- }
-
- val headers =
- <xml:group>
- <link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
- <link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
- <script type="text/javascript">
- if(top === self) {{
- var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
- var hash = '{ val p = templateToPath(tpl); (p.tail.reverse ::: List(p.head.replace(".html", ""))).mkString(".") }';
- var anchor = window.location.hash;
- var anchor_opt = '';
- if (anchor.length { scala.xml.Unparsed(">=") /* unless we use Unparsed, it gets escaped and crashes the script */ } 1)
- anchor_opt = '@' + anchor.substring(1);
- window.location.href = url + '#' + hash + anchor_opt;
- }}
- </script>
- </xml:group>
-
- private val scripts = {
- val sources = {
- val default = List("jquery.js", "jquery-ui.js", "tools.tooltip.js", "template.js")
- val forDiagrams = List("modernizr.custom.js", "diagrams.js")
-
- (default ++ (if (universe.settings.docDiagrams.value) forDiagrams else Nil)) map {
- x => x.replace('.', '-') -> relativeLinkTo(List(x, "lib"))
- }
- }
-
- sources map {
- case (id, src) =>
- <script defer="defer" type="text/javascript" id={id} src={src}></script>
- }
- }
-
- val valueMembers =
- tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
-
- val (absValueMembers, nonAbsValueMembers) =
- valueMembers partition (_.isAbstract)
-
- val (deprValueMembers, nonDeprValueMembers) =
- nonAbsValueMembers partition (_.deprecation.isDefined)
-
- val (concValueMembers, shadowedImplicitMembers) =
- nonDeprValueMembers partition (!_.isShadowedOrAmbiguousImplicit)
-
- val typeMembers =
- tpl.abstractTypes ++ tpl.aliasTypes ++ tpl.templates.filter(x => x.isTrait || x.isClass) sorted (implicitly[Ordering[MemberEntity]])
-
- val constructors = (tpl match {
- case cls: Class => (cls.constructors: List[MemberEntity]).sorted
- case _ => Nil
- })
-
-  /* For the body there is a special case for AnyRef; otherwise AnyRef appears
-   * like a package/object. This should be fixed properly; this implementation
-   * is just a patch. */
- val body = {
- val templateName = if (tpl.isRootPackage) "root package" else tpl.name
- val displayName = tpl.companion match {
- case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
- <a href={relativeLinkTo(companion)} title="Go to companion">{ templateName }</a>
- case _ =>
- templateName
- }
- val owner = {
- if (tpl.isRootPackage || tpl.inTemplate.isRootPackage)
- NodeSeq.Empty
- else
- <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, scala.xml.Text(".")) }</p>
- }
-
- <body class={ if (tpl.isType) "type" else "value" }>
- <div id="definition">
- {
- tpl.companion match {
- case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
- <a href={relativeLinkTo(companion)} title="Go to companion"><img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/></a>
- case _ =>
- <img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/>
- }}
- { owner }
- <h1>{ displayName }</h1>
- </div>
-
- { signature(tpl, true) }
- { memberToCommentHtml(tpl, tpl.inTemplate, true) }
-
- <div id="mbrsel">
- <div id='textfilter'><span class='pre'/><span class='input'><input id='mbrsel-input' type='text' accesskey='/'/></span><span class='post'/></div>
- { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty && (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1)))
- NodeSeq.Empty
- else
- <div id="order">
- <span class="filtertype">Ordering</span>
- <ol>
- {
- if (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1))
- NodeSeq.Empty
- else
- <li class="group out"><span>Grouped</span></li>
- }
- <li class="alpha in"><span>Alphabetic</span></li>
- {
- if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty)
- NodeSeq.Empty
- else
- <li class="inherit out"><span>By inheritance</span></li>
- }
- </ol>
- </div>
- }
- { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
- {
- if (!tpl.linearizationTemplates.isEmpty)
- <div id="ancestors">
- <span class="filtertype">Inherited<br/>
- </span>
- <ol id="linearization">
- { (tpl :: tpl.linearizationTemplates).map(wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li>) }
- </ol>
- </div>
- else NodeSeq.Empty
- } ++ {
- if (!tpl.conversions.isEmpty)
- <div id="ancestors">
- <span class="filtertype">Implicitly<br/>
- </span>
- <ol id="implicits"> {
- tpl.conversions.map { conv =>
- val name = conv.conversionQualifiedName
- val hide = universe.settings.hiddenImplicits(name)
- <li class="in" name={ name } data-hidden={ hide.toString }><span>{ "by " + conv.conversionShortName }</span></li>
- }
- }
- </ol>
- </div>
- else NodeSeq.Empty
- } ++
- <div id="ancestors">
- <span class="filtertype"></span>
- <ol>
- <li class="hideall out"><span>Hide All</span></li>
- <li class="showall in"><span>Show all</span></li>
- </ol>
- <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
- </div>
- }
- {
- <div id="visbl">
- <span class="filtertype">Visibility</span>
- <ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
- </div>
- }
- </div>
-
- <div id="template">
- <div id="allMembers">
- { if (constructors.isEmpty) NodeSeq.Empty else
- <div id="constructors" class="members">
- <h3>Instance Constructors</h3>
- <ol>{ constructors map (memberToHtml(_, tpl)) }</ol>
- </div>
- }
-
- { if (typeMembers.isEmpty) NodeSeq.Empty else
- <div id="types" class="types members">
- <h3>Type Members</h3>
- <ol>{ typeMembers map (memberToHtml(_, tpl)) }</ol>
- </div>
- }
-
- { if (absValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
- <h3>Abstract Value Members</h3>
- <ol>{ absValueMembers map (memberToHtml(_, tpl)) }</ol>
- </div>
- }
-
- { if (concValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
- <h3>{ if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }</h3>
- <ol>{ concValueMembers map (memberToHtml(_, tpl)) }</ol>
- </div>
- }
-
- { if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
- <h3>Shadowed Implicit Value Members</h3>
- <ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
- </div>
- }
-
- { if (deprValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
- <h3>Deprecated Value Members</h3>
- <ol>{ deprValueMembers map (memberToHtml(_, tpl)) }</ol>
- </div>
- }
- </div>
-
- <div id="inheritedMembers">
- {
- // linearization
- NodeSeq fromSeq (for ((superTpl, superType) <- (tpl.linearizationTemplates zip tpl.linearizationTypes)) yield
- <div class="parent" name={ superTpl.qualifiedName }>
- <h3>Inherited from {
- typeToHtmlWithStupidTypes(tpl, superTpl, superType)
- }</h3>
- </div>
- )
- }
- {
- // implicitly inherited
- NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield
- <div class="conversion" name={ conversion.conversionQualifiedName }>
- <h3>Inherited by implicit conversion { conversion.conversionShortName } from
- { typeToHtml(tpl.resultType, true) } to { typeToHtml(conversion.targetType, true) }
- </h3>
- </div>
- )
- }
- </div>
-
- <div id="groupedMembers">
- {
- val allGroups = tpl.members.map(_.group).distinct
- val orderedGroups = allGroups.map(group => (tpl.groupPriority(group), group)).sorted.map(_._2)
- // linearization
- NodeSeq fromSeq (for (group <- orderedGroups) yield
- <div class="group" name={ group }>
- <h3>{ tpl.groupName(group) }</h3>
- {
- tpl.groupDescription(group) match {
- case Some(body) => <div class="comment cmt">{ bodyToHtml(body) }</div>
- case _ => NodeSeq.Empty
- }
- }
- </div>
- )
- }
- </div>
-
- </div>
-
- <div id="tooltip" ></div>
-
- {
- if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value))
- <div id="footer">Scala programming documentation. Copyright (c) 2003-2013 <a href="http://www.epfl.ch" target="_top">EPFL</a>, with contributions from <a href="http://typesafe.com" target="_top">Typesafe</a>.</div>
- else
- <div id="footer"> { tpl.universe.settings.docfooter.value } </div>
- }
- { scripts }
- </body>
- }
-
- def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
- val memberComment = memberToCommentHtml(mbr, inTpl, false)
- <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
- data-isabs={ mbr.isAbstract.toString }
- fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
- group={ mbr.group }>
- <a id={ mbr.signature }/>
- <a id={ mbr.signatureCompat }/>
- { signature(mbr, false) }
- { memberComment }
- </li>
- }
-
- def memberToCommentHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean): NodeSeq = {
- mbr match {
- case dte: DocTemplateEntity if isSelf =>
- // comment of class itself
- <xml:group>
- <div id="comment" class="fullcommenttop">{ memberToCommentBodyHtml(mbr, inTpl, isSelf = true) }</div>
- </xml:group>
- case dte: DocTemplateEntity if mbr.comment.isDefined =>
- // comment of inner, documented class (only short comment, full comment is on the class' own page)
- memberToInlineCommentHtml(mbr, isSelf)
- case _ =>
- // comment of non-class member or non-documentented inner class
- val commentBody = memberToCommentBodyHtml(mbr, inTpl, isSelf = false)
- if (commentBody.isEmpty)
- NodeSeq.Empty
- else {
- val shortComment = memberToShortCommentHtml(mbr, isSelf)
- val longComment = memberToUseCaseCommentHtml(mbr, isSelf) ++ memberToCommentBodyHtml(mbr, inTpl, isSelf)
-
- val includedLongComment = if (shortComment.text.trim == longComment.text.trim)
- NodeSeq.Empty
- else
- <div class="fullcomment">{ longComment }</div>
-
- shortComment ++ includedLongComment
- }
- }
- }
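
Aside: `memberToCommentHtml` above emits the expanded `fullcomment` block only when its flattened text differs from the short comment, so one-line comments are not rendered twice. A standalone sketch of that dedup rule on plain strings (the real code compares the text of two `NodeSeq`s):

    object CommentBlocks {
      // The expanded block is emitted only when it adds something beyond the summary.
      def render(short: String, full: String): String = {
        val shortHtml = s"""<p class="shortcomment cmt">$short</p>"""
        if (short.trim == full.trim) shortHtml
        else shortHtml + s"""<div class="fullcomment">$full</div>"""
      }

      def main(args: Array[String]): Unit = {
        println(render("Returns the size.", "Returns the size."))
        println(render("Returns the size.", "Returns the size. The size is computed lazily."))
      }
    }
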
-
- def memberToUseCaseCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq = {
- mbr match {
- case nte: NonTemplateMemberEntity if nte.isUseCase =>
- inlineToHtml(comment.Text("[use case] "))
- case _ => NodeSeq.Empty
- }
- }
-
- def memberToShortCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
- mbr.comment.fold(NodeSeq.Empty) { comment =>
- <p class="shortcomment cmt">{ memberToUseCaseCommentHtml(mbr, isSelf) }{ inlineToHtml(comment.short) }</p>
- }
-
- def memberToInlineCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
- <p class="comment cmt">{ inlineToHtml(mbr.comment.get.short) }</p>
-
- def memberToCommentBodyHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
- val s = universe.settings
-
- val memberComment =
- if (mbr.comment.isEmpty) NodeSeq.Empty
- else <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
-
- val paramComments = {
- val prs: List[ParameterEntity] = mbr match {
- case cls: Class => cls.typeParams ::: cls.valueParams.flatten
- case trt: Trait => trt.typeParams
- case dfe: Def => dfe.typeParams ::: dfe.valueParams.flatten
- case ctr: Constructor => ctr.valueParams.flatten
- case _ => Nil
- }
-
- def paramCommentToHtml(prs: List[ParameterEntity], comment: Comment): NodeSeq = prs match {
-
- case (tp: TypeParam) :: rest =>
- val paramEntry: NodeSeq = {
- <dt class="tparam">{ tp.name }</dt><dd class="cmt">{ bodyToHtml(comment.typeParams(tp.name)) }</dd>
- }
- paramEntry ++ paramCommentToHtml(rest, comment)
-
- case (vp: ValueParam) :: rest =>
- val paramEntry: NodeSeq = {
- <dt class="param">{ vp.name }</dt><dd class="cmt">{ bodyToHtml(comment.valueParams(vp.name)) }</dd>
- }
- paramEntry ++ paramCommentToHtml(rest, comment)
-
- case _ =>
- NodeSeq.Empty
- }
-
- mbr.comment.fold(NodeSeq.Empty) { comment =>
- val cmtedPrs = prs filter {
- case tp: TypeParam => comment.typeParams isDefinedAt tp.name
- case vp: ValueParam => comment.valueParams isDefinedAt vp.name
- }
- if (cmtedPrs.isEmpty && comment.result.isEmpty) NodeSeq.Empty
- else {
- <dl class="paramcmts block">{
- paramCommentToHtml(cmtedPrs, comment) ++ (
- comment.result match {
- case None => NodeSeq.Empty
- case Some(cmt) =>
- <dt>returns</dt><dd class="cmt">{ bodyToHtml(cmt) }</dd>
- })
- }</dl>
- }
- }
- }
-
- val implicitInformation = mbr.byConversion match {
- case Some(conv) =>
- <dt class="implicit">Implicit information</dt> ++
- {
- val targetType = typeToHtml(conv.targetType, true)
- val conversionMethod = conv.convertorMethod match {
- case Left(member) => Text(member.name)
- case Right(name) => Text(name)
- }
-
- // strip off the package object endings, they make things harder to follow
- val conversionOwnerQualifiedNane = conv.convertorOwner.qualifiedName.stripSuffix(".package")
- val conversionOwner = templateToHtml(conv.convertorOwner, conversionOwnerQualifiedNane)
-
- val constraintText = conv.constraints match {
- case Nil =>
- NodeSeq.Empty
- case List(constraint) =>
- scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ scala.xml.Text(".")
- case List(constraint1, constraint2) =>
- scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
- scala.xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ scala.xml.Text(".")
- case constraints =>
- <br/> ++ "This conversion will take place only if all of the following constraints are met:" ++ <br/> ++ {
- var index = 0
- constraints map { constraint => scala.xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
- }
- }
-
- <dd>
- This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, true) } to
- { targetType } performed by method { conversionMethod } in { conversionOwner }.
- { constraintText }
- </dd>
- } ++ {
- if (mbr.isShadowedOrAmbiguousImplicit) {
- // These are the members that are shadowing or ambiguating the current implicit
- // see ImplicitMemberShadowing trait for more information
- val shadowingSuggestion = {
- val params = mbr match {
- case d: Def => d.valueParams map (_ map (_ name) mkString("(", ", ", ")")) mkString
- case _ => "" // no parameters
- }
- <br/> ++ scala.xml.Text("To access this member you can use a ") ++
- <a href="http://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala"
- target="_blank">type ascription</a> ++ scala.xml.Text(":") ++
- <br/> ++ <div class="cmt"><pre>{"(" + Template.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
- }
-
- val shadowingWarning: NodeSeq =
- if (mbr.isShadowedImplicit)
- scala.xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
- "class.") ++ shadowingSuggestion
- else if (mbr.isAmbiguousImplicit)
- scala.xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
- "inherited members have similar signatures, so calling this member may produce an ambiguous " +
- "implicit conversion compiler error.") ++ shadowingSuggestion
- else NodeSeq.Empty
-
- <dt class="implicit">Shadowing</dt> ++
- <dd>{ shadowingWarning }</dd>
-
- } else NodeSeq.Empty
- }
- case _ =>
- NodeSeq.Empty
- }
-
- // --- start attributes block vals
- val attributes: NodeSeq = {
- val fvs: List[comment.Paragraph] = visibility(mbr).toList
- if (fvs.isEmpty || isReduced) NodeSeq.Empty
- else {
- <dt>Attributes</dt>
- <dd>{ fvs map { fv => { inlineToHtml(fv.text) ++ scala.xml.Text(" ") } } }</dd>
- }
- }
-
- val definitionClasses: NodeSeq = {
- val inDefTpls = mbr.inDefinitionTemplates
- if ((inDefTpls.tail.isEmpty && (inDefTpls.head == inTpl)) || isReduced) NodeSeq.Empty
- else {
- <dt>Definition Classes</dt>
-        <dd>{ templatesToHtml(inDefTpls, scala.xml.Text(" → ")) }</dd>
- }
- }
-
- val fullSignature: NodeSeq = {
- mbr match {
- case nte: NonTemplateMemberEntity if nte.isUseCase =>
- <div class="full-signature-block toggleContainer">
- <span class="toggle">Full Signature</span>
- <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,true) }</div>
- </div>
- case _ => NodeSeq.Empty
- }
- }
-
- val selfType: NodeSeq = mbr match {
- case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty && !isReduced) =>
- <dt>Self Type</dt>
- <dd>{ typeToHtml(dtpl.selfType.get, hasLinks = true) }</dd>
- case _ => NodeSeq.Empty
- }
-
- val annotations: NodeSeq = {
-      // A list of annotations which don't show their arguments, e.g. because they are shown separately.
- val annotationsWithHiddenArguments = List("deprecated", "Deprecated", "migration")
-
- def showArguments(annotation: Annotation) =
- !(annotationsWithHiddenArguments.contains(annotation.qualifiedName))
-
- if (!mbr.annotations.isEmpty) {
- <dt>Annotations</dt>
- <dd>{
- mbr.annotations.map { annot =>
- <xml:group>
- <span class="name">@{ templateToHtml(annot.annotationClass) }</span>{
- if (showArguments(annot)) argumentsToHtml(annot.arguments) else NodeSeq.Empty
- }
- </xml:group>
- }
- }
- </dd>
- } else NodeSeq.Empty
- }
-
- val sourceLink: NodeSeq = mbr match {
- case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) =>
- val (absFile, line) = dtpl.inSource.get
- <dt>Source</dt>
- <dd>{ <a href={ dtpl.sourceUrl.get.toString } target="_blank">{ Text(absFile.file.getName) }</a> }</dd>
- case _ => NodeSeq.Empty
- }
-
- val deprecation: NodeSeq =
- mbr.deprecation match {
- case Some(deprecation) if !isReduced =>
- <dt>Deprecated</dt>
- <dd class="cmt">{ bodyToHtml(deprecation) }</dd>
- case _ => NodeSeq.Empty
- }
-
- val migration: NodeSeq =
- mbr.migration match {
- case Some(migration) if !isReduced =>
- <dt>Migration</dt>
- <dd class="cmt">{ bodyToHtml(migration) }</dd>
- case _ => NodeSeq.Empty
- }
-
- val mainComment: NodeSeq = mbr.comment match {
- case Some(comment) if (! isReduced) =>
- def orEmpty[T](it: Iterable[T])(gen: =>NodeSeq): NodeSeq =
- if (it.isEmpty) NodeSeq.Empty else gen
-
- val example =
- orEmpty(comment.example) {
- <div class="block">Example{ if (comment.example.length > 1) "s" else ""}:
- <ol>{
- val exampleXml: List[NodeSeq] = for (ex <- comment.example) yield
- <li class="cmt">{ bodyToHtml(ex) }</li>
- exampleXml.reduceLeft(_ ++ Text(", ") ++ _)
- }</ol>
- </div>
- }
-
- val version: NodeSeq =
- orEmpty(comment.version) {
- <dt>Version</dt>
- <dd>{ for(body <- comment.version.toList) yield bodyToHtml(body) }</dd>
- }
-
- val sinceVersion: NodeSeq =
- orEmpty(comment.since) {
- <dt>Since</dt>
- <dd>{ for(body <- comment.since.toList) yield bodyToHtml(body) }</dd>
- }
-
- val note: NodeSeq =
- orEmpty(comment.note) {
- <dt>Note</dt>
- <dd>{
- val noteXml: List[NodeSeq] = for(note <- comment.note ) yield <span class="cmt">{bodyToHtml(note)}</span>
- noteXml.reduceLeft(_ ++ Text(", ") ++ _)
- }</dd>
- }
-
- val seeAlso: NodeSeq =
- orEmpty(comment.see) {
- <dt>See also</dt>
- <dd>{
- val seeXml: List[NodeSeq] = for(see <- comment.see ) yield <span class="cmt">{bodyToHtml(see)}</span>
- seeXml.reduceLeft(_ ++ _)
- }</dd>
- }
-
- val exceptions: NodeSeq =
- orEmpty(comment.throws) {
- <dt>Exceptions thrown</dt>
- <dd>{
- val exceptionsXml: List[NodeSeq] =
- for((name, body) <- comment.throws.toList.sortBy(_._1) ) yield
- <span class="cmt">{Text(name) ++ bodyToHtml(body)}</span>
- exceptionsXml.reduceLeft(_ ++ Text("") ++ _)
- }</dd>
- }
-
- val todo: NodeSeq =
- orEmpty(comment.todo) {
- <dt>To do</dt>
- <dd>{
- val todoXml: List[NodeSeq] = (for(todo <- comment.todo ) yield <span class="cmt">{bodyToHtml(todo)}</span> )
- todoXml.reduceLeft(_ ++ Text(", ") ++ _)
- }</dd>
- }
-
- example ++ version ++ sinceVersion ++ exceptions ++ todo ++ note ++ seeAlso
-
- case _ => NodeSeq.Empty
- }
- // end attributes block vals ---
-
- val attributesInfo = implicitInformation ++ attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
- val attributesBlock =
- if (attributesInfo.isEmpty)
- NodeSeq.Empty
- else
- <dl class="attributes block"> { attributesInfo }</dl>
-
- val linearization = mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.linearizationTemplates.nonEmpty =>
- <div class="toggleContainer block">
- <span class="toggle">Linear Supertypes</span>
- <div class="superTypes hiddenContent">{
- typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = scala.xml.Text(", "))
- }</div>
- </div>
- case _ => NodeSeq.Empty
- }
-
- val subclasses = mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.allSubClasses.nonEmpty =>
- <div class="toggleContainer block">
- <span class="toggle">Known Subclasses</span>
- <div class="subClasses hiddenContent">{
- templatesToHtml(dtpl.allSubClasses.sortBy(_.name), scala.xml.Text(", "))
- }</div>
- </div>
- case _ => NodeSeq.Empty
- }
-
- def createDiagram(f: DocTemplateEntity => Option[Diagram], description: String, id: String): NodeSeq =
- if (s.docDiagrams.value) mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced =>
- val diagram = f(dtpl)
- if (diagram.isDefined) {
- val s = universe.settings
- val diagramSvg = generator.generate(diagram.get, tpl, this)
- if (diagramSvg != NodeSeq.Empty) {
- <div class="toggleContainer block diagram-container" id={ id + "-container"}>
- <span class="toggle diagram-link">{ description }</span>
- <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#diagrams" target="_blank" class="diagram-help">Learn more about scaladoc diagrams</a>
- <div class="diagram" id={ id }>{
- diagramSvg
- }</div>
- </div>
- } else NodeSeq.Empty
- } else NodeSeq.Empty
- case _ => NodeSeq.Empty
- } else NodeSeq.Empty // diagrams not generated
-
- val typeHierarchy = createDiagram(_.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
- val contentHierarchy = createDiagram(_.contentDiagram, "Content Hierarchy", "content-diagram")
-
- memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
- }
-
- def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
- def bound0(bnd: Option[TypeEntity], pre: String): NodeSeq = bnd match {
- case None => NodeSeq.Empty
- case Some(tpe) => scala.xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
- }
- bound0(lo, " >: ") ++ bound0(hi, " <: ")
- }
-
- def visibility(mbr: MemberEntity): Option[comment.Paragraph] = {
- import comment._
- import comment.{ Text => CText }
- mbr.visibility match {
- case PrivateInInstance() =>
- Some(Paragraph(CText("private[this]")))
- case PrivateInTemplate(owner) if (owner == mbr.inTemplate) =>
- Some(Paragraph(CText("private")))
- case PrivateInTemplate(owner) =>
- Some(Paragraph(Chain(List(CText("private["), EntityLink(comment.Text(owner.qualifiedName), LinkToTpl(owner)), CText("]")))))
- case ProtectedInInstance() =>
- Some(Paragraph(CText("protected[this]")))
- case ProtectedInTemplate(owner) if (owner == mbr.inTemplate) =>
- Some(Paragraph(CText("protected")))
- case ProtectedInTemplate(owner) =>
- Some(Paragraph(Chain(List(CText("protected["), EntityLink(comment.Text(owner.qualifiedName), LinkToTpl(owner)), CText("]")))))
- case Public() =>
- None
- }
- }
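
Aside: `visibility` above turns the model's `Visibility` values into a comment `Paragraph` so the existing inline-to-HTML machinery can render it, linking the qualifier of `private[Owner]`/`protected[Owner]`. A standalone sketch of the same case analysis over a toy ADT, reduced to plain strings (the type names here are made up):

    object VisibilityText {
      sealed trait Vis
      case object Public extends Vis
      case object PrivateThis extends Vis
      final case class PrivateIn(owner: String, isOwn: Boolean) extends Vis
      final case class ProtectedIn(owner: String, isOwn: Boolean) extends Vis

      def label(v: Vis): Option[String] = v match {
        case Public                    => None                      // public members show nothing
        case PrivateThis               => Some("private[this]")
        case PrivateIn(_, true)        => Some("private")           // qualifier is the enclosing template
        case PrivateIn(owner, false)   => Some(s"private[$owner]")
        case ProtectedIn(_, true)      => Some("protected")
        case ProtectedIn(owner, false) => Some(s"protected[$owner]")
      }

      def main(args: Array[String]): Unit = {
        println(label(PrivateIn("scala.collection", isOwn = false))) // Some(private[scala.collection])
        println(label(Public))                                       // None
      }
    }
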
-
- /** name, tparams, params, result */
- def signature(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
- def inside(hasLinks: Boolean, nameLink: String = ""): NodeSeq =
- <xml:group>
- <span class="modifier_kind">
- <span class="modifier">{ mbr.flags.map(flag => inlineToHtml(flag.text) ++ scala.xml.Text(" ")) }</span>
- <span class="kind">{ kindToString(mbr) }</span>
- </span>
- <span class="symbol">
- {
- val nameClass =
- if (mbr.isImplicitlyInherited)
- if (mbr.isShadowedOrAmbiguousImplicit)
- "implicit shadowed"
- else
- "implicit"
- else
- "name"
-
- val nameHtml = {
- val value = if (mbr.isConstructor) tpl.name else mbr.name
- val span = if (mbr.deprecation.isDefined)
- <span class={ nameClass + " deprecated"} title={"Deprecated: "+bodyToStr(mbr.deprecation.get)}>{ value }</span>
- else
- <span class={ nameClass }>{ value }</span>
- val encoded = scala.reflect.NameTransformer.encode(value)
- if (encoded != value) {
- span % new UnprefixedAttribute("title",
- "gt4s: " + encoded +
- span.attribute("title").map(
- node => ". " + node
- ).getOrElse(""),
- scala.xml.Null)
- } else {
- span
- }
- }
- if (!nameLink.isEmpty)
- <a href={nameLink}>{nameHtml}</a>
- else nameHtml
- }{
- def tparamsToHtml(mbr: Any): NodeSeq = mbr match {
- case hk: HigherKinded =>
- val tpss = hk.typeParams
- if (tpss.isEmpty) NodeSeq.Empty else {
- def tparam0(tp: TypeParam): NodeSeq =
- <span name={ tp.name }>{ tp.variance + tp.name }{ tparamsToHtml(tp) }{ boundsToHtml(tp.hi, tp.lo, hasLinks)}</span>
- def tparams0(tpss: List[TypeParam]): NodeSeq = (tpss: @unchecked) match {
- case tp :: Nil => tparam0(tp)
- case tp :: tps => tparam0(tp) ++ Text(", ") ++ tparams0(tps)
- }
- <span class="tparams">[{ tparams0(tpss) }]</span>
- }
- case _ => NodeSeq.Empty
- }
- tparamsToHtml(mbr)
- }{
- if (isReduced) NodeSeq.Empty else {
- def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
- def param0(vl: ValueParam): NodeSeq =
-              // notice the }{ in the next lines; they are necessary to avoid undesired whitespace in the output
- <span name={ vl.name }>{
- Text(vl.name)
- }{ Text(": ") ++ typeToHtml(vl.resultType, hasLinks) }{
- vl.defaultValue match {
- case Some(v) => Text(" = ") ++ treeToHtml(v)
- case None => NodeSeq.Empty
- }
- }</span>
-
- def params0(vlss: List[ValueParam]): NodeSeq = vlss match {
- case Nil => NodeSeq.Empty
- case vl :: Nil => param0(vl)
- case vl :: vls => param0(vl) ++ Text(", ") ++ params0(vls)
- }
- def implicitCheck(vlss: List[ValueParam]): NodeSeq = vlss match {
- case vl :: vls => if(vl.isImplicit) { <span class="implicit">implicit </span> } else Text("")
- case _ => Text("")
- }
- vlsss map { vlss => <span class="params">({implicitCheck(vlss) ++ params0(vlss) })</span> }
- }
- mbr match {
- case cls: Class => paramsToHtml(cls.valueParams)
- case ctr: Constructor => paramsToHtml(ctr.valueParams)
- case dfe: Def => paramsToHtml(dfe.valueParams)
- case _ => NodeSeq.Empty
- }
- }
- }{ if (isReduced) NodeSeq.Empty else {
- mbr match {
- case tme: MemberEntity if (tme.isDef || tme.isVal || tme.isLazyVal || tme.isVar) =>
- <span class="result">: { typeToHtml(tme.resultType, hasLinks) }</span>
-
- case abt: MemberEntity with AbstractType =>
- val b2s = boundsToHtml(abt.hi, abt.lo, hasLinks)
- if (b2s != NodeSeq.Empty)
- <span class="result">{ b2s }</span>
- else NodeSeq.Empty
-
- case alt: MemberEntity with AliasType =>
- <span class="result"> = { typeToHtml(alt.alias, hasLinks) }</span>
-
- case tpl: MemberTemplateEntity if !tpl.parentTypes.isEmpty =>
- <span class="result"> extends { typeToHtml(tpl.parentTypes.map(_._2), hasLinks) }</span>
-
- case _ => NodeSeq.Empty
- }
- }}
- </span>
- </xml:group>
- mbr match {
- case dte: DocTemplateEntity if !isSelf =>
- <h4 class="signature">{ inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }</h4>
- case _ if isSelf =>
- <h4 id="signature" class="signature">{ inside(hasLinks = true) }</h4>
- case _ =>
- <h4 class="signature">{ inside(hasLinks = true) }</h4>
- }
-
- }
-
- /** */
- def treeToHtml(tree: TreeEntity): NodeSeq = {
-
-    /** Makes the text presentable in the HTML page: newlines and basic indentation.
-     * Change this function to improve the pretty-printing of default values.
-     */
- def codeStringToXml(text: String): NodeSeq = {
- var goodLookingXml: NodeSeq = NodeSeq.Empty
- var indent = 0
- for (c <- text) c match {
- case '{' => indent+=1
- goodLookingXml ++= Text("{")
- case '}' => indent-=1
- goodLookingXml ++= Text("}")
- case '\n' =>
- goodLookingXml++= <br/> ++ indentation
- case _ => goodLookingXml ++= Text(c.toString)
- }
- def indentation:NodeSeq = {
- var indentXml = NodeSeq.Empty
- for (x <- 1 to indent) indentXml ++= Text("&nbsp;&nbsp;")
- indentXml
- }
- goodLookingXml
- }
-
- var index = 0
- val str = tree.expression
- val length = str.length
- var myXml: NodeSeq = NodeSeq.Empty
- for ((from, (member, to)) <- tree.refEntity.toSeq) {
- if (index < from) {
- myXml ++= codeStringToXml(str.substring(index,from))
- index = from
- }
- if (index == from) {
- member match {
- case mbr: DocTemplateEntity =>
- val link = relativeLinkTo(mbr)
- myXml ++= <span class="name"><a href={link}>{str.substring(from, to)}</a></span>
- case mbr: MemberEntity =>
- val anchor = "#" + mbr.signature
- val link = relativeLinkTo(mbr.inTemplate)
- myXml ++= <span class="name"><a href={link ++ anchor}>{str.substring(from, to)}</a></span>
- }
- index = to
- }
- }
-
- if (index <= length-1)
- myXml ++= codeStringToXml(str.substring(index, length ))
-
- if (length < 36)
- <span class="symbol">{ myXml }</span>
- else
- <span class="defval" name={ myXml }>{ "..." }</span>
- }
-
- private def argumentsToHtml(argss: List[ValueArgument]): NodeSeq = {
- def argumentsToHtml0(argss: List[ValueArgument]): NodeSeq = argss match {
- case Nil => NodeSeq.Empty
- case arg :: Nil => argumentToHtml(arg)
- case arg :: args => argumentToHtml(arg) ++ scala.xml.Text(", ") ++ argumentsToHtml0(args)
- }
- <span class="args">({ argumentsToHtml0(argss) })</span>
- }
-
- private def argumentToHtml(arg: ValueArgument): NodeSeq = {
- <span>
- {
- arg.parameter match {
- case Some(param) => Text(param.name + " = ")
- case None => NodeSeq.Empty
- }
- }
- { treeToHtml(arg.value) }
- </span>
- }
-
- private def bodyToStr(body: comment.Body): String =
- body.blocks flatMap (blockToStr(_)) mkString ""
-
- private def blockToStr(block: comment.Block): String = block match {
- case comment.Paragraph(in) => inlineToStr(in)
- case _ => block.toString
- }
-
- private def inlineToStr(inl: comment.Inline): String = inl match {
- case comment.Chain(items) => items flatMap (inlineToStr(_)) mkString ""
- case comment.Italic(in) => inlineToStr(in)
- case comment.Bold(in) => inlineToStr(in)
- case comment.Underline(in) => inlineToStr(in)
- case comment.Monospace(in) => inlineToStr(in)
- case comment.Text(text) => text
- case comment.Summary(in) => inlineToStr(in)
- case _ => inl.toString
- }
-
- private def typeToHtmlWithStupidTypes(tpl: TemplateEntity, superTpl: TemplateEntity, superType: TypeEntity): NodeSeq =
- if (tpl.universe.settings.useStupidTypes.value)
- superTpl match {
- case dtpl: DocTemplateEntity =>
- val sig = signature(dtpl, false, true) \ "_"
- sig
- case tpl: TemplateEntity =>
- Text(tpl.name)
- }
- else
- typeToHtml(superType, true)
-
- private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
- case ktcc: KnownTypeClassConstraint =>
- scala.xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
- templateToHtml(ktcc.typeClassEntity) ++ scala.xml.Text(")")
- case tcc: TypeClassConstraint =>
- scala.xml.Text(tcc.typeParamName + " is ") ++
- <a href="http://stackoverflow.com/questions/2982276/what-is-a-context-bound-in-scala" target="_blank">
- context-bounded</a> ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
- templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")")
- case impl: ImplicitInScopeConstraint =>
- scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ scala.xml.Text(" is in scope")
- case eq: EqualTypeParamConstraint =>
- scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
- typeToHtml(eq.rhs, true) ++ scala.xml.Text(")")
- case bt: BoundedTypeParamConstraint =>
- scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
- bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
- typeToHtml(bt.lowerBound, true) ++ scala.xml.Text(" <: ") ++
- typeToHtml(bt.upperBound, true) ++ scala.xml.Text(")")
- case lb: LowerBoundedTypeParamConstraint =>
- scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
- typeToHtml(lb.lowerBound, true) ++ scala.xml.Text(")")
- case ub: UpperBoundedTypeParamConstraint =>
- scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
- typeToHtml(ub.upperBound, true) ++ scala.xml.Text(")")
- }
-}
-
-object Template {
- /* Vlad: Lesson learned the hard way: don't put any stateful code that references the model here,
- * it won't be garbage collected and you'll end up filling the heap with garbage */
-
- def lowerFirstLetter(s: String) = if (s.length >= 1) s.substring(0,1).toLowerCase() + s.substring(1) else s
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
deleted file mode 100644
index 61c1819d11..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-package scala.tools.nsc
-package doc
-package html
-package page
-package diagram
-
-import scala.xml.NodeSeq
-import scala.tools.nsc.doc.html.HtmlPage
-import scala.tools.nsc.doc.model.diagram.Diagram
-import scala.tools.nsc.doc.model.DocTemplateEntity
-
-trait DiagramGenerator {
-
- /**
- * Generates a visualization of the internal representation
- * of a diagram.
- *
- * @param d The model of the diagram
- * @param p The page the diagram will be embedded in (needed for link generation)
- * @return The HTML to be embedded in the Scaladoc page
- */
- def generate(d: Diagram, t: DocTemplateEntity, p: HtmlPage):NodeSeq
-}
-
-object DiagramGenerator {
-
- // TODO: This is tailored towards the dot generator, since it's the only generator. In the future it should be more
- // general.
-
- private[this] var dotRunner: DotRunner = null
- private[this] var settings: doc.Settings = null
-
- def initialize(s: doc.Settings) =
- settings = s
-
- def getDotRunner() = {
- if (dotRunner == null)
- dotRunner = new DotRunner(settings)
- dotRunner
- }
-
- def cleanup() = {
- DiagramStats.printStats(settings)
- if (dotRunner != null) {
- dotRunner.cleanup()
- dotRunner = null
- }
- }
-} \ No newline at end of file
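
As a reference for how the object above is meant to be driven, here is a self-contained sketch of the same initialize / get-lazily / cleanup lifecycle. The Config and Runner types are illustrative stand-ins, not scaladoc API:

object LazyRunnerDemo {
  final case class Config(dotPath: String)                // stand-in for doc.Settings
  final class Runner(config: Config) {
    def run(job: String): String = "ran '" + job + "' via " + config.dotPath
    def cleanup(): Unit = println("runner shut down")
  }

  private[this] var config: Config = null
  private[this] var runner: Runner = null

  def initialize(c: Config): Unit = config = c            // store the settings up front
  def getRunner(): Runner = {                             // build the runner lazily, on first use
    if (runner == null) runner = new Runner(config)
    runner
  }
  def cleanup(): Unit =                                   // drop the reference when done
    if (runner != null) { runner.cleanup(); runner = null }

  def main(args: Array[String]): Unit = {
    initialize(Config("/usr/bin/dot"))
    println(getRunner().run("class diagram"))
    cleanup()
  }
}
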
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
deleted file mode 100644
index ec00cace75..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * @author Vlad Ureche
- */
-package scala.tools.nsc.doc
-package html.page.diagram
-
-object DiagramStats {
-
- class TimeTracker(title: String) {
- var totalTime: Long = 0L
- var maxTime: Long = 0L
- var instances: Int = 0
-
- def addTime(ms: Long) = {
- if (maxTime < ms)
- maxTime = ms
- totalTime += ms
- instances += 1
- }
-
- def printStats(print: String => Unit) = {
- if (instances == 0)
- print(title + ": no stats gathered")
- else {
- print(" " + title)
- print(" " + "=" * title.length)
- print(" count: " + instances + " items")
- print(" total time: " + totalTime + " ms")
- print(" average time: " + (totalTime/instances) + " ms")
- print(" maximum time: " + maxTime + " ms")
- print("")
- }
- }
- }
-
- private[this] val filterTrack = new TimeTracker("diagrams model filtering")
- private[this] val modelTrack = new TimeTracker("diagrams model generation")
- private[this] val dotGenTrack = new TimeTracker("dot diagram generation")
- private[this] val dotRunTrack = new TimeTracker("dot process running")
- private[this] val svgTrack = new TimeTracker("svg processing")
- private[this] var brokenImages = 0
- private[this] var fixedImages = 0
-
- def printStats(settings: Settings) = {
- if (settings.docDiagramsDebug.value) {
- settings.printMsg("\nDiagram generation running time breakdown:\n")
- filterTrack.printStats(settings.printMsg)
- modelTrack.printStats(settings.printMsg)
- dotGenTrack.printStats(settings.printMsg)
- dotRunTrack.printStats(settings.printMsg)
- svgTrack.printStats(settings.printMsg)
- println(" Broken images: " + brokenImages)
- println(" Fixed images: " + fixedImages)
- println("")
- }
- }
-
- def addFilterTime(ms: Long) = filterTrack.addTime(ms)
- def addModelTime(ms: Long) = modelTrack.addTime(ms)
- def addDotGenerationTime(ms: Long) = dotGenTrack.addTime(ms)
- def addDotRunningTime(ms: Long) = dotRunTrack.addTime(ms)
- def addSvgTime(ms: Long) = svgTrack.addTime(ms)
-
- def addBrokenImage(): Unit = brokenImages += 1
- def addFixedImage(): Unit = fixedImages += 1
-} \ No newline at end of file
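
The generator and runner code further down in this diff feeds these trackers with a small timing idiom: start a counter at minus the current time, do the work, then add the current time back, so the variable ends up holding the elapsed milliseconds. A self-contained sketch of that idiom against a simplified stand-in tracker (illustrative only, not the TimeTracker above):

object TimeTrackerDemo {
  class Tracker(title: String) {
    private var total = 0L
    private var max = 0L
    private var count = 0
    def addTime(ms: Long): Unit = { if (ms > max) max = ms; total += ms; count += 1 }
    def printStats(print: String => Unit): Unit =
      if (count == 0) print(title + ": no stats gathered")
      else print(s"$title: count=$count total=${total}ms avg=${total / count}ms max=${max}ms")
  }

  def main(args: Array[String]): Unit = {
    val track = new Tracker("dot diagram generation")
    for (work <- Seq(12L, 40L, 7L)) {
      var t = -System.currentTimeMillis   // the timing idiom used by the generator code
      Thread.sleep(work)                  // ... the timed work ...
      t += System.currentTimeMillis       // t now holds the elapsed milliseconds
      track.addTime(t)
    }
    track.printStats(println)
  }
}
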
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
deleted file mode 100644
index 847367838c..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ /dev/null
@@ -1,511 +0,0 @@
-/**
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-package scala.tools.nsc
-package doc
-package html
-package page
-package diagram
-
-import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute}
-import scala.collection.immutable._
-import javax.xml.parsers.SAXParser
-import model._
-import model.diagram._
-
-class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
-
- // the page where the diagram will be embedded
- private var page: HtmlPage = null
- // path to the "lib" folder relative to the page
- private var pathToLib: String = null
- // maps nodes to unique indices
- private var node2Index: Map[Node, Int] = null
- // maps an index to its corresponding node
- private var index2Node: Map[Int, Node] = null
- // true if the current diagram is a class diagram
- private var isInheritanceDiagram = false
- // incoming implicit nodes (needed for determining the CSS class of a node)
- private var incomingImplicitNodes: List[Node] = List()
- // the suffix used when there are too many classes to show
- private final val MultiSuffix = " classes/traits"
- // used to generate unique node and edge ids (i.e. avoid conflicts with multiple diagrams)
- private var counter = 0
-
- def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = {
- counter = counter + 1;
- this.page = page
- pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/"
- val dot = generateDot(diagram)
- val result = generateSVG(dot, template)
- // clean things up a bit, so we don't leave garbage on the heap
- this.page = null
- node2Index = null
- index2Node = null
- incomingImplicitNodes = List()
- result
- }
-
- /**
- * Generates a dot string for a given diagram.
- */
- private def generateDot(d: Diagram) = {
- // inheritance nodes (all nodes except thisNode and implicit nodes)
- var nodes: List[Node] = null
- // inheritance edges (all edges except implicit edges)
- var edges: List[(Node, List[Node])] = null
-
- // timing
- var tDot = -System.currentTimeMillis
-
- // variables specific to class diagrams:
- // current node of a class diagram
- var thisNode:Node = null
- var subClasses = List[Node]()
- var superClasses = List[Node]()
- var incomingImplicits = List[Node]()
- var outgoingImplicits = List[Node]()
- isInheritanceDiagram = false
-
- d match {
- case InheritanceDiagram(_thisNode, _superClasses, _subClasses, _incomingImplicits, _outgoingImplicits) =>
-
- def textTypeEntity(text: String) =
- new TypeEntity {
- val name = text
- def refEntity: SortedMap[Int, (base.LinkTo, Int)] = SortedMap()
- }
-
- // it seems dot chokes on node names over 8000 chars, so let's limit the size of the string;
- // conservatively, we'll cap it at 4000, just to be sure:
- def limitSize(str: String) = if (str.length > 4000) str.substring(0, 3996) + " ..." else str
-
- // avoid overcrowding the diagram:
- // if there are too many super / sub / implicit nodes, represent
- // them by one node with a corresponding tooltip
- superClasses = if (_superClasses.length > settings.docDiagramsMaxNormalClasses.value) {
- val superClassesTooltip = Some(limitSize(_superClasses.map(_.tpe.name).mkString(", ")))
- List(NormalNode(textTypeEntity(_superClasses.length + MultiSuffix), None)(superClassesTooltip))
- } else _superClasses
-
- subClasses = if (_subClasses.length > settings.docDiagramsMaxNormalClasses.value) {
- val subClassesTooltip = Some(limitSize(_subClasses.map(_.tpe.name).mkString(", ")))
- List(NormalNode(textTypeEntity(_subClasses.length + MultiSuffix), None)(subClassesTooltip))
- } else _subClasses
-
- incomingImplicits = if (_incomingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
- val incomingImplicitsTooltip = Some(limitSize(_incomingImplicits.map(_.tpe.name).mkString(", ")))
- List(ImplicitNode(textTypeEntity(_incomingImplicits.length + MultiSuffix), None)(incomingImplicitsTooltip))
- } else _incomingImplicits
-
- outgoingImplicits = if (_outgoingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
- val outgoingImplicitsTooltip = Some(limitSize(_outgoingImplicits.map(_.tpe.name).mkString(", ")))
- List(ImplicitNode(textTypeEntity(_outgoingImplicits.length + MultiSuffix), None)(outgoingImplicitsTooltip))
- } else _outgoingImplicits
-
- thisNode = _thisNode
- nodes = List()
- edges = (thisNode -> superClasses) :: subClasses.map(_ -> List(thisNode))
- node2Index = (thisNode::subClasses:::superClasses:::incomingImplicits:::outgoingImplicits).zipWithIndex.toMap
- isInheritanceDiagram = true
- incomingImplicitNodes = incomingImplicits
- case _ =>
- nodes = d.nodes
- edges = d.edges
- node2Index = d.nodes.zipWithIndex.toMap
- incomingImplicitNodes = List()
- }
- index2Node = node2Index map {_.swap}
-
- val implicitsDot = {
- if (!isInheritanceDiagram) ""
- else {
- // dot cluster containing thisNode
- val thisCluster = "subgraph clusterThis {\n" +
- "style=\"invis\"\n" +
- node2Dot(thisNode) +
- "}"
- // dot cluster containing incoming implicit nodes, if any
- val incomingCluster = {
- if(incomingImplicits.isEmpty) ""
- else "subgraph clusterIncoming {\n" +
- "style=\"invis\"\n" +
- incomingImplicits.reverse.map(n => node2Dot(n)).mkString +
- (if (incomingImplicits.size > 1)
- incomingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
- " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
- else "") +
- "}"
- }
- // dot cluster containing outgoing implicit nodes, if any
- val outgoingCluster = {
- if(outgoingImplicits.isEmpty) ""
- else "subgraph clusterOutgoing {\n" +
- "style=\"invis\"\n" +
- outgoingImplicits.reverse.map(n => node2Dot(n)).mkString +
- (if (outgoingImplicits.size > 1)
- outgoingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
- " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
- else "") +
- "}"
- }
-
- // assemble clusters into another cluster
- val incomingTooltip = incomingImplicits.map(_.name).mkString(", ") + " can be implicitly converted to " + thisNode.name
- val outgoingTooltip = thisNode.name + " can be implicitly converted to " + outgoingImplicits.map(_.name).mkString(", ")
- "subgraph clusterAll {\n" +
- "style=\"invis\"\n" +
- outgoingCluster + "\n" +
- thisCluster + "\n" +
- incomingCluster + "\n" +
- // incoming implicit edge
- (if (!incomingImplicits.isEmpty) {
- val n = incomingImplicits.last
- "node" + node2Index(n) +" -> node" + node2Index(thisNode) +
- " [id=\"" + cssClass(n, thisNode) + "|" + node2Index(n) + "_" + node2Index(thisNode) + "\", tooltip=\"" + incomingTooltip + "\"" +
- ", constraint=\"false\", minlen=\"2\", ltail=\"clusterIncoming\", lhead=\"clusterThis\", label=\"implicitly\"];\n"
- } else "") +
- // outgoing implicit edge
- (if (!outgoingImplicits.isEmpty) {
- val n = outgoingImplicits.head
- "node" + node2Index(thisNode) + " -> node" + node2Index(n) +
- " [id=\"" + cssClass(thisNode, n) + "|" + node2Index(thisNode) + "_" + node2Index(n) + "\", tooltip=\"" + outgoingTooltip + "\"" +
- ", constraint=\"false\", minlen=\"2\", ltail=\"clusterThis\", lhead=\"clusterOutgoing\", label=\"implicitly\"];\n"
- } else "") +
- "}"
- }
- }
-
- // assemble graph
- val graph = "digraph G {\n" +
- // graph / node / edge attributes
- graphAttributesStr +
- "node [" + nodeAttributesStr + "];\n" +
- "edge [" + edgeAttributesStr + "];\n" +
- implicitsDot + "\n" +
- // inheritance nodes
- nodes.map(n => node2Dot(n)).mkString +
- subClasses.map(n => node2Dot(n)).mkString +
- superClasses.map(n => node2Dot(n)).mkString +
- // inheritance edges
- edges.map{ case (from, tos) => tos.map(to => {
- val id = "graph" + counter + "_" + node2Index(to) + "_" + node2Index(from)
- // the X -> Y edge is inverted twice to keep the diagram flowing the right way
- // that is, an edge from node X to Y will result in a dot instruction nodeY -> nodeX [dir="back"]
- "node" + node2Index(to) + " -> node" + node2Index(from) +
- " [id=\"" + cssClass(to, from) + "|" + id + "\", " +
- "tooltip=\"" + from.name + (if (from.name.endsWith(MultiSuffix)) " are subtypes of " else " is a subtype of ") +
- to.name + "\", dir=\"back\", arrowtail=\"empty\"];\n"
- }).mkString}.mkString +
- "}"
-
- tDot += System.currentTimeMillis
- DiagramStats.addDotGenerationTime(tDot)
-
- graph
- }
-
- /**
- * Generates the dot string of a given node.
- */
- private def node2Dot(node: Node) = {
-
- // escape HTML characters in node names
- def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
-
- // assemble node attributes in a map
- var attr = scala.collection.mutable.Map[String, String]()
-
- // link
- node.doctpl match {
- case Some(tpl) => attr += "URL" -> (page.relativeLinkTo(tpl) + "#inheritance-diagram")
- case _ =>
- }
-
- // tooltip
- node.tooltip match {
- case Some(text) => attr += "tooltip" -> text
- // show full name where available (instead of TraversableOps[A] show scala.collection.parallel.TraversableOps[A])
- case None if node.tpl.isDefined => attr += "tooltip" -> node.tpl.get.qualifiedName
- case _ =>
- }
-
- // styles
- if(node.isImplicitNode)
- attr ++= implicitStyle
- else if(node.isOutsideNode)
- attr ++= outsideStyle
- else if(node.isTraitNode)
- attr ++= traitStyle
- else if(node.isClassNode)
- attr ++= classStyle
- else if(node.isObjectNode)
- attr ++= objectStyle
- else if(node.isTypeNode)
- attr ++= typeStyle
- else
- attr ++= defaultStyle
-
- // HTML label
- var name = escape(node.name)
- var img = ""
- if(node.isTraitNode)
- img = "trait_diagram.png"
- else if(node.isClassNode)
- img = "class_diagram.png"
- else if(node.isObjectNode)
- img = "object_diagram.png"
- else if(node.isTypeNode)
- img = "type_diagram.png"
-
- if(!img.equals("")) {
- img = "<TD><IMG SCALE=\"TRUE\" SRC=\"" + settings.outdir.value + "/lib/" + img + "\" /></TD>"
- name = name + " "
- }
- val label = "<<TABLE BORDER=\"0\" CELLBORDER=\"0\">" +
- "<TR>" + img + "<TD VALIGN=\"MIDDLE\">" + name + "</TD></TR>" +
- "</TABLE>>"
-
- // dot does not allow specifying a CSS class, so we
- // set the id to "{class}|{id}", which is picked apart in
- // the transform method
- val id = "graph" + counter + "_" + node2Index(node)
- attr += ("id" -> (cssClass(node) + "|" + id))
-
- // return dot string
- "node" + node2Index(node) + " [label=" + label + "," + flatten(attr.toMap) + "];\n"
- }
-
- /**
- * Returns the CSS class for an edge connecting node1 and node2.
- */
- private def cssClass(node1: Node, node2: Node): String = {
- if (node1.isImplicitNode && node2.isThisNode)
- "implicit-incoming"
- else if (node1.isThisNode && node2.isImplicitNode)
- "implicit-outgoing"
- else
- "inheritance"
- }
-
- /**
- * Returns the CSS class for a node.
- */
- private def cssClass(node: Node): String =
- if (node.isImplicitNode && incomingImplicitNodes.contains(node))
- "implicit-incoming" + cssBaseClass(node, "", " ")
- else if (node.isImplicitNode)
- "implicit-outgoing" + cssBaseClass(node, "", " ")
- else if (node.isThisNode)
- "this" + cssBaseClass(node, "", " ")
- else if (node.isOutsideNode)
- "outside" + cssBaseClass(node, "", " ")
- else
- cssBaseClass(node, "default", "")
-
- private def cssBaseClass(node: Node, default: String, space: String) =
- if (node.isClassNode)
- space + "class"
- else if (node.isTraitNode)
- space + "trait"
- else if (node.isObjectNode)
- space + "object"
- else if (node.isTypeNode)
- space + "type"
- else
- default
-
- /**
- * Calls dot with a given dot string and returns the SVG output.
- */
- private def generateSVG(dotInput: String, template: DocTemplateEntity) = {
- val dotOutput = DiagramGenerator.getDotRunner.feedToDot(dotInput, template)
- var tSVG = -System.currentTimeMillis
-
- val result = if (dotOutput != null) {
- val src = scala.io.Source.fromString(dotOutput);
- try {
- val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false)
- val doc = cpa.document()
- if (doc != null)
- transform(doc.docElem)
- else
- NodeSeq.Empty
- } catch {
- case exc: Exception =>
- if (settings.docDiagramsDebug.value) {
- settings.printMsg("\n\n**********************************************************************")
- settings.printMsg("Encountered an error while generating page for " + template.qualifiedName)
- settings.printMsg(dotInput.toString.split("\n").mkString("\nDot input:\n\t","\n\t",""))
- settings.printMsg(dotOutput.toString.split("\n").mkString("\nDot output:\n\t","\n\t",""))
- settings.printMsg(exc.getStackTrace.mkString("\nException: " + exc.toString + ":\n\tat ", "\n\tat ",""))
- settings.printMsg("\n\n**********************************************************************")
- } else {
- settings.printMsg("\nThe diagram for " + template.qualifiedName + " could not be created due to an internal error.")
- settings.printMsg("Use " + settings.docDiagramsDebug.name + " for more information and please file this as a bug.")
- }
- NodeSeq.Empty
- }
- } else
- NodeSeq.Empty
-
- tSVG += System.currentTimeMillis
- DiagramStats.addSvgTime(tSVG)
-
- result
- }
-
- /**
- * Transforms the SVG generated by dot:
- * - adds a class attribute to the SVG element
- * - changes the path of the node images from absolute to relative
- * - assigns id and class attributes to nodes and edges
- * - removes title elements
- */
- private def transform(e:scala.xml.Node): scala.xml.Node = e match {
- // add an id and class attribute to the SVG element
- case Elem(prefix, "svg", attribs, scope, child @ _*) => {
- val klass = if (isInheritanceDiagram) "class-diagram" else "package-diagram"
- Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) %
- new UnprefixedAttribute("id", "graph" + counter, Null) %
- new UnprefixedAttribute("class", klass, Null)
- }
- // change the path of the node images from absolute to relative
- case img @ <image></image> => {
- val href = (img \ "@{http://www.w3.org/1999/xlink}href").toString
- val file = href.substring(href.lastIndexOf("/") + 1, href.size)
- img.asInstanceOf[Elem] %
- new PrefixedAttribute("xlink", "href", pathToLib + file, Null)
- }
- // assign id and class attributes to edges and nodes:
- // the id attribute generated by dot has the format: "{class}|{id}"
- case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => {
- var res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*)
- val dotId = (g \ "@id").toString
- if (dotId.count(_ == '|') == 1) {
- val Array(klass, id) = dotId.toString.split("\\|")
- /* Sometimes dot "forgets" to add the image -- that's very annoying, but it seems pretty random, and simple
- * tests like executing it 20K times and diffing the output don't trigger the bug -- so it's up to us to place the image
- * back in the node */
- val kind = getKind(klass)
- if (kind != "")
- if (((g \ "a" \ "image").isEmpty)) {
- DiagramStats.addBrokenImage()
- val xposition = getPosition(g, "x", -22)
- val yposition = getPosition(g, "y", -11.3334)
- if (xposition.isDefined && yposition.isDefined) {
- val imageNode = <image xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href={ ("./lib/" + kind + "_diagram.png") } width="16px" height="16px" preserveAspectRatio="xMinYMin meet" x={ xposition.get.toString } y={ yposition.get.toString }/>
- val anchorNode = (g \ "a") match {
- case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) =>
- transform(new Elem(prefix, "a", attribs, scope, (children ++ imageNode): _*))
- case _ =>
- g \ "a"
- }
- res = new Elem(prefix, "g", attribs, scope, anchorNode: _*)
- DiagramStats.addFixedImage()
- }
- }
- res % new UnprefixedAttribute("id", id, Null) %
- new UnprefixedAttribute("class", (g \ "@class").toString + " " + klass, Null)
- }
- else res
- }
- // remove titles
- case <title>{ _* }</title> =>
- scala.xml.Text("")
- // apply recursively
- case Elem(prefix, label, attribs, scope, child @ _*) =>
- Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*)
- case x => x
- }
-
- def getKind(klass: String): String =
- if (klass.contains("class")) "class"
- else if (klass.contains("trait")) "trait"
- else if (klass.contains("object")) "object"
- else ""
-
- def getPosition(g: scala.xml.Node, axis: String, offset: Double): Option[Double] = {
- val node = g \ "a" \ "text" \ ("@" + axis)
- if (node.isEmpty)
- None
- else
- Some(node.toString.toDouble + offset)
- }
-
- /* graph / node / edge attributes */
-
- private val graphAttributes: Map[String, String] = Map(
- "compound" -> "true",
- "rankdir" -> "TB"
- )
-
- private val nodeAttributes = Map(
- "shape" -> "rectangle",
- "style" -> "filled",
- "penwidth" -> "1",
- "margin" -> "0.08,0.01",
- "width" -> "0.0",
- "height" -> "0.0",
- "fontname" -> "Arial",
- "fontsize" -> "10.00"
- )
-
- private val edgeAttributes = Map(
- "color" -> "#d4d4d4",
- "arrowsize" -> "0.5",
- "fontcolor" -> "#aaaaaa",
- "fontsize" -> "10.00",
- "fontname" -> "Arial"
- )
-
- private val defaultStyle = Map(
- "color" -> "#ababab",
- "fillcolor" -> "#e1e1e1",
- "fontcolor" -> "#7d7d7d",
- "margin" -> "0.1,0.04"
- )
-
- private val implicitStyle = Map(
- "color" -> "#ababab",
- "fillcolor" -> "#e1e1e1",
- "fontcolor" -> "#7d7d7d"
- )
-
- private val outsideStyle = Map(
- "color" -> "#ababab",
- "fillcolor" -> "#e1e1e1",
- "fontcolor" -> "#7d7d7d"
- )
-
- private val traitStyle = Map(
- "color" -> "#37657D",
- "fillcolor" -> "#498AAD",
- "fontcolor" -> "#ffffff"
- )
-
- private val classStyle = Map(
- "color" -> "#115F3B",
- "fillcolor" -> "#0A955B",
- "fontcolor" -> "#ffffff"
- )
-
- private val objectStyle = Map(
- "color" -> "#102966",
- "fillcolor" -> "#3556a7",
- "fontcolor" -> "#ffffff"
- )
-
- private val typeStyle = Map(
- "color" -> "#115F3B",
- "fillcolor" -> "#0A955B",
- "fontcolor" -> "#ffffff"
- )
-
- private def flatten(attributes: Map[String, String]) = attributes.map{ case (key, value) => key + "=\"" + value + "\"" }.mkString(", ")
-
- private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString
- private val nodeAttributesStr = flatten(nodeAttributes)
- private val edgeAttributesStr = flatten(edgeAttributes)
-}
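
Two conventions from the generator above are easy to miss: attribute maps are flattened into dot's key="value" syntax, and since dot has no notion of CSS classes, the class is encoded into the element id as "{class}|{id}" and split back apart when the SVG is post-processed in transform. A small self-contained sketch of both (demo code only, not part of the scaladoc sources):

object DotConventionsDemo {
  // same shape as the private flatten helper above
  def flatten(attributes: Map[String, String]): String =
    attributes.map { case (key, value) => key + "=\"" + value + "\"" }.mkString(", ")

  def main(args: Array[String]): Unit = {
    println(flatten(Map("shape" -> "rectangle", "style" -> "filled")))
    // prints: shape="rectangle", style="filled"

    val dotId = "inheritance|graph1_2_0"     // the "{class}|{id}" encoding used for edges
    if (dotId.count(_ == '|') == 1) {
      val Array(klass, id) = dotId.split("\\|")
      println("css class = " + klass + ", element id = " + id)
      // prints: css class = inheritance, element id = graph1_2_0
    }
  }
}
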
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
deleted file mode 100644
index 5cdd5c74a4..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
+++ /dev/null
@@ -1,228 +0,0 @@
-package scala.tools.nsc
-package doc
-package html
-package page
-package diagram
-
-import java.io.InputStream
-import java.io.OutputStream
-import java.io.InputStreamReader
-import java.io.OutputStreamWriter
-import java.io.BufferedWriter
-import java.io.BufferedReader
-import java.io.IOException
-import scala.sys.process._
-import scala.concurrent.SyncVar
-
-import model._
-import model.diagram._
-
-/** This class takes care of running the graphviz dot utility */
-class DotRunner(settings: doc.Settings) {
-
- private[this] var dotRestarts = 0
- private[this] var dotProcess: DotProcess = null
-
- def feedToDot(dotInput: String, template: DocTemplateEntity): String = {
-
- if (dotProcess == null) {
- if (dotRestarts < settings.docDiagramsDotRestart.value) {
- if (dotRestarts != 0)
- settings.printMsg("Graphviz will be restarted...\n")
- dotRestarts += 1
- dotProcess = new DotProcess(settings)
- } else
- return null
- }
-
- val tStart = System.currentTimeMillis
- val result = dotProcess.feedToDot(dotInput, template.qualifiedName)
- val tFinish = System.currentTimeMillis
- DiagramStats.addDotRunningTime(tFinish - tStart)
-
- if (result == null) {
- dotProcess.cleanup()
- dotProcess = null
- if (dotRestarts == settings.docDiagramsDotRestart.value) {
- settings.printMsg("\n")
- settings.printMsg("**********************************************************************")
- settings.printMsg("Diagrams will be disabled for this run because the graphviz dot tool")
- settings.printMsg("has malfunctioned too many times. These scaladoc flags may help:")
- settings.printMsg("")
- val baseList = List(settings.docDiagramsDebug,
- settings.docDiagramsDotPath,
- settings.docDiagramsDotRestart,
- settings.docDiagramsDotTimeout)
- val width = (baseList map (_.helpSyntax.length)).max
- def helpStr(s: doc.Settings#Setting) = ("%-" + width + "s") format (s.helpSyntax) + " " + s.helpDescription
- baseList.foreach((sett: doc.Settings#Setting) => settings.printMsg(helpStr(sett)))
- settings.printMsg("\nPlease note that graphviz package version 2.26 or above is required.")
- settings.printMsg("**********************************************************************\n\n")
-
- }
- }
-
- result
- }
-
- def cleanup() =
- if (dotProcess != null)
- dotProcess.cleanup()
-}
-
-class DotProcess(settings: doc.Settings) {
-
- @volatile var error: Boolean = false // signal an error
- val inputString = new SyncVar[String] // used for the dot process input
- val outputString = new SyncVar[String] // used for the dot process output
- val errorBuffer: StringBuffer = new StringBuffer() // buffer used for both dot process error console AND logging
-
- // set in only one place, in the main thread
- var process: Process = null
- var templateName: String = ""
- var templateInput: String = ""
-
- def feedToDot(input: String, template: String): String = {
-
- templateName = template
- templateInput = input
-
- try {
-
- // process creation
- if (process == null) {
- val procIO = new ProcessIO(inputFn(_), outputFn(_), errorFn(_))
- val processBuilder: ProcessBuilder = Seq(settings.docDiagramsDotPath.value, "-Tsvg")
- process = processBuilder.run(procIO)
- }
-
- // pass the input and wait for the output
- assert(!inputString.isSet)
- assert(!outputString.isSet)
- inputString.put(input)
- var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000)
- if (error) result = null
-
- result
-
- } catch {
- case exc: Throwable =>
- errorBuffer.append(" Main thread in " + templateName + ": " +
- (if (exc.isInstanceOf[NoSuchElementException]) "Timeout" else "Exception: " + exc))
- error = true
- return null
- }
- }
-
- def cleanup(): Unit = {
-
- // we'll need to know if there was any error for reporting
- val _error = error
-
- if (process != null) {
- // if there's no error, this should exit cleanly
- if (!error) feedToDot("<finish>", "<finishing>")
-
- // just in case there's any thread hanging, this will take it out of the loop
- error = true
- process.destroy()
- // we'll need to unblock the input again
- if (!inputString.isSet) inputString.put("")
- if (outputString.isSet) outputString.take()
- }
-
- if (_error) {
- if (settings.docDiagramsDebug.value) {
- settings.printMsg("\n**********************************************************************")
- settings.printMsg("The graphviz dot diagram tool has malfunctioned and will be restarted.")
- settings.printMsg("\nThe following is the log of the failure:")
- settings.printMsg(errorBuffer.toString)
- settings.printMsg(" Cleanup: Last template: " + templateName)
- settings.printMsg(" Cleanup: Last dot input: \n " + templateInput.replaceAll("\n","\n ") + "\n")
- settings.printMsg(" Cleanup: Dot path: " + settings.docDiagramsDotPath.value)
- if (process != null)
- settings.printMsg(" Cleanup: Dot exit code: " + process.exitValue)
- settings.printMsg("**********************************************************************")
- } else {
- // we shouldn't just sit there for 50s not reporting anything, no?
- settings.printMsg("Graphviz dot encountered an error when generating the diagram for:")
- settings.printMsg(templateName)
- settings.printMsg("These are usually spurious errors, but if you notice a persistant error on")
- settings.printMsg("a diagram, please use the " + settings.docDiagramsDebug.name + " flag and report a bug with the output.")
- }
- }
- }
-
- /* The standard input passing function */
- private[this] def inputFn(stdin: OutputStream): Unit = {
- val writer = new BufferedWriter(new OutputStreamWriter(stdin))
- try {
- var input = inputString.take()
-
- while (!error) {
- if (input == "<finish>") {
- // empty => signal to finish
- stdin.close()
- return
- } else {
- // send output to dot
- writer.write(input + "\n\n")
- writer.flush()
- }
-
- if (!error) input = inputString.take()
- }
- stdin.close()
- } catch {
- case exc: Throwable =>
- error = true
- stdin.close()
- errorBuffer.append(" Input thread in " + templateName + ": Exception: " + exc + "\n")
- }
- }
-
- private[this] def outputFn(stdOut: InputStream): Unit = {
- val reader = new BufferedReader(new InputStreamReader(stdOut))
- var buffer: StringBuilder = new StringBuilder()
- try {
- var line = reader.readLine
- while (!error && line != null) {
- buffer.append(line + "\n")
- // signal the last element in the svg (only for output)
- if (line == "</svg>") {
- outputString.put(buffer.toString)
- buffer.setLength(0)
- }
- if (error) { stdOut.close(); return }
- line = reader.readLine
- }
- assert(!outputString.isSet)
- outputString.put(buffer.toString)
- stdOut.close()
- } catch {
- case exc: Throwable =>
- error = true
- stdOut.close()
- errorBuffer.append(" Output thread in " + templateName + ": Exception: " + exc + "\n")
- }
- }
-
- private[this] def errorFn(stdErr: InputStream): Unit = {
- val reader = new BufferedReader(new InputStreamReader(stdErr))
- var buffer: StringBuilder = new StringBuilder()
- try {
- var line = reader.readLine
- while (line != null) {
- errorBuffer.append(" DOT <error console>: " + line + "\n")
- error = true
- line = reader.readLine
- }
- stdErr.close()
- } catch {
- case exc: Throwable =>
- error = true
- stdErr.close()
- errorBuffer.append(" Error thread in " + templateName + ": Exception: " + exc + "\n")
- }
- }
-} \ No newline at end of file
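
The DotProcess above hands data between the main thread and the process I/O threads through two SyncVars: the main thread puts the dot input, the output thread puts back the finished SVG, and the main thread takes it with a timeout; a timeout surfaces as a NoSuchElementException, which feedToDot reports as "Timeout". A minimal self-contained sketch of that handoff, with a plain worker thread standing in for the external dot process:

import scala.concurrent.SyncVar

object SyncVarHandoffDemo {
  val input  = new SyncVar[String]   // main thread -> worker
  val output = new SyncVar[String]   // worker -> main thread

  def main(args: Array[String]): Unit = {
    val worker = new Thread(new Runnable {
      def run(): Unit = {
        val in = input.take()                         // block until the main thread feeds us
        output.put("<svg><!-- " + in + " --></svg>")  // hand the "rendered" result back
      }
    })
    worker.start()

    input.put("digraph G {}")
    try {
      println(output.take(1000))                // wait at most 1000 ms, like feedToDot does
    } catch {
      case _: NoSuchElementException =>         // take(timeout) signals a timeout this way
        println("Timeout")
    }
    worker.join()
  }
}
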
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
deleted file mode 100644
index 7229603ae5..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
deleted file mode 100644
index b2f2935dc9..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png
deleted file mode 100644
index 97edbd49db..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png
deleted file mode 100644
index cb1f638a58..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
deleted file mode 100644
index 9d7aec792b..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
deleted file mode 100644
index 5dd6e38d2e..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
deleted file mode 100644
index 2e3f5ea530..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
deleted file mode 100644
index 4be145d0af..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
deleted file mode 100644
index 69038337a7..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
deleted file mode 100644
index 36c43be3a2..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
deleted file mode 100644
index 5fe33f72f5..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
+++ /dev/null
@@ -1,143 +0,0 @@
-.diagram-container
-{
- display: none;
-}
-
-.diagram
-{
- overflow: hidden;
- padding-top:15px;
-}
-
-.diagram svg
-{
- display: block;
- position: absolute;
- visibility: hidden;
- margin: auto;
-}
-
-.diagram-help
-{
- float:right;
- display:none;
-}
-
-.magnifying
-{
- cursor: -webkit-zoom-in ! important;
- cursor: -moz-zoom-in ! important;
- cursor: pointer;
-}
-
-#close-link
-{
- position: absolute;
- z-index: 100;
- font-family: Arial, sans-serif;
- font-size: 10pt;
- text-decoration: underline;
- color: #315479;
-}
-
-#close:hover
-{
- text-decoration: none;
-}
-
-svg a
-{
- cursor:pointer;
-}
-
-svg text
-{
- font-size: 10px;
-}
-
-/* try to move the node text 1px in order to be vertically
- centered (does not work in all browsers) */
-svg .node text
-{
- transform: translate(0px,1px);
- -ms-transform: translate(0px,1px);
- -webkit-transform: translate(0px,1px);
- -o-transform: translate(0px,1px);
- -moz-transform: translate(0px,1px);
-}
-
-/* hover effect for edges */
-
-svg .edge.over text,
-svg .edge.implicit-incoming.over polygon,
-svg .edge.implicit-outgoing.over polygon
-{
- fill: #202020;
-}
-
-svg .edge.over path,
-svg .edge.over polygon
-{
- stroke: #202020;
-}
-
-/* hover effect for nodes in class diagrams */
-
-svg.class-diagram .node
-{
- opacity: 0.75;
-}
-
-svg.class-diagram .node.this
-{
- opacity: 1.0;
-}
-
-svg.class-diagram .node.over
-{
- opacity: 1.0;
-}
-
-svg .node.over polygon
-{
- stroke: #202020;
-}
-
-/* hover effect for nodes in package diagrams */
-
-svg.package-diagram .node.class.over polygon,
-svg.class-diagram .node.this.class.over polygon
-{
- fill: #098552;
- fill: #04663e;
-}
-
-svg.package-diagram .node.trait.over polygon,
-svg.class-diagram .node.this.trait.over polygon
-{
- fill: #3c7b9b;
- fill: #235d7b;
-}
-
-svg.package-diagram .node.type.over polygon,
-svg.class-diagram .node.this.type.over polygon
-{
- fill: #098552;
- fill: #04663e;
-}
-
-
-svg.package-diagram .node.object.over polygon
-{
- fill: #183377;
-}
-
-svg.package-diagram .node.outside.over polygon
-{
- fill: #d4d4d4;
-}
-
-svg.package-diagram .node.default.over polygon
-{
- fill: #d4d4d4;
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
deleted file mode 100644
index 478f2e38ac..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
+++ /dev/null
@@ -1,324 +0,0 @@
-/**
- * JavaScript functions enhancing the SVG diagrams.
- *
- * @author Damien Obrist
- */
-
-var diagrams = {};
-
-/**
- * Initializes the diagrams in the main window.
- */
-$(document).ready(function()
-{
- // hide diagrams in browsers not supporting SVG
- if(Modernizr && !Modernizr.inlinesvg)
- return;
-
- // only execute this in the main window
- if(diagrams.isPopup)
- return;
-
- if($("#content-diagram").length)
- $("#inheritance-diagram").css("padding-bottom", "20px");
-
- $(".diagram-container").css("display", "block");
-
- $(".diagram").each(function() {
- // store initial dimensions
- $(this).data("width", $("svg", $(this)).width());
- $(this).data("height", $("svg", $(this)).height());
- // store unscaled clone of SVG element
- $(this).data("svg", $(this).get(0).childNodes[0].cloneNode(true));
- });
-
- // make diagram visible, hide container
- $(".diagram").css("display", "none");
- $(".diagram svg").css({
- "position": "static",
- "visibility": "visible",
- "z-index": "auto"
- });
-
- // enable linking to diagrams
- if($(location).attr("hash") == "#inheritance-diagram") {
- diagrams.toggle($("#inheritance-diagram-container"), true);
- } else if($(location).attr("hash") == "#content-diagram") {
- diagrams.toggle($("#content-diagram-container"), true);
- }
-
- $(".diagram-link").click(function() {
- diagrams.toggle($(this).parent());
- });
-
- // register resize function
- $(window).resize(diagrams.resize);
-
- // don't bubble event to parent div
- // when clicking on a node of a resized
- // diagram
- $("svg a").click(function(e) {
- e.stopPropagation();
- });
-
- diagrams.initHighlighting();
-});
-
-/**
- * Initializes the diagrams in the popup.
- */
-diagrams.initPopup = function(id)
-{
- // copy diagram from main window
- if(!jQuery.browser.msie)
- $("body").append(opener.$("#" + id).data("svg"));
-
- // positioning
- $("svg").css("position", "absolute");
- $(window).resize(function()
- {
- var svg_w = $("svg").css("width").replace("px", "");
- var svg_h = $("svg").css("height").replace("px", "");
- var x = $(window).width() / 2 - svg_w / 2;
- if(x < 0) x = 0;
- var y = $(window).height() / 2 - svg_h / 2;
- if(y < 0) y = 0;
- $("svg").css("left", x + "px");
- $("svg").css("top", y + "px");
- });
- $(window).resize();
-
- diagrams.initHighlighting();
- $("svg a").click(function(e) {
- opener.diagrams.redirectFromPopup(this.href.baseVal);
- window.close();
- });
- $(document).keyup(function(e) {
- if (e.keyCode == 27) window.close();
- });
-}
-
-/**
- * Initializes highlighting for nodes and edges.
- */
-diagrams.initHighlighting = function()
-{
- // helper function since $.hover doesn't work in IE
-
- function hover(elements, fn)
- {
- elements.mouseover(fn);
- elements.mouseout(fn);
- }
-
- // inheritance edges
-
- hover($("svg .edge.inheritance"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- var parts = $(this).attr("id").split("_");
- toggleClass($("#" + parts[0] + "_" + parts[1]));
- toggleClass($("#" + parts[0] + "_" + parts[2]));
- toggleClass($(this));
- });
-
- // nodes
-
- hover($("svg .node"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- var parts = $(this).attr("id").split("_");
- var index = parts[1];
- $("svg#" + parts[0] + " .edge.inheritance").each(function(){
- var parts2 = $(this).attr("id").split("_");
- if(parts2[1] == index)
- {
- toggleClass($("#" + parts2[0] + "_" + parts2[2]));
- toggleClass($(this));
- } else if(parts2[2] == index)
- {
- toggleClass($("#" + parts2[0] + "_" + parts2[1]));
- toggleClass($(this));
- }
- });
- });
-
- // incoming implicits
-
- hover($("svg .node.implicit-incoming"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- toggleClass($("svg .edge.implicit-incoming"));
- toggleClass($("svg .node.this"));
- });
-
- hover($("svg .edge.implicit-incoming"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- toggleClass($("svg .node.this"));
- $("svg .node.implicit-incoming").each(function(){
- toggleClass($(this));
- });
- });
-
- // implicit outgoing nodes
-
- hover($("svg .node.implicit-outgoing"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- toggleClass($("svg .edge.implicit-outgoing"));
- toggleClass($("svg .node.this"));
- });
-
- hover($("svg .edge.implicit-outgoing"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- toggleClass($("svg .node.this"));
- $("svg .node.implicit-outgoing").each(function(){
- toggleClass($(this));
- });
- });
-};
-
-/**
- * Resizes the diagrams according to the available width.
- */
-diagrams.resize = function()
-{
- // available width
- var availableWidth = $("body").width() - 20;
-
- $(".diagram-container").each(function() {
- // unregister click event on whole div
- $(".diagram", this).unbind("click");
- var diagramWidth = $(".diagram", this).data("width");
- var diagramHeight = $(".diagram", this).data("height");
-
- if(diagramWidth > availableWidth)
- {
- // resize diagram
- var height = diagramHeight / diagramWidth * availableWidth;
- $(".diagram svg", this).width(availableWidth);
- $(".diagram svg", this).height(height);
-
- // register click event on whole div
- $(".diagram", this).click(function() {
- diagrams.popup($(this));
- });
- $(".diagram", this).addClass("magnifying");
- }
- else
- {
- // restore full size of diagram
- $(".diagram svg", this).width(diagramWidth);
- $(".diagram svg", this).height(diagramHeight);
- // don't show custom cursor any more
- $(".diagram", this).removeClass("magnifying");
- }
- });
-};
-
-/**
- * Shows or hides a diagram depending on its current state.
- */
-diagrams.toggle = function(container, dontAnimate)
-{
- // change class of link
- $(".diagram-link", container).toggleClass("open");
- // get element to show / hide
- var div = $(".diagram", container);
- if (div.is(':visible'))
- {
- $(".diagram-help", container).hide();
- div.unbind("click");
- div.removeClass("magnifying");
- div.slideUp(100);
- }
- else
- {
- diagrams.resize();
- if(dontAnimate)
- div.show();
- else
- div.slideDown(100);
- $(".diagram-help", container).show();
- }
-};
-
-/**
- * Opens a popup containing a copy of a diagram.
- */
-diagrams.windows = {};
-diagrams.popup = function(diagram)
-{
- var id = diagram.attr("id");
- if(!diagrams.windows[id] || diagrams.windows[id].closed) {
- var title = $(".symbol .name", $("#signature")).text();
- // cloning from parent window to popup somehow doesn't work in IE
- // therefore include the SVG as a string into the HTML
- var svgIE = jQuery.browser.msie ? $("<div />").append(diagram.data("svg")).html() : "";
- var html = '' +
- '<?xml version="1.0" encoding="UTF-8"?>\n' +
- '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' +
- '<html>\n' +
- ' <head>\n' +
- ' <title>' + title + '</title>\n' +
- ' <link href="' + $("#diagrams-css").attr("href") + '" media="screen" type="text/css" rel="stylesheet" />\n' +
- ' <script type="text/javascript" src="' + $("#jquery-js").attr("src") + '"></script>\n' +
- ' <script type="text/javascript" src="' + $("#diagrams-js").attr("src") + '"></script>\n' +
- ' <script type="text/javascript">\n' +
- ' diagrams.isPopup = true;\n' +
- ' </script>\n' +
- ' </head>\n' +
- ' <body onload="diagrams.initPopup(\'' + id + '\');">\n' +
- ' <a href="#" onclick="window.close();" id="close-link">Close this window</a>\n' +
- ' ' + svgIE + '\n' +
- ' </body>\n' +
- '</html>';
-
- var padding = 30;
- var screenHeight = screen.availHeight;
- var screenWidth = screen.availWidth;
- var w = Math.min(screenWidth, diagram.data("width") + 2 * padding);
- var h = Math.min(screenHeight, diagram.data("height") + 2 * padding);
- var left = (screenWidth - w) / 2;
- var top = (screenHeight - h) / 2;
- var parameters = "height=" + h + ", width=" + w + ", left=" + left + ", top=" + top + ", scrollbars=yes, location=no, resizable=yes";
- var win = window.open("about:blank", "_blank", parameters);
- win.document.open();
- win.document.write(html);
- win.document.close();
- diagrams.windows[id] = win;
- }
- diagrams.windows[id].focus();
-};
-
-/**
- * This method is called from within the popup when a node is clicked.
- */
-diagrams.redirectFromPopup = function(url)
-{
- window.location = url;
-};
-
-/**
- * Helper method that adds a class to a SVG element.
- */
-diagrams.addClass = function(svgElem, newClass) {
- newClass = newClass || "over";
- var classes = svgElem.attr("class");
- if ($.inArray(newClass, classes.split(/\s+/)) == -1) {
- classes += (classes ? ' ' : '') + newClass;
- svgElem.attr("class", classes);
- }
-};
-
-/**
- * Helper method that removes a class from a SVG element.
- */
-diagrams.removeClass = function(svgElem, oldClass) {
- oldClass = oldClass || "over";
- var classes = svgElem.attr("class");
- classes = $.grep(classes.split(/\s+/), function(n, i) { return n != oldClass; }).join(' ');
- svgElem.attr("class", classes);
-};
-
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
deleted file mode 100644
index 0e8c893315..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
deleted file mode 100644
index 4d740f3b17..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
deleted file mode 100644
index b9b49076a6..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
deleted file mode 100644
index f127e35b48..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
deleted file mode 100644
index 63a1ae8349..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
deleted file mode 100644
index 542ba4aa5a..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
deleted file mode 100644
index b5075c16cd..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
deleted file mode 100644
index d613cf5633..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
deleted file mode 100644
index ae2f85823b..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
deleted file mode 100644
index a0d93f4844..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
deleted file mode 100644
index 55fb370a41..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ /dev/null
@@ -1,338 +0,0 @@
-* {
- color: inherit;
- font-size: 10pt;
- text-decoration: none;
- font-family: Arial, sans-serif;
- border-width: 0px;
- padding: 0px;
- margin: 0px;
-}
-
-a {
- cursor: pointer;
-}
-
-a:hover {
- text-decoration: underline;
-}
-
-h1 {
- display: none;
-}
-
-.selected {
- -moz-box-shadow: inset 0px 5px 10px rgba(58, 88, 97, .36);
- -webkit-box-shadow: inset 0px 5px 10px rgba(58, 88, 97, .36);
- border-top: solid 1px rgba(119, 138, 153, 0.8);
- border-bottom: solid 1px rgba(151, 173, 191, 0.4);
- background-color: #ced2d9;
- margin: -1px 0px;
-}
-
-/*.letters {
- font-family: monospace;
- font-size: 2pt;
- padding: 5px;
- background-color: #DADADA;
- text-shadow: #ffffff 0 1px 0;
-}*/
-
-#library {
- display: none;
-}
-
-#browser {
- top: 0px;
- left: 0px;
- bottom: 0px;
- width: 100%;
- display: block;
- position: fixed;
-}
-
-#filter {
- position: absolute;
- display: block;
-/* padding: 5px;*/
- right: 0;
- left: 0;
- top: 0;
- background-image:url('filterbg.gif');
- background-repeat:repeat-x;
- background-color: #ededee; /* light gray */
- /*background-color: #DADADA;*/
- border:1px solid #bbbbbb;
- border-top:0;
- border-left:0;
- border-right:0;
-}
-
-#textfilter {
- position: relative;
- display: block;
- height: 20px;
- margin-top: 5px;
- margin-bottom: 5px;
-}
-
-#textfilter > .pre {
- display: block;
- position: absolute;
- top: 0;
- left: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_left.png");
-}
-
-#textfilter > .input {
- display: block;
- position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
-}
-
-#textfilter > .input > input {
- height: 20px;
- padding: 1px;
- font-weight: bold;
- color: #000000;
- background: #ffffff url("filterboxbarbg.png") repeat-x bottom left;
- width: 100%;
-}
-
-#textfilter > .post {
- display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_right.png");
-}
-
-/*#textfilter {
- position: relative;
- display: block;
- height: 20px;
- margin-bottom: 5px;
-}
-
-#textfilter > .pre {
- display: block;
- position: absolute;
- top: 0;
- left: 0;
- height: 20px;
- width: 20px;
- background: url("filter_box_left.png");
-}
-
-#textfilter > .input {
- display: block;
- position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
-}
-
-#textfilter > .input > input {
- height: 16px;
- padding: 2px;
- font-weight: bold;
- color: darkblue;
- background-color: white;
- width: 100%;
-}
-
-#textfilter > .post {
- display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 20px;
- width: 20px;
- background: url("filter_box_right.png");
-}*/
-
-#focusfilter {
- position: relative;
- text-align: center;
- display: block;
- padding: 5px;
- background-color: #fffebd; /* light yellow*/
- text-shadow: #ffffff 0 1px 0;
-}
-
-#focusfilter .focuscoll {
- font-weight: bold;
- text-shadow: #ffffff 0 1px 0;
-}
-
-#focusfilter img {
- bottom: -2px;
- position: relative;
-}
-
-#kindfilter {
- position: relative;
- display: block;
- padding: 5px;
-/* background-color: #999;*/
- text-align: center;
-}
-
-#kindfilter > a {
- color: black;
-/* text-decoration: underline;*/
- text-shadow: #ffffff 0 1px 0;
-
-}
-
-#kindfilter > a:hover {
- color: #4C4C4C;
- text-decoration: none;
- text-shadow: #ffffff 0 1px 0;
-}
-
-#letters {
- position: relative;
- text-align: center;
- padding-bottom: 5px;
- border:1px solid #bbbbbb;
- border-top:0;
- border-left:0;
- border-right:0;
-}
-
-#letters > a, #letters > span {
-/* font-family: monospace;*/
- color: #858484;
- font-weight: bold;
- font-size: 8pt;
- text-shadow: #ffffff 0 1px 0;
- padding-right: 2px;
-}
-
-#letters > span {
- color: #bbb;
-}
-
-#tpl {
- display: block;
- position: fixed;
- overflow: auto;
- right: 0;
- left: 0;
- bottom: 0;
- top: 5px;
- position: absolute;
- display: block;
-}
-
-#tpl .packhide {
- display: block;
- float: right;
- font-weight: normal;
- color: white;
-}
-
-#tpl .packfocus {
- display: block;
- float: right;
- font-weight: normal;
- color: white;
-}
-
-#tpl .packages > ol {
- background-color: #dadfe6;
- /*margin-bottom: 5px;*/
-}
-
-/*#tpl .packages > ol > li {
- margin-bottom: 1px;
-}*/
-
-#tpl .packages > li > a {
- padding: 0px 5px;
-}
-
-#tpl .packages > li > a.tplshow {
- display: block;
- color: white;
- font-weight: bold;
- display: block;
- text-shadow: #000000 0 1px 0;
-}
-
-#tpl ol > li.pack {
- padding: 3px 5px;
- background: url("packagesbg.gif");
- background-repeat:repeat-x;
- min-height: 14px;
- background-color: #6e808e;
-}
-
-#tpl ol > li {
- display: block;
-}
-
-#tpl .templates > li {
- padding-left: 5px;
- min-height: 18px;
-}
-
-#tpl ol > li .icon {
- padding-right: 5px;
- bottom: -2px;
- position: relative;
-}
-
-#tpl .templates div.placeholder {
- padding-right: 5px;
- width: 13px;
- display: inline-block;
-}
-
-#tpl .templates span.tplLink {
- padding-left: 5px;
-}
-
-#content {
- border-left-width: 1px;
- border-left-color: black;
- border-left-style: solid;
- right: 0px;
- left: 0px;
- bottom: 0px;
- top: 0px;
- position: fixed;
- margin-left: 300px;
- display: block;
-}
-
-#content > iframe {
- display: block;
- height: 100%;
- width: 100%;
-}
-
-.ui-layout-pane {
- background: #FFF;
- overflow: auto;
-}
-
-.ui-layout-resizer {
- background-image:url('filterbg.gif');
- background-repeat:repeat-x;
- background-color: #ededee; /* light gray */
- border:1px solid #bbbbbb;
- border-top:0;
- border-bottom:0;
- border-left: 0;
-}
-
-.ui-layout-toggler {
- background: #AAA;
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
deleted file mode 100644
index 96689ae701..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ /dev/null
@@ -1,536 +0,0 @@
-// © 2009–2010 EPFL/LAMP
-// code by Gilles Dubochet with contributions by Johannes Rudolph and "spiros"
-
-var topLevelTemplates = undefined;
-var topLevelPackages = undefined;
-
-var scheduler = undefined;
-
-var kindFilterState = undefined;
-var focusFilterState = undefined;
-
-var title = $(document).attr('title');
-
-var lastFragment = "";
-
-$(document).ready(function() {
- $('body').layout({
- west__size: '20%',
- center__maskContents: true
- });
- $('#browser').layout({
- center__paneSelector: ".ui-west-center"
- //,center__initClosed:true
- ,north__paneSelector: ".ui-west-north"
- });
- $('iframe').bind("load", function(){
- var subtitle = $(this).contents().find('title').text();
- $(document).attr('title', (title ? title + " - " : "") + subtitle);
-
- setUrlFragmentFromFrameSrc();
- });
-
- // workaround for IE's iframe sizing lack of smartness
- if($.browser.msie) {
- function fixIFrame() {
- $('iframe').height($(window).height() )
- }
- $('iframe').bind("load",fixIFrame)
- $('iframe').bind("resize",fixIFrame)
- }
-
- scheduler = new Scheduler();
- scheduler.addLabel("init", 1);
- scheduler.addLabel("focus", 2);
- scheduler.addLabel("filter", 4);
-
- prepareEntityList();
-
- configureTextFilter();
- configureKindFilter();
- configureEntityList();
-
- setFrameSrcFromUrlFragment();
-
- // If the url fragment changes, adjust the src of iframe "template".
- $(window).bind('hashchange', function() {
- if(lastFragment != window.location.hash) {
- lastFragment = window.location.hash;
- setFrameSrcFromUrlFragment();
- }
- });
-});
-
-// Set the iframe's src according to the fragment of the current url.
-// fragment = "#scala.Either" => iframe url = "scala/Either.html"
-// fragment = "#scala.Either@isRight:Boolean" => iframe url = "scala/Either.html#isRight:Boolean"
-function setFrameSrcFromUrlFragment() {
- var fragment = location.hash.slice(1);
- if(fragment) {
- var loc = fragment.split("@")[0].replace(/\./g, "/");
- if(loc.indexOf(".html") < 0) loc += ".html";
- if(fragment.indexOf('@') > 0) loc += ("#" + fragment.split("@", 2)[1]);
- frames["template"].location.replace(loc);
- }
- else
- frames["template"].location.replace("package.html");
-}
-
-// Set the url fragment according to the src of the iframe "template".
-// iframe url = "scala/Either.html" => url fragment = "#scala.Either"
-// iframe url = "scala/Either.html#isRight:Boolean" => url fragment = "#scala.Either@isRight:Boolean"
-function setUrlFragmentFromFrameSrc() {
- try {
- var commonLength = location.pathname.lastIndexOf("/");
- var frameLocation = frames["template"].location;
- var relativePath = frameLocation.pathname.slice(commonLength + 1);
-
- if(!relativePath || frameLocation.pathname.indexOf("/") < 0)
- return;
-
- // Add #, remove ".html" and replace "/" with "."
- fragment = "#" + relativePath.replace(/\.html$/, "").replace(/\//g, ".");
-
- // Add the frame's hash after an @
- if(frameLocation.hash) fragment += ("@" + frameLocation.hash.slice(1));
-
- // Use replace to not add history items
- lastFragment = fragment;
- location.replace(fragment);
- }
- catch(e) {
- // Chrome doesn't allow reading the iframe's location when
- // used on the local file system.
- }
-}
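
The two functions above keep the browser's URL fragment and the location of the "template" iframe in sync. A minimal standalone sketch of the string mapping they implement is shown below; the helper names fragmentToPath and pathToFragment are illustrative assumptions, not part of the deleted file.

// Sketch of the fragment <-> path mapping (assumed helper names, simplified, no error handling).
function fragmentToPath(fragment) {
  var f = fragment.slice(1);                              // "#scala.Either@isRight:Boolean"
  var loc = f.split("@")[0].replace(/\./g, "/");
  if (loc.indexOf(".html") < 0) loc += ".html";
  if (f.indexOf("@") > 0) loc += "#" + f.split("@", 2)[1];
  return loc;                                             // "scala/Either.html#isRight:Boolean"
}

function pathToFragment(relativePath) {
  var parts = relativePath.split("#");                    // "scala/Either.html#isRight:Boolean"
  var frag = "#" + parts[0].replace(/\.html$/, "").replace(/\//g, ".");
  if (parts[1]) frag += "@" + parts[1];
  return frag;                                            // "#scala.Either@isRight:Boolean"
}
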
-
-var Index = {};
-
-(function (ns) {
- function openLink(t, type) {
- var href;
- if (type == 'object') {
- href = t['object'];
- } else {
- href = t['class'] || t['trait'] || t['case class'] || t['type'];
- }
- return [
- '<a class="tplshow" target="template" href="',
- href,
- '"><img width="13" height="13" class="',
- type,
- ' icon" src="lib/',
- type,
- '.png" />'
- ].join('');
- }
-
- function createPackageHeader(pack) {
- return [
- '<li class="pack">',
- '<a class="packfocus">focus</a><a class="packhide">hide</a>',
- '<a class="tplshow" target="template" href="',
- pack.replace(/\./g, '/'),
- '/package.html">',
- pack,
- '</a></li>'
- ].join('');
- };
-
- function createListItem(template) {
- var inner = '';
-
-
- if (template.object) {
- inner += openLink(template, 'object');
- }
-
- if (template['class'] || template['trait'] || template['case class'] || template['type']) {
- inner += (inner == '') ?
- '<div class="placeholder" />' : '</a>';
- inner += openLink(template, template['trait'] ? 'trait' : template['type'] ? 'type' : 'class');
- } else {
- inner += '<div class="placeholder"/>';
- }
-
- return [
- '<li>',
- inner,
- '<span class="tplLink">',
- template.name.replace(/^.*\./, ''),
- '</span></a></li>'
- ].join('');
- }
-
-
- ns.createPackageTree = function (pack, matched, focused) {
- var html = $.map(matched, function (child, i) {
- return createListItem(child);
- }).join('');
-
- var header;
- if (focused && pack == focused) {
- header = '';
- } else {
- header = createPackageHeader(pack);
- }
-
- return [
- '<ol class="packages">',
- header,
- '<ol class="templates">',
- html,
- '</ol></ol>'
- ].join('');
- }
-
- ns.keys = function (obj) {
- var result = [];
- var key;
- for (key in obj) {
- result.push(key);
- }
- return result;
- }
-
- var hiddenPackages = {};
-
- function subPackages(pack) {
- return $.grep($('#tpl ol.packages'), function (element, index) {
-   var subPack = $('li.pack > .tplshow', element).text();
-   return subPack.indexOf(pack + '.') == 0;
- });
- }
-
- ns.hidePackage = function (ol) {
- var selected = $('li.pack > .tplshow', ol).text();
- hiddenPackages[selected] = true;
-
- $('ol.templates', ol).hide();
-
- $.each(subPackages(selected), function (index, element) {
- $(element).hide();
- });
- }
-
- ns.showPackage = function (ol, state) {
- var selected = $('li.pack > .tplshow', ol).text();
- hiddenPackages[selected] = false;
-
- $('ol.templates', ol).show();
-
- $.each(subPackages(selected), function (index, element) {
- $(element).show();
-
- // When the filter is in "packs" state,
- // we don't want to show the `.templates`
- var key = $('li.pack > .tplshow', element).text();
- if (hiddenPackages[key] || state == 'packs') {
- $('ol.templates', element).hide();
- }
- });
- }
-
-})(Index);
-
-function configureEntityList() {
- kindFilterSync();
- configureHideFilter();
- configureFocusFilter();
- textFilter();
-}
-
-/* Updates the list of entities (i.e. the content of the #tpl element) from the raw form generated by Scaladoc to a
-   form suitable for display. In particular, it adds the class, trait, type, object and package icons, and it
-   configures links to open in the right frame. Furthermore, it sets the two top-level reference lists
-   (topLevelTemplates and topLevelPackages) that serve as a baseline for resetting the list when needed.
-   Be advised: this function should only be called once, on page load. */
-function prepareEntityList() {
- var classIcon = $("#library > img.class");
- var traitIcon = $("#library > img.trait");
- var typeIcon = $("#library > img.type");
- var objectIcon = $("#library > img.object");
- var packageIcon = $("#library > img.package");
-
- $('#tpl li.pack > a.tplshow').attr("target", "template");
- $('#tpl li.pack').each(function () {
- $("span.class", this).each(function() { $(this).replaceWith(classIcon.clone()); });
- $("span.trait", this).each(function() { $(this).replaceWith(traitIcon.clone()); });
- $("span.type", this).each(function() { $(this).replaceWith(typeIcon.clone()); });
- $("span.object", this).each(function() { $(this).replaceWith(objectIcon.clone()); });
- $("span.package", this).each(function() { $(this).replaceWith(packageIcon.clone()); });
- });
- $('#tpl li.pack')
- .prepend("<a class='packhide'>hide</a>")
- .prepend("<a class='packfocus'>focus</a>");
-}
-
-/* Handles all key presses while scrolling around with keyboard shortcuts in left panel */
-function keyboardScrolldownLeftPane() {
- scheduler.add("init", function() {
- $("#textfilter input").blur();
- var $items = $("#tpl li");
- $items.first().addClass('selected');
-
- $(window).bind("keydown", function(e) {
- var $old = $items.filter('.selected'),
- $new;
-
- switch ( e.keyCode ) {
-
- case 9: // tab
- $old.removeClass('selected');
- break;
-
- case 13: // enter
- $old.removeClass('selected');
- var $url = $old.children().filter('a:last').attr('href');
- $("#template").attr("src",$url);
- break;
-
- case 27: // escape
- $old.removeClass('selected');
- $(window).unbind(e);
- $("#textfilter input").focus();
-
- break;
-
- case 38: // up
- $new = $old.prev();
-
- if (!$new.length) {
- $new = $old.parent().prev();
- }
-
- if ($new.is('ol') && $new.children(':last').is('ol')) {
- $new = $new.children().children(':last');
- } else if ($new.is('ol')) {
- $new = $new.children(':last');
- }
-
- break;
-
- case 40: // down
- $new = $old.next();
- if (!$new.length) {
- $new = $old.parent().parent().next();
- }
- if ($new.is('ol')) {
- $new = $new.children(':first');
- }
- break;
- }
-
- if ($new.is('li')) {
- $old.removeClass('selected');
- $new.addClass('selected');
- } else if (e.keyCode == 38) {
- $(window).unbind(e);
- $("#textfilter input").focus();
- }
- });
- });
-}
-
-/* Configures the text filter */
-function configureTextFilter() {
- scheduler.add("init", function() {
- $("#textfilter").append("<span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/>");
- var input = $("#textfilter input");
- resizeFilterBlock();
- input.bind('keyup', function(event) {
- if (event.keyCode == 27) { // escape
- input.attr("value", "");
- }
- if (event.keyCode == 40) { // down arrow
- $(window).unbind("keydown");
- keyboardScrolldownLeftPane();
- return false;
- }
- textFilter();
- });
- input.bind('keydown', function(event) {
- if (event.keyCode == 9) { // tab
- $("#template").contents().find("#mbrsel-input").focus();
- input.attr("value", "");
- return false;
- }
- textFilter();
- });
- input.focus(function(event) { input.select(); });
- });
- scheduler.add("init", function() {
- $("#textfilter > .post").click(function(){
- $("#textfilter input").attr("value", "");
- textFilter();
- });
- });
-}
-
-function compilePattern(query) {
- var escaped = query.replace(/([\.\*\+\?\|\(\)\[\]\\])/g, '\\$1');
-
- if (query.toLowerCase() != query) {
- // Regexp that matches CamelCase subbits: "BiSe" is
- // "[a-z]*Bi[a-z]*Se" and matches "BitSet", "ABitSet", ...
- return new RegExp(escaped.replace(/([A-Z])/g,"[a-z]*$1"));
- }
- else { // if query is all lower case make a normal case insensitive search
- return new RegExp(escaped, "i");
- }
-}
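
A few illustrative calls for compilePattern, following the examples in the comment above; the expected results are assumptions based on the regular expressions it builds, not part of the deleted file.

// Usage sketch for compilePattern (illustrative only).
compilePattern("BiSe").test("BitSet");    // true  -- "BiSe" compiles to /[a-z]*Bi[a-z]*Se/
compilePattern("BiSe").test("ABitSet");   // true  -- the pattern is unanchored
compilePattern("BiSe").test("BitArray");  // false -- no "Se" follows "Bi"
compilePattern("set").test("BitSet");     // true  -- all-lower-case queries match case-insensitively
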
-
-// Filters the templates and packages displayed in the index, according to the query string currently
-// entered in the #textfilter input and to the focused package, if any. Should be made less-blocking.
-function textFilter() {
- scheduler.clear("filter");
-
- $('#tpl').html('');
-
- var query = $("#textfilter input").attr("value") || '';
- var queryRegExp = compilePattern(query);
-
- var index = 0;
-
- var searchLoop = function () {
- var packages = Index.keys(Index.PACKAGES).sort();
-
- while (packages[index]) {
- var pack = packages[index];
- var children = Index.PACKAGES[pack];
- index++;
-
-   if (focusFilterState) {
-    if (pack != focusFilterState &&
-     pack.indexOf(focusFilterState + '.') != 0) {
-     continue;
-    }
-   }
-
- var matched = $.grep(children, function (child, i) {
- return queryRegExp.test(child.name);
- });
-
- if (matched.length > 0) {
- $('#tpl').append(Index.createPackageTree(pack, matched,
- focusFilterState));
- scheduler.add('filter', searchLoop);
- return;
- }
- }
-
- $('#tpl a.packfocus').click(function () {
- focusFilter($(this).parent().parent());
- });
- configureHideFilter();
- };
-
- scheduler.add('filter', searchLoop);
-}
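
textFilter above chunks its search loop through the Scheduler created in the ready handler; the Scheduler itself is defined in scheduler.js and is not part of this diff. Judging only from the calls used in this file (addLabel, add, clear), a minimal compatible sketch might look as follows; this is an assumption for illustration, not the real implementation.

// Hypothetical Scheduler compatible with the calls above (addLabel / add / clear).
function Scheduler() {
  this.labels = [];      // [{ name, priority }] kept sorted; lower priority runs first
  this.queues = {};      // label name -> queued functions
}
Scheduler.prototype.addLabel = function (name, priority) {
  this.labels.push({ name: name, priority: priority });
  this.labels.sort(function (a, b) { return a.priority - b.priority; });
  this.queues[name] = [];
};
Scheduler.prototype.add = function (label, fn) {
  this.queues[label].push(fn);
  this._drain();
};
Scheduler.prototype.clear = function (label) {
  this.queues[label] = [];
};
Scheduler.prototype._drain = function () {
  var self = this;
  setTimeout(function () {                 // run one task per tick to keep the page responsive
    for (var i = 0; i < self.labels.length; i++) {
      var queue = self.queues[self.labels[i].name];
      if (queue.length) { queue.shift()(); self._drain(); return; }
    }
  }, 0);
};
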
-
-/* Configures the hide tool by adding the hide link to all packages. */
-function configureHideFilter() {
- $('#tpl li.pack a.packhide').click(function () {
- var packhide = $(this)
- var action = packhide.text();
-
- var ol = $(this).parent().parent();
-
- if (action == "hide") {
- Index.hidePackage(ol);
- packhide.text("show");
- }
- else {
- Index.showPackage(ol, kindFilterState);
- packhide.text("hide");
- }
- return false;
- });
-}
-
-/* Configures the focus tool by adding the focus bar in the filter box (initially hidden), and by adding the focus
- link to all packages. */
-function configureFocusFilter() {
- scheduler.add("init", function() {
- focusFilterState = null;
- if ($("#focusfilter").length == 0) {
- $("#filter").append("<div id='focusfilter'>focused on <span class='focuscoll'></span> <a class='focusremove'><img class='icon' src='lib/remove.png'/></a></div>");
- $("#focusfilter > .focusremove").click(function(event) {
- textFilter();
-
- $("#focusfilter").hide();
- $("#kindfilter").show();
- resizeFilterBlock();
- focusFilterState = null;
- });
- $("#focusfilter").hide();
- resizeFilterBlock();
- }
- });
- scheduler.add("init", function() {
- $('#tpl li.pack a.packfocus').click(function () {
- focusFilter($(this).parent());
- return false;
- });
- });
-}
-
-/* Focuses the entity index on a specific package. To do so, it will copy the sub-templates and sub-packages of the
-   focused package into the top-level templates and packages position of the index. The original top-level
-   @param packageLi The <li> element that corresponds to the package in the entity index */
-function focusFilter(packageLi) {
- scheduler.clear("filter");
-
- var currentFocus = $('li.pack > .tplshow', packageLi).text();
- $("#focusfilter > .focuscoll").empty();
- $("#focusfilter > .focuscoll").append(currentFocus);
-
- $("#focusfilter").show();
- $("#kindfilter").hide();
- resizeFilterBlock();
- focusFilterState = currentFocus;
- kindFilterSync();
-
- textFilter();
-}
-
-function configureKindFilter() {
- scheduler.add("init", function() {
- kindFilterState = "all";
- $("#filter").append("<div id='kindfilter'><a>display packages only</a></div>");
- $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
- resizeFilterBlock();
- });
-}
-
-function kindFilter(kind) {
- if (kind == "packs") {
- kindFilterState = "packs";
- kindFilterSync();
- $("#kindfilter > a").replaceWith("<a>display all entities</a>");
- $("#kindfilter > a").click(function(event) { kindFilter("all"); });
- }
- else {
- kindFilterState = "all";
- kindFilterSync();
- $("#kindfilter > a").replaceWith("<a>display packages only</a>");
- $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
- }
-}
-
-/* Applies the kind filter. */
-function kindFilterSync() {
- if (kindFilterState == "all" || focusFilterState != null) {
- $("#tpl a.packhide").text('hide');
- $("#tpl ol.templates").show();
- } else {
- $("#tpl a.packhide").text('show');
- $("#tpl ol.templates").hide();
- }
-}
-
-function resizeFilterBlock() {
- $("#tpl").css("top", $("#filter").outerHeight(true));
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
deleted file mode 100755
index faab0cf1a3..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
+++ /dev/null
@@ -1,6 +0,0 @@
-/*! jQuery UI - v1.9.0 - 2012-10-05
-* http://jqueryui.com
-* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.mouse.js, jquery.ui.position.js, jquery.ui.accordion.js, jquery.ui.autocomplete.js, jquery.ui.button.js, jquery.ui.datepicker.js, jquery.ui.dialog.js, jquery.ui.draggable.js, jquery.ui.droppable.js, jquery.ui.effect.js, jquery.ui.effect-blind.js, jquery.ui.effect-bounce.js, jquery.ui.effect-clip.js, jquery.ui.effect-drop.js, jquery.ui.effect-explode.js, jquery.ui.effect-fade.js, jquery.ui.effect-fold.js, jquery.ui.effect-highlight.js, jquery.ui.effect-pulsate.js, jquery.ui.effect-scale.js, jquery.ui.effect-shake.js, jquery.ui.effect-slide.js, jquery.ui.effect-transfer.js, jquery.ui.menu.js, jquery.ui.progressbar.js, jquery.ui.resizable.js, jquery.ui.selectable.js, jquery.ui.slider.js, jquery.ui.sortable.js, jquery.ui.spinner.js, jquery.ui.tabs.js, jquery.ui.tooltip.js
-* Copyright (c) 2012 jQuery Foundation and other contributors Licensed MIT */
-
-(function(e,t){function i(t,n){var r,i,o,u=t.nodeName.toLowerCase();return"area"===u?(r=t.parentNode,i=r.name,!t.href||!i||r.nodeName.toLowerCase()!=="map"?!1:(o=e("img[usemap=#"+i+"]")[0],!!o&&s(o))):(/input|select|textarea|button|object/.test(u)?!t.disabled:"a"===u?t.href||n:n)&&s(t)}function s(t){return!e(t).parents().andSelf().filter(function(){return e.css(this,"visibility")==="hidden"||e.expr.filters.hidden(this)}).length}var n=0,r=/^ui-id-\d+$/;e.ui=e.ui||{};if(e.ui.version)return;e.extend(e.ui,{version:"1.9.0",keyCode:{BACKSPACE:8,COMMA:188,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,LEFT:37,NUMPAD_ADD:107,NUMPAD_DECIMAL:110,NUMPAD_DIVIDE:111,NUMPAD_ENTER:108,NUMPAD_MULTIPLY:106,NUMPAD_SUBTRACT:109,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SPACE:32,TAB:9,UP:38}}),e.fn.extend({_focus:e.fn.focus,focus:function(t,n){return typeof t=="number"?this.each(function(){var r=this;setTimeout(function(){e(r).focus(),n&&n.call(r)},t)}):this._focus.apply(this,arguments)},scrollParent:function(){var t;return e.browser.msie&&/(static|relative)/.test(this.css("position"))||/absolute/.test(this.css("position"))?t=this.parents().filter(function(){return/(relative|absolute|fixed)/.test(e.css(this,"position"))&&/(auto|scroll)/.test(e.css(this,"overflow")+e.css(this,"overflow-y")+e.css(this,"overflow-x"))}).eq(0):t=this.parents().filter(function(){return/(auto|scroll)/.test(e.css(this,"overflow")+e.css(this,"overflow-y")+e.css(this,"overflow-x"))}).eq(0),/fixed/.test(this.css("position"))||!t.length?e(document):t},zIndex:function(n){if(n!==t)return this.css("zIndex",n);if(this.length){var r=e(this[0]),i,s;while(r.length&&r[0]!==document){i=r.css("position");if(i==="absolute"||i==="relative"||i==="fixed"){s=parseInt(r.css("zIndex"),10);if(!isNaN(s)&&s!==0)return s}r=r.parent()}}return 0},uniqueId:function(){return this.each(function(){this.id||(this.id="ui-id-"+ ++n)})},removeUniqueId:function(){return this.each(function(){r.test(this.id)&&e(this).removeAttr("id")})}}),e("<a>").outerWidth(1).jquery||e.each(["Width","Height"],function(n,r){function u(t,n,r,s){return e.each(i,function(){n-=parseFloat(e.css(t,"padding"+this))||0,r&&(n-=parseFloat(e.css(t,"border"+this+"Width"))||0),s&&(n-=parseFloat(e.css(t,"margin"+this))||0)}),n}var i=r==="Width"?["Left","Right"]:["Top","Bottom"],s=r.toLowerCase(),o={innerWidth:e.fn.innerWidth,innerHeight:e.fn.innerHeight,outerWidth:e.fn.outerWidth,outerHeight:e.fn.outerHeight};e.fn["inner"+r]=function(n){return n===t?o["inner"+r].call(this):this.each(function(){e(this).css(s,u(this,n)+"px")})},e.fn["outer"+r]=function(t,n){return typeof t!="number"?o["outer"+r].call(this,t):this.each(function(){e(this).css(s,u(this,t,!0,n)+"px")})}}),e.extend(e.expr[":"],{data:e.expr.createPseudo?e.expr.createPseudo(function(t){return function(n){return!!e.data(n,t)}}):function(t,n,r){return!!e.data(t,r[3])},focusable:function(t){return i(t,!isNaN(e.attr(t,"tabindex")))},tabbable:function(t){var n=e.attr(t,"tabindex"),r=isNaN(n);return(r||n>=0)&&i(t,!r)}}),e(function(){var t=document.body,n=t.appendChild(n=document.createElement("div"));n.offsetHeight,e.extend(n.style,{minHeight:"100px",height:"auto",padding:0,borderWidth:0}),e.support.minHeight=n.offsetHeight===100,e.support.selectstart="onselectstart"in n,t.removeChild(n).style.display="none"}),e.fn.extend({disableSelection:function(){return this.bind((e.support.selectstart?"selectstart":"mousedown")+".ui-disableSelection",function(e){e.preventDefault()})},enableSelection:function(){return 
this.unbind(".ui-disableSelection")}}),e.extend(e.ui,{plugin:{add:function(t,n,r){var i,s=e.ui[t].prototype;for(i in r)s.plugins[i]=s.plugins[i]||[],s.plugins[i].push([n,r[i]])},call:function(e,t,n){var r,i=e.plugins[t];if(!i||!e.element[0].parentNode||e.element[0].parentNode.nodeType===11)return;for(r=0;r<i.length;r++)e.options[i[r][0]]&&i[r][1].apply(e.element,n)}},contains:e.contains,hasScroll:function(t,n){if(e(t).css("overflow")==="hidden")return!1;var r=n&&n==="left"?"scrollLeft":"scrollTop",i=!1;return t[r]>0?!0:(t[r]=1,i=t[r]>0,t[r]=0,i)},isOverAxis:function(e,t,n){return e>t&&e<t+n},isOver:function(t,n,r,i,s,o){return e.ui.isOverAxis(t,r,s)&&e.ui.isOverAxis(n,i,o)}})})(jQuery);(function(e,t){var n=0,r=Array.prototype.slice,i=e.cleanData;e.cleanData=function(t){for(var n=0,r;(r=t[n])!=null;n++)try{e(r).triggerHandler("remove")}catch(s){}i(t)},e.widget=function(t,n,r){var i,s,o,u,a=t.split(".")[0];t=t.split(".")[1],i=a+"-"+t,r||(r=n,n=e.Widget),e.expr[":"][i.toLowerCase()]=function(t){return!!e.data(t,i)},e[a]=e[a]||{},s=e[a][t],o=e[a][t]=function(e,t){if(!this._createWidget)return new o(e,t);arguments.length&&this._createWidget(e,t)},e.extend(o,s,{version:r.version,_proto:e.extend({},r),_childConstructors:[]}),u=new n,u.options=e.widget.extend({},u.options),e.each(r,function(t,i){e.isFunction(i)&&(r[t]=function(){var e=function(){return n.prototype[t].apply(this,arguments)},r=function(e){return n.prototype[t].apply(this,e)};return function(){var t=this._super,n=this._superApply,s;return this._super=e,this._superApply=r,s=i.apply(this,arguments),this._super=t,this._superApply=n,s}}())}),o.prototype=e.widget.extend(u,{widgetEventPrefix:t},r,{constructor:o,namespace:a,widgetName:t,widgetBaseClass:i,widgetFullName:i}),s?(e.each(s._childConstructors,function(t,n){var r=n.prototype;e.widget(r.namespace+"."+r.widgetName,o,n._proto)}),delete s._childConstructors):n._childConstructors.push(o),e.widget.bridge(t,o)},e.widget.extend=function(n){var i=r.call(arguments,1),s=0,o=i.length,u,a;for(;s<o;s++)for(u in i[s])a=i[s][u],i[s].hasOwnProperty(u)&&a!==t&&(n[u]=e.isPlainObject(a)?e.widget.extend({},n[u],a):a);return n},e.widget.bridge=function(n,i){var s=i.prototype.widgetFullName;e.fn[n]=function(o){var u=typeof o=="string",a=r.call(arguments,1),f=this;return o=!u&&a.length?e.widget.extend.apply(null,[o].concat(a)):o,u?this.each(function(){var r,i=e.data(this,s);if(!i)return e.error("cannot call methods on "+n+" prior to initialization; "+"attempted to call method '"+o+"'");if(!e.isFunction(i[o])||o.charAt(0)==="_")return e.error("no such method '"+o+"' for "+n+" widget instance");r=i[o].apply(i,a);if(r!==i&&r!==t)return f=r&&r.jquery?f.pushStack(r.get()):r,!1}):this.each(function(){var t=e.data(this,s);t?t.option(o||{})._init():new 
i(o,this)}),f}},e.Widget=function(e,t){},e.Widget._childConstructors=[],e.Widget.prototype={widgetName:"widget",widgetEventPrefix:"",defaultElement:"<div>",options:{disabled:!1,create:null},_createWidget:function(t,r){r=e(r||this.defaultElement||this)[0],this.element=e(r),this.uuid=n++,this.eventNamespace="."+this.widgetName+this.uuid,this.options=e.widget.extend({},this.options,this._getCreateOptions(),t),this.bindings=e(),this.hoverable=e(),this.focusable=e(),r!==this&&(e.data(r,this.widgetName,this),e.data(r,this.widgetFullName,this),this._on({remove:"destroy"}),this.document=e(r.style?r.ownerDocument:r.document||r),this.window=e(this.document[0].defaultView||this.document[0].parentWindow)),this._create(),this._trigger("create",null,this._getCreateEventData()),this._init()},_getCreateOptions:e.noop,_getCreateEventData:e.noop,_create:e.noop,_init:e.noop,destroy:function(){this._destroy(),this.element.unbind(this.eventNamespace).removeData(this.widgetName).removeData(this.widgetFullName).removeData(e.camelCase(this.widgetFullName)),this.widget().unbind(this.eventNamespace).removeAttr("aria-disabled").removeClass(this.widgetFullName+"-disabled "+"ui-state-disabled"),this.bindings.unbind(this.eventNamespace),this.hoverable.removeClass("ui-state-hover"),this.focusable.removeClass("ui-state-focus")},_destroy:e.noop,widget:function(){return this.element},option:function(n,r){var i=n,s,o,u;if(arguments.length===0)return e.widget.extend({},this.options);if(typeof n=="string"){i={},s=n.split("."),n=s.shift();if(s.length){o=i[n]=e.widget.extend({},this.options[n]);for(u=0;u<s.length-1;u++)o[s[u]]=o[s[u]]||{},o=o[s[u]];n=s.pop();if(r===t)return o[n]===t?null:o[n];o[n]=r}else{if(r===t)return this.options[n]===t?null:this.options[n];i[n]=r}}return this._setOptions(i),this},_setOptions:function(e){var t;for(t in e)this._setOption(t,e[t]);return this},_setOption:function(e,t){return this.options[e]=t,e==="disabled"&&(this.widget().toggleClass(this.widgetFullName+"-disabled ui-state-disabled",!!t).attr("aria-disabled",t),this.hoverable.removeClass("ui-state-hover"),this.focusable.removeClass("ui-state-focus")),this},enable:function(){return this._setOption("disabled",!1)},disable:function(){return this._setOption("disabled",!0)},_on:function(t,n){n?(t=e(t),this.bindings=this.bindings.add(t)):(n=t,t=this.element);var r=this;e.each(n,function(n,i){function s(){if(r.options.disabled===!0||e(this).hasClass("ui-state-disabled"))return;return(typeof i=="string"?r[i]:i).apply(r,arguments)}typeof i!="string"&&(s.guid=i.guid=i.guid||s.guid||e.guid++);var o=n.match(/^(\w+)\s*(.*)$/),u=o[1]+r.eventNamespace,a=o[2];a?r.widget().delegate(a,u,s):t.bind(u,s)})},_off:function(e,t){t=(t||"").split(" ").join(this.eventNamespace+" ")+this.eventNamespace,e.unbind(t).undelegate(t)},_delay:function(e,t){function n(){return(typeof e=="string"?r[e]:e).apply(r,arguments)}var r=this;return setTimeout(n,t||0)},_hoverable:function(t){this.hoverable=this.hoverable.add(t),this._on(t,{mouseenter:function(t){e(t.currentTarget).addClass("ui-state-hover")},mouseleave:function(t){e(t.currentTarget).removeClass("ui-state-hover")}})},_focusable:function(t){this.focusable=this.focusable.add(t),this._on(t,{focusin:function(t){e(t.currentTarget).addClass("ui-state-focus")},focusout:function(t){e(t.currentTarget).removeClass("ui-state-focus")}})},_trigger:function(t,n,r){var 
i,s,o=this.options[t];r=r||{},n=e.Event(n),n.type=(t===this.widgetEventPrefix?t:this.widgetEventPrefix+t).toLowerCase(),n.target=this.element[0],s=n.originalEvent;if(s)for(i in s)i in n||(n[i]=s[i]);return this.element.trigger(n,r),!(e.isFunction(o)&&o.apply(this.element[0],[n].concat(r))===!1||n.isDefaultPrevented())}},e.each({show:"fadeIn",hide:"fadeOut"},function(t,n){e.Widget.prototype["_"+t]=function(r,i,s){typeof i=="string"&&(i={effect:i});var o,u=i?i===!0||typeof i=="number"?n:i.effect||n:t;i=i||{},typeof i=="number"&&(i={duration:i}),o=!e.isEmptyObject(i),i.complete=s,i.delay&&r.delay(i.delay),o&&e.effects&&(e.effects.effect[u]||e.uiBackCompat!==!1&&e.effects[u])?r[t](i):u!==t&&r[u]?r[u](i.duration,i.easing,s):r.queue(function(n){e(this)[t](),s&&s.call(r[0]),n()})}}),e.uiBackCompat!==!1&&(e.Widget.prototype._getCreateOptions=function(){return e.metadata&&e.metadata.get(this.element[0])[this.widgetName]})})(jQuery);(function(e,t){var n=!1;e(document).mouseup(function(e){n=!1}),e.widget("ui.mouse",{version:"1.9.0",options:{cancel:"input,textarea,button,select,option",distance:1,delay:0},_mouseInit:function(){var t=this;this.element.bind("mousedown."+this.widgetName,function(e){return t._mouseDown(e)}).bind("click."+this.widgetName,function(n){if(!0===e.data(n.target,t.widgetName+".preventClickEvent"))return e.removeData(n.target,t.widgetName+".preventClickEvent"),n.stopImmediatePropagation(),!1}),this.started=!1},_mouseDestroy:function(){this.element.unbind("."+this.widgetName),this._mouseMoveDelegate&&e(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate)},_mouseDown:function(t){if(n)return;this._mouseStarted&&this._mouseUp(t),this._mouseDownEvent=t;var r=this,i=t.which===1,s=typeof this.options.cancel=="string"&&t.target.nodeName?e(t.target).closest(this.options.cancel).length:!1;if(!i||s||!this._mouseCapture(t))return!0;this.mouseDelayMet=!this.options.delay,this.mouseDelayMet||(this._mouseDelayTimer=setTimeout(function(){r.mouseDelayMet=!0},this.options.delay));if(this._mouseDistanceMet(t)&&this._mouseDelayMet(t)){this._mouseStarted=this._mouseStart(t)!==!1;if(!this._mouseStarted)return t.preventDefault(),!0}return!0===e.data(t.target,this.widgetName+".preventClickEvent")&&e.removeData(t.target,this.widgetName+".preventClickEvent"),this._mouseMoveDelegate=function(e){return r._mouseMove(e)},this._mouseUpDelegate=function(e){return r._mouseUp(e)},e(document).bind("mousemove."+this.widgetName,this._mouseMoveDelegate).bind("mouseup."+this.widgetName,this._mouseUpDelegate),t.preventDefault(),n=!0,!0},_mouseMove:function(t){return!e.browser.msie||document.documentMode>=9||!!t.button?this._mouseStarted?(this._mouseDrag(t),t.preventDefault()):(this._mouseDistanceMet(t)&&this._mouseDelayMet(t)&&(this._mouseStarted=this._mouseStart(this._mouseDownEvent,t)!==!1,this._mouseStarted?this._mouseDrag(t):this._mouseUp(t)),!this._mouseStarted):this._mouseUp(t)},_mouseUp:function(t){return e(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate),this._mouseStarted&&(this._mouseStarted=!1,t.target===this._mouseDownEvent.target&&e.data(t.target,this.widgetName+".preventClickEvent",!0),this._mouseStop(t)),!1},_mouseDistanceMet:function(e){return Math.max(Math.abs(this._mouseDownEvent.pageX-e.pageX),Math.abs(this._mouseDownEvent.pageY-e.pageY))>=this.options.distance},_mouseDelayMet:function(e){return 
this.mouseDelayMet},_mouseStart:function(e){},_mouseDrag:function(e){},_mouseStop:function(e){},_mouseCapture:function(e){return!0}})})(jQuery);(function(e,t){function h(e,t,n){return[parseInt(e[0],10)*(l.test(e[0])?t/100:1),parseInt(e[1],10)*(l.test(e[1])?n/100:1)]}function p(t,n){return parseInt(e.css(t,n),10)||0}e.ui=e.ui||{};var n,r=Math.max,i=Math.abs,s=Math.round,o=/left|center|right/,u=/top|center|bottom/,a=/[\+\-]\d+%?/,f=/^\w+/,l=/%$/,c=e.fn.position;e.position={scrollbarWidth:function(){if(n!==t)return n;var r,i,s=e("<div style='display:block;width:50px;height:50px;overflow:hidden;'><div style='height:100px;width:auto;'></div></div>"),o=s.children()[0];return e("body").append(s),r=o.offsetWidth,s.css("overflow","scroll"),i=o.offsetWidth,r===i&&(i=s[0].clientWidth),s.remove(),n=r-i},getScrollInfo:function(t){var n=t.isWindow?"":t.element.css("overflow-x"),r=t.isWindow?"":t.element.css("overflow-y"),i=n==="scroll"||n==="auto"&&t.width<t.element[0].scrollWidth,s=r==="scroll"||r==="auto"&&t.height<t.element[0].scrollHeight;return{width:i?e.position.scrollbarWidth():0,height:s?e.position.scrollbarWidth():0}},getWithinInfo:function(t){var n=e(t||window),r=e.isWindow(n[0]);return{element:n,isWindow:r,offset:n.offset()||{left:0,top:0},scrollLeft:n.scrollLeft(),scrollTop:n.scrollTop(),width:r?n.width():n.outerWidth(),height:r?n.height():n.outerHeight()}}},e.fn.position=function(t){if(!t||!t.of)return c.apply(this,arguments);t=e.extend({},t);var n,l,d,v,m,g=e(t.of),y=e.position.getWithinInfo(t.within),b=e.position.getScrollInfo(y),w=g[0],E=(t.collision||"flip").split(" "),S={};return w.nodeType===9?(l=g.width(),d=g.height(),v={top:0,left:0}):e.isWindow(w)?(l=g.width(),d=g.height(),v={top:g.scrollTop(),left:g.scrollLeft()}):w.preventDefault?(t.at="left top",l=d=0,v={top:w.pageY,left:w.pageX}):(l=g.outerWidth(),d=g.outerHeight(),v=g.offset()),m=e.extend({},v),e.each(["my","at"],function(){var e=(t[this]||"").split(" "),n,r;e.length===1&&(e=o.test(e[0])?e.concat(["center"]):u.test(e[0])?["center"].concat(e):["center","center"]),e[0]=o.test(e[0])?e[0]:"center",e[1]=u.test(e[1])?e[1]:"center",n=a.exec(e[0]),r=a.exec(e[1]),S[this]=[n?n[0]:0,r?r[0]:0],t[this]=[f.exec(e[0])[0],f.exec(e[1])[0]]}),E.length===1&&(E[1]=E[0]),t.at[0]==="right"?m.left+=l:t.at[0]==="center"&&(m.left+=l/2),t.at[1]==="bottom"?m.top+=d:t.at[1]==="center"&&(m.top+=d/2),n=h(S.at,l,d),m.left+=n[0],m.top+=n[1],this.each(function(){var o,u,a=e(this),f=a.outerWidth(),c=a.outerHeight(),w=p(this,"marginLeft"),x=p(this,"marginTop"),T=f+w+p(this,"marginRight")+b.width,N=c+x+p(this,"marginBottom")+b.height,C=e.extend({},m),k=h(S.my,a.outerWidth(),a.outerHeight());t.my[0]==="right"?C.left-=f:t.my[0]==="center"&&(C.left-=f/2),t.my[1]==="bottom"?C.top-=c:t.my[1]==="center"&&(C.top-=c/2),C.left+=k[0],C.top+=k[1],e.support.offsetFractions||(C.left=s(C.left),C.top=s(C.top)),o={marginLeft:w,marginTop:x},e.each(["left","top"],function(r,i){e.ui.position[E[r]]&&e.ui.position[E[r]][i](C,{targetWidth:l,targetHeight:d,elemWidth:f,elemHeight:c,collisionPosition:o,collisionWidth:T,collisionHeight:N,offset:[n[0]+k[0],n[1]+k[1]],my:t.my,at:t.at,within:y,elem:a})}),e.fn.bgiframe&&a.bgiframe(),t.using&&(u=function(e){var 
n=v.left-C.left,s=n+l-f,o=v.top-C.top,u=o+d-c,h={target:{element:g,left:v.left,top:v.top,width:l,height:d},element:{element:a,left:C.left,top:C.top,width:f,height:c},horizontal:s<0?"left":n>0?"right":"center",vertical:u<0?"top":o>0?"bottom":"middle"};l<f&&i(n+s)<l&&(h.horizontal="center"),d<c&&i(o+u)<d&&(h.vertical="middle"),r(i(n),i(s))>r(i(o),i(u))?h.important="horizontal":h.important="vertical",t.using.call(this,e,h)}),a.offset(e.extend(C,{using:u}))})},e.ui.position={fit:{left:function(e,t){var n=t.within,i=n.isWindow?n.scrollLeft:n.offset.left,s=n.width,o=e.left-t.collisionPosition.marginLeft,u=i-o,a=o+t.collisionWidth-s-i,f;t.collisionWidth>s?u>0&&a<=0?(f=e.left+u+t.collisionWidth-s-i,e.left+=u-f):a>0&&u<=0?e.left=i:u>a?e.left=i+s-t.collisionWidth:e.left=i:u>0?e.left+=u:a>0?e.left-=a:e.left=r(e.left-o,e.left)},top:function(e,t){var n=t.within,i=n.isWindow?n.scrollTop:n.offset.top,s=t.within.height,o=e.top-t.collisionPosition.marginTop,u=i-o,a=o+t.collisionHeight-s-i,f;t.collisionHeight>s?u>0&&a<=0?(f=e.top+u+t.collisionHeight-s-i,e.top+=u-f):a>0&&u<=0?e.top=i:u>a?e.top=i+s-t.collisionHeight:e.top=i:u>0?e.top+=u:a>0?e.top-=a:e.top=r(e.top-o,e.top)}},flip:{left:function(e,t){var n=t.within,r=n.offset.left+n.scrollLeft,s=n.width,o=n.isWindow?n.scrollLeft:n.offset.left,u=e.left-t.collisionPosition.marginLeft,a=u-o,f=u+t.collisionWidth-s-o,l=t.my[0]==="left"?-t.elemWidth:t.my[0]==="right"?t.elemWidth:0,c=t.at[0]==="left"?t.targetWidth:t.at[0]==="right"?-t.targetWidth:0,h=-2*t.offset[0],p,d;if(a<0){p=e.left+l+c+h+t.collisionWidth-s-r;if(p<0||p<i(a))e.left+=l+c+h}else if(f>0){d=e.left-t.collisionPosition.marginLeft+l+c+h-o;if(d>0||i(d)<f)e.left+=l+c+h}},top:function(e,t){var n=t.within,r=n.offset.top+n.scrollTop,s=n.height,o=n.isWindow?n.scrollTop:n.offset.top,u=e.top-t.collisionPosition.marginTop,a=u-o,f=u+t.collisionHeight-s-o,l=t.my[1]==="top",c=l?-t.elemHeight:t.my[1]==="bottom"?t.elemHeight:0,h=t.at[1]==="top"?t.targetHeight:t.at[1]==="bottom"?-t.targetHeight:0,p=-2*t.offset[1],d,v;a<0?(v=e.top+c+h+p+t.collisionHeight-s-r,e.top+c+h+p>a&&(v<0||v<i(a))&&(e.top+=c+h+p)):f>0&&(d=e.top-t.collisionPosition.marginTop+c+h+p-o,e.top+c+h+p>f&&(d>0||i(d)<f)&&(e.top+=c+h+p))}},flipfit:{left:function(){e.ui.position.flip.left.apply(this,arguments),e.ui.position.fit.left.apply(this,arguments)},top:function(){e.ui.position.flip.top.apply(this,arguments),e.ui.position.fit.top.apply(this,arguments)}}},function(){var t,n,r,i,s,o=document.getElementsByTagName("body")[0],u=document.createElement("div");t=document.createElement(o?"div":"body"),r={visibility:"hidden",width:0,height:0,border:0,margin:0,background:"none"},o&&e.extend(r,{position:"absolute",left:"-1000px",top:"-1000px"});for(s in r)t.style[s]=r[s];t.appendChild(u),n=o||document.documentElement,n.insertBefore(t,n.firstChild),u.style.cssText="position: absolute; left: 10.7432222px;",i=e(u).offset().left,e.support.offsetFractions=i>10&&i<11,t.innerHTML="",n.removeChild(t)}(),e.uiBackCompat!==!1&&function(e){var n=e.fn.position;e.fn.position=function(r){if(!r||!r.offset)return n.call(this,r);var i=r.offset.split(" "),s=r.at.split(" ");return i.length===1&&(i[1]=i[0]),/^\d/.test(i[0])&&(i[0]="+"+i[0]),/^\d/.test(i[1])&&(i[1]="+"+i[1]),s.length===1&&(/left|center|right/.test(s[0])?s[1]="center":(s[1]=s[0],s[0]="center")),n.call(this,e.extend(r,{at:s[0]+i[0]+" "+s[1]+i[1],offset:t}))}}(jQuery)})(jQuery);(function(e,t){var 
n=0,r={},i={};r.height=r.paddingTop=r.paddingBottom=r.borderTopWidth=r.borderBottomWidth="hide",i.height=i.paddingTop=i.paddingBottom=i.borderTopWidth=i.borderBottomWidth="show",e.widget("ui.accordion",{version:"1.9.0",options:{active:0,animate:{},collapsible:!1,event:"click",header:"> li > :first-child,> :not(li):even",heightStyle:"auto",icons:{activeHeader:"ui-icon-triangle-1-s",header:"ui-icon-triangle-1-e"},activate:null,beforeActivate:null},_create:function(){var t=this.accordionId="ui-accordion-"+(this.element.attr("id")||++n),r=this.options;this.prevShow=this.prevHide=e(),this.element.addClass("ui-accordion ui-widget ui-helper-reset"),this.headers=this.element.find(r.header).addClass("ui-accordion-header ui-helper-reset ui-state-default ui-corner-all"),this._hoverable(this.headers),this._focusable(this.headers),this.headers.next().addClass("ui-accordion-content ui-helper-reset ui-widget-content ui-corner-bottom").hide(),!r.collapsible&&r.active===!1&&(r.active=0),r.active<0&&(r.active+=this.headers.length),this.active=this._findActive(r.active).addClass("ui-accordion-header-active ui-state-active").toggleClass("ui-corner-all ui-corner-top"),this.active.next().addClass("ui-accordion-content-active").show(),this._createIcons(),this.originalHeight=this.element[0].style.height,this.refresh(),this.element.attr("role","tablist"),this.headers.attr("role","tab").each(function(n){var r=e(this),i=r.attr("id"),s=r.next(),o=s.attr("id");i||(i=t+"-header-"+n,r.attr("id",i)),o||(o=t+"-panel-"+n,s.attr("id",o)),r.attr("aria-controls",o),s.attr("aria-labelledby",i)}).next().attr("role","tabpanel"),this.headers.not(this.active).attr({"aria-selected":"false",tabIndex:-1}).next().attr({"aria-expanded":"false","aria-hidden":"true"}).hide(),this.active.length?this.active.attr({"aria-selected":"true",tabIndex:0}).next().attr({"aria-expanded":"true","aria-hidden":"false"}):this.headers.eq(0).attr("tabIndex",0),this._on(this.headers,{keydown:"_keydown"}),this._on(this.headers.next(),{keydown:"_panelKeyDown"}),this._setupEvents(r.event)},_getCreateEventData:function(){return{header:this.active,content:this.active.length?this.active.next():e()}},_createIcons:function(){var t=this.options.icons;t&&(e("<span>").addClass("ui-accordion-header-icon ui-icon "+t.header).prependTo(this.headers),this.active.children(".ui-accordion-header-icon").removeClass(t.header).addClass(t.activeHeader),this.headers.addClass("ui-accordion-icons"))},_destroyIcons:function(){this.headers.removeClass("ui-accordion-icons").children(".ui-accordion-header-icon").remove()},_destroy:function(){var e;this.element.removeClass("ui-accordion ui-widget ui-helper-reset").removeAttr("role"),this.headers.removeClass("ui-accordion-header ui-accordion-header-active ui-helper-reset ui-state-default ui-corner-all ui-state-active ui-state-disabled ui-corner-top").removeAttr("role").removeAttr("aria-selected").removeAttr("aria-controls").removeAttr("tabIndex").each(function(){/^ui-accordion/.test(this.id)&&this.removeAttribute("id")}),this._destroyIcons(),e=this.headers.next().css("display","").removeAttr("role").removeAttr("aria-expanded").removeAttr("aria-hidden").removeAttr("aria-labelledby").removeClass("ui-helper-reset ui-widget-content ui-corner-bottom ui-accordion-content ui-accordion-content-active 
ui-state-disabled").each(function(){/^ui-accordion/.test(this.id)&&this.removeAttribute("id")}),this.options.heightStyle!=="content"&&(this.element.css("height",this.originalHeight),e.css("height",""))},_setOption:function(e,t){if(e==="active"){this._activate(t);return}e==="event"&&(this.options.event&&this._off(this.headers,this.options.event),this._setupEvents(t)),this._super(e,t),e==="collapsible"&&!t&&this.options.active===!1&&this._activate(0),e==="icons"&&(this._destroyIcons(),t&&this._createIcons()),e==="disabled"&&this.headers.add(this.headers.next()).toggleClass("ui-state-disabled",!!t)},_keydown:function(t){if(t.altKey||t.ctrlKey)return;var n=e.ui.keyCode,r=this.headers.length,i=this.headers.index(t.target),s=!1;switch(t.keyCode){case n.RIGHT:case n.DOWN:s=this.headers[(i+1)%r];break;case n.LEFT:case n.UP:s=this.headers[(i-1+r)%r];break;case n.SPACE:case n.ENTER:this._eventHandler(t);break;case n.HOME:s=this.headers[0];break;case n.END:s=this.headers[r-1]}s&&(e(t.target).attr("tabIndex",-1),e(s).attr("tabIndex",0),s.focus(),t.preventDefault())},_panelKeyDown:function(t){t.keyCode===e.ui.keyCode.UP&&t.ctrlKey&&e(t.currentTarget).prev().focus()},refresh:function(){var t,n,r=this.options.heightStyle,i=this.element.parent();this.element.css("height",this.originalHeight),r==="fill"?(e.support.minHeight||(n=i.css("overflow"),i.css("overflow","hidden")),t=i.height(),this.element.siblings(":visible").each(function(){var n=e(this),r=n.css("position");if(r==="absolute"||r==="fixed")return;t-=n.outerHeight(!0)}),n&&i.css("overflow",n),this.headers.each(function(){t-=e(this).outerHeight(!0)}),this.headers.next().each(function(){e(this).height(Math.max(0,t-e(this).innerHeight()+e(this).height()))}).css("overflow","auto")):r==="auto"&&(t=0,this.headers.next().each(function(){t=Math.max(t,e(this).height("").height())}).height(t)),r!=="content"&&this.element.height(this.element.height())},_activate:function(t){var n=this._findActive(t)[0];if(n===this.active[0])return;n=n||this.active[0],this._eventHandler({target:n,currentTarget:n,preventDefault:e.noop})},_findActive:function(t){return typeof t=="number"?this.headers.eq(t):e()},_setupEvents:function(t){var n={};if(!t)return;e.each(t.split(" "),function(e,t){n[t]="_eventHandler"}),this._on(this.headers,n)},_eventHandler:function(t){var n=this.options,r=this.active,i=e(t.currentTarget),s=i[0]===r[0],o=s&&n.collapsible,u=o?e():i.next(),a=r.next(),f={oldHeader:r,oldPanel:a,newHeader:o?e():i,newPanel:u};t.preventDefault();if(s&&!n.collapsible||this._trigger("beforeActivate",t,f)===!1)return;n.active=o?!1:this.headers.index(i),this.active=s?e():i,this._toggle(f),r.removeClass("ui-accordion-header-active ui-state-active"),n.icons&&r.children(".ui-accordion-header-icon").removeClass(n.icons.activeHeader).addClass(n.icons.header),s||(i.removeClass("ui-corner-all").addClass("ui-accordion-header-active ui-state-active ui-corner-top"),n.icons&&i.children(".ui-accordion-header-icon").removeClass(n.icons.header).addClass(n.icons.activeHeader),i.next().addClass("ui-accordion-content-active"))},_toggle:function(t){var n=t.newPanel,r=this.prevShow.length?this.prevShow:t.oldPanel;this.prevShow.add(this.prevHide).stop(!0,!0),this.prevShow=n,this.prevHide=r,this.options.animate?this._animate(n,r,t):(r.hide(),n.show(),this._toggleComplete(t)),r.attr({"aria-expanded":"false","aria-hidden":"true"}),r.prev().attr("aria-selected","false"),n.length&&r.length?r.prev().attr("tabIndex",-1):n.length&&this.headers.filter(function(){return 
e(this).attr("tabIndex")===0}).attr("tabIndex",-1),n.attr({"aria-expanded":"true","aria-hidden":"false"}).prev().attr({"aria-selected":"true",tabIndex:0})},_animate:function(e,t,n){var s,o,u,a=this,f=0,l=e.length&&(!t.length||e.index()<t.index()),c=this.options.animate||{},h=l&&c.down||c,p=function(){a._toggleComplete(n)};typeof h=="number"&&(u=h),typeof h=="string"&&(o=h),o=o||h.easing||c.easing,u=u||h.duration||c.duration;if(!t.length)return e.animate(i,u,o,p);if(!e.length)return t.animate(r,u,o,p);s=e.show().outerHeight(),t.animate(r,{duration:u,easing:o,step:function(e,t){t.now=Math.round(e)}}),e.hide().animate(i,{duration:u,easing:o,complete:p,step:function(e,n){n.now=Math.round(e),n.prop!=="height"?f+=n.now:a.options.heightStyle!=="content"&&(n.now=Math.round(s-t.outerHeight()-f),f=0)}})},_toggleComplete:function(e){var t=e.oldPanel;t.removeClass("ui-accordion-content-active").prev().removeClass("ui-corner-top").addClass("ui-corner-all"),t.length&&(t.parent()[0].className=t.parent()[0].className),this._trigger("activate",null,e)}}),e.uiBackCompat!==!1&&(function(e,t){e.extend(t.options,{navigation:!1,navigationFilter:function(){return this.href.toLowerCase()===location.href.toLowerCase()}});var n=t._create;t._create=function(){if(this.options.navigation){var t=this,r=this.element.find(this.options.header),i=r.next(),s=r.add(i).find("a").filter(this.options.navigationFilter)[0];s&&r.add(i).each(function(n){if(e.contains(this,s))return t.options.active=Math.floor(n/2),!1})}n.call(this)}}(jQuery,jQuery.ui.accordion.prototype),function(e,t){e.extend(t.options,{heightStyle:null,autoHeight:!0,clearStyle:!1,fillSpace:!1});var n=t._create,r=t._setOption;e.extend(t,{_create:function(){this.options.heightStyle=this.options.heightStyle||this._mergeHeightStyle(),n.call(this)},_setOption:function(e,t){if(e==="autoHeight"||e==="clearStyle"||e==="fillSpace")this.options.heightStyle=this._mergeHeightStyle();r.apply(this,arguments)},_mergeHeightStyle:function(){var e=this.options;if(e.fillSpace)return"fill";if(e.clearStyle)return"content";if(e.autoHeight)return"auto"}})}(jQuery,jQuery.ui.accordion.prototype),function(e,t){e.extend(t.options.icons,{activeHeader:null,headerSelected:"ui-icon-triangle-1-s"});var n=t._createIcons;t._createIcons=function(){this.options.icons&&(this.options.icons.activeHeader=this.options.icons.activeHeader||this.options.icons.headerSelected),n.call(this)}}(jQuery,jQuery.ui.accordion.prototype),function(e,t){t.activate=t._activate;var n=t._findActive;t._findActive=function(e){return e===-1&&(e=!1),e&&typeof e!="number"&&(e=this.headers.index(this.headers.filter(e)),e===-1&&(e=!1)),n.call(this,e)}}(jQuery,jQuery.ui.accordion.prototype),jQuery.ui.accordion.prototype.resize=jQuery.ui.accordion.prototype.refresh,function(e,t){e.extend(t.options,{change:null,changestart:null});var n=t._trigger;t._trigger=function(e,t,r){var i=n.apply(this,arguments);return i?(e==="beforeActivate"?i=n.call(this,"changestart",t,{oldHeader:r.oldHeader,oldContent:r.oldPanel,newHeader:r.newHeader,newContent:r.newPanel}):e==="activate"&&(i=n.call(this,"change",t,{oldHeader:r.oldHeader,oldContent:r.oldPanel,newHeader:r.newHeader,newContent:r.newPanel})),i):!1}}(jQuery,jQuery.ui.accordion.prototype),function(e,t){e.extend(t.options,{animate:null,animated:"slide"});var n=t._create;t._create=function(){var 
e=this.options;e.animate===null&&(e.animated?e.animated==="slide"?e.animate=300:e.animated==="bounceslide"?e.animate={duration:200,down:{easing:"easeOutBounce",duration:1e3}}:e.animate=e.animated:e.animate=!1),n.call(this)}}(jQuery,jQuery.ui.accordion.prototype))})(jQuery);(function(e,t){var n=0;e.widget("ui.autocomplete",{version:"1.9.0",defaultElement:"<input>",options:{appendTo:"body",autoFocus:!1,delay:300,minLength:1,position:{my:"left top",at:"left bottom",collision:"none"},source:null,change:null,close:null,focus:null,open:null,response:null,search:null,select:null},pending:0,_create:function(){var t,n,r;this.isMultiLine=this._isMultiLine(),this.valueMethod=this.element[this.element.is("input,textarea")?"val":"text"],this.isNewMenu=!0,this.element.addClass("ui-autocomplete-input").attr("autocomplete","off"),this._on({keydown:function(i){if(this.element.prop("readOnly")){t=!0,r=!0,n=!0;return}t=!1,r=!1,n=!1;var s=e.ui.keyCode;switch(i.keyCode){case s.PAGE_UP:t=!0,this._move("previousPage",i);break;case s.PAGE_DOWN:t=!0,this._move("nextPage",i);break;case s.UP:t=!0,this._keyEvent("previous",i);break;case s.DOWN:t=!0,this._keyEvent("next",i);break;case s.ENTER:case s.NUMPAD_ENTER:this.menu.active&&(t=!0,i.preventDefault(),this.menu.select(i));break;case s.TAB:this.menu.active&&this.menu.select(i);break;case s.ESCAPE:this.menu.element.is(":visible")&&(this._value(this.term),this.close(i),i.preventDefault());break;default:n=!0,this._searchTimeout(i)}},keypress:function(r){if(t){t=!1,r.preventDefault();return}if(n)return;var i=e.ui.keyCode;switch(r.keyCode){case i.PAGE_UP:this._move("previousPage",r);break;case i.PAGE_DOWN:this._move("nextPage",r);break;case i.UP:this._keyEvent("previous",r);break;case i.DOWN:this._keyEvent("next",r)}},input:function(e){if(r){r=!1,e.preventDefault();return}this._searchTimeout(e)},focus:function(){this.selectedItem=null,this.previous=this._value()},blur:function(e){if(this.cancelBlur){delete this.cancelBlur;return}clearTimeout(this.searching),this.close(e),this._change(e)}}),this._initSource(),this.menu=e("<ul>").addClass("ui-autocomplete").appendTo(this.document.find(this.options.appendTo||"body")[0]).menu({input:e(),role:null}).zIndex(this.element.zIndex()+1).hide().data("menu"),this._on(this.menu.element,{mousedown:function(t){t.preventDefault(),this.cancelBlur=!0,this._delay(function(){delete this.cancelBlur});var n=this.menu.element[0];e(t.target).closest(".ui-menu-item").length||this._delay(function(){var t=this;this.document.one("mousedown",function(r){r.target!==t.element[0]&&r.target!==n&&!e.contains(n,r.target)&&t.close()})})},menufocus:function(t,n){if(this.isNewMenu){this.isNewMenu=!1;if(t.originalEvent&&/^mouse/.test(t.originalEvent.type)){this.menu.blur(),this.document.one("mousemove",function(){e(t.target).trigger(t.originalEvent)});return}}var r=n.item.data("ui-autocomplete-item")||n.item.data("item.autocomplete");!1!==this._trigger("focus",t,{item:r})?t.originalEvent&&/^key/.test(t.originalEvent.type)&&this._value(r.value):this.liveRegion.text(r.value)},menuselect:function(e,t){var 
n=t.item.data("ui-autocomplete-item")||t.item.data("item.autocomplete"),r=this.previous;this.element[0]!==this.document[0].activeElement&&(this.element.focus(),this.previous=r,this._delay(function(){this.previous=r,this.selectedItem=n})),!1!==this._trigger("select",e,{item:n})&&this._value(n.value),this.term=this._value(),this.close(e),this.selectedItem=n}}),this.liveRegion=e("<span>",{role:"status","aria-live":"polite"}).addClass("ui-helper-hidden-accessible").insertAfter(this.element),e.fn.bgiframe&&this.menu.element.bgiframe(),this._on(this.window,{beforeunload:function(){this.element.removeAttr("autocomplete")}})},_destroy:function(){clearTimeout(this.searching),this.element.removeClass("ui-autocomplete-input").removeAttr("autocomplete"),this.menu.element.remove(),this.liveRegion.remove()},_setOption:function(e,t){this._super(e,t),e==="source"&&this._initSource(),e==="appendTo"&&this.menu.element.appendTo(this.document.find(t||"body")[0]),e==="disabled"&&t&&this.xhr&&this.xhr.abort()},_isMultiLine:function(){return this.element.is("textarea")?!0:this.element.is("input")?!1:this.element.prop("isContentEditable")},_initSource:function(){var t,n,r=this;e.isArray(this.options.source)?(t=this.options.source,this.source=function(n,r){r(e.ui.autocomplete.filter(t,n.term))}):typeof this.options.source=="string"?(n=this.options.source,this.source=function(t,i){r.xhr&&r.xhr.abort(),r.xhr=e.ajax({url:n,data:t,dataType:"json",success:function(e,t){i(e)},error:function(){i([])}})}):this.source=this.options.source},_searchTimeout:function(e){clearTimeout(this.searching),this.searching=this._delay(function(){this.term!==this._value()&&(this.selectedItem=null,this.search(null,e))},this.options.delay)},search:function(e,t){e=e!=null?e:this._value(),this.term=this._value();if(e.length<this.options.minLength)return this.close(t);if(this._trigger("search",t)===!1)return;return this._search(e)},_search:function(e){this.pending++,this.element.addClass("ui-autocomplete-loading"),this.cancelSearch=!1,this.source({term:e},this._response())},_response:function(){var e=this,t=++n;return function(r){t===n&&e.__response(r),e.pending--,e.pending||e.element.removeClass("ui-autocomplete-loading")}},__response:function(e){e&&(e=this._normalize(e)),this._trigger("response",null,{content:e}),!this.options.disabled&&e&&e.length&&!this.cancelSearch?(this._suggest(e),this._trigger("open")):this._close()},close:function(e){this.cancelSearch=!0,this._close(e)},_close:function(e){this.menu.element.is(":visible")&&(this.menu.element.hide(),this.menu.blur(),this.isNewMenu=!0,this._trigger("close",e))},_change:function(e){this.previous!==this._value()&&this._trigger("change",e,{item:this.selectedItem})},_normalize:function(t){return t.length&&t[0].label&&t[0].value?t:e.map(t,function(t){return typeof t=="string"?{label:t,value:t}:e.extend({label:t.label||t.value,value:t.value||t.label},t)})},_suggest:function(t){var n=this.menu.element.empty().zIndex(this.element.zIndex()+1);this._renderMenu(n,t),this.menu.refresh(),n.show(),this._resizeMenu(),n.position(e.extend({of:this.element},this.options.position)),this.options.autoFocus&&this.menu.next()},_resizeMenu:function(){var e=this.menu.element;e.outerWidth(Math.max(e.width("").outerWidth()+1,this.element.outerWidth()))},_renderMenu:function(t,n){var r=this;e.each(n,function(e,n){r._renderItemData(t,n)})},_renderItemData:function(e,t){return this._renderItem(e,t).data("ui-autocomplete-item",t)},_renderItem:function(t,n){return 
n.helper[0]},t.target=this.instance.currentItem[0],this.instance._mouseCapture(t,!0),this.instance._mouseStart(t,!0,!0),this.instance.offset.click.top=r.offset.click.top,this.instance.offset.click.left=r.offset.click.left,this.instance.offset.parent.left-=r.offset.parent.left-this.instance.offset.parent.left,this.instance.offset.parent.top-=r.offset.parent.top-this.instance.offset.parent.top,r._trigger("toSortable",t),r.dropped=this.instance.element,r.currentItem=r.element,this.instance.fromOutside=r),this.instance.currentItem&&this.instance._mouseDrag(t)):this.instance.isOver&&(this.instance.isOver=0,this.instance.cancelHelperRemoval=!0,this.instance.options.revert=!1,this.instance._trigger("out",t,this.instance._uiHash(this.instance)),this.instance._mouseStop(t,!0),this.instance.options.helper=this.instance.options._helper,this.instance.currentItem.remove(),this.instance.placeholder&&this.instance.placeholder.remove(),r._trigger("fromSortable",t),r.dropped=!1)})}}),e.ui.plugin.add("draggable","cursor",{start:function(t,n){var r=e("body"),i=e(this).data("draggable").options;r.css("cursor")&&(i._cursor=r.css("cursor")),r.css("cursor",i.cursor)},stop:function(t,n){var r=e(this).data("draggable").options;r._cursor&&e("body").css("cursor",r._cursor)}}),e.ui.plugin.add("draggable","opacity",{start:function(t,n){var r=e(n.helper),i=e(this).data("draggable").options;r.css("opacity")&&(i._opacity=r.css("opacity")),r.css("opacity",i.opacity)},stop:function(t,n){var r=e(this).data("draggable").options;r._opacity&&e(n.helper).css("opacity",r._opacity)}}),e.ui.plugin.add("draggable","scroll",{start:function(t,n){var r=e(this).data("draggable");r.scrollParent[0]!=document&&r.scrollParent[0].tagName!="HTML"&&(r.overflowOffset=r.scrollParent.offset())},drag:function(t,n){var r=e(this).data("draggable"),i=r.options,s=!1;if(r.scrollParent[0]!=document&&r.scrollParent[0].tagName!="HTML"){if(!i.axis||i.axis!="x")r.overflowOffset.top+r.scrollParent[0].offsetHeight-t.pageY<i.scrollSensitivity?r.scrollParent[0].scrollTop=s=r.scrollParent[0].scrollTop+i.scrollSpeed:t.pageY-r.overflowOffset.top<i.scrollSensitivity&&(r.scrollParent[0].scrollTop=s=r.scrollParent[0].scrollTop-i.scrollSpeed);if(!i.axis||i.axis!="y")r.overflowOffset.left+r.scrollParent[0].offsetWidth-t.pageX<i.scrollSensitivity?r.scrollParent[0].scrollLeft=s=r.scrollParent[0].scrollLeft+i.scrollSpeed:t.pageX-r.overflowOffset.left<i.scrollSensitivity&&(r.scrollParent[0].scrollLeft=s=r.scrollParent[0].scrollLeft-i.scrollSpeed)}else{if(!i.axis||i.axis!="x")t.pageY-e(document).scrollTop()<i.scrollSensitivity?s=e(document).scrollTop(e(document).scrollTop()-i.scrollSpeed):e(window).height()-(t.pageY-e(document).scrollTop())<i.scrollSensitivity&&(s=e(document).scrollTop(e(document).scrollTop()+i.scrollSpeed));if(!i.axis||i.axis!="y")t.pageX-e(document).scrollLeft()<i.scrollSensitivity?s=e(document).scrollLeft(e(document).scrollLeft()-i.scrollSpeed):e(window).width()-(t.pageX-e(document).scrollLeft())<i.scrollSensitivity&&(s=e(document).scrollLeft(e(document).scrollLeft()+i.scrollSpeed))}s!==!1&&e.ui.ddmanager&&!i.dropBehaviour&&e.ui.ddmanager.prepareOffsets(r,t)}}),e.ui.plugin.add("draggable","snap",{start:function(t,n){var r=e(this).data("draggable"),i=r.options;r.snapElements=[],e(i.snap.constructor!=String?i.snap.items||":data(draggable)":i.snap).each(function(){var t=e(this),n=t.offset();this!=r.element[0]&&r.snapElements.push({item:this,width:t.outerWidth(),height:t.outerHeight(),top:n.top,left:n.left})})},drag:function(t,n){var 
r=e(this).data("draggable"),i=r.options,s=i.snapTolerance,o=n.offset.left,u=o+r.helperProportions.width,a=n.offset.top,f=a+r.helperProportions.height;for(var l=r.snapElements.length-1;l>=0;l--){var c=r.snapElements[l].left,h=c+r.snapElements[l].width,p=r.snapElements[l].top,d=p+r.snapElements[l].height;if(!(c-s<o&&o<h+s&&p-s<a&&a<d+s||c-s<o&&o<h+s&&p-s<f&&f<d+s||c-s<u&&u<h+s&&p-s<a&&a<d+s||c-s<u&&u<h+s&&p-s<f&&f<d+s)){r.snapElements[l].snapping&&r.options.snap.release&&r.options.snap.release.call(r.element,t,e.extend(r._uiHash(),{snapItem:r.snapElements[l].item})),r.snapElements[l].snapping=!1;continue}if(i.snapMode!="inner"){var v=Math.abs(p-f)<=s,m=Math.abs(d-a)<=s,g=Math.abs(c-u)<=s,y=Math.abs(h-o)<=s;v&&(n.position.top=r._convertPositionTo("relative",{top:p-r.helperProportions.height,left:0}).top-r.margins.top),m&&(n.position.top=r._convertPositionTo("relative",{top:d,left:0}).top-r.margins.top),g&&(n.position.left=r._convertPositionTo("relative",{top:0,left:c-r.helperProportions.width}).left-r.margins.left),y&&(n.position.left=r._convertPositionTo("relative",{top:0,left:h}).left-r.margins.left)}var b=v||m||g||y;if(i.snapMode!="outer"){var v=Math.abs(p-a)<=s,m=Math.abs(d-f)<=s,g=Math.abs(c-o)<=s,y=Math.abs(h-u)<=s;v&&(n.position.top=r._convertPositionTo("relative",{top:p,left:0}).top-r.margins.top),m&&(n.position.top=r._convertPositionTo("relative",{top:d-r.helperProportions.height,left:0}).top-r.margins.top),g&&(n.position.left=r._convertPositionTo("relative",{top:0,left:c}).left-r.margins.left),y&&(n.position.left=r._convertPositionTo("relative",{top:0,left:h-r.helperProportions.width}).left-r.margins.left)}!r.snapElements[l].snapping&&(v||m||g||y||b)&&r.options.snap.snap&&r.options.snap.snap.call(r.element,t,e.extend(r._uiHash(),{snapItem:r.snapElements[l].item})),r.snapElements[l].snapping=v||m||g||y||b}}}),e.ui.plugin.add("draggable","stack",{start:function(t,n){var r=e(this).data("draggable").options,i=e.makeArray(e(r.stack)).sort(function(t,n){return(parseInt(e(t).css("zIndex"),10)||0)-(parseInt(e(n).css("zIndex"),10)||0)});if(!i.length)return;var s=parseInt(i[0].style.zIndex)||0;e(i).each(function(e){this.style.zIndex=s+e}),this[0].style.zIndex=s+i.length}}),e.ui.plugin.add("draggable","zIndex",{start:function(t,n){var r=e(n.helper),i=e(this).data("draggable").options;r.css("zIndex")&&(i._zIndex=r.css("zIndex")),r.css("zIndex",i.zIndex)},stop:function(t,n){var r=e(this).data("draggable").options;r._zIndex&&e(n.helper).css("zIndex",r._zIndex)}})})(jQuery);(function(e,t){e.widget("ui.droppable",{version:"1.9.0",widgetEventPrefix:"drop",options:{accept:"*",activeClass:!1,addClasses:!0,greedy:!1,hoverClass:!1,scope:"default",tolerance:"intersect"},_create:function(){var t=this.options,n=t.accept;this.isover=0,this.isout=1,this.accept=e.isFunction(n)?n:function(e){return e.is(n)},this.proportions={width:this.element[0].offsetWidth,height:this.element[0].offsetHeight},e.ui.ddmanager.droppables[t.scope]=e.ui.ddmanager.droppables[t.scope]||[],e.ui.ddmanager.droppables[t.scope].push(this),t.addClasses&&this.element.addClass("ui-droppable")},_destroy:function(){var t=e.ui.ddmanager.droppables[this.options.scope];for(var n=0;n<t.length;n++)t[n]==this&&t.splice(n,1);this.element.removeClass("ui-droppable ui-droppable-disabled")},_setOption:function(t,n){t=="accept"&&(this.accept=e.isFunction(n)?n:function(e){return e.is(n)}),e.Widget.prototype._setOption.apply(this,arguments)},_activate:function(t){var 
n=e.ui.ddmanager.current;this.options.activeClass&&this.element.addClass(this.options.activeClass),n&&this._trigger("activate",t,this.ui(n))},_deactivate:function(t){var n=e.ui.ddmanager.current;this.options.activeClass&&this.element.removeClass(this.options.activeClass),n&&this._trigger("deactivate",t,this.ui(n))},_over:function(t){var n=e.ui.ddmanager.current;if(!n||(n.currentItem||n.element)[0]==this.element[0])return;this.accept.call(this.element[0],n.currentItem||n.element)&&(this.options.hoverClass&&this.element.addClass(this.options.hoverClass),this._trigger("over",t,this.ui(n)))},_out:function(t){var n=e.ui.ddmanager.current;if(!n||(n.currentItem||n.element)[0]==this.element[0])return;this.accept.call(this.element[0],n.currentItem||n.element)&&(this.options.hoverClass&&this.element.removeClass(this.options.hoverClass),this._trigger("out",t,this.ui(n)))},_drop:function(t,n){var r=n||e.ui.ddmanager.current;if(!r||(r.currentItem||r.element)[0]==this.element[0])return!1;var i=!1;return this.element.find(":data(droppable)").not(".ui-draggable-dragging").each(function(){var t=e.data(this,"droppable");if(t.options.greedy&&!t.options.disabled&&t.options.scope==r.options.scope&&t.accept.call(t.element[0],r.currentItem||r.element)&&e.ui.intersect(r,e.extend(t,{offset:t.element.offset()}),t.options.tolerance))return i=!0,!1}),i?!1:this.accept.call(this.element[0],r.currentItem||r.element)?(this.options.activeClass&&this.element.removeClass(this.options.activeClass),this.options.hoverClass&&this.element.removeClass(this.options.hoverClass),this._trigger("drop",t,this.ui(r)),this.element):!1},ui:function(e){return{draggable:e.currentItem||e.element,helper:e.helper,position:e.position,offset:e.positionAbs}}}),e.ui.intersect=function(t,n,r){if(!n.offset)return!1;var i=(t.positionAbs||t.position.absolute).left,s=i+t.helperProportions.width,o=(t.positionAbs||t.position.absolute).top,u=o+t.helperProportions.height,a=n.offset.left,f=a+n.proportions.width,l=n.offset.top,c=l+n.proportions.height;switch(r){case"fit":return a<=i&&s<=f&&l<=o&&u<=c;case"intersect":return a<i+t.helperProportions.width/2&&s-t.helperProportions.width/2<f&&l<o+t.helperProportions.height/2&&u-t.helperProportions.height/2<c;case"pointer":var h=(t.positionAbs||t.position.absolute).left+(t.clickOffset||t.offset.click).left,p=(t.positionAbs||t.position.absolute).top+(t.clickOffset||t.offset.click).top,d=e.ui.isOver(p,h,l,a,n.proportions.height,n.proportions.width);return d;case"touch":return(o>=l&&o<=c||u>=l&&u<=c||o<l&&u>c)&&(i>=a&&i<=f||s>=a&&s<=f||i<a&&s>f);default:return!1}},e.ui.ddmanager={current:null,droppables:{"default":[]},prepareOffsets:function(t,n){var r=e.ui.ddmanager.droppables[t.options.scope]||[],i=n?n.type:null,s=(t.currentItem||t.element).find(":data(droppable)").andSelf();e:for(var o=0;o<r.length;o++){if(r[o].options.disabled||t&&!r[o].accept.call(r[o].element[0],t.currentItem||t.element))continue;for(var u=0;u<s.length;u++)if(s[u]==r[o].element[0]){r[o].proportions.height=0;continue e}r[o].visible=r[o].element.css("display")!="none";if(!r[o].visible)continue;i=="mousedown"&&r[o]._activate.call(r[o],n),r[o].offset=r[o].element.offset(),r[o].proportions={width:r[o].element[0].offsetWidth,height:r[o].element[0].offsetHeight}}},drop:function(t,n){var r=!1;return 
e.each(e.ui.ddmanager.droppables[t.options.scope]||[],function(){if(!this.options)return;!this.options.disabled&&this.visible&&e.ui.intersect(t,this,this.options.tolerance)&&(r=this._drop.call(this,n)||r),!this.options.disabled&&this.visible&&this.accept.call(this.element[0],t.currentItem||t.element)&&(this.isout=1,this.isover=0,this._deactivate.call(this,n))}),r},dragStart:function(t,n){t.element.parentsUntil("body").bind("scroll.droppable",function(){t.options.refreshPositions||e.ui.ddmanager.prepareOffsets(t,n)})},drag:function(t,n){t.options.refreshPositions&&e.ui.ddmanager.prepareOffsets(t,n),e.each(e.ui.ddmanager.droppables[t.options.scope]||[],function(){if(this.options.disabled||this.greedyChild||!this.visible)return;var r=e.ui.intersect(t,this,this.options.tolerance),i=!r&&this.isover==1?"isout":r&&this.isover==0?"isover":null;if(!i)return;var s;if(this.options.greedy){var o=this.options.scope,u=this.element.parents(":data(droppable)").filter(function(){return e.data(this,"droppable").options.scope===o});u.length&&(s=e.data(u[0],"droppable"),s.greedyChild=i=="isover"?1:0)}s&&i=="isover"&&(s.isover=0,s.isout=1,s._out.call(s,n)),this[i]=1,this[i=="isout"?"isover":"isout"]=0,this[i=="isover"?"_over":"_out"].call(this,n),s&&i=="isout"&&(s.isout=0,s.isover=1,s._over.call(s,n))})},dragStop:function(t,n){t.element.parentsUntil("body").unbind("scroll.droppable"),t.options.refreshPositions||e.ui.ddmanager.prepareOffsets(t,n)}}})(jQuery);jQuery.effects||function(e,t){var n=e.uiBackCompat!==!1,r="ui-effects-";e.effects={effect:{}},function(t,n){function p(e,t,n){var r=a[t.type]||{};return e==null?n||!t.def?null:t.def:(e=r.floor?~~e:parseFloat(e),isNaN(e)?t.def:r.mod?(e+r.mod)%r.mod:0>e?0:r.max<e?r.max:e)}function d(e){var n=o(),r=n._rgba=[];return e=e.toLowerCase(),h(s,function(t,i){var s,o=i.re.exec(e),a=o&&i.parse(o),f=i.space||"rgba";if(a)return s=n[f](a),n[u[f].cache]=s[u[f].cache],r=n._rgba=s._rgba,!1}),r.length?(r.join()==="0,0,0,0"&&t.extend(r,c.transparent),n):c[e]}function v(e,t,n){return n=(n+1)%1,n*6<1?e+(t-e)*n*6:n*2<1?t:n*3<2?e+(t-e)*(2/3-n)*6:e}var r="backgroundColor borderBottomColor borderLeftColor borderRightColor borderTopColor color columnRuleColor outlineColor textDecorationColor textEmphasisColor".split(" "),i=/^([\-+])=\s*(\d+\.?\d*)/,s=[{re:/rgba?\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*(?:,\s*(\d+(?:\.\d+)?)\s*)?\)/,parse:function(e){return[e[1],e[2],e[3],e[4]]}},{re:/rgba?\(\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d+(?:\.\d+)?)\s*)?\)/,parse:function(e){return[e[1]*2.55,e[2]*2.55,e[3]*2.55,e[4]]}},{re:/#([a-f0-9]{2})([a-f0-9]{2})([a-f0-9]{2})/,parse:function(e){return[parseInt(e[1],16),parseInt(e[2],16),parseInt(e[3],16)]}},{re:/#([a-f0-9])([a-f0-9])([a-f0-9])/,parse:function(e){return[parseInt(e[1]+e[1],16),parseInt(e[2]+e[2],16),parseInt(e[3]+e[3],16)]}},{re:/hsla?\(\s*(\d+(?:\.\d+)?)\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d+(?:\.\d+)?)\s*)?\)/,space:"hsla",parse:function(e){return[e[1],e[2]/100,e[3]/100,e[4]]}}],o=t.Color=function(e,n,r,i){return new 
t.Color.fn.parse(e,n,r,i)},u={rgba:{props:{red:{idx:0,type:"byte"},green:{idx:1,type:"byte"},blue:{idx:2,type:"byte"}}},hsla:{props:{hue:{idx:0,type:"degrees"},saturation:{idx:1,type:"percent"},lightness:{idx:2,type:"percent"}}}},a={"byte":{floor:!0,max:255},percent:{max:1},degrees:{mod:360,floor:!0}},f=o.support={},l=t("<p>")[0],c,h=t.each;l.style.cssText="background-color:rgba(1,1,1,.5)",f.rgba=l.style.backgroundColor.indexOf("rgba")>-1,h(u,function(e,t){t.cache="_"+e,t.props.alpha={idx:3,type:"percent",def:1}}),o.fn=t.extend(o.prototype,{parse:function(r,i,s,a){if(r===n)return this._rgba=[null,null,null,null],this;if(r.jquery||r.nodeType)r=t(r).css(i),i=n;var f=this,l=t.type(r),v=this._rgba=[],m;i!==n&&(r=[r,i,s,a],l="array");if(l==="string")return this.parse(d(r)||c._default);if(l==="array")return h(u.rgba.props,function(e,t){v[t.idx]=p(r[t.idx],t)}),this;if(l==="object")return r instanceof o?h(u,function(e,t){r[t.cache]&&(f[t.cache]=r[t.cache].slice())}):h(u,function(t,n){var i=n.cache;h(n.props,function(e,t){if(!f[i]&&n.to){if(e==="alpha"||r[e]==null)return;f[i]=n.to(f._rgba)}f[i][t.idx]=p(r[e],t,!0)}),f[i]&&e.inArray(null,f[i].slice(0,3))<0&&(f[i][3]=1,n.from&&(f._rgba=n.from(f[i])))}),this},is:function(e){var t=o(e),n=!0,r=this;return h(u,function(e,i){var s,o=t[i.cache];return o&&(s=r[i.cache]||i.to&&i.to(r._rgba)||[],h(i.props,function(e,t){if(o[t.idx]!=null)return n=o[t.idx]===s[t.idx],n})),n}),n},_space:function(){var e=[],t=this;return h(u,function(n,r){t[r.cache]&&e.push(n)}),e.pop()},transition:function(e,t){var n=o(e),r=n._space(),i=u[r],s=this.alpha()===0?o("transparent"):this,f=s[i.cache]||i.to(s._rgba),l=f.slice();return n=n[i.cache],h(i.props,function(e,r){var i=r.idx,s=f[i],o=n[i],u=a[r.type]||{};if(o===null)return;s===null?l[i]=o:(u.mod&&(o-s>u.mod/2?s+=u.mod:s-o>u.mod/2&&(s-=u.mod)),l[i]=p((o-s)*t+s,r))}),this[r](l)},blend:function(e){if(this._rgba[3]===1)return this;var n=this._rgba.slice(),r=n.pop(),i=o(e)._rgba;return o(t.map(n,function(e,t){return(1-r)*i[t]+r*e}))},toRgbaString:function(){var e="rgba(",n=t.map(this._rgba,function(e,t){return e==null?t>2?1:0:e});return n[3]===1&&(n.pop(),e="rgb("),e+n.join()+")"},toHslaString:function(){var e="hsla(",n=t.map(this.hsla(),function(e,t){return e==null&&(e=t>2?1:0),t&&t<3&&(e=Math.round(e*100)+"%"),e});return n[3]===1&&(n.pop(),e="hsl("),e+n.join()+")"},toHexString:function(e){var n=this._rgba.slice(),r=n.pop();return e&&n.push(~~(r*255)),"#"+t.map(n,function(e,t){return e=(e||0).toString(16),e.length===1?"0"+e:e}).join("")},toString:function(){return this._rgba[3]===0?"transparent":this.toRgbaString()}}),o.fn.parse.prototype=o.fn,u.hsla.to=function(e){if(e[0]==null||e[1]==null||e[2]==null)return[null,null,null,e[3]];var t=e[0]/255,n=e[1]/255,r=e[2]/255,i=e[3],s=Math.max(t,n,r),o=Math.min(t,n,r),u=s-o,a=s+o,f=a*.5,l,c;return o===s?l=0:t===s?l=60*(n-r)/u+360:n===s?l=60*(r-t)/u+120:l=60*(t-n)/u+240,f===0||f===1?c=f:f<=.5?c=u/a:c=u/(2-a),[Math.round(l)%360,c,f,i==null?1:i]},u.hsla.from=function(e){if(e[0]==null||e[1]==null||e[2]==null)return[null,null,null,e[3]];var t=e[0]/360,n=e[1],r=e[2],i=e[3],s=r<=.5?r*(1+n):r+n-r*n,o=2*r-s,u,a,f;return[Math.round(v(o,s,t+1/3)*255),Math.round(v(o,s,t)*255),Math.round(v(o,s,t-1/3)*255),i]},h(u,function(e,r){var s=r.props,u=r.cache,a=r.to,f=r.from;o.fn[e]=function(e){a&&!this[u]&&(this[u]=a(this._rgba));if(e===n)return this[u].slice();var r,i=t.type(e),l=i==="array"||i==="object"?e:arguments,c=this[u].slice();return h(s,function(e,t){var 
n=l[i==="object"?e:t.idx];n==null&&(n=c[t.idx]),c[t.idx]=p(n,t)}),f?(r=o(f(c)),r[u]=c,r):o(c)},h(s,function(n,r){if(o.fn[n])return;o.fn[n]=function(s){var o=t.type(s),u=n==="alpha"?this._hsla?"hsla":"rgba":e,a=this[u](),f=a[r.idx],l;return o==="undefined"?f:(o==="function"&&(s=s.call(this,f),o=t.type(s)),s==null&&r.empty?this:(o==="string"&&(l=i.exec(s),l&&(s=f+parseFloat(l[2])*(l[1]==="+"?1:-1))),a[r.idx]=s,this[u](a)))}})}),h(r,function(e,n){t.cssHooks[n]={set:function(e,r){var i,s,u="";if(t.type(r)!=="string"||(i=d(r))){r=o(i||r);if(!f.rgba&&r._rgba[3]!==1){s=n==="backgroundColor"?e.parentNode:e;while((u===""||u==="transparent")&&s&&s.style)try{u=t.css(s,"backgroundColor"),s=s.parentNode}catch(a){}r=r.blend(u&&u!=="transparent"?u:"_default")}r=r.toRgbaString()}try{e.style[n]=r}catch(r){}}},t.fx.step[n]=function(e){e.colorInit||(e.start=o(e.elem,n),e.end=o(e.end),e.colorInit=!0),t.cssHooks[n].set(e.elem,e.start.transition(e.end,e.pos))}}),t.cssHooks.borderColor={expand:function(e){var t={};return h(["Top","Right","Bottom","Left"],function(n,r){t["border"+r+"Color"]=e}),t}},c=t.Color.names={aqua:"#00ffff",black:"#000000",blue:"#0000ff",fuchsia:"#ff00ff",gray:"#808080",green:"#008000",lime:"#00ff00",maroon:"#800000",navy:"#000080",olive:"#808000",purple:"#800080",red:"#ff0000",silver:"#c0c0c0",teal:"#008080",white:"#ffffff",yellow:"#ffff00",transparent:[null,null,null,0],_default:"#ffffff"}}(jQuery),function(){function i(){var t=this.ownerDocument.defaultView?this.ownerDocument.defaultView.getComputedStyle(this,null):this.currentStyle,n={},r,i,s;if(t&&t.length&&t[0]&&t[t[0]]){s=t.length;while(s--)r=t[s],typeof t[r]=="string"&&(n[e.camelCase(r)]=t[r])}else for(r in t)typeof t[r]=="string"&&(n[r]=t[r]);return n}function s(t,n){var i={},s,o;for(s in n)o=n[s],t[s]!==o&&!r[s]&&(e.fx.step[s]||!isNaN(parseFloat(o)))&&(i[s]=o);return i}var n=["add","remove","toggle"],r={border:1,borderBottom:1,borderColor:1,borderLeft:1,borderRight:1,borderTop:1,borderWidth:1,margin:1,padding:1};e.each(["borderLeftStyle","borderRightStyle","borderBottomStyle","borderTopStyle"],function(t,n){e.fx.step[n]=function(e){if(e.end!=="none"&&!e.setAttr||e.pos===1&&!e.setAttr)jQuery.style(e.elem,n,e.end),e.setAttr=!0}}),e.effects.animateClass=function(t,r,o,u){var a=e.speed(r,o,u);return this.queue(function(){var r=e(this),o=r.attr("class")||"",u,f=a.children?r.find("*").andSelf():r;f=f.map(function(){var t=e(this);return{el:t,start:i.call(this)}}),u=function(){e.each(n,function(e,n){t[n]&&r[n+"Class"](t[n])})},u(),f=f.map(function(){return this.end=i.call(this.el[0]),this.diff=s(this.start,this.end),this}),r.attr("class",o),f=f.map(function(){var t=this,n=e.Deferred(),r=jQuery.extend({},a,{queue:!1,complete:function(){n.resolve(t)}});return this.el.animate(this.diff,r),n.promise()}),e.when.apply(e,f.get()).done(function(){u(),e.each(arguments,function(){var t=this.el;e.each(this.diff,function(e){t.css(e,"")})}),a.complete.call(r[0])})})},e.fn.extend({_addClass:e.fn.addClass,addClass:function(t,n,r,i){return n?e.effects.animateClass.call(this,{add:t},n,r,i):this._addClass(t)},_removeClass:e.fn.removeClass,removeClass:function(t,n,r,i){return n?e.effects.animateClass.call(this,{remove:t},n,r,i):this._removeClass(t)},_toggleClass:e.fn.toggleClass,toggleClass:function(n,r,i,s,o){return typeof r=="boolean"||r===t?i?e.effects.animateClass.call(this,r?{add:n}:{remove:n},i,s,o):this._toggleClass(n,r):e.effects.animateClass.call(this,{toggle:n},r,i,s)},switchClass:function(t,n,r,i,s){return 
e.effects.animateClass.call(this,{add:n,remove:t},r,i,s)}})}(),function(){function i(n,r,i,s){e.isPlainObject(n)&&(r=n,n=n.effect),n={effect:n},r===t&&(r={}),e.isFunction(r)&&(s=r,i=null,r={});if(typeof r=="number"||e.fx.speeds[r])s=i,i=r,r={};return e.isFunction(i)&&(s=i,i=null),r&&e.extend(n,r),i=i||r.duration,n.duration=e.fx.off?0:typeof i=="number"?i:i in e.fx.speeds?e.fx.speeds[i]:e.fx.speeds._default,n.complete=s||r.complete,n}function s(t){return!t||typeof t=="number"||e.fx.speeds[t]?!0:typeof t=="string"&&!e.effects.effect[t]?n&&e.effects[t]?!1:!0:!1}e.extend(e.effects,{version:"1.9.0",save:function(e,t){for(var n=0;n<t.length;n++)t[n]!==null&&e.data(r+t[n],e[0].style[t[n]])},restore:function(e,n){var i,s;for(s=0;s<n.length;s++)n[s]!==null&&(i=e.data(r+n[s]),i===t&&(i=""),e.css(n[s],i))},setMode:function(e,t){return t==="toggle"&&(t=e.is(":hidden")?"show":"hide"),t},getBaseline:function(e,t){var n,r;switch(e[0]){case"top":n=0;break;case"middle":n=.5;break;case"bottom":n=1;break;default:n=e[0]/t.height}switch(e[1]){case"left":r=0;break;case"center":r=.5;break;case"right":r=1;break;default:r=e[1]/t.width}return{x:r,y:n}},createWrapper:function(t){if(t.parent().is(".ui-effects-wrapper"))return t.parent();var n={width:t.outerWidth(!0),height:t.outerHeight(!0),"float":t.css("float")},r=e("<div></div>").addClass("ui-effects-wrapper").css({fontSize:"100%",background:"transparent",border:"none",margin:0,padding:0}),i={width:t.width(),height:t.height()},s=document.activeElement;try{s.id}catch(o){s=document.body}return t.wrap(r),(t[0]===s||e.contains(t[0],s))&&e(s).focus(),r=t.parent(),t.css("position")==="static"?(r.css({position:"relative"}),t.css({position:"relative"})):(e.extend(n,{position:t.css("position"),zIndex:t.css("z-index")}),e.each(["top","left","bottom","right"],function(e,r){n[r]=t.css(r),isNaN(parseInt(n[r],10))&&(n[r]="auto")}),t.css({position:"relative",top:0,left:0,right:"auto",bottom:"auto"})),t.css(i),r.css(n).show()},removeWrapper:function(t){var n=document.activeElement;return t.parent().is(".ui-effects-wrapper")&&(t.parent().replaceWith(t),(t[0]===n||e.contains(t[0],n))&&e(n).focus()),t},setTransition:function(t,n,r,i){return i=i||{},e.each(n,function(e,n){var s=t.cssUnit(n);s[0]>0&&(i[n]=s[0]*r+s[1])}),i}}),e.fn.extend({effect:function(t,r,s,o){function h(t){function s(){e.isFunction(r)&&r.call(n[0]),e.isFunction(t)&&t()}var n=e(this),r=u.complete,i=u.mode;(n.is(":hidden")?i==="hide":i==="show")?s():l.call(n[0],u,s)}var u=i.apply(this,arguments),a=u.mode,f=u.queue,l=e.effects.effect[u.effect],c=!l&&n&&e.effects[u.effect];return e.fx.off||!l&&!c?a?this[a](u.duration,u.complete):this.each(function(){u.complete&&u.complete.call(this)}):l?f===!1?this.each(h):this.queue(f||"fx",h):c.call(this,{options:u,duration:u.duration,callback:u.complete,mode:u.mode})},_show:e.fn.show,show:function(e){if(s(e))return this._show.apply(this,arguments);var t=i.apply(this,arguments);return t.mode="show",this.effect.call(this,t)},_hide:e.fn.hide,hide:function(e){if(s(e))return this._hide.apply(this,arguments);var t=i.apply(this,arguments);return t.mode="hide",this.effect.call(this,t)},__toggle:e.fn.toggle,toggle:function(t){if(s(t)||typeof t=="boolean"||e.isFunction(t))return this.__toggle.apply(this,arguments);var n=i.apply(this,arguments);return n.mode="toggle",this.effect.call(this,n)},cssUnit:function(t){var n=this.css(t),r=[];return e.each(["em","px","%","pt"],function(e,t){n.indexOf(t)>0&&(r=[parseFloat(n),t])}),r}})}(),function(){var 
t={};e.each(["Quad","Cubic","Quart","Quint","Expo"],function(e,n){t[n]=function(t){return Math.pow(t,e+2)}}),e.extend(t,{Sine:function(e){return 1-Math.cos(e*Math.PI/2)},Circ:function(e){return 1-Math.sqrt(1-e*e)},Elastic:function(e){return e===0||e===1?e:-Math.pow(2,8*(e-1))*Math.sin(((e-1)*80-7.5)*Math.PI/15)},Back:function(e){return e*e*(3*e-2)},Bounce:function(e){var t,n=4;while(e<((t=Math.pow(2,--n))-1)/11);return 1/Math.pow(4,3-n)-7.5625*Math.pow((t*3-2)/22-e,2)}}),e.each(t,function(t,n){e.easing["easeIn"+t]=n,e.easing["easeOut"+t]=function(e){return 1-n(1-e)},e.easing["easeInOut"+t]=function(e){return e<.5?n(e*2)/2:1-n(e*-2+2)/2}})}()}(jQuery);(function(e,t){var n=/up|down|vertical/,r=/up|left|vertical|horizontal/;e.effects.effect.blind=function(t,i){var s=e(this),o=["position","top","bottom","left","right","height","width"],u=e.effects.setMode(s,t.mode||"hide"),a=t.direction||"up",f=n.test(a),l=f?"height":"width",c=f?"top":"left",h=r.test(a),p={},d=u==="show",v,m,g;s.parent().is(".ui-effects-wrapper")?e.effects.save(s.parent(),o):e.effects.save(s,o),s.show(),v=e.effects.createWrapper(s).css({overflow:"hidden"}),m=v[l](),g=parseFloat(v.css(c))||0,p[l]=d?m:0,h||(s.css(f?"bottom":"right",0).css(f?"top":"left","auto").css({position:"absolute"}),p[c]=d?g:m+g),d&&(v.css(l,0),h||v.css(c,g+m)),v.animate(p,{duration:t.duration,easing:t.easing,queue:!1,complete:function(){u==="hide"&&s.hide(),e.effects.restore(s,o),e.effects.removeWrapper(s),i()}})}})(jQuery);(function(e,t){e.effects.effect.bounce=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"effect"),o=s==="hide",u=s==="show",a=t.direction||"up",f=t.distance,l=t.times||5,c=l*2+(u||o?1:0),h=t.duration/c,p=t.easing,d=a==="up"||a==="down"?"top":"left",v=a==="up"||a==="left",m,g,y,b=r.queue(),w=b.length;(u||o)&&i.push("opacity"),e.effects.save(r,i),r.show(),e.effects.createWrapper(r),f||(f=r[d==="top"?"outerHeight":"outerWidth"]()/3),u&&(y={opacity:1},y[d]=0,r.css("opacity",0).css(d,v?-f*2:f*2).animate(y,h,p)),o&&(f/=Math.pow(2,l-1)),y={},y[d]=0;for(m=0;m<l;m++)g={},g[d]=(v?"-=":"+=")+f,r.animate(g,h,p).animate(y,h,p),f=o?f*2:f/2;o&&(g={opacity:0},g[d]=(v?"-=":"+=")+f,r.animate(g,h,p)),r.queue(function(){o&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}),w>1&&b.splice.apply(b,[1,0].concat(b.splice(w,c+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.clip=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=t.direction||"vertical",a=u==="vertical",f=a?"height":"width",l=a?"top":"left",c={},h,p,d;e.effects.save(r,i),r.show(),h=e.effects.createWrapper(r).css({overflow:"hidden"}),p=r[0].tagName==="IMG"?h:r,d=p[f](),o&&(p.css(f,0),p.css(l,d/2)),c[f]=o?d:0,c[l]=o?0:d/2,p.animate(c,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){o||r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.drop=function(t,n){var 
r=e(this),i=["position","top","bottom","left","right","opacity","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=t.direction||"left",a=u==="up"||u==="down"?"top":"left",f=u==="up"||u==="left"?"pos":"neg",l={opacity:o?1:0},c;e.effects.save(r,i),r.show(),e.effects.createWrapper(r),c=t.distance||r[a==="top"?"outerHeight":"outerWidth"](!0)/2,o&&r.css("opacity",0).css(a,f==="pos"?-c:c),l[a]=(o?f==="pos"?"+=":"-=":f==="pos"?"-=":"+=")+c,r.animate(l,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.explode=function(t,n){function y(){c.push(this),c.length===r*i&&b()}function b(){s.css({visibility:"visible"}),e(c).remove(),u||s.hide(),n()}var r=t.pieces?Math.round(Math.sqrt(t.pieces)):3,i=r,s=e(this),o=e.effects.setMode(s,t.mode||"hide"),u=o==="show",a=s.show().css("visibility","hidden").offset(),f=Math.ceil(s.outerWidth()/i),l=Math.ceil(s.outerHeight()/r),c=[],h,p,d,v,m,g;for(h=0;h<r;h++){v=a.top+h*l,g=h-(r-1)/2;for(p=0;p<i;p++)d=a.left+p*f,m=p-(i-1)/2,s.clone().appendTo("body").wrap("<div></div>").css({position:"absolute",visibility:"visible",left:-p*f,top:-h*l}).parent().addClass("ui-effects-explode").css({position:"absolute",overflow:"hidden",width:f,height:l,left:d+(u?m*f:0),top:v+(u?g*l:0),opacity:u?0:1}).animate({left:d+(u?0:m*f),top:v+(u?0:g*l),opacity:u?1:0},t.duration||500,t.easing,y)}}})(jQuery);(function(e,t){e.effects.effect.fade=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"toggle");r.animate({opacity:i},{queue:!1,duration:t.duration,easing:t.easing,complete:n})}})(jQuery);(function(e,t){e.effects.effect.fold=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=s==="hide",a=t.size||15,f=/([0-9]+)%/.exec(a),l=!!t.horizFirst,c=o!==l,h=c?["width","height"]:["height","width"],p=t.duration/2,d,v,m={},g={};e.effects.save(r,i),r.show(),d=e.effects.createWrapper(r).css({overflow:"hidden"}),v=c?[d.width(),d.height()]:[d.height(),d.width()],f&&(a=parseInt(f[1],10)/100*v[u?0:1]),o&&d.css(l?{height:0,width:a}:{height:a,width:0}),m[h[0]]=o?v[0]:a,g[h[1]]=o?v[1]:0,d.animate(m,p,t.easing).animate(g,p,t.easing,function(){u&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()})}})(jQuery);(function(e,t){e.effects.effect.highlight=function(t,n){var r=e(this),i=["backgroundImage","backgroundColor","opacity"],s=e.effects.setMode(r,t.mode||"show"),o={backgroundColor:r.css("backgroundColor")};s==="hide"&&(o.opacity=0),e.effects.save(r,i),r.show().css({backgroundImage:"none",backgroundColor:t.color||"#ffff99"}).animate(o,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),n()}})}})(jQuery);(function(e,t){e.effects.effect.pulsate=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"show"),s=i==="show",o=i==="hide",u=s||i==="hide",a=(t.times||5)*2+(u?1:0),f=t.duration/a,l=0,c=r.queue(),h=c.length,p;if(s||!r.is(":visible"))r.css("opacity",0).show(),l=1;for(p=1;p<a;p++)r.animate({opacity:l},f,t.easing),l=1-l;r.animate({opacity:l},f,t.easing),r.queue(function(){o&&r.hide(),n()}),h>1&&c.splice.apply(c,[1,0].concat(c.splice(h,a+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.puff=function(t,n){var 
r=e(this),i=e.effects.setMode(r,t.mode||"hide"),s=i==="hide",o=parseInt(t.percent,10)||150,u=o/100,a={height:r.height(),width:r.width()};e.extend(t,{effect:"scale",queue:!1,fade:!0,mode:i,complete:n,percent:s?o:100,from:s?a:{height:a.height*u,width:a.width*u}}),r.effect(t)},e.effects.effect.scale=function(t,n){var r=e(this),i=e.extend(!0,{},t),s=e.effects.setMode(r,t.mode||"effect"),o=parseInt(t.percent,10)||(parseInt(t.percent,10)===0?0:s==="hide"?0:100),u=t.direction||"both",a=t.origin,f={height:r.height(),width:r.width(),outerHeight:r.outerHeight(),outerWidth:r.outerWidth()},l={y:u!=="horizontal"?o/100:1,x:u!=="vertical"?o/100:1};i.effect="size",i.queue=!1,i.complete=n,s!=="effect"&&(i.origin=a||["middle","center"],i.restore=!0),i.from=t.from||(s==="show"?{height:0,width:0}:f),i.to={height:f.height*l.y,width:f.width*l.x,outerHeight:f.outerHeight*l.y,outerWidth:f.outerWidth*l.x},i.fade&&(s==="show"&&(i.from.opacity=0,i.to.opacity=1),s==="hide"&&(i.from.opacity=1,i.to.opacity=0)),r.effect(i)},e.effects.effect.size=function(t,n){var r=e(this),i=["position","top","bottom","left","right","width","height","overflow","opacity"],s=["position","top","bottom","left","right","overflow","opacity"],o=["width","height","overflow"],u=["fontSize"],a=["borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"],f=["borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"],l=e.effects.setMode(r,t.mode||"effect"),c=t.restore||l!=="effect",h=t.scale||"both",p=t.origin||["middle","center"],d,v,m,g=r.css("position");l==="show"&&r.show(),d={height:r.height(),width:r.width(),outerHeight:r.outerHeight(),outerWidth:r.outerWidth()},r.from=t.from||d,r.to=t.to||d,m={from:{y:r.from.height/d.height,x:r.from.width/d.width},to:{y:r.to.height/d.height,x:r.to.width/d.width}};if(h==="box"||h==="both")m.from.y!==m.to.y&&(i=i.concat(a),r.from=e.effects.setTransition(r,a,m.from.y,r.from),r.to=e.effects.setTransition(r,a,m.to.y,r.to)),m.from.x!==m.to.x&&(i=i.concat(f),r.from=e.effects.setTransition(r,f,m.from.x,r.from),r.to=e.effects.setTransition(r,f,m.to.x,r.to));(h==="content"||h==="both")&&m.from.y!==m.to.y&&(i=i.concat(u),r.from=e.effects.setTransition(r,u,m.from.y,r.from),r.to=e.effects.setTransition(r,u,m.to.y,r.to)),e.effects.save(r,c?i:s),r.show(),e.effects.createWrapper(r),r.css("overflow","hidden").css(r.from),p&&(v=e.effects.getBaseline(p,d),r.from.top=(d.outerHeight-r.outerHeight())*v.y,r.from.left=(d.outerWidth-r.outerWidth())*v.x,r.to.top=(d.outerHeight-r.to.outerHeight)*v.y,r.to.left=(d.outerWidth-r.to.outerWidth)*v.x),r.css(r.from);if(h==="content"||h==="both")a=a.concat(["marginTop","marginBottom"]).concat(u),f=f.concat(["marginLeft","marginRight"]),o=i.concat(a).concat(f),r.find("*[width]").each(function(){var 
n=e(this),r={height:n.height(),width:n.width()};c&&e.effects.save(n,o),n.from={height:r.height*m.from.y,width:r.width*m.from.x},n.to={height:r.height*m.to.y,width:r.width*m.to.x},m.from.y!==m.to.y&&(n.from=e.effects.setTransition(n,a,m.from.y,n.from),n.to=e.effects.setTransition(n,a,m.to.y,n.to)),m.from.x!==m.to.x&&(n.from=e.effects.setTransition(n,f,m.from.x,n.from),n.to=e.effects.setTransition(n,f,m.to.x,n.to)),n.css(n.from),n.animate(n.to,t.duration,t.easing,function(){c&&e.effects.restore(n,o)})});r.animate(r.to,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){r.to.opacity===0&&r.css("opacity",r.from.opacity),l==="hide"&&r.hide(),e.effects.restore(r,c?i:s),c||(g==="static"?r.css({position:"relative",top:r.to.top,left:r.to.left}):e.each(["top","left"],function(e,t){r.css(t,function(t,n){var i=parseInt(n,10),s=e?r.to.left:r.to.top;return n==="auto"?s+"px":i+s+"px"})})),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.shake=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"effect"),o=t.direction||"left",u=t.distance||20,a=t.times||3,f=a*2+1,l=Math.round(t.duration/f),c=o==="up"||o==="down"?"top":"left",h=o==="up"||o==="left",p={},d={},v={},m,g=r.queue(),y=g.length;e.effects.save(r,i),r.show(),e.effects.createWrapper(r),p[c]=(h?"-=":"+=")+u,d[c]=(h?"+=":"-=")+u*2,v[c]=(h?"-=":"+=")+u*2,r.animate(p,l,t.easing);for(m=1;m<a;m++)r.animate(d,l,t.easing).animate(v,l,t.easing);r.animate(d,l,t.easing).animate(p,l/2,t.easing).queue(function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}),y>1&&g.splice.apply(g,[1,0].concat(g.splice(y,f+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.slide=function(t,n){var r=e(this),i=["position","top","bottom","left","right","width","height"],s=e.effects.setMode(r,t.mode||"show"),o=s==="show",u=t.direction||"left",a=u==="up"||u==="down"?"top":"left",f=u==="up"||u==="left",l,c={};e.effects.save(r,i),r.show(),l=t.distance||r[a==="top"?"outerHeight":"outerWidth"](!0),e.effects.createWrapper(r).css({overflow:"hidden"}),o&&r.css(a,f?isNaN(l)?"-"+l:-l:l),c[a]=(o?f?"+=":"-=":f?"-=":"+=")+l,r.animate(c,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.transfer=function(t,n){var r=e(this),i=e(t.to),s=i.css("position")==="fixed",o=e("body"),u=s?o.scrollTop():0,a=s?o.scrollLeft():0,f=i.offset(),l={top:f.top-u,left:f.left-a,height:i.innerHeight(),width:i.innerWidth()},c=r.offset(),h=e('<div class="ui-effects-transfer"></div>').appendTo(document.body).addClass(t.className).css({top:c.top-u,left:c.left-a,height:r.innerHeight(),width:r.innerWidth(),position:s?"fixed":"absolute"}).animate(l,t.duration,t.easing,function(){h.remove(),n()})}})(jQuery);(function(e,t){var n=!1;e.widget("ui.menu",{version:"1.9.0",defaultElement:"<ul>",delay:300,options:{icons:{submenu:"ui-icon-carat-1-e"},menus:"ul",position:{my:"left top",at:"right top"},role:"menu",blur:null,focus:null,select:null},_create:function(){this.activeMenu=this.element,this.element.uniqueId().addClass("ui-menu ui-widget ui-widget-content 
ui-corner-all").toggleClass("ui-menu-icons",!!this.element.find(".ui-icon").length).attr({role:this.options.role,tabIndex:0}).bind("click"+this.eventNamespace,e.proxy(function(e){this.options.disabled&&e.preventDefault()},this)),this.options.disabled&&this.element.addClass("ui-state-disabled").attr("aria-disabled","true"),this._on({"mousedown .ui-menu-item > a":function(e){e.preventDefault()},"click .ui-state-disabled > a":function(e){e.preventDefault()},"click .ui-menu-item:has(a)":function(t){var r=e(t.target).closest(".ui-menu-item");!n&&r.not(".ui-state-disabled").length&&(n=!0,this.select(t),r.has(".ui-menu").length?this.expand(t):this.element.is(":focus")||(this.element.trigger("focus",[!0]),this.active&&this.active.parents(".ui-menu").length===1&&clearTimeout(this.timer)))},"mouseenter .ui-menu-item":function(t){var n=e(t.currentTarget);n.siblings().children(".ui-state-active").removeClass("ui-state-active"),this.focus(t,n)},mouseleave:"collapseAll","mouseleave .ui-menu":"collapseAll",focus:function(e,t){var n=this.active||this.element.children(".ui-menu-item").eq(0);t||this.focus(e,n)},blur:function(t){this._delay(function(){e.contains(this.element[0],this.document[0].activeElement)||this.collapseAll(t)})},keydown:"_keydown"}),this.refresh(),this._on(this.document,{click:function(t){e(t.target).closest(".ui-menu").length||this.collapseAll(t),n=!1}})},_destroy:function(){this.element.removeAttr("aria-activedescendant").find(".ui-menu").andSelf().removeClass("ui-menu ui-widget ui-widget-content ui-corner-all ui-menu-icons").removeAttr("role").removeAttr("tabIndex").removeAttr("aria-labelledby").removeAttr("aria-expanded").removeAttr("aria-hidden").removeAttr("aria-disabled").removeUniqueId().show(),this.element.find(".ui-menu-item").removeClass("ui-menu-item").removeAttr("role").removeAttr("aria-disabled").children("a").removeUniqueId().removeClass("ui-corner-all ui-state-hover").removeAttr("tabIndex").removeAttr("role").removeAttr("aria-haspopup").children().each(function(){var t=e(this);t.data("ui-menu-submenu-carat")&&t.remove()}),this.element.find(".ui-menu-divider").removeClass("ui-menu-divider ui-widget-content")},_keydown:function(t){function a(e){return e.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&")}var n,r,i,s,o,u=!0;switch(t.keyCode){case e.ui.keyCode.PAGE_UP:this.previousPage(t);break;case e.ui.keyCode.PAGE_DOWN:this.nextPage(t);break;case e.ui.keyCode.HOME:this._move("first","first",t);break;case e.ui.keyCode.END:this._move("last","last",t);break;case e.ui.keyCode.UP:this.previous(t);break;case e.ui.keyCode.DOWN:this.next(t);break;case e.ui.keyCode.LEFT:this.collapse(t);break;case e.ui.keyCode.RIGHT:this.active&&!this.active.is(".ui-state-disabled")&&this.expand(t);break;case e.ui.keyCode.ENTER:case e.ui.keyCode.SPACE:this._activate(t);break;case e.ui.keyCode.ESCAPE:this.collapse(t);break;default:u=!1,r=this.previousFilter||"",i=String.fromCharCode(t.keyCode),s=!1,clearTimeout(this.filterTimer),i===r?s=!0:i=r+i,o=new RegExp("^"+a(i),"i"),n=this.activeMenu.children(".ui-menu-item").filter(function(){return o.test(e(this).children("a").text())}),n=s&&n.index(this.active.next())!==-1?this.active.nextAll(".ui-menu-item"):n,n.length||(i=String.fromCharCode(t.keyCode),o=new RegExp("^"+a(i),"i"),n=this.activeMenu.children(".ui-menu-item").filter(function(){return o.test(e(this).children("a").text())})),n.length?(this.focus(t,n),n.length>1?(this.previousFilter=i,this.filterTimer=this._delay(function(){delete this.previousFilter},1e3)):delete this.previousFilter):delete 
this.previousFilter}u&&t.preventDefault()},_activate:function(e){this.active.is(".ui-state-disabled")||(this.active.children("a[aria-haspopup='true']").length?this.expand(e):this.select(e))},refresh:function(){var t,n=this.options.icons.submenu,r=this.element.find(this.options.menus+":not(.ui-menu)").addClass("ui-menu ui-widget ui-widget-content ui-corner-all").hide().attr({role:this.options.role,"aria-hidden":"true","aria-expanded":"false"});t=r.add(this.element),t.children(":not(.ui-menu-item):has(a)").addClass("ui-menu-item").attr("role","presentation").children("a").uniqueId().addClass("ui-corner-all").attr({tabIndex:-1,role:this._itemRole()}),t.children(":not(.ui-menu-item)").each(function(){var t=e(this);/[^\-ā€”ā€“\s]/.test(t.text())||t.addClass("ui-widget-content ui-menu-divider")}),t.children(".ui-state-disabled").attr("aria-disabled","true"),r.each(function(){var t=e(this),r=t.prev("a"),i=e("<span>").addClass("ui-menu-icon ui-icon "+n).data("ui-menu-submenu-carat",!0);r.attr("aria-haspopup","true").prepend(i),t.attr("aria-labelledby",r.attr("id"))}),this.active&&!e.contains(this.element[0],this.active[0])&&this.blur()},_itemRole:function(){return{menu:"menuitem",listbox:"option"}[this.options.role]},focus:function(e,t){var n,r;this.blur(e,e&&e.type==="focus"),this._scrollIntoView(t),this.active=t.first(),r=this.active.children("a").addClass("ui-state-focus"),this.options.role&&this.element.attr("aria-activedescendant",r.attr("id")),this.active.parent().closest(".ui-menu-item").children("a:first").addClass("ui-state-active"),e&&e.type==="keydown"?this._close():this.timer=this._delay(function(){this._close()},this.delay),n=t.children(".ui-menu"),n.length&&/^mouse/.test(e.type)&&this._startOpening(n),this.activeMenu=t.parent(),this._trigger("focus",e,{item:t})},_scrollIntoView:function(t){var n,r,i,s,o,u;this._hasScroll()&&(n=parseFloat(e.css(this.activeMenu[0],"borderTopWidth"))||0,r=parseFloat(e.css(this.activeMenu[0],"paddingTop"))||0,i=t.offset().top-this.activeMenu.offset().top-n-r,s=this.activeMenu.scrollTop(),o=this.activeMenu.height(),u=t.height(),i<0?this.activeMenu.scrollTop(s+i):i+u>o&&this.activeMenu.scrollTop(s+i-o+u))},blur:function(e,t){t||clearTimeout(this.timer);if(!this.active)return;this.active.children("a").removeClass("ui-state-focus"),this.active=null,this._trigger("blur",e,{item:this.active})},_startOpening:function(e){clearTimeout(this.timer);if(e.attr("aria-hidden")!=="true")return;this.timer=this._delay(function(){this._close(),this._open(e)},this.delay)},_open:function(t){var n=e.extend({of:this.active},this.options.position);clearTimeout(this.timer),this.element.find(".ui-menu").not(t.parents(".ui-menu")).hide().attr("aria-hidden","true"),t.show().removeAttr("aria-hidden").attr("aria-expanded","true").position(n)},collapseAll:function(t,n){clearTimeout(this.timer),this.timer=this._delay(function(){var r=n?this.element:e(t&&t.target).closest(this.element.find(".ui-menu"));r.length||(r=this.element),this._close(r),this.blur(t),this.activeMenu=r},this.delay)},_close:function(e){e||(e=this.active?this.active.parent():this.element),e.find(".ui-menu").hide().attr("aria-hidden","true").attr("aria-expanded","false").end().find("a.ui-state-active").removeClass("ui-state-active")},collapse:function(e){var t=this.active&&this.active.parent().closest(".ui-menu-item",this.element);t&&t.length&&(this._close(),this.focus(e,t))},expand:function(e){var t=this.active&&this.active.children(".ui-menu 
").children(".ui-menu-item").first();t&&t.length&&(this._open(t.parent()),this._delay(function(){this.focus(e,t)}))},next:function(e){this._move("next","first",e)},previous:function(e){this._move("prev","last",e)},isFirstItem:function(){return this.active&&!this.active.prevAll(".ui-menu-item").length},isLastItem:function(){return this.active&&!this.active.nextAll(".ui-menu-item").length},_move:function(e,t,n){var r;this.active&&(e==="first"||e==="last"?r=this.active[e==="first"?"prevAll":"nextAll"](".ui-menu-item").eq(-1):r=this.active[e+"All"](".ui-menu-item").eq(0));if(!r||!r.length||!this.active)r=this.activeMenu.children(".ui-menu-item")[t]();this.focus(n,r)},nextPage:function(t){var n,r,i;if(!this.active){this.next(t);return}if(this.isLastItem())return;this._hasScroll()?(r=this.active.offset().top,i=this.element.height(),this.active.nextAll(".ui-menu-item").each(function(){return n=e(this),n.offset().top-r-i<0}),this.focus(t,n)):this.focus(t,this.activeMenu.children(".ui-menu-item")[this.active?"last":"first"]())},previousPage:function(t){var n,r,i;if(!this.active){this.next(t);return}if(this.isFirstItem())return;this._hasScroll()?(r=this.active.offset().top,i=this.element.height(),this.active.prevAll(".ui-menu-item").each(function(){return n=e(this),n.offset().top-r+i>0}),this.focus(t,n)):this.focus(t,this.activeMenu.children(".ui-menu-item").first())},_hasScroll:function(){return this.element.outerHeight()<this.element.prop("scrollHeight")},select:function(t){this.active=this.active||e(t.target).closest(".ui-menu-item");var n={item:this.active};this.active.has(".ui-menu").length||this.collapseAll(t,!0),this._trigger("select",t,n)}})})(jQuery);(function(e,t){e.widget("ui.progressbar",{version:"1.9.0",options:{value:0,max:100},min:0,_create:function(){this.element.addClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").attr({role:"progressbar","aria-valuemin":this.min,"aria-valuemax":this.options.max,"aria-valuenow":this._value()}),this.valueDiv=e("<div class='ui-progressbar-value ui-widget-header ui-corner-left'></div>").appendTo(this.element),this.oldValue=this._value(),this._refreshValue()},_destroy:function(){this.element.removeClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").removeAttr("role").removeAttr("aria-valuemin").removeAttr("aria-valuemax").removeAttr("aria-valuenow"),this.valueDiv.remove()},value:function(e){return e===t?this._value():(this._setOption("value",e),this)},_setOption:function(e,t){e==="value"&&(this.options.value=t,this._refreshValue(),this._value()===this.options.max&&this._trigger("complete")),this._super(e,t)},_value:function(){var e=this.options.value;return typeof e!="number"&&(e=0),Math.min(this.options.max,Math.max(this.min,e))},_percentage:function(){return 100*this._value()/this.options.max},_refreshValue:function(){var e=this.value(),t=this._percentage();this.oldValue!==e&&(this.oldValue=e,this._trigger("change")),this.valueDiv.toggle(e>this.min).toggleClass("ui-corner-right",e===this.options.max).width(t.toFixed(0)+"%"),this.element.attr("aria-valuenow",e)}})})(jQuery);(function(e,t){e.widget("ui.resizable",e.ui.mouse,{version:"1.9.0",widgetEventPrefix:"resize",options:{alsoResize:!1,animate:!1,animateDuration:"slow",animateEasing:"swing",aspectRatio:!1,autoHide:!1,containment:!1,ghost:!1,grid:!1,handles:"e,s,se",helper:!1,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:1e3},_create:function(){var 
t=this;this._on({tabsbeforeload:function(n,r){if(e.data(r.tab[0],"cache.tabs")){n.preventDefault();return}r.jqXHR.success(function(){t.options.cache&&e.data(r.tab[0],"cache.tabs",!0)})}})},_ajaxSettings:function(t,n,r){var i=this.options.ajaxOptions;return e.extend({},i,{error:function(e,t,n){try{i.error(e,t,r.tab.closest("li").index(),r.tab[0])}catch(n){}}},this._superApply(arguments))},_setOption:function(e,t){e==="cache"&&t===!1&&this.anchors.removeData("cache.tabs"),this._super(e,t)},_destroy:function(){this.anchors.removeData("cache.tabs"),this._super()},url:function(e,t){this.anchors.eq(e).removeData("cache.tabs"),this._superApply(arguments)}}),e.widget("ui.tabs",e.ui.tabs,{abort:function(){this.xhr&&this.xhr.abort()}}),e.widget("ui.tabs",e.ui.tabs,{options:{spinner:"<em>Loading&#8230;</em>"},_create:function(){this._super(),this._on({tabsbeforeload:function(e,t){if(e.target!==this.element[0]||!this.options.spinner)return;var n=t.tab.find("span"),r=n.html();n.html(this.options.spinner),t.jqXHR.complete(function(){n.html(r)})}})}}),e.widget("ui.tabs",e.ui.tabs,{options:{enable:null,disable:null},enable:function(t){var n=this.options,r;if(t&&n.disabled===!0||e.isArray(n.disabled)&&e.inArray(t,n.disabled)!==-1)r=!0;this._superApply(arguments),r&&this._trigger("enable",null,this._ui(this.anchors[t],this.panels[t]))},disable:function(t){var n=this.options,r;if(t&&n.disabled===!1||e.isArray(n.disabled)&&e.inArray(t,n.disabled)===-1)r=!0;this._superApply(arguments),r&&this._trigger("disable",null,this._ui(this.anchors[t],this.panels[t]))}}),e.widget("ui.tabs",e.ui.tabs,{options:{add:null,remove:null,tabTemplate:"<li><a href='#{href}'><span>#{label}</span></a></li>"},add:function(n,r,i){i===t&&(i=this.anchors.length);var s,o,u=this.options,a=e(u.tabTemplate.replace(/#\{href\}/g,n).replace(/#\{label\}/g,r)),f=n.indexOf("#")?this._tabId(a):n.replace("#","");return a.addClass("ui-state-default ui-corner-top").data("ui-tabs-destroy",!0),a.attr("aria-controls",f),s=i>=this.tabs.length,o=this.element.find("#"+f),o.length||(o=this._createPanel(f),s?i>0?o.insertAfter(this.panels.eq(-1)):o.appendTo(this.element):o.insertBefore(this.panels[i])),o.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").hide(),s?a.appendTo(this.tablist):a.insertBefore(this.tabs[i]),u.disabled=e.map(u.disabled,function(e){return e>=i?++e:e}),this.refresh(),this.tabs.length===1&&u.active===!1&&this.option("active",0),this._trigger("add",null,this._ui(this.anchors[i],this.panels[i])),this},remove:function(t){t=this._getIndex(t);var n=this.options,r=this.tabs.eq(t).remove(),i=this._getPanelForTab(r).remove();return r.hasClass("ui-tabs-active")&&this.anchors.length>2&&this._activate(t+(t+1<this.anchors.length?1:-1)),n.disabled=e.map(e.grep(n.disabled,function(e){return e!==t}),function(e){return e>=t?--e:e}),this.refresh(),this._trigger("remove",null,this._ui(r.find("a")[0],i[0])),this}}),e.widget("ui.tabs",e.ui.tabs,{length:function(){return this.anchors.length}}),e.widget("ui.tabs",e.ui.tabs,{options:{idPrefix:"ui-tabs-"},_tabId:function(t){var n=t.is("li")?t.find("a[href]"):t;return n=n[0],e(n).closest("li").attr("aria-controls")||n.title&&n.title.replace(/\s/g,"_").replace(/[^\w\u00c0-\uFFFF\-]/g,"")||this.options.idPrefix+i()}}),e.widget("ui.tabs",e.ui.tabs,{options:{panelTemplate:"<div></div>"},_createPanel:function(t){return e(this.options.panelTemplate).attr("id",t).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").data("ui-tabs-destroy",!0)}}),e.widget("ui.tabs",e.ui.tabs,{_create:function(){var 
e=this.options;e.active===null&&e.selected!==t&&(e.active=e.selected===-1?!1:e.selected),this._super(),e.selected=e.active,e.selected===!1&&(e.selected=-1)},_setOption:function(e,t){if(e!=="selected")return this._super(e,t);var n=this.options;this._super("active",t===-1?!1:t),n.selected=n.active,n.selected===!1&&(n.selected=-1)},_eventHandler:function(e){this._superApply(arguments),this.options.selected=this.options.active,this.options.selected===!1&&(this.options.selected=-1)}}),e.widget("ui.tabs",e.ui.tabs,{options:{show:null,select:null},_create:function(){this._super(),this.options.active!==!1&&this._trigger("show",null,this._ui(this.active.find(".ui-tabs-anchor")[0],this._getPanelForTab(this.active)[0]))},_trigger:function(e,t,n){var r=this._superApply(arguments);return r?(e==="beforeActivate"&&n.newTab.length?r=this._super("select",t,{tab:n.newTab.find(".ui-tabs-anchor")[0],panel:n.newPanel[0],index:n.newTab.closest("li").index()}):e==="activate"&&n.newTab.length&&(r=this._super("show",t,{tab:n.newTab.find(".ui-tabs-anchor")[0],panel:n.newPanel[0],index:n.newTab.closest("li").index()})),r):!1}}),e.widget("ui.tabs",e.ui.tabs,{select:function(e){e=this._getIndex(e);if(e===-1){if(!this.options.collapsible||this.options.selected===-1)return;e=this.options.selected}this.anchors.eq(e).trigger(this.options.event+this.eventNamespace)}}),function(){var t=0;e.widget("ui.tabs",e.ui.tabs,{options:{cookie:null},_create:function(){var e=this.options,t;e.active==null&&e.cookie&&(t=parseInt(this._cookie(),10),t===-1&&(t=!1),e.active=t),this._super()},_cookie:function(n){var r=[this.cookie||(this.cookie=this.options.cookie.name||"ui-tabs-"+ ++t)];return arguments.length&&(r.push(n===!1?-1:n),r.push(this.options.cookie)),e.cookie.apply(null,r)},_refresh:function(){this._super(),this.options.cookie&&this._cookie(this.options.active,this.options.cookie)},_eventHandler:function(e){this._superApply(arguments),this.options.cookie&&this._cookie(this.options.active,this.options.cookie)},_destroy:function(){this._super(),this.options.cookie&&this._cookie(null,this.options.cookie)}})}(),e.widget("ui.tabs",e.ui.tabs,{_trigger:function(t,n,r){var i=e.extend({},r);return t==="load"&&(i.panel=i.panel[0],i.tab=i.tab.find(".ui-tabs-anchor")[0]),this._super(t,n,i)}}),e.widget("ui.tabs",e.ui.tabs,{options:{fx:null},_getFx:function(){var t,n,r=this.options.fx;return r&&(e.isArray(r)?(t=r[0],n=r[1]):t=n=r),r?{show:n,hide:t}:null},_toggle:function(e,t){function o(){n.running=!1,n._trigger("activate",e,t)}function u(){t.newTab.closest("li").addClass("ui-tabs-active ui-state-active"),r.length&&s.show?r.animate(s.show,s.show.duration,function(){o()}):(r.show(),o())}var n=this,r=t.newPanel,i=t.oldPanel,s=this._getFx();if(!s)return this._super(e,t);n.running=!0,i.length&&s.hide?i.animate(s.hide,s.hide.duration,function(){t.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),u()}):(t.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),i.hide(),u())}}))})(jQuery);(function(e){function n(t,n){var r=(t.attr("aria-describedby")||"").split(/\s+/);r.push(n),t.data("ui-tooltip-id",n).attr("aria-describedby",e.trim(r.join(" ")))}function r(t){var n=t.data("ui-tooltip-id"),r=(t.attr("aria-describedby")||"").split(/\s+/),i=e.inArray(n,r);i!==-1&&r.splice(i,1),t.removeData("ui-tooltip-id"),r=e.trim(r.join(" ")),r?t.attr("aria-describedby",r):t.removeAttr("aria-describedby")}var t=0;e.widget("ui.tooltip",{version:"1.9.0",options:{content:function(){return 
e(this).attr("title")},hide:!0,items:"[title]",position:{my:"left+15 center",at:"right center",collision:"flipfit flipfit"},show:!0,tooltipClass:null,track:!1,close:null,open:null},_create:function(){this._on({mouseover:"open",focusin:"open"}),this.tooltips={}},_setOption:function(t,n){var r=this;if(t==="disabled"){this[n?"_disable":"_enable"](),this.options[t]=n;return}this._super(t,n),t==="content"&&e.each(this.tooltips,function(e,t){r._updateContent(t)})},_disable:function(){var t=this;e.each(this.tooltips,function(n,r){var i=e.Event("blur");i.target=i.currentTarget=r[0],t.close(i,!0)}),this.element.find(this.options.items).andSelf().each(function(){var t=e(this);t.is("[title]")&&t.data("ui-tooltip-title",t.attr("title")).attr("title","")})},_enable:function(){this.element.find(this.options.items).andSelf().each(function(){var t=e(this);t.data("ui-tooltip-title")&&t.attr("title",t.data("ui-tooltip-title"))})},open:function(t){var n=e(t?t.target:this.element).closest(this.options.items);if(!n.length)return;if(this.options.track&&n.data("ui-tooltip-id")){this._find(n).position(e.extend({of:n},this.options.position)),this._off(this.document,"mousemove");return}n.attr("title")&&n.data("ui-tooltip-title",n.attr("title")),n.data("tooltip-open",!0),this._updateContent(n,t)},_updateContent:function(e,t){var n,r=this.options.content,i=this;if(typeof r=="string")return this._open(t,e,r);n=r.call(e[0],function(n){if(!e.data("tooltip-open"))return;i._delay(function(){this._open(t,e,n)})}),n&&this._open(t,e,n)},_open:function(t,r,i){function u(e){o.of=e,s.position(o)}var s,o;if(!i)return;s=this._find(r);if(s.length){s.find(".ui-tooltip-content").html(i);return}r.is("[title]")&&(t&&t.type==="mouseover"?r.attr("title",""):r.removeAttr("title")),s=this._tooltip(r),n(r,s.attr("id")),s.find(".ui-tooltip-content").html(i),this.options.track&&t&&/^mouse/.test(t.originalEvent.type)?(o=e.extend({},this.options.position),this._on(this.document,{mousemove:u}),u(t)):s.position(e.extend({of:r},this.options.position)),s.hide(),this._show(s,this.options.show),this._trigger("open",t,{tooltip:s}),this._on(r,{mouseleave:"close",focusout:"close",keyup:function(t){if(t.keyCode===e.ui.keyCode.ESCAPE){var n=e.Event(t);n.currentTarget=r[0],this.close(n,!0)}}})},close:function(t,n){var i=this,s=e(t?t.currentTarget:this.element),o=this._find(s);if(this.closing)return;if(!n&&t&&t.type!=="focusout"&&this.document[0].activeElement===s[0])return;s.data("ui-tooltip-title")&&s.attr("title",s.data("ui-tooltip-title")),r(s),o.stop(!0),this._hide(o,this.options.hide,function(){e(this).remove(),delete i.tooltips[this.id]}),s.removeData("tooltip-open"),this._off(s,"mouseleave focusout keyup"),this._off(this.document,"mousemove"),this.closing=!0,this._trigger("close",t,{tooltip:o}),this.closing=!1},_tooltip:function(n){var r="ui-tooltip-"+t++,i=e("<div>").attr({id:r,role:"tooltip"}).addClass("ui-tooltip ui-widget ui-corner-all ui-widget-content "+(this.options.tooltipClass||""));return e("<div>").addClass("ui-tooltip-content").appendTo(i),i.appendTo(this.document[0].body),e.fn.bgiframe&&i.bgiframe(),this.tooltips[r]=n,i},_find:function(t){var n=t.data("ui-tooltip-id");return n?e("#"+n):e()},_destroy:function(){var t=this;e.each(this.tooltips,function(n,r){var i=e.Event("blur");i.target=i.currentTarget=r[0],t.close(i,!0),e("#"+n).remove(),r.data("ui-tooltip-title")&&(r.attr("title",r.data("ui-tooltip-title")),r.removeData("ui-tooltip-title"))})}})})(jQuery); \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
deleted file mode 100644
index bc3fbc81b2..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
+++ /dev/null
@@ -1,2 +0,0 @@
-/*! jQuery v1.8.2 jquery.com | jquery.org/license */
-(function(a,b){function G(a){var b=F[a]={};return p.each(a.split(s),function(a,c){b[c]=!0}),b}function J(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(I,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:+d+""===d?+d:H.test(d)?p.parseJSON(d):d}catch(f){}p.data(a,c,d)}else d=b}return d}function K(a){var b;for(b in a){if(b==="data"&&p.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function ba(){return!1}function bb(){return!0}function bh(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function bi(a,b){do a=a[b];while(a&&a.nodeType!==1);return a}function bj(a,b,c){b=b||0;if(p.isFunction(b))return p.grep(a,function(a,d){var e=!!b.call(a,d,a);return e===c});if(b.nodeType)return p.grep(a,function(a,d){return a===b===c});if(typeof b=="string"){var d=p.grep(a,function(a){return a.nodeType===1});if(be.test(b))return p.filter(b,d,!c);b=p.filter(b,d)}return p.grep(a,function(a,d){return p.inArray(a,b)>=0===c})}function bk(a){var b=bl.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return c}function bC(a,b){return a.getElementsByTagName(b)[0]||a.appendChild(a.ownerDocument.createElement(b))}function bD(a,b){if(b.nodeType!==1||!p.hasData(a))return;var c,d,e,f=p._data(a),g=p._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;d<e;d++)p.event.add(b,c,h[c][d])}g.data&&(g.data=p.extend({},g.data))}function bE(a,b){var c;if(b.nodeType!==1)return;b.clearAttributes&&b.clearAttributes(),b.mergeAttributes&&b.mergeAttributes(a),c=b.nodeName.toLowerCase(),c==="object"?(b.parentNode&&(b.outerHTML=a.outerHTML),p.support.html5Clone&&a.innerHTML&&!p.trim(b.innerHTML)&&(b.innerHTML=a.innerHTML)):c==="input"&&bv.test(a.type)?(b.defaultChecked=b.checked=a.checked,b.value!==a.value&&(b.value=a.value)):c==="option"?b.selected=a.defaultSelected:c==="input"||c==="textarea"?b.defaultValue=a.defaultValue:c==="script"&&b.text!==a.text&&(b.text=a.text),b.removeAttribute(p.expando)}function bF(a){return typeof a.getElementsByTagName!="undefined"?a.getElementsByTagName("*"):typeof a.querySelectorAll!="undefined"?a.querySelectorAll("*"):[]}function bG(a){bv.test(a.type)&&(a.defaultChecked=a.checked)}function bY(a,b){if(b in a)return b;var c=b.charAt(0).toUpperCase()+b.slice(1),d=b,e=bW.length;while(e--){b=bW[e]+c;if(b in a)return b}return d}function bZ(a,b){return a=b||a,p.css(a,"display")==="none"||!p.contains(a.ownerDocument,a)}function b$(a,b){var c,d,e=[],f=0,g=a.length;for(;f<g;f++){c=a[f];if(!c.style)continue;e[f]=p._data(c,"olddisplay"),b?(!e[f]&&c.style.display==="none"&&(c.style.display=""),c.style.display===""&&bZ(c)&&(e[f]=p._data(c,"olddisplay",cc(c.nodeName)))):(d=bH(c,"display"),!e[f]&&d!=="none"&&p._data(c,"olddisplay",d))}for(f=0;f<g;f++){c=a[f];if(!c.style)continue;if(!b||c.style.display==="none"||c.style.display==="")c.style.display=b?e[f]||"":"none"}return a}function b_(a,b,c){var d=bP.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function ca(a,b,c,d){var e=c===(d?"border":"content")?4:b==="width"?1:0,f=0;for(;e<4;e+=2)c==="margin"&&(f+=p.css(a,c+bV[e],!0)),d?(c==="content"&&(f-=parseFloat(bH(a,"padding"+bV[e]))||0),c!=="margin"&&(f-=parseFloat(bH(a,"border"+bV[e]+"Width"))||0)):(f+=parseFloat(bH(a,"padding"+bV[e]))||0,c!=="padding"&&(f+=parseFloat(bH(a,"border"+bV[e]+"Width"))||0));return f}function cb(a,b,c){var 
d=b==="width"?a.offsetWidth:a.offsetHeight,e=!0,f=p.support.boxSizing&&p.css(a,"boxSizing")==="border-box";if(d<=0||d==null){d=bH(a,b);if(d<0||d==null)d=a.style[b];if(bQ.test(d))return d;e=f&&(p.support.boxSizingReliable||d===a.style[b]),d=parseFloat(d)||0}return d+ca(a,b,c||(f?"border":"content"),e)+"px"}function cc(a){if(bS[a])return bS[a];var b=p("<"+a+">").appendTo(e.body),c=b.css("display");b.remove();if(c==="none"||c===""){bI=e.body.appendChild(bI||p.extend(e.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!bJ||!bI.createElement)bJ=(bI.contentWindow||bI.contentDocument).document,bJ.write("<!doctype html><html><body>"),bJ.close();b=bJ.body.appendChild(bJ.createElement(a)),c=bH(b,"display"),e.body.removeChild(bI)}return bS[a]=c,c}function ci(a,b,c,d){var e;if(p.isArray(b))p.each(b,function(b,e){c||ce.test(a)?d(a,e):ci(a+"["+(typeof e=="object"?b:"")+"]",e,c,d)});else if(!c&&p.type(b)==="object")for(e in b)ci(a+"["+e+"]",b[e],c,d);else d(a,b)}function cz(a){return function(b,c){typeof b!="string"&&(c=b,b="*");var d,e,f,g=b.toLowerCase().split(s),h=0,i=g.length;if(p.isFunction(c))for(;h<i;h++)d=g[h],f=/^\+/.test(d),f&&(d=d.substr(1)||"*"),e=a[d]=a[d]||[],e[f?"unshift":"push"](c)}}function cA(a,c,d,e,f,g){f=f||c.dataTypes[0],g=g||{},g[f]=!0;var h,i=a[f],j=0,k=i?i.length:0,l=a===cv;for(;j<k&&(l||!h);j++)h=i[j](c,d,e),typeof h=="string"&&(!l||g[h]?h=b:(c.dataTypes.unshift(h),h=cA(a,c,d,e,h,g)));return(l||!h)&&!g["*"]&&(h=cA(a,c,d,e,"*",g)),h}function cB(a,c){var d,e,f=p.ajaxSettings.flatOptions||{};for(d in c)c[d]!==b&&((f[d]?a:e||(e={}))[d]=c[d]);e&&p.extend(!0,a,e)}function cC(a,c,d){var e,f,g,h,i=a.contents,j=a.dataTypes,k=a.responseFields;for(f in k)f in d&&(c[k[f]]=d[f]);while(j[0]==="*")j.shift(),e===b&&(e=a.mimeType||c.getResponseHeader("content-type"));if(e)for(f in i)if(i[f]&&i[f].test(e)){j.unshift(f);break}if(j[0]in d)g=j[0];else{for(f in d){if(!j[0]||a.converters[f+" "+j[0]]){g=f;break}h||(h=f)}g=g||h}if(g)return g!==j[0]&&j.unshift(g),d[g]}function cD(a,b){var c,d,e,f,g=a.dataTypes.slice(),h=g[0],i={},j=0;a.dataFilter&&(b=a.dataFilter(b,a.dataType));if(g[1])for(c in a.converters)i[c.toLowerCase()]=a.converters[c];for(;e=g[++j];)if(e!=="*"){if(h!=="*"&&h!==e){c=i[h+" "+e]||i["* "+e];if(!c)for(d in i){f=d.split(" ");if(f[1]===e){c=i[h+" "+f[0]]||i["* "+f[0]];if(c){c===!0?c=i[d]:i[d]!==!0&&(e=f[0],g.splice(j--,0,e));break}}}if(c!==!0)if(c&&a["throws"])b=c(b);else try{b=c(b)}catch(k){return{state:"parsererror",error:c?k:"No conversion from "+h+" to "+e}}}h=e}return{state:"success",data:b}}function cL(){try{return new a.XMLHttpRequest}catch(b){}}function cM(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}function cU(){return setTimeout(function(){cN=b},0),cN=p.now()}function cV(a,b){p.each(b,function(b,c){var d=(cT[b]||[]).concat(cT["*"]),e=0,f=d.length;for(;e<f;e++)if(d[e].call(a,b,c))return})}function cW(a,b,c){var d,e=0,f=0,g=cS.length,h=p.Deferred().always(function(){delete i.elem}),i=function(){var b=cN||cU(),c=Math.max(0,j.startTime+j.duration-b),d=1-(c/j.duration||0),e=0,f=j.tweens.length;for(;e<f;e++)j.tweens[e].run(d);return h.notifyWith(a,[j,d,c]),d<1&&f?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:p.extend({},b),opts:p.extend(!0,{specialEasing:{}},c),originalProperties:b,originalOptions:c,startTime:cN||cU(),duration:c.duration,tweens:[],createTween:function(b,c,d){var e=p.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(e),e},stop:function(b){var 
c=0,d=b?j.tweens.length:0;for(;c<d;c++)j.tweens[c].run(1);return b?h.resolveWith(a,[j,b]):h.rejectWith(a,[j,b]),this}}),k=j.props;cX(k,j.opts.specialEasing);for(;e<g;e++){d=cS[e].call(j,a,k,j.opts);if(d)return d}return cV(j,k),p.isFunction(j.opts.start)&&j.opts.start.call(a,j),p.fx.timer(p.extend(i,{anim:j,queue:j.opts.queue,elem:a})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}function cX(a,b){var c,d,e,f,g;for(c in a){d=p.camelCase(c),e=b[d],f=a[c],p.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete a[c]),g=p.cssHooks[d];if(g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}}function cY(a,b,c){var d,e,f,g,h,i,j,k,l=this,m=a.style,n={},o=[],q=a.nodeType&&bZ(a);c.queue||(j=p._queueHooks(a,"fx"),j.unqueued==null&&(j.unqueued=0,k=j.empty.fire,j.empty.fire=function(){j.unqueued||k()}),j.unqueued++,l.always(function(){l.always(function(){j.unqueued--,p.queue(a,"fx").length||j.empty.fire()})})),a.nodeType===1&&("height"in b||"width"in b)&&(c.overflow=[m.overflow,m.overflowX,m.overflowY],p.css(a,"display")==="inline"&&p.css(a,"float")==="none"&&(!p.support.inlineBlockNeedsLayout||cc(a.nodeName)==="inline"?m.display="inline-block":m.zoom=1)),c.overflow&&(m.overflow="hidden",p.support.shrinkWrapBlocks||l.done(function(){m.overflow=c.overflow[0],m.overflowX=c.overflow[1],m.overflowY=c.overflow[2]}));for(d in b){f=b[d];if(cP.exec(f)){delete b[d];if(f===(q?"hide":"show"))continue;o.push(d)}}g=o.length;if(g){h=p._data(a,"fxshow")||p._data(a,"fxshow",{}),q?p(a).show():l.done(function(){p(a).hide()}),l.done(function(){var b;p.removeData(a,"fxshow",!0);for(b in n)p.style(a,b,n[b])});for(d=0;d<g;d++)e=o[d],i=l.createTween(e,q?h[e]:0),n[e]=h[e]||p.style(a,e),e in h||(h[e]=i.start,q&&(i.end=i.start,i.start=e==="width"||e==="height"?1:0))}}function cZ(a,b,c,d,e){return new cZ.prototype.init(a,b,c,d,e)}function c$(a,b){var c,d={height:a},e=0;b=b?1:0;for(;e<4;e+=2-b)c=bV[e],d["margin"+c]=d["padding"+c]=a;return b&&(d.opacity=d.width=a),d}function da(a){return p.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:!1}var c,d,e=a.document,f=a.location,g=a.navigator,h=a.jQuery,i=a.$,j=Array.prototype.push,k=Array.prototype.slice,l=Array.prototype.indexOf,m=Object.prototype.toString,n=Object.prototype.hasOwnProperty,o=String.prototype.trim,p=function(a,b){return new p.fn.init(a,b,c)},q=/[\-+]?(?:\d*\.|)\d+(?:[eE][\-+]?\d+|)/.source,r=/\S/,s=/\s+/,t=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,u=/^(?:[^#<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/,v=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,w=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,y=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,z=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,A=/^-ms-/,B=/-([\da-z])/gi,C=function(a,b){return(b+"").toUpperCase()},D=function(){e.addEventListener?(e.removeEventListener("DOMContentLoaded",D,!1),p.ready()):e.readyState==="complete"&&(e.detachEvent("onreadystatechange",D),p.ready())},E={};p.fn=p.prototype={constructor:p,init:function(a,c,d){var f,g,h,i;if(!a)return this;if(a.nodeType)return this.context=this[0]=a,this.length=1,this;if(typeof a=="string"){a.charAt(0)==="<"&&a.charAt(a.length-1)===">"&&a.length>=3?f=[null,a,null]:f=u.exec(a);if(f&&(f[1]||!c)){if(f[1])return c=c instanceof p?c[0]:c,i=c&&c.nodeType?c.ownerDocument||c:e,a=p.parseHTML(f[1],i,!0),v.test(f[1])&&p.isPlainObject(c)&&this.attr.call(a,c,!0),p.merge(this,a);g=e.getElementById(f[2]);if(g&&g.parentNode){if(g.id!==f[2])return d.find(a);this.length=1,this[0]=g}return 
this.context=e,this.selector=a,this}return!c||c.jquery?(c||d).find(a):this.constructor(c).find(a)}return p.isFunction(a)?d.ready(a):(a.selector!==b&&(this.selector=a.selector,this.context=a.context),p.makeArray(a,this))},selector:"",jquery:"1.8.2",length:0,size:function(){return this.length},toArray:function(){return k.call(this)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var d=p.merge(this.constructor(),a);return d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")"),d},each:function(a,b){return p.each(this,a,b)},ready:function(a){return p.ready.promise().done(a),this},eq:function(a){return a=+a,a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(k.apply(this,arguments),"slice",k.call(arguments).join(","))},map:function(a){return this.pushStack(p.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:j,sort:[].sort,splice:[].splice},p.fn.init.prototype=p.fn,p.extend=p.fn.extend=function(){var a,c,d,e,f,g,h=arguments[0]||{},i=1,j=arguments.length,k=!1;typeof h=="boolean"&&(k=h,h=arguments[1]||{},i=2),typeof h!="object"&&!p.isFunction(h)&&(h={}),j===i&&(h=this,--i);for(;i<j;i++)if((a=arguments[i])!=null)for(c in a){d=h[c],e=a[c];if(h===e)continue;k&&e&&(p.isPlainObject(e)||(f=p.isArray(e)))?(f?(f=!1,g=d&&p.isArray(d)?d:[]):g=d&&p.isPlainObject(d)?d:{},h[c]=p.extend(k,g,e)):e!==b&&(h[c]=e)}return h},p.extend({noConflict:function(b){return a.$===p&&(a.$=i),b&&a.jQuery===p&&(a.jQuery=h),p},isReady:!1,readyWait:1,holdReady:function(a){a?p.readyWait++:p.ready(!0)},ready:function(a){if(a===!0?--p.readyWait:p.isReady)return;if(!e.body)return setTimeout(p.ready,1);p.isReady=!0;if(a!==!0&&--p.readyWait>0)return;d.resolveWith(e,[p]),p.fn.trigger&&p(e).trigger("ready").off("ready")},isFunction:function(a){return p.type(a)==="function"},isArray:Array.isArray||function(a){return p.type(a)==="array"},isWindow:function(a){return a!=null&&a==a.window},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):E[m.call(a)]||"object"},isPlainObject:function(a){if(!a||p.type(a)!=="object"||a.nodeType||p.isWindow(a))return!1;try{if(a.constructor&&!n.call(a,"constructor")&&!n.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return d===b||n.call(a,d)},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},error:function(a){throw new Error(a)},parseHTML:function(a,b,c){var d;return!a||typeof a!="string"?null:(typeof b=="boolean"&&(c=b,b=0),b=b||e,(d=v.exec(a))?[b.createElement(d[1])]:(d=p.buildFragment([a],b,c?null:[]),p.merge([],(d.cacheable?p.clone(d.fragment):d.fragment).childNodes)))},parseJSON:function(b){if(!b||typeof b!="string")return null;b=p.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(w.test(b.replace(y,"@").replace(z,"]").replace(x,"")))return(new Function("return "+b))();p.error("Invalid JSON: "+b)},parseXML:function(c){var d,e;if(!c||typeof c!="string")return null;try{a.DOMParser?(e=new DOMParser,d=e.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(f){d=b}return(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&p.error("Invalid XML: 
"+c),d},noop:function(){},globalEval:function(b){b&&r.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(A,"ms-").replace(B,C)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,c,d){var e,f=0,g=a.length,h=g===b||p.isFunction(a);if(d){if(h){for(e in a)if(c.apply(a[e],d)===!1)break}else for(;f<g;)if(c.apply(a[f++],d)===!1)break}else if(h){for(e in a)if(c.call(a[e],e,a[e])===!1)break}else for(;f<g;)if(c.call(a[f],f,a[f++])===!1)break;return a},trim:o&&!o.call("ļ»æĀ ")?function(a){return a==null?"":o.call(a)}:function(a){return a==null?"":(a+"").replace(t,"")},makeArray:function(a,b){var c,d=b||[];return a!=null&&(c=p.type(a),a.length==null||c==="string"||c==="function"||c==="regexp"||p.isWindow(a)?j.call(d,a):p.merge(d,a)),d},inArray:function(a,b,c){var d;if(b){if(l)return l.call(b,a,c);d=b.length,c=c?c<0?Math.max(0,d+c):c:0;for(;c<d;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,c){var d=c.length,e=a.length,f=0;if(typeof d=="number")for(;f<d;f++)a[e++]=c[f];else while(c[f]!==b)a[e++]=c[f++];return a.length=e,a},grep:function(a,b,c){var d,e=[],f=0,g=a.length;c=!!c;for(;f<g;f++)d=!!b(a[f],f),c!==d&&e.push(a[f]);return e},map:function(a,c,d){var e,f,g=[],h=0,i=a.length,j=a instanceof p||i!==b&&typeof i=="number"&&(i>0&&a[0]&&a[i-1]||i===0||p.isArray(a));if(j)for(;h<i;h++)e=c(a[h],h,d),e!=null&&(g[g.length]=e);else for(f in a)e=c(a[f],f,d),e!=null&&(g[g.length]=e);return g.concat.apply([],g)},guid:1,proxy:function(a,c){var d,e,f;return typeof c=="string"&&(d=a[c],c=a,a=d),p.isFunction(a)?(e=k.call(arguments,2),f=function(){return a.apply(c,e.concat(k.call(arguments)))},f.guid=a.guid=a.guid||p.guid++,f):b},access:function(a,c,d,e,f,g,h){var i,j=d==null,k=0,l=a.length;if(d&&typeof d=="object"){for(k in d)p.access(a,c,k,d[k],1,g,e);f=1}else if(e!==b){i=h===b&&p.isFunction(e),j&&(i?(i=c,c=function(a,b,c){return i.call(p(a),c)}):(c.call(a,e),c=null));if(c)for(;k<l;k++)c(a[k],d,i?e.call(a[k],k,c(a[k],d)):e,h);f=1}return f?a:j?c.call(a):l?c(a[0],d):g},now:function(){return(new Date).getTime()}}),p.ready.promise=function(b){if(!d){d=p.Deferred();if(e.readyState==="complete")setTimeout(p.ready,1);else if(e.addEventListener)e.addEventListener("DOMContentLoaded",D,!1),a.addEventListener("load",p.ready,!1);else{e.attachEvent("onreadystatechange",D),a.attachEvent("onload",p.ready);var c=!1;try{c=a.frameElement==null&&e.documentElement}catch(f){}c&&c.doScroll&&function g(){if(!p.isReady){try{c.doScroll("left")}catch(a){return setTimeout(g,50)}p.ready()}}()}}return d.promise(b)},p.each("Boolean Number String Function Array Date RegExp Object".split(" "),function(a,b){E["[object "+b+"]"]=b.toLowerCase()}),c=p(e);var F={};p.Callbacks=function(a){a=typeof a=="string"?F[a]||G(a):p.extend({},a);var c,d,e,f,g,h,i=[],j=!a.once&&[],k=function(b){c=a.memory&&b,d=!0,h=f||0,f=0,g=i.length,e=!0;for(;i&&h<g;h++)if(i[h].apply(b[0],b[1])===!1&&a.stopOnFalse){c=!1;break}e=!1,i&&(j?j.length&&k(j.shift()):c?i=[]:l.disable())},l={add:function(){if(i){var b=i.length;(function d(b){p.each(b,function(b,c){var e=p.type(c);e==="function"&&(!a.unique||!l.has(c))?i.push(c):c&&c.length&&e!=="string"&&d(c)})})(arguments),e?g=i.length:c&&(f=b,k(c))}return this},remove:function(){return i&&p.each(arguments,function(a,b){var c;while((c=p.inArray(b,i,c))>-1)i.splice(c,1),e&&(c<=g&&g--,c<=h&&h--)}),this},has:function(a){return p.inArray(a,i)>-1},empty:function(){return i=[],this},disable:function(){return 
i=j=c=b,this},disabled:function(){return!i},lock:function(){return j=b,c||l.disable(),this},locked:function(){return!j},fireWith:function(a,b){return b=b||[],b=[a,b.slice?b.slice():b],i&&(!d||j)&&(e?j.push(b):k(b)),this},fire:function(){return l.fireWith(this,arguments),this},fired:function(){return!!d}};return l},p.extend({Deferred:function(a){var b=[["resolve","done",p.Callbacks("once memory"),"resolved"],["reject","fail",p.Callbacks("once memory"),"rejected"],["notify","progress",p.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return p.Deferred(function(c){p.each(b,function(b,d){var f=d[0],g=a[b];e[d[1]](p.isFunction(g)?function(){var a=g.apply(this,arguments);a&&p.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f+"With"](this===e?c:this,[a])}:c[f])}),a=null}).promise()},promise:function(a){return a!=null?p.extend(a,d):d}},e={};return d.pipe=d.then,p.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[a^1][2].disable,b[2][2].lock),e[f[0]]=g.fire,e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=k.call(arguments),d=c.length,e=d!==1||a&&p.isFunction(a.promise)?d:0,f=e===1?a:p.Deferred(),g=function(a,b,c){return function(d){b[a]=this,c[a]=arguments.length>1?k.call(arguments):d,c===h?f.notifyWith(b,c):--e||f.resolveWith(b,c)}},h,i,j;if(d>1){h=new Array(d),i=new Array(d),j=new Array(d);for(;b<d;b++)c[b]&&p.isFunction(c[b].promise)?c[b].promise().done(g(b,j,c)).fail(f.reject).progress(g(b,i,h)):--e}return e||f.resolveWith(j,c),f.promise()}}),p.support=function(){var b,c,d,f,g,h,i,j,k,l,m,n=e.createElement("div");n.setAttribute("className","t"),n.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",c=n.getElementsByTagName("*"),d=n.getElementsByTagName("a")[0],d.style.cssText="top:1px;float:left;opacity:.5";if(!c||!c.length)return{};f=e.createElement("select"),g=f.appendChild(e.createElement("option")),h=n.getElementsByTagName("input")[0],b={leadingWhitespace:n.firstChild.nodeType===3,tbody:!n.getElementsByTagName("tbody").length,htmlSerialize:!!n.getElementsByTagName("link").length,style:/top/.test(d.getAttribute("style")),hrefNormalized:d.getAttribute("href")==="/a",opacity:/^0.5/.test(d.style.opacity),cssFloat:!!d.style.cssFloat,checkOn:h.value==="on",optSelected:g.selected,getSetAttribute:n.className!=="t",enctype:!!e.createElement("form").enctype,html5Clone:e.createElement("nav").cloneNode(!0).outerHTML!=="<:nav></:nav>",boxModel:e.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},h.checked=!0,b.noCloneChecked=h.cloneNode(!0).checked,f.disabled=!0,b.optDisabled=!g.disabled;try{delete n.test}catch(o){b.deleteExpando=!1}!n.addEventListener&&n.attachEvent&&n.fireEvent&&(n.attachEvent("onclick",m=function(){b.noCloneEvent=!1}),n.cloneNode(!0).fireEvent("onclick"),n.detachEvent("onclick",m)),h=e.createElement("input"),h.value="t",h.setAttribute("type","radio"),b.radioValue=h.value==="t",h.setAttribute("checked","checked"),h.setAttribute("name","t"),n.appendChild(h),i=e.createDocumentFragment(),i.appendChild(n.lastChild),b.checkClone=i.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=h.checked,i.removeChild(h),i.appendChild(n);if(n.attachEvent)for(k 
in{submit:!0,change:!0,focusin:!0})j="on"+k,l=j in n,l||(n.setAttribute(j,"return;"),l=typeof n[j]=="function"),b[k+"Bubbles"]=l;return p(function(){var c,d,f,g,h="padding:0;margin:0;border:0;display:block;overflow:hidden;",i=e.getElementsByTagName("body")[0];if(!i)return;c=e.createElement("div"),c.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",i.insertBefore(c,i.firstChild),d=e.createElement("div"),c.appendChild(d),d.innerHTML="<table><tr><td></td><td>t</td></tr></table>",f=d.getElementsByTagName("td"),f[0].style.cssText="padding:0;margin:0;border:0;display:none",l=f[0].offsetHeight===0,f[0].style.display="",f[1].style.display="none",b.reliableHiddenOffsets=l&&f[0].offsetHeight===0,d.innerHTML="",d.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",b.boxSizing=d.offsetWidth===4,b.doesNotIncludeMarginInBodyOffset=i.offsetTop!==1,a.getComputedStyle&&(b.pixelPosition=(a.getComputedStyle(d,null)||{}).top!=="1%",b.boxSizingReliable=(a.getComputedStyle(d,null)||{width:"4px"}).width==="4px",g=e.createElement("div"),g.style.cssText=d.style.cssText=h,g.style.marginRight=g.style.width="0",d.style.width="1px",d.appendChild(g),b.reliableMarginRight=!parseFloat((a.getComputedStyle(g,null)||{}).marginRight)),typeof d.style.zoom!="undefined"&&(d.innerHTML="",d.style.cssText=h+"width:1px;padding:1px;display:inline;zoom:1",b.inlineBlockNeedsLayout=d.offsetWidth===3,d.style.display="block",d.style.overflow="visible",d.innerHTML="<div></div>",d.firstChild.style.width="5px",b.shrinkWrapBlocks=d.offsetWidth!==3,c.style.zoom=1),i.removeChild(c),c=d=f=g=null}),i.removeChild(n),c=d=f=g=h=i=n=null,b}();var H=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,I=/([A-Z])/g;p.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(p.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){return a=a.nodeType?p.cache[a[p.expando]]:a[p.expando],!!a&&!K(a)},data:function(a,c,d,e){if(!p.acceptData(a))return;var f,g,h=p.expando,i=typeof c=="string",j=a.nodeType,k=j?p.cache:a,l=j?a[h]:a[h]&&h;if((!l||!k[l]||!e&&!k[l].data)&&i&&d===b)return;l||(j?a[h]=l=p.deletedIds.pop()||p.guid++:l=h),k[l]||(k[l]={},j||(k[l].toJSON=p.noop));if(typeof c=="object"||typeof c=="function")e?k[l]=p.extend(k[l],c):k[l].data=p.extend(k[l].data,c);return f=k[l],e||(f.data||(f.data={}),f=f.data),d!==b&&(f[p.camelCase(c)]=d),i?(g=f[c],g==null&&(g=f[p.camelCase(c)])):g=f,g},removeData:function(a,b,c){if(!p.acceptData(a))return;var d,e,f,g=a.nodeType,h=g?p.cache:a,i=g?a[p.expando]:p.expando;if(!h[i])return;if(b){d=c?h[i]:h[i].data;if(d){p.isArray(b)||(b in d?b=[b]:(b=p.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,f=b.length;e<f;e++)delete d[b[e]];if(!(c?K:p.isEmptyObject)(d))return}}if(!c){delete h[i].data;if(!K(h[i]))return}g?p.cleanData([a],!0):p.support.deleteExpando||h!=h.window?delete h[i]:h[i]=null},_data:function(a,b,c){return p.data(a,b,c,!0)},acceptData:function(a){var b=a.nodeName&&p.noData[a.nodeName.toLowerCase()];return!b||b!==!0&&a.getAttribute("classid")===b}}),p.fn.extend({data:function(a,c){var d,e,f,g,h,i=this[0],j=0,k=null;if(a===b){if(this.length){k=p.data(i);if(i.nodeType===1&&!p._data(i,"parsedAttrs")){f=i.attributes;for(h=f.length;j<h;j++)g=f[j].name,g.indexOf("data-")||(g=p.camelCase(g.substring(5)),J(i,g,k[g]));p._data(i,"parsedAttrs",!0)}}return k}return typeof 
a=="object"?this.each(function(){p.data(this,a)}):(d=a.split(".",2),d[1]=d[1]?"."+d[1]:"",e=d[1]+"!",p.access(this,function(c){if(c===b)return k=this.triggerHandler("getData"+e,[d[0]]),k===b&&i&&(k=p.data(i,a),k=J(i,a,k)),k===b&&d[1]?this.data(d[0]):k;d[1]=c,this.each(function(){var b=p(this);b.triggerHandler("setData"+e,d),p.data(this,a,c),b.triggerHandler("changeData"+e,d)})},null,c,arguments.length>1,null,!1))},removeData:function(a){return this.each(function(){p.removeData(this,a)})}}),p.extend({queue:function(a,b,c){var d;if(a)return b=(b||"fx")+"queue",d=p._data(a,b),c&&(!d||p.isArray(c)?d=p._data(a,b,p.makeArray(c)):d.push(c)),d||[]},dequeue:function(a,b){b=b||"fx";var c=p.queue(a,b),d=c.length,e=c.shift(),f=p._queueHooks(a,b),g=function(){p.dequeue(a,b)};e==="inprogress"&&(e=c.shift(),d--),e&&(b==="fx"&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return p._data(a,c)||p._data(a,c,{empty:p.Callbacks("once memory").add(function(){p.removeData(a,b+"queue",!0),p.removeData(a,c,!0)})})}}),p.fn.extend({queue:function(a,c){var d=2;return typeof a!="string"&&(c=a,a="fx",d--),arguments.length<d?p.queue(this[0],a):c===b?this:this.each(function(){var b=p.queue(this,a,c);p._queueHooks(this,a),a==="fx"&&b[0]!=="inprogress"&&p.dequeue(this,a)})},dequeue:function(a){return this.each(function(){p.dequeue(this,a)})},delay:function(a,b){return a=p.fx?p.fx.speeds[a]||a:a,b=b||"fx",this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,c){var d,e=1,f=p.Deferred(),g=this,h=this.length,i=function(){--e||f.resolveWith(g,[g])};typeof a!="string"&&(c=a,a=b),a=a||"fx";while(h--)d=p._data(g[h],a+"queueHooks"),d&&d.empty&&(e++,d.empty.add(i));return i(),f.promise(c)}});var L,M,N,O=/[\t\r\n]/g,P=/\r/g,Q=/^(?:button|input)$/i,R=/^(?:button|input|object|select|textarea)$/i,S=/^a(?:rea|)$/i,T=/^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i,U=p.support.getSetAttribute;p.fn.extend({attr:function(a,b){return p.access(this,p.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){p.removeAttr(this,a)})},prop:function(a,b){return p.access(this,p.prop,a,b,arguments.length>1)},removeProp:function(a){return a=p.propFix[a]||a,this.each(function(){try{this[a]=b,delete this[a]}catch(c){}})},addClass:function(a){var b,c,d,e,f,g,h;if(p.isFunction(a))return this.each(function(b){p(this).addClass(a.call(this,b,this.className))});if(a&&typeof a=="string"){b=a.split(s);for(c=0,d=this.length;c<d;c++){e=this[c];if(e.nodeType===1)if(!e.className&&b.length===1)e.className=a;else{f=" "+e.className+" ";for(g=0,h=b.length;g<h;g++)f.indexOf(" "+b[g]+" ")<0&&(f+=b[g]+" ");e.className=p.trim(f)}}}return this},removeClass:function(a){var c,d,e,f,g,h,i;if(p.isFunction(a))return this.each(function(b){p(this).removeClass(a.call(this,b,this.className))});if(a&&typeof a=="string"||a===b){c=(a||"").split(s);for(h=0,i=this.length;h<i;h++){e=this[h];if(e.nodeType===1&&e.className){d=(" "+e.className+" ").replace(O," ");for(f=0,g=c.length;f<g;f++)while(d.indexOf(" "+c[f]+" ")>=0)d=d.replace(" "+c[f]+" "," ");e.className=a?p.trim(d):""}}}return this},toggleClass:function(a,b){var c=typeof a,d=typeof b=="boolean";return p.isFunction(a)?this.each(function(c){p(this).toggleClass(a.call(this,c,this.className,b),b)}):this.each(function(){if(c==="string"){var 
e,f=0,g=p(this),h=b,i=a.split(s);while(e=i[f++])h=d?h:!g.hasClass(e),g[h?"addClass":"removeClass"](e)}else if(c==="undefined"||c==="boolean")this.className&&p._data(this,"__className__",this.className),this.className=this.className||a===!1?"":p._data(this,"__className__")||""})},hasClass:function(a){var b=" "+a+" ",c=0,d=this.length;for(;c<d;c++)if(this[c].nodeType===1&&(" "+this[c].className+" ").replace(O," ").indexOf(b)>=0)return!0;return!1},val:function(a){var c,d,e,f=this[0];if(!arguments.length){if(f)return c=p.valHooks[f.type]||p.valHooks[f.nodeName.toLowerCase()],c&&"get"in c&&(d=c.get(f,"value"))!==b?d:(d=f.value,typeof d=="string"?d.replace(P,""):d==null?"":d);return}return e=p.isFunction(a),this.each(function(d){var f,g=p(this);if(this.nodeType!==1)return;e?f=a.call(this,d,g.val()):f=a,f==null?f="":typeof f=="number"?f+="":p.isArray(f)&&(f=p.map(f,function(a){return a==null?"":a+""})),c=p.valHooks[this.type]||p.valHooks[this.nodeName.toLowerCase()];if(!c||!("set"in c)||c.set(this,f,"value")===b)this.value=f})}}),p.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,f=a.selectedIndex,g=[],h=a.options,i=a.type==="select-one";if(f<0)return null;c=i?f:0,d=i?f+1:h.length;for(;c<d;c++){e=h[c];if(e.selected&&(p.support.optDisabled?!e.disabled:e.getAttribute("disabled")===null)&&(!e.parentNode.disabled||!p.nodeName(e.parentNode,"optgroup"))){b=p(e).val();if(i)return b;g.push(b)}}return i&&!g.length&&h.length?p(h[f]).val():g},set:function(a,b){var c=p.makeArray(b);return p(a).find("option").each(function(){this.selected=p.inArray(p(this).val(),c)>=0}),c.length||(a.selectedIndex=-1),c}}},attrFn:{},attr:function(a,c,d,e){var f,g,h,i=a.nodeType;if(!a||i===3||i===8||i===2)return;if(e&&p.isFunction(p.fn[c]))return p(a)[c](d);if(typeof a.getAttribute=="undefined")return p.prop(a,c,d);h=i!==1||!p.isXMLDoc(a),h&&(c=c.toLowerCase(),g=p.attrHooks[c]||(T.test(c)?M:L));if(d!==b){if(d===null){p.removeAttr(a,c);return}return g&&"set"in g&&h&&(f=g.set(a,d,c))!==b?f:(a.setAttribute(c,d+""),d)}return g&&"get"in g&&h&&(f=g.get(a,c))!==null?f:(f=a.getAttribute(c),f===null?b:f)},removeAttr:function(a,b){var c,d,e,f,g=0;if(b&&a.nodeType===1){d=b.split(s);for(;g<d.length;g++)e=d[g],e&&(c=p.propFix[e]||e,f=T.test(e),f||p.attr(a,e,""),a.removeAttribute(U?e:c),f&&c in a&&(a[c]=!1))}},attrHooks:{type:{set:function(a,b){if(Q.test(a.nodeName)&&a.parentNode)p.error("type property can't be changed");else if(!p.support.radioValue&&b==="radio"&&p.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}},value:{get:function(a,b){return L&&p.nodeName(a,"button")?L.get(a,b):b in a?a.value:null},set:function(a,b,c){if(L&&p.nodeName(a,"button"))return L.set(a,b,c);a.value=b}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(a,c,d){var e,f,g,h=a.nodeType;if(!a||h===3||h===8||h===2)return;return g=h!==1||!p.isXMLDoc(a),g&&(c=p.propFix[c]||c,f=p.propHooks[c]),d!==b?f&&"set"in f&&(e=f.set(a,d,c))!==b?e:a[c]=d:f&&"get"in f&&(e=f.get(a,c))!==null?e:a[c]},propHooks:{tabIndex:{get:function(a){var c=a.getAttributeNode("tabindex");return c&&c.specified?parseInt(c.value,10):R.test(a.nodeName)||S.test(a.nodeName)&&a.href?0:b}}}}),M={get:function(a,c){var d,e=p.prop(a,c);return 
e===!0||typeof e!="boolean"&&(d=a.getAttributeNode(c))&&d.nodeValue!==!1?c.toLowerCase():b},set:function(a,b,c){var d;return b===!1?p.removeAttr(a,c):(d=p.propFix[c]||c,d in a&&(a[d]=!0),a.setAttribute(c,c.toLowerCase())),c}},U||(N={name:!0,id:!0,coords:!0},L=p.valHooks.button={get:function(a,c){var d;return d=a.getAttributeNode(c),d&&(N[c]?d.value!=="":d.specified)?d.value:b},set:function(a,b,c){var d=a.getAttributeNode(c);return d||(d=e.createAttribute(c),a.setAttributeNode(d)),d.value=b+""}},p.each(["width","height"],function(a,b){p.attrHooks[b]=p.extend(p.attrHooks[b],{set:function(a,c){if(c==="")return a.setAttribute(b,"auto"),c}})}),p.attrHooks.contenteditable={get:L.get,set:function(a,b,c){b===""&&(b="false"),L.set(a,b,c)}}),p.support.hrefNormalized||p.each(["href","src","width","height"],function(a,c){p.attrHooks[c]=p.extend(p.attrHooks[c],{get:function(a){var d=a.getAttribute(c,2);return d===null?b:d}})}),p.support.style||(p.attrHooks.style={get:function(a){return a.style.cssText.toLowerCase()||b},set:function(a,b){return a.style.cssText=b+""}}),p.support.optSelected||(p.propHooks.selected=p.extend(p.propHooks.selected,{get:function(a){var b=a.parentNode;return b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex),null}})),p.support.enctype||(p.propFix.enctype="encoding"),p.support.checkOn||p.each(["radio","checkbox"],function(){p.valHooks[this]={get:function(a){return a.getAttribute("value")===null?"on":a.value}}}),p.each(["radio","checkbox"],function(){p.valHooks[this]=p.extend(p.valHooks[this],{set:function(a,b){if(p.isArray(b))return a.checked=p.inArray(p(a).val(),b)>=0}})});var V=/^(?:textarea|input|select)$/i,W=/^([^\.]*|)(?:\.(.+)|)$/,X=/(?:^|\s)hover(\.\S+|)\b/,Y=/^key/,Z=/^(?:mouse|contextmenu)|click/,$=/^(?:focusinfocus|focusoutblur)$/,_=function(a){return p.event.special.hover?a:a.replace(X,"mouseenter$1 mouseleave$1")};p.event={add:function(a,c,d,e,f){var g,h,i,j,k,l,m,n,o,q,r;if(a.nodeType===3||a.nodeType===8||!c||!d||!(g=p._data(a)))return;d.handler&&(o=d,d=o.handler,f=o.selector),d.guid||(d.guid=p.guid++),i=g.events,i||(g.events=i={}),h=g.handle,h||(g.handle=h=function(a){return typeof p!="undefined"&&(!a||p.event.triggered!==a.type)?p.event.dispatch.apply(h.elem,arguments):b},h.elem=a),c=p.trim(_(c)).split(" ");for(j=0;j<c.length;j++){k=W.exec(c[j])||[],l=k[1],m=(k[2]||"").split(".").sort(),r=p.event.special[l]||{},l=(f?r.delegateType:r.bindType)||l,r=p.event.special[l]||{},n=p.extend({type:l,origType:k[1],data:e,handler:d,guid:d.guid,selector:f,needsContext:f&&p.expr.match.needsContext.test(f),namespace:m.join(".")},o),q=i[l];if(!q){q=i[l]=[],q.delegateCount=0;if(!r.setup||r.setup.call(a,e,m,h)===!1)a.addEventListener?a.addEventListener(l,h,!1):a.attachEvent&&a.attachEvent("on"+l,h)}r.add&&(r.add.call(a,n),n.handler.guid||(n.handler.guid=d.guid)),f?q.splice(q.delegateCount++,0,n):q.push(n),p.event.global[l]=!0}a=null},global:{},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,n,o,q,r=p.hasData(a)&&p._data(a);if(!r||!(m=r.events))return;b=p.trim(_(b||"")).split(" ");for(f=0;f<b.length;f++){g=W.exec(b[f])||[],h=i=g[1],j=g[2];if(!h){for(h in m)p.event.remove(a,h+b[f],c,d,!0);continue}n=p.event.special[h]||{},h=(d?n.delegateType:n.bindType)||h,o=m[h]||[],k=o.length,j=j?new 
RegExp("(^|\\.)"+j.split(".").sort().join("\\.(?:.*\\.|)")+"(\\.|$)"):null;for(l=0;l<o.length;l++)q=o[l],(e||i===q.origType)&&(!c||c.guid===q.guid)&&(!j||j.test(q.namespace))&&(!d||d===q.selector||d==="**"&&q.selector)&&(o.splice(l--,1),q.selector&&o.delegateCount--,n.remove&&n.remove.call(a,q));o.length===0&&k!==o.length&&((!n.teardown||n.teardown.call(a,j,r.handle)===!1)&&p.removeEvent(a,h,r.handle),delete m[h])}p.isEmptyObject(m)&&(delete r.handle,p.removeData(a,"events",!0))},customEvent:{getData:!0,setData:!0,changeData:!0},trigger:function(c,d,f,g){if(!f||f.nodeType!==3&&f.nodeType!==8){var h,i,j,k,l,m,n,o,q,r,s=c.type||c,t=[];if($.test(s+p.event.triggered))return;s.indexOf("!")>=0&&(s=s.slice(0,-1),i=!0),s.indexOf(".")>=0&&(t=s.split("."),s=t.shift(),t.sort());if((!f||p.event.customEvent[s])&&!p.event.global[s])return;c=typeof c=="object"?c[p.expando]?c:new p.Event(s,c):new p.Event(s),c.type=s,c.isTrigger=!0,c.exclusive=i,c.namespace=t.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+t.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,m=s.indexOf(":")<0?"on"+s:"";if(!f){h=p.cache;for(j in h)h[j].events&&h[j].events[s]&&p.event.trigger(c,d,h[j].handle.elem,!0);return}c.result=b,c.target||(c.target=f),d=d!=null?p.makeArray(d):[],d.unshift(c),n=p.event.special[s]||{};if(n.trigger&&n.trigger.apply(f,d)===!1)return;q=[[f,n.bindType||s]];if(!g&&!n.noBubble&&!p.isWindow(f)){r=n.delegateType||s,k=$.test(r+s)?f:f.parentNode;for(l=f;k;k=k.parentNode)q.push([k,r]),l=k;l===(f.ownerDocument||e)&&q.push([l.defaultView||l.parentWindow||a,r])}for(j=0;j<q.length&&!c.isPropagationStopped();j++)k=q[j][0],c.type=q[j][1],o=(p._data(k,"events")||{})[c.type]&&p._data(k,"handle"),o&&o.apply(k,d),o=m&&k[m],o&&p.acceptData(k)&&o.apply&&o.apply(k,d)===!1&&c.preventDefault();return c.type=s,!g&&!c.isDefaultPrevented()&&(!n._default||n._default.apply(f.ownerDocument,d)===!1)&&(s!=="click"||!p.nodeName(f,"a"))&&p.acceptData(f)&&m&&f[s]&&(s!=="focus"&&s!=="blur"||c.target.offsetWidth!==0)&&!p.isWindow(f)&&(l=f[m],l&&(f[m]=null),p.event.triggered=s,f[s](),p.event.triggered=b,l&&(f[m]=l)),c.result}return},dispatch:function(c){c=p.event.fix(c||a.event);var d,e,f,g,h,i,j,l,m,n,o=(p._data(this,"events")||{})[c.type]||[],q=o.delegateCount,r=k.call(arguments),s=!c.exclusive&&!c.namespace,t=p.event.special[c.type]||{},u=[];r[0]=c,c.delegateTarget=this;if(t.preDispatch&&t.preDispatch.call(this,c)===!1)return;if(q&&(!c.button||c.type!=="click"))for(f=c.target;f!=this;f=f.parentNode||this)if(f.disabled!==!0||c.type!=="click"){h={},j=[];for(d=0;d<q;d++)l=o[d],m=l.selector,h[m]===b&&(h[m]=l.needsContext?p(m,this).index(f)>=0:p.find(m,this,null,[f]).length),h[m]&&j.push(l);j.length&&u.push({elem:f,matches:j})}o.length>q&&u.push({elem:this,matches:o.slice(q)});for(d=0;d<u.length&&!c.isPropagationStopped();d++){i=u[d],c.currentTarget=i.elem;for(e=0;e<i.matches.length&&!c.isImmediatePropagationStopped();e++){l=i.matches[e];if(s||!c.namespace&&!l.namespace||c.namespace_re&&c.namespace_re.test(l.namespace))c.data=l.data,c.handleObj=l,g=((p.event.special[l.origType]||{}).handle||l.handler).apply(i.elem,r),g!==b&&(c.result=g,g===!1&&(c.preventDefault(),c.stopPropagation()))}}return t.postDispatch&&t.postDispatch.call(this,c),c.result},props:"attrChange attrName relatedNode srcElement altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return 
a.which==null&&(a.which=b.charCode!=null?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,c){var d,f,g,h=c.button,i=c.fromElement;return a.pageX==null&&c.clientX!=null&&(d=a.target.ownerDocument||e,f=d.documentElement,g=d.body,a.pageX=c.clientX+(f&&f.scrollLeft||g&&g.scrollLeft||0)-(f&&f.clientLeft||g&&g.clientLeft||0),a.pageY=c.clientY+(f&&f.scrollTop||g&&g.scrollTop||0)-(f&&f.clientTop||g&&g.clientTop||0)),!a.relatedTarget&&i&&(a.relatedTarget=i===a.target?c.toElement:i),!a.which&&h!==b&&(a.which=h&1?1:h&2?3:h&4?2:0),a}},fix:function(a){if(a[p.expando])return a;var b,c,d=a,f=p.event.fixHooks[a.type]||{},g=f.props?this.props.concat(f.props):this.props;a=p.Event(d);for(b=g.length;b;)c=g[--b],a[c]=d[c];return a.target||(a.target=d.srcElement||e),a.target.nodeType===3&&(a.target=a.target.parentNode),a.metaKey=!!a.metaKey,f.filter?f.filter(a,d):a},special:{load:{noBubble:!0},focus:{delegateType:"focusin"},blur:{delegateType:"focusout"},beforeunload:{setup:function(a,b,c){p.isWindow(this)&&(this.onbeforeunload=c)},teardown:function(a,b){this.onbeforeunload===b&&(this.onbeforeunload=null)}}},simulate:function(a,b,c,d){var e=p.extend(new p.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?p.event.trigger(e,null,b):p.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},p.event.handle=p.event.dispatch,p.removeEvent=e.removeEventListener?function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)}:function(a,b,c){var d="on"+b;a.detachEvent&&(typeof a[d]=="undefined"&&(a[d]=null),a.detachEvent(d,c))},p.Event=function(a,b){if(this instanceof p.Event)a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||a.returnValue===!1||a.getPreventDefault&&a.getPreventDefault()?bb:ba):this.type=a,b&&p.extend(this,b),this.timeStamp=a&&a.timeStamp||p.now(),this[p.expando]=!0;else return new p.Event(a,b)},p.Event.prototype={preventDefault:function(){this.isDefaultPrevented=bb;var a=this.originalEvent;if(!a)return;a.preventDefault?a.preventDefault():a.returnValue=!1},stopPropagation:function(){this.isPropagationStopped=bb;var a=this.originalEvent;if(!a)return;a.stopPropagation&&a.stopPropagation(),a.cancelBubble=!0},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=bb,this.stopPropagation()},isDefaultPrevented:ba,isPropagationStopped:ba,isImmediatePropagationStopped:ba},p.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){p.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c,d=this,e=a.relatedTarget,f=a.handleObj,g=f.selector;if(!e||e!==d&&!p.contains(d,e))a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b;return c}}}),p.support.submitBubbles||(p.event.special.submit={setup:function(){if(p.nodeName(this,"form"))return!1;p.event.add(this,"click._submit keypress._submit",function(a){var c=a.target,d=p.nodeName(c,"input")||p.nodeName(c,"button")?c.form:b;d&&!p._data(d,"_submit_attached")&&(p.event.add(d,"submit._submit",function(a){a._submit_bubble=!0}),p._data(d,"_submit_attached",!0))})},postDispatch:function(a){a._submit_bubble&&(delete 
a._submit_bubble,this.parentNode&&!a.isTrigger&&p.event.simulate("submit",this.parentNode,a,!0))},teardown:function(){if(p.nodeName(this,"form"))return!1;p.event.remove(this,"._submit")}}),p.support.changeBubbles||(p.event.special.change={setup:function(){if(V.test(this.nodeName)){if(this.type==="checkbox"||this.type==="radio")p.event.add(this,"propertychange._change",function(a){a.originalEvent.propertyName==="checked"&&(this._just_changed=!0)}),p.event.add(this,"click._change",function(a){this._just_changed&&!a.isTrigger&&(this._just_changed=!1),p.event.simulate("change",this,a,!0)});return!1}p.event.add(this,"beforeactivate._change",function(a){var b=a.target;V.test(b.nodeName)&&!p._data(b,"_change_attached")&&(p.event.add(b,"change._change",function(a){this.parentNode&&!a.isSimulated&&!a.isTrigger&&p.event.simulate("change",this.parentNode,a,!0)}),p._data(b,"_change_attached",!0))})},handle:function(a){var b=a.target;if(this!==b||a.isSimulated||a.isTrigger||b.type!=="radio"&&b.type!=="checkbox")return a.handleObj.handler.apply(this,arguments)},teardown:function(){return p.event.remove(this,"._change"),!V.test(this.nodeName)}}),p.support.focusinBubbles||p.each({focus:"focusin",blur:"focusout"},function(a,b){var c=0,d=function(a){p.event.simulate(b,a.target,p.event.fix(a),!0)};p.event.special[b]={setup:function(){c++===0&&e.addEventListener(a,d,!0)},teardown:function(){--c===0&&e.removeEventListener(a,d,!0)}}}),p.fn.extend({on:function(a,c,d,e,f){var g,h;if(typeof a=="object"){typeof c!="string"&&(d=d||c,c=b);for(h in a)this.on(h,c,d,a[h],f);return this}d==null&&e==null?(e=c,d=c=b):e==null&&(typeof c=="string"?(e=d,d=b):(e=d,d=c,c=b));if(e===!1)e=ba;else if(!e)return this;return f===1&&(g=e,e=function(a){return p().off(a),g.apply(this,arguments)},e.guid=g.guid||(g.guid=p.guid++)),this.each(function(){p.event.add(this,a,e,d,c)})},one:function(a,b,c,d){return this.on(a,b,c,d,1)},off:function(a,c,d){var e,f;if(a&&a.preventDefault&&a.handleObj)return e=a.handleObj,p(a.delegateTarget).off(e.namespace?e.origType+"."+e.namespace:e.origType,e.selector,e.handler),this;if(typeof a=="object"){for(f in a)this.off(f,c,a[f]);return this}if(c===!1||typeof c=="function")d=c,c=b;return d===!1&&(d=ba),this.each(function(){p.event.remove(this,a,d,c)})},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},live:function(a,b,c){return p(this.context).on(a,this.selector,b,c),this},die:function(a,b){return p(this.context).off(a,this.selector||"**",b),this},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return arguments.length===1?this.off(a,"**"):this.off(b,a||"**",c)},trigger:function(a,b){return this.each(function(){p.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0])return p.event.trigger(a,b,this[0],!0)},toggle:function(a){var b=arguments,c=a.guid||p.guid++,d=0,e=function(c){var e=(p._data(this,"lastToggle"+a.guid)||0)%d;return p._data(this,"lastToggle"+a.guid,e+1),c.preventDefault(),b[e].apply(this,arguments)||!1};e.guid=c;while(d<b.length)b[d++].guid=c;return this.click(e)},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}}),p.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){p.fn[b]=function(a,c){return 
c==null&&(c=a,a=null),arguments.length>0?this.on(b,null,a,c):this.trigger(b)},Y.test(b)&&(p.event.fixHooks[b]=p.event.keyHooks),Z.test(b)&&(p.event.fixHooks[b]=p.event.mouseHooks)}),function(a,b){function bc(a,b,c,d){c=c||[],b=b||r;var e,f,i,j,k=b.nodeType;if(!a||typeof a!="string")return c;if(k!==1&&k!==9)return[];i=g(b);if(!i&&!d)if(e=P.exec(a))if(j=e[1]){if(k===9){f=b.getElementById(j);if(!f||!f.parentNode)return c;if(f.id===j)return c.push(f),c}else if(b.ownerDocument&&(f=b.ownerDocument.getElementById(j))&&h(b,f)&&f.id===j)return c.push(f),c}else{if(e[2])return w.apply(c,x.call(b.getElementsByTagName(a),0)),c;if((j=e[3])&&_&&b.getElementsByClassName)return w.apply(c,x.call(b.getElementsByClassName(j),0)),c}return bp(a.replace(L,"$1"),b,c,d,i)}function bd(a){return function(b){var c=b.nodeName.toLowerCase();return c==="input"&&b.type===a}}function be(a){return function(b){var c=b.nodeName.toLowerCase();return(c==="input"||c==="button")&&b.type===a}}function bf(a){return z(function(b){return b=+b,z(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function bg(a,b,c){if(a===b)return c;var d=a.nextSibling;while(d){if(d===b)return-1;d=d.nextSibling}return 1}function bh(a,b){var c,d,f,g,h,i,j,k=C[o][a];if(k)return b?0:k.slice(0);h=a,i=[],j=e.preFilter;while(h){if(!c||(d=M.exec(h)))d&&(h=h.slice(d[0].length)),i.push(f=[]);c=!1;if(d=N.exec(h))f.push(c=new q(d.shift())),h=h.slice(c.length),c.type=d[0].replace(L," ");for(g in e.filter)(d=W[g].exec(h))&&(!j[g]||(d=j[g](d,r,!0)))&&(f.push(c=new q(d.shift())),h=h.slice(c.length),c.type=g,c.matches=d);if(!c)break}return b?h.length:h?bc.error(a):C(a,i).slice(0)}function bi(a,b,d){var e=b.dir,f=d&&b.dir==="parentNode",g=u++;return b.first?function(b,c,d){while(b=b[e])if(f||b.nodeType===1)return a(b,c,d)}:function(b,d,h){if(!h){var i,j=t+" "+g+" ",k=j+c;while(b=b[e])if(f||b.nodeType===1){if((i=b[o])===k)return b.sizset;if(typeof i=="string"&&i.indexOf(j)===0){if(b.sizset)return b}else{b[o]=k;if(a(b,d,h))return b.sizset=!0,b;b.sizset=!1}}}else while(b=b[e])if(f||b.nodeType===1)if(a(b,d,h))return b}}function bj(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function bk(a,b,c,d,e){var f,g=[],h=0,i=a.length,j=b!=null;for(;h<i;h++)if(f=a[h])if(!c||c(f,d,e))g.push(f),j&&b.push(h);return g}function bl(a,b,c,d,e,f){return d&&!d[o]&&(d=bl(d)),e&&!e[o]&&(e=bl(e,f)),z(function(f,g,h,i){if(f&&e)return;var j,k,l,m=[],n=[],o=g.length,p=f||bo(b||"*",h.nodeType?[h]:h,[],f),q=a&&(f||!b)?bk(p,m,a,h,i):p,r=c?e||(f?a:o||d)?[]:g:q;c&&c(q,r,h,i);if(d){l=bk(r,n),d(l,[],h,i),j=l.length;while(j--)if(k=l[j])r[n[j]]=!(q[n[j]]=k)}if(f){j=a&&r.length;while(j--)if(k=r[j])f[m[j]]=!(g[m[j]]=k)}else r=bk(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):w.apply(g,r)})}function bm(a){var b,c,d,f=a.length,g=e.relative[a[0].type],h=g||e.relative[" "],i=g?1:0,j=bi(function(a){return a===b},h,!0),k=bi(function(a){return y.call(b,a)>-1},h,!0),m=[function(a,c,d){return!g&&(d||c!==l)||((b=c).nodeType?j(a,c,d):k(a,c,d))}];for(;i<f;i++)if(c=e.relative[a[i].type])m=[bi(bj(m),c)];else{c=e.filter[a[i].type].apply(null,a[i].matches);if(c[o]){d=++i;for(;d<f;d++)if(e.relative[a[d].type])break;return bl(i>1&&bj(m),i>1&&a.slice(0,i-1).join("").replace(L,"$1"),c,i<d&&bm(a.slice(i,d)),d<f&&bm(a=a.slice(d)),d<f&&a.join(""))}m.push(c)}return bj(m)}function bn(a,b){var d=b.length>0,f=a.length>0,g=function(h,i,j,k,m){var 
n,o,p,q=[],s=0,u="0",x=h&&[],y=m!=null,z=l,A=h||f&&e.find.TAG("*",m&&i.parentNode||i),B=t+=z==null?1:Math.E;y&&(l=i!==r&&i,c=g.el);for(;(n=A[u])!=null;u++){if(f&&n){for(o=0;p=a[o];o++)if(p(n,i,j)){k.push(n);break}y&&(t=B,c=++g.el)}d&&((n=!p&&n)&&s--,h&&x.push(n))}s+=u;if(d&&u!==s){for(o=0;p=b[o];o++)p(x,q,i,j);if(h){if(s>0)while(u--)!x[u]&&!q[u]&&(q[u]=v.call(k));q=bk(q)}w.apply(k,q),y&&!h&&q.length>0&&s+b.length>1&&bc.uniqueSort(k)}return y&&(t=B,l=z),x};return g.el=0,d?z(g):g}function bo(a,b,c,d){var e=0,f=b.length;for(;e<f;e++)bc(a,b[e],c,d);return c}function bp(a,b,c,d,f){var g,h,j,k,l,m=bh(a),n=m.length;if(!d&&m.length===1){h=m[0]=m[0].slice(0);if(h.length>2&&(j=h[0]).type==="ID"&&b.nodeType===9&&!f&&e.relative[h[1].type]){b=e.find.ID(j.matches[0].replace(V,""),b,f)[0];if(!b)return c;a=a.slice(h.shift().length)}for(g=W.POS.test(a)?-1:h.length-1;g>=0;g--){j=h[g];if(e.relative[k=j.type])break;if(l=e.find[k])if(d=l(j.matches[0].replace(V,""),R.test(h[0].type)&&b.parentNode||b,f)){h.splice(g,1),a=d.length&&h.join("");if(!a)return w.apply(c,x.call(d,0)),c;break}}}return i(a,m)(d,b,f,c,R.test(a)),c}function bq(){}var c,d,e,f,g,h,i,j,k,l,m=!0,n="undefined",o=("sizcache"+Math.random()).replace(".",""),q=String,r=a.document,s=r.documentElement,t=0,u=0,v=[].pop,w=[].push,x=[].slice,y=[].indexOf||function(a){var b=0,c=this.length;for(;b<c;b++)if(this[b]===a)return b;return-1},z=function(a,b){return a[o]=b==null||b,a},A=function(){var a={},b=[];return z(function(c,d){return b.push(c)>e.cacheLength&&delete a[b.shift()],a[c]=d},a)},B=A(),C=A(),D=A(),E="[\\x20\\t\\r\\n\\f]",F="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",G=F.replace("w","w#"),H="([*^$|!~]?=)",I="\\["+E+"*("+F+")"+E+"*(?:"+H+E+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+G+")|)|)"+E+"*\\]",J=":("+F+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+I+")|[^:]|\\\\.)*|.*))\\)|)",K=":(even|odd|eq|gt|lt|nth|first|last)(?:\\("+E+"*((?:-\\d)?\\d*)"+E+"*\\)|)(?=[^-]|$)",L=new RegExp("^"+E+"+|((?:^|[^\\\\])(?:\\\\.)*)"+E+"+$","g"),M=new RegExp("^"+E+"*,"+E+"*"),N=new RegExp("^"+E+"*([\\x20\\t\\r\\n\\f>+~])"+E+"*"),O=new RegExp(J),P=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,Q=/^:not/,R=/[\x20\t\r\n\f]*[+~]/,S=/:not\($/,T=/h\d/i,U=/input|select|textarea|button/i,V=/\\(?!\\)/g,W={ID:new RegExp("^#("+F+")"),CLASS:new RegExp("^\\.("+F+")"),NAME:new RegExp("^\\[name=['\"]?("+F+")['\"]?\\]"),TAG:new RegExp("^("+F.replace("w","w*")+")"),ATTR:new RegExp("^"+I),PSEUDO:new RegExp("^"+J),POS:new RegExp(K,"i"),CHILD:new RegExp("^:(only|nth|first|last)-child(?:\\("+E+"*(even|odd|(([+-]|)(\\d*)n|)"+E+"*(?:([+-]|)"+E+"*(\\d+)|))"+E+"*\\)|)","i"),needsContext:new RegExp("^"+E+"*[>+~]|"+K,"i")},X=function(a){var b=r.createElement("div");try{return a(b)}catch(c){return!1}finally{b=null}},Y=X(function(a){return a.appendChild(r.createComment("")),!a.getElementsByTagName("*").length}),Z=X(function(a){return a.innerHTML="<a href='#'></a>",a.firstChild&&typeof a.firstChild.getAttribute!==n&&a.firstChild.getAttribute("href")==="#"}),$=X(function(a){a.innerHTML="<select></select>";var b=typeof a.lastChild.getAttribute("multiple");return b!=="boolean"&&b!=="string"}),_=X(function(a){return a.innerHTML="<div class='hidden e'></div><div class='hidden'></div>",!a.getElementsByClassName||!a.getElementsByClassName("e").length?!1:(a.lastChild.className="e",a.getElementsByClassName("e").length===2)}),ba=X(function(a){a.id=o+0,a.innerHTML="<a name='"+o+"'></a><div name='"+o+"'></div>",s.insertBefore(a,s.firstChild);var 
b=r.getElementsByName&&r.getElementsByName(o).length===2+r.getElementsByName(o+0).length;return d=!r.getElementById(o),s.removeChild(a),b});try{x.call(s.childNodes,0)[0].nodeType}catch(bb){x=function(a){var b,c=[];for(;b=this[a];a++)c.push(b);return c}}bc.matches=function(a,b){return bc(a,null,null,b)},bc.matchesSelector=function(a,b){return bc(b,null,null,[a]).length>0},f=bc.getText=function(a){var b,c="",d=0,e=a.nodeType;if(e){if(e===1||e===9||e===11){if(typeof a.textContent=="string")return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=f(a)}else if(e===3||e===4)return a.nodeValue}else for(;b=a[d];d++)c+=f(b);return c},g=bc.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?b.nodeName!=="HTML":!1},h=bc.contains=s.contains?function(a,b){var c=a.nodeType===9?a.documentElement:a,d=b&&b.parentNode;return a===d||!!(d&&d.nodeType===1&&c.contains&&c.contains(d))}:s.compareDocumentPosition?function(a,b){return b&&!!(a.compareDocumentPosition(b)&16)}:function(a,b){while(b=b.parentNode)if(b===a)return!0;return!1},bc.attr=function(a,b){var c,d=g(a);return d||(b=b.toLowerCase()),(c=e.attrHandle[b])?c(a):d||$?a.getAttribute(b):(c=a.getAttributeNode(b),c?typeof a[b]=="boolean"?a[b]?b:null:c.specified?c.value:null:null)},e=bc.selectors={cacheLength:50,createPseudo:z,match:W,attrHandle:Z?{}:{href:function(a){return a.getAttribute("href",2)},type:function(a){return a.getAttribute("type")}},find:{ID:d?function(a,b,c){if(typeof b.getElementById!==n&&!c){var d=b.getElementById(a);return d&&d.parentNode?[d]:[]}}:function(a,c,d){if(typeof c.getElementById!==n&&!d){var e=c.getElementById(a);return e?e.id===a||typeof e.getAttributeNode!==n&&e.getAttributeNode("id").value===a?[e]:b:[]}},TAG:Y?function(a,b){if(typeof b.getElementsByTagName!==n)return b.getElementsByTagName(a)}:function(a,b){var c=b.getElementsByTagName(a);if(a==="*"){var d,e=[],f=0;for(;d=c[f];f++)d.nodeType===1&&e.push(d);return e}return c},NAME:ba&&function(a,b){if(typeof b.getElementsByName!==n)return b.getElementsByName(name)},CLASS:_&&function(a,b,c){if(typeof b.getElementsByClassName!==n&&!c)return b.getElementsByClassName(a)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(V,""),a[3]=(a[4]||a[5]||"").replace(V,""),a[2]==="~="&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),a[1]==="nth"?(a[2]||bc.error(a[0]),a[3]=+(a[3]?a[4]+(a[5]||1):2*(a[2]==="even"||a[2]==="odd")),a[4]=+(a[6]+a[7]||a[2]==="odd")):a[2]&&bc.error(a[0]),a},PSEUDO:function(a){var b,c;if(W.CHILD.test(a[0]))return null;if(a[3])a[2]=a[3];else if(b=a[4])O.test(b)&&(c=bh(b,!0))&&(c=b.indexOf(")",b.length-c)-b.length)&&(b=b.slice(0,c),a[0]=a[0].slice(0,c)),a[2]=b;return a.slice(0,3)}},filter:{ID:d?function(a){return a=a.replace(V,""),function(b){return b.getAttribute("id")===a}}:function(a){return a=a.replace(V,""),function(b){var c=typeof b.getAttributeNode!==n&&b.getAttributeNode("id");return c&&c.value===a}},TAG:function(a){return a==="*"?function(){return!0}:(a=a.replace(V,"").toLowerCase(),function(b){return b.nodeName&&b.nodeName.toLowerCase()===a})},CLASS:function(a){var b=B[o][a];return b||(b=B(a,new RegExp("(^|"+E+")"+a+"("+E+"|$)"))),function(a){return b.test(a.className||typeof a.getAttribute!==n&&a.getAttribute("class")||"")}},ATTR:function(a,b,c){return function(d,e){var f=bc.attr(d,a);return 
f==null?b==="!=":b?(f+="",b==="="?f===c:b==="!="?f!==c:b==="^="?c&&f.indexOf(c)===0:b==="*="?c&&f.indexOf(c)>-1:b==="$="?c&&f.substr(f.length-c.length)===c:b==="~="?(" "+f+" ").indexOf(c)>-1:b==="|="?f===c||f.substr(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d){return a==="nth"?function(a){var b,e,f=a.parentNode;if(c===1&&d===0)return!0;if(f){e=0;for(b=f.firstChild;b;b=b.nextSibling)if(b.nodeType===1){e++;if(a===b)break}}return e-=d,e===c||e%c===0&&e/c>=0}:function(b){var c=b;switch(a){case"only":case"first":while(c=c.previousSibling)if(c.nodeType===1)return!1;if(a==="first")return!0;c=b;case"last":while(c=c.nextSibling)if(c.nodeType===1)return!1;return!0}}},PSEUDO:function(a,b){var c,d=e.pseudos[a]||e.setFilters[a.toLowerCase()]||bc.error("unsupported pseudo: "+a);return d[o]?d(b):d.length>1?(c=[a,a,"",b],e.setFilters.hasOwnProperty(a.toLowerCase())?z(function(a,c){var e,f=d(a,b),g=f.length;while(g--)e=y.call(a,f[g]),a[e]=!(c[e]=f[g])}):function(a){return d(a,0,c)}):d}},pseudos:{not:z(function(a){var b=[],c=[],d=i(a.replace(L,"$1"));return d[o]?z(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)if(f=g[h])a[h]=!(b[h]=f)}):function(a,e,f){return b[0]=a,d(b,null,f,c),!c.pop()}}),has:z(function(a){return function(b){return bc(a,b).length>0}}),contains:z(function(a){return function(b){return(b.textContent||b.innerText||f(b)).indexOf(a)>-1}}),enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&!!a.checked||b==="option"&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},parent:function(a){return!e.pseudos.empty(a)},empty:function(a){var b;a=a.firstChild;while(a){if(a.nodeName>"@"||(b=a.nodeType)===3||b===4)return!1;a=a.nextSibling}return!0},header:function(a){return T.test(a.nodeName)},text:function(a){var b,c;return a.nodeName.toLowerCase()==="input"&&(b=a.type)==="text"&&((c=a.getAttribute("type"))==null||c.toLowerCase()===b)},radio:bd("radio"),checkbox:bd("checkbox"),file:bd("file"),password:bd("password"),image:bd("image"),submit:be("submit"),reset:be("reset"),button:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&a.type==="button"||b==="button"},input:function(a){return U.test(a.nodeName)},focus:function(a){var b=a.ownerDocument;return a===b.activeElement&&(!b.hasFocus||b.hasFocus())&&(!!a.type||!!a.href)},active:function(a){return a===a.ownerDocument.activeElement},first:bf(function(a,b,c){return[0]}),last:bf(function(a,b,c){return[b-1]}),eq:bf(function(a,b,c){return[c<0?c+b:c]}),even:bf(function(a,b,c){for(var d=0;d<b;d+=2)a.push(d);return a}),odd:bf(function(a,b,c){for(var d=1;d<b;d+=2)a.push(d);return a}),lt:bf(function(a,b,c){for(var d=c<0?c+b:c;--d>=0;)a.push(d);return a}),gt:bf(function(a,b,c){for(var d=c<0?c+b:c;++d<b;)a.push(d);return a})}},j=s.compareDocumentPosition?function(a,b){return a===b?(k=!0,0):(!a.compareDocumentPosition||!b.compareDocumentPosition?a.compareDocumentPosition:a.compareDocumentPosition(b)&4)?-1:1}:function(a,b){if(a===b)return k=!0,0;if(a.sourceIndex&&b.sourceIndex)return a.sourceIndex-b.sourceIndex;var c,d,e=[],f=[],g=a.parentNode,h=b.parentNode,i=g;if(g===h)return bg(a,b);if(!g)return-1;if(!h)return 1;while(i)e.unshift(i),i=i.parentNode;i=h;while(i)f.unshift(i),i=i.parentNode;c=e.length,d=f.length;for(var j=0;j<c&&j<d;j++)if(e[j]!==f[j])return bg(e[j],f[j]);return j===c?bg(a,f[j],-1):bg(e[j],b,1)},[0,0].sort(j),m=!k,bc.uniqueSort=function(a){var 
b,c=1;k=m,a.sort(j);if(k)for(;b=a[c];c++)b===a[c-1]&&a.splice(c--,1);return a},bc.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},i=bc.compile=function(a,b){var c,d=[],e=[],f=D[o][a];if(!f){b||(b=bh(a)),c=b.length;while(c--)f=bm(b[c]),f[o]?d.push(f):e.push(f);f=D(a,bn(e,d))}return f},r.querySelectorAll&&function(){var a,b=bp,c=/'|\\/g,d=/\=[\x20\t\r\n\f]*([^'"\]]*)[\x20\t\r\n\f]*\]/g,e=[":focus"],f=[":active",":focus"],h=s.matchesSelector||s.mozMatchesSelector||s.webkitMatchesSelector||s.oMatchesSelector||s.msMatchesSelector;X(function(a){a.innerHTML="<select><option selected=''></option></select>",a.querySelectorAll("[selected]").length||e.push("\\["+E+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),a.querySelectorAll(":checked").length||e.push(":checked")}),X(function(a){a.innerHTML="<p test=''></p>",a.querySelectorAll("[test^='']").length&&e.push("[*^$]="+E+"*(?:\"\"|'')"),a.innerHTML="<input type='hidden'/>",a.querySelectorAll(":enabled").length||e.push(":enabled",":disabled")}),e=new RegExp(e.join("|")),bp=function(a,d,f,g,h){if(!g&&!h&&(!e||!e.test(a))){var i,j,k=!0,l=o,m=d,n=d.nodeType===9&&a;if(d.nodeType===1&&d.nodeName.toLowerCase()!=="object"){i=bh(a),(k=d.getAttribute("id"))?l=k.replace(c,"\\$&"):d.setAttribute("id",l),l="[id='"+l+"'] ",j=i.length;while(j--)i[j]=l+i[j].join("");m=R.test(a)&&d.parentNode||d,n=i.join(",")}if(n)try{return w.apply(f,x.call(m.querySelectorAll(n),0)),f}catch(p){}finally{k||d.removeAttribute("id")}}return b(a,d,f,g,h)},h&&(X(function(b){a=h.call(b,"div");try{h.call(b,"[test!='']:sizzle"),f.push("!=",J)}catch(c){}}),f=new RegExp(f.join("|")),bc.matchesSelector=function(b,c){c=c.replace(d,"='$1']");if(!g(b)&&!f.test(c)&&(!e||!e.test(c)))try{var i=h.call(b,c);if(i||a||b.document&&b.document.nodeType!==11)return i}catch(j){}return bc(c,null,null,[b]).length>0})}(),e.pseudos.nth=e.pseudos.eq,e.filters=bq.prototype=e.pseudos,e.setFilters=new bq,bc.attr=p.attr,p.find=bc,p.expr=bc.selectors,p.expr[":"]=p.expr.pseudos,p.unique=bc.uniqueSort,p.text=bc.getText,p.isXMLDoc=bc.isXML,p.contains=bc.contains}(a);var bc=/Until$/,bd=/^(?:parents|prev(?:Until|All))/,be=/^.[^:#\[\.,]*$/,bf=p.expr.match.needsContext,bg={children:!0,contents:!0,next:!0,prev:!0};p.fn.extend({find:function(a){var b,c,d,e,f,g,h=this;if(typeof a!="string")return p(a).filter(function(){for(b=0,c=h.length;b<c;b++)if(p.contains(h[b],this))return!0});g=this.pushStack("","find",a);for(b=0,c=this.length;b<c;b++){d=g.length,p.find(a,this[b],g);if(b>0)for(e=d;e<g.length;e++)for(f=0;f<d;f++)if(g[f]===g[e]){g.splice(e--,1);break}}return g},has:function(a){var b,c=p(a,this),d=c.length;return this.filter(function(){for(b=0;b<d;b++)if(p.contains(this,c[b]))return!0})},not:function(a){return this.pushStack(bj(this,a,!1),"not",a)},filter:function(a){return this.pushStack(bj(this,a,!0),"filter",a)},is:function(a){return!!a&&(typeof a=="string"?bf.test(a)?p(a,this.context).index(this[0])>=0:p.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c,d=0,e=this.length,f=[],g=bf.test(a)||typeof a!="string"?p(a,b||this.context):0;for(;d<e;d++){c=this[d];while(c&&c.ownerDocument&&c!==b&&c.nodeType!==11){if(g?g.index(c)>-1:p.find.matchesSelector(c,a)){f.push(c);break}c=c.parentNode}}return f=f.length>1?p.unique(f):f,this.pushStack(f,"closest",a)},index:function(a){return a?typeof a=="string"?p.inArray(this[0],p(a)):p.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(a,b){var c=typeof 
a=="string"?p(a,b):p.makeArray(a&&a.nodeType?[a]:a),d=p.merge(this.get(),c);return this.pushStack(bh(c[0])||bh(d[0])?d:p.unique(d))},addBack:function(a){return this.add(a==null?this.prevObject:this.prevObject.filter(a))}}),p.fn.andSelf=p.fn.addBack,p.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return p.dir(a,"parentNode")},parentsUntil:function(a,b,c){return p.dir(a,"parentNode",c)},next:function(a){return bi(a,"nextSibling")},prev:function(a){return bi(a,"previousSibling")},nextAll:function(a){return p.dir(a,"nextSibling")},prevAll:function(a){return p.dir(a,"previousSibling")},nextUntil:function(a,b,c){return p.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return p.dir(a,"previousSibling",c)},siblings:function(a){return p.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return p.sibling(a.firstChild)},contents:function(a){return p.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:p.merge([],a.childNodes)}},function(a,b){p.fn[a]=function(c,d){var e=p.map(this,b,c);return bc.test(a)||(d=c),d&&typeof d=="string"&&(e=p.filter(d,e)),e=this.length>1&&!bg[a]?p.unique(e):e,this.length>1&&bd.test(a)&&(e=e.reverse()),this.pushStack(e,a,k.call(arguments).join(","))}}),p.extend({filter:function(a,b,c){return c&&(a=":not("+a+")"),b.length===1?p.find.matchesSelector(b[0],a)?[b[0]]:[]:p.find.matches(a,b)},dir:function(a,c,d){var e=[],f=a[c];while(f&&f.nodeType!==9&&(d===b||f.nodeType!==1||!p(f).is(d)))f.nodeType===1&&e.push(f),f=f[c];return e},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var bl="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",bm=/ jQuery\d+="(?:null|\d+)"/g,bn=/^\s+/,bo=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bp=/<([\w:]+)/,bq=/<tbody/i,br=/<|&#?\w+;/,bs=/<(?:script|style|link)/i,bt=/<(?:script|object|embed|option|style)/i,bu=new RegExp("<(?:"+bl+")[\\s/>]","i"),bv=/^(?:checkbox|radio)$/,bw=/checked\s*(?:[^=]|=\s*.checked.)/i,bx=/\/(java|ecma)script/i,by=/^\s*<!(?:\[CDATA\[|\-\-)|[\]\-]{2}>\s*$/g,bz={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]},bA=bk(e),bB=bA.appendChild(e.createElement("div"));bz.optgroup=bz.option,bz.tbody=bz.tfoot=bz.colgroup=bz.caption=bz.thead,bz.th=bz.td,p.support.htmlSerialize||(bz._default=[1,"X<div>","</div>"]),p.fn.extend({text:function(a){return p.access(this,function(a){return a===b?p.text(this):this.empty().append((this[0]&&this[0].ownerDocument||e).createTextNode(a))},null,a,arguments.length)},wrapAll:function(a){if(p.isFunction(a))return this.each(function(b){p(this).wrapAll(a.call(this,b))});if(this[0]){var b=p(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){return p.isFunction(a)?this.each(function(b){p(this).wrapInner(a.call(this,b))}):this.each(function(){var b=p(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=p.isFunction(a);return 
this.each(function(c){p(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){p.nodeName(this,"body")||p(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){(this.nodeType===1||this.nodeType===11)&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(a,this.firstChild)})},before:function(){if(!bh(this[0]))return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=p.clean(arguments);return this.pushStack(p.merge(a,this),"before",this.selector)}},after:function(){if(!bh(this[0]))return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=p.clean(arguments);return this.pushStack(p.merge(this,a),"after",this.selector)}},remove:function(a,b){var c,d=0;for(;(c=this[d])!=null;d++)if(!a||p.filter(a,[c]).length)!b&&c.nodeType===1&&(p.cleanData(c.getElementsByTagName("*")),p.cleanData([c])),c.parentNode&&c.parentNode.removeChild(c);return this},empty:function(){var a,b=0;for(;(a=this[b])!=null;b++){a.nodeType===1&&p.cleanData(a.getElementsByTagName("*"));while(a.firstChild)a.removeChild(a.firstChild)}return this},clone:function(a,b){return a=a==null?!1:a,b=b==null?a:b,this.map(function(){return p.clone(this,a,b)})},html:function(a){return p.access(this,function(a){var c=this[0]||{},d=0,e=this.length;if(a===b)return c.nodeType===1?c.innerHTML.replace(bm,""):b;if(typeof a=="string"&&!bs.test(a)&&(p.support.htmlSerialize||!bu.test(a))&&(p.support.leadingWhitespace||!bn.test(a))&&!bz[(bp.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(bo,"<$1></$2>");try{for(;d<e;d++)c=this[d]||{},c.nodeType===1&&(p.cleanData(c.getElementsByTagName("*")),c.innerHTML=a);c=0}catch(f){}}c&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(a){return bh(this[0])?this.length?this.pushStack(p(p.isFunction(a)?a():a),"replaceWith",a):this:p.isFunction(a)?this.each(function(b){var c=p(this),d=c.html();c.replaceWith(a.call(this,b,d))}):(typeof a!="string"&&(a=p(a).detach()),this.each(function(){var b=this.nextSibling,c=this.parentNode;p(this).remove(),b?p(b).before(a):p(c).append(a)}))},detach:function(a){return this.remove(a,!0)},domManip:function(a,c,d){a=[].concat.apply([],a);var e,f,g,h,i=0,j=a[0],k=[],l=this.length;if(!p.support.checkClone&&l>1&&typeof j=="string"&&bw.test(j))return this.each(function(){p(this).domManip(a,c,d)});if(p.isFunction(j))return this.each(function(e){var f=p(this);a[0]=j.call(this,e,c?f.html():b),f.domManip(a,c,d)});if(this[0]){e=p.buildFragment(a,this,k),g=e.fragment,f=g.firstChild,g.childNodes.length===1&&(g=f);if(f){c=c&&p.nodeName(f,"tr");for(h=e.cacheable||l-1;i<l;i++)d.call(c&&p.nodeName(this[i],"table")?bC(this[i],"tbody"):this[i],i===h?g:p.clone(g,!0,!0))}g=f=null,k.length&&p.each(k,function(a,b){b.src?p.ajax?p.ajax({url:b.src,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0}):p.error("no ajax"):p.globalEval((b.text||b.textContent||b.innerHTML||"").replace(by,"")),b.parentNode&&b.parentNode.removeChild(b)})}return this}}),p.buildFragment=function(a,c,d){var f,g,h,i=a[0];return c=c||e,c=!c.nodeType&&c[0]||c,c=c.ownerDocument||c,a.length===1&&typeof 
i=="string"&&i.length<512&&c===e&&i.charAt(0)==="<"&&!bt.test(i)&&(p.support.checkClone||!bw.test(i))&&(p.support.html5Clone||!bu.test(i))&&(g=!0,f=p.fragments[i],h=f!==b),f||(f=c.createDocumentFragment(),p.clean(a,c,f,d),g&&(p.fragments[i]=h&&f)),{fragment:f,cacheable:g}},p.fragments={},p.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){p.fn[a]=function(c){var d,e=0,f=[],g=p(c),h=g.length,i=this.length===1&&this[0].parentNode;if((i==null||i&&i.nodeType===11&&i.childNodes.length===1)&&h===1)return g[b](this[0]),this;for(;e<h;e++)d=(e>0?this.clone(!0):this).get(),p(g[e])[b](d),f=f.concat(d);return this.pushStack(f,a,g.selector)}}),p.extend({clone:function(a,b,c){var d,e,f,g;p.support.html5Clone||p.isXMLDoc(a)||!bu.test("<"+a.nodeName+">")?g=a.cloneNode(!0):(bB.innerHTML=a.outerHTML,bB.removeChild(g=bB.firstChild));if((!p.support.noCloneEvent||!p.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!p.isXMLDoc(a)){bE(a,g),d=bF(a),e=bF(g);for(f=0;d[f];++f)e[f]&&bE(d[f],e[f])}if(b){bD(a,g);if(c){d=bF(a),e=bF(g);for(f=0;d[f];++f)bD(d[f],e[f])}}return d=e=null,g},clean:function(a,b,c,d){var f,g,h,i,j,k,l,m,n,o,q,r,s=b===e&&bA,t=[];if(!b||typeof b.createDocumentFragment=="undefined")b=e;for(f=0;(h=a[f])!=null;f++){typeof h=="number"&&(h+="");if(!h)continue;if(typeof h=="string")if(!br.test(h))h=b.createTextNode(h);else{s=s||bk(b),l=b.createElement("div"),s.appendChild(l),h=h.replace(bo,"<$1></$2>"),i=(bp.exec(h)||["",""])[1].toLowerCase(),j=bz[i]||bz._default,k=j[0],l.innerHTML=j[1]+h+j[2];while(k--)l=l.lastChild;if(!p.support.tbody){m=bq.test(h),n=i==="table"&&!m?l.firstChild&&l.firstChild.childNodes:j[1]==="<table>"&&!m?l.childNodes:[];for(g=n.length-1;g>=0;--g)p.nodeName(n[g],"tbody")&&!n[g].childNodes.length&&n[g].parentNode.removeChild(n[g])}!p.support.leadingWhitespace&&bn.test(h)&&l.insertBefore(b.createTextNode(bn.exec(h)[0]),l.firstChild),h=l.childNodes,l.parentNode.removeChild(l)}h.nodeType?t.push(h):p.merge(t,h)}l&&(h=l=s=null);if(!p.support.appendChecked)for(f=0;(h=t[f])!=null;f++)p.nodeName(h,"input")?bG(h):typeof h.getElementsByTagName!="undefined"&&p.grep(h.getElementsByTagName("input"),bG);if(c){q=function(a){if(!a.type||bx.test(a.type))return d?d.push(a.parentNode?a.parentNode.removeChild(a):a):c.appendChild(a)};for(f=0;(h=t[f])!=null;f++)if(!p.nodeName(h,"script")||!q(h))c.appendChild(h),typeof h.getElementsByTagName!="undefined"&&(r=p.grep(p.merge([],h.getElementsByTagName("script")),q),t.splice.apply(t,[f+1,0].concat(r)),f+=r.length)}return t},cleanData:function(a,b){var c,d,e,f,g=0,h=p.expando,i=p.cache,j=p.support.deleteExpando,k=p.event.special;for(;(e=a[g])!=null;g++)if(b||p.acceptData(e)){d=e[h],c=d&&i[d];if(c){if(c.events)for(f in c.events)k[f]?p.event.remove(e,f):p.removeEvent(e,f,c.handle);i[d]&&(delete i[d],j?delete e[h]:e.removeAttribute?e.removeAttribute(h):e[h]=null,p.deletedIds.push(d))}}}}),function(){var a,b;p.uaMatch=function(a){a=a.toLowerCase();var b=/(chrome)[ \/]([\w.]+)/.exec(a)||/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||a.indexOf("compatible")<0&&/(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(a)||[];return{browser:b[1]||"",version:b[2]||"0"}},a=p.uaMatch(g.userAgent),b={},a.browser&&(b[a.browser]=!0,b.version=a.version),b.chrome?b.webkit=!0:b.webkit&&(b.safari=!0),p.browser=b,p.sub=function(){function a(b,c){return new a.fn.init(b,c)}p.extend(!0,a,this),a.superclass=this,a.fn=a.prototype=this(),a.fn.constructor=a,a.sub=this.sub,a.fn.init=function c(c,d){return d&&d instanceof p&&!(d instanceof a)&&(d=a(d)),p.fn.init.call(this,c,d,b)},a.fn.init.prototype=a.fn;var b=a(e);return a}}();var bH,bI,bJ,bK=/alpha\([^)]*\)/i,bL=/opacity=([^)]*)/,bM=/^(top|right|bottom|left)$/,bN=/^(none|table(?!-c[ea]).+)/,bO=/^margin/,bP=new RegExp("^("+q+")(.*)$","i"),bQ=new RegExp("^("+q+")(?!px)[a-z%]+$","i"),bR=new RegExp("^([-+])=("+q+")","i"),bS={},bT={position:"absolute",visibility:"hidden",display:"block"},bU={letterSpacing:0,fontWeight:400},bV=["Top","Right","Bottom","Left"],bW=["Webkit","O","Moz","ms"],bX=p.fn.toggle;p.fn.extend({css:function(a,c){return p.access(this,function(a,c,d){return d!==b?p.style(a,c,d):p.css(a,c)},a,c,arguments.length>1)},show:function(){return b$(this,!0)},hide:function(){return b$(this)},toggle:function(a,b){var c=typeof a=="boolean";return p.isFunction(a)&&p.isFunction(b)?bX.apply(this,arguments):this.each(function(){(c?a:bZ(this))?p(this).show():p(this).hide()})}}),p.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=bH(a,"opacity");return c===""?"1":c}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":p.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,c,d,e){if(!a||a.nodeType===3||a.nodeType===8||!a.style)return;var f,g,h,i=p.camelCase(c),j=a.style;c=p.cssProps[i]||(p.cssProps[i]=bY(j,i)),h=p.cssHooks[c]||p.cssHooks[i];if(d===b)return h&&"get"in h&&(f=h.get(a,!1,e))!==b?f:j[c];g=typeof d,g==="string"&&(f=bR.exec(d))&&(d=(f[1]+1)*f[2]+parseFloat(p.css(a,c)),g="number");if(d==null||g==="number"&&isNaN(d))return;g==="number"&&!p.cssNumber[i]&&(d+="px");if(!h||!("set"in h)||(d=h.set(a,d,e))!==b)try{j[c]=d}catch(k){}},css:function(a,c,d,e){var f,g,h,i=p.camelCase(c);return c=p.cssProps[i]||(p.cssProps[i]=bY(a.style,i)),h=p.cssHooks[c]||p.cssHooks[i],h&&"get"in h&&(f=h.get(a,!0,e)),f===b&&(f=bH(a,c)),f==="normal"&&c in bU&&(f=bU[c]),d||e!==b?(g=parseFloat(f),d||p.isNumeric(g)?g||0:f):f},swap:function(a,b,c){var d,e,f={};for(e in b)f[e]=a.style[e],a.style[e]=b[e];d=c.call(a);for(e in b)a.style[e]=f[e];return d}}),a.getComputedStyle?bH=function(b,c){var d,e,f,g,h=a.getComputedStyle(b,null),i=b.style;return h&&(d=h[c],d===""&&!p.contains(b.ownerDocument,b)&&(d=p.style(b,c)),bQ.test(d)&&bO.test(c)&&(e=i.width,f=i.minWidth,g=i.maxWidth,i.minWidth=i.maxWidth=i.width=d,d=h.width,i.width=e,i.minWidth=f,i.maxWidth=g)),d}:e.documentElement.currentStyle&&(bH=function(a,b){var c,d,e=a.currentStyle&&a.currentStyle[b],f=a.style;return e==null&&f&&f[b]&&(e=f[b]),bQ.test(e)&&!bM.test(b)&&(c=f.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),f.left=b==="fontSize"?"1em":e,e=f.pixelLeft+"px",f.left=c,d&&(a.runtimeStyle.left=d)),e===""?"auto":e}),p.each(["height","width"],function(a,b){p.cssHooks[b]={get:function(a,c,d){if(c)return a.offsetWidth===0&&bN.test(bH(a,"display"))?p.swap(a,bT,function(){return cb(a,b,d)}):cb(a,b,d)},set:function(a,c,d){return b_(a,c,d?ca(a,b,d,p.support.boxSizing&&p.css(a,"boxSizing")==="border-box"):0)}}}),p.support.opacity||(p.cssHooks.opacity={get:function(a,b){return 
bL.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=p.isNumeric(b)?"alpha(opacity="+b*100+")":"",f=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&p.trim(f.replace(bK,""))===""&&c.removeAttribute){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bK.test(f)?f.replace(bK,e):f+" "+e}}),p(function(){p.support.reliableMarginRight||(p.cssHooks.marginRight={get:function(a,b){return p.swap(a,{display:"inline-block"},function(){if(b)return bH(a,"marginRight")})}}),!p.support.pixelPosition&&p.fn.position&&p.each(["top","left"],function(a,b){p.cssHooks[b]={get:function(a,c){if(c){var d=bH(a,b);return bQ.test(d)?p(a).position()[b]+"px":d}}}})}),p.expr&&p.expr.filters&&(p.expr.filters.hidden=function(a){return a.offsetWidth===0&&a.offsetHeight===0||!p.support.reliableHiddenOffsets&&(a.style&&a.style.display||bH(a,"display"))==="none"},p.expr.filters.visible=function(a){return!p.expr.filters.hidden(a)}),p.each({margin:"",padding:"",border:"Width"},function(a,b){p.cssHooks[a+b]={expand:function(c){var d,e=typeof c=="string"?c.split(" "):[c],f={};for(d=0;d<4;d++)f[a+bV[d]+b]=e[d]||e[d-2]||e[0];return f}},bO.test(a)||(p.cssHooks[a+b].set=b_)});var cd=/%20/g,ce=/\[\]$/,cf=/\r?\n/g,cg=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,ch=/^(?:select|textarea)/i;p.fn.extend({serialize:function(){return p.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?p.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ch.test(this.nodeName)||cg.test(this.type))}).map(function(a,b){var c=p(this).val();return c==null?null:p.isArray(c)?p.map(c,function(a,c){return{name:b.name,value:a.replace(cf,"\r\n")}}):{name:b.name,value:c.replace(cf,"\r\n")}}).get()}}),p.param=function(a,c){var d,e=[],f=function(a,b){b=p.isFunction(b)?b():b==null?"":b,e[e.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=p.ajaxSettings&&p.ajaxSettings.traditional);if(p.isArray(a)||a.jquery&&!p.isPlainObject(a))p.each(a,function(){f(this.name,this.value)});else for(d in a)ci(d,a[d],c,f);return e.join("&").replace(cd,"+")};var cj,ck,cl=/#.*$/,cm=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,cn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,co=/^(?:GET|HEAD)$/,cp=/^\/\//,cq=/\?/,cr=/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,cs=/([?&])_=[^&]*/,ct=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,cu=p.fn.load,cv={},cw={},cx=["*/"]+["*"];try{ck=f.href}catch(cy){ck=e.createElement("a"),ck.href="",ck=ck.href}cj=ct.exec(ck.toLowerCase())||[],p.fn.load=function(a,c,d){if(typeof a!="string"&&cu)return cu.apply(this,arguments);if(!this.length)return this;var e,f,g,h=this,i=a.indexOf(" ");return i>=0&&(e=a.slice(i,a.length),a=a.slice(0,i)),p.isFunction(c)?(d=c,c=b):c&&typeof c=="object"&&(f="POST"),p.ajax({url:a,type:f,dataType:"html",data:c,complete:function(a,b){d&&h.each(d,g||[a.responseText,b,a])}}).done(function(a){g=arguments,h.html(e?p("<div>").append(a.replace(cr,"")).find(e):a)}),this},p.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){p.fn[b]=function(a){return this.on(b,a)}}),p.each(["get","post"],function(a,c){p[c]=function(a,d,e,f){return p.isFunction(d)&&(f=f||e,e=d,d=b),p.ajax({type:c,url:a,data:d,success:e,dataType:f})}}),p.extend({getScript:function(a,c){return 
p.get(a,b,c,"script")},getJSON:function(a,b,c){return p.get(a,b,c,"json")},ajaxSetup:function(a,b){return b?cB(a,p.ajaxSettings):(b=a,a=p.ajaxSettings),cB(a,b),a},ajaxSettings:{url:ck,isLocal:cn.test(cj[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":cx},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":p.parseJSON,"text xml":p.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:cz(cv),ajaxTransport:cz(cw),ajax:function(a,c){function y(a,c,f,i){var k,s,t,u,w,y=c;if(v===2)return;v=2,h&&clearTimeout(h),g=b,e=i||"",x.readyState=a>0?4:0,f&&(u=cC(l,x,f));if(a>=200&&a<300||a===304)l.ifModified&&(w=x.getResponseHeader("Last-Modified"),w&&(p.lastModified[d]=w),w=x.getResponseHeader("Etag"),w&&(p.etag[d]=w)),a===304?(y="notmodified",k=!0):(k=cD(l,u),y=k.state,s=k.data,t=k.error,k=!t);else{t=y;if(!y||a)y="error",a<0&&(a=0)}x.status=a,x.statusText=(c||y)+"",k?o.resolveWith(m,[s,y,x]):o.rejectWith(m,[x,y,t]),x.statusCode(r),r=b,j&&n.trigger("ajax"+(k?"Success":"Error"),[x,l,k?s:t]),q.fireWith(m,[x,y]),j&&(n.trigger("ajaxComplete",[x,l]),--p.active||p.event.trigger("ajaxStop"))}typeof a=="object"&&(c=a,a=b),c=c||{};var d,e,f,g,h,i,j,k,l=p.ajaxSetup({},c),m=l.context||l,n=m!==l&&(m.nodeType||m instanceof p)?p(m):p.event,o=p.Deferred(),q=p.Callbacks("once memory"),r=l.statusCode||{},t={},u={},v=0,w="canceled",x={readyState:0,setRequestHeader:function(a,b){if(!v){var c=a.toLowerCase();a=u[c]=u[c]||a,t[a]=b}return this},getAllResponseHeaders:function(){return v===2?e:null},getResponseHeader:function(a){var c;if(v===2){if(!f){f={};while(c=cm.exec(e))f[c[1].toLowerCase()]=c[2]}c=f[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){return v||(l.mimeType=a),this},abort:function(a){return a=a||w,g&&g.abort(a),y(0,a),this}};o.promise(x),x.success=x.done,x.error=x.fail,x.complete=q.add,x.statusCode=function(a){if(a){var b;if(v<2)for(b in a)r[b]=[r[b],a[b]];else b=a[x.status],x.always(b)}return this},l.url=((a||l.url)+"").replace(cl,"").replace(cp,cj[1]+"//"),l.dataTypes=p.trim(l.dataType||"*").toLowerCase().split(s),l.crossDomain==null&&(i=ct.exec(l.url.toLowerCase())||!1,l.crossDomain=i&&i.join(":")+(i[3]?"":i[1]==="http:"?80:443)!==cj.join(":")+(cj[3]?"":cj[1]==="http:"?80:443)),l.data&&l.processData&&typeof l.data!="string"&&(l.data=p.param(l.data,l.traditional)),cA(cv,l,c,x);if(v===2)return x;j=l.global,l.type=l.type.toUpperCase(),l.hasContent=!co.test(l.type),j&&p.active++===0&&p.event.trigger("ajaxStart");if(!l.hasContent){l.data&&(l.url+=(cq.test(l.url)?"&":"?")+l.data,delete l.data),d=l.url;if(l.cache===!1){var z=p.now(),A=l.url.replace(cs,"$1_="+z);l.url=A+(A===l.url?(cq.test(l.url)?"&":"?")+"_="+z:"")}}(l.data&&l.hasContent&&l.contentType!==!1||c.contentType)&&x.setRequestHeader("Content-Type",l.contentType),l.ifModified&&(d=d||l.url,p.lastModified[d]&&x.setRequestHeader("If-Modified-Since",p.lastModified[d]),p.etag[d]&&x.setRequestHeader("If-None-Match",p.etag[d])),x.setRequestHeader("Accept",l.dataTypes[0]&&l.accepts[l.dataTypes[0]]?l.accepts[l.dataTypes[0]]+(l.dataTypes[0]!=="*"?", "+cx+"; q=0.01":""):l.accepts["*"]);for(k in l.headers)x.setRequestHeader(k,l.headers[k]);if(!l.beforeSend||l.beforeSend.call(m,x,l)!==!1&&v!==2){w="abort";for(k 
in{success:1,error:1,complete:1})x[k](l[k]);g=cA(cw,l,c,x);if(!g)y(-1,"No Transport");else{x.readyState=1,j&&n.trigger("ajaxSend",[x,l]),l.async&&l.timeout>0&&(h=setTimeout(function(){x.abort("timeout")},l.timeout));try{v=1,g.send(t,y)}catch(B){if(v<2)y(-1,B);else throw B}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var cE=[],cF=/\?/,cG=/(=)\?(?=&|$)|\?\?/,cH=p.now();p.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=cE.pop()||p.expando+"_"+cH++;return this[a]=!0,a}}),p.ajaxPrefilter("json jsonp",function(c,d,e){var f,g,h,i=c.data,j=c.url,k=c.jsonp!==!1,l=k&&cG.test(j),m=k&&!l&&typeof i=="string"&&!(c.contentType||"").indexOf("application/x-www-form-urlencoded")&&cG.test(i);if(c.dataTypes[0]==="jsonp"||l||m)return f=c.jsonpCallback=p.isFunction(c.jsonpCallback)?c.jsonpCallback():c.jsonpCallback,g=a[f],l?c.url=j.replace(cG,"$1"+f):m?c.data=i.replace(cG,"$1"+f):k&&(c.url+=(cF.test(j)?"&":"?")+c.jsonp+"="+f),c.converters["script json"]=function(){return h||p.error(f+" was not called"),h[0]},c.dataTypes[0]="json",a[f]=function(){h=arguments},e.always(function(){a[f]=g,c[f]&&(c.jsonpCallback=d.jsonpCallback,cE.push(f)),h&&p.isFunction(g)&&g(h[0]),h=g=b}),"script"}),p.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){return p.globalEval(a),a}}}),p.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),p.ajaxTransport("script",function(a){if(a.crossDomain){var c,d=e.head||e.getElementsByTagName("head")[0]||e.documentElement;return{send:function(f,g){c=e.createElement("script"),c.async="async",a.scriptCharset&&(c.charset=a.scriptCharset),c.src=a.url,c.onload=c.onreadystatechange=function(a,e){if(e||!c.readyState||/loaded|complete/.test(c.readyState))c.onload=c.onreadystatechange=null,d&&c.parentNode&&d.removeChild(c),c=b,e||g(200,"success")},d.insertBefore(c,d.firstChild)},abort:function(){c&&c.onload(0,1)}}}});var cI,cJ=a.ActiveXObject?function(){for(var a in cI)cI[a](0,1)}:!1,cK=0;p.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&cL()||cM()}:cL,function(a){p.extend(p.support,{ajax:!!a,cors:!!a&&"withCredentials"in a})}(p.ajaxSettings.xhr()),p.support.ajax&&p.ajaxTransport(function(c){if(!c.crossDomain||p.support.cors){var d;return{send:function(e,f){var g,h,i=c.xhr();c.username?i.open(c.type,c.url,c.async,c.username,c.password):i.open(c.type,c.url,c.async);if(c.xhrFields)for(h in c.xhrFields)i[h]=c.xhrFields[h];c.mimeType&&i.overrideMimeType&&i.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(h in e)i.setRequestHeader(h,e[h])}catch(j){}i.send(c.hasContent&&c.data||null),d=function(a,e){var h,j,k,l,m;try{if(d&&(e||i.readyState===4)){d=b,g&&(i.onreadystatechange=p.noop,cJ&&delete cI[g]);if(e)i.readyState!==4&&i.abort();else{h=i.status,k=i.getAllResponseHeaders(),l={},m=i.responseXML,m&&m.documentElement&&(l.xml=m);try{l.text=i.responseText}catch(a){}try{j=i.statusText}catch(n){j=""}!h&&c.isLocal&&!c.crossDomain?h=l.text?200:404:h===1223&&(h=204)}}}catch(o){e||f(-1,o)}l&&f(h,j,l,k)},c.async?i.readyState===4?setTimeout(d,0):(g=++cK,cJ&&(cI||(cI={},p(a).unload(cJ)),cI[g]=d),i.onreadystatechange=d):d()},abort:function(){d&&d(0,1)}}}});var cN,cO,cP=/^(?:toggle|show|hide)$/,cQ=new RegExp("^(?:([-+])=|)("+q+")([a-z%]*)$","i"),cR=/queueHooks$/,cS=[cY],cT={"*":[function(a,b){var 
c,d,e=this.createTween(a,b),f=cQ.exec(b),g=e.cur(),h=+g||0,i=1,j=20;if(f){c=+f[2],d=f[3]||(p.cssNumber[a]?"":"px");if(d!=="px"&&h){h=p.css(e.elem,a,!0)||c||1;do i=i||".5",h=h/i,p.style(e.elem,a,h+d);while(i!==(i=e.cur()/g)&&i!==1&&--j)}e.unit=d,e.start=h,e.end=f[1]?h+(f[1]+1)*c:c}return e}]};p.Animation=p.extend(cW,{tweener:function(a,b){p.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");var c,d=0,e=a.length;for(;d<e;d++)c=a[d],cT[c]=cT[c]||[],cT[c].unshift(b)},prefilter:function(a,b){b?cS.unshift(a):cS.push(a)}}),p.Tween=cZ,cZ.prototype={constructor:cZ,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||"swing",this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(p.cssNumber[c]?"":"px")},cur:function(){var a=cZ.propHooks[this.prop];return a&&a.get?a.get(this):cZ.propHooks._default.get(this)},run:function(a){var b,c=cZ.propHooks[this.prop];return this.options.duration?this.pos=b=p.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):this.pos=b=a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):cZ.propHooks._default.set(this),this}},cZ.prototype.init.prototype=cZ.prototype,cZ.propHooks={_default:{get:function(a){var b;return a.elem[a.prop]==null||!!a.elem.style&&a.elem.style[a.prop]!=null?(b=p.css(a.elem,a.prop,!1,""),!b||b==="auto"?0:b):a.elem[a.prop]},set:function(a){p.fx.step[a.prop]?p.fx.step[a.prop](a):a.elem.style&&(a.elem.style[p.cssProps[a.prop]]!=null||p.cssHooks[a.prop])?p.style(a.elem,a.prop,a.now+a.unit):a.elem[a.prop]=a.now}}},cZ.propHooks.scrollTop=cZ.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},p.each(["toggle","show","hide"],function(a,b){var c=p.fn[b];p.fn[b]=function(d,e,f){return d==null||typeof d=="boolean"||!a&&p.isFunction(d)&&p.isFunction(e)?c.apply(this,arguments):this.animate(c$(b,!0),d,e,f)}}),p.fn.extend({fadeTo:function(a,b,c,d){return this.filter(bZ).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=p.isEmptyObject(a),f=p.speed(b,c,d),g=function(){var b=cW(this,p.extend({},a),f);e&&b.stop(!0)};return e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,c,d){var e=function(a){var b=a.stop;delete a.stop,b(d)};return typeof a!="string"&&(d=c,c=a,a=b),c&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,c=a!=null&&a+"queueHooks",f=p.timers,g=p._data(this);if(c)g[c]&&g[c].stop&&e(g[c]);else for(c in g)g[c]&&g[c].stop&&cR.test(c)&&e(g[c]);for(c=f.length;c--;)f[c].elem===this&&(a==null||f[c].queue===a)&&(f[c].anim.stop(d),b=!1,f.splice(c,1));(b||!d)&&p.dequeue(this,a)})}}),p.each({slideDown:c$("show"),slideUp:c$("hide"),slideToggle:c$("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){p.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),p.speed=function(a,b,c){var d=a&&typeof a=="object"?p.extend({},a):{complete:c||!c&&b||p.isFunction(a)&&a,duration:a,easing:c&&b||b&&!p.isFunction(b)&&b};d.duration=p.fx.off?0:typeof d.duration=="number"?d.duration:d.duration in p.fx.speeds?p.fx.speeds[d.duration]:p.fx.speeds._default;if(d.queue==null||d.queue===!0)d.queue="fx";return d.old=d.complete,d.complete=function(){p.isFunction(d.old)&&d.old.call(this),d.queue&&p.dequeue(this,d.queue)},d},p.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2}},p.timers=[],p.fx=cZ.prototype.init,p.fx.tick=function(){var 
a,b=p.timers,c=0;for(;c<b.length;c++)a=b[c],!a()&&b[c]===a&&b.splice(c--,1);b.length||p.fx.stop()},p.fx.timer=function(a){a()&&p.timers.push(a)&&!cO&&(cO=setInterval(p.fx.tick,p.fx.interval))},p.fx.interval=13,p.fx.stop=function(){clearInterval(cO),cO=null},p.fx.speeds={slow:600,fast:200,_default:400},p.fx.step={},p.expr&&p.expr.filters&&(p.expr.filters.animated=function(a){return p.grep(p.timers,function(b){return a===b.elem}).length});var c_=/^(?:body|html)$/i;p.fn.offset=function(a){if(arguments.length)return a===b?this:this.each(function(b){p.offset.setOffset(this,a,b)});var c,d,e,f,g,h,i,j={top:0,left:0},k=this[0],l=k&&k.ownerDocument;if(!l)return;return(d=l.body)===k?p.offset.bodyOffset(k):(c=l.documentElement,p.contains(c,k)?(typeof k.getBoundingClientRect!="undefined"&&(j=k.getBoundingClientRect()),e=da(l),f=c.clientTop||d.clientTop||0,g=c.clientLeft||d.clientLeft||0,h=e.pageYOffset||c.scrollTop,i=e.pageXOffset||c.scrollLeft,{top:j.top+h-f,left:j.left+i-g}):j)},p.offset={bodyOffset:function(a){var b=a.offsetTop,c=a.offsetLeft;return p.support.doesNotIncludeMarginInBodyOffset&&(b+=parseFloat(p.css(a,"marginTop"))||0,c+=parseFloat(p.css(a,"marginLeft"))||0),{top:b,left:c}},setOffset:function(a,b,c){var d=p.css(a,"position");d==="static"&&(a.style.position="relative");var e=p(a),f=e.offset(),g=p.css(a,"top"),h=p.css(a,"left"),i=(d==="absolute"||d==="fixed")&&p.inArray("auto",[g,h])>-1,j={},k={},l,m;i?(k=e.position(),l=k.top,m=k.left):(l=parseFloat(g)||0,m=parseFloat(h)||0),p.isFunction(b)&&(b=b.call(a,c,f)),b.top!=null&&(j.top=b.top-f.top+l),b.left!=null&&(j.left=b.left-f.left+m),"using"in b?b.using.call(a,j):e.css(j)}},p.fn.extend({position:function(){if(!this[0])return;var a=this[0],b=this.offsetParent(),c=this.offset(),d=c_.test(b[0].nodeName)?{top:0,left:0}:b.offset();return c.top-=parseFloat(p.css(a,"marginTop"))||0,c.left-=parseFloat(p.css(a,"marginLeft"))||0,d.top+=parseFloat(p.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(p.css(b[0],"borderLeftWidth"))||0,{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||e.body;while(a&&!c_.test(a.nodeName)&&p.css(a,"position")==="static")a=a.offsetParent;return a||e.body})}}),p.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,c){var d=/Y/.test(c);p.fn[a]=function(e){return p.access(this,function(a,e,f){var g=da(a);if(f===b)return g?c in g?g[c]:g.document.documentElement[e]:a[e];g?g.scrollTo(d?p(g).scrollLeft():f,d?f:p(g).scrollTop()):a[e]=f},a,e,arguments.length,null)}}),p.each({Height:"height",Width:"width"},function(a,c){p.each({padding:"inner"+a,content:c,"":"outer"+a},function(d,e){p.fn[e]=function(e,f){var g=arguments.length&&(d||typeof e!="boolean"),h=d||(e===!0||f===!0?"margin":"border");return p.access(this,function(c,d,e){var f;return p.isWindow(c)?c.document.documentElement["client"+a]:c.nodeType===9?(f=c.documentElement,Math.max(c.body["scroll"+a],f["scroll"+a],c.body["offset"+a],f["offset"+a],f["client"+a])):e===b?p.css(c,d,e,h):p.style(c,d,e,h)},c,g?e:b,g,null)}})}),a.jQuery=a.$=p,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return p})})(window); \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
deleted file mode 100644
index 4dd48675b7..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
+++ /dev/null
@@ -1,5486 +0,0 @@
-/**
- * @preserve jquery.layout 1.3.0 - Release Candidate 30.62
- * $Date: 2012-08-04 08:00:00 (Thu, 23 Aug 2012) $
- * $Rev: 303006 $
- *
- * Copyright (c) 2012
- * Fabrizio Balliano (http://www.fabrizioballiano.net)
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * Changelog: http://layout.jquery-dev.net/changelog.cfm#1.3.0.rc30.62
- * NOTE: This is a short-term release to patch a couple of bugs.
- * These bugs are listed as officially fixed in RC30.7, which will be released shortly.
- *
- * Docs: http://layout.jquery-dev.net/documentation.html
- * Tips: http://layout.jquery-dev.net/tips.html
- * Help: http://groups.google.com/group/jquery-ui-layout
- */
-
-/* JavaDoc Info: http://code.google.com/closure/compiler/docs/js-for-compiler.html
- * {!Object} non-nullable type (never NULL)
- * {?string} nullable type (sometimes NULL) - default for {Object}
- * {number=} optional parameter
- * {*} ALL types
- */
-
-// NOTE: For best readability, view with a fixed-width font and tabs equal to 4-chars
-
-;(function ($) {
-
-// alias Math methods - used a lot!
-var min = Math.min
-, max = Math.max
-, round = Math.floor
-
-, isStr = function (v) { return $.type(v) === "string"; }
-
-, runPluginCallbacks = function (Instance, a_fn) {
- if ($.isArray(a_fn))
- for (var i=0, c=a_fn.length; i<c; i++) {
- var fn = a_fn[i];
- try {
- if (isStr(fn)) // 'name' of a function
- fn = eval(fn);
- if ($.isFunction(fn))
- fn( Instance );
- } catch (ex) {}
- }
- }
-
-;
-
-
-/*
- * GENERIC $.layout METHODS - used by all layouts
- */
-$.layout = {
-
- version: "1.3.rc30.62"
-, revision: 0.033006 // 1.3.0 final = 1.0300 - major(n+).minor(nn)+patch(nn+)
-
- // can update code here if $.browser is phased out
-, browser: {
- mozilla: !!$.browser.mozilla
- , webkit: !!$.browser.webkit || !!$.browser.safari // webkit = jQ 1.4
- , msie: !!$.browser.msie
- , isIE6: $.browser.msie && $.browser.version == 6
- , boxModel: $.support.boxModel !== false || !$.browser.msie // ONLY IE reverts to old box-model - update for older jQ onReady
- , version: $.browser.version // not used in Layout core, but may be used by plugins
- }
-
- // *PREDEFINED* EFFECTS & DEFAULTS
- // MUST list effect here - OR MUST set an fxSettings option (can be an empty hash: {})
-, effects: {
-
- // Pane Open/Close Animations
- slide: {
- all: { duration: "fast" } // eg: duration: 1000, easing: "easeOutBounce"
- , north: { direction: "up" }
- , south: { direction: "down" }
- , east: { direction: "right"}
- , west: { direction: "left" }
- }
- , drop: {
- all: { duration: "slow" }
- , north: { direction: "up" }
- , south: { direction: "down" }
- , east: { direction: "right"}
- , west: { direction: "left" }
- }
- , scale: {
- all: { duration: "fast" }
- }
- // these are not recommended, but can be used
- , blind: {}
- , clip: {}
- , explode: {}
- , fade: {}
- , fold: {}
- , puff: {}
-
- // Pane Resize Animations
- , size: {
- all: { easing: "swing" }
- }
- }
-
- // INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
-, config: {
- optionRootKeys: "effects,panes,north,south,west,east,center".split(",")
- , allPanes: "north,south,west,east,center".split(",")
- , borderPanes: "north,south,west,east".split(",")
- , oppositeEdge: {
- north: "south"
- , south: "north"
- , east: "west"
- , west: "east"
- }
- // offscreen data
- , offscreenCSS: { left: "-99999px", right: "auto" } // used by hide/close if useOffscreenClose=true
- , offscreenReset: "offscreenReset" // key used for data
- // CSS used in multiple places
- , hidden: { visibility: "hidden" }
- , visible: { visibility: "visible" }
- // layout element settings
- , resizers: {
- cssReq: {
- position: "absolute"
- , padding: 0
- , margin: 0
- , fontSize: "1px"
- , textAlign: "left" // to counter-act "center" alignment!
- , overflow: "hidden" // prevent toggler-button from overflowing
- // SEE $.layout.defaults.zIndexes.resizer_normal
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- background: "#DDD"
- , border: "none"
- }
- }
- , togglers: {
- cssReq: {
- position: "absolute"
- , display: "block"
- , padding: 0
- , margin: 0
- , overflow: "hidden"
- , textAlign: "center"
- , fontSize: "1px"
- , cursor: "pointer"
- , zIndex: 1
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- background: "#AAA"
- }
- }
- , content: {
- cssReq: {
- position: "relative" /* contain floated or positioned elements */
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- overflow: "auto"
- , padding: "10px"
- }
- , cssDemoPane: { // DEMO CSS - REMOVE scrolling from 'pane' when it has a content-div
- overflow: "hidden"
- , padding: 0
- }
- }
- , panes: { // defaults for ALL panes - overridden by 'per-pane settings' below
- cssReq: {
- position: "absolute"
- , margin: 0
- // $.layout.defaults.zIndexes.pane_normal
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- padding: "10px"
- , background: "#FFF"
- , border: "1px solid #BBB"
- , overflow: "auto"
- }
- }
- , north: {
- side: "Top"
- , sizeType: "Height"
- , dir: "horz"
- , cssReq: {
- top: 0
- , bottom: "auto"
- , left: 0
- , right: 0
- , width: "auto"
- // height: DYNAMIC
- }
- }
- , south: {
- side: "Bottom"
- , sizeType: "Height"
- , dir: "horz"
- , cssReq: {
- top: "auto"
- , bottom: 0
- , left: 0
- , right: 0
- , width: "auto"
- // height: DYNAMIC
- }
- }
- , east: {
- side: "Right"
- , sizeType: "Width"
- , dir: "vert"
- , cssReq: {
- left: "auto"
- , right: 0
- , top: "auto" // DYNAMIC
- , bottom: "auto" // DYNAMIC
- , height: "auto"
- // width: DYNAMIC
- }
- }
- , west: {
- side: "Left"
- , sizeType: "Width"
- , dir: "vert"
- , cssReq: {
- left: 0
- , right: "auto"
- , top: "auto" // DYNAMIC
- , bottom: "auto" // DYNAMIC
- , height: "auto"
- // width: DYNAMIC
- }
- }
- , center: {
- dir: "center"
- , cssReq: {
- left: "auto" // DYNAMIC
- , right: "auto" // DYNAMIC
- , top: "auto" // DYNAMIC
- , bottom: "auto" // DYNAMIC
- , height: "auto"
- , width: "auto"
- }
- }
- }
-
- // CALLBACK FUNCTION NAMESPACE - used to store reusable callback functions
-, callbacks: {}
-
-, getParentPaneElem: function (el) {
- // must pass either a container or pane element
- var $el = $(el)
- , layout = $el.data("layout") || $el.data("parentLayout");
- if (layout) {
- var $cont = layout.container;
- // see if this container is directly-nested inside an outer-pane
- if ($cont.data("layoutPane")) return $cont;
- var $pane = $cont.closest("."+ $.layout.defaults.panes.paneClass);
- // if a pane was found, return it
- if ($pane.data("layoutPane")) return $pane;
- }
- return null;
- }
-
-, getParentPaneInstance: function (el) {
- // must pass either a container or pane element
- var $pane = $.layout.getParentPaneElem(el);
- return $pane ? $pane.data("layoutPane") : null;
- }
-
-, getParentLayoutInstance: function (el) {
- // must pass either a container or pane element
- var $pane = $.layout.getParentPaneElem(el);
- return $pane ? $pane.data("parentLayout") : null;
- }
-
-, getEventObject: function (evt) {
- return typeof evt === "object" && evt.stopPropagation ? evt : null;
- }
-, parsePaneName: function (evt_or_pane) {
-		// if an event object was passed (rather than a pane-name), its propagation MUST be stopped - see below
- var evt = $.layout.getEventObject( evt_or_pane );
- if (evt) {
- // ALWAYS stop propagation of events triggered in Layout!
- evt.stopPropagation();
- return $(this).data("layoutEdge");
- }
- else
- return evt_or_pane;
- }
-
-
- // LAYOUT-PLUGIN REGISTRATION
-	// more plugins can be added beyond this default list
-, plugins: {
- draggable: !!$.fn.draggable // resizing
- , effects: {
-	,	effects: {
-			core:	!!$.effects		// animations (specific effects tested by initOptions)
- , slide: $.effects && $.effects.slide // default effect
- }
- }
-
-// arrays of plugin or other methods to be triggered for events in *each layout* - will be passed 'Instance'
-, onCreate: [] // runs when layout is just starting to be created - right after options are set
-, onLoad: [] // runs after layout container and global events init, but before initPanes is called
-, onReady: [] // runs after initialization *completes* - ie, after initPanes completes successfully
-, onDestroy: [] // runs after layout is destroyed
-, onUnload: [] // runs after layout is destroyed OR when page unloads
-, afterOpen: [] // runs after setAsOpen() completes
-, afterClose: [] // runs after setAsClosed() completes
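-
-	// Plugin-registration sketch (illustrative only): each function pushed onto one of
-	// the arrays above is invoked by runPluginCallbacks() with the layout Instance as
-	// its only argument, eg:
-	/*
-	$.layout.onReady.push(function (inst) {
-		// 'inst' is the layout Instance that just finished initializing
-		if (window.console) console.log("layout ready", inst);
-	});
-	*/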
-
- /*
- * GENERIC UTILITY METHODS
- */
-
- // calculate and return the scrollbar width, as an integer
-, scrollbarWidth: function () { return window.scrollbarWidth || $.layout.getScrollbarSize('width'); }
-, scrollbarHeight: function () { return window.scrollbarHeight || $.layout.getScrollbarSize('height'); }
-, getScrollbarSize: function (dim) {
- var $c = $('<div style="position: absolute; top: -10000px; left: -10000px; width: 100px; height: 100px; overflow: scroll;"></div>').appendTo("body");
- var d = { width: $c.width() - $c[0].clientWidth, height: $c.height() - $c[0].clientHeight };
- $c.remove();
- window.scrollbarWidth = d.width;
- window.scrollbarHeight = d.height;
- return dim.match(/^(width|height)$/) ? d[dim] : d;
- }
-
-
- /**
-	 * Returns a hash containing the element's current 'display' and 'visibility' styles
- *
- * @see $.swap() - swaps CSS, runs callback, resets CSS
- */
-, showInvisibly: function ($E, force) {
- if ($E && $E.length && (force || $E.css('display') === "none")) { // only if not *already hidden*
- var s = $E[0].style
- // save ONLY the 'style' props because that is what we must restore
- , CSS = { display: s.display || '', visibility: s.visibility || '' };
- // show element 'invisibly' so can be measured
- $E.css({ display: "block", visibility: "hidden" });
- return CSS;
- }
- return {};
- }
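-
-	// Usage sketch (illustrative only - '#hiddenPane' is a hypothetical selector):
-	// measure a display:none element, then restore its original CSS.
-	/*
-	var $el	= $("#hiddenPane")
-	,	old	= $.layout.showInvisibly( $el )	// element is now display:block + visibility:hidden
-	,	w	= $el.outerWidth();				// so it can be measured
-	$el.css( old );							// restore original display & visibility
-	*/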
-
- /**
- * Returns data for setting size of an element (container or a pane).
- *
- * @see _create(), onWindowResize() for container, plus others for pane
- * @return JSON Returns a hash of all dimensions: top, bottom, left, right, outerWidth, innerHeight, etc
- */
-, getElementDimensions: function ($E) {
- var
- d = {} // dimensions hash
- , x = d.css = {} // CSS hash
- , i = {} // TEMP insets
- , b, p // TEMP border, padding
- , N = $.layout.cssNum
- , off = $E.offset()
- ;
- d.offsetLeft = off.left;
- d.offsetTop = off.top;
-
- $.each("Left,Right,Top,Bottom".split(","), function (idx, e) { // e = edge
- b = x["border" + e] = $.layout.borderWidth($E, e);
- p = x["padding"+ e] = $.layout.cssNum($E, "padding"+e);
- i[e] = b + p; // total offset of content from outer side
- d["inset"+ e] = p; // eg: insetLeft = paddingLeft
- });
-
- d.offsetWidth = $E.innerWidth(); // offsetWidth is used in calc when doing manual resize
- d.offsetHeight = $E.innerHeight(); // ditto
- d.outerWidth = $E.outerWidth();
- d.outerHeight = $E.outerHeight();
- d.innerWidth = max(0, d.outerWidth - i.Left - i.Right);
- d.innerHeight = max(0, d.outerHeight - i.Top - i.Bottom);
-
- x.width = $E.width();
- x.height = $E.height();
- x.top = N($E,"top",true);
- x.bottom = N($E,"bottom",true);
- x.left = N($E,"left",true);
- x.right = N($E,"right",true);
-
- //d.visible = $E.is(":visible");// && x.width > 0 && x.height > 0;
-
- return d;
- }
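-
-	// Usage sketch (illustrative only - '#container' is a hypothetical selector):
-	/*
-	var dims = $.layout.getElementDimensions( $("#container") );
-	// dims.innerWidth / dims.innerHeight	= space inside borders + padding
-	// dims.outerWidth / dims.outerHeight	= size including borders + padding
-	// dims.css.borderLeft, dims.css.paddingTop, etc = numeric CSS components
-	*/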
-
-, getElementCSS: function ($E, list) {
- var
- CSS = {}
- , style = $E[0].style
- , props = list.split(",")
- , sides = "Top,Bottom,Left,Right".split(",")
- , attrs = "Color,Style,Width".split(",")
- , p, s, a, i, j, k
- ;
- for (i=0; i < props.length; i++) {
- p = props[i];
- if (p.match(/(border|padding|margin)$/))
- for (j=0; j < 4; j++) {
- s = sides[j];
- if (p === "border")
- for (k=0; k < 3; k++) {
- a = attrs[k];
- CSS[p+s+a] = style[p+s+a];
- }
- else
- CSS[p+s] = style[p+s];
- }
- else
- CSS[p] = style[p];
-		}
-		return CSS;
- }
-
- /**
- * Return the innerWidth for the current browser/doctype
- *
- * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
- * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
- * @param {number=} outerWidth (optional) Can pass a width, allowing calculations BEFORE element is resized
- * @return {number} Returns the innerWidth of the elem by subtracting padding and borders
- */
-, cssWidth: function ($E, outerWidth) {
-		// a 'calculated' outerWidth can be passed so borders and/or padding are removed if needed
- if (outerWidth <= 0) return 0;
-
- if (!$.layout.browser.boxModel) return outerWidth;
-
- // strip border and padding from outerWidth to get CSS Width
- var b = $.layout.borderWidth
- , n = $.layout.cssNum
- , W = outerWidth
- - b($E, "Left")
- - b($E, "Right")
- - n($E, "paddingLeft")
- - n($E, "paddingRight");
-
- return max(0,W);
- }
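-
-	// Usage sketch (illustrative only): calculate the CSS width needed to give an
-	// element a specific outer-width, then apply it.
-	/*
-	var $pane	= $(".ui-layout-west")
-	,	w		= $.layout.cssWidth( $pane, 200 );	// 200px outer-width minus borders & padding
-	$pane.css({ width: w });
-	*/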
-
- /**
- * Return the innerHeight for the current browser/doctype
- *
- * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
- * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
-	 * @param {number=}			outerHeight	(optional) Can pass a height, allowing calculations BEFORE element is resized
- * @return {number} Returns the innerHeight of the elem by subtracting padding and borders
- */
-, cssHeight: function ($E, outerHeight) {
- // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed
- if (outerHeight <= 0) return 0;
-
- if (!$.layout.browser.boxModel) return outerHeight;
-
- // strip border and padding from outerHeight to get CSS Height
- var b = $.layout.borderWidth
- , n = $.layout.cssNum
- , H = outerHeight
- - b($E, "Top")
- - b($E, "Bottom")
- - n($E, "paddingTop")
- - n($E, "paddingBottom");
-
- return max(0,H);
- }
-
- /**
- * Returns the 'current CSS numeric value' for a CSS property - 0 if property does not exist
- *
- * @see Called by many methods
- * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
- * @param {string} prop The name of the CSS property, eg: top, width, etc.
- * @param {boolean=} [allowAuto=false] true = return 'auto' if that is value; false = return 0
- * @return {(string|number)} Usually used to get an integer value for position (top, left) or size (height, width)
- */
-, cssNum: function ($E, prop, allowAuto) {
- if (!$E.jquery) $E = $($E);
- var CSS = $.layout.showInvisibly($E)
- , p = $.css($E[0], prop, true)
- , v = allowAuto && p=="auto" ? p : (parseInt(p, 10) || 0);
- $E.css( CSS ); // RESET
- return v;
- }
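-
-	// Usage sketch (illustrative only - '#somePane' is a hypothetical selector):
-	/*
-	var topPx	= $.layout.cssNum( $("#somePane"), "top" )			// 0 if value is 'auto' or unset
-	,	topRaw	= $.layout.cssNum( $("#somePane"), "top", true );	// may return the string 'auto'
-	*/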
-
-, borderWidth: function (el, side) {
- if (el.jquery) el = el[0];
- var b = "border"+ side.substr(0,1).toUpperCase() + side.substr(1); // left => Left
- return $.css(el, b+"Style", true) === "none" ? 0 : (parseInt($.css(el, b+"Width", true), 10) || 0);
- }
-
- /**
- * Mouse-tracking utility - FUTURE REFERENCE
- *
- * init: if (!window.mouse) {
- * window.mouse = { x: 0, y: 0 };
- * $(document).mousemove( $.layout.trackMouse );
- * }
- *
- * @param {Object} evt
- *
-, trackMouse: function (evt) {
- window.mouse = { x: evt.clientX, y: evt.clientY };
- }
- */
-
- /**
- * SUBROUTINE for preventPrematureSlideClose option
- *
- * @param {Object} evt
- * @param {Object=} el
- */
-, isMouseOverElem: function (evt, el) {
- var
- $E = $(el || this)
- , d = $E.offset()
- , T = d.top
- , L = d.left
- , R = L + $E.outerWidth()
- , B = T + $E.outerHeight()
- , x = evt.pageX // evt.clientX ?
- , y = evt.pageY // evt.clientY ?
- ;
- // if X & Y are < 0, probably means is over an open SELECT
- return ($.layout.browser.msie && x < 0 && y < 0) || ((x >= L && x <= R) && (y >= T && y <= B));
- }
-
- /**
- * Message/Logging Utility
- *
- * @example $.layout.msg("My message"); // log text
- * @example $.layout.msg("My message", true); // alert text
- * @example $.layout.msg({ foo: "bar" }, "Title"); // log hash-data, with custom title
- * @example $.layout.msg({ foo: "bar" }, true, "Title", { sort: false }); -OR-
- * @example $.layout.msg({ foo: "bar" }, "Title", { sort: false, display: true }); // alert hash-data
- *
- * @param {(Object|string)} info String message OR Hash/Array
- * @param {(Boolean|string|Object)=} [popup=false] True means alert-box - can be skipped
- * @param {(Object|string)=} [debugTitle=""] Title for Hash data - can be skipped
- * @param {Object=} [debugOpts] Extra options for debug output
- */
-, msg: function (info, popup, debugTitle, debugOpts) {
- if ($.isPlainObject(info) && window.debugData) {
- if (typeof popup === "string") {
- debugOpts = debugTitle;
- debugTitle = popup;
- }
- else if (typeof debugTitle === "object") {
- debugOpts = debugTitle;
- debugTitle = null;
- }
- var t = debugTitle || "log( <object> )"
- , o = $.extend({ sort: false, returnHTML: false, display: false }, debugOpts);
- if (popup === true || o.display)
- debugData( info, t, o );
- else if (window.console)
- console.log(debugData( info, t, o ));
- }
- else if (popup)
- alert(info);
- else if (window.console)
- console.log(info);
- else {
- var id = "#layoutLogger"
- , $l = $(id);
- if (!$l.length)
- $l = createLog();
- $l.children("ul").append('<li style="padding: 4px 10px; margin: 0; border-top: 1px solid #CCC;">'+ info.replace(/\</g,"&lt;").replace(/\>/g,"&gt;") +'</li>');
- }
-
- function createLog () {
- var pos = $.support.fixedPosition ? 'fixed' : 'absolute'
- , $e = $('<div id="layoutLogger" style="position: '+ pos +'; top: 5px; z-index: 999999; max-width: 25%; overflow: hidden; border: 1px solid #000; border-radius: 5px; background: #FBFBFB; box-shadow: 0 2px 10px rgba(0,0,0,0.3);">'
- + '<div style="font-size: 13px; font-weight: bold; padding: 5px 10px; background: #F6F6F6; border-radius: 5px 5px 0 0; cursor: move;">'
- + '<span style="float: right; padding-left: 7px; cursor: pointer;" title="Remove Console" onclick="$(this).closest(\'#layoutLogger\').remove()">X</span>Layout console.log</div>'
- + '<ul style="font-size: 13px; font-weight: none; list-style: none; margin: 0; padding: 0 0 2px;"></ul>'
- + '</div>'
- ).appendTo("body");
-			$e.css('left', $(window).width() - $e.outerWidth() - 5);
- if ($.ui.draggable) $e.draggable({ handle: ':first-child' });
- return $e;
- };
- }
-
-};
-
-// DEFAULT OPTIONS
-$.layout.defaults = {
-/*
- * LAYOUT & LAYOUT-CONTAINER OPTIONS
- * - none of these options are applicable to individual panes
- */
- name: "" // Not required, but useful for buttons and used for the state-cookie
-, containerSelector: "" // ONLY used when specifying a childOptions - to find container-element that is NOT directly-nested
-, containerClass: "ui-layout-container" // layout-container element
-, scrollToBookmarkOnLoad: true // after creating a layout, scroll to bookmark in URL (.../page.htm#myBookmark)
-, resizeWithWindow: true // bind thisLayout.resizeAll() to the window.resize event
-, resizeWithWindowDelay: 200 // delay calling resizeAll because makes window resizing very jerky
-, resizeWithWindowMaxDelay: 0 // 0 = none - force resize every XX ms while window is being resized
-, onresizeall_start: null // CALLBACK when resizeAll() STARTS - NOT pane-specific
-, onresizeall_end: null // CALLBACK when resizeAll() ENDS - NOT pane-specific
-, onload_start: null // CALLBACK when Layout inits - after options initialized, but before elements
-, onload_end: null // CALLBACK when Layout inits - after EVERYTHING has been initialized
-, onunload_start: null // CALLBACK when Layout is destroyed OR onWindowUnload
-, onunload_end: null // CALLBACK when Layout is destroyed OR onWindowUnload
-, initPanes: true // false = DO NOT initialize the panes onLoad - will init later
-, showErrorMessages: true // enables fatal error messages to warn developers of common errors
-, showDebugMessages: false // display console-and-alert debug msgs - IF this Layout version _has_ debugging code!
-// Changing this zIndex value will cause other zIndex values to automatically change
-, zIndex: null // the PANE zIndex - resizers and masks will be +1
-// DO NOT CHANGE the zIndex values below unless you clearly understand their relationships
-, zIndexes: { // set _default_ z-index values here...
- pane_normal: 0 // normal z-index for panes
- , content_mask: 1 // applied to overlays used to mask content INSIDE panes during resizing
- , resizer_normal: 2 // normal z-index for resizer-bars
- , pane_sliding: 100 // applied to *BOTH* the pane and its resizer when a pane is 'slid open'
- , pane_animate: 1000 // applied to the pane when being animated - not applied to the resizer
- , resizer_drag: 10000 // applied to the CLONED resizer-bar when being 'dragged'
- }
-, errors: {
- pane: "pane" // description of "layout pane element" - used only in error messages
- , selector: "selector" // description of "jQuery-selector" - used only in error messages
- , addButtonError: "Error Adding Button \n\nInvalid "
- , containerMissing: "UI Layout Initialization Error\n\nThe specified layout-container does not exist."
- , centerPaneMissing: "UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element."
- , noContainerHeight: "UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!"
- , callbackError: "UI Layout Callback Error\n\nThe EVENT callback is not a valid function."
- }
-/*
- * PANE DEFAULT SETTINGS
- * - settings under the 'panes' key become the default settings for *all panes*
- * - ALL pane-options can also be set specifically for each panes, which will override these 'default values'
- */
-, panes: { // default options for 'all panes' - will be overridden by 'per-pane settings'
- applyDemoStyles: false // NOTE: renamed from applyDefaultStyles for clarity
- , closable: true // pane can open & close
- , resizable: true // when open, pane can be resized
- , slidable: true // when closed, pane can 'slide open' over other panes - closes on mouse-out
- , initClosed: false // true = init pane as 'closed'
- , initHidden: false // true = init pane as 'hidden' - no resizer-bar/spacing
- // SELECTORS
- //, paneSelector: "" // MUST be pane-specific - jQuery selector for pane
- , contentSelector: ".ui-layout-content" // INNER div/element to auto-size so only it scrolls, not the entire pane!
- , contentIgnoreSelector: ".ui-layout-ignore" // element(s) to 'ignore' when measuring 'content'
- , findNestedContent: false // true = $P.find(contentSelector), false = $P.children(contentSelector)
- // GENERIC ROOT-CLASSES - for auto-generated classNames
- , paneClass: "ui-layout-pane" // Layout Pane
- , resizerClass: "ui-layout-resizer" // Resizer Bar
- , togglerClass: "ui-layout-toggler" // Toggler Button
- , buttonClass: "ui-layout-button" // CUSTOM Buttons - eg: '[ui-layout-button]-toggle/-open/-close/-pin'
- // ELEMENT SIZE & SPACING
-	//,	size:					100			// MUST be pane-specific - initial size of pane
- , minSize: 0 // when manually resizing a pane
- , maxSize: 0 // ditto, 0 = no limit
- , spacing_open: 6 // space between pane and adjacent panes - when pane is 'open'
- , spacing_closed: 6 // ditto - when pane is 'closed'
- , togglerLength_open: 50 // Length = WIDTH of toggler button on north/south sides - HEIGHT on east/west sides
- , togglerLength_closed: 50 // 100% OR -1 means 'full height/width of resizer bar' - 0 means 'hidden'
- , togglerAlign_open: "center" // top/left, bottom/right, center, OR...
- , togglerAlign_closed: "center" // 1 => nn = offset from top/left, -1 => -nn == offset from bottom/right
- , togglerContent_open: "" // text or HTML to put INSIDE the toggler
- , togglerContent_closed: "" // ditto
- // RESIZING OPTIONS
- , resizerDblClickToggle: true //
- , autoResize: true // IF size is 'auto' or a percentage, then recalc 'pixel size' whenever the layout resizes
- , autoReopen: true // IF a pane was auto-closed due to noRoom, reopen it when there is room? False = leave it closed
- , resizerDragOpacity: 1 // option for ui.draggable
- //, resizerCursor: "" // MUST be pane-specific - cursor when over resizer-bar
- , maskContents: false // true = add DIV-mask over-or-inside this pane so can 'drag' over IFRAMES
- , maskObjects: false // true = add IFRAME-mask over-or-inside this pane to cover objects/applets - content-mask will overlay this mask
- , maskZindex: null // will override zIndexes.content_mask if specified - not applicable to iframe-panes
- , resizingGrid: false // grid size that the resizers will snap-to during resizing, eg: [20,20]
- , livePaneResizing: false // true = LIVE Resizing as resizer is dragged
- , liveContentResizing: false // true = re-measure header/footer heights as resizer is dragged
- , liveResizingTolerance: 1 // how many px change before pane resizes, to control performance
- // SLIDING OPTIONS
- , sliderCursor: "pointer" // cursor when resizer-bar will trigger 'sliding'
- , slideTrigger_open: "click" // click, dblclick, mouseenter
- , slideTrigger_close: "mouseleave"// click, mouseleave
- , slideDelay_open: 300 // applies only for mouseenter event - 0 = instant open
- , slideDelay_close: 300 // applies only for mouseleave event (300ms is the minimum!)
-	,	hideTogglerOnSlide:		false		// true = hide the toggler-button while the pane is slid-open
- , preventQuickSlideClose: $.layout.browser.webkit // Chrome triggers slideClosed as it is opening
- , preventPrematureSlideClose: false // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
- // PANE-SPECIFIC TIPS & MESSAGES
- , tips: {
- Open: "Open" // eg: "Open Pane"
- , Close: "Close"
- , Resize: "Resize"
- , Slide: "Slide Open"
- , Pin: "Pin"
- , Unpin: "Un-Pin"
- , noRoomToOpen: "Not enough room to show this panel." // alert if user tries to open a pane that cannot
- , minSizeWarning: "Panel has reached its minimum size" // displays in browser statusbar
- , maxSizeWarning: "Panel has reached its maximum size" // ditto
- }
- // HOT-KEYS & MISC
- , showOverflowOnHover: false // will bind allowOverflow() utility to pane.onMouseOver
-	,	enableCursorHotkey:		true		// enable 'cursor' hotkeys
- //, customHotkey: "" // MUST be pane-specific - EITHER a charCode OR a character
- , customHotkeyModifier: "SHIFT" // either 'SHIFT', 'CTRL' or 'CTRL+SHIFT' - NOT 'ALT'
- // PANE ANIMATION
-	// NOTE: action-specific fxName_*, fxSpeed_* & fxSettings_* options (eg: fxName_open) are auto-generated if not passed
- , fxName: "slide" // ('none' or blank), slide, drop, scale -- only relevant to 'open' & 'close', NOT 'size'
- , fxSpeed: null // slow, normal, fast, 200, nnn - if passed, will OVERRIDE fxSettings.duration
- , fxSettings: {} // can be passed, eg: { easing: "easeOutBounce", duration: 1500 }
- , fxOpacityFix: true // tries to fix opacity in IE to restore anti-aliasing after animation
- , animatePaneSizing: false // true = animate resizing after dragging resizer-bar OR sizePane() is called
- /* NOTE: Action-specific FX options are auto-generated from the options above if not specifically set:
- fxName_open: "slide" // 'Open' pane animation
-		fxName_close:	"slide"	// 'Close' pane animation
- fxName_size: "slide" // 'Size' pane animation - when animatePaneSizing = true
- fxSpeed_open: null
- fxSpeed_close: null
- fxSpeed_size: null
- fxSettings_open: {}
- fxSettings_close: {}
- fxSettings_size: {}
- */
- // CHILD/NESTED LAYOUTS
- , childOptions: null // Layout-options for nested/child layout - even {} is valid as options
- , initChildLayout: true // true = child layout will be created as soon as _this_ layout completes initialization
- , destroyChildLayout: true // true = destroy child-layout if this pane is destroyed
- , resizeChildLayout: true // true = trigger child-layout.resizeAll() when this pane is resized
- // EVENT TRIGGERING
- , triggerEventsOnLoad: false // true = trigger onopen OR onclose callbacks when layout initializes
- , triggerEventsDuringLiveResize: true // true = trigger onresize callback REPEATEDLY if livePaneResizing==true
- // PANE CALLBACKS
- , onshow_start: null // CALLBACK when pane STARTS to Show - BEFORE onopen/onhide_start
- , onshow_end: null // CALLBACK when pane ENDS being Shown - AFTER onopen/onhide_end
- , onhide_start: null // CALLBACK when pane STARTS to Close - BEFORE onclose_start
- , onhide_end: null // CALLBACK when pane ENDS being Closed - AFTER onclose_end
- , onopen_start: null // CALLBACK when pane STARTS to Open
- , onopen_end: null // CALLBACK when pane ENDS being Opened
- , onclose_start: null // CALLBACK when pane STARTS to Close
- , onclose_end: null // CALLBACK when pane ENDS being Closed
- , onresize_start: null // CALLBACK when pane STARTS being Resized ***FOR ANY REASON***
- , onresize_end: null // CALLBACK when pane ENDS being Resized ***FOR ANY REASON***
- , onsizecontent_start: null // CALLBACK when sizing of content-element STARTS
- , onsizecontent_end: null // CALLBACK when sizing of content-element ENDS
- , onswap_start: null // CALLBACK when pane STARTS to Swap
- , onswap_end: null // CALLBACK when pane ENDS being Swapped
- , ondrag_start: null // CALLBACK when pane STARTS being ***MANUALLY*** Resized
- , ondrag_end: null // CALLBACK when pane ENDS being ***MANUALLY*** Resized
- }
-/*
- * PANE-SPECIFIC SETTINGS
- * - options listed below MUST be specified per-pane - they CANNOT be set under 'panes'
- * - all options under the 'panes' key can also be set specifically for any pane
- *	- most options under the 'panes' key apply only to 'border-panes' - NOT the center-pane
- */
-, north: {
- paneSelector: ".ui-layout-north"
- , size: "auto" // eg: "auto", "30%", .30, 200
- , resizerCursor: "n-resize" // custom = url(myCursor.cur)
- , customHotkey: "" // EITHER a charCode (43) OR a character ("o")
- }
-, south: {
- paneSelector: ".ui-layout-south"
- , size: "auto"
- , resizerCursor: "s-resize"
- , customHotkey: ""
- }
-, east: {
- paneSelector: ".ui-layout-east"
- , size: 200
- , resizerCursor: "e-resize"
- , customHotkey: ""
- }
-, west: {
- paneSelector: ".ui-layout-west"
- , size: 200
- , resizerCursor: "w-resize"
- , customHotkey: ""
- }
-, center: {
- paneSelector: ".ui-layout-center"
- , minWidth: 0
- , minHeight: 0
- }
-};
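-
-/*
- * Usage sketch (illustrative only - the selector, sizes and 'myResizeHandler' below
- * are hypothetical): options may be passed nested, as structured above, or in flat
- * 'west__size' format, which $.layout.transformData() converts - see below.
- *
- *	var myLayout = $("body").layout({
- *		name:	"mainLayout"
- *	,	west:	{ size: 250, initClosed: true }
- *	,	south__size:			"auto"				// flat-format equivalent of south: { size: "auto" }
- *	,	center__onresize_end:	"myResizeHandler"	// callback referenced by (global) function name
- *	});
- */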
-
-$.layout.optionsMap = {
- // layout/global options - NOT pane-options
- layout: ("stateManagement,effects,zIndexes,errors,"
- + "name,zIndex,scrollToBookmarkOnLoad,showErrorMessages,"
- + "resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay,"
- + "onresizeall,onresizeall_start,onresizeall_end,onload,onunload").split(",")
-// borderPanes: [ ALL options that are NOT specified as 'layout' ]
- // default.panes options that apply to the center-pane (most options apply _only_ to border-panes)
-, center: ("paneClass,contentSelector,contentIgnoreSelector,findNestedContent,applyDemoStyles,triggerEventsOnLoad,"
- + "showOverflowOnHover,maskContents,maskObjects,liveContentResizing,"
- + "childOptions,initChildLayout,resizeChildLayout,destroyChildLayout,"
- + "onresize,onresize_start,onresize_end,onsizecontent,onsizecontent_start,onsizecontent_end").split(",")
- // options that MUST be specifically set 'per-pane' - CANNOT set in the panes (defaults) key
-, noDefault: ("paneSelector,resizerCursor,customHotkey").split(",")
-};
-
-/**
- * Processes the options passed in, converting flat-format data into subkey (JSON) format
- * In flat-format, subkeys are _currently_ separated with 2 underscores, like north__optName
- * Plugins may also call this method so they can transform their own data
- *
- * @param {!Object} hash Data/options passed by user - may be a single level or nested levels
- * @return {Object}		Returns the options hash restructured into nested (subkey) format
- */
-$.layout.transformData = function (hash) {
- var json = { panes: {}, center: {} } // init return object
- , data, branch, optKey, keys, key, val, i, c;
-
- if (typeof hash !== "object") return json; // no options passed
-
- // convert all 'flat-keys' to 'sub-key' format
- for (optKey in hash) {
- branch = json;
- data = $.layout.optionsMap.layout;
- val = hash[ optKey ];
- keys = optKey.split("__"); // eg: west__size or north__fxSettings__duration
- c = keys.length - 1;
- // convert underscore-delimited to subkeys
- for (i=0; i <= c; i++) {
- key = keys[i];
- if (i === c)
- branch[key] = val;
- else if (!branch[key])
- branch[key] = {}; // create the subkey
- // recurse to sub-key for next loop - if not done
- branch = branch[key];
- }
- }
-
- return json;
-};
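-
-/*
- * Example (illustrative) of the flat-to-nested conversion performed above:
- *
- *	$.layout.transformData({ west__size: 250, north__fxSettings__duration: 500 })
- *	// returns: { panes: {}, center: {}, west: { size: 250 }, north: { fxSettings: { duration: 500 } } }
- */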
-
-// INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
-$.layout.backwardCompatibility = {
- // data used by renameOldOptions()
- map: {
- // OLD Option Name: NEW Option Name
- applyDefaultStyles: "applyDemoStyles"
- , resizeNestedLayout: "resizeChildLayout"
- , resizeWhileDragging: "livePaneResizing"
- , resizeContentWhileDragging: "liveContentResizing"
- , triggerEventsWhileDragging: "triggerEventsDuringLiveResize"
- , maskIframesOnResize: "maskContents"
- , useStateCookie: "stateManagement.enabled"
- , "cookie.autoLoad": "stateManagement.autoLoad"
- , "cookie.autoSave": "stateManagement.autoSave"
- , "cookie.keys": "stateManagement.stateKeys"
- , "cookie.name": "stateManagement.cookie.name"
- , "cookie.domain": "stateManagement.cookie.domain"
- , "cookie.path": "stateManagement.cookie.path"
- , "cookie.expires": "stateManagement.cookie.expires"
- , "cookie.secure": "stateManagement.cookie.secure"
- // OLD Language options
- , noRoomToOpenTip: "tips.noRoomToOpen"
- , togglerTip_open: "tips.Close" // open = Close
- , togglerTip_closed: "tips.Open" // closed = Open
- , resizerTip: "tips.Resize"
- , sliderTip: "tips.Slide"
- }
-
-/**
-* @param {Object} opts
-*/
-, renameOptions: function (opts) {
- var map = $.layout.backwardCompatibility.map
- , oldData, newData, value
- ;
- for (var itemPath in map) {
- oldData = getBranch( itemPath );
- value = oldData.branch[ oldData.key ];
- if (value !== undefined) {
- newData = getBranch( map[itemPath], true );
- newData.branch[ newData.key ] = value;
- delete oldData.branch[ oldData.key ];
- }
- }
-
- /**
- * @param {string} path
- * @param {boolean=} [create=false] Create path if does not exist
- */
- function getBranch (path, create) {
- var a = path.split(".") // split keys into array
- , c = a.length - 1
- , D = { branch: opts, key: a[c] } // init branch at top & set key (last item)
- , i = 0, k, undef;
- for (; i<c; i++) { // skip the last key (data)
- k = a[i];
- if (D.branch[ k ] == undefined) { // child-key does not exist
- if (create) {
- D.branch = D.branch[ k ] = {}; // create child-branch
- }
- else // can't go any farther
- D.branch = {}; // branch is undefined
- }
- else
- D.branch = D.branch[ k ]; // get child-branch
- }
- return D;
- };
- }
-
-/**
-* @param {Object} opts
-*/
-, renameAllOptions: function (opts) {
- var ren = $.layout.backwardCompatibility.renameOptions;
- // rename root (layout) options
- ren( opts );
- // rename 'defaults' to 'panes'
- if (opts.defaults) {
- if (typeof opts.panes !== "object")
- opts.panes = {};
- $.extend(true, opts.panes, opts.defaults);
- delete opts.defaults;
- }
- // rename options in the the options.panes key
- if (opts.panes) ren( opts.panes );
- // rename options inside *each pane key*, eg: options.west
- $.each($.layout.config.allPanes, function (i, pane) {
- if (opts[pane]) ren( opts[pane] );
- });
- return opts;
- }
-};
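-
-/*
- * Example (illustrative) of the renaming above - renameAllOptions() maps deprecated
- * option-names onto their current equivalents before the options are processed:
- *
- *	var opts = { applyDefaultStyles: true, defaults: { resizeWhileDragging: true } };
- *	$.layout.backwardCompatibility.renameAllOptions( opts );
- *	// opts is now: { applyDemoStyles: true, panes: { livePaneResizing: true } }
- */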
-
-
-
-
-/* ============================================================
- * BEGIN WIDGET: $( selector ).layout( {options} );
- * ============================================================
- */
-$.fn.layout = function (opts) {
- var
-
- // local aliases to global data
- browser = $.layout.browser
-, _c = $.layout.config
-
-	// local aliases to utility methods
-, cssW = $.layout.cssWidth
-, cssH = $.layout.cssHeight
-, elDims = $.layout.getElementDimensions
-, elCSS = $.layout.getElementCSS
-, evtObj = $.layout.getEventObject
-, evtPane = $.layout.parsePaneName
-
-/**
- * options - populated by initOptions()
- */
-, options = $.extend(true, {}, $.layout.defaults)
-, effects = options.effects = $.extend(true, {}, $.layout.effects)
-
-/**
- * layout-state object
- */
-, state = {
- // generate unique ID to use for event.namespace so can unbind only events added by 'this layout'
- id: "layout"+ $.now() // code uses alias: sID
- , initialized: false
- , container: {} // init all keys
- , north: {}
- , south: {}
- , east: {}
- , west: {}
- , center: {}
- }
-
-/**
- * parent/child-layout pointers
- */
-//, hasParentLayout = false - exists ONLY inside Instance so can be set externally
-, children = {
- north: null
- , south: null
- , east: null
- , west: null
- , center: null
- }
-
-/*
- * ###########################
- * INTERNAL HELPER FUNCTIONS
- * ###########################
- */
-
- /**
- * Manages all internal timers
- */
-, timer = {
- data: {}
- , set: function (s, fn, ms) { timer.clear(s); timer.data[s] = setTimeout(fn, ms); }
- , clear: function (s) { var t=timer.data; if (t[s]) {clearTimeout(t[s]); delete t[s];} }
- }
-
- /**
- * Alert or console.log a message - IF option is enabled.
- *
- * @param {(string|!Object)} msg Message (or debug-data) to display
- * @param {?boolean} popup True by default, means 'alert', false means use console.log
- * @param {?boolean} debug True means is a widget debugging message
- */
-, _log = function (msg, popup, debug) {
- var o = options;
- if ((o.showErrorMessages && !debug) || (debug && o.showDebugMessages))
- $.layout.msg( o.name +' / '+ msg, (popup !== false) );
- return false;
- }
-
- /**
- * Executes a Callback function after a trigger event, like resize, open or close
- *
- * @param {string} evtName Name of the layout callback, eg "onresize_start"
- * @param {?string} pane This is passed only so we can pass the 'pane object' to the callback
- * @param {?string|?boolean} skipBoundEvents True = do not run events bound to the elements - only the callbacks set in options
- */
-, _runCallbacks = function (evtName, pane, skipBoundEvents) {
- var paneCB = pane && isStr(pane)
- , s = paneCB ? state[pane] : state
- , o = paneCB ? options[pane] : options
- , lName = options.name
-		// option names like onopen and onopen_end are interchangeable in options - the '_end' suffix is optional
- , lng = evtName + (evtName.match(/_/) ? "" : "_end")
- , shrt = lng.match(/_end$/) ? lng.substr(0, lng.length - 4) : ""
- , fn = o[lng] || o[shrt]
- , retVal = "NC" // NC = No Callback
- , args = []
- , $P
- ;
- if ( !paneCB && $.type(skipBoundEvents) !== 'boolean' )
- skipBoundEvents = pane; // allow pane param to be skipped for Layout callback
-
- // first trigger the callback set in the options
- if (fn) {
- try {
- // convert function name (string) to function object
- if (isStr( fn )) {
- if (fn.match(/,/)) {
- // function name cannot contain a comma,
- // so must be a function name AND a parameter to pass
-						args = fn.split(",");
-						fn = eval(args[0]);
- }
- else // just the name of an external function?
- fn = eval(fn);
- }
- // execute the callback, if exists
- if ($.isFunction( fn )) {
- if (args.length)
- retVal = fn(args[1]); // pass the argument parsed from 'list'
- else if ( paneCB )
- // pass data: pane-name, pane-element, pane-state, pane-options, and layout-name
- retVal = fn( pane, $Ps[pane], s, o, lName );
- else // must be a layout/container callback - pass suitable info
- retVal = fn( Instance, s, o, lName );
- }
- }
- catch (ex) {
- _log( options.errors.callbackError.replace(/EVENT/, $.trim(pane +" "+ lng)), false );
- }
- }
-
- // trigger additional events bound directly to the pane
- if (!skipBoundEvents && retVal !== false) {
- if ( paneCB ) { // PANE events can be bound to each pane-elements
- $P = $Ps[pane];
- o = options[pane];
- s = state[pane];
- $P.triggerHandler('layoutpane'+ lng, [ pane, $P, s, o, lName ]);
- if (shrt)
- $P.triggerHandler('layoutpane'+ shrt, [ pane, $P, s, o, lName ]);
- }
- else { // LAYOUT events can be bound to the container-element
- $N.triggerHandler('layout'+ lng, [ Instance, s, o, lName ]);
- if (shrt)
- $N.triggerHandler('layout'+ shrt, [ Instance, s, o, lName ]);
- }
- }
-
- // ALWAYS resizeChildLayout after a resize event - even during initialization
- if (evtName === "onresize_end" || evtName === "onsizecontent_end")
- resizeChildLayout(pane);
-
- return retVal;
- }
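-
-	/* Callback-signature sketch (illustrative only): a pane-callback set in options,
-	 * eg: options.west.onresize_end, is invoked above as:
-	 *
-	 *	fn( paneName, $paneElement, paneState, paneOptions, layoutName );
-	 *
-	 * so a handler passed in the layout-options could look like:
-	 *
-	 *	west__onresize_end: function (pane, $P, s, o, lName) {
-	 *		if (window.console) console.log(lName +": "+ pane +" was resized");
-	 *	}
-	 */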
-
-
- /**
- * cure iframe display issues in IE & other browsers
- */
-, _fixIframe = function (pane) {
- if (browser.mozilla) return; // skip FireFox - it auto-refreshes iframes onShow
- var $P = $Ps[pane];
- // if the 'pane' is an iframe, do it
- if (state[pane].tagName === "IFRAME")
- $P.css(_c.hidden).css(_c.visible);
- else // ditto for any iframes INSIDE the pane
- $P.find('IFRAME').css(_c.hidden).css(_c.visible);
- }
-
- /**
- * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
- * @param {number=} outerSize (optional) Can pass a width, allowing calculations BEFORE element is resized
- * @return {number} Returns the innerHeight/Width of el by subtracting padding and borders
- */
-, cssSize = function (pane, outerSize) {
- var fn = _c[pane].dir=="horz" ? cssH : cssW;
- return fn($Ps[pane], outerSize);
- }
-
- /**
- * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
- * @return {Object} Returns hash of minWidth & minHeight
- */
-, cssMinDims = function (pane) {
- // minWidth/Height means CSS width/height = 1px
- var $P = $Ps[pane]
- , dir = _c[pane].dir
- , d = {
- minWidth: 1001 - cssW($P, 1000)
- , minHeight: 1001 - cssH($P, 1000)
- }
- ;
- if (dir === "horz") d.minSize = d.minHeight;
- if (dir === "vert") d.minSize = d.minWidth;
- return d;
- }
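-
-	/* Worked example (illustrative) of the calculation above: if a pane has 10px of
-	 * total horizontal border + padding, then cssW($P, 1000) returns 990, so
-	 * minWidth = 1001 - 990 = 11 - the smallest outer-width that leaves 1px of CSS width.
-	 */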
-
- // TODO: see if these methods can be made more useful...
- // TODO: *maybe* return cssW/H from these so caller can use this info
-
- /**
- * @param {(string|!Object)} el
- * @param {number=} outerWidth
- * @param {boolean=} [autoHide=false]
- */
-, setOuterWidth = function (el, outerWidth, autoHide) {
- var $E = el, w;
- if (isStr(el)) $E = $Ps[el]; // west
- else if (!el.jquery) $E = $(el);
- w = cssW($E, outerWidth);
- $E.css({ width: w });
- if (w > 0) {
- if (autoHide && $E.data('autoHidden') && $E.innerHeight() > 0) {
- $E.show().data('autoHidden', false);
- if (!browser.mozilla) // FireFox refreshes iframes - IE does not
- // make hidden, then visible to 'refresh' display after animation
- $E.css(_c.hidden).css(_c.visible);
- }
- }
- else if (autoHide && !$E.data('autoHidden'))
- $E.hide().data('autoHidden', true);
- }
-
- /**
- * @param {(string|!Object)} el
- * @param {number=} outerHeight
- * @param {boolean=} [autoHide=false]
- */
-, setOuterHeight = function (el, outerHeight, autoHide) {
- var $E = el, h;
- if (isStr(el)) $E = $Ps[el]; // west
- else if (!el.jquery) $E = $(el);
- h = cssH($E, outerHeight);
- $E.css({ height: h, visibility: "visible" }); // may have been 'hidden' by sizeContent
- if (h > 0 && $E.innerWidth() > 0) {
- if (autoHide && $E.data('autoHidden')) {
- $E.show().data('autoHidden', false);
- if (!browser.mozilla) // FireFox refreshes iframes - IE does not
- $E.css(_c.hidden).css(_c.visible);
- }
- }
- else if (autoHide && !$E.data('autoHidden'))
- $E.hide().data('autoHidden', true);
- }
-
- /**
- * @param {(string|!Object)} el
- * @param {number=} outerSize
- * @param {boolean=} [autoHide=false]
- */
-, setOuterSize = function (el, outerSize, autoHide) {
-		var pane = isStr(el) ? el : $(el).data("layoutEdge"); // derive pane-name; 'pane' was previously undefined here
-		if (_c[pane].dir=="horz") // pane = north or south
- setOuterHeight(el, outerSize, autoHide);
- else // pane = east or west
- setOuterWidth(el, outerSize, autoHide);
- }
-
-
- /**
- * Converts any 'size' params to a pixel/integer size, if not already
- * If 'auto' or a decimal/percentage is passed as 'size', a pixel-size is calculated
-	 *
- * @param {string} pane
- * @param {(string|number)=} size
- * @param {string=} [dir]
- * @return {number}
- */
-, _parseSize = function (pane, size, dir) {
- if (!dir) dir = _c[pane].dir;
-
- if (isStr(size) && size.match(/%/))
- size = (size === '100%') ? -1 : parseInt(size, 10) / 100; // convert % to decimal
-
- if (size === 0)
- return 0;
- else if (size >= 1)
- return parseInt(size, 10);
-
- var o = options, avail = 0;
- if (dir=="horz") // north or south or center.minHeight
- avail = sC.innerHeight - ($Ps.north ? o.north.spacing_open : 0) - ($Ps.south ? o.south.spacing_open : 0);
- else if (dir=="vert") // east or west or center.minWidth
- avail = sC.innerWidth - ($Ps.west ? o.west.spacing_open : 0) - ($Ps.east ? o.east.spacing_open : 0);
-
- if (size === -1) // -1 == 100%
- return avail;
- else if (size > 0) // percentage, eg: .25
- return round(avail * size);
- else if (pane=="center")
- return 0;
- else { // size < 0 || size=='auto' || size==Missing || size==Invalid
- // auto-size the pane
- var dim = (dir === "horz" ? "height" : "width")
- , $P = $Ps[pane]
- , $C = dim === 'height' ? $Cs[pane] : false
- , vis = $.layout.showInvisibly($P) // show pane invisibly if hidden
- , szP = $P.css(dim) // SAVE current pane size
- , szC = $C ? $C.css(dim) : 0 // SAVE current content size
- ;
- $P.css(dim, "auto");
- if ($C) $C.css(dim, "auto");
- size = (dim === "height") ? $P.outerHeight() : $P.outerWidth(); // MEASURE
- $P.css(dim, szP).css(vis); // RESET size & visibility
- if ($C) $C.css(dim, szC);
- return size;
- }
- }
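-
-	/* Worked examples (illustrative) for the parsing above, assuming the container
-	 * innerHeight is 600 and both north & south exist with spacing_open = 6,
-	 * so 'avail' = 600 - 6 - 6 = 588:
-	 *
-	 *	_parseSize("north", 200)	=> 200	// integers >= 1 are treated as pixels
-	 *	_parseSize("north", "25%")	=> 147	// percentage of 'avail' (588 * 0.25, floored)
-	 *	_parseSize("north", "100%")	=> 588	// converted to -1 internally = all available space
-	 *	_parseSize("north", "auto")	=> the pane's measured auto-height
-	 */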
-
- /**
- * Calculates current 'size' (outer-width or outer-height) of a border-pane - optionally with 'pane-spacing' added
- *
- * @param {(string|!Object)} pane
- * @param {boolean=} [inclSpace=false]
- * @return {number} Returns EITHER Width for east/west panes OR Height for north/south panes
- */
-, getPaneSize = function (pane, inclSpace) {
- var
- $P = $Ps[pane]
- , o = options[pane]
- , s = state[pane]
- , oSp = (inclSpace ? o.spacing_open : 0)
- , cSp = (inclSpace ? o.spacing_closed : 0)
- ;
- if (!$P || s.isHidden)
- return 0;
- else if (s.isClosed || (s.isSliding && inclSpace))
- return cSp;
- else if (_c[pane].dir === "horz")
- return $P.outerHeight() + oSp;
- else // dir === "vert"
- return $P.outerWidth() + oSp;
- }
-
- /**
- * Calculate min/max pane dimensions and limits for resizing
- *
- * @param {string} pane
- * @param {boolean=} [slide=false]
- */
-, setSizeLimits = function (pane, slide) {
- if (!isInitialized()) return;
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , dir = c.dir
- , side = c.side.toLowerCase()
- , type = c.sizeType.toLowerCase()
- , isSliding = (slide != undefined ? slide : s.isSliding) // only open() passes 'slide' param
- , $P = $Ps[pane]
- , paneSpacing = o.spacing_open
- // measure the pane on the *opposite side* from this pane
- , altPane = _c.oppositeEdge[pane]
- , altS = state[altPane]
- , $altP = $Ps[altPane]
- , altPaneSize = (!$altP || altS.isVisible===false || altS.isSliding ? 0 : (dir=="horz" ? $altP.outerHeight() : $altP.outerWidth()))
- , altPaneSpacing = ((!$altP || altS.isHidden ? 0 : options[altPane][ altS.isClosed !== false ? "spacing_closed" : "spacing_open" ]) || 0)
- // limitSize prevents this pane from 'overlapping' opposite pane
- , containerSize = (dir=="horz" ? sC.innerHeight : sC.innerWidth)
- , minCenterDims = cssMinDims("center")
- , minCenterSize = dir=="horz" ? max(options.center.minHeight, minCenterDims.minHeight) : max(options.center.minWidth, minCenterDims.minWidth)
- // if pane is 'sliding', then ignore center and alt-pane sizes - because 'overlays' them
- , limitSize = (containerSize - paneSpacing - (isSliding ? 0 : (_parseSize("center", minCenterSize, dir) + altPaneSize + altPaneSpacing)))
- , minSize = s.minSize = max( _parseSize(pane, o.minSize), cssMinDims(pane).minSize )
- , maxSize = s.maxSize = min( (o.maxSize ? _parseSize(pane, o.maxSize) : 100000), limitSize )
- , r = s.resizerPosition = {} // used to set resizing limits
- , top = sC.insetTop
- , left = sC.insetLeft
- , W = sC.innerWidth
- , H = sC.innerHeight
- , rW = o.spacing_open // subtract resizer-width to get top/left position for south/east
- ;
- switch (pane) {
- case "north": r.min = top + minSize;
- r.max = top + maxSize;
- break;
- case "west": r.min = left + minSize;
- r.max = left + maxSize;
- break;
- case "south": r.min = top + H - maxSize - rW;
- r.max = top + H - minSize - rW;
- break;
- case "east": r.min = left + W - maxSize - rW;
- r.max = left + W - minSize - rW;
- break;
-		}
- }
-
- /**
- * Returns data for setting the size/position of center pane. Also used to set Height for east/west panes
- *
- * @return JSON Returns a hash of all dimensions: top, bottom, left, right, (outer) width and (outer) height
- */
-, calcNewCenterPaneDims = function () {
- var d = {
- top: getPaneSize("north", true) // true = include 'spacing' value for pane
- , bottom: getPaneSize("south", true)
- , left: getPaneSize("west", true)
- , right: getPaneSize("east", true)
- , width: 0
- , height: 0
- };
-
- // NOTE: sC = state.container
- // calc center-pane outer dimensions
- d.width = sC.innerWidth - d.left - d.right; // outerWidth
- d.height = sC.innerHeight - d.bottom - d.top; // outerHeight
- // add the 'container border/padding' to get final positions relative to the container
- d.top += sC.insetTop;
- d.bottom += sC.insetBottom;
- d.left += sC.insetLeft;
- d.right += sC.insetRight;
-
- return d;
- }
-
-
- /**
- * @param {!Object} el
- * @param {boolean=} [allStates=false]
- */
-, getHoverClasses = function (el, allStates) {
- var
- $El = $(el)
- , type = $El.data("layoutRole")
- , pane = $El.data("layoutEdge")
- , o = options[pane]
- , root = o[type +"Class"]
- , _pane = "-"+ pane // eg: "-west"
- , _open = "-open"
- , _closed = "-closed"
- , _slide = "-sliding"
- , _hover = "-hover " // NOTE the trailing space
- , _state = $El.hasClass(root+_closed) ? _closed : _open
- , _alt = _state === _closed ? _open : _closed
- , classes = (root+_hover) + (root+_pane+_hover) + (root+_state+_hover) + (root+_pane+_state+_hover)
- ;
- if (allStates) // when 'removing' classes, also remove alternate-state classes
- classes += (root+_alt+_hover) + (root+_pane+_alt+_hover);
-
- if (type=="resizer" && $El.hasClass(root+_slide))
- classes += (root+_slide+_hover) + (root+_pane+_slide+_hover);
-
- return $.trim(classes);
- }
-, addHover = function (evt, el) {
- var $E = $(el || this);
- if (evt && $E.data("layoutRole") === "toggler")
- evt.stopPropagation(); // prevent triggering 'slide' on Resizer-bar
- $E.addClass( getHoverClasses($E) );
- }
-, removeHover = function (evt, el) {
- var $E = $(el || this);
- $E.removeClass( getHoverClasses($E, true) );
- }
-
-, onResizerEnter = function (evt) { // ALSO called by toggler.mouseenter
- if ($.fn.disableSelection)
- $("body").disableSelection();
- }
-, onResizerLeave = function (evt, el) {
- var
- e = el || this // el is only passed when called by the timer
- , pane = $(e).data("layoutEdge")
- , name = pane +"ResizerLeave"
- ;
- timer.clear(pane+"_openSlider"); // cancel slideOpen timer, if set
- timer.clear(name); // cancel enableSelection timer - may re/set below
- // this method calls itself on a timer because it needs to allow
- // enough time for dragging to kick-in and set the isResizing flag
- // dragging has a 100ms delay set, so this delay must be >100
- if (!el) // 1st call - mouseleave event
- timer.set(name, function(){ onResizerLeave(evt, e); }, 200);
- // if user is resizing, then dragStop will enableSelection(), so can skip it here
- else if (!state[pane].isResizing && $.fn.enableSelection) // 2nd call - by timer
- $("body").enableSelection();
- }
-
-/*
- * ###########################
- * INITIALIZATION METHODS
- * ###########################
- */
-
- /**
- * Initialize the layout - called automatically whenever an instance of layout is created
- *
- * @see none - triggered onInit
- * @return mixed true = fully initialized | false = panes not initialized (yet) | 'cancel' = abort
- */
-, _create = function () {
- // initialize config/options
- initOptions();
- var o = options;
-
- // TEMP state so isInitialized returns true during init process
- state.creatingLayout = true;
-
- // init plugins for this layout, if there are any (eg: stateManagement)
- runPluginCallbacks( Instance, $.layout.onCreate );
-
- // options & state have been initialized, so now run beforeLoad callback
- // onload will CANCEL layout creation if it returns false
- if (false === _runCallbacks("onload_start"))
- return 'cancel';
-
- // initialize the container element
- _initContainer();
-
- // bind hotkey function - keyDown - if required
- initHotkeys();
-
- // bind window.onunload
- $(window).bind("unload."+ sID, unload);
-
- // init plugins for this layout, if there are any (eg: customButtons)
- runPluginCallbacks( Instance, $.layout.onLoad );
-
- // if layout elements are hidden, then layout WILL NOT complete initialization!
- // initLayoutElements will set initialized=true and run the onload callback IF successful
- if (o.initPanes) _initLayoutElements();
-
- delete state.creatingLayout;
-
- return state.initialized;
- }
-
- /**
- * Initialize the layout IF not already
- *
- * @see All methods in Instance run this test
- * @return boolean true = layoutElements have been initialized | false = panes are not initialized (yet)
- */
-, isInitialized = function () {
- if (state.initialized || state.creatingLayout) return true; // already initialized
- else return _initLayoutElements(); // try to init panes NOW
- }
-
- /**
-	 * Initialize the layout *elements* - called by _create(), and by isInitialized() if needed
-	 *
-	 * @see  _create() & isInitialized
-	 * @return  boolean  true = layout elements initialized successfully | false = could not initialize (yet)
- */
-, _initLayoutElements = function (retry) {
- // initialize config/options
- var o = options;
-
- // CANNOT init panes inside a hidden container!
- if (!$N.is(":visible")) {
- // handle Chrome bug where popup window 'has no height'
- // if layout is BODY element, try again in 50ms
- // SEE: http://layout.jquery-dev.net/samples/test_popup_window.html
- if ( !retry && browser.webkit && $N[0].tagName === "BODY" )
- setTimeout(function(){ _initLayoutElements(true); }, 50);
- return false;
- }
-
- // a center pane is required, so make sure it exists
- if (!getPane("center").length) {
- return _log( o.errors.centerPaneMissing );
- }
-
- // TEMP state so isInitialized returns true during init process
- state.creatingLayout = true;
-
- // update Container dims
- $.extend(sC, elDims( $N ));
-
- // initialize all layout elements
- initPanes(); // size & position panes - calls initHandles() - which calls initResizable()
-
- if (o.scrollToBookmarkOnLoad) {
- var l = self.location;
- if (l.hash) l.replace( l.hash ); // scrollTo Bookmark
- }
-
- // check to see if this layout 'nested' inside a pane
- if (Instance.hasParentLayout)
- o.resizeWithWindow = false;
- // bind resizeAll() for 'this layout instance' to window.resize event
- else if (o.resizeWithWindow)
- $(window).bind("resize."+ sID, windowResize);
-
- delete state.creatingLayout;
- state.initialized = true;
-
- // init plugins for this layout, if there are any
- runPluginCallbacks( Instance, $.layout.onReady );
-
- // now run the onload callback, if exists
- _runCallbacks("onload_end");
-
- return true; // elements initialized successfully
- }
-
- /**
- * Initialize nested layouts - called when _initLayoutElements completes
- *
- * NOT CURRENTLY USED
- *
- * @see _initLayoutElements
- * @return An object pointer to the instance created
- */
-, _initChildLayouts = function () {
- $.each(_c.allPanes, function (idx, pane) {
- if (options[pane].initChildLayout)
- createChildLayout( pane );
- });
- }
-
- /**
- * Initialize nested layouts for a specific pane - can optionally pass layout-options
- *
- * @see _initChildLayouts
- * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
-	 * @param {Object=}			[opts]			Layout-options - if passed, will OVERRIDE options[pane].childOptions
- * @return An object pointer to the layout instance created - or null
- */
-, createChildLayout = function (evt_or_pane, opts) {
- var pane = evtPane.call(this, evt_or_pane)
- , $P = $Ps[pane]
- , C = children
- ;
- if ($P) {
- var $C = $Cs[pane]
- , o = opts || options[pane].childOptions
- , d = "layout"
- // determine which element is supposed to be the 'child container'
- // if pane has a 'containerSelector' OR a 'content-div', use those instead of the pane
- , $Cont = o.containerSelector ? $P.find( o.containerSelector ) : ($C || $P)
- , containerFound = $Cont.length
- // see if a child-layout ALREADY exists on this element
- , child = containerFound ? (C[pane] = $Cont.data(d) || null) : null
- ;
- // if no layout exists, but childOptions are set, try to create the layout now
- if (!child && containerFound && o)
- child = C[pane] = $Cont.eq(0).layout(o) || null;
- if (child)
- child.hasParentLayout = true; // set parent-flag in child
- }
- Instance[pane].child = C[pane]; // ALWAYS set pane-object pointer, even if null
- }
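-
-	/* Usage sketch (illustrative only): a nested layout can be created from pane-options, eg:
-	 *
-	 *	$("body").layout({
-	 *		west__childOptions: {
-	 *			south__size: 100	// options for the layout nested INSIDE the west-pane
-	 *		}
-	 *	});
-	 *
-	 * or later - if createChildLayout is exposed on the layout Instance - by calling
-	 * myLayout.createChildLayout( "west", { south__size: 100 } ).
-	 */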
-
-, windowResize = function () {
- var delay = Number(options.resizeWithWindowDelay);
- if (delay < 10) delay = 100; // MUST have a delay!
-		// resizing uses a delay-loop because the resize event fires repeatedly - except in FF, but delay anyway
- timer.clear("winResize"); // if already running
- timer.set("winResize", function(){
- timer.clear("winResize");
- timer.clear("winResizeRepeater");
- var dims = elDims( $N );
- // only trigger resizeAll() if container has changed size
- if (dims.innerWidth !== sC.innerWidth || dims.innerHeight !== sC.innerHeight)
- resizeAll();
- }, delay);
- // ALSO set fixed-delay timer, if not already running
- if (!timer.data["winResizeRepeater"]) setWindowResizeRepeater();
- }
-
-, setWindowResizeRepeater = function () {
- var delay = Number(options.resizeWithWindowMaxDelay);
- if (delay > 0)
- timer.set("winResizeRepeater", function(){ setWindowResizeRepeater(); resizeAll(); }, delay);
- }
-
-, unload = function () {
- var o = options;
-
- _runCallbacks("onunload_start");
-
-		// trigger plugin callbacks for this layout (eg: stateManagement)
- runPluginCallbacks( Instance, $.layout.onUnload );
-
- _runCallbacks("onunload_end");
- }
-
- /**
- * Validate and initialize container CSS and events
- *
- * @see _create()
- */
-, _initContainer = function () {
- var
- N = $N[0]
- , tag = sC.tagName = N.tagName
- , id = sC.id = N.id
- , cls = sC.className = N.className
- , o = options
- , name = o.name
- , fullPage= (tag === "BODY")
- , props = "overflow,position,margin,padding,border"
- , css = "layoutCSS"
- , CSS = {}
- , hid = "hidden" // used A LOT!
- // see if this container is a 'pane' inside an outer-layout
- , parent = $N.data("parentLayout") // parent-layout Instance
- , pane = $N.data("layoutEdge") // pane-name in parent-layout
- , isChild = parent && pane
- ;
- // sC -> state.container
- sC.selector = $N.selector.split(".slice")[0];
- sC.ref = (o.name ? o.name +' layout / ' : '') + tag + (id ? "#"+id : cls ? '.['+cls+']' : ''); // used in messages
-
- $N .data({
- layout: Instance
- , layoutContainer: sID // FLAG to indicate this is a layout-container - contains unique internal ID
- })
- .addClass(o.containerClass)
- ;
- var layoutMethods = {
- destroy: ''
- , initPanes: ''
- , resizeAll: 'resizeAll'
- , resize: 'resizeAll'
- };
- // loop hash and bind all methods - include layoutID namespacing
- for (name in layoutMethods) {
- $N.bind("layout"+ name.toLowerCase() +"."+ sID, Instance[ layoutMethods[name] || name ]);
- }
-
- // if this container is another layout's 'pane', then set child/parent pointers
- if (isChild) {
- // update parent flag
- Instance.hasParentLayout = true;
- // set pointers to THIS child-layout (Instance) in parent-layout
- // NOTE: parent.PANE.child is an ALIAS to parent.children.PANE
- parent[pane].child = parent.children[pane] = $N.data("layout");
- }
-
- // SAVE original container CSS for use in destroy()
- if (!$N.data(css)) {
- // handle props like overflow different for BODY & HTML - has 'system default' values
- if (fullPage) {
- CSS = $.extend( elCSS($N, props), {
- height: $N.css("height")
- , overflow: $N.css("overflow")
- , overflowX: $N.css("overflowX")
- , overflowY: $N.css("overflowY")
- });
- // ALSO SAVE <HTML> CSS
- var $H = $("html");
- $H.data(css, {
- height: "auto" // FF would return a fixed px-size!
- , overflow: $H.css("overflow")
- , overflowX: $H.css("overflowX")
- , overflowY: $H.css("overflowY")
- });
- }
- else // handle props normally for non-body elements
- CSS = elCSS($N, props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY");
-
- $N.data(css, CSS);
- }
-
- try { // format html/body if this is a full page layout
- if (fullPage) {
- $("html").css({
- height: "100%"
- , overflow: hid
- , overflowX: hid
- , overflowY: hid
- });
- $("body").css({
- position: "relative"
- , height: "100%"
- , overflow: hid
- , overflowX: hid
- , overflowY: hid
- , margin: 0
- , padding: 0 // TODO: test whether body-padding could be handled?
- , border: "none" // a body-border creates problems because it cannot be measured!
- });
-
- // set current layout-container dimensions
- $.extend(sC, elDims( $N ));
- }
- else { // set required CSS for overflow and position
- // ENSURE container will not 'scroll'
-			CSS = { overflow: hid, overflowX: hid, overflowY: hid };
- var
- p = $N.css("position")
- , h = $N.css("height")
- ;
- // if this is a NESTED layout, then container/outer-pane ALREADY has position and height
- if (!isChild) {
- if (!p || !p.match(/fixed|absolute|relative/))
- CSS.position = "relative"; // container MUST have a 'position'
- /*
- if (!h || h=="auto")
- CSS.height = "100%"; // container MUST have a 'height'
- */
- }
- $N.css( CSS );
-
- // set current layout-container dimensions
- if ( $N.is(":visible") ) {
- $.extend(sC, elDims( $N ));
- if (sC.innerHeight < 1)
- _log( o.errors.noContainerHeight.replace(/CONTAINER/, sC.ref) );
- }
- }
- } catch (ex) {}
- }
-
- /**
- * Bind layout hotkeys - if options enabled
- *
- * @see _create() and addPane()
- * @param {string=} [panes=""] The edge(s) to process
- */
-, initHotkeys = function (panes) {
- panes = panes ? panes.split(",") : _c.borderPanes;
- // bind keyDown to capture hotkeys, if option enabled for ANY pane
- $.each(panes, function (i, pane) {
- var o = options[pane];
- if (o.enableCursorHotkey || o.customHotkey) {
- $(document).bind("keydown."+ sID, keyDown); // only need to bind this ONCE
- return false; // BREAK - binding was done
- }
- });
- }
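// A configuration sketch for the hotkey binding above (option names are taken from the
// checks in initHotkeys; the values are illustrative only). A single document-level
// keydown handler is bound as soon as ANY pane enables a hotkey option:
var myLayout = $("body").layout({         // hypothetical full-page layout
	west:  { enableCursorHotkey: true }   // cursor-key hotkey for the west pane
,	south: { customHotkey: "S" }          // pane-specific custom hotkey (illustrative value)
});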
-
- /**
- * Build final OPTIONS data
- *
- * @see _create()
- */
-, initOptions = function () {
- var data, d, pane, key, val, i, c, o;
-
- // reprocess user's layout-options to have correct options sub-key structure
- opts = $.layout.transformData( opts ); // panes = default subkey
-
- // auto-rename old options for backward compatibility
- opts = $.layout.backwardCompatibility.renameAllOptions( opts );
-
- // if user-options has 'panes' key (pane-defaults), clean it...
- if (!$.isEmptyObject(opts.panes)) {
- // REMOVE any pane-defaults that MUST be set per-pane
- data = $.layout.optionsMap.noDefault;
- for (i=0, c=data.length; i<c; i++) {
- key = data[i];
- delete opts.panes[key]; // OK if does not exist
- }
- // REMOVE any layout-options specified under opts.panes
- data = $.layout.optionsMap.layout;
- for (i=0, c=data.length; i<c; i++) {
- key = data[i];
- delete opts.panes[key]; // OK if does not exist
- }
- }
-
- // MOVE any NON-layout-options from opts-root to opts.panes
- data = $.layout.optionsMap.layout;
- var rootKeys = $.layout.config.optionRootKeys;
- for (key in opts) {
- val = opts[key];
- if ($.inArray(key, rootKeys) < 0 && $.inArray(key, data) < 0) {
- if (!opts.panes[key])
- opts.panes[key] = $.isPlainObject(val) ? $.extend(true, {}, val) : val;
-			delete opts[key];
- }
- }
-
- // START by updating ALL options from opts
- $.extend(true, options, opts);
-
- // CREATE final options (and config) for EACH pane
- $.each(_c.allPanes, function (i, pane) {
-
- // apply 'pane-defaults' to CONFIG.[PANE]
- _c[pane] = $.extend(true, {}, _c.panes, _c[pane]);
-
- d = options.panes;
- o = options[pane];
-
- // center-pane uses SOME keys in defaults.panes branch
- if (pane === 'center') {
- // ONLY copy keys from opts.panes listed in: $.layout.optionsMap.center
- data = $.layout.optionsMap.center; // list of 'center-pane keys'
- for (i=0, c=data.length; i<c; i++) { // loop the list...
- key = data[i];
- // only need to use pane-default if pane-specific value not set
- if (!opts.center[key] && (opts.panes[key] || !o[key]))
- o[key] = d[key]; // pane-default
- }
- }
- else {
- // border-panes use ALL keys in defaults.panes branch
- o = options[pane] = $.extend(true, {}, d, o); // re-apply pane-specific opts AFTER pane-defaults
- createFxOptions( pane );
- // ensure all border-pane-specific base-classes exist
- if (!o.resizerClass) o.resizerClass = "ui-layout-resizer";
- if (!o.togglerClass) o.togglerClass = "ui-layout-toggler";
- }
- // ensure we have base pane-class (ALL panes)
- if (!o.paneClass) o.paneClass = "ui-layout-pane";
- });
-
- // update options.zIndexes if a zIndex-option specified
- var zo = opts.zIndex
- , z = options.zIndexes;
- if (zo > 0) {
- z.pane_normal = zo;
- z.content_mask = max(zo+1, z.content_mask); // MIN = +1
- z.resizer_normal = max(zo+2, z.resizer_normal); // MIN = +2
- }
-
- // DELETE 'panes' key now that we are done - values were copied to EACH pane
- delete options.panes;
-
-
- function createFxOptions ( pane ) {
- var o = options[pane]
- , d = options.panes;
- // ensure fxSettings key to avoid errors
- if (!o.fxSettings) o.fxSettings = {};
- if (!d.fxSettings) d.fxSettings = {};
-
- $.each(["_open","_close","_size"], function (i,n) {
- var
- sName = "fxName"+ n
- , sSpeed = "fxSpeed"+ n
- , sSettings = "fxSettings"+ n
- // recalculate fxName according to specificity rules
- , fxName = o[sName] =
- o[sName] // options.west.fxName_open
- || d[sName] // options.panes.fxName_open
- || o.fxName // options.west.fxName
- || d.fxName // options.panes.fxName
- || "none" // MEANS $.layout.defaults.panes.fxName == "" || false || null || 0
- ;
- // validate fxName to ensure is valid effect - MUST have effect-config data in options.effects
- if (fxName === "none" || !$.effects || !$.effects[fxName] || !options.effects[fxName])
- fxName = o[sName] = "none"; // effect not loaded OR unrecognized fxName
-
- // set vars for effects subkeys to simplify logic
- var fx = options.effects[fxName] || {} // effects.slide
- , fx_all = fx.all || null // effects.slide.all
- , fx_pane = fx[pane] || null // effects.slide.west
- ;
- // create fxSpeed[_open|_close|_size]
- o[sSpeed] =
- o[sSpeed] // options.west.fxSpeed_open
-				|| d[sSpeed]				// options.panes.fxSpeed_open
- || o.fxSpeed // options.west.fxSpeed
- || d.fxSpeed // options.panes.fxSpeed
- || null // DEFAULT - let fxSetting.duration control speed
- ;
- // create fxSettings[_open|_close|_size]
- o[sSettings] = $.extend(
- true
- , {}
- , fx_all // effects.slide.all
- , fx_pane // effects.slide.west
- , d.fxSettings // options.panes.fxSettings
- , o.fxSettings // options.west.fxSettings
- , d[sSettings] // options.panes.fxSettings_open
- , o[sSettings] // options.west.fxSettings_open
- );
- });
-
- // DONE creating action-specific-settings for this pane,
- // so DELETE generic options - are no longer meaningful
- delete o.fxName;
- delete o.fxSpeed;
- delete o.fxSettings;
- }
- }
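// A sketch of the option cascade implemented by createFxOptions() above, using
// hypothetical values. Pane-specific "_open/_close/_size" keys win over the pane's
// generic fx keys, which win over the shared "panes" defaults - and an fxName is
// reset to "none" unless a matching effect exists in $.effects and options.effects:
var myLayout = $("#container").layout({
	panes: { fxName: "slide", fxSpeed: "fast" }   // defaults for all border-panes
,	west:  { fxName_close: "none" }               // override a single action for one pane
});
// west then resolves to: fxName_open = "slide", fxName_close = "none", fxSpeed_* = "fast"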
-
- /**
-	 * Find and return the element for the specified pane - panes specified 'by ID'
-	 * are searched for anywhere inside the container; other selectors must match a
-	 * direct child of the container (or a child of a nested FORM element)
-	 *
-	 * @see  addPane()
- * @param {string} pane The pane to process
- */
-, getPane = function (pane) {
-		var sel = options[pane].paneSelector;
- if (sel.substr(0,1)==="#") // ID selector
- // NOTE: elements selected 'by ID' DO NOT have to be 'children'
- return $N.find(sel).eq(0);
- else { // class or other selector
- var $P = $N.children(sel).eq(0);
- // look for the pane nested inside a 'form' element
- return $P.length ? $P : $N.children("form:first").children(sel).eq(0);
- }
- }
-
-, initPanes = function (evt) {
- // stopPropagation if called by trigger("layoutinitpanes") - use evtPane utility
- evtPane(evt);
-
- // NOTE: do north & south FIRST so we can measure their height - do center LAST
- $.each(_c.allPanes, function (idx, pane) {
- addPane( pane, true );
- });
-
- // init the pane-handles NOW in case we have to hide or close the pane below
- initHandles();
-
- // now that all panes have been initialized and initially-sized,
- // make sure there is really enough space available for each pane
- $.each(_c.borderPanes, function (i, pane) {
- if ($Ps[pane] && state[pane].isVisible) { // pane is OPEN
- setSizeLimits(pane);
- makePaneFit(pane); // pane may be Closed, Hidden or Resized by makePaneFit()
- }
- });
- // size center-pane AGAIN in case we 'closed' a border-pane in loop above
- sizeMidPanes("center");
-
- // Chrome/Webkit sometimes fires callbacks BEFORE it completes resizing!
- // Before RC30.3, there was a 10ms delay here, but that caused layout
-		// to load asynchronously, which is BAD, so try skipping the delay for now
-
- // process pane contents and callbacks, and init/resize child-layout if exists
- $.each(_c.allPanes, function (i, pane) {
- var o = options[pane];
- if ($Ps[pane]) {
- if (state[pane].isVisible) { // pane is OPEN
- sizeContent(pane);
- // trigger pane.onResize if triggerEventsOnLoad = true
- if (o.triggerEventsOnLoad)
- _runCallbacks("onresize_end", pane);
- else // automatic if onresize called, otherwise call it specifically
- // resize child - IF inner-layout already exists (created before this layout)
- resizeChildLayout(pane);
- }
- // init childLayout - even if pane is not visible
- if (o.initChildLayout && o.childOptions)
- createChildLayout(pane);
- }
- });
- }
-
- /**
- * Add a pane to the layout - subroutine of initPanes()
- *
- * @see initPanes()
- * @param {string} pane The pane to process
-	 * @param {boolean=}  [force=false]  Add the pane even if the layout is not yet initialized
- */
-, addPane = function (pane, force) {
- if (!force && !isInitialized()) return;
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , fx = s.fx
- , dir = c.dir
- , spacing = o.spacing_open || 0
- , isCenter = (pane === "center")
- , CSS = {}
- , $P = $Ps[pane]
- , size, minSize, maxSize
- ;
- // if pane-pointer already exists, remove the old one first
- if ($P)
- removePane( pane, false, true, false );
- else
- $Cs[pane] = false; // init
-
- $P = $Ps[pane] = getPane(pane);
- if (!$P.length) {
- $Ps[pane] = false; // logic
- return;
- }
-
- // SAVE original Pane CSS
- if (!$P.data("layoutCSS")) {
- var props = "position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border";
- $P.data("layoutCSS", elCSS($P, props));
- }
-
- // create alias for pane data in Instance - initHandles will add more
- Instance[pane] = { name: pane, pane: $Ps[pane], content: $Cs[pane], options: options[pane], state: state[pane], child: children[pane] };
-
- // add classes, attributes & events
- $P .data({
- parentLayout: Instance // pointer to Layout Instance
- , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- , layoutRole: "pane"
- })
- .css(c.cssReq).css("zIndex", options.zIndexes.pane_normal)
- .css(o.applyDemoStyles ? c.cssDemo : {}) // demo styles
- .addClass( o.paneClass +" "+ o.paneClass+"-"+pane ) // default = "ui-layout-pane ui-layout-pane-west" - may be a dupe of 'paneSelector'
- .bind("mouseenter."+ sID, addHover )
- .bind("mouseleave."+ sID, removeHover )
- ;
- var paneMethods = {
- hide: ''
- , show: ''
- , toggle: ''
- , close: ''
- , open: ''
- , slideOpen: ''
- , slideClose: ''
- , slideToggle: ''
- , size: 'sizePane'
- , sizePane: 'sizePane'
- , sizeContent: ''
- , sizeHandles: ''
- , enableClosable: ''
- , disableClosable: ''
- , enableSlideable: ''
- , disableSlideable: ''
- , enableResizable: ''
- , disableResizable: ''
- , swapPanes: 'swapPanes'
- , swap: 'swapPanes'
- , move: 'swapPanes'
- , removePane: 'removePane'
- , remove: 'removePane'
- , createChildLayout: ''
- , resizeChildLayout: ''
- , resizeAll: 'resizeAll'
- , resizeLayout: 'resizeAll'
- }
- , name;
- // loop hash and bind all methods - include layoutID namespacing
- for (name in paneMethods) {
- $P.bind("layoutpane"+ name.toLowerCase() +"."+ sID, Instance[ paneMethods[name] || name ]);
- }
-
- // see if this pane has a 'scrolling-content element'
- initContent(pane, false); // false = do NOT sizeContent() - called later
-
- if (!isCenter) {
- // call _parseSize AFTER applying pane classes & styles - but before making visible (if hidden)
- // if o.size is auto or not valid, then MEASURE the pane and use that as its 'size'
- size = s.size = _parseSize(pane, o.size);
- minSize = _parseSize(pane,o.minSize) || 1;
- maxSize = _parseSize(pane,o.maxSize) || 100000;
- if (size > 0) size = max(min(size, maxSize), minSize);
-
- // state for border-panes
- s.isClosed = false; // true = pane is closed
- s.isSliding = false; // true = pane is currently open by 'sliding' over adjacent panes
- s.isResizing= false; // true = pane is in process of being resized
- s.isHidden = false; // true = pane is hidden - no spacing, resizer or toggler is visible!
-
- // array for 'pin buttons' whose classNames are auto-updated on pane-open/-close
- if (!s.pins) s.pins = [];
- }
- // states common to ALL panes
- s.tagName = $P[0].tagName;
-		s.edge		= pane;		// useful if pane is (or is about to be) 'swapped' - easy to find out where it is (or is going)
- s.noRoom = false; // true = pane 'automatically' hidden due to insufficient room - will unhide automatically
- s.isVisible = true; // false = pane is invisible - closed OR hidden - simplify logic
-
- // set css-position to account for container borders & padding
- switch (pane) {
- case "north": CSS.top = sC.insetTop;
- CSS.left = sC.insetLeft;
- CSS.right = sC.insetRight;
- break;
- case "south": CSS.bottom = sC.insetBottom;
- CSS.left = sC.insetLeft;
- CSS.right = sC.insetRight;
- break;
- case "west": CSS.left = sC.insetLeft; // top, bottom & height set by sizeMidPanes()
- break;
- case "east": CSS.right = sC.insetRight; // ditto
- break;
- case "center": // top, left, width & height set by sizeMidPanes()
- }
-
- if (dir === "horz") // north or south pane
- CSS.height = cssH($P, size);
- else if (dir === "vert") // east or west pane
- CSS.width = cssW($P, size);
- //else if (isCenter) {}
-
- $P.css(CSS); // apply size -- top, bottom & height will be set by sizeMidPanes
- if (dir != "horz") sizeMidPanes(pane, true); // true = skipCallback
-
- // close or hide the pane if specified in settings
- if (o.initClosed && o.closable && !o.initHidden)
- close(pane, true, true); // true, true = force, noAnimation
- else if (o.initHidden || o.initClosed)
- hide(pane); // will be completely invisible - no resizer or spacing
- else if (!s.noRoom)
- // make the pane visible - in case was initially hidden
- $P.css("display","block");
- // ELSE setAsOpen() - called later by initHandles()
-
- // RESET visibility now - pane will appear IF display:block
- $P.css("visibility","visible");
-
- // check option for auto-handling of pop-ups & drop-downs
- if (o.showOverflowOnHover)
- $P.hover( allowOverflow, resetOverflow );
-
- // if manually adding a pane AFTER layout initialization, then...
- if (state.initialized) {
- initHandles( pane );
- initHotkeys( pane );
- resizeAll(); // will sizeContent if pane is visible
- if (s.isVisible) { // pane is OPEN
- if (o.triggerEventsOnLoad)
- _runCallbacks("onresize_end", pane);
- else // automatic if onresize called, otherwise call it specifically
- // resize child - IF inner-layout already exists (created before this layout)
- resizeChildLayout(pane); // a previously existing childLayout
- }
- if (o.initChildLayout && o.childOptions)
- createChildLayout(pane);
- }
- }
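// A usage sketch for the pane-method event bindings created above (assumes an
// initialized layout whose west pane carries the default paneClass, so it matches
// ".ui-layout-pane-west"). Each pane method is bound to a "layoutpane" + method-name
// custom event on the pane element itself:
$(".ui-layout-pane-west").trigger("layoutpaneclose");    // routed to Instance.close
$(".ui-layout-pane-west").trigger("layoutpanetoggle");   // routed to Instance.toggle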
-
- /**
- * Initialize module objects, styling, size and position for all resize bars and toggler buttons
- *
- * @see _create()
- * @param {string=} [panes=""] The edge(s) to process
- */
-, initHandles = function (panes) {
- panes = panes ? panes.split(",") : _c.borderPanes;
-
- // create toggler DIVs for each pane, and set object pointers for them, eg: $R.north = north toggler DIV
- $.each(panes, function (i, pane) {
- var $P = $Ps[pane];
- $Rs[pane] = false; // INIT
- $Ts[pane] = false;
- if (!$P) return; // pane does not exist - skip
-
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , paneId = o.paneSelector.substr(0,1) === "#" ? o.paneSelector.substr(1) : ""
- , rClass = o.resizerClass
- , tClass = o.togglerClass
- , side = c.side.toLowerCase()
- , spacing = (s.isVisible ? o.spacing_open : o.spacing_closed)
- , _pane = "-"+ pane // used for classNames
- , _state = (s.isVisible ? "-open" : "-closed") // used for classNames
- , I = Instance[pane]
- // INIT RESIZER BAR
- , $R = I.resizer = $Rs[pane] = $("<div></div>")
- // INIT TOGGLER BUTTON
- , $T = I.toggler = (o.closable ? $Ts[pane] = $("<div></div>") : false)
- ;
-
- //if (s.isVisible && o.resizable) ... handled by initResizable
- if (!s.isVisible && o.slidable)
- $R.attr("title", o.tips.Slide).css("cursor", o.sliderCursor);
-
- $R // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "paneLeft-resizer"
- .attr("id", paneId ? paneId +"-resizer" : "" )
- .data({
- parentLayout: Instance
- , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- , layoutRole: "resizer"
- })
- .css(_c.resizers.cssReq).css("zIndex", options.zIndexes.resizer_normal)
- .css(o.applyDemoStyles ? _c.resizers.cssDemo : {}) // add demo styles
- .addClass(rClass +" "+ rClass+_pane)
- .hover(addHover, removeHover) // ALWAYS add hover-classes, even if resizing is not enabled - handle with CSS instead
- .hover(onResizerEnter, onResizerLeave) // ALWAYS NEED resizer.mouseleave to balance toggler.mouseenter
- .appendTo($N) // append DIV to container
- ;
-
- if ($T) {
- $T // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "#paneLeft-toggler"
- .attr("id", paneId ? paneId +"-toggler" : "" )
- .data({
- parentLayout: Instance
- , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- , layoutRole: "toggler"
- })
- .css(_c.togglers.cssReq) // add base/required styles
- .css(o.applyDemoStyles ? _c.togglers.cssDemo : {}) // add demo styles
- .addClass(tClass +" "+ tClass+_pane)
- .hover(addHover, removeHover) // ALWAYS add hover-classes, even if toggling is not enabled - handle with CSS instead
- .bind("mouseenter", onResizerEnter) // NEED toggler.mouseenter because mouseenter MAY NOT fire on resizer
- .appendTo($R) // append SPAN to resizer DIV
- ;
- // ADD INNER-SPANS TO TOGGLER
- if (o.togglerContent_open) // ui-layout-open
- $("<span>"+ o.togglerContent_open +"</span>")
- .data({
- layoutEdge: pane
- , layoutRole: "togglerContent"
- })
- .data("layoutRole", "togglerContent")
- .data("layoutEdge", pane)
- .addClass("content content-open")
- .css("display","none")
- .appendTo( $T )
- //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-open instead!
- ;
- if (o.togglerContent_closed) // ui-layout-closed
- $("<span>"+ o.togglerContent_closed +"</span>")
- .data({
- layoutEdge: pane
- , layoutRole: "togglerContent"
- })
- .addClass("content content-closed")
- .css("display","none")
- .appendTo( $T )
- //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-closed instead!
- ;
- // ADD TOGGLER.click/.hover
- enableClosable(pane);
- }
-
- // add Draggable events
- initResizable(pane);
-
- // ADD CLASSNAMES & SLIDE-BINDINGS - eg: class="resizer resizer-west resizer-open"
- if (s.isVisible)
- setAsOpen(pane); // onOpen will be called, but NOT onResize
- else {
- setAsClosed(pane); // onClose will be called
- bindStartSlidingEvent(pane, true); // will enable events IF option is set
- }
-
- });
-
- // SET ALL HANDLE DIMENSIONS
- sizeHandles();
- }
-
-
- /**
- * Initialize scrolling ui-layout-content div - if exists
- *
-	 * @see addPane() - or externally after an Ajax injection
- * @param {string} [pane] The pane to process
- * @param {boolean=} [resize=true] Size content after init
- */
-, initContent = function (pane, resize) {
- if (!isInitialized()) return;
- var
- o = options[pane]
- , sel = o.contentSelector
- , I = Instance[pane]
- , $P = $Ps[pane]
- , $C
- ;
- if (sel) $C = I.content = $Cs[pane] = (o.findNestedContent)
- ? $P.find(sel).eq(0) // match 1-element only
- : $P.children(sel).eq(0)
- ;
- if ($C && $C.length) {
- $C.data("layoutRole", "content");
- // SAVE original Pane CSS
- if (!$C.data("layoutCSS"))
- $C.data("layoutCSS", elCSS($C, "height"));
- $C.css( _c.content.cssReq );
- if (o.applyDemoStyles) {
- $C.css( _c.content.cssDemo ); // add padding & overflow: auto to content-div
- $P.css( _c.content.cssDemoPane ); // REMOVE padding/scrolling from pane
- }
- state[pane].content = {}; // init content state
- if (resize !== false) sizeContent(pane);
- // sizeContent() is called AFTER init of all elements
- }
- else
- I.content = $Cs[pane] = false;
- }
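// A markup/option sketch for the content-div handling above (selector values are
// illustrative). When a pane contains a matching 'content' element, only that element
// is auto-sized and scrolled by sizeContent(), while any sibling header/footer
// elements keep their own measured height:
var myLayout = $("#container").layout({
	west: {
		contentSelector: ".ui-layout-content"   // the child of the pane that scrolls
	,	findNestedContent: false                // true = locate it with .find() instead of .children()
	}
});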
-
-
- /**
- * Add resize-bars to all panes that specify it in options
-	 * - dependency: $.fn.resizable - will skip if not found
- *
- * @see _create()
- * @param {string=} [panes=""] The edge(s) to process
- */
-, initResizable = function (panes) {
- var draggingAvailable = $.layout.plugins.draggable
- , side // set in start()
- ;
- panes = panes ? panes.split(",") : _c.borderPanes;
-
- $.each(panes, function (idx, pane) {
- var o = options[pane];
- if (!draggingAvailable || !$Ps[pane] || !o.resizable) {
- o.resizable = false;
- return true; // skip to next
- }
-
- var s = state[pane]
- , z = options.zIndexes
- , c = _c[pane]
- , side = c.dir=="horz" ? "top" : "left"
- , opEdge = _c.oppositeEdge[pane]
- , masks = pane +",center,"+ opEdge + (c.dir=="horz" ? ",west,east" : "")
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , base = o.resizerClass
- , lastPos = 0 // used when live-resizing
- , r, live // set in start because may change
- // 'drag' classes are applied to the ORIGINAL resizer-bar while dragging is in process
- , resizerClass = base+"-drag" // resizer-drag
- , resizerPaneClass = base+"-"+pane+"-drag" // resizer-north-drag
- // 'helper' class is applied to the CLONED resizer-bar while it is being dragged
- , helperClass = base+"-dragging" // resizer-dragging
- , helperPaneClass = base+"-"+pane+"-dragging" // resizer-north-dragging
- , helperLimitClass = base+"-dragging-limit" // resizer-drag
- , helperPaneLimitClass = base+"-"+pane+"-dragging-limit" // resizer-north-drag
- , helperClassesSet = false // logic var
- ;
-
- if (!s.isClosed)
- $R.attr("title", o.tips.Resize)
- .css("cursor", o.resizerCursor); // n-resize, s-resize, etc
-
- $R.draggable({
- containment: $N[0] // limit resizing to layout container
- , axis: (c.dir=="horz" ? "y" : "x") // limit resizing to horz or vert axis
- , delay: 0
- , distance: 1
- , grid: o.resizingGrid
- // basic format for helper - style it using class: .ui-draggable-dragging
- , helper: "clone"
- , opacity: o.resizerDragOpacity
- , addClasses: false // avoid ui-state-disabled class when disabled
- //, iframeFix: o.draggableIframeFix // TODO: consider using when bug is fixed
- , zIndex: z.resizer_drag
-
- , start: function (e, ui) {
- // REFRESH options & state pointers in case we used swapPanes
- o = options[pane];
- s = state[pane];
- // re-read options
- live = o.livePaneResizing;
-
- // ondrag_start callback - will CANCEL hide if returns false
- // TODO: dragging CANNOT be cancelled like this, so see if there is a way?
- if (false === _runCallbacks("ondrag_start", pane)) return false;
-
- s.isResizing = true; // prevent pane from closing while resizing
- timer.clear(pane+"_closeSlider"); // just in case already triggered
-
- // SET RESIZER LIMITS - used in drag()
- setSizeLimits(pane); // update pane/resizer state
- r = s.resizerPosition;
-					lastPos = ui.position[ side ];
-
- $R.addClass( resizerClass +" "+ resizerPaneClass ); // add drag classes
- helperClassesSet = false; // reset logic var - see drag()
-
- // DISABLE TEXT SELECTION (probably already done by resizer.mouseOver)
- $('body').disableSelection();
-
- // MASK PANES CONTAINING IFRAMES, APPLETS OR OTHER TROUBLESOME ELEMENTS
- showMasks( masks );
- }
-
- , drag: function (e, ui) {
- if (!helperClassesSet) { // can only add classes after clone has been added to the DOM
- //$(".ui-draggable-dragging")
- ui.helper
- .addClass( helperClass +" "+ helperPaneClass ) // add helper classes
- .css({ right: "auto", bottom: "auto" }) // fix dir="rtl" issue
- .children().css("visibility","hidden") // hide toggler inside dragged resizer-bar
- ;
- helperClassesSet = true;
- // draggable bug!? RE-SET zIndex to prevent E/W resize-bar showing through N/S pane!
- if (s.isSliding) $Ps[pane].css("zIndex", z.pane_sliding);
- }
- // CONTAIN RESIZER-BAR TO RESIZING LIMITS
- var limit = 0;
- if (ui.position[side] < r.min) {
- ui.position[side] = r.min;
- limit = -1;
- }
- else if (ui.position[side] > r.max) {
- ui.position[side] = r.max;
- limit = 1;
- }
- // ADD/REMOVE dragging-limit CLASS
- if (limit) {
- ui.helper.addClass( helperLimitClass +" "+ helperPaneLimitClass ); // at dragging-limit
- window.defaultStatus = (limit>0 && pane.match(/(north|west)/)) || (limit<0 && pane.match(/(south|east)/)) ? o.tips.maxSizeWarning : o.tips.minSizeWarning;
- }
- else {
- ui.helper.removeClass( helperLimitClass +" "+ helperPaneLimitClass ); // not at dragging-limit
- window.defaultStatus = "";
- }
- // DYNAMICALLY RESIZE PANES IF OPTION ENABLED
- // won't trigger unless resizer has actually moved!
- if (live && Math.abs(ui.position[side] - lastPos) >= o.liveResizingTolerance) {
- lastPos = ui.position[side];
-						resizePanes(e, ui, pane);
- }
- }
-
- , stop: function (e, ui) {
- $('body').enableSelection(); // RE-ENABLE TEXT SELECTION
- window.defaultStatus = ""; // clear 'resizing limit' message from statusbar
- $R.removeClass( resizerClass +" "+ resizerPaneClass ); // remove drag classes from Resizer
- s.isResizing = false;
- resizePanes(e, ui, pane, true, masks); // true = resizingDone
- }
-
- });
- });
-
- /**
- * resizePanes
- *
- * Sub-routine called from stop() - and drag() if livePaneResizing
- *
- * @param {!Object} evt
- * @param {!Object} ui
- * @param {string} pane
- * @param {boolean=} [resizingDone=false]
- */
- var resizePanes = function (evt, ui, pane, resizingDone, masks) {
- var dragPos = ui.position
- , c = _c[pane]
- , o = options[pane]
- , s = state[pane]
- , resizerPos
- ;
- switch (pane) {
- case "north": resizerPos = dragPos.top; break;
- case "west": resizerPos = dragPos.left; break;
- case "south": resizerPos = sC.offsetHeight - dragPos.top - o.spacing_open; break;
- case "east": resizerPos = sC.offsetWidth - dragPos.left - o.spacing_open; break;
-			}
- // remove container margin from resizer position to get the pane size
- var newSize = resizerPos - sC["inset"+ c.side];
-
- // Disable OR Resize Mask(s) created in drag.start
- if (!resizingDone) {
- // ensure we meet liveResizingTolerance criteria
- if (Math.abs(newSize - s.size) < o.liveResizingTolerance)
- return; // SKIP resize this time
- // resize the pane
- manualSizePane(pane, newSize, false, true); // true = noAnimation
- sizeMasks(); // resize all visible masks
- }
- else { // resizingDone
- // ondrag_end callback
- if (false !== _runCallbacks("ondrag_end", pane))
- manualSizePane(pane, newSize, false, true); // true = noAnimation
- hideMasks(); // hide all masks, which include panes with 'content/iframe-masks'
- if (s.isSliding && masks) // RE-SHOW only 'object-masks' so objects won't show through sliding pane
- showMasks( masks, true ); // true = onlyForObjects
- }
- };
- }
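// A configuration sketch for the drag-resizing wired up above (values are illustrative;
// requires jQuery UI Draggable, otherwise the guard at the top of initResizable forces
// o.resizable = false):
var myLayout = $("#container").layout({
	west: {
		resizable: true
	,	livePaneResizing: true        // resize panes while dragging, not only on stop()
	,	liveResizingTolerance: 5      // min px the resizer must move between live resizes
	,	resizingGrid: [10, 10]        // snap the drag to a grid (passed straight to draggable)
	}
});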
-
- /**
- * sizeMask
- *
- * Needed to overlay a DIV over an IFRAME-pane because mask CANNOT be *inside* the pane
- * Called when mask created, and during livePaneResizing
- */
-, sizeMask = function () {
- var $M = $(this)
- , pane = $M.data("layoutMask") // eg: "west"
- , s = state[pane]
- ;
- // only masks over an IFRAME-pane need manual resizing
- if (s.tagName == "IFRAME" && s.isVisible) // no need to mask closed/hidden panes
- $M.css({
- top: s.offsetTop
- , left: s.offsetLeft
- , width: s.outerWidth
- , height: s.outerHeight
- });
- /* ALT Method...
- var $P = $Ps[pane];
- $M.css( $P.position() ).css({ width: $P[0].offsetWidth, height: $P[0].offsetHeight });
- */
- }
-, sizeMasks = function () {
- $Ms.each( sizeMask ); // resize all 'visible' masks
- }
-
-, showMasks = function (panes, onlyForObjects) {
- var a = panes ? panes.split(",") : $.layout.config.allPanes
- , z = options.zIndexes
- , o, s;
- $.each(a, function(i,p){
- s = state[p];
- o = options[p];
- if (s.isVisible && ( (!onlyForObjects && o.maskContents) || o.maskObjects )) {
- getMasks(p).each(function(){
- sizeMask.call(this);
- this.style.zIndex = s.isSliding ? z.pane_sliding+1 : z.pane_normal+1
- this.style.display = "block";
- });
- }
- });
- }
-
-, hideMasks = function () {
- // ensure no pane is resizing - could be a timing issue
- var skip;
- $.each( $.layout.config.borderPanes, function(i,p){
- if (state[p].isResizing) {
- skip = true;
- return false; // BREAK
- }
- });
- if (!skip)
- $Ms.hide(); // hide ALL masks
- }
-
-, getMasks = function (pane) {
- var $Masks = $([])
- , $M, i = 0, c = $Ms.length
- ;
- for (; i<c; i++) {
- $M = $Ms.eq(i);
- if ($M.data("layoutMask") === pane)
- $Masks = $Masks.add( $M );
- }
- if ($Masks.length)
- return $Masks;
- else
- return createMasks(pane);
- }
-
- /**
- * createMasks
- *
- * Generates both DIV (ALWAYS used) and IFRAME (optional) elements as masks
- * An IFRAME mask is created *under* the DIV when maskObjects=true, because a DIV cannot mask an applet
- */
-, createMasks = function (pane) {
- var
- $P = $Ps[pane]
- , s = state[pane]
- , o = options[pane]
- , z = options.zIndexes
- //, objMask = o.maskObjects && s.tagName != "IFRAME" // check for option
- , $Masks = $([])
- , isIframe, el, $M, css, i
- ;
- if (!o.maskContents && !o.maskObjects) return $Masks;
- // if o.maskObjects=true, then loop TWICE to create BOTH kinds of mask, else only create a DIV
- for (i=0; i < (o.maskObjects ? 2 : 1); i++) {
- isIframe = o.maskObjects && i==0;
- el = document.createElement( isIframe ? "iframe" : "div" );
- $M = $(el).data("layoutMask", pane); // add data to relate mask to pane
- el.className = "ui-layout-mask ui-layout-mask-"+ pane; // for user styling
- css = el.style;
- // styles common to both DIVs and IFRAMES
- css.display = "block";
- css.position = "absolute";
- if (isIframe) { // IFRAME-only props
- el.frameborder = 0;
- el.src = "about:blank";
- css.opacity = 0;
- css.filter = "Alpha(Opacity='0')";
- css.border = 0;
- }
- // if pane is an IFRAME, then must mask the pane itself
- if (s.tagName == "IFRAME") {
- // NOTE sizing done by a subroutine so can be called during live-resizing
- css.zIndex = z.pane_normal+1; // 1-higher than pane
- $N.append( el ); // append to LAYOUT CONTAINER
- }
- // otherwise put masks *inside the pane* to mask its contents
- else {
- $M.addClass("ui-layout-mask-inside-pane");
- css.zIndex = o.maskZindex || z.content_mask; // usually 1, but customizable
- css.top = 0;
- css.left = 0;
- css.width = "100%";
- css.height = "100%";
- $P.append( el ); // append INSIDE pane element
- }
- // add to return object
- $Masks = $Masks.add( el );
- // add Mask to cached array so can be resized & reused
- $Ms = $Ms.add( el );
- }
- return $Masks;
- }
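// An options sketch for the masking logic above (values are illustrative). Masks are
// created lazily - the first time getMasks() finds none cached for a pane - and an
// extra IFRAME mask is only generated when maskObjects is true:
var myLayout = $("#container").layout({
	center: {
		maskContents: true    // DIV mask over the pane while resizing/sliding
	,	maskObjects:  true    // ALSO create an IFRAME mask to cover applets/objects
	}
});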
-
-
- /**
- * Destroy this layout and reset all elements
- *
-	 * @param {boolean=}  [destroyChildren=false]  Destroy Child-Layouts first?
- */
-, destroy = function (evt_or_destroyChildren, destroyChildren) {
- // UNBIND layout events and remove global object
- $(window).unbind("."+ sID); // resize & unload
- $(document).unbind("."+ sID); // keyDown (hotkeys)
-
- if (typeof evt_or_destroyChildren === "object")
- // stopPropagation if called by trigger("layoutdestroy") - use evtPane utility
- evtPane(evt_or_destroyChildren);
- else // no event, so transfer 1st param to destroyChildren param
- destroyChildren = evt_or_destroyChildren;
-
- // need to look for parent layout BEFORE we remove the container data, else skips a level
- //var parentPane = Instance.hasParentLayout ? $.layout.getParentPaneInstance( $N ) : null;
-
- // reset layout-container
- $N .clearQueue()
- .removeData("layout")
- .removeData("layoutContainer")
- .removeClass(options.containerClass)
- .unbind("."+ sID) // remove ALL Layout events
- ;
-
- // remove all mask elements that have been created
- $Ms.remove();
-
- // loop all panes to remove layout classes, attributes and bindings
- $.each(_c.allPanes, function (i, pane) {
- removePane( pane, false, true, destroyChildren ); // true = skipResize
- });
-
- // do NOT reset container CSS if is a 'pane' (or 'content') in an outer-layout - ie, THIS layout is 'nested'
- var css = "layoutCSS";
- if ($N.data(css) && !$N.data("layoutRole")) // RESET CSS
- $N.css( $N.data(css) ).removeData(css);
-
- // for full-page layouts, also reset the <HTML> CSS
- if (sC.tagName === "BODY" && ($N = $("html")).data(css)) // RESET <HTML> CSS
- $N.css( $N.data(css) ).removeData(css);
-
- // trigger plugins for this layout, if there are any
- runPluginCallbacks( Instance, $.layout.onDestroy );
-
- // trigger state-management and onunload callback
- unload();
-
- // clear the Instance of everything except for container & options (so could recreate)
- // RE-CREATE: myLayout = myLayout.container.layout( myLayout.options );
- for (n in Instance)
- if (!n.match(/^(container|options)$/)) delete Instance[ n ];
- // add a 'destroyed' flag to make it easy to check
- Instance.destroyed = true;
-
- // if this is a child layout, CLEAR the child-pointer in the parent
- /* for now the pointer REMAINS, but with only container, options and destroyed keys
- if (parentPane) {
- var layout = parentPane.pane.data("parentLayout");
- parentPane.child = layout.children[ parentPane.name ] = null;
- }
- */
-
- return Instance; // for coding convenience
- }
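// A tear-down / re-create sketch based on the RE-CREATE note above - only the
// 'container' and 'options' keys survive on the Instance, so the same layout can be
// rebuilt from them (assumes 'myLayout' holds an existing layout instance):
myLayout.destroy(true);                    // true = destroy child-layouts first
if (myLayout.destroyed)                    // flag set at the end of destroy()
	myLayout = myLayout.container.layout( myLayout.options );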
-
- /**
- * Remove a pane from the layout - subroutine of destroy()
- *
- * @see destroy()
- * @param {string|Object} evt_or_pane The pane to process
- * @param {boolean=} [remove=false] Remove the DOM element?
- * @param {boolean=} [skipResize=false] Skip calling resizeAll()?
- * @param {boolean=} [destroyChild=true] Destroy Child-layouts? If not passed, obeys options setting
- */
-, removePane = function (evt_or_pane, remove, skipResize, destroyChild) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $P = $Ps[pane]
- , $C = $Cs[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- ;
- // NOTE: elements can still exist even after remove()
- // so check for missing data(), which is cleared by removed()
- if ($P && $.isEmptyObject( $P.data() )) $P = false;
- if ($C && $.isEmptyObject( $C.data() )) $C = false;
- if ($R && $.isEmptyObject( $R.data() )) $R = false;
- if ($T && $.isEmptyObject( $T.data() )) $T = false;
-
- if ($P) $P.stop(true, true);
-
- // check for a child layout
- var o = options[pane]
- , s = state[pane]
- , d = "layout"
- , css = "layoutCSS"
- , child = children[pane] || ($P ? $P.data(d) : 0) || ($C ? $C.data(d) : 0) || null
- , destroy = destroyChild !== undefined ? destroyChild : o.destroyChildLayout
- ;
-
- // FIRST destroy the child-layout(s)
- if (destroy && child && !child.destroyed) {
- child.destroy(true); // tell child-layout to destroy ALL its child-layouts too
- if (child.destroyed) // destroy was successful
- child = null; // clear pointer for logic below
- }
-
- if ($P && remove && !child)
- $P.remove();
- else if ($P && $P[0]) {
- // create list of ALL pane-classes that need to be removed
- var root = o.paneClass // default="ui-layout-pane"
- , pRoot = root +"-"+ pane // eg: "ui-layout-pane-west"
- , _open = "-open"
- , _sliding= "-sliding"
- , _closed = "-closed"
- , classes = [ root, root+_open, root+_closed, root+_sliding, // generic classes
- pRoot, pRoot+_open, pRoot+_closed, pRoot+_sliding ] // pane-specific classes
- ;
- $.merge(classes, getHoverClasses($P, true)); // ADD hover-classes
- // remove all Layout classes from pane-element
- $P .removeClass( classes.join(" ") ) // remove ALL pane-classes
- .removeData("parentLayout")
- .removeData("layoutPane")
- .removeData("layoutRole")
- .removeData("layoutEdge")
- .removeData("autoHidden") // in case set
- .unbind("."+ sID) // remove ALL Layout events
- // TODO: remove these extra unbind commands when jQuery is fixed
- //.unbind("mouseenter"+ sID)
- //.unbind("mouseleave"+ sID)
- ;
- // do NOT reset CSS if this pane/content is STILL the container of a nested layout!
- // the nested layout will reset its 'container' CSS when/if it is destroyed
- if ($C && $C.data(d)) {
- // a content-div may not have a specific width, so give it one to contain the Layout
- $C.width( $C.width() );
- child.resizeAll(); // now resize the Layout
- }
- else if ($C)
- $C.css( $C.data(css) ).removeData(css).removeData("layoutRole");
- // remove pane AFTER content in case there was a nested layout
- if (!$P.data(d))
- $P.css( $P.data(css) ).removeData(css);
- }
-
- // REMOVE pane resizer and toggler elements
- if ($T) $T.remove();
- if ($R) $R.remove();
-
- // CLEAR all pointers and state data
- Instance[pane] = $Ps[pane] = $Cs[pane] = $Rs[pane] = $Ts[pane] = children[pane] = false;
- s = { removed: true };
-
- if (!skipResize)
- resizeAll();
- }
-
-
-/*
- * ###########################
- * ACTION METHODS
- * ###########################
- */
-
-, _hidePane = function (pane) {
- var $P = $Ps[pane]
- , o = options[pane]
- , s = $P[0].style
- ;
- if (o.useOffscreenClose) {
- if (!$P.data(_c.offscreenReset))
- $P.data(_c.offscreenReset, { left: s.left, right: s.right });
- $P.css( _c.offscreenCSS );
- }
- else
- $P.hide().removeData(_c.offscreenReset);
- }
-
-, _showPane = function (pane) {
- var $P = $Ps[pane]
- , o = options[pane]
- , off = _c.offscreenCSS
- , old = $P.data(_c.offscreenReset)
- , s = $P[0].style
- ;
- $P .show() // ALWAYS show, just in case
- .removeData(_c.offscreenReset);
- if (o.useOffscreenClose && old) {
- if (s.left == off.left)
- s.left = old.left;
- if (s.right == off.right)
- s.right = old.right;
- }
- }
-
-
- /**
- * Completely 'hides' a pane, including its spacing - as if it does not exist
- * The pane is not actually 'removed' from the source, so can use 'show' to un-hide it
- *
- * @param {string|Object} evt_or_pane The pane being hidden, ie: north, south, east, or west
- * @param {boolean=} [noAnimation=false]
- */
-, hide = function (evt_or_pane, noAnimation) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- ;
- if (!$P || s.isHidden) return; // pane does not exist OR is already hidden
-
- // onhide_start callback - will CANCEL hide if returns false
- if (state.initialized && false === _runCallbacks("onhide_start", pane)) return;
-
- s.isSliding = false; // just in case
-
- // now hide the elements
- if ($R) $R.hide(); // hide resizer-bar
- if (!state.initialized || s.isClosed) {
- s.isClosed = true; // to trigger open-animation on show()
- s.isHidden = true;
- s.isVisible = false;
- if (!state.initialized)
- _hidePane(pane); // no animation when loading page
- sizeMidPanes(_c[pane].dir === "horz" ? "" : "center");
- if (state.initialized || o.triggerEventsOnLoad)
- _runCallbacks("onhide_end", pane);
- }
- else {
- s.isHiding = true; // used by onclose
- close(pane, false, noAnimation); // adjust all panes to fit
- }
- }
-
- /**
- * Show a hidden pane - show as 'closed' by default unless openPane = true
- *
- * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
- * @param {boolean=} [openPane=false]
- * @param {boolean=} [noAnimation=false]
- * @param {boolean=} [noAlert=false]
- */
-, show = function (evt_or_pane, openPane, noAnimation, noAlert) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- ;
- if (!$P || !s.isHidden) return; // pane does not exist OR is not hidden
-
- // onshow_start callback - will CANCEL show if returns false
- if (false === _runCallbacks("onshow_start", pane)) return;
-
- s.isSliding = false; // just in case
- s.isShowing = true; // used by onopen/onclose
- //s.isHidden = false; - will be set by open/close - if not cancelled
-
- // now show the elements
- //if ($R) $R.show(); - will be shown by open/close
- if (openPane === false)
- close(pane, true); // true = force
- else
- open(pane, false, noAnimation, noAlert); // adjust all panes to fit
- }
-
-
- /**
- * Toggles a pane open/closed by calling either open or close
- *
- * @param {string|Object} evt_or_pane The pane being toggled, ie: north, south, east, or west
- * @param {boolean=} [slide=false]
- */
-, toggle = function (evt_or_pane, slide) {
- if (!isInitialized()) return;
- var evt = evtObj(evt_or_pane)
- , pane = evtPane.call(this, evt_or_pane)
- , s = state[pane]
- ;
-		if (evt) // called from $R.dblclick OR triggerPaneEvent
- evt.stopImmediatePropagation();
- if (s.isHidden)
- show(pane); // will call 'open' after unhiding it
- else if (s.isClosed)
- open(pane, !!slide);
- else
- close(pane);
- }
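// A minimal API sketch for the visibility methods above (assumes 'myLayout' holds an
// initialized layout instance and the named panes exist):
myLayout.hide("south");          // remove the pane AND its resizer/spacing from view
myLayout.show("south", false);   // un-hide it but leave it closed
myLayout.show("south", true);    // un-hide AND open it
myLayout.toggle("west");         // open if closed/hidden, otherwise close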
-
-
- /**
- * Utility method used during init or other auto-processes
- *
- * @param {string} pane The pane being closed
- * @param {boolean=} [setHandles=false]
- */
-, _closePane = function (pane, setHandles) {
- var
- $P = $Ps[pane]
- , s = state[pane]
- ;
- _hidePane(pane);
- s.isClosed = true;
- s.isVisible = false;
- // UNUSED: if (setHandles) setAsClosed(pane, true); // true = force
- }
-
- /**
- * Close the specified pane (animation optional), and resize all other panes as needed
- *
- * @param {string|Object} evt_or_pane The pane being closed, ie: north, south, east, or west
- * @param {boolean=} [force=false]
- * @param {boolean=} [noAnimation=false]
- * @param {boolean=} [skipCallback=false]
- */
-, close = function (evt_or_pane, force, noAnimation, skipCallback) {
- var pane = evtPane.call(this, evt_or_pane);
- // if pane has been initialized, but NOT the complete layout, close pane instantly
- if (!state.initialized && $Ps[pane]) {
- _closePane(pane); // INIT pane as closed
- return;
- }
- if (!isInitialized()) return;
-
- var
- $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , doFX, isShowing, isHiding, wasSliding;
-
- // QUEUE in case another action/animation is in progress
- $N.queue(function( queueNext ){
-
- if ( !$P
- || (!o.closable && !s.isShowing && !s.isHiding) // invalid request // (!o.resizable && !o.closable) ???
- || (!force && s.isClosed && !s.isShowing) // already closed
- ) return queueNext();
-
- // onclose_start callback - will CANCEL hide if returns false
- // SKIP if just 'showing' a hidden pane as 'closed'
- var abort = !s.isShowing && false === _runCallbacks("onclose_start", pane);
-
- // transfer logic vars to temp vars
- isShowing = s.isShowing;
- isHiding = s.isHiding;
- wasSliding = s.isSliding;
- // now clear the logic vars (REQUIRED before aborting)
- delete s.isShowing;
- delete s.isHiding;
-
- if (abort) return queueNext();
-
- doFX = !noAnimation && !s.isClosed && (o.fxName_close != "none");
- s.isMoving = true;
- s.isClosed = true;
- s.isVisible = false;
- // update isHidden BEFORE sizing panes
- if (isHiding) s.isHidden = true;
- else if (isShowing) s.isHidden = false;
-
- if (s.isSliding) // pane is being closed, so UNBIND trigger events
- bindStopSlidingEvents(pane, false); // will set isSliding=false
- else // resize panes adjacent to this one
- sizeMidPanes(_c[pane].dir === "horz" ? "" : "center", false); // false = NOT skipCallback
-
- // if this pane has a resizer bar, move it NOW - before animation
- setAsClosed(pane);
-
- // CLOSE THE PANE
- if (doFX) { // animate the close
- // mask panes with objects
- var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
- showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
- lockPaneForFX(pane, true); // need to set left/top so animation will work
- $P.hide( o.fxName_close, o.fxSettings_close, o.fxSpeed_close, function () {
- lockPaneForFX(pane, false); // undo
- if (s.isClosed) close_2();
- queueNext();
- });
- }
- else { // hide the pane without animation
- _hidePane(pane);
- close_2();
- queueNext();
- };
- });
-
- // SUBROUTINE
- function close_2 () {
- s.isMoving = false;
- bindStartSlidingEvent(pane, true); // will enable if o.slidable = true
-
- // if opposite-pane was autoClosed, see if it can be autoOpened now
- var altPane = _c.oppositeEdge[pane];
- if (state[ altPane ].noRoom) {
- setSizeLimits( altPane );
- makePaneFit( altPane );
- }
-
- // hide any masks shown while closing
- hideMasks();
-
- if (!skipCallback && (state.initialized || o.triggerEventsOnLoad)) {
- // onclose callback - UNLESS just 'showing' a hidden pane as 'closed'
- if (!isShowing) _runCallbacks("onclose_end", pane);
- // onhide OR onshow callback
- if (isShowing) _runCallbacks("onshow_end", pane);
- if (isHiding) _runCallbacks("onhide_end", pane);
- }
- }
- }
-
- /**
- * @param {string} pane The pane just closed, ie: north, south, east, or west
- */
-, setAsClosed = function (pane) {
- var
- $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , side = _c[pane].side.toLowerCase()
- , inset = "inset"+ _c[pane].side
- , rClass = o.resizerClass
- , tClass = o.togglerClass
- , _pane = "-"+ pane // used for classNames
- , _open = "-open"
- , _sliding= "-sliding"
- , _closed = "-closed"
- ;
- $R
- .css(side, sC[inset]) // move the resizer
- .removeClass( rClass+_open +" "+ rClass+_pane+_open )
- .removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
- .addClass( rClass+_closed +" "+ rClass+_pane+_closed )
- .unbind("dblclick."+ sID)
- ;
- // DISABLE 'resizing' when closed - do this BEFORE bindStartSlidingEvent?
- if (o.resizable && $.layout.plugins.draggable)
- $R
- .draggable("disable")
- .removeClass("ui-state-disabled") // do NOT apply disabled styling - not suitable here
- .css("cursor", "default")
- .attr("title","")
- ;
-
- // if pane has a toggler button, adjust that too
- if ($T) {
- $T
- .removeClass( tClass+_open +" "+ tClass+_pane+_open )
- .addClass( tClass+_closed +" "+ tClass+_pane+_closed )
- .attr("title", o.tips.Open) // may be blank
- ;
- // toggler-content - if exists
- $T.children(".content-open").hide();
- $T.children(".content-closed").css("display","block");
- }
-
- // sync any 'pin buttons'
- syncPinBtns(pane, false);
-
- if (state.initialized) {
- // resize 'length' and position togglers for adjacent panes
- sizeHandles();
- }
- }
-
- /**
- * Open the specified pane (animation optional), and resize all other panes as needed
- *
- * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
- * @param {boolean=} [slide=false]
- * @param {boolean=} [noAnimation=false]
- * @param {boolean=} [noAlert=false]
- */
-, open = function (evt_or_pane, slide, noAnimation, noAlert) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , doFX, isShowing
- ;
- // QUEUE in case another action/animation is in progress
- $N.queue(function( queueNext ){
-
- if ( !$P
- || (!o.resizable && !o.closable && !s.isShowing) // invalid request
- || (s.isVisible && !s.isSliding) // already open
- ) return queueNext();
-
- // pane can ALSO be unhidden by just calling show(), so handle this scenario
- if (s.isHidden && !s.isShowing) {
- queueNext(); // call before show() because it needs the queue free
- show(pane, true);
- return;
- }
-
- if (o.autoResize && s.size != o.size) // resize pane to original size set in options
- sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
- else
- // make sure there is enough space available to open the pane
- setSizeLimits(pane, slide);
-
- // onopen_start callback - will CANCEL open if returns false
- var cbReturn = _runCallbacks("onopen_start", pane);
-
- if (cbReturn === "abort")
- return queueNext();
-
- // update pane-state again in case options were changed in onopen_start
- if (cbReturn !== "NC") // NC = "No Callback"
- setSizeLimits(pane, slide);
-
- if (s.minSize > s.maxSize) { // INSUFFICIENT ROOM FOR PANE TO OPEN!
- syncPinBtns(pane, false); // make sure pin-buttons are reset
- if (!noAlert && o.tips.noRoomToOpen)
- alert(o.tips.noRoomToOpen);
- return queueNext(); // ABORT
- }
-
- if (slide) // START Sliding - will set isSliding=true
- bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
- else if (s.isSliding) // PIN PANE (stop sliding) - open pane 'normally' instead
- bindStopSlidingEvents(pane, false); // UNBIND trigger events - will set isSliding=false
- else if (o.slidable)
- bindStartSlidingEvent(pane, false); // UNBIND trigger events
-
- s.noRoom = false; // will be reset by makePaneFit if 'noRoom'
- makePaneFit(pane);
-
- // transfer logic var to temp var
- isShowing = s.isShowing;
- // now clear the logic var
- delete s.isShowing;
-
- doFX = !noAnimation && s.isClosed && (o.fxName_open != "none");
- s.isMoving = true;
- s.isVisible = true;
- s.isClosed = false;
- // update isHidden BEFORE sizing panes - WHY??? Old?
- if (isShowing) s.isHidden = false;
-
- if (doFX) { // ANIMATE
- // mask panes with objects
- var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
- if (s.isSliding) masks += ","+ _c.oppositeEdge[pane];
- showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
- lockPaneForFX(pane, true); // need to set left/top so animation will work
- $P.show( o.fxName_open, o.fxSettings_open, o.fxSpeed_open, function() {
- lockPaneForFX(pane, false); // undo
- if (s.isVisible) open_2(); // continue
- queueNext();
- });
- }
- else { // no animation
- _showPane(pane);// just show pane and...
- open_2(); // continue
- queueNext();
- };
- });
-
- // SUBROUTINE
- function open_2 () {
- s.isMoving = false;
-
- // cure iframe display issues
- _fixIframe(pane);
-
- // NOTE: if isSliding, then other panes are NOT 'resized'
- if (!s.isSliding) { // resize all panes adjacent to this one
- hideMasks(); // remove any masks shown while opening
- sizeMidPanes(_c[pane].dir=="vert" ? "center" : "", false); // false = NOT skipCallback
- }
-
- // set classes, position handles and execute callbacks...
- setAsOpen(pane);
- };
-
- }
-
- /**
- * @param {string} pane The pane just opened, ie: north, south, east, or west
- * @param {boolean=} [skipCallback=false]
- */
-, setAsOpen = function (pane, skipCallback) {
- var
- $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , side = _c[pane].side.toLowerCase()
- , inset = "inset"+ _c[pane].side
- , rClass = o.resizerClass
- , tClass = o.togglerClass
- , _pane = "-"+ pane // used for classNames
- , _open = "-open"
- , _closed = "-closed"
- , _sliding= "-sliding"
- ;
- $R
- .css(side, sC[inset] + getPaneSize(pane)) // move the resizer
- .removeClass( rClass+_closed +" "+ rClass+_pane+_closed )
- .addClass( rClass+_open +" "+ rClass+_pane+_open )
- ;
- if (s.isSliding)
- $R.addClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
- else // in case 'was sliding'
- $R.removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
-
- if (o.resizerDblClickToggle)
- $R.bind("dblclick", toggle );
- removeHover( 0, $R ); // remove hover classes
- if (o.resizable && $.layout.plugins.draggable)
- $R .draggable("enable")
- .css("cursor", o.resizerCursor)
- .attr("title", o.tips.Resize);
- else if (!s.isSliding)
- $R.css("cursor", "default"); // n-resize, s-resize, etc
-
- // if pane also has a toggler button, adjust that too
- if ($T) {
- $T .removeClass( tClass+_closed +" "+ tClass+_pane+_closed )
- .addClass( tClass+_open +" "+ tClass+_pane+_open )
- .attr("title", o.tips.Close); // may be blank
- removeHover( 0, $T ); // remove hover classes
- // toggler-content - if exists
- $T.children(".content-closed").hide();
- $T.children(".content-open").css("display","block");
- }
-
- // sync any 'pin buttons'
- syncPinBtns(pane, !s.isSliding);
-
- // update pane-state dimensions - BEFORE resizing content
- $.extend(s, elDims($P));
-
- if (state.initialized) {
- // resize resizer & toggler sizes for all panes
- sizeHandles();
- // resize content every time pane opens - to be sure
- sizeContent(pane, true); // true = remeasure headers/footers, even if 'pane.isMoving'
- }
-
- if (!skipCallback && (state.initialized || o.triggerEventsOnLoad) && $P.is(":visible")) {
- // onopen callback
- _runCallbacks("onopen_end", pane);
- // onshow callback - TODO: should this be here?
- if (s.isShowing) _runCallbacks("onshow_end", pane);
-
- // ALSO call onresize because layout-size *may* have changed while pane was closed
- if (state.initialized)
- _runCallbacks("onresize_end", pane);
- }
-
- // TODO: Somehow sizePane("north") is being called after this point???
- }
-
-
- /**
- * slideOpen / slideClose / slideToggle
- *
-	 * Pass-through methods for sliding
- */
-, slideOpen = function (evt_or_pane) {
- if (!isInitialized()) return;
- var evt = evtObj(evt_or_pane)
- , pane = evtPane.call(this, evt_or_pane)
- , s = state[pane]
- , delay = options[pane].slideDelay_open
- ;
- // prevent event from triggering on NEW resizer binding created below
- if (evt) evt.stopImmediatePropagation();
-
- if (s.isClosed && evt && evt.type === "mouseenter" && delay > 0)
- // trigger = mouseenter - use a delay
- timer.set(pane+"_openSlider", open_NOW, delay);
- else
- open_NOW(); // will unbind events if is already open
-
- /**
- * SUBROUTINE for timed open
- */
- function open_NOW () {
- if (!s.isClosed) // skip if no longer closed!
- bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
- else if (!s.isMoving)
- open(pane, true); // true = slide - open() will handle binding
- };
- }
-
-, slideClose = function (evt_or_pane) {
- if (!isInitialized()) return;
- var evt = evtObj(evt_or_pane)
- , pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- , delay = s.isMoving ? 1000 : 300 // MINIMUM delay - option may override
- ;
- if (s.isClosed || s.isResizing)
- return; // skip if already closed OR in process of resizing
- else if (o.slideTrigger_close === "click")
- close_NOW(); // close immediately onClick
- else if (o.preventQuickSlideClose && s.isMoving)
- return; // handle Chrome quick-close on slide-open
- else if (o.preventPrematureSlideClose && evt && $.layout.isMouseOverElem(evt, $Ps[pane]))
- return; // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
- else if (evt) // trigger = mouseleave - use a delay
- // 1 sec delay if 'opening', else .3 sec
- timer.set(pane+"_closeSlider", close_NOW, max(o.slideDelay_close, delay));
-		else // called programmatically
- close_NOW();
-
- /**
- * SUBROUTINE for timed close
- */
- function close_NOW () {
- if (s.isClosed) // skip 'close' if already closed!
- bindStopSlidingEvents(pane, false); // UNBIND trigger events - TODO: is this needed here?
- else if (!s.isMoving)
- close(pane); // close will handle unbinding
- };
- }
-
- /**
- * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
- */
-, slideToggle = function (evt_or_pane) {
- var pane = evtPane.call(this, evt_or_pane);
- toggle(pane, true);
- }
-
-
- /**
- * Must set left/top on East/South panes so animation will work properly
- *
- * @param {string} pane The pane to lock, 'east' or 'south' - any other is ignored!
- * @param {boolean} doLock true = set left/top, false = remove
- */
-, lockPaneForFX = function (pane, doLock) {
- var $P = $Ps[pane]
- , s = state[pane]
- , o = options[pane]
- , z = options.zIndexes
- ;
- if (doLock) {
- $P.css({ zIndex: z.pane_animate }); // overlay all elements during animation
- if (pane=="south")
- $P.css({ top: sC.insetTop + sC.innerHeight - $P.outerHeight() });
- else if (pane=="east")
- $P.css({ left: sC.insetLeft + sC.innerWidth - $P.outerWidth() });
- }
- else { // animation DONE - RESET CSS
- // TODO: see if this can be deleted. It causes a quick-close when sliding in Chrome
- $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
- if (pane=="south")
- $P.css({ top: "auto" });
- // if pane is positioned 'off-screen', then DO NOT screw with it!
- else if (pane=="east" && !$P.css("left").match(/\-99999/))
- $P.css({ left: "auto" });
- // fix anti-aliasing in IE - only needed for animations that change opacity
- if (browser.msie && o.fxOpacityFix && o.fxName_open != "slide" && $P.css("filter") && $P.css("opacity") == 1)
- $P[0].style.removeAttribute('filter');
- }
- }
-
-
- /**
-	 * Toggle the sliding functionality of a specific pane on/off by adding/removing the 'slide open' trigger
- *
- * @see open(), close()
- * @param {string} pane The pane to enable/disable, 'north', 'south', etc.
- * @param {boolean} enable Enable or Disable sliding?
- */
-, bindStartSlidingEvent = function (pane, enable) {
- var o = options[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , evtName = o.slideTrigger_open.toLowerCase()
- ;
- if (!$R || (enable && !o.slidable)) return;
-
- // make sure we have a valid event
- if (evtName.match(/mouseover/))
- evtName = o.slideTrigger_open = "mouseenter";
- else if (!evtName.match(/(click|dblclick|mouseenter)/))
- evtName = o.slideTrigger_open = "click";
-
- $R
- // add or remove event
- [enable ? "bind" : "unbind"](evtName +'.'+ sID, slideOpen)
- // set the appropriate cursor & title/tip
- .css("cursor", enable ? o.sliderCursor : "default")
- .attr("title", enable ? o.tips.Slide : "")
- ;
- }
-
- /**
- * Add or remove 'mouseleave' events to 'slide close' when pane is 'sliding' open or closed
- * Also increases zIndex when pane is sliding open
- * See bindStartSlidingEvent for code to control 'slide open'
- *
- * @see slideOpen(), slideClose()
- * @param {string} pane The pane to process, 'north', 'south', etc.
- * @param {boolean} enable Enable or Disable events?
- */
-, bindStopSlidingEvents = function (pane, enable) {
- var o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , z = options.zIndexes
- , evtName = o.slideTrigger_close.toLowerCase()
- , action = (enable ? "bind" : "unbind")
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- ;
- s.isSliding = enable; // logic
- timer.clear(pane+"_closeSlider"); // just in case
-
- // remove 'slideOpen' event from resizer
- // ALSO will raise the zIndex of the pane & resizer
- if (enable) bindStartSlidingEvent(pane, false);
-
- // RE/SET zIndex - increases when pane is sliding-open, resets to normal when not
- $P.css("zIndex", enable ? z.pane_sliding : z.pane_normal);
- $R.css("zIndex", enable ? z.pane_sliding+2 : z.resizer_normal); // NOTE: mask = pane_sliding+1
-
- // make sure we have a valid event
- if (!evtName.match(/(click|mouseleave)/))
- evtName = o.slideTrigger_close = "mouseleave"; // also catches 'mouseout'
-
- // add/remove slide triggers
- $R[action](evtName, slideClose); // base event on resize
- // need extra events for mouseleave
- if (evtName === "mouseleave") {
- // also close on pane.mouseleave
- $P[action]("mouseleave."+ sID, slideClose);
- // cancel timer when mouse moves between 'pane' and 'resizer'
- $R[action]("mouseenter."+ sID, cancelMouseOut);
- $P[action]("mouseenter."+ sID, cancelMouseOut);
- }
-
- if (!enable)
- timer.clear(pane+"_closeSlider");
- else if (evtName === "click" && !o.resizable) {
- // IF pane is not resizable (which already has a cursor and tip)
-			// then set a cursor & title/tip on the resizer when sliding
- $R.css("cursor", enable ? o.sliderCursor : "default");
- $R.attr("title", enable ? o.tips.Close : ""); // use Toggler-tip, eg: "Close Pane"
- }
-
- // SUBROUTINE for mouseleave timer clearing
- function cancelMouseOut (evt) {
- timer.clear(pane+"_closeSlider");
- evt.stopPropagation();
- }
- }
-
-
- /**
- * Hides/closes a pane if there is insufficient room - reverses this when there is room again
- * MUST have already called setSizeLimits() before calling this method
- *
- * @param {string} pane The pane being resized
- * @param {boolean=} [isOpening=false] Called from onOpen?
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [force=false]
- */
-, makePaneFit = function (pane, isOpening, skipCallback, force) {
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , isSidePane = c.dir==="vert"
- , hasRoom = false
- ;
- // special handling for center & east/west panes
- if (pane === "center" || (isSidePane && s.noVerticalRoom)) {
- // see if there is enough room to display the pane
- // ERROR: hasRoom = s.minHeight <= s.maxHeight && (isSidePane || s.minWidth <= s.maxWidth);
- hasRoom = (s.maxHeight >= 0);
- if (hasRoom && s.noRoom) { // previously hidden due to noRoom, so show now
- _showPane(pane);
- if ($R) $R.show();
- s.isVisible = true;
- s.noRoom = false;
- if (isSidePane) s.noVerticalRoom = false;
- _fixIframe(pane);
- }
- else if (!hasRoom && !s.noRoom) { // not currently hidden, so hide now
- _hidePane(pane);
- if ($R) $R.hide();
- s.isVisible = false;
- s.noRoom = true;
- }
- }
-
- // see if there is enough room to fit the border-pane
- if (pane === "center") {
- // ignore center in this block
- }
- else if (s.minSize <= s.maxSize) { // pane CAN fit
- hasRoom = true;
- if (s.size > s.maxSize) // pane is too big - shrink it
- sizePane(pane, s.maxSize, skipCallback, force, true); // true = noAnimation
- else if (s.size < s.minSize) // pane is too small - enlarge it
- sizePane(pane, s.minSize, skipCallback, force, true);
- // need s.isVisible because new pseudoClose method keeps pane visible, but off-screen
- else if ($R && s.isVisible && $P.is(":visible")) {
- // make sure resizer-bar is positioned correctly
- // handles situation where nested layout was 'hidden' when initialized
- var side = c.side.toLowerCase()
- , pos = s.size + sC["inset"+ c.side]
- ;
- if ($.layout.cssNum($R, side) != pos) $R.css( side, pos );
- }
-
- // if was previously hidden due to noRoom, then RESET because NOW there is room
- if (s.noRoom) {
- // s.noRoom state will be set by open or show
- if (s.wasOpen && o.closable) {
- if (o.autoReopen)
- open(pane, false, true, true); // true = noAnimation, true = noAlert
- else // leave the pane closed, so just update state
- s.noRoom = false;
- }
- else
- show(pane, s.wasOpen, true, true); // true = noAnimation, true = noAlert
- }
- }
- else { // !hasRoom - pane CANNOT fit
- if (!s.noRoom) { // pane not set as noRoom yet, so hide or close it now...
- s.noRoom = true; // update state
- s.wasOpen = !s.isClosed && !s.isSliding;
- if (s.isClosed){} // SKIP
- else if (o.closable) // 'close' if possible
- close(pane, true, true); // true = force, true = noAnimation
- else // 'hide' pane if cannot just be closed
- hide(pane, true); // true = noAnimation
- }
- }
- }
-
-
- /**
- * sizePane / manualSizePane
- * sizePane is called only by internal methods whenever a pane needs to be resized
- * manualSizePane is an exposed flow-through method allowing extra code when pane is 'manually resized'
- *
- * @param {string|Object} evt_or_pane The pane being resized
- * @param {number} size The *desired* new size for this pane - will be validated
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [noAnimation=false]
- */
-, manualSizePane = function (evt_or_pane, size, skipCallback, noAnimation) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- // if resizing callbacks have been delayed and resizing is now DONE, force resizing to complete...
- , forceResize = o.livePaneResizing && !s.isResizing
- ;
- // ANY call to manualSizePane disables autoResize - ie, percentage sizing
- o.autoResize = false;
- // flow-through...
- sizePane(pane, size, skipCallback, forceResize, noAnimation); // will animate resize if option enabled
- }
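-
-	/* Usage sketch - not part of the original docs; assumes a layout Instance stored as "myLayout"
-	 * (the name is illustrative). Instance.sizePane maps to manualSizePane (see the Instance hash below),
-	 * so the outer-size may be pixels, a percentage, or 'auto' - all parsed by _parseSize():
-	 * @example myLayout.sizePane("west", 250);
-	 * @example myLayout.sizePane("south", "20%");
-	 */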
-
- /**
- * @param {string|Object} evt_or_pane The pane being resized
- * @param {number} size The *desired* new size for this pane - will be validated
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [force=false] Force resizing even if does not seem necessary
- * @param {boolean=} [noAnimation=false]
- */
-, sizePane = function (evt_or_pane, size, skipCallback, force, noAnimation) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane) // probably NEVER called from event?
- , o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , side = _c[pane].side.toLowerCase()
- , dimName = _c[pane].sizeType.toLowerCase()
- , inset = "inset"+ _c[pane].side
- , skipResizeWhileDragging = s.isResizing && !o.triggerEventsDuringLiveResize
- , doFX = noAnimation !== true && o.animatePaneSizing
- , oldSize, newSize
- ;
- // QUEUE in case another action/animation is in progress
- $N.queue(function( queueNext ){
- // calculate 'current' min/max sizes
- setSizeLimits(pane); // update pane-state
- oldSize = s.size;
- size = _parseSize(pane, size); // handle percentages & auto
- size = max(size, _parseSize(pane, o.minSize));
- size = min(size, s.maxSize);
- if (size < s.minSize) { // not enough room for pane!
- queueNext(); // call before makePaneFit() because it needs the queue free
- makePaneFit(pane, false, skipCallback); // will hide or close pane
- return;
- }
-
- // IF newSize is same as oldSize, then nothing to do - abort
- if (!force && size === oldSize)
- return queueNext();
-
- // onresize_start callback CANNOT cancel resizing because this would break the layout!
- if (!skipCallback && state.initialized && s.isVisible)
- _runCallbacks("onresize_start", pane);
-
- // resize the pane, and make sure its visible
- newSize = cssSize(pane, size);
-
- if (doFX && $P.is(":visible")) { // ANIMATE
- var fx = $.layout.effects.size[pane] || $.layout.effects.size.all
- , easing = o.fxSettings_size.easing || fx.easing
- , z = options.zIndexes
- , props = {};
- props[ dimName ] = newSize +'px';
- s.isMoving = true;
- // overlay all elements during animation
- $P.css({ zIndex: z.pane_animate })
- .show().animate( props, o.fxSpeed_size, easing, function(){
- // reset zIndex after animation
- $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
- s.isMoving = false;
- sizePane_2(); // continue
- queueNext();
- });
- }
- else { // no animation
- $P.css( dimName, newSize ); // resize pane
- // if pane is visible, then
- if ($P.is(":visible"))
- sizePane_2(); // continue
- else {
- // pane is NOT VISIBLE, so just update state data...
- // when pane is *next opened*, it will have the new size
- s.size = size; // update state.size
- $.extend(s, elDims($P)); // update state dimensions
- }
- queueNext();
- };
-
- });
-
- // SUBROUTINE
- function sizePane_2 () {
- /* Panes are sometimes not sized precisely in some browsers!?
- * This code will resize the pane up to 3 times to nudge the pane to the correct size
- */
- var actual = dimName==='width' ? $P.outerWidth() : $P.outerHeight()
- , tries = [{
- pane: pane
- , count: 1
- , target: size
- , actual: actual
- , correct: (size === actual)
- , attempt: size
- , cssSize: newSize
- }]
- , lastTry = tries[0]
- , thisTry = {}
- , msg = 'Inaccurate size after resizing the '+ pane +'-pane.'
- ;
- while ( !lastTry.correct ) {
- thisTry = { pane: pane, count: lastTry.count+1, target: size };
-
- if (lastTry.actual > size)
- thisTry.attempt = max(0, lastTry.attempt - (lastTry.actual - size));
- else // lastTry.actual < size
- thisTry.attempt = max(0, lastTry.attempt + (size - lastTry.actual));
-
- thisTry.cssSize = cssSize(pane, thisTry.attempt);
- $P.css( dimName, thisTry.cssSize );
-
- thisTry.actual = dimName=='width' ? $P.outerWidth() : $P.outerHeight();
- thisTry.correct = (size === thisTry.actual);
-
- // log attempts and alert the user of this *non-fatal error* (if showDebugMessages)
- if ( tries.length === 1) {
- _log(msg, false, true);
- _log(lastTry, false, true);
- }
- _log(thisTry, false, true);
-			// after 4 tries, it's as close as it's gonna get!
- if (tries.length > 3) break;
-
- tries.push( thisTry );
- lastTry = tries[ tries.length - 1 ];
- }
- // END TESTING CODE
-
- // update pane-state dimensions
- s.size = size;
- $.extend(s, elDims($P));
-
- if (s.isVisible && $P.is(":visible")) {
- // reposition the resizer-bar
- if ($R) $R.css( side, size + sC[inset] );
- // resize the content-div
- sizeContent(pane);
- }
-
- if (!skipCallback && !skipResizeWhileDragging && state.initialized && s.isVisible)
- _runCallbacks("onresize_end", pane);
-
- // resize all the adjacent panes, and adjust their toggler buttons
- // when skipCallback passed, it means the controlling method will handle 'other panes'
- if (!skipCallback) {
- // also no callback if live-resize is in progress and NOT triggerEventsDuringLiveResize
- if (!s.isSliding) sizeMidPanes(_c[pane].dir=="horz" ? "" : "center", skipResizeWhileDragging, force);
- sizeHandles();
- }
-
- // if opposite-pane was autoClosed, see if it can be autoOpened now
- var altPane = _c.oppositeEdge[pane];
- if (size < oldSize && state[ altPane ].noRoom) {
- setSizeLimits( altPane );
- makePaneFit( altPane, false, skipCallback );
- }
-
- // DEBUG - ALERT user/developer so they know there was a sizing problem
- if (tries.length > 1)
- _log(msg +'\nSee the Error Console for details.', true, true);
- }
- }
-
- /**
- * @see initPanes(), sizePane(), resizeAll(), open(), close(), hide()
-	 * @param {Array.<string>|string} panes The pane(s) being resized, as a comma-delimited string
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [force=false]
- */
-, sizeMidPanes = function (panes, skipCallback, force) {
- panes = (panes ? panes : "east,west,center").split(",");
-
- $.each(panes, function (i, pane) {
- if (!$Ps[pane]) return; // NO PANE - skip
- var
- o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , isCenter= (pane=="center")
- , hasRoom = true
- , CSS = {}
- , newCenter = calcNewCenterPaneDims()
- ;
- // update pane-state dimensions
- $.extend(s, elDims($P));
-
- if (pane === "center") {
- if (!force && s.isVisible && newCenter.width === s.outerWidth && newCenter.height === s.outerHeight)
- return true; // SKIP - pane already the correct size
- // set state for makePaneFit() logic
- $.extend(s, cssMinDims(pane), {
- maxWidth: newCenter.width
- , maxHeight: newCenter.height
- });
- CSS = newCenter;
- // convert OUTER width/height to CSS width/height
- CSS.width = cssW($P, CSS.width);
- // NEW - allow pane to extend 'below' visible area rather than hide it
- CSS.height = cssH($P, CSS.height);
- hasRoom = CSS.width >= 0 && CSS.height >= 0; // height >= 0 = ALWAYS TRUE NOW
- // during layout init, try to shrink east/west panes to make room for center
- if (!state.initialized && o.minWidth > s.outerWidth) {
- var
- reqPx = o.minWidth - s.outerWidth
- , minE = options.east.minSize || 0
- , minW = options.west.minSize || 0
- , sizeE = state.east.size
- , sizeW = state.west.size
- , newE = sizeE
- , newW = sizeW
- ;
- if (reqPx > 0 && state.east.isVisible && sizeE > minE) {
- newE = max( sizeE-minE, sizeE-reqPx );
- reqPx -= sizeE-newE;
- }
- if (reqPx > 0 && state.west.isVisible && sizeW > minW) {
- newW = max( sizeW-minW, sizeW-reqPx );
- reqPx -= sizeW-newW;
- }
- // IF we found enough extra space, then resize the border panes as calculated
- if (reqPx === 0) {
- if (sizeE && sizeE != minE)
- sizePane('east', newE, true, force, true); // true = skipCallback/noAnimation - initPanes will handle when done
- if (sizeW && sizeW != minW)
- sizePane('west', newW, true, force, true);
- // now start over!
- sizeMidPanes('center', skipCallback, force);
- return; // abort this loop
- }
- }
- }
- else { // for east and west, set only the height, which is same as center height
- // set state.min/maxWidth/Height for makePaneFit() logic
- if (s.isVisible && !s.noVerticalRoom)
-				$.extend(s, elDims($P), cssMinDims(pane));
- if (!force && !s.noVerticalRoom && newCenter.height === s.outerHeight)
- return true; // SKIP - pane already the correct size
- // east/west have same top, bottom & height as center
- CSS.top = newCenter.top;
- CSS.bottom = newCenter.bottom;
- // NEW - allow pane to extend 'below' visible area rather than hide it
- CSS.height = cssH($P, newCenter.height);
- s.maxHeight = CSS.height;
- hasRoom = (s.maxHeight >= 0); // ALWAYS TRUE NOW
- if (!hasRoom) s.noVerticalRoom = true; // makePaneFit() logic
- }
-
- if (hasRoom) {
- // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
- if (!skipCallback && state.initialized)
- _runCallbacks("onresize_start", pane);
-
- $P.css(CSS); // apply the CSS to pane
- if (pane !== "center")
- sizeHandles(pane); // also update resizer length
- if (s.noRoom && !s.isClosed && !s.isHidden)
- makePaneFit(pane); // will re-open/show auto-closed/hidden pane
- if (s.isVisible) {
- $.extend(s, elDims($P)); // update pane dimensions
- if (state.initialized) sizeContent(pane); // also resize the contents, if exists
- }
- }
- else if (!s.noRoom && s.isVisible) // no room for pane
- makePaneFit(pane); // will hide or close pane
-
- if (!s.isVisible)
- return true; // DONE - next pane
-
- /*
- * Extra CSS for IE6 or IE7 in Quirks-mode - add 'width' to NORTH/SOUTH panes
- * Normally these panes have only 'left' & 'right' positions so pane auto-sizes
-		 * ALSO required when pane is an IFRAME because it will NOT default to 'full width'
- * TODO: Can I use width:100% for a north/south iframe?
- * TODO: Sounds like a job for $P.outerWidth( sC.innerWidth ) SETTER METHOD
- */
- if (pane === "center") { // finished processing midPanes
- var fix = browser.isIE6 || !browser.boxModel;
- if ($Ps.north && (fix || state.north.tagName=="IFRAME"))
- $Ps.north.css("width", cssW($Ps.north, sC.innerWidth));
- if ($Ps.south && (fix || state.south.tagName=="IFRAME"))
- $Ps.south.css("width", cssW($Ps.south, sC.innerWidth));
- }
-
- // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
- if (!skipCallback && state.initialized)
- _runCallbacks("onresize_end", pane);
- });
- }
-
-
- /**
- * @see window.onresize(), callbacks or custom code
- */
-, resizeAll = function (evt) {
- // stopPropagation if called by trigger("layoutdestroy") - use evtPane utility
- evtPane(evt);
-
- if (!state.initialized) {
- _initLayoutElements();
- return; // no need to resize since we just initialized!
- }
- var oldW = sC.innerWidth
- , oldH = sC.innerHeight
- ;
- // cannot size layout when 'container' is hidden or collapsed
- if (!$N.is(":visible") ) return;
- $.extend(state.container, elDims( $N )); // UPDATE container dimensions
- if (!sC.outerHeight) return;
-
- // onresizeall_start will CANCEL resizing if returns false
-		// state.container has already been set, so user can access this info for calculations
- if (false === _runCallbacks("onresizeall_start")) return false;
-
- var // see if container is now 'smaller' than before
- shrunkH = (sC.innerHeight < oldH)
- , shrunkW = (sC.innerWidth < oldW)
- , $P, o, s, dir
- ;
- // NOTE special order for sizing: S-N-E-W
- $.each(["south","north","east","west"], function (i, pane) {
- if (!$Ps[pane]) return; // no pane - SKIP
- s = state[pane];
- o = options[pane];
- dir = _c[pane].dir;
-
- if (o.autoResize && s.size != o.size) // resize pane to original size set in options
- sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
- else {
- setSizeLimits(pane);
- makePaneFit(pane, false, true, true); // true=skipCallback/forceResize
- }
- });
-
- sizeMidPanes("", true, true); // true=skipCallback, true=forceResize
- sizeHandles(); // reposition the toggler elements
-
- // trigger all individual pane callbacks AFTER layout has finished resizing
- o = options; // reuse alias
- $.each(_c.allPanes, function (i, pane) {
- $P = $Ps[pane];
- if (!$P) return; // SKIP
- if (state[pane].isVisible) // undefined for non-existent panes
- _runCallbacks("onresize_end", pane); // callback - if exists
- });
-
- _runCallbacks("onresizeall_end");
- //_triggerLayoutEvent(pane, 'resizeall');
- }
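-
-	/* Usage sketch (illustrative): resizeAll is exposed on the Instance, so custom code can force a
-	 * full re-measure after changing the container size outside of a normal window.onresize event:
-	 * @example $("#container").width(800);	// "#container" is an assumed layout-container
-	 * @example myLayout.resizeAll();		// re-measure the container & resize all panes
-	 */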
-
- /**
- * Whenever a pane resizes or opens that has a nested layout, trigger resizeAll
- *
- * @param {string|Object} evt_or_pane The pane just resized or opened
- */
-, resizeChildLayout = function (evt_or_pane) {
- var pane = evtPane.call(this, evt_or_pane);
- if (!options[pane].resizeChildLayout) return;
- var $P = $Ps[pane]
- , $C = $Cs[pane]
- , d = "layout"
- , P = Instance[pane]
- , L = children[pane]
- ;
- // user may have manually set EITHER instance pointer, so handle that
- if (P.child && !L) {
- // have to reverse the pointers!
- var el = P.child.container;
- L = children[pane] = (el ? el.data(d) : 0) || null; // set pointer _directly_ to layout instance
- }
-
- // if a layout-pointer exists, see if child has been destroyed
- if (L && L.destroyed)
- L = children[pane] = null; // clear child pointers
- // no child layout pointer is set - see if there is a child layout NOW
- if (!L) L = children[pane] = $P.data(d) || ($C ? $C.data(d) : 0) || null; // set/update child pointers
-
- // ALWAYS refresh the pane.child alias
- P.child = children[pane];
-
- if (L) L.resizeAll();
- }
-
-
- /**
- * IF pane has a content-div, then resize all elements inside pane to fit pane-height
- *
- * @param {string|Object} evt_or_panes The pane(s) being resized
- * @param {boolean=} [remeasure=false] Should the content (header/footer) be remeasured?
- */
-, sizeContent = function (evt_or_panes, remeasure) {
- if (!isInitialized()) return;
-
- var panes = evtPane.call(this, evt_or_panes);
- panes = panes ? panes.split(",") : _c.allPanes;
-
- $.each(panes, function (idx, pane) {
- var
- $P = $Ps[pane]
- , $C = $Cs[pane]
- , o = options[pane]
- , s = state[pane]
- , m = s.content // m = measurements
- ;
- if (!$P || !$C || !$P.is(":visible")) return true; // NOT VISIBLE - skip
-
- // if content-element was REMOVED, update OR remove the pointer
- if (!$C.length) {
- initContent(pane, false); // false = do NOT sizeContent() - already there!
-				if (!$C) return; // no replacement element found - pointer has been removed
- }
-
- // onsizecontent_start will CANCEL resizing if returns false
- if (false === _runCallbacks("onsizecontent_start", pane)) return;
-
- // skip re-measuring offsets if live-resizing
- if ((!s.isMoving && !s.isResizing) || o.liveContentResizing || remeasure || m.top == undefined) {
- _measure();
- // if any footers are below pane-bottom, they may not measure correctly,
- // so allow pane overflow and re-measure
- if (m.hiddenFooters > 0 && $P.css("overflow") === "hidden") {
- $P.css("overflow", "visible");
- _measure(); // remeasure while overflowing
- $P.css("overflow", "hidden");
- }
- }
- // NOTE: spaceAbove/Below *includes* the pane paddingTop/Bottom, but not pane.borders
- var newH = s.innerHeight - (m.spaceAbove - s.css.paddingTop) - (m.spaceBelow - s.css.paddingBottom);
-
- if (!$C.is(":visible") || m.height != newH) {
- // size the Content element to fit new pane-size - will autoHide if not enough room
- setOuterHeight($C, newH, true); // true=autoHide
- m.height = newH; // save new height
- };
-
- if (state.initialized)
- _runCallbacks("onsizecontent_end", pane);
-
- function _below ($E) {
- return max(s.css.paddingBottom, (parseInt($E.css("marginBottom"), 10) || 0));
- };
-
- function _measure () {
- var
- ignore = options[pane].contentIgnoreSelector
- , $Fs = $C.nextAll().not(ignore || ':lt(0)') // not :lt(0) = ALL
- , $Fs_vis = $Fs.filter(':visible')
- , $F = $Fs_vis.filter(':last')
- ;
- m = {
- top: $C[0].offsetTop
- , height: $C.outerHeight()
- , numFooters: $Fs.length
- , hiddenFooters: $Fs.length - $Fs_vis.length
- , spaceBelow: 0 // correct if no content footer ($E)
-				};
- m.spaceAbove = m.top; // just for state - not used in calc
- m.bottom = m.top + m.height;
- if ($F.length)
-				//spaceBelow = (LastFooter.top + LastFooter.height) [footerBottom] - Content.bottom + max(LastFooter.marginBottom, pane.paddingBottom)
- m.spaceBelow = ($F[0].offsetTop + $F.outerHeight()) - m.bottom + _below($F);
- else // no footer - check marginBottom on Content element itself
- m.spaceBelow = _below($C);
- };
- });
- }
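-
-	/* Usage sketch (illustrative): sizeContent is exposed on the Instance and accepts one pane or a
-	 * comma-delimited list; pass true to force a re-measure of the pane's headers/footers:
-	 * @example myLayout.sizeContent("west");
-	 * @example myLayout.sizeContent("north,south", true);
-	 */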
-
-
- /**
- * Called every time a pane is opened, closed, or resized to slide the togglers to 'center' and adjust their length if necessary
- *
- * @see initHandles(), open(), close(), resizeAll()
- * @param {string|Object} evt_or_panes The pane(s) being resized
- */
-, sizeHandles = function (evt_or_panes) {
-		var panes = evtPane.call(this, evt_or_panes);
- panes = panes ? panes.split(",") : _c.borderPanes;
-
- $.each(panes, function (i, pane) {
- var
- o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , $TC
- ;
- if (!$P || !$R) return;
-
- var
- dir = _c[pane].dir
- , _state = (s.isClosed ? "_closed" : "_open")
- , spacing = o["spacing"+ _state]
- , togAlign = o["togglerAlign"+ _state]
- , togLen = o["togglerLength"+ _state]
- , paneLen
- , left
- , offset
- , CSS = {}
- ;
-
- if (spacing === 0) {
- $R.hide();
- return;
- }
- else if (!s.noRoom && !s.isHidden) // skip if resizer was hidden for any reason
- $R.show(); // in case was previously hidden
-
-			// Resizer Bar is ALWAYS the same width/height as the pane it is attached to
- if (dir === "horz") { // north/south
- //paneLen = $P.outerWidth(); // s.outerWidth ||
- paneLen = sC.innerWidth; // handle offscreen-panes
- s.resizerLength = paneLen;
-				left = $.layout.cssNum($P, "left");
- $R.css({
- width: cssW($R, paneLen) // account for borders & padding
- , height: cssH($R, spacing) // ditto
- , left: left > -9999 ? left : sC.insetLeft // handle offscreen-panes
- });
- }
- else { // east/west
- paneLen = $P.outerHeight(); // s.outerHeight ||
- s.resizerLength = paneLen;
- $R.css({
- height: cssH($R, paneLen) // account for borders & padding
- , width: cssW($R, spacing) // ditto
- , top: sC.insetTop + getPaneSize("north", true) // TODO: what if no North pane?
- //, top: $.layout.cssNum($Ps["center"], "top")
- });
- }
-
- // remove hover classes
- removeHover( o, $R );
-
- if ($T) {
- if (togLen === 0 || (s.isSliding && o.hideTogglerOnSlide)) {
- $T.hide(); // always HIDE the toggler when 'sliding'
- return;
- }
- else
- $T.show(); // in case was previously hidden
-
- if (!(togLen > 0) || togLen === "100%" || togLen > paneLen) {
- togLen = paneLen;
- offset = 0;
- }
- else { // calculate 'offset' based on options.PANE.togglerAlign_open/closed
- if (isStr(togAlign)) {
- switch (togAlign) {
- case "top":
- case "left": offset = 0;
- break;
- case "bottom":
- case "right": offset = paneLen - togLen;
- break;
- case "middle":
- case "center":
- default: offset = round((paneLen - togLen) / 2); // 'default' catches typos
- }
- }
- else { // togAlign = number
-					var x = parseInt(togAlign, 10); // togAlign may be a number or a numeric string
-					if (x >= 0) offset = x;
- else offset = paneLen - togLen + x; // NOTE: x is negative!
- }
- }
-
- if (dir === "horz") { // north/south
- var width = cssW($T, togLen);
- $T.css({
- width: width // account for borders & padding
- , height: cssH($T, spacing) // ditto
- , left: offset // TODO: VERIFY that toggler positions correctly for ALL values
- , top: 0
- });
- // CENTER the toggler content SPAN
- $T.children(".content").each(function(){
- $TC = $(this);
- $TC.css("marginLeft", round((width-$TC.outerWidth())/2)); // could be negative
- });
- }
- else { // east/west
- var height = cssH($T, togLen);
- $T.css({
- height: height // account for borders & padding
- , width: cssW($T, spacing) // ditto
- , top: offset // POSITION the toggler
- , left: 0
- });
- // CENTER the toggler content SPAN
- $T.children(".content").each(function(){
- $TC = $(this);
- $TC.css("marginTop", round((height-$TC.outerHeight())/2)); // could be negative
- });
- }
-
- // remove ALL hover classes
- removeHover( 0, $T );
- }
-
- // DONE measuring and sizing this resizer/toggler, so can be 'hidden' now
- if (!state.initialized && (o.initHidden || s.noRoom)) {
- $R.hide();
- if ($T) $T.hide();
- }
- });
- }
-
-
- /**
- * @param {string|Object} evt_or_pane
- */
-, enableClosable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $T = $Ts[pane]
- , o = options[pane]
- ;
- if (!$T) return;
- o.closable = true;
- $T .bind("click."+ sID, function(evt){ evt.stopPropagation(); toggle(pane); })
- .css("visibility", "visible")
- .css("cursor", "pointer")
- .attr("title", state[pane].isClosed ? o.tips.Open : o.tips.Close) // may be blank
- .show();
- }
- /**
- * @param {string|Object} evt_or_pane
- * @param {boolean=} [hide=false]
- */
-, disableClosable = function (evt_or_pane, hide) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $T = $Ts[pane]
- ;
- if (!$T) return;
- options[pane].closable = false;
-		// if closable is disabled, then pane MUST be open!
- if (state[pane].isClosed) open(pane, false, true);
- $T .unbind("."+ sID)
- .css("visibility", hide ? "hidden" : "visible") // instead of hide(), which creates logic issues
- .css("cursor", "default")
- .attr("title", "");
- }
-
-
- /**
- * @param {string|Object} evt_or_pane
- */
-, enableSlidable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- ;
- if (!$R || !$R.data('draggable')) return;
- options[pane].slidable = true;
- if (state[pane].isClosed)
- bindStartSlidingEvent(pane, true);
- }
- /**
- * @param {string|Object} evt_or_pane
- */
-, disableSlidable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- ;
- if (!$R) return;
- options[pane].slidable = false;
- if (state[pane].isSliding)
- close(pane, false, true);
- else {
- bindStartSlidingEvent(pane, false);
- $R .css("cursor", "default")
- .attr("title", "");
- removeHover(null, $R[0]); // in case currently hovered
- }
- }
-
-
- /**
- * @param {string|Object} evt_or_pane
- */
-, enableResizable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- , o = options[pane]
- ;
- if (!$R || !$R.data('draggable')) return;
- o.resizable = true;
- $R.draggable("enable");
- if (!state[pane].isClosed)
- $R .css("cursor", o.resizerCursor)
- .attr("title", o.tips.Resize);
- }
- /**
- * @param {string|Object} evt_or_pane
- */
-, disableResizable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- ;
- if (!$R || !$R.data('draggable')) return;
- options[pane].resizable = false;
- $R .draggable("disable")
- .css("cursor", "default")
- .attr("title", "");
- removeHover(null, $R[0]); // in case currently hovered
- }
-
-
- /**
- * Move a pane from source-side (eg, west) to target-side (eg, east)
- * If pane exists on target-side, move that to source-side, ie, 'swap' the panes
- *
- * @param {string|Object} evt_or_pane1 The pane/edge being swapped
- * @param {string} pane2 ditto
- */
-, swapPanes = function (evt_or_pane1, pane2) {
- if (!isInitialized()) return;
- var pane1 = evtPane.call(this, evt_or_pane1);
- // change state.edge NOW so callbacks can know where pane is headed...
- state[pane1].edge = pane2;
- state[pane2].edge = pane1;
- // run these even if NOT state.initialized
- if (false === _runCallbacks("onswap_start", pane1)
- || false === _runCallbacks("onswap_start", pane2)
- ) {
- state[pane1].edge = pane1; // reset
- state[pane2].edge = pane2;
- return;
- }
-
- var
- oPane1 = copy( pane1 )
- , oPane2 = copy( pane2 )
- , sizes = {}
- ;
- sizes[pane1] = oPane1 ? oPane1.state.size : 0;
- sizes[pane2] = oPane2 ? oPane2.state.size : 0;
-
- // clear pointers & state
- $Ps[pane1] = false;
- $Ps[pane2] = false;
- state[pane1] = {};
- state[pane2] = {};
-
- // ALWAYS remove the resizer & toggler elements
- if ($Ts[pane1]) $Ts[pane1].remove();
- if ($Ts[pane2]) $Ts[pane2].remove();
- if ($Rs[pane1]) $Rs[pane1].remove();
- if ($Rs[pane2]) $Rs[pane2].remove();
- $Rs[pane1] = $Rs[pane2] = $Ts[pane1] = $Ts[pane2] = false;
-
- // transfer element pointers and data to NEW Layout keys
- move( oPane1, pane2 );
- move( oPane2, pane1 );
-
- // cleanup objects
- oPane1 = oPane2 = sizes = null;
-
- // make panes 'visible' again
- if ($Ps[pane1]) $Ps[pane1].css(_c.visible);
- if ($Ps[pane2]) $Ps[pane2].css(_c.visible);
-
- // fix any size discrepancies caused by swap
- resizeAll();
-
- // run these even if NOT state.initialized
- _runCallbacks("onswap_end", pane1);
- _runCallbacks("onswap_end", pane2);
-
- return;
-
- function copy (n) { // n = pane
- var
- $P = $Ps[n]
- , $C = $Cs[n]
- ;
- return !$P ? false : {
- pane: n
- , P: $P ? $P[0] : false
- , C: $C ? $C[0] : false
- , state: $.extend(true, {}, state[n])
- , options: $.extend(true, {}, options[n])
- }
- };
-
- function move (oPane, pane) {
- if (!oPane) return;
- var
- P = oPane.P
- , C = oPane.C
- , oldPane = oPane.pane
- , c = _c[pane]
- , side = c.side.toLowerCase()
- , inset = "inset"+ c.side
- // save pane-options that should be retained
- , s = $.extend(true, {}, state[pane])
- , o = options[pane]
- // RETAIN side-specific FX Settings - more below
- , fx = { resizerCursor: o.resizerCursor }
- , re, size, pos
- ;
- $.each("fxName,fxSpeed,fxSettings".split(","), function (i, k) {
- fx[k +"_open"] = o[k +"_open"];
- fx[k +"_close"] = o[k +"_close"];
- fx[k +"_size"] = o[k +"_size"];
- });
-
- // update object pointers and attributes
- $Ps[pane] = $(P)
- .data({
- layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- })
- .css(_c.hidden)
- .css(c.cssReq)
- ;
- $Cs[pane] = C ? $(C) : false;
-
- // set options and state
- options[pane] = $.extend(true, {}, oPane.options, fx);
- state[pane] = $.extend(true, {}, oPane.state);
-
- // change classNames on the pane, eg: ui-layout-pane-east ==> ui-layout-pane-west
- re = new RegExp(o.paneClass +"-"+ oldPane, "g");
- P.className = P.className.replace(re, o.paneClass +"-"+ pane);
-
- // ALWAYS regenerate the resizer & toggler elements
- initHandles(pane); // create the required resizer & toggler
-
- // if moving to different orientation, then keep 'target' pane size
- if (c.dir != _c[oldPane].dir) {
- size = sizes[pane] || 0;
- setSizeLimits(pane); // update pane-state
- size = max(size, state[pane].minSize);
- // use manualSizePane to disable autoResize - not useful after panes are swapped
- manualSizePane(pane, size, true, true); // true/true = skipCallback/noAnimation
- }
- else // move the resizer here
- $Rs[pane].css(side, sC[inset] + (state[pane].isVisible ? getPaneSize(pane) : 0));
-
-
- // ADD CLASSNAMES & SLIDE-BINDINGS
- if (oPane.state.isVisible && !s.isVisible)
- setAsOpen(pane, true); // true = skipCallback
- else {
- setAsClosed(pane);
- bindStartSlidingEvent(pane, true); // will enable events IF option is set
- }
-
- // DESTROY the object
- oPane = null;
- };
- }
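-
-	/* Usage sketch (illustrative): swap two edge panes - the onswap_start/onswap_end callbacks fire
-	 * for both panes, and resizeAll() then fixes any size discrepancies caused by the swap:
-	 * @example myLayout.swapPanes("west", "east");
-	 */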
-
-
- /**
- * INTERNAL method to sync pin-buttons when pane is opened or closed
- * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes
- *
- * @see open(), setAsOpen(), setAsClosed()
- * @param {string} pane These are the params returned to callbacks by layout()
- * @param {boolean} doPin True means set the pin 'down', False means 'up'
- */
-, syncPinBtns = function (pane, doPin) {
- if ($.layout.plugins.buttons)
- $.each(state[pane].pins, function (i, selector) {
- $.layout.buttons.setPinState(Instance, $(selector), pane, doPin);
- });
- }
-
-; // END var DECLARATIONS
-
- /**
- * Capture keys when enableCursorHotkey - toggle pane if hotkey pressed
- *
- * @see document.keydown()
- */
- function keyDown (evt) {
- if (!evt) return true;
- var code = evt.keyCode;
- if (code < 33) return true; // ignore special keys: ENTER, TAB, etc
-
- var
- PANE = {
- 38: "north" // Up Cursor - $.ui.keyCode.UP
- , 40: "south" // Down Cursor - $.ui.keyCode.DOWN
- , 37: "west" // Left Cursor - $.ui.keyCode.LEFT
- , 39: "east" // Right Cursor - $.ui.keyCode.RIGHT
- }
- , ALT = evt.altKey // no worky!
- , SHIFT = evt.shiftKey
- , CTRL = evt.ctrlKey
- , CURSOR = (CTRL && code >= 37 && code <= 40)
- , o, k, m, pane
- ;
-
- if (CURSOR && options[PANE[code]].enableCursorHotkey) // valid cursor-hotkey
- pane = PANE[code];
- else if (CTRL || SHIFT) // check to see if this matches a custom-hotkey
- $.each(_c.borderPanes, function (i, p) { // loop each pane to check its hotkey
- o = options[p];
- k = o.customHotkey;
- m = o.customHotkeyModifier; // if missing or invalid, treated as "CTRL+SHIFT"
- if ((SHIFT && m=="SHIFT") || (CTRL && m=="CTRL") || (CTRL && SHIFT)) { // Modifier matches
- if (k && code === (isNaN(k) || k <= 9 ? k.toUpperCase().charCodeAt(0) : k)) { // Key matches
- pane = p;
- return false; // BREAK
- }
- }
- });
-
- // validate pane
- if (!pane || !$Ps[pane] || !options[pane].closable || state[pane].isHidden)
- return true;
-
- toggle(pane);
-
- evt.stopPropagation();
- evt.returnValue = false; // CANCEL key
- return false;
- };
-
-
-/*
- * ######################################
- * UTILITY METHODS
- * called externally or by initButtons
- * ######################################
- */
-
- /**
- * Change/reset a pane overflow setting & zIndex to allow popups/drop-downs to work
- *
- * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
- */
- function allowOverflow (el) {
- if (!isInitialized()) return;
- if (this && this.tagName) el = this; // BOUND to element
- var $P;
- if (isStr(el))
- $P = $Ps[el];
- else if ($(el).data("layoutRole"))
- $P = $(el);
- else
- $(el).parents().each(function(){
- if ($(this).data("layoutRole")) {
- $P = $(this);
- return false; // BREAK
- }
- });
- if (!$P || !$P.length) return; // INVALID
-
- var
- pane = $P.data("layoutEdge")
- , s = state[pane]
- ;
-
- // if pane is already raised, then reset it before doing it again!
- // this would happen if allowOverflow is attached to BOTH the pane and an element
- if (s.cssSaved)
- resetOverflow(pane); // reset previous CSS before continuing
-
- // if pane is raised by sliding or resizing, or its closed, then abort
- if (s.isSliding || s.isResizing || s.isClosed) {
- s.cssSaved = false;
- return;
- }
-
- var
- newCSS = { zIndex: (options.zIndexes.resizer_normal + 1) }
- , curCSS = {}
- , of = $P.css("overflow")
- , ofX = $P.css("overflowX")
- , ofY = $P.css("overflowY")
- ;
- // determine which, if any, overflow settings need to be changed
- if (of != "visible") {
- curCSS.overflow = of;
- newCSS.overflow = "visible";
- }
- if (ofX && !ofX.match(/(visible|auto)/)) {
- curCSS.overflowX = ofX;
- newCSS.overflowX = "visible";
- }
- if (ofY && !ofY.match(/(visible|auto)/)) {
-			curCSS.overflowY = ofY;
- newCSS.overflowY = "visible";
- }
-
- // save the current overflow settings - even if blank!
- s.cssSaved = curCSS;
-
- // apply new CSS to raise zIndex and, if necessary, make overflow 'visible'
- $P.css( newCSS );
-
- // make sure the zIndex of all other panes is normal
- $.each(_c.allPanes, function(i, p) {
- if (p != pane) resetOverflow(p);
- });
-
- };
- /**
- * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
- */
- function resetOverflow (el) {
- if (!isInitialized()) return;
- if (this && this.tagName) el = this; // BOUND to element
- var $P;
- if (isStr(el))
- $P = $Ps[el];
- else if ($(el).data("layoutRole"))
- $P = $(el);
- else
- $(el).parents().each(function(){
- if ($(this).data("layoutRole")) {
- $P = $(this);
- return false; // BREAK
- }
- });
- if (!$P || !$P.length) return; // INVALID
-
- var
- pane = $P.data("layoutEdge")
- , s = state[pane]
- , CSS = s.cssSaved || {}
- ;
- // reset the zIndex
- if (!s.isSliding && !s.isResizing)
- $P.css("zIndex", options.zIndexes.pane_normal);
-
- // reset Overflow - if necessary
- $P.css( CSS );
-
- // clear var
- s.cssSaved = false;
- };
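-
-	/* Usage sketch (illustrative): these utilities accept a pane-name, a pane element, or can be
-	 * bound directly so 'this' is the calling element ("#westSelector" is an assumed selector):
-	 * @example myLayout.allowOverflow("west");	// raise zIndex & allow overflow for drop-downs
-	 * @example myLayout.resetOverflow("west");	// restore the saved overflow & zIndex CSS
-	 * @example $("#westSelector").hover( myLayout.allowOverflow, myLayout.resetOverflow );
-	 */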
-
-/*
- * #####################
- * CREATE/RETURN LAYOUT
- * #####################
- */
-
- // validate that container exists
- var $N = $(this).eq(0); // FIRST matching Container element
- if (!$N.length) {
- return _log( options.errors.containerMissing );
- };
-
- // Users retrieve Instance of a layout with: $N.layout() OR $N.data("layout")
- // return the Instance-pointer if layout has already been initialized
- if ($N.data("layoutContainer") && $N.data("layout"))
- return $N.data("layout"); // cached pointer
-
- // init global vars
- var
- $Ps = {} // Panes x5 - set in initPanes()
- , $Cs = {} // Content x5 - set in initPanes()
- , $Rs = {} // Resizers x4 - set in initHandles()
- , $Ts = {} // Togglers x4 - set in initHandles()
- , $Ms = $([]) // Masks - up to 2 masks per pane (IFRAME + DIV)
- // aliases for code brevity
- , sC = state.container // alias for easy access to 'container dimensions'
- , sID = state.id // alias for unique layout ID/namespace - eg: "layout435"
- ;
-
- // create Instance object to expose data & option Properties, and primary action Methods
- var Instance = {
- // layout data
- options: options // property - options hash
- , state: state // property - dimensions hash
- // object pointers
- , container: $N // property - object pointers for layout container
- , panes: $Ps // property - object pointers for ALL Panes: panes.north, panes.center
- , contents: $Cs // property - object pointers for ALL Content: contents.north, contents.center
- , resizers: $Rs // property - object pointers for ALL Resizers, eg: resizers.north
- , togglers: $Ts // property - object pointers for ALL Togglers, eg: togglers.north
- // border-pane open/close
- , hide: hide // method - ditto
- , show: show // method - ditto
- , toggle: toggle // method - pass a 'pane' ("north", "west", etc)
- , open: open // method - ditto
- , close: close // method - ditto
- , slideOpen: slideOpen // method - ditto
- , slideClose: slideClose // method - ditto
- , slideToggle: slideToggle // method - ditto
- // pane actions
- , setSizeLimits: setSizeLimits // method - pass a 'pane' - update state min/max data
-	, _sizePane: sizePane // method - intended for use by plugins only!
- , sizePane: manualSizePane // method - pass a 'pane' AND an 'outer-size' in pixels or percent, or 'auto'
- , sizeContent: sizeContent // method - pass a 'pane'
- , swapPanes: swapPanes // method - pass TWO 'panes' - will swap them
- , showMasks: showMasks // method - pass a 'pane' OR list of panes - default = all panes with mask option set
-	, hideMasks: hideMasks // method - ditto
- // pane element methods
- , initContent: initContent // method - ditto
- , addPane: addPane // method - pass a 'pane'
- , removePane: removePane // method - pass a 'pane' to remove from layout, add 'true' to delete the pane-elem
-	, createChildLayout: createChildLayout // method - pass a 'pane' and (optional) layout-options (OVERRIDES options[pane].childOptions)
- // special pane option setting
- , enableClosable: enableClosable // method - pass a 'pane'
- , disableClosable: disableClosable // method - ditto
- , enableSlidable: enableSlidable // method - ditto
- , disableSlidable: disableSlidable // method - ditto
- , enableResizable: enableResizable // method - ditto
- , disableResizable: disableResizable// method - ditto
- // utility methods for panes
- , allowOverflow: allowOverflow // utility - pass calling element (this)
- , resetOverflow: resetOverflow // utility - ditto
- // layout control
- , destroy: destroy // method - no parameters
- , initPanes: isInitialized // method - no parameters
- , resizeAll: resizeAll // method - no parameters
- // callback triggering
- , runCallbacks: _runCallbacks // method - pass evtName & pane (if a pane-event), eg: trigger("onopen", "west")
- // alias collections of options, state and children - created in addPane and extended elsewhere
- , hasParentLayout: false // set by initContainer()
- , children: children // pointers to child-layouts, eg: Instance.children["west"]
- , north: false // alias group: { name: pane, pane: $Ps[pane], options: options[pane], state: state[pane], child: children[pane] }
- , south: false // ditto
- , west: false // ditto
- , east: false // ditto
- , center: false // ditto
- };
-
- // create the border layout NOW
- if (_create() === 'cancel') // onload_start callback returned false to CANCEL layout creation
- return null;
- else // true OR false -- if layout-elements did NOT init (hidden or do not exist), can auto-init later
- return Instance; // return the Instance object
-
-}
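-
-/* Usage sketch - not part of the original docs; the selector and option below are illustrative:
- * @example var myLayout = $("#container").layout({ west__size: 200 });	// create layout & return the Instance
- * @example myLayout.toggle("west");					// call any Instance method
- * @example var sameLayout = $("#container").data("layout");		// cached Instance pointer
- */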
-
-
-/* OLD versions of jQuery only set $.support.boxModel after page is loaded
- * so if this is IE, use support.boxModel to test for quirks-mode (ONLY IE changes boxModel).
- */
-$(function(){
- var b = $.layout.browser;
- if (b.msie) b.boxModel = $.support.boxModel;
-});
-
-
-/**
- * jquery.layout.state 1.0
- * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
- *
- * Copyright (c) 2010
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * @dependencies: UI Layout 1.3.0.rc30.1 or higher
- * @dependencies: $.ui.cookie (above)
- *
- * @support: http://groups.google.com/group/jquery-ui-layout
- */
-/*
- * State-management options stored in options.stateManagement, which includes a .cookie hash
- * The default options save ALL KEYS for ALL PANES, ie: pane.size, pane.isClosed, pane.isHidden
- *
- * // STATE/COOKIE OPTIONS
- * @example $(el).layout({
- stateManagement: {
- enabled: true
- , stateKeys: "east.size,west.size,east.isClosed,west.isClosed"
- , cookie: { name: "appLayout", path: "/" }
- }
- })
- * @example $(el).layout({ stateManagement__enabled: true }) // enable auto-state-management using cookies
- * @example $(el).layout({ stateManagement__cookie: { name: "appLayout", path: "/" } })
- * @example $(el).layout({ stateManagement__cookie__name: "appLayout", stateManagement__cookie__path: "/" })
- *
- * // STATE/COOKIE METHODS
- * @example myLayout.saveCookie( "west.isClosed,north.size,south.isHidden", {expires: 7} );
- * @example myLayout.loadCookie();
- * @example myLayout.deleteCookie();
- * @example var JSON = myLayout.readState(); // CURRENT Layout State
- * @example var JSON = myLayout.readCookie(); // SAVED Layout State (from cookie)
- * @example var JSON = myLayout.state.stateData; // LAST LOADED Layout State (cookie saved in layout.state hash)
- *
- * CUSTOM STATE-MANAGEMENT (eg, saved in a database)
- * @example var JSON = myLayout.readState( "west.isClosed,north.size,south.isHidden" );
- * @example myLayout.loadState( JSON );
- */
-
-/**
- * UI COOKIE UTILITY
- *
- * A $.cookie OR $.ui.cookie namespace *should be standard*, but until then...
- * This creates $.ui.cookie so Layout does not need the cookie.jquery.js plugin
- * NOTE: This utility is REQUIRED by the layout.state plugin
- *
- * Cookie methods in Layout are created as part of State Management
- */
-if (!$.ui) $.ui = {};
-$.ui.cookie = {
-
-	// cookieEnabled is not in DOM specs, but DOES work in all browsers, including IE6
- acceptsCookies: !!navigator.cookieEnabled
-
-, read: function (name) {
- var
- c = document.cookie
- , cs = c ? c.split(';') : []
- , pair // loop var
- ;
- for (var i=0, n=cs.length; i < n; i++) {
- pair = $.trim(cs[i]).split('='); // name=value pair
- if (pair[0] == name) // found the layout cookie
- return decodeURIComponent(pair[1]);
-
- }
- return null;
- }
-
-, write: function (name, val, cookieOpts) {
- var
- params = ''
- , date = ''
- , clear = false
- , o = cookieOpts || {}
- , x = o.expires
- ;
- if (x && x.toUTCString)
- date = x;
- else if (x === null || typeof x === 'number') {
- date = new Date();
- if (x > 0)
- date.setDate(date.getDate() + x);
- else {
- date.setFullYear(1970);
- clear = true;
- }
- }
- if (date) params += ';expires='+ date.toUTCString();
- if (o.path) params += ';path='+ o.path;
- if (o.domain) params += ';domain='+ o.domain;
- if (o.secure) params += ';secure';
- document.cookie = name +'='+ (clear ? "" : encodeURIComponent( val )) + params; // write or clear cookie
- }
-
-, clear: function (name) {
- $.ui.cookie.write(name, '', {expires: -1});
- }
-
-};
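-
-/* Usage sketch for the cookie utility (name/values are illustrative):
- * @example $.ui.cookie.write("appLayout", "someValue", { expires: 7, path: "/" });
- * @example $.ui.cookie.read("appLayout");	// ==> "someValue", or null if not found
- * @example $.ui.cookie.clear("appLayout");
- */
-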
-// if cookie.jquery.js is not loaded, create an alias to replicate it
-// this may be useful to other plugins or code dependent on that plugin
-if (!$.cookie) $.cookie = function (k, v, o) {
- var C = $.ui.cookie;
- if (v === null)
- C.clear(k);
- else if (v === undefined)
- return C.read(k);
- else
- C.write(k, v, o);
-};
-
-
-// tell Layout that the state plugin is available
-$.layout.plugins.stateManagement = true;
-
-// Add State-Management options to layout.defaults
-$.layout.config.optionRootKeys.push("stateManagement");
-$.layout.defaults.stateManagement = {
- enabled: false // true = enable state-management, even if not using cookies
-, autoSave: true // Save a state-cookie when page exits?
-, autoLoad: true // Load the state-cookie when Layout inits?
- // List state-data to save - must be pane-specific
-, stateKeys: "north.size,south.size,east.size,west.size,"+
- "north.isClosed,south.isClosed,east.isClosed,west.isClosed,"+
- "north.isHidden,south.isHidden,east.isHidden,west.isHidden"
-, cookie: {
- name: "" // If not specified, will use Layout.name, else just "Layout"
- , domain: "" // blank = current domain
- , path: "" // blank = current page, '/' = entire website
- , expires: "" // 'days' to keep cookie - leave blank for 'session cookie'
- , secure: false
- }
-};
-// Set stateManagement as a layout-option, NOT a pane-option
-$.layout.optionsMap.layout.push("stateManagement");
-
-/*
- * State Management methods
- */
-$.layout.state = {
-
- /**
- * Get the current layout state and save it to a cookie
- *
- * myLayout.saveCookie( keys, cookieOpts )
- *
- * @param {Object} inst
- * @param {(string|Array)=} keys
- * @param {Object=} cookieOpts
- */
- saveCookie: function (inst, keys, cookieOpts) {
- var o = inst.options
- , oS = o.stateManagement
- , oC = $.extend(true, {}, oS.cookie, cookieOpts || null)
- , data = inst.state.stateData = inst.readState( keys || oS.stateKeys ) // read current panes-state
- ;
- $.ui.cookie.write( oC.name || o.name || "Layout", $.layout.state.encodeJSON(data), oC );
- return $.extend(true, {}, data); // return COPY of state.stateData data
- }
-
- /**
- * Remove the state cookie
- *
- * @param {Object} inst
- */
-, deleteCookie: function (inst) {
- var o = inst.options;
- $.ui.cookie.clear( o.stateManagement.cookie.name || o.name || "Layout" );
- }
-
- /**
- * Read & return data from the cookie - as JSON
- *
- * @param {Object} inst
- */
-, readCookie: function (inst) {
- var o = inst.options;
- var c = $.ui.cookie.read( o.stateManagement.cookie.name || o.name || "Layout" );
- // convert cookie string back to a hash and return it
- return c ? $.layout.state.decodeJSON(c) : {};
- }
-
- /**
- * Get data from the cookie and USE IT to loadState
- *
- * @param {Object} inst
- */
-, loadCookie: function (inst) {
- var c = $.layout.state.readCookie(inst); // READ the cookie
- if (c) {
- inst.state.stateData = $.extend(true, {}, c); // SET state.stateData
- inst.loadState(c); // LOAD the retrieved state
- }
- return c;
- }
-
- /**
-	 * Update layout options (and the layout itself, if it is initialized) from a stateData hash
- *
- * @param {Object} inst
- * @param {Object=} stateData
- * @param {boolean=} animate
- */
-, loadState: function (inst, stateData, animate) {
- stateData = $.layout.transformData( stateData ); // panes = default subkey
- if ($.isEmptyObject( stateData )) return;
- $.extend(true, inst.options, stateData); // update layout options
- // if layout has already been initialized, then UPDATE layout state
- if (inst.state.initialized) {
-			var pane, vis, o, s, h, c, state
- , noAnimate = (animate===false)
- ;
- $.each($.layout.config.borderPanes, function (idx, pane) {
- state = inst.state[pane];
- o = stateData[ pane ];
- if (typeof o != 'object') return; // no key, continue
- s = o.size;
- c = o.initClosed;
- h = o.initHidden;
- vis = state.isVisible;
- // resize BEFORE opening
- if (!vis)
- inst.sizePane(pane, s, false, false);
- if (h === true) inst.hide(pane, noAnimate);
- else if (c === false) inst.open (pane, false, noAnimate);
- else if (c === true) inst.close(pane, false, noAnimate);
- else if (h === false) inst.show (pane, false, noAnimate);
- // resize AFTER any other actions
- if (vis)
- inst.sizePane(pane, s, false, noAnimate); // animate resize if option passed
- });
- };
- }
-
- /**
- * Get the *current layout state* and return it as a hash
- *
- * @param {Object=} inst
- * @param {(string|Array)=} keys
- */
-, readState: function (inst, keys) {
- var
- data = {}
- , alt = { isClosed: 'initClosed', isHidden: 'initHidden' }
- , state = inst.state
- , panes = $.layout.config.allPanes
- , pair, pane, key, val
- ;
- if (!keys) keys = inst.options.stateManagement.stateKeys; // if called by user
- if ($.isArray(keys)) keys = keys.join(",");
- // convert keys to an array and change delimiters from '__' to '.'
- keys = keys.replace(/__/g, ".").split(',');
- // loop keys and create a data hash
- for (var i=0, n=keys.length; i < n; i++) {
- pair = keys[i].split(".");
- pane = pair[0];
- key = pair[1];
- if ($.inArray(pane, panes) < 0) continue; // bad pane!
- val = state[ pane ][ key ];
- if (val == undefined) continue;
- if (key=="isClosed" && state[pane]["isSliding"])
- val = true; // if sliding, then *really* isClosed
- ( data[pane] || (data[pane]={}) )[ alt[key] ? alt[key] : key ] = val;
- }
- return data;
- }
-
- /**
-	 * Stringify a JSON hash so it can be saved in a cookie or db-field
- */
-, encodeJSON: function (JSON) {
- return parse(JSON);
- function parse (h) {
- var D=[], i=0, k, v, t; // k = key, v = value
- for (k in h) {
- v = h[k];
- t = typeof v;
- if (t == 'string') // STRING - add quotes
- v = '"'+ v +'"';
- else if (t == 'object') // SUB-KEY - recurse into it
- v = parse(v);
- D[i++] = '"'+ k +'":'+ v;
- }
- return '{'+ D.join(',') +'}';
- };
- }
-
- /**
- * Convert stringified JSON back to a hash object
-	 * @see $.parseJSON(), added in jQuery 1.4.1
- */
-, decodeJSON: function (str) {
- try { return $.parseJSON ? $.parseJSON(str) : window["eval"]("("+ str +")") || {}; }
- catch (e) { return {}; }
- }
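-
-	/* Round-trip sketch (values are illustrative) - the two helpers are inverses for simple hashes:
-	 * @example $.layout.state.encodeJSON({ west: { size: 200, initClosed: true } });
-	 *          // ==> '{"west":{"size":200,"initClosed":true}}'
-	 * @example $.layout.state.decodeJSON('{"west":{"size":200,"initClosed":true}}');	// back to a hash
-	 */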
-
-
-, _create: function (inst) {
- var _ = $.layout.state;
- // ADD State-Management plugin methods to inst
- $.extend( inst, {
- // readCookie - update options from cookie - returns hash of cookie data
- readCookie: function () { return _.readCookie(inst); }
- // deleteCookie
- , deleteCookie: function () { _.deleteCookie(inst); }
- // saveCookie - optionally pass keys-list and cookie-options (hash)
- , saveCookie: function (keys, cookieOpts) { return _.saveCookie(inst, keys, cookieOpts); }
- // loadCookie - readCookie and use to loadState() - returns hash of cookie data
- , loadCookie: function () { return _.loadCookie(inst); }
- // loadState - pass a hash of state to use to update options
- , loadState: function (stateData, animate) { _.loadState(inst, stateData, animate); }
- // readState - returns hash of current layout-state
- , readState: function (keys) { return _.readState(inst, keys); }
- // add JSON utility methods too...
- , encodeJSON: _.encodeJSON
- , decodeJSON: _.decodeJSON
- });
-
- // init state.stateData key, even if plugin is initially disabled
- inst.state.stateData = {};
-
- // read and load cookie-data per options
- var oS = inst.options.stateManagement;
- if (oS.enabled) {
- if (oS.autoLoad) // update the options from the cookie
- inst.loadCookie();
- else // don't modify options - just store cookie data in state.stateData
- inst.state.stateData = inst.readCookie();
- }
- }
-
-, _unload: function (inst) {
- var oS = inst.options.stateManagement;
- if (oS.enabled) {
- if (oS.autoSave) // save a state-cookie automatically
- inst.saveCookie();
- else // don't save a cookie, but do store state-data in state.stateData key
- inst.state.stateData = inst.readState();
- }
- }
-
-};
-
-// add state initialization method to Layout's onCreate array of functions
-$.layout.onCreate.push( $.layout.state._create );
-$.layout.onUnload.push( $.layout.state._unload );
-
-
-
-
-/**
- * jquery.layout.buttons 1.0
- * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
- *
- * Copyright (c) 2010
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * @dependencies: UI Layout 1.3.0.rc30.1 or higher
- *
- * @support: http://groups.google.com/group/jquery-ui-layout
- *
- * Docs: [ to come ]
- * Tips: [ to come ]
- */
-
-// tell Layout that the buttons plugin is available
-$.layout.plugins.buttons = true;
-
-// Add buttons options to layout.defaults
-$.layout.defaults.autoBindCustomButtons = false;
-// Specify autoBindCustomButtons as a layout-option, NOT a pane-option
-$.layout.optionsMap.layout.push("autoBindCustomButtons");
-
-/*
- * Button methods
- */
-$.layout.buttons = {
-
- /**
- * Searches for .ui-layout-button-xxx elements and auto-binds them as layout-buttons
- *
- * @see _create()
- *
- * @param {Object} inst Layout Instance object
- */
- init: function (inst) {
- var pre = "ui-layout-button-"
- , layout = inst.options.name || ""
- , name;
- $.each("toggle,open,close,pin,toggle-slide,open-slide".split(","), function (i, action) {
- $.each($.layout.config.borderPanes, function (ii, pane) {
- $("."+pre+action+"-"+pane).each(function(){
- // if button was previously 'bound', data.layoutName was set, but is blank if layout has no 'name'
- name = $(this).data("layoutName") || $(this).attr("layoutName");
- if (name == undefined || name === layout)
- inst.bindButton(this, action, pane);
- });
- });
- });
- }
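-
-	/* Usage sketch (markup is illustrative): with the layout-option autoBindCustomButtons: true,
-	 * an element like <button class="ui-layout-button-toggle-west">West</button> is found and
-	 * bound by init() - equivalent to calling inst.bindButton( buttonElem, "toggle", "west" )
-	 */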
-
- /**
- * Helper function to validate params received by addButton utilities
- *
- * Two classes are added to the element, based on the buttonClass...
- * The type of button is appended to create the 2nd className:
- * - ui-layout-button-pin // action btnClass
- * - ui-layout-button-pin-west // action btnClass + pane
- * - ui-layout-button-toggle
- * - ui-layout-button-open
- * - ui-layout-button-close
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
-	 * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
-	 * @param {string} action The button action, eg: 'toggle', 'open', 'close' or 'pin'
-	 *
-	 * @return {Array.<Object>} If all params are valid, the element matching 'selector' in a jQuery wrapper - otherwise an empty jQuery object
- */
-, get: function (inst, selector, pane, action) {
- var $E = $(selector)
- , o = inst.options
- , err = o.errors.addButtonError
- ;
- if (!$E.length) { // element not found
- $.layout.msg(err +" "+ o.errors.selector +": "+ selector, true);
- }
-		else if ($.inArray(pane, $.layout.config.borderPanes) < 0) { // invalid 'pane' specified
- $.layout.msg(err +" "+ o.errors.pane +": "+ pane, true);
- $E = $(""); // NO BUTTON
- }
- else { // VALID
- var btn = o[pane].buttonClass +"-"+ action;
- $E .addClass( btn +" "+ btn +"-"+ pane )
- .data("layoutName", o.name); // add layout identifier - even if blank!
- }
- return $E;
- }
-
-
- /**
- * NEW syntax for binding layout-buttons - will eventually replace addToggle, addOpen, etc.
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} action
- * @param {string} pane
- */
-, bind: function (inst, selector, action, pane) {
- var _ = $.layout.buttons;
- switch (action.toLowerCase()) {
- case "toggle": _.addToggle (inst, selector, pane); break;
- case "open": _.addOpen (inst, selector, pane); break;
- case "close": _.addClose (inst, selector, pane); break;
- case "pin": _.addPin (inst, selector, pane); break;
- case "toggle-slide": _.addToggle (inst, selector, pane, true); break;
- case "open-slide": _.addOpen (inst, selector, pane, true); break;
- }
- return inst;
- }
-
- /**
- * Add a custom Toggler button for a pane
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- * @param {boolean=} slide true = slide-open, false = pin-open
- */
-, addToggle: function (inst, selector, pane, slide) {
- $.layout.buttons.get(inst, selector, pane, "toggle")
- .click(function(evt){
- inst.toggle(pane, !!slide);
- evt.stopPropagation();
- });
- return inst;
- }
-
- /**
- * Add a custom Open button for a pane
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- * @param {boolean=} slide true = slide-open, false = pin-open
- */
-, addOpen: function (inst, selector, pane, slide) {
- $.layout.buttons.get(inst, selector, pane, "open")
- .attr("title", inst.options[pane].tips.Open)
- .click(function (evt) {
- inst.open(pane, !!slide);
- evt.stopPropagation();
- });
- return inst;
- }
-
- /**
- * Add a custom Close button for a pane
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- */
-, addClose: function (inst, selector, pane) {
- $.layout.buttons.get(inst, selector, pane, "close")
- .attr("title", inst.options[pane].tips.Close)
- .click(function (evt) {
- inst.close(pane);
- evt.stopPropagation();
- });
- return inst;
- }
-
- /**
- * Add a custom Pin button for a pane
- *
- * Four classes are added to the element, based on the paneClass for the associated pane...
- * Assuming the default paneClass and the pin is 'up', these classes are added for a west-pane pin:
- * - ui-layout-pane-pin
- * - ui-layout-pane-west-pin
- * - ui-layout-pane-pin-up
- * - ui-layout-pane-west-pin-up
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the pin is for: 'north', 'south', etc.
- */
-, addPin: function (inst, selector, pane) {
- var _ = $.layout.buttons
- , $E = _.get(inst, selector, pane, "pin");
- if ($E.length) {
- var s = inst.state[pane];
- $E.click(function (evt) {
- _.setPinState(inst, $(this), pane, (s.isSliding || s.isClosed));
- if (s.isSliding || s.isClosed) inst.open( pane ); // change from sliding to open
- else inst.close( pane ); // slide-closed
- evt.stopPropagation();
- });
- // add up/down pin attributes and classes
- _.setPinState(inst, $E, pane, (!s.isClosed && !s.isSliding));
- // add this pin to the pane data so we can 'sync it' automatically
- // PANE.pins key is an array so we can store multiple pins for each pane
- s.pins.push( selector ); // just save the selector string
- }
- return inst;
- }
-
- /**
- * Change the class of the pin button to make it look 'up' or 'down'
- *
- * @see addPin(), syncPins()
- *
- * @param {Object} inst Layout Instance object
- * @param {Array.<Object>} $Pin The pin-span element in a jQuery wrapper
- * @param {string}		pane		Name of the pane the pin is for: 'north', 'south', etc.
- * @param {boolean} doPin true = set the pin 'down', false = set it 'up'
- */
-, setPinState: function (inst, $Pin, pane, doPin) {
- var updown = $Pin.attr("pin");
- if (updown && doPin === (updown=="down")) return; // already in correct state
- var
- o = inst.options[pane]
- , pin = o.buttonClass +"-pin"
- , side = pin +"-"+ pane
- , UP = pin +"-up "+ side +"-up"
- , DN = pin +"-down "+side +"-down"
- ;
- $Pin
- .attr("pin", doPin ? "down" : "up") // logic
- .attr("title", doPin ? o.tips.Unpin : o.tips.Pin)
- .removeClass( doPin ? UP : DN )
- .addClass( doPin ? DN : UP )
- ;
- }
-
- /**
- * INTERNAL function to sync 'pin buttons' when pane is opened or closed
- * Unpinned means the pane is 'sliding' - ie, it sits on top of the adjacent panes
- *
- * @see open(), close()
- *
- * @param {Object} inst Layout Instance object
- * @param {string}		pane		Name of the pane whose pin-buttons should be synced: 'north', 'south', etc.
- * @param {boolean} doPin True means set the pin 'down', False means 'up'
- */
-, syncPinBtns: function (inst, pane, doPin) {
- // REAL METHOD IS _INSIDE_ LAYOUT - THIS IS HERE JUST FOR REFERENCE
- $.each(inst.state[pane].pins, function (i, selector) {
- $.layout.buttons.setPinState(inst, $(selector), pane, doPin);
- });
- }
-
-
-, _load: function (inst) {
- var _ = $.layout.buttons;
- // ADD Button methods to Layout Instance
- // Note: sel = jQuery Selector string
- $.extend( inst, {
- bindButton: function (sel, action, pane) { return _.bind(inst, sel, action, pane); }
- // DEPRECATED METHODS
- , addToggleBtn: function (sel, pane, slide) { return _.addToggle(inst, sel, pane, slide); }
- , addOpenBtn: function (sel, pane, slide) { return _.addOpen(inst, sel, pane, slide); }
- , addCloseBtn: function (sel, pane) { return _.addClose(inst, sel, pane); }
- , addPinBtn: function (sel, pane) { return _.addPin(inst, sel, pane); }
- });
-
- // init state array to hold pin-buttons
- for (var i=0; i<4; i++) {
- var pane = $.layout.config.borderPanes[i];
- inst.state[pane].pins = [];
- }
-
- // auto-init buttons onLoad if option is enabled
- if ( inst.options.autoBindCustomButtons )
- _.init(inst);
- }
-
-, _unload: function (inst) {
- // TODO: unbind all buttons???
- }
-
-};
-
-// add initialization method to Layout's onLoad array of functions
-$.layout.onLoad.push( $.layout.buttons._load );
-//$.layout.onUnload.push( $.layout.buttons._unload );
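
For reference, the buttons plugin deleted above offers two ways to wire custom buttons: class-based auto-binding (driven by the autoBindCustomButtons layout option and the ui-layout-button-<action>-<pane> class convention scanned by init()), and the bindButton() method that _load() attaches to the layout instance. A minimal usage sketch, assuming the usual $(container).layout(options) initializer and made-up element IDs:

// Usage sketch only (not part of the deleted source); element IDs are hypothetical.
var myLayout = $("body").layout({
	autoBindCustomButtons: true  // init() then binds e.g. <span class="ui-layout-button-toggle-west">
});

// Explicit binding via the instance method added in _load():
myLayout.bindButton("#pin-west-btn", "pin", "west");  // routes to $.layout.buttons.addPin()
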
-
-
-
-/**
- * jquery.layout.browserZoom 1.0
- * $Date: 2011-12-29 08:00:00 (Thu, 29 Dec 2011) $
- *
- * Copyright (c) 2012
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * @dependencies: UI Layout 1.3.0.rc30.1 or higher
- *
- * @support: http://groups.google.com/group/jquery-ui-layout
- *
- * @todo: Extend logic to handle zooming quirks in other browsers
- * @todo: Add hotkey/mousewheel bindings to respond _instantly_ to these zoom events
- */
-
-// tell Layout that the plugin is available
-$.layout.plugins.browserZoom = true;
-
-$.layout.defaults.browserZoomCheckInterval = 1000;
-$.layout.optionsMap.layout.push("browserZoomCheckInterval");
-
-/*
- * browserZoom methods
- */
-$.layout.browserZoom = {
-
- _init: function (inst) {
- // abort if browser does not need this check
- if ($.layout.browserZoom.ratio() !== false)
- $.layout.browserZoom._setTimer(inst);
- }
-
-, _setTimer: function (inst) {
-		// abort if the layout has been destroyed
- if (inst.destroyed) return;
- var o = inst.options
- , s = inst.state
-			// don't need the check if inst has a parentLayout, but check occasionally in case the parent is destroyed!
- // MINIMUM 100ms interval, for performance
- , ms = inst.hasParentLayout ? 5000 : Math.max( o.browserZoomCheckInterval, 100 )
- ;
- // set the timer
- setTimeout(function(){
- if (inst.destroyed || !o.resizeWithWindow) return;
- var d = $.layout.browserZoom.ratio();
- if (d !== s.browserZoom) {
- s.browserZoom = d;
- inst.resizeAll();
- }
- // set a NEW timeout
- $.layout.browserZoom._setTimer(inst);
- }
- , ms );
- }
-
-, ratio: function () {
- var w = window
- , s = screen
- , d = document
- , dE = d.documentElement || d.body
- , b = $.layout.browser
- , v = b.version
- , r, sW, cW
- ;
- // we can ignore all browsers that fire window.resize event onZoom
- if ((b.msie && v > 8)
- || !b.msie
- ) return false; // don't need to track zoom
-
- if (s.deviceXDPI)
- return calc(s.deviceXDPI, s.systemXDPI);
- // everything below is just for future reference!
- if (b.webkit && (r = d.body.getBoundingClientRect))
- return calc((r.left - r.right), d.body.offsetWidth);
- if (b.webkit && (sW = w.outerWidth))
- return calc(sW, w.innerWidth);
- if ((sW = s.width) && (cW = dE.clientWidth))
- return calc(sW, cW);
- return false; // no match, so cannot - or don't need to - track zoom
-
- function calc (x,y) { return (parseInt(x,10) / parseInt(y,10) * 100).toFixed(); }
- }
-
-};
-// add initialization method to Layout's onLoad array of functions
-$.layout.onReady.push( $.layout.browserZoom._init );
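
For context, the browserZoom plugin deleted above only matters in browsers that do not fire window.resize on zoom (IE 8 and older, per ratio()); it polls the deviceXDPI/systemXDPI ratio and calls resizeAll() when the value changes. A configuration sketch, assuming the usual $(container).layout(options) initializer:

// Configuration sketch only (not part of the deleted source).
$("body").layout({
	resizeWithWindow: true,          // the timer skips resizing when this is false
	browserZoomCheckInterval: 500    // poll every 500 ms; _setTimer() clamps this to a 100 ms minimum
});
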
-
-
-
-})( jQuery );
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
deleted file mode 100644
index 4688d633fe..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
+++ /dev/null
@@ -1,4 +0,0 @@
-/* Modernizr 2.5.3 (Custom Build) | MIT & BSD
- * Build: http://www.modernizr.com/download/#-inlinesvg
- */
-;window.Modernizr=function(a,b,c){function u(a){i.cssText=a}function v(a,b){return u(prefixes.join(a+";")+(b||""))}function w(a,b){return typeof a===b}function x(a,b){return!!~(""+a).indexOf(b)}function y(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:w(f,"function")?f.bind(d||b):f}return!1}var d="2.5.3",e={},f=b.documentElement,g="modernizr",h=b.createElement(g),i=h.style,j,k={}.toString,l={svg:"http://www.w3.org/2000/svg"},m={},n={},o={},p=[],q=p.slice,r,s={}.hasOwnProperty,t;!w(s,"undefined")&&!w(s.call,"undefined")?t=function(a,b){return s.call(a,b)}:t=function(a,b){return b in a&&w(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=q.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new a,g=c.apply(f,d.concat(q.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(q.call(arguments)))};return e}),m.inlinesvg=function(){var a=b.createElement("div");return a.innerHTML="<svg/>",(a.firstChild&&a.firstChild.namespaceURI)==l.svg};for(var z in m)t(m,z)&&(r=z.toLowerCase(),e[r]=m[z](),p.push((e[r]?"":"no-")+r));return u(""),h=j=null,e._version=d,e}(this,this.document); \ No newline at end of file
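
The custom Modernizr build deleted above bundles a single feature test, inlinesvg, presumably consulted by the Scaladoc diagram scripts before rendering inline SVG. Un-minified, the test amounts to roughly this sketch:

// Readable paraphrase of the minified "inlinesvg" test above (sketch, not the original formatting).
function supportsInlineSvg() {
	var div = document.createElement("div");
	div.innerHTML = "<svg/>";
	// True when the parser produced a real element in the SVG namespace.
	return (div.firstChild && div.firstChild.namespaceURI) === "http://www.w3.org/2000/svg";
}
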
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
deleted file mode 100644
index 9b32288e04..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
deleted file mode 100644
index fd0ad06e81..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png
deleted file mode 100644
index ad312793ea..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png
deleted file mode 100644
index 67ffca79de..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
deleted file mode 100644
index 6e9f2f743f..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
deleted file mode 100644
index 7502942eb6..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
deleted file mode 100644
index c777bfce8d..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
deleted file mode 100644
index 7502942eb6..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
deleted file mode 100644
index 848dd5963a..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
deleted file mode 100644
index 34a04249ee..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
deleted file mode 100644
index 2ed33b0aa4..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png
deleted file mode 100644
index 6ea17ac320..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png
deleted file mode 100644
index 529aa93188..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
deleted file mode 100644
index 00c3378a2a..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
deleted file mode 100644
index d30dbad858..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
+++ /dev/null
@@ -1,10 +0,0 @@
-// ┌────────────────────────────────────────────────────────────────────┐ \\
-// │ Raphaël 2.1.0 - JavaScript Vector Library                          │ \\
-// ├────────────────────────────────────────────────────────────────────┤ \\
-// │ Copyright © 2008-2012 Dmitry Baranovskiy (http://raphaeljs.com)    │ \\
-// │ Copyright © 2008-2012 Sencha Labs (http://sencha.com)              │ \\
-// ├────────────────────────────────────────────────────────────────────┤ \\
-// │ Licensed under the MIT (http://raphaeljs.com/license.html) license.│ \\
-// └────────────────────────────────────────────────────────────────────┘ \\
-
-(function(a){var b="0.3.4",c="hasOwnProperty",d=/[\.\/]/,e="*",f=function(){},g=function(a,b){return a-b},h,i,j={n:{}},k=function(a,b){var c=j,d=i,e=Array.prototype.slice.call(arguments,2),f=k.listeners(a),l=0,m=!1,n,o=[],p={},q=[],r=h,s=[];h=a,i=0;for(var t=0,u=f.length;t<u;t++)"zIndex"in f[t]&&(o.push(f[t].zIndex),f[t].zIndex<0&&(p[f[t].zIndex]=f[t]));o.sort(g);while(o[l]<0){n=p[o[l++]],q.push(n.apply(b,e));if(i){i=d;return q}}for(t=0;t<u;t++){n=f[t];if("zIndex"in n)if(n.zIndex==o[l]){q.push(n.apply(b,e));if(i)break;do{l++,n=p[o[l]],n&&q.push(n.apply(b,e));if(i)break}while(n)}else p[n.zIndex]=n;else{q.push(n.apply(b,e));if(i)break}}i=d,h=r;return q.length?q:null};k.listeners=function(a){var b=a.split(d),c=j,f,g,h,i,k,l,m,n,o=[c],p=[];for(i=0,k=b.length;i<k;i++){n=[];for(l=0,m=o.length;l<m;l++){c=o[l].n,g=[c[b[i]],c[e]],h=2;while(h--)f=g[h],f&&(n.push(f),p=p.concat(f.f||[]))}o=n}return p},k.on=function(a,b){var c=a.split(d),e=j;for(var g=0,h=c.length;g<h;g++)e=e.n,!e[c[g]]&&(e[c[g]]={n:{}}),e=e[c[g]];e.f=e.f||[];for(g=0,h=e.f.length;g<h;g++)if(e.f[g]==b)return f;e.f.push(b);return function(a){+a==+a&&(b.zIndex=+a)}},k.stop=function(){i=1},k.nt=function(a){if(a)return(new RegExp("(?:\\.|\\/|^)"+a+"(?:\\.|\\/|$)")).test(h);return h},k.off=k.unbind=function(a,b){var f=a.split(d),g,h,i,k,l,m,n,o=[j];for(k=0,l=f.length;k<l;k++)for(m=0;m<o.length;m+=i.length-2){i=[m,1],g=o[m].n;if(f[k]!=e)g[f[k]]&&i.push(g[f[k]]);else for(h in g)g[c](h)&&i.push(g[h]);o.splice.apply(o,i)}for(k=0,l=o.length;k<l;k++){g=o[k];while(g.n){if(b){if(g.f){for(m=0,n=g.f.length;m<n;m++)if(g.f[m]==b){g.f.splice(m,1);break}!g.f.length&&delete g.f}for(h in g.n)if(g.n[c](h)&&g.n[h].f){var p=g.n[h].f;for(m=0,n=p.length;m<n;m++)if(p[m]==b){p.splice(m,1);break}!p.length&&delete g.n[h].f}}else{delete g.f;for(h in g.n)g.n[c](h)&&g.n[h].f&&delete g.n[h].f}g=g.n}}},k.once=function(a,b){var c=function(){var d=b.apply(this,arguments);k.unbind(a,c);return d};return k.on(a,c)},k.version=b,k.toString=function(){return"You are running Eve "+b},typeof module!="undefined"&&module.exports?module.exports=k:typeof define!="undefined"?define("eve",[],function(){return k}):a.eve=k})(this),function(){function cF(a){for(var b=0;b<cy.length;b++)cy[b].el.paper==a&&cy.splice(b--,1)}function cE(b,d,e,f,h,i){e=Q(e);var j,k,l,m=[],o,p,q,t=b.ms,u={},v={},w={};if(f)for(y=0,z=cy.length;y<z;y++){var x=cy[y];if(x.el.id==d.id&&x.anim==b){x.percent!=e?(cy.splice(y,1),l=1):k=x,d.attr(x.totalOrigin);break}}else f=+v;for(var y=0,z=b.percents.length;y<z;y++){if(b.percents[y]==e||b.percents[y]>f*b.top){e=b.percents[y],p=b.percents[y-1]||0,t=t/b.top*(e-p),o=b.percents[y+1],j=b.anim[e];break}f&&d.attr(b.anim[b.percents[y]])}if(!!j){if(!k){for(var A in j)if(j[g](A))if(U[g](A)||d.paper.customAttributes[g](A)){u[A]=d.attr(A),u[A]==null&&(u[A]=T[A]),v[A]=j[A];switch(U[A]){case C:w[A]=(v[A]-u[A])/t;break;case"colour":u[A]=a.getRGB(u[A]);var B=a.getRGB(v[A]);w[A]={r:(B.r-u[A].r)/t,g:(B.g-u[A].g)/t,b:(B.b-u[A].b)/t};break;case"path":var D=bR(u[A],v[A]),E=D[1];u[A]=D[0],w[A]=[];for(y=0,z=u[A].length;y<z;y++){w[A][y]=[0];for(var F=1,G=u[A][y].length;F<G;F++)w[A][y][F]=(E[y][F]-u[A][y][F])/t}break;case"transform":var H=d._,I=ca(H[A],v[A]);if(I){u[A]=I.from,v[A]=I.to,w[A]=[],w[A].real=!0;for(y=0,z=u[A].length;y<z;y++){w[A][y]=[u[A][y][0]];for(F=1,G=u[A][y].length;F<G;F++)w[A][y][F]=(v[A][y][F]-u[A][y][F])/t}}else{var J=d.matrix||new cb,K={_:{transform:H.transform},getBBox:function(){return 
d.getBBox(1)}};u[A]=[J.a,J.b,J.c,J.d,J.e,J.f],b$(K,v[A]),v[A]=K._.transform,w[A]=[(K.matrix.a-J.a)/t,(K.matrix.b-J.b)/t,(K.matrix.c-J.c)/t,(K.matrix.d-J.d)/t,(K.matrix.e-J.e)/t,(K.matrix.f-J.f)/t]}break;case"csv":var L=r(j[A])[s](c),M=r(u[A])[s](c);if(A=="clip-rect"){u[A]=M,w[A]=[],y=M.length;while(y--)w[A][y]=(L[y]-u[A][y])/t}v[A]=L;break;default:L=[][n](j[A]),M=[][n](u[A]),w[A]=[],y=d.paper.customAttributes[A].length;while(y--)w[A][y]=((L[y]||0)-(M[y]||0))/t}}var O=j.easing,P=a.easing_formulas[O];if(!P){P=r(O).match(N);if(P&&P.length==5){var R=P;P=function(a){return cC(a,+R[1],+R[2],+R[3],+R[4],t)}}else P=bf}q=j.start||b.start||+(new Date),x={anim:b,percent:e,timestamp:q,start:q+(b.del||0),status:0,initstatus:f||0,stop:!1,ms:t,easing:P,from:u,diff:w,to:v,el:d,callback:j.callback,prev:p,next:o,repeat:i||b.times,origin:d.attr(),totalOrigin:h},cy.push(x);if(f&&!k&&!l){x.stop=!0,x.start=new Date-t*f;if(cy.length==1)return cA()}l&&(x.start=new Date-x.ms*f),cy.length==1&&cz(cA)}else k.initstatus=f,k.start=new Date-k.ms*f;eve("raphael.anim.start."+d.id,d,b)}}function cD(a,b){var c=[],d={};this.ms=b,this.times=1;if(a){for(var e in a)a[g](e)&&(d[Q(e)]=a[e],c.push(Q(e)));c.sort(bd)}this.anim=d,this.top=c[c.length-1],this.percents=c}function cC(a,b,c,d,e,f){function o(a,b){var c,d,e,f,j,k;for(e=a,k=0;k<8;k++){f=m(e)-a;if(z(f)<b)return e;j=(3*i*e+2*h)*e+g;if(z(j)<1e-6)break;e=e-f/j}c=0,d=1,e=a;if(e<c)return c;if(e>d)return d;while(c<d){f=m(e);if(z(f-a)<b)return e;a>f?c=e:d=e,e=(d-c)/2+c}return e}function n(a,b){var c=o(a,b);return((l*c+k)*c+j)*c}function m(a){return((i*a+h)*a+g)*a}var g=3*b,h=3*(d-b)-g,i=1-g-h,j=3*c,k=3*(e-c)-j,l=1-j-k;return n(a,1/(200*f))}function cq(){return this.x+q+this.y+q+this.width+" Ɨ "+this.height}function cp(){return this.x+q+this.y}function cb(a,b,c,d,e,f){a!=null?(this.a=+a,this.b=+b,this.c=+c,this.d=+d,this.e=+e,this.f=+f):(this.a=1,this.b=0,this.c=0,this.d=1,this.e=0,this.f=0)}function bH(b,c,d){b=a._path2curve(b),c=a._path2curve(c);var e,f,g,h,i,j,k,l,m,n,o=d?0:[];for(var p=0,q=b.length;p<q;p++){var r=b[p];if(r[0]=="M")e=i=r[1],f=j=r[2];else{r[0]=="C"?(m=[e,f].concat(r.slice(1)),e=m[6],f=m[7]):(m=[e,f,e,f,i,j,i,j],e=i,f=j);for(var s=0,t=c.length;s<t;s++){var u=c[s];if(u[0]=="M")g=k=u[1],h=l=u[2];else{u[0]=="C"?(n=[g,h].concat(u.slice(1)),g=n[6],h=n[7]):(n=[g,h,g,h,k,l,k,l],g=k,h=l);var v=bG(m,n,d);if(d)o+=v;else{for(var w=0,x=v.length;w<x;w++)v[w].segment1=p,v[w].segment2=s,v[w].bez1=m,v[w].bez2=n;o=o.concat(v)}}}}}return o}function bG(b,c,d){var e=a.bezierBBox(b),f=a.bezierBBox(c);if(!a.isBBoxIntersect(e,f))return d?0:[];var g=bB.apply(0,b),h=bB.apply(0,c),i=~~(g/5),j=~~(h/5),k=[],l=[],m={},n=d?0:[];for(var o=0;o<i+1;o++){var p=a.findDotsAtSegment.apply(a,b.concat(o/i));k.push({x:p.x,y:p.y,t:o/i})}for(o=0;o<j+1;o++)p=a.findDotsAtSegment.apply(a,c.concat(o/j)),l.push({x:p.x,y:p.y,t:o/j});for(o=0;o<i;o++)for(var q=0;q<j;q++){var r=k[o],s=k[o+1],t=l[q],u=l[q+1],v=z(s.x-r.x)<.001?"y":"x",w=z(u.x-t.x)<.001?"y":"x",x=bD(r.x,r.y,s.x,s.y,t.x,t.y,u.x,u.y);if(x){if(m[x.x.toFixed(4)]==x.y.toFixed(4))continue;m[x.x.toFixed(4)]=x.y.toFixed(4);var y=r.t+z((x[v]-r[v])/(s[v]-r[v]))*(s.t-r.t),A=t.t+z((x[w]-t[w])/(u[w]-t[w]))*(u.t-t.t);y>=0&&y<=1&&A>=0&&A<=1&&(d?n++:n.push({x:x.x,y:x.y,t1:y,t2:A}))}}return n}function bF(a,b){return bG(a,b,1)}function bE(a,b){return bG(a,b)}function bD(a,b,c,d,e,f,g,h){if(!(x(a,c)<y(e,g)||y(a,c)>x(e,g)||x(b,d)<y(f,h)||y(b,d)>x(f,h))){var i=(a*d-b*c)*(e-g)-(a-c)*(e*h-f*g),j=(a*d-b*c)*(f-h)-(b-d)*(e*h-f*g),k=(a-c)*(f-h)-(b-d)*(e-g);if(!k)return;var 
l=i/k,m=j/k,n=+l.toFixed(2),o=+m.toFixed(2);if(n<+y(a,c).toFixed(2)||n>+x(a,c).toFixed(2)||n<+y(e,g).toFixed(2)||n>+x(e,g).toFixed(2)||o<+y(b,d).toFixed(2)||o>+x(b,d).toFixed(2)||o<+y(f,h).toFixed(2)||o>+x(f,h).toFixed(2))return;return{x:l,y:m}}}function bC(a,b,c,d,e,f,g,h,i){if(!(i<0||bB(a,b,c,d,e,f,g,h)<i)){var j=1,k=j/2,l=j-k,m,n=.01;m=bB(a,b,c,d,e,f,g,h,l);while(z(m-i)>n)k/=2,l+=(m<i?1:-1)*k,m=bB(a,b,c,d,e,f,g,h,l);return l}}function bB(a,b,c,d,e,f,g,h,i){i==null&&(i=1),i=i>1?1:i<0?0:i;var j=i/2,k=12,l=[-0.1252,.1252,-0.3678,.3678,-0.5873,.5873,-0.7699,.7699,-0.9041,.9041,-0.9816,.9816],m=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],n=0;for(var o=0;o<k;o++){var p=j*l[o]+j,q=bA(p,a,c,e,g),r=bA(p,b,d,f,h),s=q*q+r*r;n+=m[o]*w.sqrt(s)}return j*n}function bA(a,b,c,d,e){var f=-3*b+9*c-9*d+3*e,g=a*f+6*b-12*c+6*d;return a*g-3*b+3*c}function by(a,b){var c=[];for(var d=0,e=a.length;e-2*!b>d;d+=2){var f=[{x:+a[d-2],y:+a[d-1]},{x:+a[d],y:+a[d+1]},{x:+a[d+2],y:+a[d+3]},{x:+a[d+4],y:+a[d+5]}];b?d?e-4==d?f[3]={x:+a[0],y:+a[1]}:e-2==d&&(f[2]={x:+a[0],y:+a[1]},f[3]={x:+a[2],y:+a[3]}):f[0]={x:+a[e-2],y:+a[e-1]}:e-4==d?f[3]=f[2]:d||(f[0]={x:+a[d],y:+a[d+1]}),c.push(["C",(-f[0].x+6*f[1].x+f[2].x)/6,(-f[0].y+6*f[1].y+f[2].y)/6,(f[1].x+6*f[2].x-f[3].x)/6,(f[1].y+6*f[2].y-f[3].y)/6,f[2].x,f[2].y])}return c}function bx(){return this.hex}function bv(a,b,c){function d(){var e=Array.prototype.slice.call(arguments,0),f=e.join("ā€"),h=d.cache=d.cache||{},i=d.count=d.count||[];if(h[g](f)){bu(i,f);return c?c(h[f]):h[f]}i.length>=1e3&&delete h[i.shift()],i.push(f),h[f]=a[m](b,e);return c?c(h[f]):h[f]}return d}function bu(a,b){for(var c=0,d=a.length;c<d;c++)if(a[c]===b)return a.push(a.splice(c,1)[0])}function bm(a){if(Object(a)!==a)return a;var b=new a.constructor;for(var c in a)a[g](c)&&(b[c]=bm(a[c]));return b}function a(c){if(a.is(c,"function"))return b?c():eve.on("raphael.DOMload",c);if(a.is(c,E))return a._engine.create[m](a,c.splice(0,3+a.is(c[0],C))).add(c);var d=Array.prototype.slice.call(arguments,0);if(a.is(d[d.length-1],"function")){var e=d.pop();return b?e.call(a._engine.create[m](a,d)):eve.on("raphael.DOMload",function(){e.call(a._engine.create[m](a,d))})}return a._engine.create[m](a,arguments)}a.version="2.1.0",a.eve=eve;var b,c=/[, ]+/,d={circle:1,rect:1,path:1,ellipse:1,text:1,image:1},e=/\{(\d+)\}/g,f="prototype",g="hasOwnProperty",h={doc:document,win:window},i={was:Object.prototype[g].call(h.win,"Raphael"),is:h.win.Raphael},j=function(){this.ca=this.customAttributes={}},k,l="appendChild",m="apply",n="concat",o="createTouch"in h.doc,p="",q=" ",r=String,s="split",t="click dblclick mousedown mousemove mouseout mouseover mouseup touchstart touchmove touchend 
touchcancel"[s](q),u={mousedown:"touchstart",mousemove:"touchmove",mouseup:"touchend"},v=r.prototype.toLowerCase,w=Math,x=w.max,y=w.min,z=w.abs,A=w.pow,B=w.PI,C="number",D="string",E="array",F="toString",G="fill",H=Object.prototype.toString,I={},J="push",K=a._ISURL=/^url\(['"]?([^\)]+?)['"]?\)$/i,L=/^\s*((#[a-f\d]{6})|(#[a-f\d]{3})|rgba?\(\s*([\d\.]+%?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+%?(?:\s*,\s*[\d\.]+%?)?)\s*\)|hsba?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?)%?\s*\)|hsla?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?)%?\s*\))\s*$/i,M={NaN:1,Infinity:1,"-Infinity":1},N=/^(?:cubic-)?bezier\(([^,]+),([^,]+),([^,]+),([^\)]+)\)/,O=w.round,P="setAttribute",Q=parseFloat,R=parseInt,S=r.prototype.toUpperCase,T=a._availableAttrs={"arrow-end":"none","arrow-start":"none",blur:0,"clip-rect":"0 0 1e9 1e9",cursor:"default",cx:0,cy:0,fill:"#fff","fill-opacity":1,font:'10px "Arial"',"font-family":'"Arial"',"font-size":"10","font-style":"normal","font-weight":400,gradient:0,height:0,href:"http://raphaeljs.com/","letter-spacing":0,opacity:1,path:"M0,0",r:0,rx:0,ry:0,src:"",stroke:"#000","stroke-dasharray":"","stroke-linecap":"butt","stroke-linejoin":"butt","stroke-miterlimit":0,"stroke-opacity":1,"stroke-width":1,target:"_blank","text-anchor":"middle",title:"Raphael",transform:"",width:0,x:0,y:0},U=a._availableAnimAttrs={blur:C,"clip-rect":"csv",cx:C,cy:C,fill:"colour","fill-opacity":C,"font-size":C,height:C,opacity:C,path:"path",r:C,rx:C,ry:C,stroke:"colour","stroke-opacity":C,"stroke-width":C,transform:"transform",width:C,x:C,y:C},V=/[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]/g,W=/[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*/,X={hs:1,rg:1},Y=/,?([achlmqrstvxz]),?/gi,Z=/([achlmrqstvz])[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*)+)/ig,$=/([rstm])[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*)+)/ig,_=/(-?\d*\.?\d*(?:e[\-+]?\d+)?)[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*/ig,ba=a._radial_gradient=/^r(?:\(([^,]+?)[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u20
00\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*([^\)]+?)\))?/,bb={},bc=function(a,b){return a.key-b.key},bd=function(a,b){return Q(a)-Q(b)},be=function(){},bf=function(a){return a},bg=a._rectPath=function(a,b,c,d,e){if(e)return[["M",a+e,b],["l",c-e*2,0],["a",e,e,0,0,1,e,e],["l",0,d-e*2],["a",e,e,0,0,1,-e,e],["l",e*2-c,0],["a",e,e,0,0,1,-e,-e],["l",0,e*2-d],["a",e,e,0,0,1,e,-e],["z"]];return[["M",a,b],["l",c,0],["l",0,d],["l",-c,0],["z"]]},bh=function(a,b,c,d){d==null&&(d=c);return[["M",a,b],["m",0,-d],["a",c,d,0,1,1,0,2*d],["a",c,d,0,1,1,0,-2*d],["z"]]},bi=a._getPath={path:function(a){return a.attr("path")},circle:function(a){var b=a.attrs;return bh(b.cx,b.cy,b.r)},ellipse:function(a){var b=a.attrs;return bh(b.cx,b.cy,b.rx,b.ry)},rect:function(a){var b=a.attrs;return bg(b.x,b.y,b.width,b.height,b.r)},image:function(a){var b=a.attrs;return bg(b.x,b.y,b.width,b.height)},text:function(a){var b=a._getBBox();return bg(b.x,b.y,b.width,b.height)}},bj=a.mapPath=function(a,b){if(!b)return a;var c,d,e,f,g,h,i;a=bR(a);for(e=0,g=a.length;e<g;e++){i=a[e];for(f=1,h=i.length;f<h;f+=2)c=b.x(i[f],i[f+1]),d=b.y(i[f],i[f+1]),i[f]=c,i[f+1]=d}return a};a._g=h,a.type=h.win.SVGAngle||h.doc.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure","1.1")?"SVG":"VML";if(a.type=="VML"){var bk=h.doc.createElement("div"),bl;bk.innerHTML='<v:shape adj="1"/>',bl=bk.firstChild,bl.style.behavior="url(#default#VML)";if(!bl||typeof bl.adj!="object")return a.type=p;bk=null}a.svg=!(a.vml=a.type=="VML"),a._Paper=j,a.fn=k=j.prototype=a.prototype,a._id=0,a._oid=0,a.is=function(a,b){b=v.call(b);if(b=="finite")return!M[g](+a);if(b=="array")return a instanceof Array;return b=="null"&&a===null||b==typeof a&&a!==null||b=="object"&&a===Object(a)||b=="array"&&Array.isArray&&Array.isArray(a)||H.call(a).slice(8,-1).toLowerCase()==b},a.angle=function(b,c,d,e,f,g){if(f==null){var h=b-d,i=c-e;if(!h&&!i)return 0;return(180+w.atan2(-i,-h)*180/B+360)%360}return a.angle(b,c,f,g)-a.angle(d,e,f,g)},a.rad=function(a){return a%360*B/180},a.deg=function(a){return a*180/B%360},a.snapTo=function(b,c,d){d=a.is(d,"finite")?d:10;if(a.is(b,E)){var e=b.length;while(e--)if(z(b[e]-c)<=d)return b[e]}else{b=+b;var f=c%b;if(f<d)return c-f;if(f>b-d)return c-f+b}return c};var bn=a.createUUID=function(a,b){return function(){return"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(a,b).toUpperCase()}}(/[xy]/g,function(a){var b=w.random()*16|0,c=a=="x"?b:b&3|8;return c.toString(16)});a.setWindow=function(b){eve("raphael.setWindow",a,h.win,b),h.win=b,h.doc=h.win.document,a._engine.initWin&&a._engine.initWin(h.win)};var bo=function(b){if(a.vml){var c=/^\s+|\s+$/g,d;try{var e=new ActiveXObject("htmlfile");e.write("<body>"),e.close(),d=e.body}catch(f){d=createPopup().document.body}var g=d.createTextRange();bo=bv(function(a){try{d.style.color=r(a).replace(c,p);var b=g.queryCommandValue("ForeColor");b=(b&255)<<16|b&65280|(b&16711680)>>>16;return"#"+("000000"+b.toString(16)).slice(-6)}catch(e){return"none"}})}else{var i=h.doc.createElement("i");i.title="RaphaĆ«l Colour Picker",i.style.display="none",h.doc.body.appendChild(i),bo=bv(function(a){i.style.color=a;return h.doc.defaultView.getComputedStyle(i,p).getPropertyValue("color")})}return bo(b)},bp=function(){return"hsb("+[this.h,this.s,this.b]+")"},bq=function(){return"hsl("+[this.h,this.s,this.l]+")"},br=function(){return this.hex},bs=function(b,c,d){c==null&&a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b&&(d=b.b,c=b.g,b=b.r);if(c==null&&a.is(b,D)){var 
e=a.getRGB(b);b=e.r,c=e.g,d=e.b}if(b>1||c>1||d>1)b/=255,c/=255,d/=255;return[b,c,d]},bt=function(b,c,d,e){b*=255,c*=255,d*=255;var f={r:b,g:c,b:d,hex:a.rgb(b,c,d),toString:br};a.is(e,"finite")&&(f.opacity=e);return f};a.color=function(b){var c;a.is(b,"object")&&"h"in b&&"s"in b&&"b"in b?(c=a.hsb2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):a.is(b,"object")&&"h"in b&&"s"in b&&"l"in b?(c=a.hsl2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):(a.is(b,"string")&&(b=a.getRGB(b)),a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b?(c=a.rgb2hsl(b),b.h=c.h,b.s=c.s,b.l=c.l,c=a.rgb2hsb(b),b.v=c.b):(b={hex:"none"},b.r=b.g=b.b=b.h=b.s=b.v=b.l=-1)),b.toString=br;return b},a.hsb2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"b"in a&&(c=a.b,b=a.s,a=a.h,d=a.o),a*=360;var e,f,g,h,i;a=a%360/60,i=c*b,h=i*(1-z(a%2-1)),e=f=g=c-i,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.hsl2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"l"in a&&(c=a.l,b=a.s,a=a.h);if(a>1||b>1||c>1)a/=360,b/=100,c/=100;a*=360;var e,f,g,h,i;a=a%360/60,i=2*b*(c<.5?c:1-c),h=i*(1-z(a%2-1)),e=f=g=c-i/2,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.rgb2hsb=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g;f=x(a,b,c),g=f-y(a,b,c),d=g==0?null:f==a?(b-c)/g:f==b?(c-a)/g+2:(a-b)/g+4,d=(d+360)%6*60/360,e=g==0?0:g/f;return{h:d,s:e,b:f,toString:bp}},a.rgb2hsl=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g,h,i;g=x(a,b,c),h=y(a,b,c),i=g-h,d=i==0?null:g==a?(b-c)/i:g==b?(c-a)/i+2:(a-b)/i+4,d=(d+360)%6*60/360,f=(g+h)/2,e=i==0?0:f<.5?i/(2*f):i/(2-2*f);return{h:d,s:e,l:f,toString:bq}},a._path2string=function(){return this.join(",").replace(Y,"$1")};var bw=a._preload=function(a,b){var c=h.doc.createElement("img");c.style.cssText="position:absolute;left:-9999em;top:-9999em",c.onload=function(){b.call(this),this.onload=null,h.doc.body.removeChild(this)},c.onerror=function(){h.doc.body.removeChild(this)},h.doc.body.appendChild(c),c.src=a};a.getRGB=bv(function(b){if(!b||!!((b=r(b)).indexOf("-")+1))return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx};if(b=="none")return{r:-1,g:-1,b:-1,hex:"none",toString:bx};!X[g](b.toLowerCase().substring(0,2))&&b.charAt()!="#"&&(b=bo(b));var c,d,e,f,h,i,j,k=b.match(L);if(k){k[2]&&(f=R(k[2].substring(5),16),e=R(k[2].substring(3,5),16),d=R(k[2].substring(1,3),16)),k[3]&&(f=R((i=k[3].charAt(3))+i,16),e=R((i=k[3].charAt(2))+i,16),d=R((i=k[3].charAt(1))+i,16)),k[4]&&(j=k[4][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),k[1].toLowerCase().slice(0,4)=="rgba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100));if(k[5]){j=k[5][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="Ā°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsb2rgb(d,e,f,h)}if(k[6]){j=k[6][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="Ā°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsla"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsl2rgb(d,e,f,h)}k={r:d,g:e,b:f,toString:bx},k.hex="#"+(16777216|f|e<<8|d<<16).toString(16).slice(1),a.is(h,"finite")&&(k.opacity=h);return k}return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx}},a),a.hsb=bv(function(b,c,d){return 
a.hsb2rgb(b,c,d).hex}),a.hsl=bv(function(b,c,d){return a.hsl2rgb(b,c,d).hex}),a.rgb=bv(function(a,b,c){return"#"+(16777216|c|b<<8|a<<16).toString(16).slice(1)}),a.getColor=function(a){var b=this.getColor.start=this.getColor.start||{h:0,s:1,b:a||.75},c=this.hsb2rgb(b.h,b.s,b.b);b.h+=.075,b.h>1&&(b.h=0,b.s-=.2,b.s<=0&&(this.getColor.start={h:0,s:1,b:b.b}));return c.hex},a.getColor.reset=function(){delete this.start},a.parsePathString=function(b){if(!b)return null;var c=bz(b);if(c.arr)return bJ(c.arr);var d={a:7,c:6,h:1,l:2,m:2,r:4,q:4,s:4,t:2,v:1,z:0},e=[];a.is(b,E)&&a.is(b[0],E)&&(e=bJ(b)),e.length||r(b).replace(Z,function(a,b,c){var f=[],g=b.toLowerCase();c.replace(_,function(a,b){b&&f.push(+b)}),g=="m"&&f.length>2&&(e.push([b][n](f.splice(0,2))),g="l",b=b=="m"?"l":"L");if(g=="r")e.push([b][n](f));else while(f.length>=d[g]){e.push([b][n](f.splice(0,d[g])));if(!d[g])break}}),e.toString=a._path2string,c.arr=bJ(e);return e},a.parseTransformString=bv(function(b){if(!b)return null;var c={r:3,s:4,t:2,m:6},d=[];a.is(b,E)&&a.is(b[0],E)&&(d=bJ(b)),d.length||r(b).replace($,function(a,b,c){var e=[],f=v.call(b);c.replace(_,function(a,b){b&&e.push(+b)}),d.push([b][n](e))}),d.toString=a._path2string;return d});var bz=function(a){var b=bz.ps=bz.ps||{};b[a]?b[a].sleep=100:b[a]={sleep:100},setTimeout(function(){for(var c in b)b[g](c)&&c!=a&&(b[c].sleep--,!b[c].sleep&&delete b[c])});return b[a]};a.findDotsAtSegment=function(a,b,c,d,e,f,g,h,i){var j=1-i,k=A(j,3),l=A(j,2),m=i*i,n=m*i,o=k*a+l*3*i*c+j*3*i*i*e+n*g,p=k*b+l*3*i*d+j*3*i*i*f+n*h,q=a+2*i*(c-a)+m*(e-2*c+a),r=b+2*i*(d-b)+m*(f-2*d+b),s=c+2*i*(e-c)+m*(g-2*e+c),t=d+2*i*(f-d)+m*(h-2*f+d),u=j*a+i*c,v=j*b+i*d,x=j*e+i*g,y=j*f+i*h,z=90-w.atan2(q-s,r-t)*180/B;(q>s||r<t)&&(z+=180);return{x:o,y:p,m:{x:q,y:r},n:{x:s,y:t},start:{x:u,y:v},end:{x:x,y:y},alpha:z}},a.bezierBBox=function(b,c,d,e,f,g,h,i){a.is(b,"array")||(b=[b,c,d,e,f,g,h,i]);var j=bQ.apply(null,b);return{x:j.min.x,y:j.min.y,x2:j.max.x,y2:j.max.y,width:j.max.x-j.min.x,height:j.max.y-j.min.y}},a.isPointInsideBBox=function(a,b,c){return b>=a.x&&b<=a.x2&&c>=a.y&&c<=a.y2},a.isBBoxIntersect=function(b,c){var d=a.isPointInsideBBox;return d(c,b.x,b.y)||d(c,b.x2,b.y)||d(c,b.x,b.y2)||d(c,b.x2,b.y2)||d(b,c.x,c.y)||d(b,c.x2,c.y)||d(b,c.x,c.y2)||d(b,c.x2,c.y2)||(b.x<c.x2&&b.x>c.x||c.x<b.x2&&c.x>b.x)&&(b.y<c.y2&&b.y>c.y||c.y<b.y2&&c.y>b.y)},a.pathIntersection=function(a,b){return bH(a,b)},a.pathIntersectionNumber=function(a,b){return bH(a,b,1)},a.isPointInsidePath=function(b,c,d){var e=a.pathBBox(b);return a.isPointInsideBBox(e,c,d)&&bH(b,[["M",c,d],["H",e.x2+10]],1)%2==1},a._removedFactory=function(a){return function(){eve("raphael.log",null,"RaphaĆ«l: you are calling to method ā€œ"+a+"ā€ of removed object",a)}};var bI=a.pathBBox=function(a){var b=bz(a);if(b.bbox)return b.bbox;if(!a)return{x:0,y:0,width:0,height:0,x2:0,y2:0};a=bR(a);var c=0,d=0,e=[],f=[],g;for(var h=0,i=a.length;h<i;h++){g=a[h];if(g[0]=="M")c=g[1],d=g[2],e.push(c),f.push(d);else{var j=bQ(c,d,g[1],g[2],g[3],g[4],g[5],g[6]);e=e[n](j.min.x,j.max.x),f=f[n](j.min.y,j.max.y),c=g[5],d=g[6]}}var k=y[m](0,e),l=y[m](0,f),o=x[m](0,e),p=x[m](0,f),q={x:k,y:l,x2:o,y2:p,width:o-k,height:p-l};b.bbox=bm(q);return q},bJ=function(b){var c=bm(b);c.toString=a._path2string;return c},bK=a._pathToRelative=function(b){var c=bz(b);if(c.rel)return bJ(c.rel);if(!a.is(b,E)||!a.is(b&&b[0],E))b=a.parsePathString(b);var d=[],e=0,f=0,g=0,h=0,i=0;b[0][0]=="M"&&(e=b[0][1],f=b[0][2],g=e,h=f,i++,d.push(["M",e,f]));for(var j=i,k=b.length;j<k;j++){var 
l=d[j]=[],m=b[j];if(m[0]!=v.call(m[0])){l[0]=v.call(m[0]);switch(l[0]){case"a":l[1]=m[1],l[2]=m[2],l[3]=m[3],l[4]=m[4],l[5]=m[5],l[6]=+(m[6]-e).toFixed(3),l[7]=+(m[7]-f).toFixed(3);break;case"v":l[1]=+(m[1]-f).toFixed(3);break;case"m":g=m[1],h=m[2];default:for(var n=1,o=m.length;n<o;n++)l[n]=+(m[n]-(n%2?e:f)).toFixed(3)}}else{l=d[j]=[],m[0]=="m"&&(g=m[1]+e,h=m[2]+f);for(var p=0,q=m.length;p<q;p++)d[j][p]=m[p]}var r=d[j].length;switch(d[j][0]){case"z":e=g,f=h;break;case"h":e+=+d[j][r-1];break;case"v":f+=+d[j][r-1];break;default:e+=+d[j][r-2],f+=+d[j][r-1]}}d.toString=a._path2string,c.rel=bJ(d);return d},bL=a._pathToAbsolute=function(b){var c=bz(b);if(c.abs)return bJ(c.abs);if(!a.is(b,E)||!a.is(b&&b[0],E))b=a.parsePathString(b);if(!b||!b.length)return[["M",0,0]];var d=[],e=0,f=0,g=0,h=0,i=0;b[0][0]=="M"&&(e=+b[0][1],f=+b[0][2],g=e,h=f,i++,d[0]=["M",e,f]);var j=b.length==3&&b[0][0]=="M"&&b[1][0].toUpperCase()=="R"&&b[2][0].toUpperCase()=="Z";for(var k,l,m=i,o=b.length;m<o;m++){d.push(k=[]),l=b[m];if(l[0]!=S.call(l[0])){k[0]=S.call(l[0]);switch(k[0]){case"A":k[1]=l[1],k[2]=l[2],k[3]=l[3],k[4]=l[4],k[5]=l[5],k[6]=+(l[6]+e),k[7]=+(l[7]+f);break;case"V":k[1]=+l[1]+f;break;case"H":k[1]=+l[1]+e;break;case"R":var p=[e,f][n](l.slice(1));for(var q=2,r=p.length;q<r;q++)p[q]=+p[q]+e,p[++q]=+p[q]+f;d.pop(),d=d[n](by(p,j));break;case"M":g=+l[1]+e,h=+l[2]+f;default:for(q=1,r=l.length;q<r;q++)k[q]=+l[q]+(q%2?e:f)}}else if(l[0]=="R")p=[e,f][n](l.slice(1)),d.pop(),d=d[n](by(p,j)),k=["R"][n](l.slice(-2));else for(var s=0,t=l.length;s<t;s++)k[s]=l[s];switch(k[0]){case"Z":e=g,f=h;break;case"H":e=k[1];break;case"V":f=k[1];break;case"M":g=k[k.length-2],h=k[k.length-1];default:e=k[k.length-2],f=k[k.length-1]}}d.toString=a._path2string,c.abs=bJ(d);return d},bM=function(a,b,c,d){return[a,b,c,d,c,d]},bN=function(a,b,c,d,e,f){var g=1/3,h=2/3;return[g*a+h*c,g*b+h*d,g*e+h*c,g*f+h*d,e,f]},bO=function(a,b,c,d,e,f,g,h,i,j){var k=B*120/180,l=B/180*(+e||0),m=[],o,p=bv(function(a,b,c){var d=a*w.cos(c)-b*w.sin(c),e=a*w.sin(c)+b*w.cos(c);return{x:d,y:e}});if(!j){o=p(a,b,-l),a=o.x,b=o.y,o=p(h,i,-l),h=o.x,i=o.y;var q=w.cos(B/180*e),r=w.sin(B/180*e),t=(a-h)/2,u=(b-i)/2,v=t*t/(c*c)+u*u/(d*d);v>1&&(v=w.sqrt(v),c=v*c,d=v*d);var x=c*c,y=d*d,A=(f==g?-1:1)*w.sqrt(z((x*y-x*u*u-y*t*t)/(x*u*u+y*t*t))),C=A*c*u/d+(a+h)/2,D=A*-d*t/c+(b+i)/2,E=w.asin(((b-D)/d).toFixed(9)),F=w.asin(((i-D)/d).toFixed(9));E=a<C?B-E:E,F=h<C?B-F:F,E<0&&(E=B*2+E),F<0&&(F=B*2+F),g&&E>F&&(E=E-B*2),!g&&F>E&&(F=F-B*2)}else E=j[0],F=j[1],C=j[2],D=j[3];var G=F-E;if(z(G)>k){var H=F,I=h,J=i;F=E+k*(g&&F>E?1:-1),h=C+c*w.cos(F),i=D+d*w.sin(F),m=bO(h,i,c,d,e,0,g,I,J,[F,H,C,D])}G=F-E;var K=w.cos(E),L=w.sin(E),M=w.cos(F),N=w.sin(F),O=w.tan(G/4),P=4/3*c*O,Q=4/3*d*O,R=[a,b],S=[a+P*L,b-Q*K],T=[h+P*N,i-Q*M],U=[h,i];S[0]=2*R[0]-S[0],S[1]=2*R[1]-S[1];if(j)return[S,T,U][n](m);m=[S,T,U][n](m).join()[s](",");var V=[];for(var W=0,X=m.length;W<X;W++)V[W]=W%2?p(m[W-1],m[W],l).y:p(m[W],m[W+1],l).x;return V},bP=function(a,b,c,d,e,f,g,h,i){var j=1-i;return{x:A(j,3)*a+A(j,2)*3*i*c+j*3*i*i*e+A(i,3)*g,y:A(j,3)*b+A(j,2)*3*i*d+j*3*i*i*f+A(i,3)*h}},bQ=bv(function(a,b,c,d,e,f,g,h){var 
i=e-2*c+a-(g-2*e+c),j=2*(c-a)-2*(e-c),k=a-c,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,o=[b,h],p=[a,g],q;z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y)),i=f-2*d+b-(h-2*f+d),j=2*(d-b)-2*(f-d),k=b-d,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y));return{min:{x:y[m](0,p),y:y[m](0,o)},max:{x:x[m](0,p),y:x[m](0,o)}}}),bR=a._path2curve=bv(function(a,b){var c=!b&&bz(a);if(!b&&c.curve)return bJ(c.curve);var d=bL(a),e=b&&bL(b),f={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},g={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},h=function(a,b){var c,d;if(!a)return["C",b.x,b.y,b.x,b.y,b.x,b.y];!(a[0]in{T:1,Q:1})&&(b.qx=b.qy=null);switch(a[0]){case"M":b.X=a[1],b.Y=a[2];break;case"A":a=["C"][n](bO[m](0,[b.x,b.y][n](a.slice(1))));break;case"S":c=b.x+(b.x-(b.bx||b.x)),d=b.y+(b.y-(b.by||b.y)),a=["C",c,d][n](a.slice(1));break;case"T":b.qx=b.x+(b.x-(b.qx||b.x)),b.qy=b.y+(b.y-(b.qy||b.y)),a=["C"][n](bN(b.x,b.y,b.qx,b.qy,a[1],a[2]));break;case"Q":b.qx=a[1],b.qy=a[2],a=["C"][n](bN(b.x,b.y,a[1],a[2],a[3],a[4]));break;case"L":a=["C"][n](bM(b.x,b.y,a[1],a[2]));break;case"H":a=["C"][n](bM(b.x,b.y,a[1],b.y));break;case"V":a=["C"][n](bM(b.x,b.y,b.x,a[1]));break;case"Z":a=["C"][n](bM(b.x,b.y,b.X,b.Y))}return a},i=function(a,b){if(a[b].length>7){a[b].shift();var c=a[b];while(c.length)a.splice(b++,0,["C"][n](c.splice(0,6)));a.splice(b,1),l=x(d.length,e&&e.length||0)}},j=function(a,b,c,f,g){a&&b&&a[g][0]=="M"&&b[g][0]!="M"&&(b.splice(g,0,["M",f.x,f.y]),c.bx=0,c.by=0,c.x=a[g][1],c.y=a[g][2],l=x(d.length,e&&e.length||0))};for(var k=0,l=x(d.length,e&&e.length||0);k<l;k++){d[k]=h(d[k],f),i(d,k),e&&(e[k]=h(e[k],g)),e&&i(e,k),j(d,e,f,g,k),j(e,d,g,f,k);var o=d[k],p=e&&e[k],q=o.length,r=e&&p.length;f.x=o[q-2],f.y=o[q-1],f.bx=Q(o[q-4])||f.x,f.by=Q(o[q-3])||f.y,g.bx=e&&(Q(p[r-4])||g.x),g.by=e&&(Q(p[r-3])||g.y),g.x=e&&p[r-2],g.y=e&&p[r-1]}e||(c.curve=bJ(d));return e?[d,e]:d},null,bJ),bS=a._parseDots=bv(function(b){var c=[];for(var d=0,e=b.length;d<e;d++){var f={},g=b[d].match(/^([^:]*):?([\d\.]*)/);f.color=a.getRGB(g[1]);if(f.color.error)return null;f.color=f.color.hex,g[2]&&(f.offset=g[2]+"%"),c.push(f)}for(d=1,e=c.length-1;d<e;d++)if(!c[d].offset){var h=Q(c[d-1].offset||0),i=0;for(var j=d+1;j<e;j++)if(c[j].offset){i=c[j].offset;break}i||(i=100,j=e),i=Q(i);var k=(i-h)/(j-d+1);for(;d<j;d++)h+=k,c[d].offset=h+"%"}return c}),bT=a._tear=function(a,b){a==b.top&&(b.top=a.prev),a==b.bottom&&(b.bottom=a.next),a.next&&(a.next.prev=a.prev),a.prev&&(a.prev.next=a.next)},bU=a._tofront=function(a,b){b.top!==a&&(bT(a,b),a.next=null,a.prev=b.top,b.top.next=a,b.top=a)},bV=a._toback=function(a,b){b.bottom!==a&&(bT(a,b),a.next=b.bottom,a.prev=null,b.bottom.prev=a,b.bottom=a)},bW=a._insertafter=function(a,b,c){bT(a,c),b==c.top&&(c.top=a),b.next&&(b.next.prev=a),a.next=b.next,a.prev=b,b.next=a},bX=a._insertbefore=function(a,b,c){bT(a,c),b==c.bottom&&(c.bottom=a),b.prev&&(b.prev.next=a),a.prev=b.prev,b.prev=a,a.next=b},bY=a.toMatrix=function(a,b){var c=bI(a),d={_:{transform:p},getBBox:function(){return c}};b$(d,b);return d.matrix},bZ=a.transformPath=function(a,b){return bj(a,bY(a,b))},b$=a._extractTransform=function(b,c){if(c==null)return b._.transform;c=r(c).replace(/\.{3}|\u2026/g,b._.transform||p);var d=a.parseTransformString(c),e=0,f=0,g=0,h=1,i=1,j=b._,k=new 
cb;j.transform=d||[];if(d)for(var l=0,m=d.length;l<m;l++){var n=d[l],o=n.length,q=r(n[0]).toLowerCase(),s=n[0]!=q,t=s?k.invert():0,u,v,w,x,y;q=="t"&&o==3?s?(u=t.x(0,0),v=t.y(0,0),w=t.x(n[1],n[2]),x=t.y(n[1],n[2]),k.translate(w-u,x-v)):k.translate(n[1],n[2]):q=="r"?o==2?(y=y||b.getBBox(1),k.rotate(n[1],y.x+y.width/2,y.y+y.height/2),e+=n[1]):o==4&&(s?(w=t.x(n[2],n[3]),x=t.y(n[2],n[3]),k.rotate(n[1],w,x)):k.rotate(n[1],n[2],n[3]),e+=n[1]):q=="s"?o==2||o==3?(y=y||b.getBBox(1),k.scale(n[1],n[o-1],y.x+y.width/2,y.y+y.height/2),h*=n[1],i*=n[o-1]):o==5&&(s?(w=t.x(n[3],n[4]),x=t.y(n[3],n[4]),k.scale(n[1],n[2],w,x)):k.scale(n[1],n[2],n[3],n[4]),h*=n[1],i*=n[2]):q=="m"&&o==7&&k.add(n[1],n[2],n[3],n[4],n[5],n[6]),j.dirtyT=1,b.matrix=k}b.matrix=k,j.sx=h,j.sy=i,j.deg=e,j.dx=f=k.e,j.dy=g=k.f,h==1&&i==1&&!e&&j.bbox?(j.bbox.x+=+f,j.bbox.y+=+g):j.dirtyT=1},b_=function(a){var b=a[0];switch(b.toLowerCase()){case"t":return[b,0,0];case"m":return[b,1,0,0,1,0,0];case"r":return a.length==4?[b,0,a[2],a[3]]:[b,0];case"s":return a.length==5?[b,1,1,a[3],a[4]]:a.length==3?[b,1,1]:[b,1]}},ca=a._equaliseTransform=function(b,c){c=r(c).replace(/\.{3}|\u2026/g,b),b=a.parseTransformString(b)||[],c=a.parseTransformString(c)||[];var d=x(b.length,c.length),e=[],f=[],g=0,h,i,j,k;for(;g<d;g++){j=b[g]||b_(c[g]),k=c[g]||b_(j);if(j[0]!=k[0]||j[0].toLowerCase()=="r"&&(j[2]!=k[2]||j[3]!=k[3])||j[0].toLowerCase()=="s"&&(j[3]!=k[3]||j[4]!=k[4]))return;e[g]=[],f[g]=[];for(h=0,i=x(j.length,k.length);h<i;h++)h in j&&(e[g][h]=j[h]),h in k&&(f[g][h]=k[h])}return{from:e,to:f}};a._getContainer=function(b,c,d,e){var f;f=e==null&&!a.is(b,"object")?h.doc.getElementById(b):b;if(f!=null){if(f.tagName)return c==null?{container:f,width:f.style.pixelWidth||f.offsetWidth,height:f.style.pixelHeight||f.offsetHeight}:{container:f,width:c,height:d};return{container:1,x:b,y:c,width:d,height:e}}},a.pathToRelative=bK,a._engine={},a.path2curve=bR,a.matrix=function(a,b,c,d,e,f){return new cb(a,b,c,d,e,f)},function(b){function d(a){var b=w.sqrt(c(a));a[0]&&(a[0]/=b),a[1]&&(a[1]/=b)}function c(a){return a[0]*a[0]+a[1]*a[1]}b.add=function(a,b,c,d,e,f){var g=[[],[],[]],h=[[this.a,this.c,this.e],[this.b,this.d,this.f],[0,0,1]],i=[[a,c,e],[b,d,f],[0,0,1]],j,k,l,m;a&&a instanceof cb&&(i=[[a.a,a.c,a.e],[a.b,a.d,a.f],[0,0,1]]);for(j=0;j<3;j++)for(k=0;k<3;k++){m=0;for(l=0;l<3;l++)m+=h[j][l]*i[l][k];g[j][k]=m}this.a=g[0][0],this.b=g[1][0],this.c=g[0][1],this.d=g[1][1],this.e=g[0][2],this.f=g[1][2]},b.invert=function(){var a=this,b=a.a*a.d-a.b*a.c;return new cb(a.d/b,-a.b/b,-a.c/b,a.a/b,(a.c*a.f-a.d*a.e)/b,(a.b*a.e-a.a*a.f)/b)},b.clone=function(){return new cb(this.a,this.b,this.c,this.d,this.e,this.f)},b.translate=function(a,b){this.add(1,0,0,1,a,b)},b.scale=function(a,b,c,d){b==null&&(b=a),(c||d)&&this.add(1,0,0,1,c,d),this.add(a,0,0,b,0,0),(c||d)&&this.add(1,0,0,1,-c,-d)},b.rotate=function(b,c,d){b=a.rad(b),c=c||0,d=d||0;var e=+w.cos(b).toFixed(9),f=+w.sin(b).toFixed(9);this.add(e,f,-f,e,c,d),this.add(1,0,0,1,-c,-d)},b.x=function(a,b){return a*this.a+b*this.c+this.e},b.y=function(a,b){return a*this.b+b*this.d+this.f},b.get=function(a){return+this[r.fromCharCode(97+a)].toFixed(4)},b.toString=function(){return a.svg?"matrix("+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)].join()+")":[this.get(0),this.get(2),this.get(1),this.get(3),0,0].join()},b.toFilter=function(){return"progid:DXImageTransform.Microsoft.Matrix(M11="+this.get(0)+", M12="+this.get(2)+", M21="+this.get(1)+", M22="+this.get(3)+", Dx="+this.get(4)+", Dy="+this.get(5)+", 
sizingmethod='auto expand')"},b.offset=function(){return[this.e.toFixed(4),this.f.toFixed(4)]},b.split=function(){var b={};b.dx=this.e,b.dy=this.f;var e=[[this.a,this.c],[this.b,this.d]];b.scalex=w.sqrt(c(e[0])),d(e[0]),b.shear=e[0][0]*e[1][0]+e[0][1]*e[1][1],e[1]=[e[1][0]-e[0][0]*b.shear,e[1][1]-e[0][1]*b.shear],b.scaley=w.sqrt(c(e[1])),d(e[1]),b.shear/=b.scaley;var f=-e[0][1],g=e[1][1];g<0?(b.rotate=a.deg(w.acos(g)),f<0&&(b.rotate=360-b.rotate)):b.rotate=a.deg(w.asin(f)),b.isSimple=!+b.shear.toFixed(9)&&(b.scalex.toFixed(9)==b.scaley.toFixed(9)||!b.rotate),b.isSuperSimple=!+b.shear.toFixed(9)&&b.scalex.toFixed(9)==b.scaley.toFixed(9)&&!b.rotate,b.noRotation=!+b.shear.toFixed(9)&&!b.rotate;return b},b.toTransformString=function(a){var b=a||this[s]();if(b.isSimple){b.scalex=+b.scalex.toFixed(4),b.scaley=+b.scaley.toFixed(4),b.rotate=+b.rotate.toFixed(4);return(b.dx||b.dy?"t"+[b.dx,b.dy]:p)+(b.scalex!=1||b.scaley!=1?"s"+[b.scalex,b.scaley,0,0]:p)+(b.rotate?"r"+[b.rotate,0,0]:p)}return"m"+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)]}}(cb.prototype);var cc=navigator.userAgent.match(/Version\/(.*?)\s/)||navigator.userAgent.match(/Chrome\/(\d+)/);navigator.vendor=="Apple Computer, Inc."&&(cc&&cc[1]<4||navigator.platform.slice(0,2)=="iP")||navigator.vendor=="Google Inc."&&cc&&cc[1]<8?k.safari=function(){var a=this.rect(-99,-99,this.width+99,this.height+99).attr({stroke:"none"});setTimeout(function(){a.remove()})}:k.safari=be;var cd=function(){this.returnValue=!1},ce=function(){return this.originalEvent.preventDefault()},cf=function(){this.cancelBubble=!0},cg=function(){return this.originalEvent.stopPropagation()},ch=function(){if(h.doc.addEventListener)return function(a,b,c,d){var e=o&&u[b]?u[b]:b,f=function(e){var f=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,i=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,j=e.clientX+i,k=e.clientY+f;if(o&&u[g](b))for(var l=0,m=e.targetTouches&&e.targetTouches.length;l<m;l++)if(e.targetTouches[l].target==a){var n=e;e=e.targetTouches[l],e.originalEvent=n,e.preventDefault=ce,e.stopPropagation=cg;break}return c.call(d,e,j,k)};a.addEventListener(e,f,!1);return function(){a.removeEventListener(e,f,!1);return!0}};if(h.doc.attachEvent)return function(a,b,c,d){var e=function(a){a=a||h.win.event;var b=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,e=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,f=a.clientX+e,g=a.clientY+b;a.preventDefault=a.preventDefault||cd,a.stopPropagation=a.stopPropagation||cf;return c.call(d,a,f,g)};a.attachEvent("on"+b,e);var f=function(){a.detachEvent("on"+b,e);return!0};return f}}(),ci=[],cj=function(a){var b=a.clientX,c=a.clientY,d=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,e=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,f,g=ci.length;while(g--){f=ci[g];if(o){var i=a.touches.length,j;while(i--){j=a.touches[i];if(j.identifier==f.el._drag.id){b=j.clientX,c=j.clientY,(a.originalEvent?a.originalEvent:a).preventDefault();break}}}else a.preventDefault();var k=f.el.node,l,m=k.nextSibling,n=k.parentNode,p=k.style.display;h.win.opera&&n.removeChild(k),k.style.display="none",l=f.el.paper.getElementByPoint(b,c),k.style.display=p,h.win.opera&&(m?n.insertBefore(k,m):n.appendChild(k)),l&&eve("raphael.drag.over."+f.el.id,f.el,l),b+=e,c+=d,eve("raphael.drag.move."+f.el.id,f.move_scope||f.el,b-f.el._drag.x,c-f.el._drag.y,b,c,a)}},ck=function(b){a.unmousemove(cj).unmouseup(ck);var 
c=ci.length,d;while(c--)d=ci[c],d.el._drag={},eve("raphael.drag.end."+d.el.id,d.end_scope||d.start_scope||d.move_scope||d.el,b);ci=[]},cl=a.el={};for(var cm=t.length;cm--;)(function(b){a[b]=cl[b]=function(c,d){a.is(c,"function")&&(this.events=this.events||[],this.events.push({name:b,f:c,unbind:ch(this.shape||this.node||h.doc,b,c,d||this)}));return this},a["un"+b]=cl["un"+b]=function(a){var c=this.events||[],d=c.length;while(d--)if(c[d].name==b&&c[d].f==a){c[d].unbind(),c.splice(d,1),!c.length&&delete this.events;return this}return this}})(t[cm]);cl.data=function(b,c){var d=bb[this.id]=bb[this.id]||{};if(arguments.length==1){if(a.is(b,"object")){for(var e in b)b[g](e)&&this.data(e,b[e]);return this}eve("raphael.data.get."+this.id,this,d[b],b);return d[b]}d[b]=c,eve("raphael.data.set."+this.id,this,c,b);return this},cl.removeData=function(a){a==null?bb[this.id]={}:bb[this.id]&&delete bb[this.id][a];return this},cl.hover=function(a,b,c,d){return this.mouseover(a,c).mouseout(b,d||c)},cl.unhover=function(a,b){return this.unmouseover(a).unmouseout(b)};var cn=[];cl.drag=function(b,c,d,e,f,g){function i(i){(i.originalEvent||i).preventDefault();var j=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,k=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft;this._drag.x=i.clientX+k,this._drag.y=i.clientY+j,this._drag.id=i.identifier,!ci.length&&a.mousemove(cj).mouseup(ck),ci.push({el:this,move_scope:e,start_scope:f,end_scope:g}),c&&eve.on("raphael.drag.start."+this.id,c),b&&eve.on("raphael.drag.move."+this.id,b),d&&eve.on("raphael.drag.end."+this.id,d),eve("raphael.drag.start."+this.id,f||e||this,i.clientX+k,i.clientY+j,i)}this._drag={},cn.push({el:this,start:i}),this.mousedown(i);return this},cl.onDragOver=function(a){a?eve.on("raphael.drag.over."+this.id,a):eve.unbind("raphael.drag.over."+this.id)},cl.undrag=function(){var b=cn.length;while(b--)cn[b].el==this&&(this.unmousedown(cn[b].start),cn.splice(b,1),eve.unbind("raphael.drag.*."+this.id));!cn.length&&a.unmousemove(cj).unmouseup(ck)},k.circle=function(b,c,d){var e=a._engine.circle(this,b||0,c||0,d||0);this.__set__&&this.__set__.push(e);return e},k.rect=function(b,c,d,e,f){var g=a._engine.rect(this,b||0,c||0,d||0,e||0,f||0);this.__set__&&this.__set__.push(g);return g},k.ellipse=function(b,c,d,e){var f=a._engine.ellipse(this,b||0,c||0,d||0,e||0);this.__set__&&this.__set__.push(f);return f},k.path=function(b){b&&!a.is(b,D)&&!a.is(b[0],E)&&(b+=p);var c=a._engine.path(a.format[m](a,arguments),this);this.__set__&&this.__set__.push(c);return c},k.image=function(b,c,d,e,f){var g=a._engine.image(this,b||"about:blank",c||0,d||0,e||0,f||0);this.__set__&&this.__set__.push(g);return g},k.text=function(b,c,d){var e=a._engine.text(this,b||0,c||0,r(d));this.__set__&&this.__set__.push(e);return e},k.set=function(b){!a.is(b,"array")&&(b=Array.prototype.splice.call(arguments,0,arguments.length));var c=new cG(b);this.__set__&&this.__set__.push(c);return c},k.setStart=function(a){this.__set__=a||this.set()},k.setFinish=function(a){var b=this.__set__;delete this.__set__;return b},k.setSize=function(b,c){return a._engine.setSize.call(this,b,c)},k.setViewBox=function(b,c,d,e,f){return a._engine.setViewBox.call(this,b,c,d,e,f)},k.top=k.bottom=null,k.raphael=a;var co=function(a){var 
b=a.getBoundingClientRect(),c=a.ownerDocument,d=c.body,e=c.documentElement,f=e.clientTop||d.clientTop||0,g=e.clientLeft||d.clientLeft||0,i=b.top+(h.win.pageYOffset||e.scrollTop||d.scrollTop)-f,j=b.left+(h.win.pageXOffset||e.scrollLeft||d.scrollLeft)-g;return{y:i,x:j}};k.getElementByPoint=function(a,b){var c=this,d=c.canvas,e=h.doc.elementFromPoint(a,b);if(h.win.opera&&e.tagName=="svg"){var f=co(d),g=d.createSVGRect();g.x=a-f.x,g.y=b-f.y,g.width=g.height=1;var i=d.getIntersectionList(g,null);i.length&&(e=i[i.length-1])}if(!e)return null;while(e.parentNode&&e!=d.parentNode&&!e.raphael)e=e.parentNode;e==c.canvas.parentNode&&(e=d),e=e&&e.raphael?c.getById(e.raphaelid):null;return e},k.getById=function(a){var b=this.bottom;while(b){if(b.id==a)return b;b=b.next}return null},k.forEach=function(a,b){var c=this.bottom;while(c){if(a.call(b,c)===!1)return this;c=c.next}return this},k.getElementsByPoint=function(a,b){var c=this.set();this.forEach(function(d){d.isPointInside(a,b)&&c.push(d)});return c},cl.isPointInside=function(b,c){var d=this.realPath=this.realPath||bi[this.type](this);return a.isPointInsidePath(d,b,c)},cl.getBBox=function(a){if(this.removed)return{};var b=this._;if(a){if(b.dirty||!b.bboxwt)this.realPath=bi[this.type](this),b.bboxwt=bI(this.realPath),b.bboxwt.toString=cq,b.dirty=0;return b.bboxwt}if(b.dirty||b.dirtyT||!b.bbox){if(b.dirty||!this.realPath)b.bboxwt=0,this.realPath=bi[this.type](this);b.bbox=bI(bj(this.realPath,this.matrix)),b.bbox.toString=cq,b.dirty=b.dirtyT=0}return b.bbox},cl.clone=function(){if(this.removed)return null;var a=this.paper[this.type]().attr(this.attr());this.__set__&&this.__set__.push(a);return a},cl.glow=function(a){if(this.type=="text")return null;a=a||{};var b={width:(a.width||10)+(+this.attr("stroke-width")||1),fill:a.fill||!1,opacity:a.opacity||.5,offsetx:a.offsetx||0,offsety:a.offsety||0,color:a.color||"#000"},c=b.width/2,d=this.paper,e=d.set(),f=this.realPath||bi[this.type](this);f=this.matrix?bj(f,this.matrix):f;for(var g=1;g<c+1;g++)e.push(d.path(f).attr({stroke:b.color,fill:b.fill?b.color:"none","stroke-linejoin":"round","stroke-linecap":"round","stroke-width":+(b.width/c*g).toFixed(3),opacity:+(b.opacity/c).toFixed(3)}));return e.insertBefore(this).translate(b.offsetx,b.offsety)};var cr={},cs=function(b,c,d,e,f,g,h,i,j){return j==null?bB(b,c,d,e,f,g,h,i):a.findDotsAtSegment(b,c,d,e,f,g,h,i,bC(b,c,d,e,f,g,h,i,j))},ct=function(b,c){return function(d,e,f){d=bR(d);var g,h,i,j,k="",l={},m,n=0;for(var o=0,p=d.length;o<p;o++){i=d[o];if(i[0]=="M")g=+i[1],h=+i[2];else{j=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6]);if(n+j>e){if(c&&!l.start){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n),k+=["C"+m.start.x,m.start.y,m.m.x,m.m.y,m.x,m.y];if(f)return k;l.start=k,k=["M"+m.x,m.y+"C"+m.n.x,m.n.y,m.end.x,m.end.y,i[5],i[6]].join(),n+=j,g=+i[5],h=+i[6];continue}if(!b&&!c){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n);return{x:m.x,y:m.y,alpha:m.alpha}}}n+=j,g=+i[5],h=+i[6]}k+=i.shift()+i}l.end=k,m=b?n:c?l:a.findDotsAtSegment(g,h,i[0],i[1],i[2],i[3],i[4],i[5],1),m.alpha&&(m={x:m.x,y:m.y,alpha:m.alpha});return m}},cu=ct(1),cv=ct(),cw=ct(0,1);a.getTotalLength=cu,a.getPointAtLength=cv,a.getSubpath=function(a,b,c){if(this.getTotalLength(a)-c<1e-6)return cw(a,b).end;var d=cw(a,c,1);return b?cw(d,b).end:d},cl.getTotalLength=function(){if(this.type=="path"){if(this.node.getTotalLength)return this.node.getTotalLength();return cu(this.attrs.path)}},cl.getPointAtLength=function(a){if(this.type=="path")return cv(this.attrs.path,a)},cl.getSubpath=function(b,c){if(this.type=="path")return 
a.getSubpath(this.attrs.path,b,c)};var cx=a.easing_formulas={linear:function(a){return a},"<":function(a){return A(a,1.7)},">":function(a){return A(a,.48)},"<>":function(a){var b=.48-a/1.04,c=w.sqrt(.1734+b*b),d=c-b,e=A(z(d),1/3)*(d<0?-1:1),f=-c-b,g=A(z(f),1/3)*(f<0?-1:1),h=e+g+.5;return(1-h)*3*h*h+h*h*h},backIn:function(a){var b=1.70158;return a*a*((b+1)*a-b)},backOut:function(a){a=a-1;var b=1.70158;return a*a*((b+1)*a+b)+1},elastic:function(a){if(a==!!a)return a;return A(2,-10*a)*w.sin((a-.075)*2*B/.3)+1},bounce:function(a){var b=7.5625,c=2.75,d;a<1/c?d=b*a*a:a<2/c?(a-=1.5/c,d=b*a*a+.75):a<2.5/c?(a-=2.25/c,d=b*a*a+.9375):(a-=2.625/c,d=b*a*a+.984375);return d}};cx.easeIn=cx["ease-in"]=cx["<"],cx.easeOut=cx["ease-out"]=cx[">"],cx.easeInOut=cx["ease-in-out"]=cx["<>"],cx["back-in"]=cx.backIn,cx["back-out"]=cx.backOut;var cy=[],cz=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(a){setTimeout(a,16)},cA=function(){var b=+(new Date),c=0;for(;c<cy.length;c++){var d=cy[c];if(d.el.removed||d.paused)continue;var e=b-d.start,f=d.ms,h=d.easing,i=d.from,j=d.diff,k=d.to,l=d.t,m=d.el,o={},p,r={},s;d.initstatus?(e=(d.initstatus*d.anim.top-d.prev)/(d.percent-d.prev)*f,d.status=d.initstatus,delete d.initstatus,d.stop&&cy.splice(c--,1)):d.status=(d.prev+(d.percent-d.prev)*(e/f))/d.anim.top;if(e<0)continue;if(e<f){var t=h(e/f);for(var u in i)if(i[g](u)){switch(U[u]){case C:p=+i[u]+t*f*j[u];break;case"colour":p="rgb("+[cB(O(i[u].r+t*f*j[u].r)),cB(O(i[u].g+t*f*j[u].g)),cB(O(i[u].b+t*f*j[u].b))].join(",")+")";break;case"path":p=[];for(var v=0,w=i[u].length;v<w;v++){p[v]=[i[u][v][0]];for(var x=1,y=i[u][v].length;x<y;x++)p[v][x]=+i[u][v][x]+t*f*j[u][v][x];p[v]=p[v].join(q)}p=p.join(q);break;case"transform":if(j[u].real){p=[];for(v=0,w=i[u].length;v<w;v++){p[v]=[i[u][v][0]];for(x=1,y=i[u][v].length;x<y;x++)p[v][x]=i[u][v][x]+t*f*j[u][v][x]}}else{var z=function(a){return+i[u][a]+t*f*j[u][a]};p=[["m",z(0),z(1),z(2),z(3),z(4),z(5)]]}break;case"csv":if(u=="clip-rect"){p=[],v=4;while(v--)p[v]=+i[u][v]+t*f*j[u][v]}break;default:var A=[][n](i[u]);p=[],v=m.paper.customAttributes[u].length;while(v--)p[v]=+A[v]+t*f*j[u][v]}o[u]=p}m.attr(o),function(a,b,c){setTimeout(function(){eve("raphael.anim.frame."+a,b,c)})}(m.id,m,d.anim)}else{(function(b,c,d){setTimeout(function(){eve("raphael.anim.frame."+c.id,c,d),eve("raphael.anim.finish."+c.id,c,d),a.is(b,"function")&&b.call(c)})})(d.callback,m,d.anim),m.attr(k),cy.splice(c--,1);if(d.repeat>1&&!d.next){for(s in k)k[g](s)&&(r[s]=d.totalOrigin[s]);d.el.attr(r),cE(d.anim,d.el,d.anim.percents[0],null,d.totalOrigin,d.repeat-1)}d.next&&!d.stop&&cE(d.anim,d.el,d.next,null,d.totalOrigin,d.repeat)}}a.svg&&m&&m.paper&&m.paper.safari(),cy.length&&cz(cA)},cB=function(a){return a>255?255:a<0?0:a};cl.animateWith=function(b,c,d,e,f,g){var h=this;if(h.removed){g&&g.call(h);return h}var i=d instanceof cD?d:a.animation(d,e,f,g),j,k;cE(i,h,i.percents[0],null,h.attr());for(var l=0,m=cy.length;l<m;l++)if(cy[l].anim==c&&cy[l].el==b){cy[m-1].start=cy[l].start;break}return h},cl.onAnimation=function(a){a?eve.on("raphael.anim.frame."+this.id,a):eve.unbind("raphael.anim.frame."+this.id);return this},cD.prototype.delay=function(a){var b=new cD(this.anim,this.ms);b.times=this.times,b.del=+a||0;return b},cD.prototype.repeat=function(a){var b=new cD(this.anim,this.ms);b.del=this.del,b.times=w.floor(x(a,0))||1;return b},a.animation=function(b,c,d,e){if(b instanceof cD)return 
b;if(a.is(d,"function")||!d)e=e||d||null,d=null;b=Object(b),c=+c||0;var f={},h,i;for(i in b)b[g](i)&&Q(i)!=i&&Q(i)+"%"!=i&&(h=!0,f[i]=b[i]);if(!h)return new cD(b,c);d&&(f.easing=d),e&&(f.callback=e);return new cD({100:f},c)},cl.animate=function(b,c,d,e){var f=this;if(f.removed){e&&e.call(f);return f}var g=b instanceof cD?b:a.animation(b,c,d,e);cE(g,f,g.percents[0],null,f.attr());return f},cl.setTime=function(a,b){a&&b!=null&&this.status(a,y(b,a.ms)/a.ms);return this},cl.status=function(a,b){var c=[],d=0,e,f;if(b!=null){cE(a,this,-1,y(b,1));return this}e=cy.length;for(;d<e;d++){f=cy[d];if(f.el.id==this.id&&(!a||f.anim==a)){if(a)return f.status;c.push({anim:f.anim,status:f.status})}}if(a)return 0;return c},cl.pause=function(a){for(var b=0;b<cy.length;b++)cy[b].el.id==this.id&&(!a||cy[b].anim==a)&&eve("raphael.anim.pause."+this.id,this,cy[b].anim)!==!1&&(cy[b].paused=!0);return this},cl.resume=function(a){for(var b=0;b<cy.length;b++)if(cy[b].el.id==this.id&&(!a||cy[b].anim==a)){var c=cy[b];eve("raphael.anim.resume."+this.id,this,c.anim)!==!1&&(delete c.paused,this.status(c.anim,c.status))}return this},cl.stop=function(a){for(var b=0;b<cy.length;b++)cy[b].el.id==this.id&&(!a||cy[b].anim==a)&&eve("raphael.anim.stop."+this.id,this,cy[b].anim)!==!1&&cy.splice(b--,1);return this},eve.on("raphael.remove",cF),eve.on("raphael.clear",cF),cl.toString=function(){return"RaphaĆ«lā€™s object"};var cG=function(a){this.items=[],this.length=0,this.type="set";if(a)for(var b=0,c=a.length;b<c;b++)a[b]&&(a[b].constructor==cl.constructor||a[b].constructor==cG)&&(this[this.items.length]=this.items[this.items.length]=a[b],this.length++)},cH=cG.prototype;cH.push=function(){var a,b;for(var c=0,d=arguments.length;c<d;c++)a=arguments[c],a&&(a.constructor==cl.constructor||a.constructor==cG)&&(b=this.items.length,this[b]=this.items[b]=a,this.length++);return this},cH.pop=function(){this.length&&delete this[this.length--];return this.items.pop()},cH.forEach=function(a,b){for(var c=0,d=this.items.length;c<d;c++)if(a.call(b,this.items[c],c)===!1)return this;return this};for(var cI in cl)cl[g](cI)&&(cH[cI]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a][m](c,b)})}}(cI));cH.attr=function(b,c){if(b&&a.is(b,E)&&a.is(b[0],"object"))for(var d=0,e=b.length;d<e;d++)this.items[d].attr(b[d]);else for(var f=0,g=this.items.length;f<g;f++)this.items[f].attr(b,c);return this},cH.clear=function(){while(this.length)this.pop()},cH.splice=function(a,b,c){a=a<0?x(this.length+a,0):a,b=x(0,y(this.length-a,b));var d=[],e=[],f=[],g;for(g=2;g<arguments.length;g++)f.push(arguments[g]);for(g=0;g<b;g++)e.push(this[a+g]);for(;g<this.length-a;g++)d.push(this[a+g]);var h=f.length;for(g=0;g<h+d.length;g++)this.items[a+g]=this[a+g]=g<h?f[g]:d[g-h];g=this.items.length=this.length-=b-h;while(this[g])delete this[g++];return new cG(e)},cH.exclude=function(a){for(var b=0,c=this.length;b<c;b++)if(this[b]==a){this.splice(b,1);return!0}},cH.animate=function(b,c,d,e){(a.is(d,"function")||!d)&&(e=d||null);var f=this.items.length,g=f,h,i=this,j;if(!f)return this;e&&(j=function(){!--f&&e.call(i)}),d=a.is(d,D)?d:j;var k=a.animation(b,c,d,j);h=this.items[--g].animate(k);while(g--)this.items[g]&&!this.items[g].removed&&this.items[g].animateWith(h,k,k);return this},cH.insertAfter=function(a){var b=this.items.length;while(b--)this.items[b].insertAfter(a);return this},cH.getBBox=function(){var a=[],b=[],c=[],d=[];for(var e=this.items.length;e--;)if(!this.items[e].removed){var 
f=this.items[e].getBBox();a.push(f.x),b.push(f.y),c.push(f.x+f.width),d.push(f.y+f.height)}a=y[m](0,a),b=y[m](0,b),c=x[m](0,c),d=x[m](0,d);return{x:a,y:b,x2:c,y2:d,width:c-a,height:d-b}},cH.clone=function(a){a=new cG;for(var b=0,c=this.items.length;b<c;b++)a.push(this.items[b].clone());return a},cH.toString=function(){return"RaphaĆ«lā€˜s set"},a.registerFont=function(a){if(!a.face)return a;this.fonts=this.fonts||{};var b={w:a.w,face:{},glyphs:{}},c=a.face["font-family"];for(var d in a.face)a.face[g](d)&&(b.face[d]=a.face[d]);this.fonts[c]?this.fonts[c].push(b):this.fonts[c]=[b];if(!a.svg){b.face["units-per-em"]=R(a.face["units-per-em"],10);for(var e in a.glyphs)if(a.glyphs[g](e)){var f=a.glyphs[e];b.glyphs[e]={w:f.w,k:{},d:f.d&&"M"+f.d.replace(/[mlcxtrv]/g,function(a){return{l:"L",c:"C",x:"z",t:"m",r:"l",v:"c"}[a]||"M"})+"z"};if(f.k)for(var h in f.k)f[g](h)&&(b.glyphs[e].k[h]=f.k[h])}}return a},k.getFont=function(b,c,d,e){e=e||"normal",d=d||"normal",c=+c||{normal:400,bold:700,lighter:300,bolder:800}[c]||400;if(!!a.fonts){var f=a.fonts[b];if(!f){var h=new RegExp("(^|\\s)"+b.replace(/[^\w\d\s+!~.:_-]/g,p)+"(\\s|$)","i");for(var i in a.fonts)if(a.fonts[g](i)&&h.test(i)){f=a.fonts[i];break}}var j;if(f)for(var k=0,l=f.length;k<l;k++){j=f[k];if(j.face["font-weight"]==c&&(j.face["font-style"]==d||!j.face["font-style"])&&j.face["font-stretch"]==e)break}return j}},k.print=function(b,d,e,f,g,h,i){h=h||"middle",i=x(y(i||0,1),-1);var j=r(e)[s](p),k=0,l=0,m=p,n;a.is(f,e)&&(f=this.getFont(f));if(f){n=(g||16)/f.face["units-per-em"];var o=f.face.bbox[s](c),q=+o[0],t=o[3]-o[1],u=0,v=+o[1]+(h=="baseline"?t+ +f.face.descent:t/2);for(var w=0,z=j.length;w<z;w++){if(j[w]=="\n")k=0,B=0,l=0,u+=t;else{var A=l&&f.glyphs[j[w-1]]||{},B=f.glyphs[j[w]];k+=l?(A.w||f.w)+(A.k&&A.k[j[w]]||0)+f.w*i:0,l=1}B&&B.d&&(m+=a.transformPath(B.d,["t",k*n,u*n,"s",n,n,q,v,"t",(b-q)/n,(d-v)/n]))}}return this.path(m).attr({fill:"#000",stroke:"none"})},k.add=function(b){if(a.is(b,"array")){var c=this.set(),e=0,f=b.length,h;for(;e<f;e++)h=b[e]||{},d[g](h.type)&&c.push(this[h.type]().attr(h))}return c},a.format=function(b,c){var d=a.is(c,E)?[0][n](c):arguments;b&&a.is(b,D)&&d.length-1&&(b=b.replace(e,function(a,b){return d[++b]==null?p:d[b]}));return b||p},a.fullfill=function(){var a=/\{([^\}]+)\}/g,b=/(?:(?:^|\.)(.+?)(?=\[|\.|$|\()|\[('|")(.+?)\2\])(\(\))?/g,c=function(a,c,d){var e=d;c.replace(b,function(a,b,c,d,f){b=b||d,e&&(b in e&&(e=e[b]),typeof e=="function"&&f&&(e=e()))}),e=(e==null||e==d?a:e)+"";return e};return function(b,d){return String(b).replace(a,function(a,b){return c(a,b,d)})}}(),a.ninja=function(){i.was?h.win.Raphael=i.is:delete Raphael;return a},a.st=cH,function(b,c,d){function e(){/in/.test(b.readyState)?setTimeout(e,9):a.eve("raphael.DOMload")}b.readyState==null&&b.addEventListener&&(b.addEventListener(c,d=function(){b.removeEventListener(c,d,!1),b.readyState="complete"},!1),b.readyState="loading"),e()}(document,"DOMContentLoaded"),i.was?h.win.Raphael=a:Raphael=a,eve.on("raphael.DOMload",function(){b=!0})}(),window.Raphael.svg&&function(a){var b="hasOwnProperty",c=String,d=parseFloat,e=parseInt,f=Math,g=f.max,h=f.abs,i=f.pow,j=/[, ]+/,k=a.eve,l="",m=" ",n="http://www.w3.org/1999/xlink",o={block:"M5,0 0,2.5 5,5z",classic:"M5,0 0,2.5 5,5 3.5,3 3.5,2z",diamond:"M2.5,0 5,2.5 2.5,5 0,2.5z",open:"M6,1 1,3.5 6,6",oval:"M2.5,0A2.5,2.5,0,0,1,2.5,5 2.5,2.5,0,0,1,2.5,0z"},p={};a.toString=function(){return"Your browser supports SVG.\nYou are running RaphaĆ«l "+this.version};var q=function(d,e){if(e){typeof 
d=="string"&&(d=q(d));for(var f in e)e[b](f)&&(f.substring(0,6)=="xlink:"?d.setAttributeNS(n,f.substring(6),c(e[f])):d.setAttribute(f,c(e[f])))}else d=a._g.doc.createElementNS("http://www.w3.org/2000/svg",d),d.style&&(d.style.webkitTapHighlightColor="rgba(0,0,0,0)");return d},r=function(b,e){var j="linear",k=b.id+e,m=.5,n=.5,o=b.node,p=b.paper,r=o.style,s=a._g.doc.getElementById(k);if(!s){e=c(e).replace(a._radial_gradient,function(a,b,c){j="radial";if(b&&c){m=d(b),n=d(c);var e=(n>.5)*2-1;i(m-.5,2)+i(n-.5,2)>.25&&(n=f.sqrt(.25-i(m-.5,2))*e+.5)&&n!=.5&&(n=n.toFixed(5)-1e-5*e)}return l}),e=e.split(/\s*\-\s*/);if(j=="linear"){var t=e.shift();t=-d(t);if(isNaN(t))return null;var u=[0,0,f.cos(a.rad(t)),f.sin(a.rad(t))],v=1/(g(h(u[2]),h(u[3]))||1);u[2]*=v,u[3]*=v,u[2]<0&&(u[0]=-u[2],u[2]=0),u[3]<0&&(u[1]=-u[3],u[3]=0)}var w=a._parseDots(e);if(!w)return null;k=k.replace(/[\(\)\s,\xb0#]/g,"_"),b.gradient&&k!=b.gradient.id&&(p.defs.removeChild(b.gradient),delete b.gradient);if(!b.gradient){s=q(j+"Gradient",{id:k}),b.gradient=s,q(s,j=="radial"?{fx:m,fy:n}:{x1:u[0],y1:u[1],x2:u[2],y2:u[3],gradientTransform:b.matrix.invert()}),p.defs.appendChild(s);for(var x=0,y=w.length;x<y;x++)s.appendChild(q("stop",{offset:w[x].offset?w[x].offset:x?"100%":"0%","stop-color":w[x].color||"#fff"}))}}q(o,{fill:"url(#"+k+")",opacity:1,"fill-opacity":1}),r.fill=l,r.opacity=1,r.fillOpacity=1;return 1},s=function(a){var b=a.getBBox(1);q(a.pattern,{patternTransform:a.matrix.invert()+" translate("+b.x+","+b.y+")"})},t=function(d,e,f){if(d.type=="path"){var g=c(e).toLowerCase().split("-"),h=d.paper,i=f?"end":"start",j=d.node,k=d.attrs,m=k["stroke-width"],n=g.length,r="classic",s,t,u,v,w,x=3,y=3,z=5;while(n--)switch(g[n]){case"block":case"classic":case"oval":case"diamond":case"open":case"none":r=g[n];break;case"wide":y=5;break;case"narrow":y=2;break;case"long":x=5;break;case"short":x=2}r=="open"?(x+=2,y+=2,z+=2,u=1,v=f?4:1,w={fill:"none",stroke:k.stroke}):(v=u=x/2,w={fill:k.stroke,stroke:"none"}),d._.arrows?f?(d._.arrows.endPath&&p[d._.arrows.endPath]--,d._.arrows.endMarker&&p[d._.arrows.endMarker]--):(d._.arrows.startPath&&p[d._.arrows.startPath]--,d._.arrows.startMarker&&p[d._.arrows.startMarker]--):d._.arrows={};if(r!="none"){var A="raphael-marker-"+r,B="raphael-marker-"+i+r+x+y;a._g.doc.getElementById(A)?p[A]++:(h.defs.appendChild(q(q("path"),{"stroke-linecap":"round",d:o[r],id:A})),p[A]=1);var C=a._g.doc.getElementById(B),D;C?(p[B]++,D=C.getElementsByTagName("use")[0]):(C=q(q("marker"),{id:B,markerHeight:y,markerWidth:x,orient:"auto",refX:v,refY:y/2}),D=q(q("use"),{"xlink:href":"#"+A,transform:(f?"rotate(180 "+x/2+" "+y/2+") ":l)+"scale("+x/z+","+y/z+")","stroke-width":(1/((x/z+y/z)/2)).toFixed(4)}),C.appendChild(D),h.defs.appendChild(C),p[B]=1),q(D,w);var F=u*(r!="diamond"&&r!="oval");f?(s=d._.arrows.startdx*m||0,t=a.getTotalLength(k.path)-F*m):(s=F*m,t=a.getTotalLength(k.path)-(d._.arrows.enddx*m||0)),w={},w["marker-"+i]="url(#"+B+")";if(t||s)w.d=Raphael.getSubpath(k.path,s,t);q(j,w),d._.arrows[i+"Path"]=A,d._.arrows[i+"Marker"]=B,d._.arrows[i+"dx"]=F,d._.arrows[i+"Type"]=r,d._.arrows[i+"String"]=e}else f?(s=d._.arrows.startdx*m||0,t=a.getTotalLength(k.path)-s):(s=0,t=a.getTotalLength(k.path)-(d._.arrows.enddx*m||0)),d._.arrows[i+"Path"]&&q(j,{d:Raphael.getSubpath(k.path,s,t)}),delete d._.arrows[i+"Path"],delete d._.arrows[i+"Marker"],delete d._.arrows[i+"dx"],delete d._.arrows[i+"Type"],delete d._.arrows[i+"String"];for(w in p)if(p[b](w)&&!p[w]){var 
G=a._g.doc.getElementById(w);G&&G.parentNode.removeChild(G)}}},u={"":[0],none:[0],"-":[3,1],".":[1,1],"-.":[3,1,1,1],"-..":[3,1,1,1,1,1],". ":[1,3],"- ":[4,3],"--":[8,3],"- .":[4,3,1,3],"--.":[8,3,1,3],"--..":[8,3,1,3,1,3]},v=function(a,b,d){b=u[c(b).toLowerCase()];if(b){var e=a.attrs["stroke-width"]||"1",f={round:e,square:e,butt:0}[a.attrs["stroke-linecap"]||d["stroke-linecap"]]||0,g=[],h=b.length;while(h--)g[h]=b[h]*e+(h%2?1:-1)*f;q(a.node,{"stroke-dasharray":g.join(",")})}},w=function(d,f){var i=d.node,k=d.attrs,m=i.style.visibility;i.style.visibility="hidden";for(var o in f)if(f[b](o)){if(!a._availableAttrs[b](o))continue;var p=f[o];k[o]=p;switch(o){case"blur":d.blur(p);break;case"href":case"title":case"target":var u=i.parentNode;if(u.tagName.toLowerCase()!="a"){var w=q("a");u.insertBefore(w,i),w.appendChild(i),u=w}o=="target"?u.setAttributeNS(n,"show",p=="blank"?"new":p):u.setAttributeNS(n,o,p);break;case"cursor":i.style.cursor=p;break;case"transform":d.transform(p);break;case"arrow-start":t(d,p);break;case"arrow-end":t(d,p,1);break;case"clip-rect":var x=c(p).split(j);if(x.length==4){d.clip&&d.clip.parentNode.parentNode.removeChild(d.clip.parentNode);var z=q("clipPath"),A=q("rect");z.id=a.createUUID(),q(A,{x:x[0],y:x[1],width:x[2],height:x[3]}),z.appendChild(A),d.paper.defs.appendChild(z),q(i,{"clip-path":"url(#"+z.id+")"}),d.clip=A}if(!p){var B=i.getAttribute("clip-path");if(B){var C=a._g.doc.getElementById(B.replace(/(^url\(#|\)$)/g,l));C&&C.parentNode.removeChild(C),q(i,{"clip-path":l}),delete d.clip}}break;case"path":d.type=="path"&&(q(i,{d:p?k.path=a._pathToAbsolute(p):"M0,0"}),d._.dirty=1,d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1)));break;case"width":i.setAttribute(o,p),d._.dirty=1;if(k.fx)o="x",p=k.x;else break;case"x":k.fx&&(p=-k.x-(k.width||0));case"rx":if(o=="rx"&&d.type=="rect")break;case"cx":i.setAttribute(o,p),d.pattern&&s(d),d._.dirty=1;break;case"height":i.setAttribute(o,p),d._.dirty=1;if(k.fy)o="y",p=k.y;else break;case"y":k.fy&&(p=-k.y-(k.height||0));case"ry":if(o=="ry"&&d.type=="rect")break;case"cy":i.setAttribute(o,p),d.pattern&&s(d),d._.dirty=1;break;case"r":d.type=="rect"?q(i,{rx:p,ry:p}):i.setAttribute(o,p),d._.dirty=1;break;case"src":d.type=="image"&&i.setAttributeNS(n,"href",p);break;case"stroke-width":if(d._.sx!=1||d._.sy!=1)p/=g(h(d._.sx),h(d._.sy))||1;d.paper._vbSize&&(p*=d.paper._vbSize),i.setAttribute(o,p),k["stroke-dasharray"]&&v(d,k["stroke-dasharray"],f),d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1));break;case"stroke-dasharray":v(d,p,f);break;case"fill":var D=c(p).match(a._ISURL);if(D){z=q("pattern");var F=q("image");z.id=a.createUUID(),q(z,{x:0,y:0,patternUnits:"userSpaceOnUse",height:1,width:1}),q(F,{x:0,y:0,"xlink:href":D[1]}),z.appendChild(F),function(b){a._preload(D[1],function(){var a=this.offsetWidth,c=this.offsetHeight;q(b,{width:a,height:c}),q(F,{width:a,height:c}),d.paper.safari()})}(z),d.paper.defs.appendChild(z),q(i,{fill:"url(#"+z.id+")"}),d.pattern=z,d.pattern&&s(d);break}var G=a.getRGB(p);if(!G.error)delete f.gradient,delete k.gradient,!a.is(k.opacity,"undefined")&&a.is(f.opacity,"undefined")&&q(i,{opacity:k.opacity}),!a.is(k["fill-opacity"],"undefined")&&a.is(f["fill-opacity"],"undefined")&&q(i,{"fill-opacity":k["fill-opacity"]});else if((d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p)){if("opacity"in k||"fill-opacity"in k){var 
H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l));if(H){var I=H.getElementsByTagName("stop");q(I[I.length-1],{"stop-opacity":("opacity"in k?k.opacity:1)*("fill-opacity"in k?k["fill-opacity"]:1)})}}k.gradient=p,k.fill="none";break}G[b]("opacity")&&q(i,{"fill-opacity":G.opacity>1?G.opacity/100:G.opacity});case"stroke":G=a.getRGB(p),i.setAttribute(o,G.hex),o=="stroke"&&G[b]("opacity")&&q(i,{"stroke-opacity":G.opacity>1?G.opacity/100:G.opacity}),o=="stroke"&&d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1));break;case"gradient":(d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p);break;case"opacity":k.gradient&&!k[b]("stroke-opacity")&&q(i,{"stroke-opacity":p>1?p/100:p});case"fill-opacity":if(k.gradient){H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l)),H&&(I=H.getElementsByTagName("stop"),q(I[I.length-1],{"stop-opacity":p}));break};default:o=="font-size"&&(p=e(p,10)+"px");var J=o.replace(/(\-.)/g,function(a){return a.substring(1).toUpperCase()});i.style[J]=p,d._.dirty=1,i.setAttribute(o,p)}}y(d,f),i.style.visibility=m},x=1.2,y=function(d,f){if(d.type=="text"&&!!(f[b]("text")||f[b]("font")||f[b]("font-size")||f[b]("x")||f[b]("y"))){var g=d.attrs,h=d.node,i=h.firstChild?e(a._g.doc.defaultView.getComputedStyle(h.firstChild,l).getPropertyValue("font-size"),10):10;if(f[b]("text")){g.text=f.text;while(h.firstChild)h.removeChild(h.firstChild);var j=c(f.text).split("\n"),k=[],m;for(var n=0,o=j.length;n<o;n++)m=q("tspan"),n&&q(m,{dy:i*x,x:g.x}),m.appendChild(a._g.doc.createTextNode(j[n])),h.appendChild(m),k[n]=m}else{k=h.getElementsByTagName("tspan");for(n=0,o=k.length;n<o;n++)n?q(k[n],{dy:i*x,x:g.x}):q(k[0],{dy:0})}q(h,{x:g.x,y:g.y}),d._.dirty=1;var p=d._getBBox(),r=g.y-(p.y+p.height/2);r&&a.is(r,"finite")&&q(k[0],{dy:r})}},z=function(b,c){var d=0,e=0;this[0]=this.node=b,b.raphael=!0,this.id=a._oid++,b.raphaelid=this.id,this.matrix=a.matrix(),this.realPath=null,this.paper=c,this.attrs=this.attrs||{},this._={transform:[],sx:1,sy:1,deg:0,dx:0,dy:0,dirty:1},!c.bottom&&(c.bottom=this),this.prev=c.top,c.top&&(c.top.next=this),c.top=this,this.next=null},A=a.el;z.prototype=A,A.constructor=z,a._engine.path=function(a,b){var c=q("path");b.canvas&&b.canvas.appendChild(c);var d=new z(c,b);d.type="path",w(d,{fill:"none",stroke:"#000",path:a});return d},A.rotate=function(a,b,e){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1]),e=d(a[2])),a=d(a[0]),e==null&&(b=e);if(b==null||e==null){var f=this.getBBox(1);b=f.x+f.width/2,e=f.y+f.height/2}this.transform(this._.transform.concat([["r",a,b,e]]));return this},A.scale=function(a,b,e,f){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1]),e=d(a[2]),f=d(a[3])),a=d(a[0]),b==null&&(b=a),f==null&&(e=f);if(e==null||f==null)var g=this.getBBox(1);e=e==null?g.x+g.width/2:e,f=f==null?g.y+g.height/2:f,this.transform(this._.transform.concat([["s",a,b,e,f]]));return this},A.translate=function(a,b){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1])),a=d(a[0])||0,b=+b||0,this.transform(this._.transform.concat([["t",a,b]]));return this},A.transform=function(c){var d=this._;if(c==null)return d.transform;a._extractTransform(this,c),this.clip&&q(this.clip,{transform:this.matrix.invert()}),this.pattern&&s(this),this.node&&q(this.node,{transform:this.matrix});if(d.sx!=1||d.sy!=1){var e=this.attrs[b]("stroke-width")?this.attrs["stroke-width"]:1;this.attr({"stroke-width":e})}return 
this},A.hide=function(){!this.removed&&this.paper.safari(this.node.style.display="none");return this},A.show=function(){!this.removed&&this.paper.safari(this.node.style.display="");return this},A.remove=function(){if(!this.removed&&!!this.node.parentNode){var b=this.paper;b.__set__&&b.__set__.exclude(this),k.unbind("raphael.*.*."+this.id),this.gradient&&b.defs.removeChild(this.gradient),a._tear(this,b),this.node.parentNode.tagName.toLowerCase()=="a"?this.node.parentNode.parentNode.removeChild(this.node.parentNode):this.node.parentNode.removeChild(this.node);for(var c in this)this[c]=typeof this[c]=="function"?a._removedFactory(c):null;this.removed=!0}},A._getBBox=function(){if(this.node.style.display=="none"){this.show();var a=!0}var b={};try{b=this.node.getBBox()}catch(c){}finally{b=b||{}}a&&this.hide();return b},A.attr=function(c,d){if(this.removed)return this;if(c==null){var e={};for(var f in this.attrs)this.attrs[b](f)&&(e[f]=this.attrs[f]);e.gradient&&e.fill=="none"&&(e.fill=e.gradient)&&delete e.gradient,e.transform=this._.transform;return e}if(d==null&&a.is(c,"string")){if(c=="fill"&&this.attrs.fill=="none"&&this.attrs.gradient)return this.attrs.gradient;if(c=="transform")return this._.transform;var g=c.split(j),h={};for(var i=0,l=g.length;i<l;i++)c=g[i],c in this.attrs?h[c]=this.attrs[c]:a.is(this.paper.customAttributes[c],"function")?h[c]=this.paper.customAttributes[c].def:h[c]=a._availableAttrs[c];return l-1?h:h[g[0]]}if(d==null&&a.is(c,"array")){h={};for(i=0,l=c.length;i<l;i++)h[c[i]]=this.attr(c[i]);return h}if(d!=null){var m={};m[c]=d}else c!=null&&a.is(c,"object")&&(m=c);for(var n in m)k("raphael.attr."+n+"."+this.id,this,m[n]);for(n in this.paper.customAttributes)if(this.paper.customAttributes[b](n)&&m[b](n)&&a.is(this.paper.customAttributes[n],"function")){var o=this.paper.customAttributes[n].apply(this,[].concat(m[n]));this.attrs[n]=m[n];for(var p in o)o[b](p)&&(m[p]=o[p])}w(this,m);return this},A.toFront=function(){if(this.removed)return this;this.node.parentNode.tagName.toLowerCase()=="a"?this.node.parentNode.parentNode.appendChild(this.node.parentNode):this.node.parentNode.appendChild(this.node);var b=this.paper;b.top!=this&&a._tofront(this,b);return this},A.toBack=function(){if(this.removed)return this;var b=this.node.parentNode;b.tagName.toLowerCase()=="a"?b.parentNode.insertBefore(this.node.parentNode,this.node.parentNode.parentNode.firstChild):b.firstChild!=this.node&&b.insertBefore(this.node,this.node.parentNode.firstChild),a._toback(this,this.paper);var c=this.paper;return this},A.insertAfter=function(b){if(this.removed)return this;var c=b.node||b[b.length-1].node;c.nextSibling?c.parentNode.insertBefore(this.node,c.nextSibling):c.parentNode.appendChild(this.node),a._insertafter(this,b,this.paper);return this},A.insertBefore=function(b){if(this.removed)return this;var c=b.node||b[0].node;c.parentNode.insertBefore(this.node,c),a._insertbefore(this,b,this.paper);return this},A.blur=function(b){var c=this;if(+b!==0){var d=q("filter"),e=q("feGaussianBlur");c.attrs.blur=b,d.id=a.createUUID(),q(e,{stdDeviation:+b||1.5}),d.appendChild(e),c.paper.defs.appendChild(d),c._blur=d,q(c.node,{filter:"url(#"+d.id+")"})}else c._blur&&(c._blur.parentNode.removeChild(c._blur),delete c._blur,delete c.attrs.blur),c.node.removeAttribute("filter")},a._engine.circle=function(a,b,c,d){var e=q("circle");a.canvas&&a.canvas.appendChild(e);var f=new z(e,a);f.attrs={cx:b,cy:c,r:d,fill:"none",stroke:"#000"},f.type="circle",q(e,f.attrs);return f},a._engine.rect=function(a,b,c,d,e,f){var 
g=q("rect");a.canvas&&a.canvas.appendChild(g);var h=new z(g,a);h.attrs={x:b,y:c,width:d,height:e,r:f||0,rx:f||0,ry:f||0,fill:"none",stroke:"#000"},h.type="rect",q(g,h.attrs);return h},a._engine.ellipse=function(a,b,c,d,e){var f=q("ellipse");a.canvas&&a.canvas.appendChild(f);var g=new z(f,a);g.attrs={cx:b,cy:c,rx:d,ry:e,fill:"none",stroke:"#000"},g.type="ellipse",q(f,g.attrs);return g},a._engine.image=function(a,b,c,d,e,f){var g=q("image");q(g,{x:c,y:d,width:e,height:f,preserveAspectRatio:"none"}),g.setAttributeNS(n,"href",b),a.canvas&&a.canvas.appendChild(g);var h=new z(g,a);h.attrs={x:c,y:d,width:e,height:f,src:b},h.type="image";return h},a._engine.text=function(b,c,d,e){var f=q("text");b.canvas&&b.canvas.appendChild(f);var g=new z(f,b);g.attrs={x:c,y:d,"text-anchor":"middle",text:e,font:a._availableAttrs.font,stroke:"none",fill:"#000"},g.type="text",w(g,g.attrs);return g},a._engine.setSize=function(a,b){this.width=a||this.width,this.height=b||this.height,this.canvas.setAttribute("width",this.width),this.canvas.setAttribute("height",this.height),this._viewBox&&this.setViewBox.apply(this,this._viewBox);return this},a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b&&b.container,d=b.x,e=b.y,f=b.width,g=b.height;if(!c)throw new Error("SVG container not found.");var h=q("svg"),i="overflow:hidden;",j;d=d||0,e=e||0,f=f||512,g=g||342,q(h,{height:g,version:1.1,width:f,xmlns:"http://www.w3.org/2000/svg"}),c==1?(h.style.cssText=i+"position:absolute;left:"+d+"px;top:"+e+"px",a._g.doc.body.appendChild(h),j=1):(h.style.cssText=i+"position:relative",c.firstChild?c.insertBefore(h,c.firstChild):c.appendChild(h)),c=new a._Paper,c.width=f,c.height=g,c.canvas=h,c.clear(),c._left=c._top=0,j&&(c.renderfix=function(){}),c.renderfix();return c},a._engine.setViewBox=function(a,b,c,d,e){k("raphael.setViewBox",this,this._viewBox,[a,b,c,d,e]);var f=g(c/this.width,d/this.height),h=this.top,i=e?"meet":"xMinYMin",j,l;a==null?(this._vbSize&&(f=1),delete this._vbSize,j="0 0 "+this.width+m+this.height):(this._vbSize=f,j=a+m+b+m+c+m+d),q(this.canvas,{viewBox:j,preserveAspectRatio:i});while(f&&h)l="stroke-width"in h.attrs?h.attrs["stroke-width"]:1,h.attr({"stroke-width":l}),h._.dirty=1,h._.dirtyT=1,h=h.prev;this._viewBox=[a,b,c,d,!!e];return this},a.prototype.renderfix=function(){var a=this.canvas,b=a.style,c;try{c=a.getScreenCTM()||a.createSVGMatrix()}catch(d){c=a.createSVGMatrix()}var e=-c.e%1,f=-c.f%1;if(e||f)e&&(this._left=(this._left+e)%1,b.left=this._left+"px"),f&&(this._top=(this._top+f)%1,b.top=this._top+"px")},a.prototype.clear=function(){a.eve("raphael.clear",this);var b=this.canvas;while(b.firstChild)b.removeChild(b.firstChild);this.bottom=this.top=null,(this.desc=q("desc")).appendChild(a._g.doc.createTextNode("Created with RaphaĆ«l "+a.version)),b.appendChild(this.desc),b.appendChild(this.defs=q("defs"))},a.prototype.remove=function(){k("raphael.remove",this),this.canvas.parentNode&&this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null};var B=a.st;for(var C in A)A[b](C)&&!B[b](C)&&(B[C]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(C))}(window.Raphael),window.Raphael.vml&&function(a){var b="hasOwnProperty",c=String,d=parseFloat,e=Math,f=e.round,g=e.max,h=e.min,i=e.abs,j="fill",k=/[, ]+/,l=a.eve,m=" progid:DXImageTransform.Microsoft",n=" ",o="",p={M:"m",L:"l",C:"c",Z:"x",m:"t",l:"r",c:"v",z:"x"},q=/([clmz]),?([^clmz]*)/gi,r=/ 
progid:\S+Blur\([^\)]+\)/g,s=/-?[^,\s-]+/g,t="position:absolute;left:0;top:0;width:1px;height:1px",u=21600,v={path:1,rect:1,image:1},w={circle:1,ellipse:1},x=function(b){var d=/[ahqstv]/ig,e=a._pathToAbsolute;c(b).match(d)&&(e=a._path2curve),d=/[clmz]/g;if(e==a._pathToAbsolute&&!c(b).match(d)){var g=c(b).replace(q,function(a,b,c){var d=[],e=b.toLowerCase()=="m",g=p[b];c.replace(s,function(a){e&&d.length==2&&(g+=d+p[b=="m"?"l":"L"],d=[]),d.push(f(a*u))});return g+d});return g}var h=e(b),i,j;g=[];for(var k=0,l=h.length;k<l;k++){i=h[k],j=h[k][0].toLowerCase(),j=="z"&&(j="x");for(var m=1,r=i.length;m<r;m++)j+=f(i[m]*u)+(m!=r-1?",":o);g.push(j)}return g.join(n)},y=function(b,c,d){var e=a.matrix();e.rotate(-b,.5,.5);return{dx:e.x(c,d),dy:e.y(c,d)}},z=function(a,b,c,d,e,f){var g=a._,h=a.matrix,k=g.fillpos,l=a.node,m=l.style,o=1,p="",q,r=u/b,s=u/c;m.visibility="hidden";if(!!b&&!!c){l.coordsize=i(r)+n+i(s),m.rotation=f*(b*c<0?-1:1);if(f){var t=y(f,d,e);d=t.dx,e=t.dy}b<0&&(p+="x"),c<0&&(p+=" y")&&(o=-1),m.flip=p,l.coordorigin=d*-r+n+e*-s;if(k||g.fillsize){var v=l.getElementsByTagName(j);v=v&&v[0],l.removeChild(v),k&&(t=y(f,h.x(k[0],k[1]),h.y(k[0],k[1])),v.position=t.dx*o+n+t.dy*o),g.fillsize&&(v.size=g.fillsize[0]*i(b)+n+g.fillsize[1]*i(c)),l.appendChild(v)}m.visibility="visible"}};a.toString=function(){return"Your browser doesnā€™t support SVG. Falling down to VML.\nYou are running RaphaĆ«l "+this.version};var A=function(a,b,d){var e=c(b).toLowerCase().split("-"),f=d?"end":"start",g=e.length,h="classic",i="medium",j="medium";while(g--)switch(e[g]){case"block":case"classic":case"oval":case"diamond":case"open":case"none":h=e[g];break;case"wide":case"narrow":j=e[g];break;case"long":case"short":i=e[g]}var k=a.node.getElementsByTagName("stroke")[0];k[f+"arrow"]=h,k[f+"arrowlength"]=i,k[f+"arrowwidth"]=j},B=function(e,i){e.attrs=e.attrs||{};var l=e.node,m=e.attrs,p=l.style,q,r=v[e.type]&&(i.x!=m.x||i.y!=m.y||i.width!=m.width||i.height!=m.height||i.cx!=m.cx||i.cy!=m.cy||i.rx!=m.rx||i.ry!=m.ry||i.r!=m.r),s=w[e.type]&&(m.cx!=i.cx||m.cy!=i.cy||m.r!=i.r||m.rx!=i.rx||m.ry!=i.ry),t=e;for(var y in i)i[b](y)&&(m[y]=i[y]);r&&(m.path=a._getPath[e.type](e),e._.dirty=1),i.href&&(l.href=i.href),i.title&&(l.title=i.title),i.target&&(l.target=i.target),i.cursor&&(p.cursor=i.cursor),"blur"in i&&e.blur(i.blur);if(i.path&&e.type=="path"||r)l.path=x(~c(m.path).toLowerCase().indexOf("r")?a._pathToAbsolute(m.path):m.path),e.type=="image"&&(e._.fillpos=[m.x,m.y],e._.fillsize=[m.width,m.height],z(e,1,1,0,0,0));"transform"in i&&e.transform(i.transform);if(s){var B=+m.cx,D=+m.cy,E=+m.rx||+m.r||0,G=+m.ry||+m.r||0;l.path=a.format("ar{0},{1},{2},{3},{4},{1},{4},{1}x",f((B-E)*u),f((D-G)*u),f((B+E)*u),f((D+G)*u),f(B*u))}if("clip-rect"in i){var H=c(i["clip-rect"]).split(k);if(H.length==4){H[2]=+H[2]+ +H[0],H[3]=+H[3]+ +H[1];var I=l.clipRect||a._g.doc.createElement("div"),J=I.style;J.clip=a.format("rect({1}px {2}px {3}px {0}px)",H),l.clipRect||(J.position="absolute",J.top=0,J.left=0,J.width=e.paper.width+"px",J.height=e.paper.height+"px",l.parentNode.insertBefore(I,l),I.appendChild(l),l.clipRect=I)}i["clip-rect"]||l.clipRect&&(l.clipRect.style.clip="auto")}if(e.textpath){var K=e.textpath.style;i.font&&(K.font=i.font),i["font-family"]&&(K.fontFamily='"'+i["font-family"].split(",")[0].replace(/^['"]+|['"]+$/g,o)+'"'),i["font-size"]&&(K.fontSize=i["font-size"]),i["font-weight"]&&(K.fontWeight=i["font-weight"]),i["font-style"]&&(K.fontStyle=i["font-style"])}"arrow-start"in i&&A(t,i["arrow-start"]),"arrow-end"in 
i&&A(t,i["arrow-end"],1);if(i.opacity!=null||i["stroke-width"]!=null||i.fill!=null||i.src!=null||i.stroke!=null||i["stroke-width"]!=null||i["stroke-opacity"]!=null||i["fill-opacity"]!=null||i["stroke-dasharray"]!=null||i["stroke-miterlimit"]!=null||i["stroke-linejoin"]!=null||i["stroke-linecap"]!=null){var L=l.getElementsByTagName(j),M=!1;L=L&&L[0],!L&&(M=L=F(j)),e.type=="image"&&i.src&&(L.src=i.src),i.fill&&(L.on=!0);if(L.on==null||i.fill=="none"||i.fill===null)L.on=!1;if(L.on&&i.fill){var N=c(i.fill).match(a._ISURL);if(N){L.parentNode==l&&l.removeChild(L),L.rotate=!0,L.src=N[1],L.type="tile";var O=e.getBBox(1);L.position=O.x+n+O.y,e._.fillpos=[O.x,O.y],a._preload(N[1],function(){e._.fillsize=[this.offsetWidth,this.offsetHeight]})}else L.color=a.getRGB(i.fill).hex,L.src=o,L.type="solid",a.getRGB(i.fill).error&&(t.type in{circle:1,ellipse:1}||c(i.fill).charAt()!="r")&&C(t,i.fill,L)&&(m.fill="none",m.gradient=i.fill,L.rotate=!1)}if("fill-opacity"in i||"opacity"in i){var P=((+m["fill-opacity"]+1||2)-1)*((+m.opacity+1||2)-1)*((+a.getRGB(i.fill).o+1||2)-1);P=h(g(P,0),1),L.opacity=P,L.src&&(L.color="none")}l.appendChild(L);var Q=l.getElementsByTagName("stroke")&&l.getElementsByTagName("stroke")[0],T=!1;!Q&&(T=Q=F("stroke"));if(i.stroke&&i.stroke!="none"||i["stroke-width"]||i["stroke-opacity"]!=null||i["stroke-dasharray"]||i["stroke-miterlimit"]||i["stroke-linejoin"]||i["stroke-linecap"])Q.on=!0;(i.stroke=="none"||i.stroke===null||Q.on==null||i.stroke==0||i["stroke-width"]==0)&&(Q.on=!1);var U=a.getRGB(i.stroke);Q.on&&i.stroke&&(Q.color=U.hex),P=((+m["stroke-opacity"]+1||2)-1)*((+m.opacity+1||2)-1)*((+U.o+1||2)-1);var V=(d(i["stroke-width"])||1)*.75;P=h(g(P,0),1),i["stroke-width"]==null&&(V=m["stroke-width"]),i["stroke-width"]&&(Q.weight=V),V&&V<1&&(P*=V)&&(Q.weight=1),Q.opacity=P,i["stroke-linejoin"]&&(Q.joinstyle=i["stroke-linejoin"]||"miter"),Q.miterlimit=i["stroke-miterlimit"]||8,i["stroke-linecap"]&&(Q.endcap=i["stroke-linecap"]=="butt"?"flat":i["stroke-linecap"]=="square"?"square":"round");if(i["stroke-dasharray"]){var W={"-":"shortdash",".":"shortdot","-.":"shortdashdot","-..":"shortdashdotdot",". 
":"dot","- ":"dash","--":"longdash","- .":"dashdot","--.":"longdashdot","--..":"longdashdotdot"};Q.dashstyle=W[b](i["stroke-dasharray"])?W[i["stroke-dasharray"]]:o}T&&l.appendChild(Q)}if(t.type=="text"){t.paper.canvas.style.display=o;var X=t.paper.span,Y=100,Z=m.font&&m.font.match(/\d+(?:\.\d*)?(?=px)/);p=X.style,m.font&&(p.font=m.font),m["font-family"]&&(p.fontFamily=m["font-family"]),m["font-weight"]&&(p.fontWeight=m["font-weight"]),m["font-style"]&&(p.fontStyle=m["font-style"]),Z=d(m["font-size"]||Z&&Z[0])||10,p.fontSize=Z*Y+"px",t.textpath.string&&(X.innerHTML=c(t.textpath.string).replace(/</g,"&#60;").replace(/&/g,"&#38;").replace(/\n/g,"<br>"));var $=X.getBoundingClientRect();t.W=m.w=($.right-$.left)/Y,t.H=m.h=($.bottom-$.top)/Y,t.X=m.x,t.Y=m.y+t.H/2,("x"in i||"y"in i)&&(t.path.v=a.format("m{0},{1}l{2},{1}",f(m.x*u),f(m.y*u),f(m.x*u)+1));var _=["x","y","text","font","font-family","font-weight","font-style","font-size"];for(var ba=0,bb=_.length;ba<bb;ba++)if(_[ba]in i){t._.dirty=1;break}switch(m["text-anchor"]){case"start":t.textpath.style["v-text-align"]="left",t.bbx=t.W/2;break;case"end":t.textpath.style["v-text-align"]="right",t.bbx=-t.W/2;break;default:t.textpath.style["v-text-align"]="center",t.bbx=0}t.textpath.style["v-text-kern"]=!0}},C=function(b,f,g){b.attrs=b.attrs||{};var h=b.attrs,i=Math.pow,j,k,l="linear",m=".5 .5";b.attrs.gradient=f,f=c(f).replace(a._radial_gradient,function(a,b,c){l="radial",b&&c&&(b=d(b),c=d(c),i(b-.5,2)+i(c-.5,2)>.25&&(c=e.sqrt(.25-i(b-.5,2))*((c>.5)*2-1)+.5),m=b+n+c);return o}),f=f.split(/\s*\-\s*/);if(l=="linear"){var p=f.shift();p=-d(p);if(isNaN(p))return null}var q=a._parseDots(f);if(!q)return null;b=b.shape||b.node;if(q.length){b.removeChild(g),g.on=!0,g.method="none",g.color=q[0].color,g.color2=q[q.length-1].color;var r=[];for(var s=0,t=q.length;s<t;s++)q[s].offset&&r.push(q[s].offset+n+q[s].color);g.colors=r.length?r.join():"0% "+g.color,l=="radial"?(g.type="gradientTitle",g.focus="100%",g.focussize="0 0",g.focusposition=m,g.angle=0):(g.type="gradient",g.angle=(270-p)%360),b.appendChild(g)}return 1},D=function(b,c){this[0]=this.node=b,b.raphael=!0,this.id=a._oid++,b.raphaelid=this.id,this.X=0,this.Y=0,this.attrs={},this.paper=c,this.matrix=a.matrix(),this._={transform:[],sx:1,sy:1,dx:0,dy:0,deg:0,dirty:1,dirtyT:1},!c.bottom&&(c.bottom=this),this.prev=c.top,c.top&&(c.top.next=this),c.top=this,this.next=null},E=a.el;D.prototype=E,E.constructor=D,E.transform=function(b){if(b==null)return this._.transform;var d=this.paper._viewBoxShift,e=d?"s"+[d.scale,d.scale]+"-1-1t"+[d.dx,d.dy]:o,f;d&&(f=b=c(b).replace(/\.{3}|\u2026/g,this._.transform||o)),a._extractTransform(this,e+b);var g=this.matrix.clone(),h=this.skew,i=this.node,j,k=~c(this.attrs.fill).indexOf("-"),l=!c(this.attrs.fill).indexOf("url(");g.translate(-0.5,-0.5);if(l||k||this.type=="image"){h.matrix="1 0 0 1",h.offset="0 0",j=g.split();if(k&&j.noRotation||!j.isSimple){i.style.filter=g.toFilter();var m=this.getBBox(),p=this.getBBox(1),q=m.x-p.x,r=m.y-p.y;i.coordorigin=q*-u+n+r*-u,z(this,1,1,q,r,0)}else i.style.filter=o,z(this,j.scalex,j.scaley,j.dx,j.dy,j.rotate)}else i.style.filter=o,h.matrix=c(g),h.offset=g.offset();f&&(this._.transform=f);return this},E.rotate=function(a,b,e){if(this.removed)return this;if(a!=null){a=c(a).split(k),a.length-1&&(b=d(a[1]),e=d(a[2])),a=d(a[0]),e==null&&(b=e);if(b==null||e==null){var f=this.getBBox(1);b=f.x+f.width/2,e=f.y+f.height/2}this._.dirtyT=1,this.transform(this._.transform.concat([["r",a,b,e]]));return 
this}},E.translate=function(a,b){if(this.removed)return this;a=c(a).split(k),a.length-1&&(b=d(a[1])),a=d(a[0])||0,b=+b||0,this._.bbox&&(this._.bbox.x+=a,this._.bbox.y+=b),this.transform(this._.transform.concat([["t",a,b]]));return this},E.scale=function(a,b,e,f){if(this.removed)return this;a=c(a).split(k),a.length-1&&(b=d(a[1]),e=d(a[2]),f=d(a[3]),isNaN(e)&&(e=null),isNaN(f)&&(f=null)),a=d(a[0]),b==null&&(b=a),f==null&&(e=f);if(e==null||f==null)var g=this.getBBox(1);e=e==null?g.x+g.width/2:e,f=f==null?g.y+g.height/2:f,this.transform(this._.transform.concat([["s",a,b,e,f]])),this._.dirtyT=1;return this},E.hide=function(){!this.removed&&(this.node.style.display="none");return this},E.show=function(){!this.removed&&(this.node.style.display=o);return this},E._getBBox=function(){if(this.removed)return{};return{x:this.X+(this.bbx||0)-this.W/2,y:this.Y-this.H,width:this.W,height:this.H}},E.remove=function(){if(!this.removed&&!!this.node.parentNode){this.paper.__set__&&this.paper.__set__.exclude(this),a.eve.unbind("raphael.*.*."+this.id),a._tear(this,this.paper),this.node.parentNode.removeChild(this.node),this.shape&&this.shape.parentNode.removeChild(this.shape);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;this.removed=!0}},E.attr=function(c,d){if(this.removed)return this;if(c==null){var e={};for(var f in this.attrs)this.attrs[b](f)&&(e[f]=this.attrs[f]);e.gradient&&e.fill=="none"&&(e.fill=e.gradient)&&delete e.gradient,e.transform=this._.transform;return e}if(d==null&&a.is(c,"string")){if(c==j&&this.attrs.fill=="none"&&this.attrs.gradient)return this.attrs.gradient;var g=c.split(k),h={};for(var i=0,m=g.length;i<m;i++)c=g[i],c in this.attrs?h[c]=this.attrs[c]:a.is(this.paper.customAttributes[c],"function")?h[c]=this.paper.customAttributes[c].def:h[c]=a._availableAttrs[c];return m-1?h:h[g[0]]}if(this.attrs&&d==null&&a.is(c,"array")){h={};for(i=0,m=c.length;i<m;i++)h[c[i]]=this.attr(c[i]);return h}var n;d!=null&&(n={},n[c]=d),d==null&&a.is(c,"object")&&(n=c);for(var o in n)l("raphael.attr."+o+"."+this.id,this,n[o]);if(n){for(o in this.paper.customAttributes)if(this.paper.customAttributes[b](o)&&n[b](o)&&a.is(this.paper.customAttributes[o],"function")){var p=this.paper.customAttributes[o].apply(this,[].concat(n[o]));this.attrs[o]=n[o];for(var q in p)p[b](q)&&(n[q]=p[q])}n.text&&this.type=="text"&&(this.textpath.string=n.text),B(this,n)}return this},E.toFront=function(){!this.removed&&this.node.parentNode.appendChild(this.node),this.paper&&this.paper.top!=this&&a._tofront(this,this.paper);return this},E.toBack=function(){if(this.removed)return this;this.node.parentNode.firstChild!=this.node&&(this.node.parentNode.insertBefore(this.node,this.node.parentNode.firstChild),a._toback(this,this.paper));return this},E.insertAfter=function(b){if(this.removed)return this;b.constructor==a.st.constructor&&(b=b[b.length-1]),b.node.nextSibling?b.node.parentNode.insertBefore(this.node,b.node.nextSibling):b.node.parentNode.appendChild(this.node),a._insertafter(this,b,this.paper);return this},E.insertBefore=function(b){if(this.removed)return this;b.constructor==a.st.constructor&&(b=b[0]),b.node.parentNode.insertBefore(this.node,b.node),a._insertbefore(this,b,this.paper);return this},E.blur=function(b){var c=this.node.runtimeStyle,d=c.filter;d=d.replace(r,o),+b!==0?(this.attrs.blur=b,c.filter=d+n+m+".Blur(pixelradius="+(+b||1.5)+")",c.margin=a.format("-{0}px 0 0 -{0}px",f(+b||1.5))):(c.filter=d,c.margin=0,delete this.attrs.blur)},a._engine.path=function(a,b){var 
c=F("shape");c.style.cssText=t,c.coordsize=u+n+u,c.coordorigin=b.coordorigin;var d=new D(c,b),e={fill:"none",stroke:"#000"};a&&(e.path=a),d.type="path",d.path=[],d.Path=o,B(d,e),b.canvas.appendChild(c);var f=F("skew");f.on=!0,c.appendChild(f),d.skew=f,d.transform(o);return d},a._engine.rect=function(b,c,d,e,f,g){var h=a._rectPath(c,d,e,f,g),i=b.path(h),j=i.attrs;i.X=j.x=c,i.Y=j.y=d,i.W=j.width=e,i.H=j.height=f,j.r=g,j.path=h,i.type="rect";return i},a._engine.ellipse=function(a,b,c,d,e){var f=a.path(),g=f.attrs;f.X=b-d,f.Y=c-e,f.W=d*2,f.H=e*2,f.type="ellipse",B(f,{cx:b,cy:c,rx:d,ry:e});return f},a._engine.circle=function(a,b,c,d){var e=a.path(),f=e.attrs;e.X=b-d,e.Y=c-d,e.W=e.H=d*2,e.type="circle",B(e,{cx:b,cy:c,r:d});return e},a._engine.image=function(b,c,d,e,f,g){var h=a._rectPath(d,e,f,g),i=b.path(h).attr({stroke:"none"}),k=i.attrs,l=i.node,m=l.getElementsByTagName(j)[0];k.src=c,i.X=k.x=d,i.Y=k.y=e,i.W=k.width=f,i.H=k.height=g,k.path=h,i.type="image",m.parentNode==l&&l.removeChild(m),m.rotate=!0,m.src=c,m.type="tile",i._.fillpos=[d,e],i._.fillsize=[f,g],l.appendChild(m),z(i,1,1,0,0,0);return i},a._engine.text=function(b,d,e,g){var h=F("shape"),i=F("path"),j=F("textpath");d=d||0,e=e||0,g=g||"",i.v=a.format("m{0},{1}l{2},{1}",f(d*u),f(e*u),f(d*u)+1),i.textpathok=!0,j.string=c(g),j.on=!0,h.style.cssText=t,h.coordsize=u+n+u,h.coordorigin="0 0";var k=new D(h,b),l={fill:"#000",stroke:"none",font:a._availableAttrs.font,text:g};k.shape=h,k.path=i,k.textpath=j,k.type="text",k.attrs.text=c(g),k.attrs.x=d,k.attrs.y=e,k.attrs.w=1,k.attrs.h=1,B(k,l),h.appendChild(j),h.appendChild(i),b.canvas.appendChild(h);var m=F("skew");m.on=!0,h.appendChild(m),k.skew=m,k.transform(o);return k},a._engine.setSize=function(b,c){var d=this.canvas.style;this.width=b,this.height=c,b==+b&&(b+="px"),c==+c&&(c+="px"),d.width=b,d.height=c,d.clip="rect(0 "+b+" "+c+" 0)",this._viewBox&&a._engine.setViewBox.apply(this,this._viewBox);return this},a._engine.setViewBox=function(b,c,d,e,f){a.eve("raphael.setViewBox",this,this._viewBox,[b,c,d,e,f]);var h=this.width,i=this.height,j=1/g(d/h,e/i),k,l;f&&(k=i/e,l=h/d,d*k<h&&(b-=(h-d*k)/2/k),e*l<i&&(c-=(i-e*l)/2/l)),this._viewBox=[b,c,d,e,!!f],this._viewBoxShift={dx:-b,dy:-c,scale:j},this.forEach(function(a){a.transform("...")});return this};var F;a._engine.initWin=function(a){var b=a.document;b.createStyleSheet().addRule(".rvml","behavior:url(#default#VML)");try{!b.namespaces.rvml&&b.namespaces.add("rvml","urn:schemas-microsoft-com:vml"),F=function(a){return b.createElement("<rvml:"+a+' class="rvml">')}}catch(c){F=function(a){return b.createElement("<"+a+' xmlns="urn:schemas-microsoft.com:vml" class="rvml">')}}},a._engine.initWin(a._g.win),a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b.container,d=b.height,e,f=b.width,g=b.x,h=b.y;if(!c)throw new Error("VML container not found.");var i=new a._Paper,j=i.canvas=a._g.doc.createElement("div"),k=j.style;g=g||0,h=h||0,f=f||512,d=d||342,i.width=f,i.height=d,f==+f&&(f+="px"),d==+d&&(d+="px"),i.coordsize=u*1e3+n+u*1e3,i.coordorigin="0 0",i.span=a._g.doc.createElement("span"),i.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;",j.appendChild(i.span),k.cssText=a.format("top:0;left:0;width:{0};height:{1};display:inline-block;position:relative;clip:rect(0 {0} {1} 0);overflow:hidden",f,d),c==1?(a._g.doc.body.appendChild(j),k.left=g+"px",k.top=h+"px",k.position="absolute"):c.firstChild?c.insertBefore(j,c.firstChild):c.appendChild(j),i.renderfix=function(){};return 
i},a.prototype.clear=function(){a.eve("raphael.clear",this),this.canvas.innerHTML=o,this.span=a._g.doc.createElement("span"),this.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;display:inline;",this.canvas.appendChild(this.span),this.bottom=this.top=null},a.prototype.remove=function(){a.eve("raphael.remove",this),this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;return!0};var G=a.st;for(var H in E)E[b](H)&&!G[b](H)&&(G[H]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(H))}(window.Raphael) \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css
deleted file mode 100755
index 7d64b9c5c5..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css
+++ /dev/null
@@ -1,30 +0,0 @@
-body {
- font-size: 10pt;
- font-family: Arial, sans-serif;
-}
-
-a {
- color:#315479;
-}
-
-.letters {
- width:100%;
- text-align:center;
- margin:0.6em;
- padding:0.1em;
- border-bottom:1px solid gray;
-}
-
-.entry {
- border-bottom: 1px solid lightgray;
- padding: 5px 0 8px;
-}
-
-.name {
- /* background-color:#E5E5E5; */
-}
-
-.occurrences {
- margin-left: 1em;
- margin-top: 5px;
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
deleted file mode 100644
index 4625f9df74..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
deleted file mode 100644
index 3764f82ccb..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
deleted file mode 100644
index 4417f5b438..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
+++ /dev/null
@@ -1,71 +0,0 @@
-// © 2010 EPFL/LAMP
-// code by Gilles Dubochet
-
-function Scheduler() {
- var scheduler = this;
- var resolution = 0;
- this.timeout = undefined;
- this.queues = new Array(0); // an array of work-package queues, parallel to the labels array.
- this.labels = new Array(0); // an array of labels, sorted by priority. This should be short.
- this.label = function(name, priority) {
- this.name = name;
- this.priority = priority;
- }
- this.work = function(fn, self, args) {
- this.fn = fn;
- this.self = self;
- this.args = args;
- }
- this.addLabel = function(name, priority) {
- var idx = 0;
- while (idx < scheduler.queues.length && scheduler.labels[idx].priority <= priority) { idx = idx + 1; }
- scheduler.labels.splice(idx, 0, new scheduler.label(name, priority));
- scheduler.queues.splice(idx, 0, new Array(0));
- }
- this.clearLabel = function(name) {
- var idx = 0;
- while (idx < scheduler.queues.length && scheduler.labels[idx].name != name) { idx = idx + 1; }
- if (idx < scheduler.queues.length && scheduler.labels[idx].name == name) {
- scheduler.labels.splice(idx, 1);
- scheduler.queues.splice(idx, 1);
- }
- }
- this.nextWork = function() {
- var fn = undefined;
- var idx = 0;
- while (idx < scheduler.queues.length && scheduler.queues[idx].length == 0) { idx = idx + 1; }
- if (idx < scheduler.queues.length && scheduler.queues[idx].length > 0) {
- var fn = scheduler.queues[idx].shift();
- }
- return fn;
- }
- this.add = function(labelName, fn, self, args) {
- var doWork = function() {
- scheduler.timeout = setTimeout(function() {
- var work = scheduler.nextWork();
- if (work != undefined) {
- if (work.args == undefined) { work.args = new Array(0); }
- work.fn.apply(work.self, work.args);
- doWork();
- }
- else {
- scheduler.timeout = undefined;
- }
- }, resolution);
- }
- var idx = 0;
- while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; }
- if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) {
- scheduler.queues[idx].push(new scheduler.work(fn, self, args));
- if (scheduler.timeout == undefined) doWork();
- }
- else throw("queue for add is non existant");
- }
- this.clear = function(labelName) {
- var idx = 0;
- while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; }
- if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) {
- scheduler.queues[idx] = new Array();
- }
- }
-};
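For reference, a minimal usage sketch of the Scheduler deleted above. The label names and callbacks are hypothetical; the real scaladoc scripts register their own labels elsewhere. Labels with lower priority numbers sit earlier in the queue array and are drained first, and add() lazily starts the setTimeout pump.

// hypothetical usage of the removed Scheduler (not part of the original sources)
var scheduler = new Scheduler();
scheduler.addLabel("init", 1);    // lower number = drained first
scheduler.addLabel("filter", 5);
scheduler.add("init", function (msg) { console.log(msg); }, this, ["ready"]);
scheduler.add("filter", function () { /* runs only after all "init" work */ });
scheduler.clear("filter");        // drop any still-queued "filter" work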
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
deleted file mode 100644
index bc29efb3e6..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
deleted file mode 100644
index 8313f4975b..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png
deleted file mode 100644
index 04eda2f307..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png
deleted file mode 100644
index c89765239e..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
deleted file mode 100644
index bf984ef0ba..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png
deleted file mode 100644
index a790bb1169..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
deleted file mode 100644
index b6ac4415e4..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
deleted file mode 100644
index 9aae5ba0aa..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
deleted file mode 100644
index b066027f04..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ /dev/null
@@ -1,848 +0,0 @@
-/* Reset */
-
-html, body, div, span, object, iframe,
-h1, h2, h3, h4, h5, h6, p, blockquote, pre,
-a, abbr, acronym, address, code, pre,
-del, dfn, em, img, q, dl, dt, dd, ol, ul, li,
-fieldset, form, label, legend, input,
-table, caption, tbody, tfoot, thead, tr, th, td {
- margin: 0;
- padding: 0;
- border: 0;
- font-weight: inherit;
- font-style: inherit;
- font-size: 100%;
- font-family: inherit;
- vertical-align: baseline;
-}
-
-table { border-collapse: separate; border-spacing: 0; }
-caption, th, td { text-align: left; font-weight: normal; }
-table, td, th { vertical-align: middle; }
-
-blockquote:before, blockquote:after, q:before, q:after { content: ""; }
-blockquote, q { quotes: none; }
-
-a img { border: none; }
-
-input { border-width: 0px; }
-
-/* Page */
-
-body {
- font-family: Arial, sans-serif;
- font-size: 10pt;
-}
-
-#footer {
- font-size: 9pt;
- text-align: center;
- color: #858484;
- bottom: 0;
- width: 100%;
- height: 20px;
-}
-
-a[href] {
- text-decoration: underline;
- color: #315479;
-}
-
-a[href]:hover {
- text-decoration: none;
-}
-
-#types ol li > p {
- margin-top: 5px;
-}
-
-#types ol li:last-child {
- margin-bottom: 5px;
-}
-
-/*
-#definition {
- padding: 6px 0 6px 6px;
- min-height: 59px;
- color: white;
-}
-*/
-
-#definition {
- display: inline-block;
- padding: 5px 0px;
- height: 61px;
-}
-
-#definition > img {
- float: left;
- padding-right: 6px;
- padding-left: 5px;
-}
-
-#definition > a > img {
- float: left;
- padding-right: 6px;
- padding-left: 5px;
-}
-
-#definition p + h1 {
- margin-top: 3px;
-}
-
-#definition > h1 {
-/* padding: 12px 0 12px 6px;*/
- color: white;
- text-shadow: 3px black;
- text-shadow: black 0px 2px 0px;
- font-size: 24pt;
- display: inline-block;
- overflow: hidden;
- margin-top: 10px;
-}
-
-#definition h1 > a {
- color: #ffffff;
- font-size: 24pt;
- text-shadow: black 0px 2px 0px;
-/* text-shadow: black 0px 0px 0px;*/
-text-decoration: none;
-}
-
-#definition #owner {
- color: #ffffff;
- margin-top: 4px;
- font-size: 10pt;
- overflow: hidden;
-}
-
-#definition #owner > a {
- color: #ffffff;
-}
-
-#definition #owner > a:hover {
- text-decoration: none;
-}
-
-#signature {
- background-image:url('signaturebg2.gif');
- background-color: #d7d7d7;
- min-height: 18px;
- background-repeat:repeat-x;
- font-size: 11.5pt;
-/* margin-bottom: 10px;*/
- padding: 8px;
-}
-
-#signature > span.modifier_kind {
- display: inline;
- float: left;
- text-align: left;
- width: auto;
- position: static;
- text-shadow: 2px white;
- text-shadow: white 0px 1px 0px;
-}
-
-#signature > span.symbol {
- text-align: left;
- display: inline;
- padding-left: 0.7em;
- text-shadow: 2px white;
- text-shadow: white 0px 1px 0px;
-}
-
-/* Linear super types and known subclasses */
-.hiddenContent {
- display: none;
-}
-
-.toggleContainer .toggle {
- cursor: pointer;
- padding-left: 15px;
- background: url("arrow-right.png") no-repeat 0 3px transparent;
-}
-
-.toggleContainer .toggle.open {
- background: url("arrow-down.png") no-repeat 0 3px transparent;
-}
-
-.toggleContainer .hiddenContent {
- margin-top: 5px;
-}
-
-.value #definition {
- background-color: #2C475C; /* blue */
- background-image:url('defbg-blue.gif');
- background-repeat:repeat-x;
-}
-
-.type #definition {
- background-color: #316555; /* green */
- background-image:url('defbg-green.gif');
- background-repeat:repeat-x;
-}
-
-#template {
- margin-bottom: 50px;
-}
-
-h3 {
- color: white;
- padding: 5px 10px;
- font-size: 12pt;
- font-weight: bold;
- text-shadow: black 1px 1px 0px;
-}
-
-dl.attributes > dt {
- display: block;
- float: left;
- font-style: italic;
-}
-
-dl.attributes > dt.implicit {
- font-weight: bold;
- color: darkgreen;
-}
-
-dl.attributes > dd {
- display: block;
- padding-left: 10em;
- margin-bottom: 5px;
-}
-
-#template .values > h3 {
- background: #2C475C url("valuemembersbg.gif") repeat-x bottom left; /* grayish blue */
- height: 18px;
-}
-
-#values ol li:last-child {
- margin-bottom: 5px;
-}
-
-#template .types > h3 {
- background: #316555 url("typebg.gif") repeat-x bottom left; /* green */
- height: 18px;
-}
-
-#constructors > h3 {
- background: #4f504f url("constructorsbg.gif") repeat-x bottom left; /* gray */
- height: 18px;
-}
-
-#inheritedMembers > div.parent > h3 {
- background: #dadada url("constructorsbg.gif") repeat-x bottom left; /* gray */
- height: 17px;
- font-style: italic;
- font-size: 12pt;
-}
-
-#inheritedMembers > div.parent > h3 * {
- color: white;
-}
-
-#inheritedMembers > div.conversion > h3 {
- background: #dadada url("conversionbg.gif") repeat-x bottom left; /* gray */
- height: 17px;
- font-style: italic;
- font-size: 12pt;
-}
-
-#inheritedMembers > div.conversion > h3 * {
- color: white;
-}
-
-#groupedMembers > div.group > h3 {
- background: #dadada url("typebg.gif") repeat-x bottom left; /* green */
- height: 17px;
- font-size: 12pt;
-}
-
-#groupedMembers > div.group > h3 * {
- color: white;
-}
-
-
-/* Member cells */
-
-div.members > ol {
- background-color: white;
- list-style: none
-}
-
-div.members > ol > li {
- display: block;
- border-bottom: 1px solid gray;
- padding: 5px 0 6px;
- margin: 0 10px;
- position: relative;
-}
-
-div.members > ol > li:last-child {
- border: 0;
- padding: 5px 0 5px;
-}
-
-/* Member signatures */
-
-#tooltip {
- background: #EFD5B5;
- border: 1px solid gray;
- color: black;
- display: none;
- padding: 5px;
- position: absolute;
-}
-
-.signature {
- font-family: monospace;
- font-size: 10pt;
- line-height: 18px;
- clear: both;
- display: block;
- text-shadow: 2px white;
- text-shadow: white 0px 1px 0px;
-}
-
-.signature .modifier_kind {
- position: absolute;
- text-align: right;
- width: 14em;
-}
-
-.signature > a > .symbol > .name {
- text-decoration: underline;
-}
-
-.signature > a:hover > .symbol > .name {
- text-decoration: none;
-}
-
-.signature > a {
- text-decoration: none;
-}
-
-.signature > .symbol {
- display: block;
- padding-left: 14.7em;
-}
-
-.signature .name {
- display: inline-block;
- font-weight: bold;
-}
-
-.signature .symbol > .implicit {
- display: inline-block;
- font-weight: bold;
- text-decoration: underline;
- color: darkgreen;
-}
-
-.signature .symbol .shadowed {
- color: darkseagreen;
-}
-
-.signature .symbol .params > .implicit {
- font-style: italic;
-}
-
-.signature .symbol .deprecated {
- text-decoration: line-through;
-}
-
-.signature .symbol .params .default {
- font-style: italic;
-}
-
-#template .signature.closed {
- background: url("arrow-right.png") no-repeat 0 5px transparent;
- cursor: pointer;
-}
-
-#template .signature.opened {
- background: url("arrow-down.png") no-repeat 0 5px transparent;
- cursor: pointer;
-}
-
-#template .values .signature .name {
- color: darkblue;
-}
-
-#template .types .signature .name {
- color: darkgreen;
-}
-
-.full-signature-usecase h4 span {
- font-size: 10pt;
-}
-
-.full-signature-usecase > #signature {
- padding-top: 0px;
-}
-
-#template .full-signature-usecase > .signature.closed {
- background: none;
-}
-
-#template .full-signature-usecase > .signature.opened {
- background: none;
-}
-
-.full-signature-block {
- padding: 5px 0 0;
- border-top: 1px solid #EBEBEB;
- margin-top: 5px;
- margin-bottom: 5px;
-}
-
-
-/* Comments text formating */
-
-.cmt {}
-
-.cmt p {
- margin: 0.7em 0;
-}
-
-.cmt p:first-child {
- margin-top: 0;
-}
-
-.cmt p:last-child {
- margin-bottom: 0;
-}
-
-.cmt h3,
-.cmt h4,
-.cmt h5,
-.cmt h6 {
- margin-bottom: 0.7em;
- margin-top: 1.4em;
- display: block;
- text-align: left;
- font-weight: bold;
-}
-
-.cmt h3 {
- font-size: 14pt;
-}
-
-.cmt h4 {
- font-size: 13pt;
-}
-
-.cmt h5 {
- font-size: 12pt;
-}
-
-.cmt h6 {
- font-size: 11pt;
-}
-
-.cmt pre {
- padding: 5px;
- border: 1px solid #ddd;
- background-color: #eee;
- margin: 5px 0;
- display: block;
- font-family: monospace;
-}
-
-.cmt pre span.ano {
- color: blue;
-}
-
-.cmt pre span.cmt {
- color: green;
-}
-
-.cmt pre span.kw {
- font-weight: bold;
-}
-
-.cmt pre span.lit {
- color: #c71585;
-}
-
-.cmt pre span.num {
- color: #1e90ff; /* dodgerblue */
-}
-
-.cmt pre span.std {
- color: #008080; /* teal */
-}
-
-.cmt ul {
- display: block;
- list-style: circle;
- padding-left: 20px;
-}
-
-.cmt ol {
- display: block;
- padding-left:20px;
-}
-
-.cmt ol.decimal {
- list-style: decimal;
-}
-
-.cmt ol.lowerAlpha {
- list-style: lower-alpha;
-}
-
-.cmt ol.upperAlpha {
- list-style: upper-alpha;
-}
-
-.cmt ol.lowerRoman {
- list-style: lower-roman;
-}
-
-.cmt ol.upperRoman {
- list-style: upper-roman;
-}
-
-.cmt li {
- display: list-item;
-}
-
-.cmt code {
- font-family: monospace;
-}
-
-.cmt a {
- font-style: bold;
-}
-
-.cmt em, .cmt i {
- font-style: italic;
-}
-
-.cmt strong, .cmt b {
- font-weight: bold;
-}
-
-/* Comments structured layout */
-
-.group > div.comment {
- padding-top: 5px;
- padding-bottom: 5px;
- padding-right: 5px;
- padding-left: 5px;
- border: 1px solid #ddd;
- background-color: #eeeee;
- margin-top:5px;
- margin-bottom:5px;
- margin-right:5px;
- margin-left:5px;
- display: block;
-}
-
-p.comment {
- display: block;
- margin-left: 14.7em;
- margin-top: 5px;
-}
-
-.shortcomment {
- display: block;
- margin: 5px 10px;
-}
-
-div.fullcommenttop {
- padding: 10px 10px;
- background-image:url('fullcommenttopbg.gif');
- background-repeat:repeat-x;
-}
-
-div.fullcomment {
- margin: 5px 10px;
-}
-
-#template div.fullcommenttop,
-#template div.fullcomment {
- display:none;
- margin: 5px 0 0 14.7em;
-}
-
-#template .shortcomment {
- margin: 5px 0 0 14.7em;
- padding: 0;
-}
-
-div.fullcomment .block {
- padding: 5px 0 0;
- border-top: 1px solid #EBEBEB;
- margin-top: 5px;
- overflow: hidden;
-}
-
-div.fullcommenttop .block {
- padding: 5px 0 0;
- border-top: 1px solid #EBEBEB;
- margin-top: 5px;
- margin-bottom: 5px
-}
-
-div.fullcomment div.block ol li p,
-div.fullcomment div.block ol li {
- display:inline
-}
-
-div.fullcomment .block > h5 {
- font-style: italic;
- font-weight: normal;
- display: inline-block;
-}
-
-div.fullcomment .comment {
- margin: 5px 0 10px;
-}
-
-div.fullcommenttop .comment:last-child,
-div.fullcomment .comment:last-child {
- margin-bottom: 0;
-}
-
-div.fullcommenttop dl.paramcmts {
- margin-bottom: 0.8em;
- padding-bottom: 0.8em;
-}
-
-div.fullcommenttop dl.paramcmts > dt,
-div.fullcomment dl.paramcmts > dt {
- display: block;
- float: left;
- font-weight: bold;
- min-width: 70px;
-}
-
-div.fullcommenttop dl.paramcmts > dd,
-div.fullcomment dl.paramcmts > dd {
- display: block;
- padding-left: 10px;
- margin-bottom: 5px;
- margin-left: 70px;
-}
-
-/* Members filter tool */
-
-#textfilter {
- position: relative;
- display: block;
- height: 20px;
- margin-bottom: 5px;
-}
-
-#textfilter > .pre {
- display: block;
- position: absolute;
- top: 0;
- left: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_left.png");
-}
-
-#textfilter > .input {
- display: block;
- position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
-}
-
-#textfilter > .input > input {
- height: 20px;
- padding: 1px;
- font-weight: bold;
- color: #000000;
- background: #ffffff url("filterboxbarbg.png") repeat-x top left;
- width: 100%;
-}
-
-#textfilter > .post {
- display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_right.png");
-}
-
-#mbrsel {
- padding: 5px 10px;
- background-color: #ededee; /* light gray */
- background-image:url('filterboxbg.gif');
- background-repeat:repeat-x;
- font-size: 9.5pt;
- display: block;
- margin-top: 1em;
-/* margin-bottom: 1em; */
-}
-
-#mbrsel > div {
- margin-bottom: 5px;
-}
-
-#mbrsel > div:last-child {
- margin-bottom: 0;
-}
-
-#mbrsel > div > span.filtertype {
- padding: 4px;
- margin-right: 5px;
- float: left;
- display: inline-block;
- color: #000000;
- font-weight: bold;
- text-shadow: white 0px 1px 0px;
- width: 4.5em;
-}
-
-#mbrsel > div > ol {
- display: inline-block;
-}
-
-#mbrsel > div > a {
- position:relative;
- top: -8px;
- font-size: 11px;
- text-shadow: #ffffff 0 1px 0;
-}
-
-#mbrsel > div > ol#linearization {
- display: table;
- margin-left: 70px;
-}
-
-#mbrsel > div > ol#linearization > li.in {
- text-decoration: none;
- float: left;
- padding-right: 10px;
- margin-right: 5px;
- background: url(selected-right.png) no-repeat;
- background-position: right 0px;
-}
-
-#mbrsel > div > ol#linearization > li.in > span{
- color: #404040;
- float: left;
- padding: 1px 0 1px 10px;
- background: url(selected.png) no-repeat;
- background-position: 0px 0px;
- text-shadow: #ffffff 0 1px 0;
-}
-
-#mbrsel > div > ol#implicits {
- display: table;
- margin-left: 70px;
-}
-
-#mbrsel > div > ol#implicits > li.in {
- text-decoration: none;
- float: left;
- padding-right: 10px;
- margin-right: 5px;
- background: url(selected-right-implicits.png) no-repeat;
- background-position: right 0px;
-}
-
-#mbrsel > div > ol#implicits > li.in > span{
- color: #404040;
- float: left;
- padding: 1px 0 1px 10px;
- background: url(selected-implicits.png) no-repeat;
- background-position: 0px 0px;
- text-shadow: #ffffff 0 1px 0;
-}
-
-#mbrsel > div > ol > li {
-/* padding: 3px 10px;*/
- line-height: 16pt;
- display: inline-block;
- cursor: pointer;
-}
-
-#mbrsel > div > ol > li.in {
- text-decoration: none;
- float: left;
- padding-right: 10px;
- margin-right: 5px;
- background: url(selected-right.png) no-repeat;
- background-position: right 0px;
-}
-
-#mbrsel > div > ol > li.in > span{
- color: #404040;
- float: left;
- padding: 1px 0 1px 10px;
- background: url(selected.png) no-repeat;
- background-position: 0px 0px;
- text-shadow: #ffffff 0 1px 0;
-}
-
-#mbrsel > div > ol > li.out {
- text-decoration: none;
- float: left;
- padding-right: 10px;
- margin-right: 5px;
-}
-
-#mbrsel > div > ol > li.out > span{
- color: #747474;
-/* background-color: #999; */
- float: left;
- padding: 1px 0 1px 10px;
-/* background: url(unselected.png) no-repeat;*/
- background-position: 0px -1px;
- text-shadow: #ffffff 0 1px 0;
-}
-/*
-#mbrsel .hideall {
- color: #4C4C4C;
- line-height: 16px;
- font-weight: bold;
-}
-
-#mbrsel .hideall span {
- color: #4C4C4C;
- font-weight: bold;
-}
-
-#mbrsel .showall {
- color: #4C4C4C;
- line-height: 16px;
- font-weight: bold;
-}
-
-#mbrsel .showall span {
- color: #4C4C4C;
- font-weight: bold;
-}*/
-
-.badge {
- display: inline-block;
- padding: 2px 4px;
- font-size: 11.844px;
- font-weight: bold;
- line-height: 14px;
- color: #ffffff;
- text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25);
- white-space: nowrap;
- vertical-align: baseline;
- background-color: #999999;
- padding-right: 9px;
- padding-left: 9px;
- -webkit-border-radius: 9px;
- -moz-border-radius: 9px;
- border-radius: 9px;
-}
-
-.badge-red {
- background-color: #b94a48;
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
deleted file mode 100644
index 6d1caf6d50..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ /dev/null
@@ -1,466 +0,0 @@
-// © 2009–2010 EPFL/LAMP
-// code by Gilles Dubochet with contributions by Pedro Furlanetto
-
-$(document).ready(function(){
-
- // Escapes special characters and returns a valid jQuery selector
- function escapeJquery(str){
- return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=>\|])/g, '\\$1');
- }
-
- // highlight and jump to selected member
- if (window.location.hash) {
- var temp = window.location.hash.replace('#', '');
- var elem = '#'+escapeJquery(temp);
-
- window.scrollTo(0, 0);
- $(elem).parent().effect("highlight", {color: "#FFCC85"}, 3000);
- $('html,body').animate({scrollTop:$(elem).parent().offset().top}, 1000);
- }
-
- var isHiddenClass = function (name) {
- return name == 'scala.Any' ||
- name == 'scala.AnyRef';
- };
-
- var isHidden = function (elem) {
- return $(elem).attr("data-hidden") == 'true';
- };
-
- $("#linearization li:gt(0)").filter(function(){
- return isHiddenClass($(this).attr("name"));
- }).removeClass("in").addClass("out");
-
- $("#implicits li").filter(function(){
- return isHidden(this);
- }).removeClass("in").addClass("out");
-
- // Pre-filter members
- filter();
-
- // Member filter box
- var input = $("#textfilter input");
- input.bind("keyup", function(event) {
-
- switch ( event.keyCode ) {
-
- case 27: // escape key
- input.val("");
- filter(true);
- break;
-
- case 38: // up
- input.val("");
- filter(false);
- window.scrollTo(0, $("body").offset().top);
- input.focus();
- break;
-
- case 33: //page up
- input.val("");
- filter(false);
- break;
-
- case 34: //page down
- input.val("");
- filter(false);
- break;
-
- default:
- window.scrollTo(0, $("#mbrsel").offset().top);
- filter(true);
- break;
-
- }
- });
- input.focus(function(event) {
- input.select();
- });
- $("#textfilter > .post").click(function() {
- $("#textfilter input").attr("value", "");
- filter();
- });
- $(document).keydown(function(event) {
-
- if (event.keyCode == 9) { // tab
- $("#index-input", window.parent.document).focus();
- input.attr("value", "");
- return false;
- }
- });
-
- $("#linearization li").click(function(){
- if ($(this).hasClass("in")) {
- $(this).removeClass("in");
- $(this).addClass("out");
- }
- else if ($(this).hasClass("out")) {
- $(this).removeClass("out");
- $(this).addClass("in");
- };
- filter();
- });
-
- $("#implicits li").click(function(){
- if ($(this).hasClass("in")) {
- $(this).removeClass("in");
- $(this).addClass("out");
- }
- else if ($(this).hasClass("out")) {
- $(this).removeClass("out");
- $(this).addClass("in");
- };
- filter();
- });
-
- $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() {
- $("#linearization li.in").removeClass("in").addClass("out");
- $("#linearization li:first").removeClass("out").addClass("in");
- $("#implicits li.in").removeClass("in").addClass("out");
-
- if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.showall").hasClass("in")) {
- $(this).removeClass("out").addClass("in");
- $("#mbrsel > div[id=ancestors] > ol > li.showall").removeClass("in").addClass("out");
- }
-
- filter();
- })
- $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() {
- var filteredLinearization =
- $("#linearization li.out").filter(function() {
- return ! isHiddenClass($(this).attr("name"));
- });
- filteredLinearization.removeClass("out").addClass("in");
-
- var filteredImplicits =
- $("#implicits li.out").filter(function() {
- return ! isHidden(this);
- });
- filteredImplicits.removeClass("out").addClass("in");
-
- if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.hideall").hasClass("in")) {
- $(this).removeClass("out").addClass("in");
- $("#mbrsel > div[id=ancestors] > ol > li.hideall").removeClass("in").addClass("out");
- }
-
- filter();
- });
- $("#visbl > ol > li.public").click(function() {
- if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("#visbl > ol > li.all").removeClass("in").addClass("out");
- filter();
- };
- })
- $("#visbl > ol > li.all").click(function() {
- if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("#visbl > ol > li.public").removeClass("in").addClass("out");
- filter();
- };
- });
- $("#order > ol > li.alpha").click(function() {
- if ($(this).hasClass("out")) {
- orderAlpha();
- };
- })
- $("#order > ol > li.inherit").click(function() {
- if ($(this).hasClass("out")) {
- orderInherit();
- };
- });
- $("#order > ol > li.group").click(function() {
- if ($(this).hasClass("out")) {
- orderGroup();
- };
- });
- $("#groupedMembers").hide();
-
- initInherit();
-
- // Create tooltips
- $(".extype").add(".defval").tooltip({
- tip: "#tooltip",
- position:"top center",
- predelay: 500,
- onBeforeShow: function(ev) {
- $(this.getTip()).text(this.getTrigger().attr("name"));
- }
- });
-
- /* Add toggle arrows */
- //var docAllSigs = $("#template li").has(".fullcomment").find(".signature");
- // trying to speed things up a little bit
- var docAllSigs = $("#template li[fullComment=yes] .signature");
-
- function commentToggleFct(signature){
- var parent = signature.parent();
- var shortComment = $(".shortcomment", parent);
- var fullComment = $(".fullcomment", parent);
- var vis = $(":visible", fullComment);
- signature.toggleClass("closed").toggleClass("opened");
- if (vis.length > 0) {
- shortComment.slideDown(100);
- fullComment.slideUp(100);
- }
- else {
- shortComment.slideUp(100);
- fullComment.slideDown(100);
- }
- };
- docAllSigs.addClass("closed");
- docAllSigs.click(function() {
- commentToggleFct($(this));
- });
-
- /* Linear super types and known subclasses */
- function toggleShowContentFct(e){
- e.toggleClass("open");
- var content = $(".hiddenContent", e.parent().get(0));
- if (content.is(':visible')) {
- content.slideUp(100);
- }
- else {
- content.slideDown(100);
- }
- };
-
- $(".toggle:not(.diagram-link)").click(function() {
- toggleShowContentFct($(this));
- });
-
- // Set parent window title
- windowTitle();
-
- if ($("#order > ol > li.group").length == 1) { orderGroup(); };
-});
-
-function orderAlpha() {
- $("#order > ol > li.alpha").removeClass("out").addClass("in");
- $("#order > ol > li.inherit").removeClass("in").addClass("out");
- $("#order > ol > li.group").removeClass("in").addClass("out");
- $("#template > div.parent").hide();
- $("#template > div.conversion").hide();
- $("#mbrsel > div[id=ancestors]").show();
- filter();
-};
-
-function orderInherit() {
- $("#order > ol > li.inherit").removeClass("out").addClass("in");
- $("#order > ol > li.alpha").removeClass("in").addClass("out");
- $("#order > ol > li.group").removeClass("in").addClass("out");
- $("#template > div.parent").show();
- $("#template > div.conversion").show();
- $("#mbrsel > div[id=ancestors]").hide();
- filter();
-};
-
-function orderGroup() {
- $("#order > ol > li.group").removeClass("out").addClass("in");
- $("#order > ol > li.alpha").removeClass("in").addClass("out");
- $("#order > ol > li.inherit").removeClass("in").addClass("out");
- $("#template > div.parent").hide();
- $("#template > div.conversion").hide();
- $("#mbrsel > div[id=ancestors]").show();
- filter();
-};
-
-/** Prepares the DOM for inheritance-based display. To do so it will:
- * - hide all statically-generated parents headings;
- * - copy all members from the value and type members lists (flat members) to corresponding lists nested below the
- * parent headings (inheritance-grouped members);
- * - initialises a control variable used by the filter method to control whether filtering happens on flat members
- * or on inheritance-grouped members. */
-function initInherit() {
- // inheritParents is a map from fully-qualified names to the DOM node of parent headings.
- var inheritParents = new Object();
- var groupParents = new Object();
- $("#inheritedMembers > div.parent").each(function(){
- inheritParents[$(this).attr("name")] = $(this);
- });
- $("#inheritedMembers > div.conversion").each(function(){
- inheritParents[$(this).attr("name")] = $(this);
- });
- $("#groupedMembers > div.group").each(function(){
- groupParents[$(this).attr("name")] = $(this);
- });
-
- $("#types > ol > li").each(function(){
- var mbr = $(this);
- this.mbrText = mbr.find("> .fullcomment .cmt").text();
- var qualName = mbr.attr("name");
- var owner = qualName.slice(0, qualName.indexOf("#"));
- var name = qualName.slice(qualName.indexOf("#") + 1);
- var inheritParent = inheritParents[owner];
- if (inheritParent != undefined) {
- var types = $("> .types > ol", inheritParent);
- if (types.length == 0) {
- inheritParent.append("<div class='types members'><h3>Type Members</h3><ol></ol></div>");
- types = $("> .types > ol", inheritParent);
- }
- var clone = mbr.clone();
- clone[0].mbrText = this.mbrText;
- types.append(clone);
- }
- var group = mbr.attr("group")
- var groupParent = groupParents[group];
- if (groupParent != undefined) {
- var types = $("> .types > ol", groupParent);
- if (types.length == 0) {
- groupParent.append("<div class='types members'><ol></ol></div>");
- types = $("> .types > ol", groupParent);
- }
- var clone = mbr.clone();
- clone[0].mbrText = this.mbrText;
- types.append(clone);
- }
- });
-
- $("#values > ol > li").each(function(){
- var mbr = $(this);
- this.mbrText = mbr.find("> .fullcomment .cmt").text();
- var qualName = mbr.attr("name");
- var owner = qualName.slice(0, qualName.indexOf("#"));
- var name = qualName.slice(qualName.indexOf("#") + 1);
- var inheritParent = inheritParents[owner];
- if (inheritParent != undefined) {
- var values = $("> .values > ol", inheritParent);
- if (values.length == 0) {
- inheritParent.append("<div class='values members'><h3>Value Members</h3><ol></ol></div>");
- values = $("> .values > ol", inheritParent);
- }
- var clone = mbr.clone();
- clone[0].mbrText = this.mbrText;
- values.append(clone);
- }
- var group = mbr.attr("group")
- var groupParent = groupParents[group];
- if (groupParent != undefined) {
- var values = $("> .values > ol", groupParent);
- if (values.length == 0) {
- groupParent.append("<div class='values members'><ol></ol></div>");
- values = $("> .values > ol", groupParent);
- }
- var clone = mbr.clone();
- clone[0].mbrText = this.mbrText;
- values.append(clone);
- }
- });
- $("#inheritedMembers > div.parent").each(function() {
- if ($("> div.members", this).length == 0) { $(this).remove(); };
- });
- $("#inheritedMembers > div.conversion").each(function() {
- if ($("> div.members", this).length == 0) { $(this).remove(); };
- });
- $("#groupedMembers > div.group").each(function() {
- if ($("> div.members", this).length == 0) { $(this).remove(); };
- });
-};
-
-/* filter used to take boolean scrollToMember */
-function filter() {
- var query = $.trim($("#textfilter input").val()).toLowerCase();
- query = query.replace(/[-[\]{}()*+?.,\\^$|#]/g, "\\$&").replace(/\s+/g, "|");
- var queryRegExp = new RegExp(query, "i");
- var privateMembersHidden = $("#visbl > ol > li.public").hasClass("in");
- var orderingAlphabetic = $("#order > ol > li.alpha").hasClass("in");
- var orderingInheritance = $("#order > ol > li.inherit").hasClass("in");
- var orderingGroups = $("#order > ol > li.group").hasClass("in");
- var hiddenSuperclassElementsLinearization = orderingInheritance ? $("#linearization > li:gt(0)") : $("#linearization > li.out");
- var hiddenSuperclassesLinearization = hiddenSuperclassElementsLinearization.map(function() {
- return $(this).attr("name");
- }).get();
- var hiddenSuperclassElementsImplicits = orderingInheritance ? $("#implicits > li") : $("#implicits > li.out");
- var hiddenSuperclassesImplicits = hiddenSuperclassElementsImplicits.map(function() {
- return $(this).attr("name");
- }).get();
-
- var hideInheritedMembers;
-
- if (orderingAlphabetic) {
- $("#allMembers").show();
- $("#inheritedMembers").hide();
- $("#groupedMembers").hide();
- hideInheritedMembers = true;
- $("#allMembers > .members").each(filterFunc);
- } else if (orderingGroups) {
- $("#groupedMembers").show();
- $("#inheritedMembers").hide();
- $("#allMembers").hide();
- hideInheritedMembers = true;
- $("#groupedMembers > .group > .members").each(filterFunc);
- $("#groupedMembers > div.group").each(function() {
- $(this).show();
- if ($("> div.members", this).not(":hidden").length == 0) {
- $(this).hide();
- } else {
- $(this).show();
- }
- });
- } else if (orderingInheritance) {
- $("#inheritedMembers").show();
- $("#groupedMembers").hide();
- $("#allMembers").hide();
- hideInheritedMembers = false;
- $("#inheritedMembers > .parent > .members").each(filterFunc);
- $("#inheritedMembers > .conversion > .members").each(filterFunc);
- }
-
-
- function filterFunc() {
- var membersVisible = false;
- var members = $(this);
- members.find("> ol > li").each(function() {
- var mbr = $(this);
- if (privateMembersHidden && mbr.attr("visbl") == "prt") {
- mbr.hide();
- return;
- }
- var name = mbr.attr("name");
- // Owner filtering must not happen in "inherited from" member lists
- if (hideInheritedMembers) {
- var ownerIndex = name.indexOf("#");
- if (ownerIndex < 0) {
- ownerIndex = name.lastIndexOf(".");
- }
- var owner = name.slice(0, ownerIndex);
- for (var i = 0; i < hiddenSuperclassesLinearization.length; i++) {
- if (hiddenSuperclassesLinearization[i] == owner) {
- mbr.hide();
- return;
- }
- };
- for (var i = 0; i < hiddenSuperclassesImplicits.length; i++) {
- if (hiddenSuperclassesImplicits[i] == owner) {
- mbr.hide();
- return;
- }
- };
- }
- if (query && !(queryRegExp.test(name) || queryRegExp.test(this.mbrText))) {
- mbr.hide();
- return;
- }
- mbr.show();
- membersVisible = true;
- });
-
- if (membersVisible)
- members.show();
- else
- members.hide();
- };
-
- return false;
-};
-
-function windowTitle()
-{
- try {
- parent.document.title=document.title;
- }
- catch(e) {
- // Chrome doesn't allow settings the parent's title when
- // used on the local file system.
- }
-};
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
deleted file mode 100644
index 0af34eca4c..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * tools.tooltip 1.1.3 - Tooltips done right.
- *
- * Copyright (c) 2009 Tero Piirainen
- * http://flowplayer.org/tools/tooltip.html
- *
- * Dual licensed under MIT and GPL 2+ licenses
- * http://www.opensource.org/licenses
- *
- * Launch : November 2008
- * Date: ${date}
- * Revision: ${revision}
- */
-(function(c){var d=[];c.tools=c.tools||{};c.tools.tooltip={version:"1.1.3",conf:{effect:"toggle",fadeOutSpeed:"fast",tip:null,predelay:0,delay:30,opacity:1,lazy:undefined,position:["top","center"],offset:[0,0],cancelDefault:true,relative:false,oneInstance:true,events:{def:"mouseover,mouseout",input:"focus,blur",widget:"focus mouseover,blur mouseout",tooltip:"mouseover,mouseout"},api:false},addEffect:function(e,g,f){b[e]=[g,f]}};var b={toggle:[function(e){var f=this.getConf(),g=this.getTip(),h=f.opacity;if(h<1){g.css({opacity:h})}g.show();e.call()},function(e){this.getTip().hide();e.call()}],fade:[function(e){this.getTip().fadeIn(this.getConf().fadeInSpeed,e)},function(e){this.getTip().fadeOut(this.getConf().fadeOutSpeed,e)}]};function a(f,g){var p=this,k=c(this);f.data("tooltip",p);var l=f.next();if(g.tip){l=c(g.tip);if(l.length>1){l=f.nextAll(g.tip).eq(0);if(!l.length){l=f.parent().nextAll(g.tip).eq(0)}}}function o(u){var t=g.relative?f.position().top:f.offset().top,s=g.relative?f.position().left:f.offset().left,v=g.position[0];t-=l.outerHeight()-g.offset[0];s+=f.outerWidth()+g.offset[1];var q=l.outerHeight()+f.outerHeight();if(v=="center"){t+=q/2}if(v=="bottom"){t+=q}v=g.position[1];var r=l.outerWidth()+f.outerWidth();if(v=="center"){s-=r/2}if(v=="left"){s-=r}return{top:t,left:s}}var i=f.is(":input"),e=i&&f.is(":checkbox, :radio, select, :button"),h=f.attr("type"),n=g.events[h]||g.events[i?(e?"widget":"input"):"def"];n=n.split(/,\s*/);if(n.length!=2){throw"Tooltip: bad events configuration for "+h}f.bind(n[0],function(r){if(g.oneInstance){c.each(d,function(){this.hide()})}var q=l.data("trigger");if(q&&q[0]!=this){l.hide().stop(true,true)}r.target=this;p.show(r);n=g.events.tooltip.split(/,\s*/);l.bind(n[0],function(){p.show(r)});if(n[1]){l.bind(n[1],function(){p.hide(r)})}});f.bind(n[1],function(q){p.hide(q)});if(!c.browser.msie&&!i&&!g.predelay){f.mousemove(function(){if(!p.isShown()){f.triggerHandler("mouseover")}})}if(g.opacity<1){l.css("opacity",g.opacity)}var m=0,j=f.attr("title");if(j&&g.cancelDefault){f.removeAttr("title");f.data("title",j)}c.extend(p,{show:function(r){if(r){f=c(r.target)}clearTimeout(l.data("timer"));if(l.is(":animated")||l.is(":visible")){return p}function q(){l.data("trigger",f);var t=o(r);if(g.tip&&j){l.html(f.data("title"))}r=r||c.Event();r.type="onBeforeShow";k.trigger(r,[t]);if(r.isDefaultPrevented()){return p}t=o(r);l.css({position:"absolute",top:t.top,left:t.left});var s=b[g.effect];if(!s){throw'Nonexistent effect "'+g.effect+'"'}s[0].call(p,function(){r.type="onShow";k.trigger(r)})}if(g.predelay){clearTimeout(m);m=setTimeout(q,g.predelay)}else{q()}return p},hide:function(r){clearTimeout(l.data("timer"));clearTimeout(m);if(!l.is(":visible")){return}function q(){r=r||c.Event();r.type="onBeforeHide";k.trigger(r);if(r.isDefaultPrevented()){return}b[g.effect][1].call(p,function(){r.type="onHide";k.trigger(r)})}if(g.delay&&r){l.data("timer",setTimeout(q,g.delay))}else{q()}return p},isShown:function(){return l.is(":visible, :animated")},getConf:function(){return g},getTip:function(){return l},getTrigger:function(){return f},bind:function(q,r){k.bind(q,r);return p},onHide:function(q){return this.bind("onHide",q)},onBeforeShow:function(q){return this.bind("onBeforeShow",q)},onShow:function(q){return this.bind("onShow",q)},onBeforeHide:function(q){return this.bind("onBeforeHide",q)},unbind:function(q){k.unbind(q);return p}});c.each(g,function(q,r){if(c.isFunction(r)){p.bind(q,r)}})}c.prototype.tooltip=function(e){var f=this.eq(typeof 
e=="number"?e:0).data("tooltip");if(f){return f}var g=c.extend(true,{},c.tools.tooltip.conf);if(c.isFunction(e)){e={onBeforeShow:e}}else{if(typeof e=="string"){e={tip:e}}}e=c.extend(true,g,e);if(typeof e.position=="string"){e.position=e.position.split(/,?\s/)}if(e.lazy!==false&&(e.lazy===true||this.length>20)){this.one("mouseover",function(h){f=new a(c(this),e);f.show(h);d.push(f)})}else{this.each(function(){f=new a(c(this),e);d.push(f)})}return e.api?f:this}})(jQuery); \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png
deleted file mode 100644
index fb961a2eda..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png
deleted file mode 100644
index 625d9251cb..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
deleted file mode 100644
index 88983254ce..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
deleted file mode 100644
index d0cd7fd512..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png
deleted file mode 100644
index 6c6e1fe2f5..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png
deleted file mode 100644
index 04c8794e92..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
deleted file mode 100644
index d8152529fd..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
deleted file mode 100644
index 3b5c47c9e3..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
+++ /dev/null
@@ -1,6376 +0,0 @@
-%PDF-1.5 %âãÏÓ
-1 0 obj <</Metadata 1054 0 R/OCProperties<</D<</OFF[15 0 R 27 0 R]/ON[37 0 R 65 0 R 78 0 R 90 0 R 116 0 R 129 0 R 141 0 R 167 0 R 180 0 R 192 0 R 218 0 R 237 0 R 255 0 R 287 0 R 306 0 R 324 0 R 356 0 R 375 0 R 393 0 R 425 0 R 444 0 R 462 0 R 480 0 R 515 0 R 534 0 R 552 0 R 570 0 R 605 0 R 624 0 R 642 0 R 660 0 R 695 0 R 699 0 R 718 0 R 735 0 R 753 0 R 785 0 R 789 0 R 808 0 R 825 0 R 843 0 R 878 0 R 882 0 R 901 0 R 918 0 R 936 0 R 971 0 R 975 0 R 994 0 R 1011 0 R 1029 0 R 1056 0 R 1057 0 R 1058 0 R 1059 0 R 1060 0 R 1138 0 R 1139 0 R 1140 0 R 1141 0 R 1142 0 R 1143 0 R 1223 0 R 1224 0 R 1225 0 R 1226 0 R 1227 0 R 1228 0 R 1308 0 R 1309 0 R 1310 0 R 1311 0 R 1312 0 R 1313 0 R]/Order 1314 0 R/RBGroups[]>>/OCGs[15 0 R 27 0 R 37 0 R 65 0 R 78 0 R 90 0 R 116 0 R 129 0 R 141 0 R 167 0 R 180 0 R 192 0 R 218 0 R 237 0 R 255 0 R 287 0 R 306 0 R 324 0 R 356 0 R 375 0 R 393 0 R 425 0 R 444 0 R 462 0 R 480 0 R 515 0 R 534 0 R 552 0 R 570 0 R 605 0 R 624 0 R 642 0 R 660 0 R 695 0 R 699 0 R 718 0 R 735 0 R 753 0 R 785 0 R 789 0 R 808 0 R 825 0 R 843 0 R 878 0 R 882 0 R 901 0 R 918 0 R 936 0 R 971 0 R 975 0 R 994 0 R 1011 0 R 1029 0 R 1056 0 R 1057 0 R 1058 0 R 1059 0 R 1060 0 R 1138 0 R 1139 0 R 1140 0 R 1141 0 R 1142 0 R 1143 0 R 1223 0 R 1224 0 R 1225 0 R 1226 0 R 1227 0 R 1228 0 R 1308 0 R 1309 0 R 1310 0 R 1311 0 R 1312 0 R 1313 0 R]>>/Pages 2 0 R/Type/Catalog>> endobj 1054 0 obj <</Length 44567/Subtype/XML/Type/Metadata>>stream
-<?xpacket begin="ï»¿" id="W5M0MpCehiHzreSzNTczkc9d"?>
-<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 5.0-c060 61.134777, 2010/02/12-17:32:00 ">
- <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
- <rdf:Description rdf:about=""
- xmlns:dc="http://purl.org/dc/elements/1.1/">
- <dc:format>application/pdf</dc:format>
- <dc:title>
- <rdf:Alt>
- <rdf:li xml:lang="x-default">Print</rdf:li>
- </rdf:Alt>
- </dc:title>
- </rdf:Description>
- <rdf:Description rdf:about=""
- xmlns:xmp="http://ns.adobe.com/xap/1.0/"
- xmlns:xmpGImg="http://ns.adobe.com/xap/1.0/g/img/">
- <xmp:CreatorTool>Adobe Illustrator CS3</xmp:CreatorTool>
- <xmp:CreateDate>2009-11-23T17:10:12+01:00</xmp:CreateDate>
- <xmp:ModifyDate>2011-04-04T19:44:30+02:00</xmp:ModifyDate>
- <xmp:MetadataDate>2011-04-04T19:44:30+02:00</xmp:MetadataDate>
- <xmp:Thumbnails>
- <rdf:Alt>
- <rdf:li rdf:parseType="Resource">
- <xmpGImg:width>256</xmpGImg:width>
- <xmpGImg:height>208</xmpGImg:height>
- <xmpGImg:format>JPEG</xmpGImg:format>
- <xmpGImg:image>/9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA&#xA;AQBIAAAAAQAB/+4ADkFkb2JlAGTAAAAAAf/bAIQABgQEBAUEBgUFBgkGBQYJCwgGBggLDAoKCwoK&#xA;DBAMDAwMDAwQDA4PEA8ODBMTFBQTExwbGxscHx8fHx8fHx8fHwEHBwcNDA0YEBAYGhURFRofHx8f&#xA;Hx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8f/8AAEQgA0AEAAwER&#xA;AAIRAQMRAf/EAaIAAAAHAQEBAQEAAAAAAAAAAAQFAwIGAQAHCAkKCwEAAgIDAQEBAQEAAAAAAAAA&#xA;AQACAwQFBgcICQoLEAACAQMDAgQCBgcDBAIGAnMBAgMRBAAFIRIxQVEGE2EicYEUMpGhBxWxQiPB&#xA;UtHhMxZi8CRygvElQzRTkqKyY3PCNUQnk6OzNhdUZHTD0uIIJoMJChgZhJRFRqS0VtNVKBry4/PE&#xA;1OT0ZXWFlaW1xdXl9WZ2hpamtsbW5vY3R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo+Ck5SVlpeYmZ&#xA;qbnJ2en5KjpKWmp6ipqqusra6voRAAICAQIDBQUEBQYECAMDbQEAAhEDBCESMUEFURNhIgZxgZEy&#xA;obHwFMHR4SNCFVJicvEzJDRDghaSUyWiY7LCB3PSNeJEgxdUkwgJChgZJjZFGidkdFU38qOzwygp&#xA;0+PzhJSktMTU5PRldYWVpbXF1eX1RlZmdoaWprbG1ub2R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo&#xA;+DlJWWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+v/aAAwDAQACEQMRAD8A9U4q7FXlX54yWv13ynbe&#xA;YJ5YPIt1eTJ5h9NiqPIsYezSUp+84F0cmm21eoXMnT8jX1dGrJ0vkgvyquNDj/MTVdO8mS8/KaaZ&#xA;FPqUNu8k1lHqzShR6Mko5fFCrbj7VP8AJFJ5yTAGX1X9jHH9W3J7FmG3uxV2KuxV2KuxV2KuxV2K&#xA;pD5s89eU/KUNvN5h1FLFbpylupWSV3KirFY4ld+K7ValBUeIycMcpcgxlIDmivLfmfQfMumDU9Dv&#xA;FvbIu0ZkUMhV06q6OFdDuDRgNiD0IxnjlE0UxkDuE0yCXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXY&#xA;q7FXYq7FXYq7FVO5tre6t5ba5iSe3mUxzQyKHR0YUZWVqggjqDhBpVPT9N07TbRLLTrWGys4q+nb&#xA;W8axRLyJZuKIFUVJJOJJO5QBSIwJdirsVdirsVdirsVdirsVeb+cdC1a0/MF/OMdvNdaYnl24030&#xA;7FPXuxdCYyxqkPCSvqep8LcSoIPOgpXIxyHDw9eJrkDd+SL/AC10zWH1nzL5r1LTH0VfMMlobTTJ&#xA;uHrrFbQEetNwJ4vK0p5IQGUjfHNIUI3dLAbk97Pcx2x2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2Ku&#xA;xV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2Kux&#xA;V2KuxV2KuxVxIAqemKvMfPn59+V/LN1Pptqjanq0JKPDER6aOOP25Og/aBA+IEfZoa5lYdLKe/Rq&#xA;nlAeb3X/ADkl5xub1JrWwgtbVaVteQk5UNTV2Su422zOj2cKcc6ksw8s/wDOSGmXc4h1/T207lQL&#xA;NC3qxiin7RPEjk1ANqDqWynL2dIDbdsjqQeb2K2ura6t47i2lWaCVQ8UqEMrKdwQR1BzXEU5KpgV&#xA;2KuxV2KuxV2KuxV2KsA88/nX5P8AKcv1WSQ6hqFFb6palXYK4JBZq8V/ZPxEVBqK5kYtNKbXPKA8&#xA;5H/OUOsOV4eXoOIPxk3D1YdqAJ8P45mjs2+rQdV5Mm8rf85GaDfzCDXrR9Kd2/dzKfVgApQBn2at&#xA;e/ED7t6svZ847jdlDUg83rdtcwXMEdxbyLLBKoeORTUMrCoI+jMAinJVMCuxV2KuxV2KuxV2KsOn&#xA;/vpP9Y/rzYDk45WYVdiqf+Xv95ZP9f8AgMxc/NthyTXKGbsVdirsVePf85C/mZd+W9Ii0TSJxHqm&#xA;qKyyypQyQwinJga1RmrQbd6g7ZlabDxGy05Z0HgfkPyNr3m3VPqWlxepIBzuLiQkRRKT9qR6Hqe3&#xA;U5tpZI443JxBEyNB7VZ/84ykW/8ApOvhbgjpFbckU/NpFLfcMx/5VrlH7W38p5sM89/lHr/lOH63&#xA;KyX2lkhTeQgjgTsBKhrxr2NSPeuZ2m1sMu3KTRlwyhv0Tj8j/wAwbnR9ai8tX8/+4i+ZvqxkO0Mx&#xA;BPFSSOKyH/hvmTmL2jpQRxjm26bLvRfR2aNznYq7FXYq7FXYq7FXlv5+fmRL5S8uR2Wntx1jVuSW&#xA;770jjX+8k6UNKgAV6ncEVGZOmxcR8mrLOg8P/Lf8ptc892t7qVtfwRvBNxuGujIXd5BzL8lV6171&#xA;zZT1EcVAhxo4zNlll/zj35wk1a4smlt4rS24A6gxf05CyB6RLx5tx5UJ2Fe+T/lHGIg733Mfy0iV&#xA;vmr8ivM+h6bJqEUkOpW0Cl7hYOSyIo6twYfEoHWhr7ZZh7QxzPCdixnp5RF80T+SP5hXWja1F5bv&#xA;pS+k6g/G1Lkn0Zz0Vd9lkO1Kfa+ZOU9oaUEcY5s9Pl3ovo/NG57sVdirsVdirsVdirDp/wC+k/1j&#xA;+vNgOTjlZhV2Kp/5e/3lk/1/4DMXPzbYcmHa9+Y+raf5i8xWMLWQk0Oze407y/IjnUtXZbE3XqWb&#xA;etHSNJP3bBYJD8DdDTKGbz/zB+dHnC88gag73GnaQ91Zav8AVteikjZZXtbSF4rW1+pX92Le+d7m&#xA;QpynYgRcuFSVRVJL7zN56mv9Zt7XX7mGeS5+qzAz3UskcU2t2NnCJrZZoRYH0p2Fu8BDXEXJyyvR&#xA;gq9X8gah5wP5mebdM1y3u4rCGy019Jjlnimt44EmvLdJVpPNLzuxD6jFxzqpVzshZV4B+fupXd5+&#xA;ampxXD8/qIjtoTSlI+PqqtB4erT8c22lAEQ4eU7vfP8AnHbSbWz/AC0tL2JR6+pzTzTv3PpytAor&#xA;4ARfjmJrZE5K7m7APSxD81fzU81aL+YxstOuTDYaV6HO0AHCcyRrM/qbVNVk4+1KjfM3SaSE8Vkb&#xA;lozZpCe3R7jqen2uq6Xc2FyvO2vImikUj9l1pWh7jNTCRjIEcw5khYp8VNezWF3DeQtxuLSVJomB&#xA;rR42DKQR7jOqzAGNOphsX2zYPI9jbvI3ORokLuQAWYqKmgoN/bOUPN2
wV8CXYq7FXYq7FXYq+QP+&#xA;citYXUfzMnjQMkdlClvxNBV0Zg77E9dhXwAza6WNRcTKbL1H/nFn/lHdZ/5io/8Ak3lXaHMe5lp+&#xA;RTn85PzT1zyjqGn6fpEUPqTxm5nmnUuCvMoqKAVp9k8j8qYdFpI5ATJc+YxIAZ75R19PMXlnT9Y9&#xA;MR/XYQ8kXUK4qrqK9QGBpmHmx8EzHuboS4gC+U/zAtk0LztqkNgfSFletJa8duFG9RAP9XbOixy4&#xA;8QJ6h10hwzNPr2wfnY271J5RI1WqW3UHevfOZPN2YV8CXYq7FXYq7FXYqw6f++k/1j+vNgOTjlZh&#xA;V2Kp/wCXv95ZP9f+AzFz822HJNcoZuxV2KuxV8n/APOTXlk6Z54i1eFKW2qQhpG5cj66Ehq9eIYU&#xA;4/I06ZstLO413OLmjuz3/nGj8wdNn0H/AAjeTrDqFpI8mno5p60MpLsqV6ujliR4H2OV6zEb4gyw&#xA;y2pmXm78mtD8y+a4NfuLmSGnpi+tFRWWf0tl+In4aqAp2Ow7YMOtljhwgJngEpWmv5lee9O8o+XL&#xA;i5lmUalNGyabbVHN5SKBuPXghNWP0dSMq02A5JV06s8uThD5Q8q6VN5h81abpMUfrCedTMlVFYkP&#xA;KQVYhalRQVO5oM3mpy1ElwMULL7XtoFt7eKBSzLEiorMasQopUnxznCbdmqYFdirsVdirsVdir5Z&#xA;/wCcpPL89p5ws9bVALW/txEStT+9hJJLdgWDbD2JzY6SXppxsw3Zp/zinJz8ua37Xcf/ACbyGuNk&#xA;JwDYs2/Mn8p7LztcWV0181hdWimJnEYlDxE8uNCyUINaGvfpkdLqziBFXbLLh42S2kGjeUvLEULz&#xA;C30vSbcK88p/ZQbs3+Ux8O52zHkZZJ31LYAIjyD5H1C4ufPHn+RbdH56zfMyogLOkLMSTReRPpxC&#xA;poO2b6Uhjx13B14HFJ9l2sTQ20MLMGaNFRmAoCVFKgb0zni7JUwK7FXYq7FXYq7FXx5rf/OQfny1&#xA;1m/to47H04LmaNKwuTxRyor+89s28cYoOIZG0H/0MZ+YH++7D/kS/wD1UyXhBHEXf9DGfmB/vuw/&#xA;5Ev/ANVMfCC8Re9/849eeda84eWdSvtWWFZre99CMQIUXj6SNuCzb1bMDVxqQb8RsPVMxW12KuxV&#xA;2Ksc8/8AkfTPOfl2bRr+qBj6lvMAC0cqghXFfCvb5dKjLMeQxNhjKNh8d+b/ACB5u8lX9NQtpUgV&#xA;62+oRhvSajEKeY+y1VO30io3zaY8olycSUCERafm7+YdvbC2j8w3wiAoA0zMwA7Bmqw+/J+Hj7gj&#xA;il3oCzXzX5u1b0rVLrWNTmIDuzNK+54gySOfhFT1Y0yZyxiO4MREkvp78l/ykTydZHU9RYS69eJS&#xA;UgUWKM7+mtQG/wBb9W22r1GoMzXRy8ePheoZitrsVdirsVdirsVdirHPP3kjTfOflybRr8lAx9SC&#xA;YAFo5VB4uKjtXt8ulRlmPIYm2Mo2GFfkD5I13yfbeYNM1aLiTdo1tOv2JYwhXmtd+o7/AKqE26jI&#xA;JUQwxRItjP8AzkB+YHmvyn5z039BajJZibTw0sYCyRsRNIKmOQOhPvTL9JCMoniF7sM0iDs8h1fz&#xA;5+YPne4jsbu8udUkJrHZQoFSo/a9KFUTYftEbZmREMe4FNBMpc3vn5J/k3P5Y5a15gjjfWZkAih2&#xA;cW6mjUUg8eVR8Rp1Hw7Cra/U6nj2HJycWKub2DMNudirsVdirsVdirsVfnh5m/5SPVf+Yy4/5Otm&#xA;7jyDhHmluSQ7FX1Z/wA4jf8AKFaz/wBtI/8AJiPNdrPqHucjDyeo6j5wk0/zrYeX7mzWOwvtPvL9&#xA;dXedVVDYtCJUaIrsoW4VuZcfLMNuYy35u6jN5d13zDY6AraZoF1JHcC7uzbTzWi2lveQzwRCCWst&#xA;wl2PShcr25MrHiFV11+cHop5quFtdONt5ZLRPC+plL0yiSONWubb6swtoGZ2/emRvs/ZPZVUufze&#xA;tLe80HSpH0Y6vrUX1ssNYQaets05hiaC7kgR7mSanwRpBuQwqAORVeiYqpz21vcJ6c8Sypv8LqGG&#xA;4Knr7EjEFWMn8q/y3Jq3lrT2buzW6MSfEkipPzyzxp97HgHcnekaBouj262+l2UNnCteKRIFA5Uq&#xA;B4DbpkZSJ5pAAR+RS7FXYq7FXYq7FXYq7FXYqteJHFGG9COQJDAHrRhQj6MVYx5o/LPyh5o1C0vt&#xA;ZtPrM9mOCcjUMnLlwflU8a16U6nLIZZRFBjKAKZ6J5R8saEGGj6Zb2Afjy9CMJUqCAdu+/XqcjKZ&#xA;PMpEQE2yKXYq7FXYq7FXYq7FXYq/PDzN/wApHqv/ADGXH/J1s3ceQcI80tySHYq+rP8AnEb/AJQr&#xA;Wf8AtpH/AJMR5rtZ9Q9zkYeT13VvKekatrNnql+hnaztL2wFo4R7eWDUfR9dZo2Vue1soArShNQe&#xA;2G3MVuPyQ8qJcSS6LcXHl2OW/j1OS10qHTo7dp4II4IA0M1pOjJCYmljUigld3+1x4qpvL5AaXVp&#xA;9YfzBqZ1UwyWtheUsOVlBPNHPLHAv1TgwcwIv79ZCF6UqcVSib8kvLElo1ql9fwxXVvJaazwa3rq&#xA;EM11LeSJccoG4cp7mVq2/pEcqCgAoq9CxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2K&#xA;uxV2KuxV2KuxV2KvkXWv+cafzQu9Yv7qGC0MNxcSyxk3Kg8XcsKingc2cdVABxjiKC/6Ff8AzW/5&#xA;Z7P/AKSV/ph/NwR4Rd/0K/8Amt/yz2f/AEkr/TH83BfCL3f8gPy+8x+SPLWo6frqRJcXN59YiEMg&#xA;kHD0kTcj3U5h6nIJmw3Y4kDd6hmO2OxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2Kux&#xA;V2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV&#xA;2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2&#xA;KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2K&#xA;uxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2Ku&#xA;xV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxVDHUrEEgzKCNiMHEGzwpdzv0nYf7/XHiC+DLud+k&#xA;7D/f648QXwZdyrDcQzKWicOAaEjxxBYyiRzVMLF2KuxV2K
qdxcQ20DzzuI4YwWdz0AGKCaeQeav+&#xA;cjdJsbyWz8v2Y1Vo2Km7LlIDt1U0JajV6bEbhsBLg5tfGPLdiMH5+fmCzDlHZmIdQIiHI/1uXH/h&#xA;cjxOCe1JBn3lP8+NJ1K5W01u2/Rc0jBY5g3OCpIFGc049SakUphEnLwdpQmaOz1JWVlDKaqdwRkn&#xA;ZN4q7FXYq7FXYq7FXYq7FXYqhr/UtP0+3a4vriO2hQFmeRgoABAJ3+YwgWgyA5pEfzN8gCX0/wBO&#xA;2paoFVeq7/5QHH8cn4Uu5q/MY+8J1pus6TqcQm067iuojUB4nDA8aA9PCoyBiRzbIzEuRRmBk7FX&#xA;Yq7FXYq7FXYqxCf++k/1j+vKC7WPJZiydiqfeX/95pP9f+AyyHJwdV9QTTJuM7FXYq7FXzr/AM5E&#xA;/mHcz6n/AIP0+Qx28IDamylgXZqFYiCF/wBY9ailD1rEl1utzfwhg/kLyHrfmi7NtpkIKR0NxcyE&#xA;rFED05MAetNgBXBTq4YZ5ZVF69af848IsI+s63SWm6x2/wAIPzaSp+4YeFyx2P3y+x5z5x8sxaBr&#xA;1zo31kXbWwTlME9PeRA4HHk/Zh3yJdTqcPhZDG7p6J+R/ni6eZvK2oTc0iQvp0jncKCAYuRI2X9k&#xA;UP0AZKJdz2ZqjIcBezZJ3DsVdirsVdirsVdirsVYl+ZPn6x8naE125WS/mPp2dqGHN2IPxUo3wim&#xA;5O3zNFNmOHEWnNlEA+c3vfNXnzzBH9clN1e3clLe2DCOFCRQBFY8VoopU7mm5OZ8YCIt0+TJLJKu&#xA;pZte/k1qmj+X7nVtRuYENsqt9Wi5SMeTBaMxCgfa7VxhqIykAAjLopxgZSPJjOny3+mXK3mmXD21&#xA;whBDodjxNQGU/CwqOhGZMsYkN3Bx55QNgvdfy58/p5ltHt7wpDq9vT1YgacwannGD22+Y79idVnw&#xA;8B8nodJqhlHmzTKHMdirsVdirsVdirEJ/wC+k/1j+vKC7WPJZiydiqfeX/8AeaT/AF/4DLIcnB1X&#xA;1B5r5t8m+c9Y/MTXrzTAHi/R2mWdu18/1SEWk8l0dQisr2O0upopmEUatwP2ZKtuIuM3GS3yTqP5&#xA;m6f5Ut9Pntb+x1bS/LtsnlrQ47NpLC7ki0oEG/u5LTlBcLcKVMDTRUYBfjryKqL8k+Y/zPutXtIf&#xA;NFzqNtprFW0+a00ydjczmQB7XU5JtKt2t40FKSpBEpVv7xipOKp/+T+sfmRqSam/nRTFKogKWskE&#xA;0LQXBMnrxxu1lZQyQiicPTknpvWVuQoq+ZfOFxNP551ySZ2kcX1wnNyWbjHIUWpO5oqjIF0OfmX1&#xA;R+S+kWun/l5pjwqPVvVa6uJAN2d2IFf9VAFyQdnooCOMebAPzL/NXzZaearvS9JufqFpYMIvgRGe&#xA;RqAszM6t3OwGRJdbrNdkGQxiaAeb3+sX+q6jPqF/L613cENNLQLyIAHRQANh2GB1OWZmeI8yivKe&#xA;qy6b5x0m9gAMsU2ynoQylSD9BxDkaGRjkBfWuWPVuxV2KuxV2KuxV2KuxV8rfnpr1xqn5i3Fk7Vt&#xA;9KVbeFFYsvJlDu1D0Y1CtT+XM3BGg6nVzuXuZf8AkDo2h3N3eXl8im+s2t205mkZCGcS8+KhlD/Z&#xA;HUHJamRAAHJjoYxlIk8xVfa9u1ix02+02e01MK1jKAJwzmMUDAirKVI3A75hQkQbHN2uSMZRIlye&#xA;CefbHRdM8zPZ6OqrYiKNlCSNKOTA8viZnP45ttPOUo+rm83rcUIzqHJLfKurzaR5t0y8hbiHmSCY&#xA;FiqmOVgp5EdgSG+jHURuJXRZDHIH0zmnendirsVdirsVdirEJ/76T/WP68oLtY8lmLJ2Kp95f/3m&#xA;k/1/4DLIcnB1X1BNMm4zsVdirsVfGv5weXJvL35g6jGwYwXchu4JGIJYS/E5bjsDzqaeFNhkC6bU&#xA;46kXuH/OP3nzTtT8sQ+XZ5lTVdN5LFExo0sBYurJXrwrxIHQAZIOXosoMeE8wyPzd+UflnzLqR1K&#xA;d57S9cATvbsvGTiKAsrq3xUAFRiQuo0EMsuI2C8G/MLQ7Tyx5rvdItWdra3ERheUguVeJXqxAUdS&#xA;e2QIdHqtPwZDEckX+TOiz675+tZxGJLPSwbi5LAlNwVRSQDuSaivWmEOX2fhud9z6lybv3Yq7FXY&#xA;q7FXYq7FXYq+QvzgtpLT8zta514yzLKjEUqrop2r4Go+jM7EfSHUakesp1+U06v5w0da7+uD/wAK&#xA;cvyH92XEwD99H3vfPzNUnyJq4AqfTQ7eAlUnMDT/AFh3Gt/upPnBCFNTm4eZKppxa78waZaxk8pb&#xA;qFagFqD1BVqDwG+U5pekuTpoXMe99ZZp3p3Yq7FXYq7FXYq+DfMPnrztHr+pxx+YdTSNLudURby4&#xA;AAEjAAAP0GYpL0cIDhGyX/4+89f9THqn/Sbcf814LZcA7nf4+89f9THqn/Sbcf8ANeNrwDufTP8A&#xA;zizrOsar5Q1abVL64v5k1Aokl1K8zKvoRniC5YgVOXYuTqe0ABIe57TlrgOxV2KuxVhP5p/lrY+d&#xA;9FELMYdStAz2FwADRj1U1ps1PEfgKAhpz4RMeb5S1/yt5q8o6iYdStpLZ4XAjukr6bNQMpSQd+JD&#xA;UNGHcDIuoyYjE7o+D81fPsUaxrr9/wAVFBWeQ7fMknFHiZP5xV9B8v8Anrz7qfK2S4vXkYLcandM&#xA;7RoNh8czciaV6Cp9saWGCWQ976k/L3yBpXkzRUsbX99dv8V5esAHkkNK+PFdtl/Wd8kA7jDhEBQZ&#xA;ThbnYq7FXYq7FXYq7FXYq8b/AOcgfy6uNYtIvMemI0t/ZKIp7dQzNJDUn4dyKoei0FanqxAy/DOt&#xA;nD1WK9w8J8q+ZJtE1qx1JBzaznjm9M7BgjAlfpG2ZZ3FOtG0ge59X6X+Z35favp6zjWbOFJVpJbX&#xA;kscEi1G6skpWvhtUZgnHIHk7iOeEhzDwb81td0WXzhdto8sEtgEhWN7biYqiNQ3Hh8PXwzPwyIju&#xA;6fVQichMeTLfyI8j3dzfjzXqKNFDCKabGwYFy4IMtRx2psOtQTUUKk4+oy3s5ei09eovd8xHZuxV&#xA;2KuxV2KuxV+ePmX/AJSPVf8AmMuP+TrZiHm9LD6QluBk7FX1X/ziN/yhWs/9tI/8mI8vxcnUdo/W&#xA;Pcyfzh501vyv5rv77VJNQXQYrOaTRLO2XT3sry4tNPmvJ4Z24SahHJxiLJQqnw9d6G116Rr5685D&#xA;ytLPPqV5Z+ZbK/0KfVLO5tdO9A2esXkdsIrQ27XIELgyUMkjTDjvxqMVTtfO/mXU/wA1p/LURvNI&#xA;0uTTtSgsTNpk/wAV5Zy2gF+LiWIQvFSeRECuU
2UtvJGuKsek/NfzdH5M8gXZF2bjU10e81/WItNn&#xA;uYZorq9gtpbWM28EkMc0qyO1Nm2CoC7rRV7hiqjdWVndoUuoEmQqUIdQ3wtTkN/GmKCAWPD8sfy+&#xA;Dcx5fshJ/v0RASV8eY+KvvXGmvwYdzJIbeCBeEMaxJt8KKFGwoNh7DFtpfirsVdirsVdirsVdirs&#xA;VdirTojoyOoZGBVlYVBB2IIOKvNfPH5E+V/MlzNqFsW03Upas8kIHpu5pu6Up4kkbknrlsMxDjZN&#xA;NGW/Vgp/5xf1kS0TX4DD/OYHDU/1eRH/AA2W/mPJo/JHvZZ5R/5x58uaTPHd6vcNq1zE1VjdQtvU&#xA;EFSY969CCGJG+QlnJ5NuPSRHPd6vHFHEgSNQiCpCqKCpNT08TlDl0uxV2KuxV2KuxV2Kvzx8y/8A&#xA;KR6r/wAxlx/ydbMQ83pYfSEtwMnYq+q/+cRv+UK1n/tpH/kxHl+Lk6jtH6x7nsg8teXBq8utDSrM&#xA;axcJ6U+pC3i+syR8QvB5uPqMvFQKE9MtdepWPlDynYWUljY6JYWljLMlzLawWsMcTTxOskcrIqhS&#xA;6OisrUqCAe2Kpi1patdR3jQxtdwxvDFcFQZEjlKNIivTkFdokLAdeI8BiqjHo+kR6fBp0djbpp9q&#xA;Yja2axIIYjbuskBjjA4r6TorJQfCQCOmKovFXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7F&#xA;XYq7FXYq7FXYq7FXYq+eNT/5xH+vald3v+K/T+tTSTen+j+XH1GLUr9ZFaV8MqOLzdlHtGhXD9v7&#xA;EN/0J3/393/cu/7OsHhebL+Uv6P2/sd/0J3/AN/d/wBy7/s6x8LzX+Uv6P2/serflH+V/wDyrzRb&#xA;zTP0n+lPrdz9Z9b0Pq/H92qcePqTV+xWtcsjGnD1GfxDdUzrJOO7FXYq7FXYq7FXYq7FXYq7FXYq&#xA;7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7&#xA;FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXhPnm&#xA;HT9e/MDzVY+ZdTf6toltZyeXtEku0sYJmlg5TSBnHFnR2O/U1pWg2iXXZwJSkD05C6Zp+RfmDXNd&#xA;8iJdau0kzxXMsFpdzcjJPAgUh2difUIdnTkP5aHcHCG7RzlKHqehYXLdirsVdirsVdirsVdirsVd&#xA;irHfOHnax8tCxga2mv8AVNVkaDTNPtwOUsigfadiFRAzKGbcitaHfLMePi8gGjNnEKFWTyW+U/O1&#xA;pr011YTW0mm61Yn/AEzTZypdRWnONl2kSu3Knh4issuEwo84nqw0+qGQmPKQ5hkmUuU7FXYq7FXY&#xA;q7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FUt1Xyx5a1eVJdW0my1CWNeEcl3bxTsq1rRTIrECuLGU&#xA;InmEdb29vbW8VtbRJDbwoscMMahEREFFVVFAAAKADFIFKmKXYq7FXYq7FXYq7FXYq7FXYqwf8yfJ&#xA;uta3d6DrOiSxDUvL1y1zHbTs0aTIxR2TmoahJhC77UJqcvw5AAQeri6nDKRjKPOKn5F8k6rZeZNV&#xA;82ayfq99qi+nDpaTG5W3ico7iSYqvNuaAKF+FR3NdpZs1xERyDDTabhmch2MunNnmYzmuxV2KuxV&#xA;2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV//2Q==</xmpGImg:image>
- </rdf:li>
- </rdf:Alt>
- </xmp:Thumbnails>
- </rdf:Description>
- <rdf:Description rdf:about=""
- xmlns:xmpMM="http://ns.adobe.com/xap/1.0/mm/"
- xmlns:stRef="http://ns.adobe.com/xap/1.0/sType/ResourceRef#">
- <xmpMM:DocumentID>uuid:89B13A64E5D9DE11BB37992E5642CB24</xmpMM:DocumentID>
- <xmpMM:InstanceID>uuid:c9fc39ea-7338-234e-90fd-9c707322e008</xmpMM:InstanceID>
- <xmpMM:RenditionClass>proof:pdf</xmpMM:RenditionClass>
- <xmpMM:DerivedFrom rdf:parseType="Resource">
- <stRef:instanceID>uuid:1052650b-0efc-4cb2-a32e-387095575b05</stRef:instanceID>
- <stRef:documentID>uuid:6120892493BFDB11914A8590D31508C8</stRef:documentID>
- </xmpMM:DerivedFrom>
- </rdf:Description>
- <rdf:Description rdf:about=""
- xmlns:illustrator="http://ns.adobe.com/illustrator/1.0/">
- <illustrator:Type>Document</illustrator:Type>
- <illustrator:StartupProfile>Print</illustrator:StartupProfile>
- </rdf:Description>
- <rdf:Description rdf:about=""
- xmlns:xmpTPg="http://ns.adobe.com/xap/1.0/t/pg/"
- xmlns:stDim="http://ns.adobe.com/xap/1.0/sType/Dimensions#"
- xmlns:stFnt="http://ns.adobe.com/xap/1.0/sType/Font#"
- xmlns:xmpG="http://ns.adobe.com/xap/1.0/g/">
- <xmpTPg:NPages>1</xmpTPg:NPages>
- <xmpTPg:HasVisibleTransparency>False</xmpTPg:HasVisibleTransparency>
- <xmpTPg:HasVisibleOverprint>False</xmpTPg:HasVisibleOverprint>
- <xmpTPg:MaxPageSize rdf:parseType="Resource">
- <stDim:w>841.889648</stDim:w>
- <stDim:h>595.275391</stDim:h>
- <stDim:unit>Pixels</stDim:unit>
- </xmpTPg:MaxPageSize>
- <xmpTPg:Fonts>
- <rdf:Bag>
- <rdf:li rdf:parseType="Resource">
- <stFnt:fontName>MyriadPro-Regular</stFnt:fontName>
- <stFnt:fontFamily>Myriad Pro</stFnt:fontFamily>
- <stFnt:fontFace>Regular</stFnt:fontFace>
- <stFnt:fontType>Open Type</stFnt:fontType>
- <stFnt:versionString>Version 2.062;PS 2.000;hotconv 1.0.57;makeotf.lib2.0.21895</stFnt:versionString>
- <stFnt:composite>False</stFnt:composite>
- <stFnt:fontFileName>MyriadPro-Regular.otf</stFnt:fontFileName>
- </rdf:li>
- </rdf:Bag>
- </xmpTPg:Fonts>
- <xmpTPg:PlateNames>
- <rdf:Seq>
- <rdf:li>Cyan</rdf:li>
- <rdf:li>Magenta</rdf:li>
- <rdf:li>Yellow</rdf:li>
- <rdf:li>Black</rdf:li>
- </rdf:Seq>
- </xmpTPg:PlateNames>
- <xmpTPg:SwatchGroups>
- <rdf:Seq>
- <rdf:li rdf:parseType="Resource">
- <xmpG:groupName>Default Swatch Group</xmpG:groupName>
- <xmpG:groupType>0</xmpG:groupType>
- <xmpG:Colorants>
- <rdf:Seq>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>White</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>255</xmpG:red>
- <xmpG:green>255</xmpG:green>
- <xmpG:blue>255</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>Black</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>35</xmpG:red>
- <xmpG:green>31</xmpG:green>
- <xmpG:blue>32</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>CMYK Red</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>236</xmpG:red>
- <xmpG:green>28</xmpG:green>
- <xmpG:blue>36</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>CMYK Yellow</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>255</xmpG:red>
- <xmpG:green>241</xmpG:green>
- <xmpG:blue>0</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>CMYK Green</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>0</xmpG:red>
- <xmpG:green>165</xmpG:green>
- <xmpG:blue>81</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>CMYK Cyan</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>0</xmpG:red>
- <xmpG:green>173</xmpG:green>
- <xmpG:blue>238</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>CMYK Blue</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>46</xmpG:red>
- <xmpG:green>49</xmpG:green>
- <xmpG:blue>145</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>CMYK Magenta</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>235</xmpG:red>
- <xmpG:green>0</xmpG:green>
- <xmpG:blue>139</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=16 M=98 Y=92 K=7</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>194</xmpG:red>
- <xmpG:green>39</xmpG:green>
- <xmpG:blue>45</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=0 M=99 Y=97 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>236</xmpG:red>
- <xmpG:green>32</xmpG:green>
- <xmpG:blue>39</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=0 M=79 Y=96 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>240</xmpG:red>
- <xmpG:green>92</xmpG:green>
- <xmpG:blue>39</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=0 M=50 Y=98 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>246</xmpG:red>
- <xmpG:green>146</xmpG:green>
- <xmpG:blue>33</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=0 M=35 Y=87 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>250</xmpG:red>
- <xmpG:green>175</xmpG:green>
- <xmpG:blue>59</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=5 M=0 Y=93 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>249</xmpG:red>
- <xmpG:green>236</xmpG:green>
- <xmpG:blue>35</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=19 M=0 Y=98 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>216</xmpG:red>
- <xmpG:green>223</xmpG:green>
- <xmpG:blue>39</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=50 M=0 Y=99 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>139</xmpG:red>
- <xmpG:green>197</xmpG:green>
- <xmpG:blue>64</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=74 M=0 Y=99 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>61</xmpG:red>
- <xmpG:green>180</xmpG:green>
- <xmpG:blue>74</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=86 M=12 Y=100 K=9</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>0</xmpG:red>
- <xmpG:green>146</xmpG:green>
- <xmpG:blue>69</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=88 M=28 Y=95 K=32</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>0</xmpG:red>
- <xmpG:green>104</xmpG:green>
- <xmpG:blue>55</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=76 M=0 Y=75 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>34</xmpG:red>
- <xmpG:green>180</xmpG:green>
- <xmpG:blue>115</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=78 M=9 Y=46 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>3</xmpG:red>
- <xmpG:green>168</xmpG:green>
- <xmpG:blue>156</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=70 M=15 Y=0 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>37</xmpG:red>
- <xmpG:green>169</xmpG:green>
- <xmpG:blue>224</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=87 M=52 Y=0 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>16</xmpG:red>
- <xmpG:green>114</xmpG:green>
- <xmpG:blue>185</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=99 M=96 Y=4 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>46</xmpG:red>
- <xmpG:green>55</xmpG:green>
- <xmpG:blue>143</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=100 M=100 Y=26 K=25</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>38</xmpG:red>
- <xmpG:green>34</xmpG:green>
- <xmpG:blue>97</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=74 M=98 Y=1 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>103</xmpG:red>
- <xmpG:green>48</xmpG:green>
- <xmpG:blue>144</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=49 M=99 Y=1 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>146</xmpG:red>
- <xmpG:green>41</xmpG:green>
- <xmpG:blue>141</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=34 M=100 Y=37 K=11</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>157</xmpG:red>
- <xmpG:green>30</xmpG:green>
- <xmpG:blue>96</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=12 M=100 Y=49 K=1</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>211</xmpG:red>
- <xmpG:green>28</xmpG:green>
- <xmpG:blue>92</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=0 M=96 Y=20 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>236</xmpG:red>
- <xmpG:green>37</xmpG:green>
- <xmpG:blue>122</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=23 M=27 Y=40 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>198</xmpG:red>
- <xmpG:green>178</xmpG:green>
- <xmpG:blue>152</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=40 M=43 Y=52 K=7</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>152</xmpG:red>
- <xmpG:green>133</xmpG:green>
- <xmpG:blue>118</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=50 M=53 Y=61 K=23</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>117</xmpG:red>
- <xmpG:green>101</xmpG:green>
- <xmpG:blue>88</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=57 M=60 Y=64 K=42</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>85</xmpG:red>
- <xmpG:green>72</xmpG:green>
- <xmpG:blue>65</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=23 M=38 Y=63 K=1</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>197</xmpG:red>
- <xmpG:green>156</xmpG:green>
- <xmpG:blue>110</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=32 M=49 Y=74 K=10</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>165</xmpG:red>
- <xmpG:green>124</xmpG:green>
- <xmpG:blue>82</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=36 M=57 Y=84 K=23</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>139</xmpG:red>
- <xmpG:green>99</xmpG:green>
- <xmpG:blue>57</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=39 M=64 Y=93 K=36</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>117</xmpG:red>
- <xmpG:green>77</xmpG:green>
- <xmpG:blue>36</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=41 M=70 Y=96 K=49</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>97</xmpG:red>
- <xmpG:green>57</xmpG:green>
- <xmpG:blue>23</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=47 M=73 Y=83 K=68</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>65</xmpG:red>
- <xmpG:green>35</xmpG:green>
- <xmpG:blue>18</xmpG:blue>
- </rdf:li>
- </rdf:Seq>
- </xmpG:Colorants>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:groupName>Print Color Group</xmpG:groupName>
- <xmpG:groupType>1</xmpG:groupType>
- <xmpG:Colorants>
- <rdf:Seq>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=2 M=28 Y=72 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>246</xmpG:red>
- <xmpG:green>187</xmpG:green>
- <xmpG:blue>96</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=5 M=70 Y=90 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>231</xmpG:red>
- <xmpG:green>110</xmpG:green>
- <xmpG:blue>52</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=4 M=92 Y=77 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>229</xmpG:red>
- <xmpG:green>59</xmpG:green>
- <xmpG:blue>65</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=29 M=2 Y=92 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>191</xmpG:red>
- <xmpG:green>210</xmpG:green>
- <xmpG:blue>65</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=62 M=4 Y=93 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>109</xmpG:red>
- <xmpG:green>182</xmpG:green>
- <xmpG:blue>78</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=30 M=2 Y=7 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>174</xmpG:red>
- <xmpG:green>218</xmpG:green>
- <xmpG:blue>230</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=60 M=8 Y=5 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>85</xmpG:red>
- <xmpG:green>185</xmpG:green>
- <xmpG:blue>223</xmpG:blue>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>C=78 M=4 Y=11 K=0</xmpG:swatchName>
- <xmpG:mode>RGB</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:red>0</xmpG:red>
- <xmpG:green>178</xmpG:green>
- <xmpG:blue>215</xmpG:blue>
- </rdf:li>
- </rdf:Seq>
- </xmpG:Colorants>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:groupName>Grayscale</xmpG:groupName>
- <xmpG:groupType>1</xmpG:groupType>
- <xmpG:Colorants>
- <rdf:Seq>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=100</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>255</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=90</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>229</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=80</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>203</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=70</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>178</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=60</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>152</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=50</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>127</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=40</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>101</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=30</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>76</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=20</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>50</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=10</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>25</xmpG:gray>
- </rdf:li>
- <rdf:li rdf:parseType="Resource">
- <xmpG:swatchName>K=5</xmpG:swatchName>
- <xmpG:mode>GRAY</xmpG:mode>
- <xmpG:type>PROCESS</xmpG:type>
- <xmpG:gray>12</xmpG:gray>
- </rdf:li>
- </rdf:Seq>
- </xmpG:Colorants>
- </rdf:li>
- </rdf:Seq>
- </xmpTPg:SwatchGroups>
- </rdf:Description>
- </rdf:RDF>
-</x:xmpmeta>
-
-<?xpacket end="w"?> endstream endobj 2 0 obj <</Count 1/Kids[5 0 R]/Type/Pages>> endobj 5 0 obj <</ArtBox[154.5 205.274 523.59 501.023]/BleedBox[0.0 0.0 841.89 595.275]/Contents 1315 0 R/CropBox[0.0 0.0 841.89 595.275]/LastModified(D:20110404194430+02'00')/MediaBox[0.0 0.0 841.89 595.275]/Parent 2 0 R/PieceInfo<</Illustrator 1316 0 R>>/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>/Font<</T1_0 1307 0 R>>/ProcSet[/PDF/Text]/Properties<</MC0 1308 0 R/MC1 1309 0 R/MC2 1310 0 R/MC3 1311 0 R/MC4 1312 0 R/MC5 1313 0 R>>/XObject<</Fm0 1319 0 R/Fm1 1320 0 R/Fm2 1321 0 R/Fm3 1322 0 R/Fm4 1323 0 R/Fm5 1324 0 R/Fm6 1325 0 R/Fm7 1326 0 R>>>>/Thumb 1327 0 R/TrimBox[0.0 0.0 841.89 595.275]/Type/Page>> endobj 1315 0 obj <</Filter/FlateDecode/Length 1961>>stream
-[binary stream data] endstream endobj 1328 0 obj [/Indexed/DeviceRGB 255 1329 0 R] endobj 1329 0 obj <</Filter[/ASCII85Decode/FlateDecode]/Length 428>>stream
-[ASCII85-encoded data] endstream endobj 1319 0 obj <</BBox[155.001 324.272 210.999 268.274]/Group 1330 0 R/Length 283/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1331 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.173 0.522 0.345 scn
-/GS0 gs
-q 1 0 0 1 183 308.2725 cm
-0 0 m
-6.607 0.011 12.01 -5.392 11.999 -11.999 c
-12.01 -18.606 6.607 -24.009 0 -23.998 c
--6.607 -24.009 -12.01 -18.606 -11.999 -11.999 c
--12.01 -5.392 -6.607 0.011 0 0 c
-f
-Q
-q
-0 g
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
- endstream endobj 1320 0 obj <</BBox[180.624 223.274 192.621 211.277]/Group 1332 0 R/Length 296/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1333 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.173 0.522 0.345 scn
-/GS0 gs
-q 1 0 0 1 186.627 218.2744 cm
-0 0 m
-0.544 0.003 0.995 -0.445 0.994 -1.003 c
-0.997 -1.549 0.546 -1.999 0 -1.997 c
--0.55 -2 -1.007 -1.543 -1.003 -1.003 c
--1.005 -0.451 -0.549 0.003 0 0 c
-f
-Q
-q
-0 g
-1 w 4 M 0 j 0 J []0 d
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
- endstream endobj 1321 0 obj <</BBox[306.003 319.277 362.001 263.279]/Group 1334 0 R/Length 287/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1335 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.243 0.533 0.643 scn
-/GS0 gs
-q 1 0 0 1 334.002 303.2773 cm
-0 0 m
-6.607 0.011 12.01 -5.392 11.999 -11.999 c
-12.008 -18.614 6.61 -24.008 0 -23.998 c
--6.61 -24.008 -12.008 -18.614 -11.999 -11.999 c
--12.01 -5.392 -6.607 0.011 0 0 c
-f
-Q
-q
-0 g
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
- endstream endobj 1322 0 obj <</BBox[321.996 217.271 334.002 205.274]/Group 1336 0 R/Length 298/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1337 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.212 0.624 0.78 scn
-/GS0 gs
-q 1 0 0 1 327.999 212.2715 cm
-0 0 m
-0.55 0.003 1.007 -0.454 1.003 -0.994 c
-1.008 -1.537 0.543 -2.002 0 -1.997 c
--0.543 -2.002 -1.008 -1.537 -1.003 -0.994 c
--1.007 -0.454 -0.55 0.003 0 0 c
-f
-Q
-q
-0 g
-1 w 4 M 0 j 0 J []0 d
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
- endstream endobj 1323 0 obj <</BBox[295.671 461.774 351.669 405.776]/Group 1338 0 R/Length 290/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1339 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.196 0.322 0.616 scn
-/GS0 gs
-q 1 0 0 1 323.6699 445.7744 cm
-0 0 m
-6.607 0.011 12.01 -5.392 11.999 -11.999 c
-12.01 -18.606 6.607 -24.009 0 -23.998 c
--6.615 -24.007 -12.009 -18.609 -11.999 -11.999 c
--12.009 -5.389 -6.615 0.009 0 0 c
-f
-Q
-q
-0 g
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
- endstream endobj 1324 0 obj <</BBox[309.171 492.275 321.168 480.278]/Group 1340 0 R/Length 292/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1341 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.196 0.322 0.616 scn
-/GS0 gs
-q 1 0 0 1 315.165 487.2754 cm
-0 0 m
-0.548 0.003 1.005 -0.451 1.003 -1.003 c
-1.007 -1.542 0.55 -2 0 -1.997 c
--0.546 -2 -0.997 -1.549 -0.994 -1.003 c
--0.995 -0.445 -0.544 0.003 0 0 c
-f
-Q
-q
-0 g
-1 w 4 M 0 j 0 J []0 d
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
- endstream endobj 1325 0 obj <</BBox[156.837 462.278 212.835 406.271]/Group 1342 0 R/Length 290/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1343 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.196 0.322 0.616 scn
-/GS0 gs
-q 1 0 0 1 184.8359 446.2783 cm
-0 0 m
-6.607 0.011 12.01 -5.392 11.999 -11.999 c
-12.008 -18.617 6.606 -24.018 0 -24.007 c
--6.606 -24.018 -12.008 -18.617 -11.999 -11.999 c
--12.01 -5.392 -6.607 0.011 0 0 c
-f
-Q
-q
-0 g
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
- endstream endobj 1326 0 obj <</BBox[183.873 501.023 195.879 489.026]/Group 1344 0 R/Length 291/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1345 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.196 0.322 0.616 scn
-/GS0 gs
-q 1 0 0 1 189.876 496.0234 cm
-0 0 m
-0.55 0.003 1.007 -0.455 1.003 -0.994 c
-1.005 -1.546 0.548 -2 0 -1.997 c
--0.548 -2 -1.005 -1.546 -1.003 -0.994 c
--1.007 -0.455 -0.55 0.003 0 0 c
-f
-Q
-q
-0 g
-1 w 4 M 0 j 0 J []0 d
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
- endstream endobj 1344 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1345 0 obj <</BBox[183.873 501.023 195.879 489.026]/Group 1346 0 R/Length 534/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1347 0 R>>>>/Subtype/Form>>stream
-q
-189.876 496.023 m
-189.876 501.023 l
-193.188 501.023 195.879 498.341 195.879 495.029 c
-195.879 491.708 193.188 489.026 189.876 489.026 c
-186.564 489.026 183.873 491.708 183.873 495.029 c
-183.873 498.341 186.564 501.023 189.876 501.023 c
-189.876 496.023 l
-189.326 496.026 188.869 495.569 188.873 495.029 c
-188.871 494.478 189.328 494.023 189.876 494.026 c
-190.424 494.023 190.881 494.478 190.879 495.029 c
-190.883 495.569 190.426 496.026 189.876 496.023 c
-W n
-q
-1 w 4 M 0 j 0 J []0 d
-/GS0 gs
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
-Q
- endstream endobj 1346 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1347 0 obj <</BBox[183.873 501.023 195.879 489.026]/Group 1348 0 R/Length 11001/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.2 0.325 0.624 scn
-/GS0 gs
-q 1 0 0 1 189.876 496.0234 cm
-0 0 m
-0 5 l
-3.312 5 6.003 2.318 6.003 -0.994 c
-6.003 -4.315 3.312 -6.997 0 -6.997 c
--3.312 -6.997 -6.003 -4.315 -6.003 -0.994 c
--6.003 2.318 -3.312 5 0 5 c
-0 0 l
--0.55 0.003 -1.007 -0.455 -1.003 -0.994 c
--1.005 -1.546 -0.548 -2 0 -1.997 c
-0.548 -2 1.005 -1.546 1.003 -0.994 c
-1.007 -0.455 0.55 0.003 0 0 c
-f
-Q
-q 1 0 0 1 189.876 496.9482 cm
-0 0 m
--0.013 -0.041 -0.073 -0.074 -0.083 -0.116 c
--0.111 -0.248 -0.02 -0.426 0 -0.56 c
-0 -0.925 l
--0.55 -0.922 -1.007 -1.379 -1.003 -1.919 c
--1.005 -2.471 -0.548 -2.925 0 -2.922 c
-0.548 -2.925 1.005 -2.471 1.003 -1.919 c
-1.007 -1.379 0.55 -0.922 0 -0.925 c
-0 -0.56 l
-0.034 -0.557 0.079 -0.553 0.113 -0.55 c
-0.142 -0.55 0.184 -0.537 0.21 -0.549 c
-1.046 -1.473 l
-1.442 -2.154 1.79 -2.107 1.805 -2.105 c
-2.057 -2.065 3.182 -0.618 1.901 0.191 c
-1.598 0.383 1.274 0.41 1.132 0.395 c
-0 0 l
-0 4.075 l
-3.312 4.075 6.003 1.393 6.003 -1.919 c
-6.003 -5.24 3.312 -7.922 0 -7.922 c
--3.312 -7.922 -6.003 -5.24 -6.003 -1.919 c
--6.003 1.393 -3.312 4.075 0 4.075 c
-0 0 l
-f
-Q
-0.196 0.318 0.612 scn
-q 1 0 0 1 189.876 497.0903 cm
-0 0 m
--0.03 -0.092 -0.164 -0.17 -0.185 -0.265 c
--0.222 -0.433 -0.125 -0.678 -0.188 -0.838 c
--0.188 -0.839 -0.237 -0.941 -0.403 -1.05 c
--1.156 -1.54 -1.044 -2.156 -0.992 -2.333 c
--0.807 -2.959 -0.146 -3.264 0.451 -2.999 c
-0.651 -2.909 0.79 -2.772 0.872 -2.69 c
-1.143 -2.422 1.548 -2.621 1.836 -2.412 c
-2.433 -1.979 2.576 -1.57 2.629 -1.416 c
-2.85 -0.785 2.461 0.134 1.628 0.371 c
-0.853 0.591 0.002 0.007 0 0 c
-0 3.933 l
-3.312 3.933 6.003 1.251 6.003 -2.061 c
-6.003 -5.382 3.312 -8.064 0 -8.064 c
--3.312 -8.064 -6.003 -5.382 -6.003 -2.061 c
--6.003 1.251 -3.312 3.933 0 3.933 c
-0 0 l
-f
-Q
-0.192 0.31 0.596 scn
-q 1 0 0 1 189.876 497.231 cm
-0 0 m
--0.294 -0.832 -1.296 -1.347 -1.079 -2.407 c
--0.939 -3.088 -0.171 -3.557 0.648 -3.165 c
-2.592 -2.234 2.592 -2.234 2.763 -1.674 c
-3.159 -0.375 2.125 0.263 1.731 0.384 c
-0.831 0.661 0.003 0.008 0 0 c
-0 3.792 l
-3.312 3.792 6.003 1.11 6.003 -2.202 c
-6.003 -5.522 3.312 -8.205 0 -8.205 c
--3.312 -8.205 -6.003 -5.522 -6.003 -2.202 c
--6.003 1.11 -3.312 3.792 0 3.792 c
-0 0 l
-f
-Q
-0.188 0.302 0.58 scn
-q 1 0 0 1 189.876 497.3701 cm
-0 0 m
--0.353 -0.867 -1.383 -1.429 -1.146 -2.56 c
--1.024 -3.139 -0.35 -3.806 0.712 -3.399 c
-2.444 -2.735 2.625 -2.666 2.946 -1.778 c
-2.952 -1.763 3.406 -0.235 2.053 0.316 c
-0.838 0.812 0.004 0.01 0 0 c
-0 3.653 l
-3.312 3.653 6.003 0.971 6.003 -2.341 c
-6.003 -5.662 3.312 -8.344 0 -8.344 c
--3.312 -8.344 -6.003 -5.662 -6.003 -2.341 c
--6.003 0.971 -3.312 3.653 0 3.653 c
-0 0 l
-f
-Q
-0.18 0.294 0.569 scn
-q 1 0 0 1 189.876 497.5073 cm
-0 0 m
--0.193 -0.417 -0.585 -0.692 -0.795 -1.098 c
--1.093 -1.708 l
--1.262 -2.107 -1.291 -2.435 -1.188 -2.804 c
--1.126 -3.032 -0.727 -4.136 0.984 -3.565 c
-4.73 -2.315 2.784 0.034 2.453 0.247 c
-1.442 0.896 0.101 0.218 0 0 c
-0 3.516 l
-3.312 3.516 6.003 0.834 6.003 -2.478 c
-6.003 -5.799 3.312 -8.481 0 -8.481 c
--3.312 -8.481 -6.003 -5.799 -6.003 -2.478 c
--6.003 0.834 -3.312 3.516 0 3.516 c
-0 0 l
-f
-Q
-0.176 0.286 0.553 scn
-q 1 0 0 1 189.876 497.6602 cm
-0 0 m
--0.013 -0.025 -0.053 -0.04 -0.076 -0.058 c
--0.365 -0.276 -0.692 -0.523 -1.173 -1.803 c
--1.244 -1.989 -1.457 -2.557 -1.185 -3.151 c
--0.782 -4.034 0.179 -4.205 1.672 -3.658 c
-3.872 -2.853 3.987 -0.377 2.341 0.401 c
-1.366 0.863 0.123 0.247 0 0 c
-0 3.363 l
-3.312 3.363 6.003 0.681 6.003 -2.631 c
-6.003 -5.952 3.312 -8.634 0 -8.634 c
--3.312 -8.634 -6.003 -5.952 -6.003 -2.631 c
--6.003 0.681 -3.312 3.363 0 3.363 c
-0 0 l
-f
-Q
-0.173 0.278 0.541 scn
-q 1 0 0 1 189.876 497.8516 cm
-0 0 m
--0.034 -0.067 -0.142 -0.105 -0.203 -0.15 c
--0.741 -0.551 -1.014 -1.287 -1.254 -1.937 c
--1.386 -2.294 -1.492 -2.833 -1.246 -3.37 c
--0.614 -4.746 1.248 -4.148 1.804 -3.932 c
-4.133 -3.027 4.261 -0.305 2.51 0.419 c
-1.108 0.999 0.006 0.012 0 0 c
-0 3.172 l
-3.312 3.172 6.003 0.49 6.003 -2.822 c
-6.003 -6.143 3.312 -8.825 0 -8.825 c
--3.312 -8.825 -6.003 -6.143 -6.003 -2.822 c
--6.003 0.49 -3.312 3.172 0 3.172 c
-0 0 l
-f
-Q
-0.169 0.275 0.525 scn
-q 1 0 0 1 189.876 498.0396 cm
-0 0 m
--0.037 -0.07 -0.152 -0.104 -0.217 -0.148 c
--0.223 -0.151 -0.766 -0.542 -1.153 -1.542 c
--1.498 -2.429 -1.549 -2.937 -1.35 -3.481 c
--1.145 -4.045 -0.491 -4.904 1.578 -4.323 c
-4.082 -3.621 4.629 -0.761 2.993 0.316 c
-1.701 1.166 0.079 0.148 0 0 c
-0 2.984 l
-3.312 2.984 6.003 0.302 6.003 -3.01 c
-6.003 -6.331 3.312 -9.013 0 -9.013 c
--3.312 -9.013 -6.003 -6.331 -6.003 -3.01 c
--6.003 0.302 -3.312 2.984 0 2.984 c
-0 0 l
-f
-Q
-0.165 0.267 0.51 scn
-q 1 0 0 1 189.876 498.2236 cm
-0 0 m
--0.175 -0.317 -0.542 -0.437 -0.748 -0.722 c
--1.027 -1.109 -1.128 -1.336 -1.241 -1.614 c
--1.322 -1.817 -1.715 -2.863 -1.448 -3.592 c
--0.849 -5.223 1.105 -4.776 1.689 -4.601 c
-4.425 -3.778 5.003 -0.758 3.22 0.385 c
-1.946 1.2 0.234 0.423 0 0 c
-0 2.8 l
-3.312 2.8 6.003 0.118 6.003 -3.194 c
-6.003 -6.515 3.312 -9.197 0 -9.197 c
--3.312 -9.197 -6.003 -6.515 -6.003 -3.194 c
--6.003 0.118 -3.312 2.8 0 2.8 c
-0 0 l
-f
-Q
-0.161 0.259 0.498 scn
-q 1 0 0 1 189.876 498.4546 cm
-0 0 m
--0.06 -0.132 -0.265 -0.21 -0.386 -0.291 c
--0.759 -0.542 -1.229 -1.473 -1.327 -1.735 c
--1.444 -2.049 -1.803 -3.137 -1.475 -3.94 c
--0.715 -5.801 1.956 -4.866 1.983 -4.856 c
-5.297 -3.576 5.172 -0.368 3.116 0.573 c
-1.411 1.354 0.007 0.017 0 0 c
-0 2.569 l
-3.312 2.569 6.003 -0.113 6.003 -3.425 c
-6.003 -6.746 3.312 -9.428 0 -9.428 c
--3.312 -9.428 -6.003 -6.746 -6.003 -3.425 c
--6.003 -0.113 -3.312 2.569 0 2.569 c
-0 0 l
-f
-Q
-0.153 0.251 0.482 scn
-q 1 0 0 1 189.876 498.7373 cm
-0 0 m
--0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c
--0.737 -0.595 -1.131 -1.172 -1.412 -1.908 c
--1.719 -2.716 -1.736 -3.696 -1.576 -4.141 c
--0.861 -6.127 1.881 -5.307 1.908 -5.298 c
-5.872 -3.968 5.348 -0.494 3.424 0.518 c
-1.628 1.463 0.058 0.121 0 0 c
-0 2.286 l
-3.312 2.286 6.003 -0.396 6.003 -3.708 c
-6.003 -7.029 3.312 -9.711 0 -9.711 c
--3.312 -9.711 -6.003 -7.029 -6.003 -3.708 c
--6.003 -0.396 -3.312 2.286 0 2.286 c
-0 0 l
-f
-Q
-0.149 0.243 0.467 scn
-q 1 0 0 1 189.876 499.0234 cm
-0 0 m
--0.045 -0.106 -0.21 -0.167 -0.302 -0.236 c
--0.488 -0.374 -1.13 -0.939 -1.627 -2.442 c
--1.764 -2.855 -1.88 -3.934 -1.545 -4.673 c
--1.028 -5.816 0.793 -6.212 2.513 -5.554 c
-6.321 -4.099 5.738 -0.283 3.153 0.723 c
-1.353 1.423 0.007 0.017 0 0 c
-0 2 l
-3.312 2 6.003 -0.682 6.003 -3.994 c
-6.003 -7.315 3.312 -9.997 0 -9.997 c
--3.312 -9.997 -6.003 -7.315 -6.003 -3.994 c
--6.003 -0.682 -3.312 2 0 2 c
-0 0 l
-f
-Q
-0.145 0.235 0.455 scn
-q 1 0 0 1 189.876 499.4067 cm
-0 0 m
--0.163 -0.362 -0.542 -0.515 -0.779 -0.805 c
--0.948 -1.011 -1.049 -1.26 -1.205 -1.475 c
--1.361 -1.69 -1.461 -1.951 -1.723 -2.734 c
--2.048 -3.705 -1.823 -4.543 -1.66 -4.957 c
--1.17 -6.199 0.623 -6.718 2.422 -6.139 c
-7.03 -4.656 5.827 -0.75 3.286 0.539 c
-1.422 1.485 0.008 0.018 0 0 c
-0 1.617 l
-3.312 1.617 6.003 -1.065 6.003 -4.377 c
-6.003 -7.698 3.312 -10.38 0 -10.38 c
--3.312 -10.38 -6.003 -7.698 -6.003 -4.377 c
--6.003 -1.065 -3.312 1.617 0 1.617 c
-0 0 l
-f
-Q
-0.141 0.227 0.439 scn
-q 1 0 0 1 189.876 499.8311 cm
-0 0 m
--0.128 -0.296 -0.442 -0.404 -0.638 -0.631 c
--0.788 -0.804 -0.893 -1.009 -1.031 -1.191 c
--1.148 -1.346 -1.62 -2.354 -1.623 -2.361 c
--2.171 -3.896 -2.053 -4.61 -1.842 -5.154 c
--0.963 -7.425 1.653 -7.025 2.586 -6.68 c
-3.893 -6.196 6.611 -5.189 5.553 -2.521 c
-5.843 -3.224 6.003 -3.994 6.003 -4.802 c
-6.003 -8.123 3.312 -10.805 0 -10.805 c
--3.312 -10.805 -6.003 -8.123 -6.003 -4.802 c
--6.003 -1.49 -3.312 1.192 0 1.192 c
-0 0 l
-f
-Q
-0.137 0.22 0.427 scn
-q 1 0 0 1 189.876 500.2959 cm
-0 0 m
--0.037 -0.078 -0.154 -0.129 -0.22 -0.184 c
--1.238 -1.037 -1.832 -2.884 -1.837 -2.903 c
--2.426 -4.762 -2.011 -5.635 -1.875 -5.921 c
--0.599 -8.601 3.356 -7.148 3.396 -7.133 c
-4.442 -6.725 6.193 -6.042 5.899 -4.15 c
-5.967 -4.512 6.003 -4.885 6.003 -5.267 c
-6.003 -8.587 3.312 -11.27 0 -11.27 c
--3.312 -11.27 -6.003 -8.587 -6.003 -5.267 c
--6.003 -1.955 -3.312 0.728 0 0.728 c
-0 0 l
-f
-Q
-0.133 0.216 0.412 scn
-q 1 0 0 1 189.876 500.7388 cm
-0 0 m
--0.038 -0.067 -0.155 -0.091 -0.221 -0.129 c
--1.151 -0.674 -1.646 -2.172 -2.007 -3.267 c
--2.012 -3.284 -2.546 -5.066 -2.073 -6.279 c
--1.012 -9 2.932 -7.99 3.099 -7.945 c
-4.318 -7.622 5.989 -7.18 6.001 -5.577 c
-6.002 -5.621 6.003 -5.665 6.003 -5.709 c
-6.003 -9.03 3.312 -11.712 0 -11.712 c
--3.312 -11.712 -6.003 -9.03 -6.003 -5.709 c
--6.003 -2.397 -3.312 0.285 0 0.285 c
-0 0 l
-f
-Q
-0.125 0.208 0.396 scn
-q 1 0 0 1 189.876 501.0112 cm
-0 0 m
--0.043 -0.052 -0.154 -0.029 -0.221 -0.042 c
--0.696 -0.132 -1.348 -0.689 -1.732 -1.731 c
--2.576 -4.014 -2.459 -5.548 -2.314 -6.26 c
--1.78 -8.88 1.72 -8.614 1.755 -8.611 c
-4.215 -8.371 5.7 -8.227 5.951 -6.778 c
-5.561 -9.721 3.043 -11.985 0 -11.985 c
--3.312 -11.985 -6.003 -9.303 -6.003 -5.982 c
--6.003 -2.67 -3.312 0.012 0 0.012 c
-0 0 l
-f
-Q
-0.122 0.2 0.384 scn
-q 1 0 0 1 188.9707 500.9468 cm
-0 0 m
--1.737 -0.589 -1.75 -4.504 -1.75 -4.544 c
--1.745 -7.052 -0.74 -7.832 0.016 -8.2 c
-1.799 -9.068 6.088 -9.359 6.659 -7.635 c
-5.92 -10.116 3.622 -11.92 0.905 -11.92 c
--2.407 -11.92 -5.098 -9.238 -5.098 -5.917 c
--5.098 -2.856 -2.799 -0.333 0.165 0.031 c
-0.115 0.022 0.049 0.013 0 0 c
-f
-Q
-0.118 0.192 0.369 scn
-q 1 0 0 1 187.6411 500.5234 cm
-0 0 m
--1.064 -0.939 -0.813 -4.868 -0.54 -5.601 c
-0.43 -8.206 2.406 -8.584 3.21 -8.625 c
-4.273 -8.681 5.3 -9.068 6.38 -8.967 c
-6.693 -8.938 7.267 -8.802 7.587 -8.217 c
-6.594 -10.165 4.569 -11.497 2.235 -11.497 c
--1.077 -11.497 -3.768 -8.815 -3.768 -5.494 c
--3.768 -2.81 -2 -0.54 0.432 0.225 c
-0.372 0.201 0.292 0.168 0.231 0.144 c
-0.162 0.102 0.062 0.054 0 0 c
-f
-Q
-0.204 0.333 0.639 scn
-q 1 0 0 1 191.4565 495.208 cm
-0 0 m
--0.097 0.069 -0.097 0.069 -0.519 0.587 c
--0.662 0.762 -0.835 0.91 -0.974 1.089 c
--1.125 1.285 -1.232 1.593 y
--1.227 1.612 -0.03 2.438 0.591 1.363 c
-1.026 0.61 0.244 -0.13 0.233 -0.131 c
-0.153 -0.143 0.065 -0.046 0 0 c
-f
-Q
-0.141 0.227 0.439 scn
-q 1 0 0 1 192.4463 500.4146 cm
-0 0 m
--1.295 0.463 -2.255 -0.325 -2.57 -0.583 c
--2.57 0.609 l
--1.402 0.609 -0.312 0.275 0.611 -0.302 c
-0.521 -0.251 0.401 -0.185 0.312 -0.135 c
-0.218 -0.094 0.096 -0.034 0 0 c
-f
-Q
-0.208 0.337 0.655 scn
-q 1 0 0 1 191.4961 495.46 cm
-0 0 m
--0.335 0.354 l
--0.472 0.524 -0.626 0.679 -0.757 0.854 c
--0.976 1.148 -1.021 1.268 -1.02 1.273 c
--1.015 1.287 -0.029 1.7 0.33 0.953 c
-0.59 0.409 0.174 -0.12 0.167 -0.121 c
-0.106 -0.131 0.048 -0.04 0 0 c
-f
-Q
-0.137 0.22 0.427 scn
-q 1 0 0 1 191.6431 500.7461 cm
-0 0 m
--0.651 0.121 -1.163 -0.01 -1.767 -0.45 c
--1.767 0.277 l
--1.038 0.277 -0.339 0.147 0.307 -0.09 c
-0.224 -0.065 0.112 -0.032 0.029 -0.006 c
-0.02 -0.004 0.009 -0.001 0 0 c
-f
-Q
-0.216 0.345 0.667 scn
-q 1 0 0 1 191.5 495.7261 cm
-0 0 m
--0.004 0.004 -0.533 0.573 -0.71 0.862 c
--0.568 0.875 -0.482 0.883 -0.264 0.809 c
--0.18 0.781 -0.083 0.699 -0.025 0.631 c
-0.033 0.563 0.091 0.45 0.104 0.362 c
-0.135 0.141 0.099 0.019 0.074 -0.062 c
-0.052 -0.043 0.021 -0.021 0 0 c
-f
-Q
-0.133 0.216 0.412 scn
-q 1 0 0 1 190.7813 500.9458 cm
-0 0 m
--0.314 -0.005 -0.487 -0.009 -0.905 -0.207 c
--0.905 0.078 l
--0.519 0.078 -0.142 0.041 0.225 -0.028 c
-0.157 -0.02 0.067 -0.003 0 0 c
-f
-Q
-0.125 0.208 0.396 scn
-q 1 0 0 1 189.876 501.0112 cm
-0 0 m
-0 0.012 l
-0.072 0.012 0.144 0.011 0.215 0.008 c
-0.15 0.006 0.046 -0.044 0 0 c
-f
-Q
- endstream endobj 1348 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1318 0 obj <</AIS false/BM/Normal/CA 1.0/OP false/OPM 1/SA true/SMask/None/Type/ExtGState/ca 1.0/op false>> endobj 1317 0 obj [/ICCBased 1349 0 R] endobj 1349 0 obj <</Filter/FlateDecode/Length 2574/N 3>>stream
-[binary ICC profile data]
-q
-184.836 446.278 m
-184.836 462.278 l
-200.298 462.278 212.835 449.741 212.835 434.279 c
-212.835 418.809 200.298 406.271 184.836 406.271 c
-169.374 406.271 156.837 418.809 156.837 434.279 c
-156.837 449.741 169.374 462.278 184.836 462.278 c
-184.836 446.278 l
-178.229 446.289 172.826 440.887 172.837 434.279 c
-172.828 427.661 178.229 422.261 184.836 422.271 c
-191.442 422.261 196.844 427.661 196.835 434.279 c
-196.846 440.887 191.443 446.289 184.836 446.278 c
-W n
-q
-/GS0 gs
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
-Q
- endstream endobj 1350 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1351 0 obj <</BBox[156.837 462.278 212.835 406.271]/Group 1352 0 R/Length 13528/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.208 0.337 0.655 scn
-/GS0 gs
-q 1 0 0 1 184.8359 446.2783 cm
-0 0 m
-0 16 l
-15.462 16 27.999 3.463 27.999 -11.999 c
-27.999 -27.47 15.462 -40.007 0 -40.007 c
--15.462 -40.007 -27.999 -27.47 -27.999 -11.999 c
--27.999 3.463 -15.462 16 0 16 c
-0 0 l
--6.607 0.011 -12.01 -5.392 -11.999 -11.999 c
--12.008 -18.617 -6.606 -24.018 0 -24.007 c
-6.606 -24.018 12.008 -18.617 11.999 -11.999 c
-12.01 -5.392 6.607 0.011 0 0 c
-f
-Q
-q 1 0 0 1 184.8359 451.4419 cm
-0 0 m
-0 -0.468 l
-0 -5.164 l
--6.607 -5.153 -12.01 -10.555 -11.999 -17.163 c
--12.008 -23.781 -6.606 -29.181 0 -29.17 c
-6.606 -29.181 12.008 -23.781 11.999 -17.163 c
-12.01 -10.555 6.607 -5.153 0 -5.164 c
-0 -0.468 l
-0.316 -0.694 0.738 -0.997 1.055 -1.223 c
-3.817 -3.661 7.459 -4.869 10 -7.617 c
-12.018 -9.8 13.458 -12.461 14.279 -15.528 c
-15.076 -18.507 16.901 -19.346 16.917 -19.348 c
-18.874 -19.542 24.735 -10.485 17.857 -2.241 c
-10.879 6.124 0.769 1.958 0 0 c
-0 10.836 l
-15.462 10.836 27.999 -1.701 27.999 -17.163 c
-27.999 -32.633 15.462 -45.17 0 -45.17 c
--15.462 -45.17 -27.999 -32.633 -27.999 -17.163 c
--27.999 -1.701 -15.462 10.836 0 10.836 c
-0 0 l
-f
-Q
-0.204 0.333 0.639 scn
-q 1 0 0 1 184.8359 453.2891 cm
-0 0 m
--0.296 -0.712 -1.487 -1.168 -1.735 -1.898 c
--1.987 -2.638 -2.003 -3.873 -1.53 -4.494 c
--1.227 -4.893 -0.45 -4.945 0 -5.167 c
-0 -7.011 l
--6.607 -7 -12.01 -12.402 -11.999 -19.01 c
--12.008 -25.628 -6.606 -31.028 0 -31.018 c
-6.606 -31.028 12.008 -25.628 11.999 -19.01 c
-12.01 -12.402 6.607 -7 0 -7.011 c
-0 -5.167 l
-0.338 -5.201 0.788 -5.245 1.126 -5.278 c
-2.249 -5.476 12.144 -7.557 13.761 -19.538 c
-13.765 -19.565 14.171 -22.516 14.171 -22.516 c
-14.636 -23.09 15.724 -23.507 16.459 -23.43 c
-20.584 -22.993 26.416 -9.568 15.896 -1.312 c
-7.943 4.929 0.035 0.084 0 0 c
-0 8.989 l
-15.462 8.989 27.999 -3.548 27.999 -19.01 c
-27.999 -34.48 15.462 -47.018 0 -47.018 c
--15.462 -47.018 -27.999 -34.48 -27.999 -19.01 c
--27.999 -3.548 -15.462 8.989 0 8.989 c
-0 0 l
-f
-Q
-0.2 0.325 0.624 scn
-q 1 0 0 1 184.8359 454.4082 cm
-0 0 m
--0.627 -1.109 -1.866 -1.525 -2.708 -2.391 c
--4.764 -4.503 -4.447 -6.209 -4.44 -6.223 c
--4.355 -6.386 -4.355 -6.386 0 -7.408 c
-0 -8.13 l
--6.607 -8.119 -12.01 -13.521 -11.999 -20.129 c
--12.008 -26.747 -6.606 -32.147 0 -32.137 c
-6.606 -32.147 12.008 -26.747 11.999 -20.129 c
-12.01 -13.521 6.607 -8.119 0 -8.13 c
-0 -7.408 l
-0.312 -7.428 0.727 -7.455 1.039 -7.475 c
-5.587 -8.118 13.156 -12.018 12.674 -22.551 c
-12.559 -25.065 12.662 -26.483 12.98 -26.764 c
-14.309 -27.938 23.357 -23.699 22.629 -14.042 c
-21.269 4.004 1.142 2.019 0 0 c
-0 7.87 l
-15.462 7.87 27.999 -4.667 27.999 -20.129 c
-27.999 -35.6 15.462 -48.137 0 -48.137 c
--15.462 -48.137 -27.999 -35.6 -27.999 -20.129 c
--27.999 -4.667 -15.462 7.87 0 7.87 c
-0 0 l
-f
-Q
-0.196 0.318 0.612 scn
-q 1 0 0 1 184.8359 455.3335 cm
-0 0 m
--0.223 -0.377 -0.896 -0.494 -1.279 -0.706 c
--3.984 -2.198 -4.352 -2.882 -7.218 -8.204 c
--10.977 -15.407 l
--12.034 -17.649 -12.409 -19.973 -12.123 -22.512 c
--11.368 -29.209 -4.441 -35.048 3.701 -32.84 c
-16.505 -28.457 l
-19.639 -26.39 21.523 -23.894 22.614 -20.364 c
-24.61 -13.907 21.812 -4.74 13.674 -0.575 c
-6.26 3.219 0.029 0.049 0 0 c
-0 6.945 l
-15.462 6.945 27.999 -5.592 27.999 -21.054 c
-27.999 -36.525 15.462 -49.062 0 -49.062 c
--15.462 -49.062 -27.999 -36.525 -27.999 -21.054 c
--27.999 -5.592 -15.462 6.945 0 6.945 c
-0 0 l
-f
-Q
-0.192 0.31 0.596 scn
-q 1 0 0 1 184.8359 456.1333 cm
-0 0 m
--0.174 -0.267 -0.682 -0.3 -0.974 -0.428 c
--3.27 -1.438 -6.363 -4.313 -7.593 -6.58 c
--13.39 -17.263 -12.999 -20.654 -12.686 -23.38 c
--12.044 -28.948 -6.307 -36.34 3.975 -34.525 c
-32.478 -29.493 24.483 -7.887 15.417 -1.844 c
-7.621 3.352 0.038 0.059 0 0 c
-0 6.145 l
-15.462 6.145 27.999 -6.392 27.999 -21.854 c
-27.999 -37.325 15.462 -49.862 0 -49.862 c
--15.462 -49.862 -27.999 -37.325 -27.999 -21.854 c
--27.999 -6.392 -15.462 6.145 0 6.145 c
-0 0 l
-f
-Q
-0.188 0.302 0.58 scn
-q 1 0 0 1 184.8359 456.834 cm
-0 0 m
--0.26 -0.393 -1.01 -0.429 -1.443 -0.612 c
--4.281 -1.817 -7.531 -4.969 -9.346 -8.278 c
--13.498 -15.848 -13.757 -21.086 -13.243 -24.147 c
--12.335 -29.562 -7.257 -38.122 6.017 -35.862 c
-29.657 -31.837 27.572 -10.232 15.691 -2.188 c
-7.725 3.206 0.039 0.058 0 0 c
-0 5.444 l
-15.462 5.444 27.999 -7.093 27.999 -22.555 c
-27.999 -38.025 15.462 -50.563 0 -50.563 c
--15.462 -50.563 -27.999 -38.025 -27.999 -22.555 c
--27.999 -7.093 -15.462 5.444 0 5.444 c
-0 0 l
-f
-Q
-0.18 0.294 0.569 scn
-q 1 0 0 1 184.8359 457.5 cm
-0 0 m
--0.27 -0.397 -1.042 -0.411 -1.488 -0.586 c
--3.111 -1.225 -7.25 -3.37 -10.633 -9.471 c
--11.685 -11.368 -15.021 -18.085 -13.796 -24.879 c
--12.453 -32.328 -5.461 -39.37 6.714 -37.227 c
-28.951 -33.313 28.976 -11.259 15.609 -2.301 c
-7.856 2.895 0.038 0.056 0 0 c
-0 4.778 l
-15.462 4.778 27.999 -7.759 27.999 -23.221 c
-27.999 -38.691 15.462 -51.229 0 -51.229 c
--15.462 -51.229 -27.999 -38.691 -27.999 -23.221 c
--27.999 -7.759 -15.462 4.778 0 4.778 c
-0 0 l
-f
-Q
-0.176 0.286 0.553 scn
-q 1 0 0 1 184.8359 458.1108 cm
-0 0 m
--0.285 -0.403 -1.085 -0.384 -1.55 -0.549 c
--2.14 -0.758 -7.426 -2.783 -11.14 -9.4 c
--12.536 -11.888 -15.643 -18.441 -14.343 -25.555 c
--13.275 -31.4 -7.567 -40.72 7.05 -38.576 c
-28.069 -35.492 30.907 -13.131 16.17 -2.838 c
-7.979 2.883 0.04 0.057 0 0 c
-0 4.167 l
-15.462 4.167 27.999 -8.37 27.999 -23.832 c
-27.999 -39.302 15.462 -51.839 0 -51.839 c
--15.462 -51.839 -27.999 -39.302 -27.999 -23.832 c
--27.999 -8.37 -15.462 4.167 0 4.167 c
-0 0 l
-f
-Q
-0.173 0.278 0.541 scn
-q 1 0 0 1 184.8359 458.6836 cm
-0 0 m
--0.294 -0.407 -1.113 -0.365 -1.59 -0.521 c
--3.037 -0.996 -8.057 -3.068 -11.887 -9.807 c
--12.95 -11.676 -16.305 -18.381 -14.886 -26.192 c
--13.691 -32.767 -6.813 -41.832 7.241 -39.858 c
-28.692 -36.845 31.476 -13.851 16.374 -3.144 c
-8.08 2.736 0.041 0.056 0 0 c
-0 3.595 l
-15.462 3.595 27.999 -8.942 27.999 -24.404 c
-27.999 -39.875 15.462 -52.412 0 -52.412 c
--15.462 -52.412 -27.999 -39.875 -27.999 -24.404 c
--27.999 -8.942 -15.462 3.595 0 3.595 c
-0 0 l
-f
-Q
-0.169 0.275 0.525 scn
-q 1 0 0 1 184.8359 459.2207 cm
-0 0 m
--0.327 -0.44 -1.224 -0.37 -1.749 -0.528 c
--5.52 -1.667 -9.766 -5.26 -12.073 -9.267 c
--15.394 -15.036 -16.522 -20.933 -15.426 -26.792 c
--13.856 -35.181 -5.227 -43.019 7.675 -41.021 c
-29.387 -37.659 31.678 -13.959 16.092 -3.122 c
-8.188 2.374 0.041 0.052 0 0 c
-0 3.058 l
-15.462 3.058 27.999 -9.479 27.999 -24.941 c
-27.999 -40.412 15.462 -52.949 0 -52.949 c
--15.462 -52.949 -27.999 -40.412 -27.999 -24.941 c
--27.999 -9.479 -15.462 3.058 0 3.058 c
-0 0 l
-f
-Q
-0.165 0.267 0.51 scn
-q 1 0 0 1 184.8359 459.7354 cm
-0 0 m
--0.315 -0.413 -1.169 -0.321 -1.671 -0.458 c
--5.628 -1.543 -10.186 -5.222 -12.509 -9.206 c
--13.794 -11.411 -17.706 -18.119 -15.958 -27.37 c
--14.312 -36.089 -5.369 -44.235 7.962 -42.157 c
-29.829 -38.748 32.261 -15.07 16.713 -3.752 c
-8.241 2.415 0.041 0.054 0 0 c
-0 2.543 l
-15.462 2.543 27.999 -9.994 27.999 -25.456 c
-27.999 -40.927 15.462 -53.464 0 -53.464 c
--15.462 -53.464 -27.999 -40.927 -27.999 -25.456 c
--27.999 -9.994 -15.462 2.543 0 2.543 c
-0 0 l
-f
-Q
-0.161 0.259 0.498 scn
-q 1 0 0 1 184.8359 460.208 cm
-0 0 m
--0.326 -0.417 -1.197 -0.297 -1.71 -0.424 c
--5.005 -1.241 -10.022 -4.174 -13.317 -9.752 c
--16.642 -15.38 -17.707 -21.488 -16.484 -27.905 c
--14.771 -36.893 -5.522 -45.319 8.241 -43.229 c
-29.819 -39.954 32.248 -15.425 16.845 -4.05 c
-8.507 2.107 0.042 0.053 0 0 c
-0 2.07 l
-15.462 2.07 27.999 -10.467 27.999 -25.929 c
-27.999 -41.399 15.462 -53.937 0 -53.937 c
--15.462 -53.937 -27.999 -41.399 -27.999 -25.929 c
--27.999 -10.467 -15.462 2.07 0 2.07 c
-0 0 l
-f
-Q
-0.153 0.251 0.482 scn
-q 1 0 0 1 184.8359 460.6479 cm
-0 0 m
--0.165 -0.201 -0.596 -0.119 -0.852 -0.169 c
--6.63 -1.321 -11.086 -5.48 -13.33 -8.99 c
--17.823 -16.018 -17.959 -22.68 -17.283 -27.032 c
--15.528 -38.313 -5.353 -45.642 6.913 -44.456 c
-29.058 -42.316 33.217 -18.568 18.588 -5.674 c
-9.722 2.142 0.051 0.062 0 0 c
-0 1.63 l
-15.462 1.63 27.999 -10.907 27.999 -26.369 c
-27.999 -41.839 15.462 -54.376 0 -54.376 c
--15.462 -54.376 -27.999 -41.839 -27.999 -26.369 c
--27.999 -10.907 -15.462 1.63 0 1.63 c
-0 0 l
-f
-Q
-0.149 0.243 0.467 scn
-q 1 0 0 1 184.8359 461.0591 cm
-0 0 m
--0.345 -0.419 -1.243 -0.245 -1.775 -0.35 c
--5.333 -1.052 -10.598 -4.013 -13.752 -8.857 c
--18.474 -16.108 -18.606 -22.979 -17.885 -27.466 c
--16.272 -37.507 -7.1 -46.929 7.31 -45.507 c
-29.58 -43.31 33.524 -19.12 18.666 -5.999 c
-9.679 1.938 0.05 0.061 0 0 c
-0 1.219 l
-15.462 1.219 27.999 -11.318 27.999 -26.78 c
-27.999 -42.25 15.462 -54.788 0 -54.788 c
--15.462 -54.788 -27.999 -42.25 -27.999 -26.78 c
--27.999 -11.318 -15.462 1.219 0 1.219 c
-0 0 l
-f
-Q
-0.145 0.235 0.455 scn
-q 1 0 0 1 184.8359 461.4141 cm
-0 0 m
--0.359 -0.424 -1.279 -0.213 -1.827 -0.305 c
--2.571 -0.429 -9.239 -1.713 -14.035 -8.521 c
--19.337 -16.049 -19.04 -23.602 -18.666 -26.5 c
--16.79 -41.041 -4.557 -47.127 6.015 -46.629 c
-29.242 -45.535 34.043 -19.97 18.705 -6.311 c
-9.693 1.714 0.05 0.059 0 0 c
-0 0.864 l
-15.462 0.864 27.999 -11.673 27.999 -27.135 c
-27.999 -42.605 15.462 -55.143 0 -55.143 c
--15.462 -55.143 -27.999 -42.605 -27.999 -27.135 c
--27.999 -11.673 -15.462 0.864 0 0.864 c
-0 0 l
-f
-Q
-0.141 0.227 0.439 scn
-q 1 0 0 1 184.8359 461.7397 cm
-0 0 m
--0.366 -0.422 -1.29 -0.183 -1.842 -0.262 c
--5.616 -0.798 -11.203 -3.577 -14.553 -8.414 c
--20.526 -17.037 -19.484 -25.015 -19.142 -27.636 c
--17.325 -41.551 -4.721 -48.305 6.215 -47.597 c
-22.827 -46.52 31.839 -32.415 25.896 -16.796 c
-27.251 -20.083 27.999 -23.685 27.999 -27.46 c
-27.999 -42.931 15.462 -55.468 0 -55.468 c
--15.462 -55.468 -27.999 -42.931 -27.999 -27.46 c
--27.999 -11.999 -15.462 0.539 0 0.539 c
-0 0 l
-f
-Q
-0.137 0.22 0.427 scn
-q 1 0 0 1 184.8359 461.9951 cm
-0 0 m
--0.38 -0.425 -1.322 -0.147 -1.889 -0.211 c
--3.74 -0.417 -10.183 -1.633 -15.334 -8.604 c
--20.12 -15.08 -20.496 -23.225 -19.964 -27.016 c
--18.071 -40.504 -7.311 -49.146 6.811 -48.521 c
-13.567 -48.222 30.459 -42.962 27.513 -22.495 c
-27.832 -24.187 27.999 -25.932 27.999 -27.716 c
-27.999 -43.187 15.462 -55.724 0 -55.724 c
--15.462 -55.724 -27.999 -43.187 -27.999 -27.716 c
--27.999 -12.254 -15.462 0.283 0 0.283 c
-0 0 l
-f
-Q
-0.133 0.216 0.412 scn
-q 1 0 0 1 184.8359 462.186 cm
-0 0 m
--0.389 -0.421 -1.333 -0.109 -1.905 -0.156 c
--5.862 -0.48 -11.762 -2.986 -15.367 -7.721 c
--21.456 -15.72 -21.121 -23.999 -20.694 -27.186 c
--18.877 -40.772 -7.134 -50.361 6.621 -49.493 c
-16.365 -48.877 27.809 -42.692 27.992 -27.284 c
-27.997 -27.491 27.999 -27.699 27.999 -27.907 c
-27.999 -43.377 15.462 -55.915 0 -55.915 c
--15.462 -55.915 -27.999 -43.377 -27.999 -27.907 c
--27.999 -12.445 -15.462 0.092 0 0.092 c
-0 0 l
-f
-Q
-0.125 0.208 0.396 scn
-q 1 0 0 1 184.8359 462.2749 cm
-0 0 m
--0.403 -0.423 -1.362 -0.067 -1.945 -0.096 c
--5.653 -0.278 -11.171 -1.795 -16.407 -7.987 c
--19.42 -11.549 -22.258 -18.906 -21.583 -25.522 c
--19.025 -50.599 4.157 -50.427 5.143 -50.408 c
-17.394 -50.165 25.848 -43.174 27.755 -31.708 c
-25.94 -45.423 14.204 -56.003 0 -56.003 c
--15.462 -56.003 -27.999 -43.466 -27.999 -27.996 c
--27.999 -12.534 -15.462 0.003 0 0.003 c
-0 0 l
-f
-Q
-0.122 0.2 0.384 scn
-q 1 0 0 1 180.605 461.958 cm
-0 0 m
--22.531 -4.551 -23.529 -35.032 -6.329 -46.266 c
-6.848 -54.872 25.64 -52.177 31.068 -35.689 c
-27.624 -47.255 16.911 -55.687 4.231 -55.687 c
--11.231 -55.687 -23.768 -43.149 -23.768 -27.679 c
--23.768 -13.386 -13.055 -1.592 0.778 0.109 c
-0.544 0.077 0.232 0.04 0 0 c
-f
-Q
-0.118 0.192 0.369 scn
-q 1 0 0 1 172.812 459.498 cm
-0 0 m
--16.566 -9.064 -17.348 -40.201 9.316 -48.722 c
-16.64 -51.062 30.628 -50.199 36.986 -37.919 c
-32.357 -47.005 22.916 -53.227 12.024 -53.227 c
--3.438 -53.227 -15.975 -40.689 -15.975 -25.219 c
--15.975 -12.683 -7.734 -2.069 3.625 1.499 c
-3.1 1.309 2.399 1.057 1.873 0.867 c
-1.31 0.61 0.543 0.297 0 0 c
-f
-Q
-0.216 0.345 0.667 scn
-q 1 0 0 1 200.7622 436.103 cm
-0 0 m
--1.706 2.422 -2.871 5.192 -4.806 7.466 c
--5.581 8.375 -6.334 9.141 -7.046 9.74 c
--7.103 9.788 -12.699 14.577 -12.706 14.929 c
--12.708 15.035 -10.925 16.753 -10.74 16.825 c
--10.058 17.086 -7.544 17.231 -6.875 17.166 c
--5.111 16.992 -2.438 16.241 0.275 13.649 c
-3.79 10.293 4.269 6.382 4.332 5.263 c
-4.608 0.362 1.816 -1.552 1.125 -1.426 c
-0.589 -1.328 0.314 -0.445 0 0 c
-f
-Q
-0.22 0.353 0.682 scn
-q 1 0 0 1 200.8965 438.5967 cm
-0 0 m
--1.97 2.883 -3.056 4.472 -4.87 6.595 c
--5.072 6.832 -5.375 7.116 -5.591 7.34 c
--5.844 7.601 -6.16 7.969 -6.419 8.224 c
--6.913 8.711 -7.551 9.382 -8.074 9.839 c
--9.724 11.281 -9.908 11.547 -9.911 11.595 c
--9.914 11.655 -8.389 13.369 -8.295 13.411 c
--7.711 13.674 -6.801 13.346 -6.164 13.276 c
--2.962 12.927 -1.156 11.212 -0.476 10.566 c
-2.531 7.709 2.783 5.143 2.904 3.909 c
-2.938 3.565 2.929 0.875 2.709 0.41 c
-2.675 0.337 0.707 -0.875 0.645 -0.861 c
-0.33 -0.793 0.182 -0.267 0 0 c
-f
-Q
-0.224 0.361 0.694 scn
-q 1 0 0 1 199.9814 442.126 cm
-0 0 m
--0.737 0.235 -1.076 1.45 -1.576 2.04 c
--3.148 3.895 -3.148 3.895 -3.897 4.678 c
--4.212 5.008 -4.84 5.354 -4.922 5.803 c
--4.014 7.981 l
--3.953 8.007 -1.427 7.15 0.33 5.083 c
-1.631 3.552 2.397 0.755 2.281 0.574 c
-1.906 -0.01 0.699 -0.197 0.037 0.011 c
-0.026 0.014 0.011 -0.003 0 0 c
-f
-Q
-0.141 0.227 0.439 scn
-q 1 0 0 1 196.8853 459.5508 cm
-0 0 m
--5.275 2.417 -9.403 2.407 -12.049 2.189 c
--12.049 2.728 l
--6.604 2.728 -1.522 1.173 2.777 -1.517 c
-2.232 -1.205 1.506 -0.789 0.961 -0.477 c
-0.673 -0.334 0.292 -0.134 0 0 c
-f
-Q
-0.137 0.22 0.427 scn
-q 1 0 0 1 193.0991 461.0352 cm
-0 0 m
--3.078 0.794 -4.478 1.111 -8.263 0.96 c
--8.263 1.243 l
--4.866 1.243 -1.61 0.638 1.402 -0.47 c
-0.981 -0.329 0.425 -0.126 0 0 c
-f
-Q
-0.133 0.216 0.412 scn
-q 1 0 0 1 189.0669 461.958 cm
-0 0 m
--2.557 0.263 -2.657 0.273 -4.231 0.228 c
--4.231 0.32 l
--2.431 0.32 -0.671 0.15 1.035 -0.174 c
-0.724 -0.122 0.312 -0.042 0 0 c
-f
-Q
-0.125 0.208 0.396 scn
-q 1 0 0 1 184.8359 462.2749 cm
-0 0 m
-0.335 0.003 0.669 -0.002 1.001 -0.014 c
-0.701 -0.01 0.211 -0.214 0 0 c
-f
-Q
- endstream endobj 1352 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1340 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1341 0 obj <</BBox[309.171 492.275 321.168 480.278]/Group 1353 0 R/Length 529/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ExtGState<</GS0 1318 0 R>>/XObject<</Fm0 1354 0 R>>>>/Subtype/Form>>stream
-q
-315.165 487.275 m
-315.165 492.275 l
-318.477 492.275 321.168 489.593 321.168 486.272 c
-321.168 482.96 318.477 480.278 315.165 480.278 c
-311.853 480.278 309.171 482.96 309.171 486.272 c
-309.171 489.593 311.853 492.275 315.165 492.275 c
-315.165 487.275 l
-314.621 487.278 314.17 486.83 314.171 486.272 c
-314.168 485.727 314.619 485.276 315.165 485.278 c
-315.715 485.275 316.172 485.733 316.168 486.272 c
-316.17 486.824 315.713 487.279 315.165 487.275 c
-W n
-q
-1 w 4 M 0 j 0 J []0 d
-/GS0 gs
-0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do
-Q
-Q
- endstream endobj 1353 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1354 0 obj <</BBox[309.171 492.275 321.168 480.278]/Group 1355 0 R/Length 11074/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Resources<</ColorSpace<</CS0 1317 0 R>>/ExtGState<</GS0 1318 0 R>>>>/Subtype/Form>>stream
-/CS0 cs 0.2 0.325 0.624 scn
-/GS0 gs
-q 1 0 0 1 315.165 487.2754 cm
-0 0 m
-0 5 l
-3.312 5 6.003 2.318 6.003 -1.003 c
-6.003 -4.315 3.312 -6.997 0 -6.997 c
--3.312 -6.997 -5.994 -4.315 -5.994 -1.003 c
--5.994 2.318 -3.312 5 0 5 c
-0 0 l
--0.544 0.003 -0.995 -0.445 -0.994 -1.003 c
--0.997 -1.549 -0.546 -2 0 -1.997 c
-0.55 -2 1.007 -1.542 1.003 -1.003 c
-1.005 -0.451 0.548 0.003 0 0 c
-f
-Q
-q 1 0 0 1 315.165 488.1997 cm
-0 0 m
--0.013 -0.041 -0.073 -0.074 -0.082 -0.115 c
--0.11 -0.248 -0.02 -0.425 0 -0.559 c
-0 -0.924 l
--0.544 -0.921 -0.995 -1.37 -0.994 -1.927 c
--0.997 -2.473 -0.546 -2.924 0 -2.921 c
-0.55 -2.924 1.007 -2.467 1.003 -1.927 c
-1.005 -1.375 0.548 -0.921 0 -0.924 c
-0 -0.559 l
-0.034 -0.556 0.079 -0.552 0.113 -0.549 c
-0.142 -0.549 0.183 -0.536 0.209 -0.548 c
-1.045 -1.475 l
-1.44 -2.16 1.79 -2.114 1.805 -2.112 c
-2.058 -2.072 3.187 -0.623 1.901 0.191 c
-1.597 0.384 1.274 0.411 1.13 0.396 c
-0 0 l
-0 4.076 l
-3.312 4.076 6.003 1.394 6.003 -1.927 c
-6.003 -5.239 3.312 -7.921 0 -7.921 c
--3.312 -7.921 -5.994 -5.239 -5.994 -1.927 c
--5.994 1.394 -3.312 4.076 0 4.076 c
-0 0 l
-f
-Q
-0.196 0.318 0.612 scn
-q 1 0 0 1 315.165 488.3418 cm
-0 0 m
--0.03 -0.092 -0.163 -0.17 -0.184 -0.265 c
--0.221 -0.432 -0.125 -0.677 -0.186 -0.837 c
--0.186 -0.838 -0.235 -0.941 -0.399 -1.048 c
--1.15 -1.539 -1.036 -2.16 -0.983 -2.339 c
--0.8 -2.96 -0.143 -3.262 0.452 -2.998 c
-0.652 -2.908 0.791 -2.771 0.873 -2.69 c
-1.144 -2.423 1.548 -2.625 1.836 -2.417 c
-2.431 -1.985 2.564 -1.604 2.628 -1.42 c
-2.85 -0.787 2.46 0.134 1.627 0.371 c
-0.853 0.592 0.002 0.008 0 0 c
-0 3.934 l
-3.312 3.934 6.003 1.251 6.003 -2.069 c
-6.003 -5.381 3.312 -8.063 0 -8.063 c
--3.312 -8.063 -5.994 -5.381 -5.994 -2.069 c
--5.994 1.251 -3.312 3.934 0 3.934 c
-0 0 l
-f
-Q
-0.192 0.31 0.596 scn
-q 1 0 0 1 315.165 488.4824 cm
-0 0 m
--0.294 -0.832 -1.287 -1.354 -1.07 -2.414 c
--0.931 -3.09 -0.167 -3.555 0.649 -3.164 c
-1.049 -2.972 1.516 -2.957 1.889 -2.695 c
-2.243 -2.445 2.625 -2.13 2.762 -1.679 c
-3.159 -0.375 2.125 0.264 1.73 0.385 c
-0.831 0.662 0.003 0.008 0 0 c
-0 3.793 l
-3.312 3.793 6.003 1.111 6.003 -2.21 c
-6.003 -5.522 3.312 -8.204 0 -8.204 c
--3.312 -8.204 -5.994 -5.522 -5.994 -2.21 c
--5.994 1.111 -3.312 3.793 0 3.793 c
-0 0 l
-f
-Q
-0.188 0.302 0.58 scn
-q 1 0 0 1 315.165 488.6216 cm
-0 0 m
--0.352 -0.867 -1.375 -1.438 -1.138 -2.566 c
--1.017 -3.142 -0.345 -3.804 0.713 -3.398 c
-2.483 -2.719 2.628 -2.663 2.945 -1.783 c
-2.951 -1.768 3.406 -0.235 2.053 0.317 c
-0.863 0.802 0.004 0.01 0 0 c
-0 3.654 l
-3.312 3.654 6.003 0.972 6.003 -2.349 c
-6.003 -5.661 3.312 -8.343 0 -8.343 c
--3.312 -8.343 -5.994 -5.661 -5.994 -2.349 c
--5.994 0.972 -3.312 3.654 0 3.654 c
-0 0 l
-f
-Q
-0.18 0.294 0.569 scn
-q 1 0 0 1 315.165 488.7588 cm
-0 0 m
--0.192 -0.416 -0.582 -0.691 -0.789 -1.097 c
--0.793 -1.105 -1.082 -1.703 -1.083 -1.706 c
--1.253 -2.111 -1.282 -2.441 -1.181 -2.81 c
--1.118 -3.036 -0.72 -4.135 0.985 -3.564 c
-5.022 -2.213 2.486 0.225 2.452 0.247 c
-1.442 0.897 0.101 0.219 0 0 c
-0 3.517 l
-3.312 3.517 6.003 0.834 6.003 -2.486 c
-6.003 -5.798 3.312 -8.48 0 -8.48 c
--3.312 -8.48 -5.994 -5.798 -5.994 -2.486 c
--5.994 0.834 -3.312 3.517 0 3.517 c
-0 0 l
-f
-Q
-0.176 0.286 0.553 scn
-q 1 0 0 1 315.165 488.9116 cm
-0 0 m
--0.013 -0.025 -0.053 -0.04 -0.076 -0.057 c
--0.432 -0.327 -0.719 -0.611 -1.164 -1.801 c
--1.234 -1.99 -1.448 -2.564 -1.178 -3.156 c
--0.778 -4.031 0.18 -4.2 1.671 -3.658 c
-3.876 -2.856 3.991 -0.38 2.341 0.402 c
-1.366 0.864 0.123 0.248 0 0 c
-0 3.364 l
-3.312 3.364 6.003 0.682 6.003 -2.639 c
-6.003 -5.951 3.312 -8.633 0 -8.633 c
--3.312 -8.633 -5.994 -5.951 -5.994 -2.639 c
--5.994 0.682 -3.312 3.364 0 3.364 c
-0 0 l
-f
-Q
-0.173 0.278 0.541 scn
-q 1 0 0 1 315.165 489.1035 cm
-0 0 m
--0.034 -0.068 -0.142 -0.105 -0.202 -0.15 c
--0.734 -0.546 -0.993 -1.253 -1.244 -1.936 c
--1.353 -2.232 -1.496 -2.812 -1.238 -3.374 c
--0.612 -4.739 1.248 -4.146 1.803 -3.932 c
-4.138 -3.031 4.265 -0.308 2.51 0.419 c
-1.108 1 0.006 0.012 0 0 c
-0 3.172 l
-3.312 3.172 6.003 0.49 6.003 -2.831 c
-6.003 -6.143 3.312 -8.825 0 -8.825 c
--3.312 -8.825 -5.994 -6.143 -5.994 -2.831 c
--5.994 0.49 -3.312 3.172 0 3.172 c
-0 0 l
-f
-Q
-0.169 0.275 0.525 scn
-q 1 0 0 1 315.165 489.291 cm
-0 0 m
--0.037 -0.069 -0.152 -0.103 -0.217 -0.147 c
--0.48 -0.327 -0.918 -0.951 -1.084 -1.383 c
--1.402 -2.209 -1.592 -2.802 -1.342 -3.486 c
--1.138 -4.046 -0.487 -4.899 1.578 -4.322 c
-4.081 -3.623 4.628 -0.763 2.992 0.316 c
-1.701 1.167 0.079 0.149 0 0 c
-0 2.984 l
-3.312 2.984 6.003 0.302 6.003 -3.019 c
-6.003 -6.331 3.312 -9.013 0 -9.013 c
--3.312 -9.013 -5.994 -6.331 -5.994 -3.019 c
--5.994 0.302 -3.312 2.984 0 2.984 c
-0 0 l
-f
-Q
-0.165 0.267 0.51 scn
-q 1 0 0 1 315.165 489.4751 cm
-0 0 m
--0.175 -0.316 -0.541 -0.436 -0.745 -0.721 c
--1.04 -1.133 -1.134 -1.367 -1.233 -1.614 c
--1.283 -1.739 -1.712 -2.854 -1.439 -3.598 c
--0.844 -5.219 1.105 -4.774 1.689 -4.6 c
-4.424 -3.78 5.002 -0.76 3.22 0.385 c
-1.946 1.202 0.234 0.424 0 0 c
-0 2.8 l
-3.312 2.8 6.003 0.118 6.003 -3.203 c
-6.003 -6.515 3.312 -9.197 0 -9.197 c
--3.312 -9.197 -5.994 -6.515 -5.994 -3.203 c
--5.994 0.118 -3.312 2.8 0 2.8 c
-0 0 l
-f
-Q
-0.161 0.259 0.498 scn
-q 1 0 0 1 315.165 489.7065 cm
-0 0 m
--0.06 -0.132 -0.265 -0.21 -0.385 -0.291 c
--0.751 -0.537 -1.207 -1.436 -1.319 -1.735 c
--1.402 -1.96 -1.802 -3.124 -1.467 -3.945 c
--0.712 -5.795 1.956 -4.866 1.982 -4.855 c
-5.299 -3.58 5.174 -0.371 3.116 0.573 c
-1.411 1.355 0.007 0.017 0 0 c
-0 2.569 l
-3.312 2.569 6.003 -0.113 6.003 -3.434 c
-6.003 -6.746 3.312 -9.428 0 -9.428 c
--3.312 -9.428 -5.994 -6.746 -5.994 -3.434 c
--5.994 -0.113 -3.312 2.569 0 2.569 c
-0 0 l
-f
-Q
-0.153 0.251 0.482 scn
-q 1 0 0 1 315.165 489.9888 cm
-0 0 m
--0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c
--0.739 -0.597 -1.12 -1.159 -1.404 -1.909 c
--1.678 -2.633 -1.751 -3.637 -1.568 -4.146 c
--0.856 -6.124 1.88 -5.306 1.908 -5.297 c
-5.872 -3.969 5.347 -0.495 3.422 0.519 c
-1.628 1.464 0.058 0.122 0 0 c
-0 2.287 l
-3.312 2.287 6.003 -0.396 6.003 -3.716 c
-6.003 -7.028 3.312 -9.71 0 -9.71 c
--3.312 -9.71 -5.994 -7.028 -5.994 -3.716 c
--5.994 -0.396 -3.312 2.287 0 2.287 c
-0 0 l
-f
-Q
-0.149 0.243 0.467 scn
-q 1 0 0 1 315.165 490.2749 cm
-0 0 m
--0.045 -0.106 -0.209 -0.167 -0.302 -0.235 c
--0.485 -0.372 -1.122 -0.935 -1.618 -2.443 c
--1.723 -2.761 -1.897 -3.881 -1.538 -4.677 c
--1.024 -5.812 0.792 -6.206 2.512 -5.554 c
-6.336 -4.105 5.75 -0.288 3.153 0.723 c
-0 0 m
--0.223 -0.377 -0.896 -0.494 -1.279 -0.706 c
--3.983 -2.198 -4.352 -2.882 -7.218 -8.204 c
--10.977 -15.407 l
--12.034 -17.649 -12.409 -19.973 -12.123 -22.51 c
--11.368 -29.204 -4.441 -35.04 3.701 -32.832 c
-16.504 -28.451 l
-19.64 -26.383 21.524 -23.889 22.614 -20.364 c
-24.61 -13.908 21.812 -4.74 13.674 -0.575 c
-6.26 3.219 0.029 0.049 0 0 c
-0 6.945 l
-15.462 6.945 27.999 -5.592 27.999 -21.054 c
-27.999 -36.516 15.462 -49.053 0 -49.053 c
--15.462 -49.053 -27.999 -36.516 -27.999 -21.054 c
--27.999 -5.592 -15.462 6.945 0 6.945 c
-0 0 l
-f
-Q
-0.169 0.506 0.337 scn
-q 1 0 0 1 183 318.1274 cm
-0 0 m
--0.174 -0.267 -0.682 -0.3 -0.974 -0.428 c
--3.27 -1.438 -6.363 -4.313 -7.593 -6.58 c
--13.39 -17.262 -13 -20.653 -12.686 -23.377 c
--12.045 -28.943 -6.307 -36.332 3.975 -34.516 c
-34.372 -29.149 23.201 -7.033 15.417 -1.844 c
-7.621 3.352 0.038 0.059 0 0 c
-0 6.145 l
-15.462 6.145 27.999 -6.392 27.999 -21.854 c
-27.999 -37.316 15.462 -49.853 0 -49.853 c
--15.462 -49.853 -27.999 -37.316 -27.999 -21.854 c
--27.999 -6.392 -15.462 6.145 0 6.145 c
-0 0 l
-f
-Q
-0.165 0.49 0.329 scn
-q 1 0 0 1 183 318.8281 cm
-0 0 m
--0.26 -0.393 -1.01 -0.429 -1.443 -0.612 c
--4.281 -1.816 -7.531 -4.969 -9.346 -8.278 c
--13.498 -15.848 -13.757 -21.085 -13.244 -24.146 c
--12.335 -29.558 -7.256 -38.113 6.018 -35.853 c
-29.65 -31.827 27.567 -10.229 15.691 -2.188 c
-7.725 3.206 0.039 0.058 0 0 c
-0 5.444 l
-15.462 5.444 27.999 -7.093 27.999 -22.555 c
-27.999 -38.017 15.462 -50.554 0 -50.554 c
--15.462 -50.554 -27.999 -38.017 -27.999 -22.555 c
--27.999 -7.093 -15.462 5.444 0 5.444 c
-0 0 l
-f
-Q
-0.161 0.478 0.322 scn
-q 1 0 0 1 183 319.4941 cm
-0 0 m
--0.27 -0.397 -1.042 -0.411 -1.488 -0.586 c
--3.111 -1.225 -7.249 -3.37 -10.633 -9.471 c
--11.685 -11.368 -15.021 -18.084 -13.796 -24.877 c
--12.453 -32.323 -5.461 -39.362 6.714 -37.218 c
-28.943 -33.304 28.97 -11.255 15.609 -2.301 c
-7.856 2.895 0.038 0.056 0 0 c
-0 4.778 l
-15.462 4.778 27.999 -7.759 27.999 -23.221 c
-27.999 -38.683 15.462 -51.22 0 -51.22 c
--15.462 -51.22 -27.999 -38.683 -27.999 -23.221 c
--27.999 -7.759 -15.462 4.778 0 4.778 c
-0 0 l
-f
-Q
-0.157 0.467 0.314 scn
-q 1 0 0 1 183 320.105 cm
-0 0 m
--0.285 -0.403 -1.085 -0.384 -1.55 -0.549 c
--2.14 -0.758 -7.426 -2.783 -11.14 -9.4 c
--12.536 -11.888 -15.643 -18.441 -14.343 -25.552 c
--13.349 -30.994 -7.597 -40.716 7.05 -38.567 c
-28.064 -35.482 30.902 -13.127 16.17 -2.838 c
-7.979 2.883 0.04 0.057 0 0 c
-0 4.167 l
-15.462 4.167 27.999 -8.37 27.999 -23.832 c
-27.999 -39.293 15.462 -51.831 0 -51.831 c
--15.462 -51.831 -27.999 -39.293 -27.999 -23.832 c
--27.999 -8.37 -15.462 4.167 0 4.167 c
-0 0 l
-f
-Q
-0.153 0.455 0.306 scn
-q 1 0 0 1 183 320.6777 cm
-0 0 m
--0.294 -0.407 -1.113 -0.365 -1.59 -0.521 c
--3.037 -0.996 -8.057 -3.068 -11.887 -9.807 c
--12.95 -11.676 -16.306 -18.381 -14.886 -26.189 c
--13.692 -32.763 -6.813 -41.824 7.243 -39.849 c
-28.687 -36.835 31.471 -13.847 16.374 -3.144 c
-8.08 2.736 0.041 0.056 0 0 c
-0 3.595 l
-15.462 3.595 27.999 -8.942 27.999 -24.404 c
-27.999 -39.866 15.462 -52.403 0 -52.403 c
--15.462 -52.403 -27.999 -39.866 -27.999 -24.404 c
--27.999 -8.942 -15.462 3.595 0 3.595 c
-0 0 l
-f
-Q
-0.149 0.443 0.294 scn
-q 1 0 0 1 183 321.2148 cm
-0 0 m
--0.327 -0.44 -1.224 -0.37 -1.749 -0.528 c
--5.52 -1.667 -9.765 -5.26 -12.073 -9.267 c
--15.394 -15.036 -16.522 -20.932 -15.426 -26.791 c
--13.856 -35.176 -5.227 -43.01 7.675 -41.012 c
-29.382 -37.65 31.673 -13.956 16.092 -3.122 c
-8.188 2.374 0.041 0.052 0 0 c
-0 3.058 l
-15.462 3.058 27.999 -9.479 27.999 -24.941 c
-27.999 -40.403 15.462 -52.94 0 -52.94 c
--15.462 -52.94 -27.999 -40.403 -27.999 -24.941 c
--27.999 -9.479 -15.462 3.058 0 3.058 c
-0 0 l
-f
-Q
-0.145 0.431 0.286 scn
-q 1 0 0 1 183 321.7295 cm
-0 0 m
--0.315 -0.413 -1.169 -0.321 -1.671 -0.458 c
--5.628 -1.543 -10.186 -5.222 -12.509 -9.206 c
--13.794 -11.411 -17.706 -18.119 -15.958 -27.368 c
--14.312 -36.085 -5.369 -44.227 7.962 -42.147 c
-29.823 -38.738 32.256 -15.066 16.713 -3.752 c
-8.241 2.415 0.041 0.054 0 0 c
-0 2.543 l
-15.462 2.543 27.999 -9.994 27.999 -25.456 c
-27.999 -40.918 15.462 -53.455 0 -53.455 c
--15.462 -53.455 -27.999 -40.918 -27.999 -25.456 c
--27.999 -9.994 -15.462 2.543 0 2.543 c
-0 0 l
-f
-Q
-0.141 0.42 0.278 scn
-q 1 0 0 1 183 322.2021 cm
-0 0 m
--0.326 -0.417 -1.197 -0.297 -1.71 -0.424 c
--5.005 -1.241 -10.021 -4.174 -13.317 -9.752 c
--16.642 -15.38 -17.708 -21.487 -16.484 -27.902 c
--14.771 -36.889 -5.522 -45.311 8.242 -43.22 c
-29.813 -39.944 32.242 -15.421 16.845 -4.05 c
-8.507 2.107 0.042 0.053 0 0 c
-0 2.07 l
-15.462 2.07 27.999 -10.467 27.999 -25.929 c
-27.999 -41.391 15.462 -53.928 0 -53.928 c
--15.462 -53.928 -27.999 -41.391 -27.999 -25.929 c
--27.999 -10.467 -15.462 2.07 0 2.07 c
-0 0 l
-f
-Q
-0.137 0.408 0.271 scn
-q 1 0 0 1 183 322.6421 cm
-0 0 m
--0.165 -0.201 -0.596 -0.119 -0.851 -0.169 c
--6.63 -1.321 -11.086 -5.48 -13.33 -8.99 c
--17.823 -16.018 -17.96 -22.68 -17.283 -27.031 c
--15.529 -38.308 -5.353 -45.633 6.914 -44.447 c
-29.053 -42.307 33.213 -18.564 18.588 -5.674 c
-9.722 2.142 0.051 0.062 0 0 c
-0 1.63 l
-15.462 1.63 27.999 -10.907 27.999 -26.369 c
-27.999 -41.831 15.462 -54.368 0 -54.368 c
--15.462 -54.368 -27.999 -41.831 -27.999 -26.369 c
--27.999 -10.907 -15.462 1.63 0 1.63 c
-0 0 l
-f
-Q
-0.133 0.396 0.263 scn
-q 1 0 0 1 183 323.0532 cm
-0 0 m
--0.345 -0.419 -1.243 -0.245 -1.775 -0.35 c
--5.333 -1.052 -10.598 -4.013 -13.752 -8.857 c
--18.474 -16.108 -18.606 -22.979 -17.885 -27.465 c
--16.272 -37.503 -7.101 -46.92 7.31 -45.499 c
-29.575 -43.3 33.52 -19.116 18.666 -5.999 c
-9.679 1.938 0.05 0.061 0 0 c
-0 1.219 l
-15.462 1.219 27.999 -11.318 27.999 -26.78 c
-27.999 -42.242 15.462 -54.779 0 -54.779 c
--15.462 -54.779 -27.999 -42.242 -27.999 -26.78 c
--27.999 -11.318 -15.462 1.219 0 1.219 c
-0 0 l
-f
-Q
-0.129 0.384 0.255 scn
-q 1 0 0 1 183 323.4082 cm
-0 0 m
--0.359 -0.424 -1.279 -0.213 -1.827 -0.305 c
--2.571 -0.429 -9.239 -1.713 -14.035 -8.521 c
--19.337 -16.049 -19.04 -23.602 -18.666 -26.5 c
--16.791 -41.035 -4.557 -47.119 6.015 -46.62 c
-29.237 -45.525 34.039 -19.966 18.705 -6.311 c
-9.693 1.714 0.05 0.059 0 0 c
-0 0.864 l
-15.462 0.864 27.999 -11.673 27.999 -27.135 c
-27.999 -42.597 15.462 -55.134 0 -55.134 c
--15.462 -55.134 -27.999 -42.597 -27.999 -27.135 c
--27.999 -11.673 -15.462 0.864 0 0.864 c
-0 0 l
-f
-Q
-0.125 0.373 0.247 scn
-q 1 0 0 1 183 323.7339 cm
-0 0 m
--0.366 -0.422 -1.29 -0.183 -1.842 -0.262 c
--5.616 -0.798 -11.203 -3.577 -14.553 -8.414 c
--20.526 -17.037 -19.484 -25.015 -19.142 -27.636 c
--17.325 -41.545 -4.721 -48.296 6.215 -47.587 c
-22.825 -46.511 31.838 -32.41 25.896 -16.796 c
-27.251 -20.083 27.999 -23.685 27.999 -27.46 c
-27.999 -42.922 15.462 -55.459 0 -55.459 c
--15.462 -55.459 -27.999 -42.922 -27.999 -27.46 c
--27.999 -11.999 -15.462 0.539 0 0.539 c
-0 0 l
-f
-Q
-0.122 0.361 0.239 scn
-q 1 0 0 1 183 323.9893 cm
-0 0 m
--0.38 -0.425 -1.322 -0.147 -1.889 -0.211 c
--3.74 -0.417 -10.183 -1.633 -15.334 -8.604 c
--20.12 -15.081 -20.496 -23.225 -19.964 -27.016 c
--18.071 -40.5 -7.311 -49.139 6.811 -48.512 c
-13.567 -48.212 30.458 -42.954 27.513 -22.495 c
-27.832 -24.187 27.999 -25.932 27.999 -27.716 c
-27.999 -43.178 15.462 -55.715 0 -55.715 c
--15.462 -55.715 -27.999 -43.178 -27.999 -27.716 c
--27.999 -12.254 -15.462 0.283 0 0.283 c
-0 0 l
-f
-Q
-0.118 0.349 0.231 scn
-q 1 0 0 1 183 324.1802 cm
-0 0 m
--0.389 -0.421 -1.333 -0.109 -1.905 -0.156 c
--5.862 -0.48 -11.762 -2.986 -15.367 -7.721 c
--21.456 -15.72 -21.121 -23.999 -20.694 -27.186 c
--18.877 -40.767 -7.134 -50.353 6.621 -49.484 c
-16.365 -48.869 27.809 -42.685 27.992 -27.284 c
-27.997 -27.491 27.999 -27.699 27.999 -27.907 c
-27.999 -43.369 15.462 -55.906 0 -55.906 c
--15.462 -55.906 -27.999 -43.369 -27.999 -27.907 c
--27.999 -12.445 -15.462 0.092 0 0.092 c
-0 0 l
-f
-Q
-0.114 0.337 0.224 scn
-q 1 0 0 1 183 324.269 cm
-0 0 m
--0.403 -0.423 -1.362 -0.067 -1.945 -0.096 c
--5.653 -0.278 -11.171 -1.795 -16.407 -7.987 c
--19.42 -11.549 -22.258 -18.906 -21.583 -25.522 c
--19.025 -50.59 4.157 -50.418 5.143 -50.399 c
-17.394 -50.156 25.847 -43.167 27.756 -31.704 c
-25.941 -45.414 14.205 -55.995 0 -55.995 c
--15.462 -55.995 -27.999 -43.458 -27.999 -27.996 c
--27.999 -12.534 -15.462 0.003 0 0.003 c
-0 0 l
-f
-Q
-0.11 0.325 0.216 scn
-q 1 0 0 1 178.769 323.9521 cm
-0 0 m
--22.529 -4.551 -23.528 -35.026 -6.329 -46.258 c
-6.848 -54.862 25.641 -52.169 31.069 -35.683 c
-27.625 -47.245 16.912 -55.678 4.231 -55.678 c
--11.231 -55.678 -23.768 -43.141 -23.768 -27.679 c
--23.768 -13.386 -13.055 -1.592 0.778 0.109 c
-0.544 0.077 0.232 0.04 0 0 c
-f
-Q
-0.106 0.314 0.208 scn
-q 1 0 0 1 170.9761 321.4922 cm
-0 0 m
--16.563 -9.063 -17.344 -40.194 9.316 -48.713 c
-16.64 -51.054 30.629 -50.189 36.987 -37.91 c
-32.359 -46.995 22.917 -53.218 12.024 -53.218 c
--3.438 -53.218 -15.975 -40.681 -15.975 -25.219 c
--15.975 -12.683 -7.734 -2.069 3.625 1.499 c
-3.1 1.309 2.399 1.057 1.873 0.867 c
-1.31 0.61 0.543 0.297 0 0 c
-f
-Q
-0.188 0.565 0.376 scn
-q 1 0 0 1 198.9263 298.0972 cm
-0 0 m
--1.706 2.422 -2.871 5.192 -4.806 7.466 c
--5.58 8.375 -6.333 9.14 -7.046 9.74 c
--7.103 9.788 -12.7 14.579 -12.706 14.929 c
--12.708 15.035 -10.925 16.753 -10.74 16.825 c
--10.058 17.086 -7.544 17.231 -6.875 17.166 c
--5.111 16.992 -2.438 16.241 0.275 13.649 c
-3.79 10.293 4.269 6.382 4.332 5.263 c
-4.608 0.362 1.816 -1.553 1.125 -1.426 c
-0.589 -1.328 0.314 -0.445 0 0 c
-f
-Q
-0.192 0.576 0.384 scn
-q 1 0 0 1 199.0605 300.5908 cm
-0 0 m
--1.97 2.883 -3.055 4.471 -4.87 6.595 c
--5.072 6.832 -5.375 7.116 -5.591 7.34 c
--5.844 7.601 -6.16 7.969 -6.419 8.224 c
--6.913 8.711 -7.551 9.382 -8.074 9.839 c
--9.724 11.281 -9.908 11.547 -9.911 11.595 c
--9.914 11.655 -8.389 13.369 -8.295 13.411 c
--7.711 13.674 -6.801 13.346 -6.164 13.276 c
--2.962 12.927 -1.156 11.212 -0.476 10.566 c
-2.531 7.709 2.783 5.143 2.904 3.909 c
-2.938 3.565 2.929 0.875 2.709 0.41 c
-2.675 0.337 0.707 -0.875 0.645 -0.861 c
-0.33 -0.793 0.182 -0.267 0 0 c
-f
-Q
-0.196 0.588 0.392 scn
-q 1 0 0 1 198.1455 304.1201 cm
-0 0 m
--0.737 0.235 -1.076 1.45 -1.576 2.04 c
--3.148 3.894 -3.148 3.894 -3.897 4.678 c
--4.212 5.008 -4.84 5.354 -4.922 5.803 c
--4.014 7.981 l
--3.953 8.007 -1.427 7.15 0.33 5.083 c
-1.631 3.552 2.397 0.755 2.281 0.574 c
-1.906 -0.01 0.699 -0.197 0.037 0.011 c
-0.026 0.014 0.011 -0.003 0 0 c
-f
-Q
-0.125 0.373 0.247 scn
-q 1 0 0 1 195.0493 321.5449 cm
-0 0 m
--5.275 2.417 -9.403 2.407 -12.049 2.189 c
--12.049 2.728 l
--6.604 2.728 -1.522 1.173 2.777 -1.517 c
-2.232 -1.205 1.506 -0.789 0.961 -0.477 c
-0.673 -0.334 0.292 -0.134 0 0 c
-f
-Q
-0.122 0.361 0.239 scn
-q 1 0 0 1 191.2632 323.0293 cm
-0 0 m
--3.078 0.794 -4.478 1.111 -8.263 0.96 c
--8.263 1.243 l
--4.866 1.243 -1.61 0.638 1.402 -0.47 c
-0.981 -0.329 0.425 -0.126 0 0 c
-f
-Q
-0.118 0.349 0.231 scn
-q 1 0 0 1 187.231 323.9521 cm
-0 0 m
--2.557 0.263 -2.657 0.273 -4.231 0.228 c
--4.231 0.32 l
--2.431 0.32 -0.671 0.15 1.035 -0.174 c
-0.724 -0.122 0.312 -0.042 0 0 c
-f
-Q
-0.114 0.337 0.224 scn
-q 1 0 0 1 183 324.269 cm
-0 0 m
-0.335 0.003 0.669 -0.002 1.001 -0.014 c
-0.701 -0.01 0.211 -0.214 0 0 c
-f
-Q
- endstream endobj 1370 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1308 0 obj <</Intent 1371 0 R/Name(Guides For Artboard)/Type/OCG/Usage 1372 0 R>> endobj 1309 0 obj <</Intent 1373 0 R/Name(Base)/Type/OCG/Usage 1374 0 R>> endobj 1310 0 obj <</Intent 1375 0 R/Name(Class)/Type/OCG/Usage 1376 0 R>> endobj 1311 0 obj <</Intent 1377 0 R/Name(Trait)/Type/OCG/Usage 1378 0 R>> endobj 1312 0 obj <</Intent 1379 0 R/Name(Package)/Type/OCG/Usage 1380 0 R>> endobj 1313 0 obj <</Intent 1381 0 R/Name(Object)/Type/OCG/Usage 1382 0 R>> endobj 1381 0 obj [/View/Design] endobj 1382 0 obj <</CreatorInfo<</Creator(Adobe Illustrator 15.0)/Subtype/Artwork>>>> endobj 1379 0 obj [/View/Design] endobj 1380 0 obj <</CreatorInfo<</Creator(Adobe Illustrator 15.0)/Subtype/Artwork>>>> endobj 1377 0 obj [/View/Design] endobj 1378 0 obj <</CreatorInfo<</Creator(Adobe Illustrator 15.0)/Subtype/Artwork>>>> endobj 1375 0 obj [/View/Design] endobj 1376 0 obj <</CreatorInfo<</Creator(Adobe Illustrator 15.0)/Subtype/Artwork>>>> endobj 1373 0 obj [/View/Design] endobj 1374 0 obj <</CreatorInfo<</Creator(Adobe Illustrator 15.0)/Subtype/Artwork>>>> endobj 1371 0 obj [/View/Design] endobj 1372 0 obj <</CreatorInfo<</Creator(Adobe Illustrator 15.0)/Subtype/Artwork>>>> endobj 1307 0 obj <</BaseFont/JEFNSX+MyriadPro-Regular/Encoding/WinAnsiEncoding/FirstChar 49/FontDescriptor 1383 0 R/LastChar 102/Subtype/Type1/Type/Font/Widths[513 513 0 513 513 513 0 513 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 448 564 501 292]>> endobj 1383 0 obj <</Ascent 952/CapHeight 674/CharSet(/one/two/four/five/six/eight/c/d/e/f)/Descent -250/Flags 32/FontBBox[-157 -250 1126 952]/FontFamily(Myriad Pro)/FontFile3 1384 0 R/FontName/JEFNSX+MyriadPro-Regular/FontStretch/Normal/FontWeight 400/ItalicAngle 0/StemV 88/Type/FontDescriptor/XHeight 484>> endobj 1384 0 obj <</Filter/FlateDecode/Length 1244/Subtype/Type1C>>stream
-H‰|SmPWŻ%ģF*MŪÄe4”»«µ#"b(‚¢8
-6cˆ3~u &jђv¦­ !:T,~ą h2UŪ‚+h‘ÅŌ¢‚2ѱTķön|™i7ōOõĻ™{ī™{Ī}oŽĆ±ą ĒńYiéÕyKsö9­Ne4,Ū¤ßU^¢5D†Wąüü`><”B+ćåĖ—Łō½·ßč ~&łpüŽćTC™0»k·™]‘˜@å,&D³q±±±³Ød×ė …zV½ßdÖļ5±™„Ec™ĮØ5ėu1ģś’vÖĀÄõ&½±"Šüw#–3±zĪ¼[odµ‚ø‹ęzk6juś½Zc1k(’”;’'ŠåJYĮ‹Õ”r¦6 M«-Õ-\ ³)E†ņR³‘Ó›b–§«s÷—éŁU¬NæĆpLŠcT¶
- ®«„=U“ØxsėŃ2¦<ƒØ<×W9¢€…/ž‚lL{=Ɲi+*hĢS˜Ś¬˜6XM€”X† wbFńÆ'į¤ą­ōERUå¶Ļ÷1‰§īŃlž“µa[š'GÉÓĀĒ BAšō‰Ss9m m%
-÷6ĀQŠ| WZœ' $ZFt|ņ0”TJ]^83)sóK|ŗd»Ź .¤(PHDŠ·3hÉt$„Üė½Ųsšv²u Ä]ŅzŽź¾,ēĒÅJuŠV}ؚ–¹³Ŗ4E*Ų‚±ß’ģ¹?żS×ÖÜfĘa±°Č%X†aM?,źĒ]Į9!ā ”‚Ŗm“7“{ F+ŃĀxD¢į„Øў–ĆtU?q°XgÉVÄ„øa®•±÷S7\cĻN)smŒ„>Bü $ąć“"Hćć!d…œßłjąĪé–Ļ>m¦›†ˆ†÷5+
-·T¼‡)ŅUå¼'7gd ść…Ēéō½Ó„¶9a¹Óķō›H>«e×ū ĪļźĻŌ‹éć;Ņ>9üwhČą+Ss§źB_åēńoQ’0
-%!PS-Adobe-3.0 %%Creator: Adobe Illustrator(R) 13.0 %%AI8_CreatorVersion: 15.0.0 %%For: (donna) () %%Title: (type_tags.ai) %%CreationDate: 4/4/11 7:44 PM %%Canvassize: 16383 %%BoundingBox: -227 -63 143 234 %%HiResBoundingBox: -226.5 -62.001 142.5898 233.748 %%DocumentProcessColors: Cyan Magenta Yellow Black %AI5_FileFormat 9.0 %AI12_BuildNumber: 399 %AI3_ColorUsage: Color %AI7_ImageSettings: 0 %%RGBProcessColor: 0 0 0 ([Registration]) %AI3_TemplateBox: 40.5 29.5 40.5 29.5 %AI3_TileBox: -239.5552 -349.6377 319.4453 433.3623 %AI3_DocumentPreview: None %AI5_ArtSize: 14400 14400 %AI5_RulerUnits: 6 %AI9_ColorModel: 1 %AI5_ArtFlags: 0 0 0 1 0 0 1 0 0 %AI5_TargetResolution: 800 %AI5_NumLayers: 6 %AI9_OpenToView: -239.25 83.5 4 1355 732 18 0 0 43 154 0 0 1 1 1 0 1 %AI5_OpenViewLayers: 777777 %%PageOrigin:-399 227 %AI7_GridSettings: 72 8 72 8 1 0 0.8 0.8 0.8 0.9 0.9 0.9 %AI9_Flatten: 1 %AI12_CMSettings: 00.MS %%EndComments endstream endobj 1387 0 obj <</Length 7484>>stream
-%%BoundingBox: -227 -63 143 234 %%HiResBoundingBox: -226.5 -62.001 142.5898 233.748 %AI7_Thumbnail: 128 104 8 %%BeginData: 7336 Hex Bytes %0000330000660000990000CC0033000033330033660033990033CC0033FF %0066000066330066660066990066CC0066FF009900009933009966009999 %0099CC0099FF00CC0000CC3300CC6600CC9900CCCC00CCFF00FF3300FF66 %00FF9900FFCC3300003300333300663300993300CC3300FF333300333333 %3333663333993333CC3333FF3366003366333366663366993366CC3366FF %3399003399333399663399993399CC3399FF33CC0033CC3333CC6633CC99 %33CCCC33CCFF33FF0033FF3333FF6633FF9933FFCC33FFFF660000660033 %6600666600996600CC6600FF6633006633336633666633996633CC6633FF %6666006666336666666666996666CC6666FF669900669933669966669999 %6699CC6699FF66CC0066CC3366CC6666CC9966CCCC66CCFF66FF0066FF33 %66FF6666FF9966FFCC66FFFF9900009900339900669900999900CC9900FF %9933009933339933669933999933CC9933FF996600996633996666996699 %9966CC9966FF9999009999339999669999999999CC9999FF99CC0099CC33 %99CC6699CC9999CCCC99CCFF99FF0099FF3399FF6699FF9999FFCC99FFFF %CC0000CC0033CC0066CC0099CC00CCCC00FFCC3300CC3333CC3366CC3399 %CC33CCCC33FFCC6600CC6633CC6666CC6699CC66CCCC66FFCC9900CC9933 %CC9966CC9999CC99CCCC99FFCCCC00CCCC33CCCC66CCCC99CCCCCCCCCCFF %CCFF00CCFF33CCFF66CCFF99CCFFCCCCFFFFFF0033FF0066FF0099FF00CC %FF3300FF3333FF3366FF3399FF33CCFF33FFFF6600FF6633FF6666FF6699 %FF66CCFF66FFFF9900FF9933FF9966FF9999FF99CCFF99FFFFCC00FFCC33 %FFCC66FFCC99FFCCCCFFCCFFFFFF33FFFF66FFFF99FFFFCC110000001100 %000011111111220000002200000022222222440000004400000044444444 %550000005500000055555555770000007700000077777777880000008800 %000088888888AA000000AA000000AAAAAAAABB000000BB000000BBBBBBBB %DD000000DD000000DDDDDDDDEE000000EE000000EEEEEEEE0000000000FF %00FF0000FFFFFF0000FF00FFFFFF00FFFFFF %524C45FD0AFFA8282F53FD7CFF06A8FF5359FD7AFFA92EA8FF537EFD7BFF %53282E28A8FD27FF2E2828A8FD51FF847EAFFD27FF5953AFA928FD7BFF59 %2EFFA82FA8FD7AFF847E7E2E59FD7CFFA87D7DFDFCFFFDFCFFFD31FFA9FF %FFFFA9FFFFFFA9FD76FFA9FFA9AFA9FFA9AFA9FFA9AFA9FD1AFFA87E537E %A8FD2BFFA8A9A8AFA8FD23FFA9A9A8A9A8A9A8A9A8A9A8A9A8AFFD17FFA9 %7E282F282F282F2F7EA8FD26FFA8532F282F282F53A8FD22FFA9AFA9AFA9 %AFA9AFA9AFA9AFA9FD16FF7E2E0128062F292F072F062F7EFD23FFA85906 %06062F282F0629062F84FD1FFFA8A9A8A9A8A9A8A9A8A9A8A984FD16FF84 %28062F282F292F29302F30292FA8FD21FFA8530629282F2F2F29302F2F29 %2F84FD1FFFA9AFA9FFA9AFA9FFA9AFA9AFA9FD09FFA8A87DFF7DFFA8FD04 %FFA8280529282907532F53292F292F072FA8FD1FFFA92E00280629062F29 %2F292F292F072FA8FD1DFFA8A9A8A9A8A9A8A9A8A9A8A9A8FD04FFA87DFF %7DFFA852275227277D27A8FFFFFF530628062F5AA9FD04FF7E302F300753 %FD1FFF7E05282FA984A82F7EA8A9845A2F302953FD1EFFA8AFA9AFA9AFA9 %AFA9AFA9A9A9FFFFFF52FF5227527D7D52527D527DF8A8FFFFA800280628 %7DFD07FF7E2F292F06A8FD1DFFA905280653FFFFFFA9FD05FF7E292F0684 %FD1CFFA8A9A8A9A8A9A8A9A8A9A8A9A8AFFFFFA8527D7D27A8FD05527DFF %7DA8FFFF5328282959FD04FFA9FD04FF7E302F2F59FD1DFF7D28282853FD %0AFF5A292F53FD1DFFA9FFA9AFA9FFA9AFA9FFA9AFA9FD09FFA8FFA8FD06 %FFA82E002806A9FFFFFF7E067EFD04FF292F292FA8FD1CFF5300280653FD %04FF595AA9FFFFFF7E2F292FA8FD1BFFA9A9A8A9A8A9A8A9A8A9A8A9A8AF %FD13FF2828062FFD04FF2F2F29FD04FF542F2F28FD1DFF2E28062953FFFF %FFA82F067EFFFFFFA92F2F28FD1DFFA9AFA9AFA9AFA9AFA9AFA9AFA9FD12 %FFA828052828FFFFFFA82F072FA8FFFFFF2F2F2829A8FD1BFF8428052806 %59FFFFFF8406292FFFFFFF842F2929A8FD1BFFA8A9A8A9A8A9A8A9A8A9A8 %A984FD14FF2828062FFD04FF2F292FFD04FF54292F28FD1DFF5328062953 %FFFFFFA82F075AFFFFFFAF292F28FD1DFFA9FFA9FFA9FFA9FFA9FFA9FD15 %FF53002806A8FFFFFFA82984FFFFFFA8062F0653A8FD1CFF7D00280659FD %04FF532FA8FFFFFF7E29062FA8FD1BFFFD04532E5353532E535353287EFD 
%13FF7D2828282FFD09FF532F28297DFD1DFF7E28062853FD0AFF53062953 %FD1CFF5300280006002800060028000053FD13FFA92828050653FD07FF53 %29282828FD1FFF28280053FD09FF7E062806A8FD1CFF2E05002800060028 %000600280053FD14FFA8052806282E7E84AFA87E292F282F06A8FD1FFFA8 %062853FFFFFFA8A9FFFFA87E282F067DFD1DFF5300280628052806280528 %060653FD15FF7D0028052806280628062806280059FD21FF7D0053FFFFFF %7E062F28280628012EA8FD1DFF2E05002800060028000600280053FD04FF %A8FFFFFFA8FFFFA8A8FFA8FD07FF7E06280528282806282828057EFD23FF %5952FFFFFF7E28062828280053A8FD1EFF53002805280628052806280506 %53FFFFFF7D52FF7D52A852A852A8527DFF7D7DFD04FFA9A8282800060006 %002828A8FD24FFA8A8A9FFFF8400060006287DA8FD1FFF28060006002800 %06002800060059FFFFFFA852A8277DFF7D527DA8527D7D27A8FD07FFA87E %597D537EA8FD2BFFA8282E7D7DFD22FF5300280528062805280628050653 %FFFFFFA87DA852527D52FF527DA852527D27FD3AFFA8FD25FF2E06000600 %280006002800060059FD04FF7DFFFFA8527D7D7DA87D7DFF7DA8FD60FF53 %00280628052806280528060653FD72FF2E05002800060028000600280053 %FD72FF5300280628052806280528060653FD72FF28000006000500060005 %00060053FD72FF7E2E532E5352532E5352532E537DFDFCFFFDFCFFFDFCFF %FDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFD9AFFCACAA8CACACA %A8CACACAA8CAA8FD74FFFD0DCAFD18FFA8A859595883A8FD53FFCACAA8CA %CACAA8CACACAA8CACACAFD17FF7D2E0B2E0B2E0B3434A8FD52FFFD0DCAFD %15FFA82D0B052E0B340B340B340B59A8FD29FFA8845959535959A8FD1EFF %A8CAA8CAA8CAA8CAA8CAA8CAA1FD05FFA8FFA8FD0DFFAE2D2D2D342E3434 %34123434340B59FD28FF7D2F0C2F2F352F352F5AA8FD1DFFCACACACBCACA %CACBFD05CAFFFFFFFD047DFD0DFF2D0B0B2E0B0C0BFD07340B59FD25FFA9 %2E2E0C2F0C352F352F352F357DFD1BFFA8CACACAA8CACACAA8CACACAA8FD %04FFFD04527D7D7D275252A85252A8FFFF590B0B2E0B345FAFFD04FFFD04 %340BA8FD24FF2E2E2E35358484FF5A36355A35357EFD1BFFFD0DCAFFFFFF %52A8522752FF525227A8A82752FFFFA82D052D0B2E84FD06FF340B34120C %2EFD23FF522E2E2F0CA9FFFFFF5A2F3635362F3584FD19FFCACAA8CAA8CA %A8CAA8CAA8CAA8CAFFFFFFA8A8FFA8A87DFF7DA852A87D7DFFFF84052E0B %2E83FD05FFAFAFFD04340C34AEFD21FF842E2E2F2F5AA9FFFFFF5A5A355A %365A2F5AFD1AFFCACBCACACACBCACACACBCACACAFD12FF5805052D2DFD04 %FF830B342E340C3434340BA8FD21FF59052E2E35A8FD06FF5A2F5A35352E %A8FD18FFCACAA8CACACAA8CACACAA8CACACAFD12FF7D052D0B59FFFFFFAF %0C340BFD073459FD21FF2E2E2E3535FD07FF5A35355A353559FD19FFFD0D %CAFD12FF5205050B34FFFFFF84340B340B340B340B340B59FD20FFA82E06 %2E0C3584FD04FFAF845A2F352F360D59FD18FFCAFFCACACAFFCACACAFFCA %CACAFD13FF7D052D0B59FD04FF34340BFD05342E2E83FD20FFA9282E2E35 %2F35A8FFFFFF5935355A355A353559FD18FFA176A176A176A176A176A176 %9AA1FD12FF7D2D050B0BFD05FF595F59340C340B2E0BAFFD20FF842E062E %2E352FA9FFFFFF5A2F352F352F352E59FD18FF4BFD042044202020442020 %2076FD13FF2D2D0B0B59FD07FF2E342E340B59FD22FF062E2E2F2F36A8FF %FFFF59352F5A35362F2F59FD18FF76204B444B204B444B204B444476FD13 %FF7D042D050B59FD06FF340B2E0B0B7DFD21FFA82E052E0C2F2EAFFFFFFF %5A0D352F352F2F0684FD18FF4B44204B2044204B2044204B2076FD13FFAF %58052D052D2E8484A884832E342D0B58FD23FF7DFD042E35A8FFFFFFAFAF %35352F352E59AFFD18FF76204B444B444B444B444B444476FD07FFA8FD0C %FF842E042D052D050B050B052D050B2EFD24FFAF282E062E0684FD05FF59 %0C352E2E59FD19FF4B44204B2044204B2044204B2076FFFFFF7D7DFF52A8 %7D52FF7D52A852FF527DFFFFFFA858050B052D0B2D052E050B52FD26FFA8 %05FD042EAFFD04FF592F2E2E53FD1AFF76204B204B444B204B444B204476 %FFFFFF52A87D52A8FF52A8277DA87DA85252FD04FFA87D2D0B0405040505 %2E7DFD28FF7D052E062E06597D84592F060C2EA9FD1AFF4B442044204B20 %44204B20442076FFFFFFA8275252527D52A85227FF52A85227FD07FF847D %597D59AFFD2BFFA82E2E062E062E062E062E59FD1CFF76204B444B444B44 %4B444B444476FFFFFF7DA8FF7DA8FF7DFFFFA8FFA8FFFFA8FD3AFF592E05 
%2E062E065984FD1DFF4B442044204B2044204B20442076FD4FFFA8A8A8FD %20FF76204B444B204B444B204B444476FD72FF4B44204B2044204B204420 %4B2076FD72FF76204B444B204B444B204B444476FD72FF5244204B444B20 %4B444B204B2076FDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFD2FFFA82E0BA8FD %7BFFA82D58830BA8FD2EFFAFFD4CFF2DFF830B59FD2CFFAF353584FD4BFF %2E2E5905A8FD2CFF067E840D84FD4AFFA87D59A8FD2DFF2FA8AF0C84FD7B %FF0C59840C84FD7BFFAF3559A8FD7EFFAFFD42FFFF %%EndData endstream endobj 1388 0 obj <</Length 65536>>stream
-%AI12_CompressedDataxœģ½ė’Éu łłµ?dFš-QįqīŚ˜ÕUĆYQ¤±ÉĶŹĘŚŠ@u#
-±ÖWõ8~żīłĒųŗRļĶćŪ§zĘųźckŁžØ~ż|óß<¼|¼¼²ĪøņĮęŻ]>Éy÷Ŗ~¤ĖĒ·ś—äŗ‹u£4l›ŚjĖmį«×æ-$vZģó’’śźżƒõ诧÷OśIõH‰Ļ­-ŻÜéņ«×õ ūšØļ|łM}Ÿ§W2@“ÆŪqöŪ‡ńµQ_śńwOŸ>Ō!rŗ¼JCō.żūŹFĘ]|ø;Ū5?³å?ĖĆõgń¢ŸŁ‹~a/śEŚßūöŠßŲ+~“7ó{ź•ōT=I_¦µ­#Ž>¼|/ć½r__öšR;¤ķˆ>]žīӻƎzūęįÓĒzÖSĆ?\¾|Øė~}§,]žÓ—æžPO ķ¼1}©˜ŗ{÷ņINņ?½ųņŁYõ3ēŁææ|ž²Ėē+Ģż’ŚU6ņ6żŗF0§æ/kńcü¢lÓeŻW’ųéįMm¬—Æß}U•HG ģ}Ah xźĪ—Ę©ģEÄ
-‚ÆŽ¼zżÕW—õ‹¾ÕÓņå7ļŸ^}zY1ńŗnń£
-ÜįęžęīęöęęęŗĀąøŁėš^ė±3ßL7c=Ū×÷×w×·×7õ3]ÕO¶×Ļ·ÖO9×Ļ:^—ÓõPOwõć LäŒrŌƳÕ/µŌÆ6Õ/X®†:`ļźš½©_śŖ~õ½ī€µŽų¹Žž± C=.īź"§Ą+ŁS{=€ÖŗŪęz\õųź±vW¹›zō]ÕcdÆ»·īńŗ›ēŗ³ĒŗĖ‡Jć»ŗūoj'\Õ®ŲėÓkķ–yNõØ/ė°Ü׎ŗ­}v]{īØż·Õ^\j_NµGĖ2Ģ÷u§ŻVŖ\×¾>joµß—Źœ©† e¦ū
-£»Š¤›SeÓU%{+k3s9c„Ų0Ž×±t[GŌu=:¾¶J¼„Ž¶é_חŸķÉŪżōĆöäõpŠžüśQŗńō½śń{õ¢uāééÅŚ‡tįéū÷a=‹•q(÷’›ōåyož¾ēQł½{óō“Ń›§?刓Ž¬}9_ü՗×ļkr"EØeø(Ć K¾8żÕ—iÉõ‡S±V<ķś\„5i‰Ļ÷Ė˜ĘĻdL£dLsdLrž#?"»©';NwrĀ›ō„G$&§="ėC#„kĻœźŠ;•ūˆĖä\(gC9Jt&ńŚ•ī³Ó<źÖ’īõl)Q›Ämõ¬yŅąm®;~Õ(Aœ„qČ]i0'į܍ö•ü’­²ō^Ņ‹³žÜå Čg9i~&įŽģ’æ¤ĒŖ!ą¦a ż’®į”Aį•’’•ž°Æ5³»>Å?ėC_L»nkÕ!µų»MśŽ:øä”Ó7žøŠĒīś•Oõ›/>šģģžŗ›|·Éć&WńŲõ!»|Շ ĶIņŚIūĒŅgõQ»O»ŃWńŲÓ#b]³©”S‰?2nāq›įŗ{\é˜ć±?NŒłCwŖBhQĶ’$^—`‚?>WAŲ#ÜQźH$
-‚į¤aĆ½¦,wš¶Xźr£ĢŠņ§½‹ę6-Ņ­Z®[<£™4Ā5ˆŖC’äé%8r\kŲq„e@Kv,Ż±”Ē’žQK‡zĄh@b鏄%×'Ļ‚,²LČr”)E)–ŻśŁćZwå”rÓ½¼x\÷żIc‹^,OŗõčV" ’o:{ŒŻcčšRÜ= Oś×µēößU<d)Ł‰=rÜøwĶ*w'/ß­Nī^Ö£ø·¤R_‹3ū·ÖzśŽJkžÜJžļö¶éߧ®Šč’2Ž*­%ėvI®%¼FR%ÉŌį铔…~VZ4ōŻzm±Bļ¤Ēœ—³õčšj/oŚēVĘ¾ÓŖ]xŚ£Z}ē„ĘY‹»–oė©³œź ę2ƒœ4ė S;UŹQv„§H;=NZ€ÜōĀ
-²Ļķįō¹XŪNä$S$RG¤PK$O­Ąw^ā{Vä;}kĻ+}Ÿ)ō}g©ļäµ¾ļ¬Ü~G©ļY±ļōżj·ßæŲwś\µÆ+Õ½½āš¢cöy–Tć ©’X÷±ž&eöņ2ƒ<u åXĖÅx^ēū6wżA6nó±Õg•ĀKżGŻŅŗ–¼©£"·Ū“VT’1mu‘ljž^ Ó~±•SŻłķž°›ÕO:Ū¢ßp\ö¢3¼ėYkŚó7’¦ņÕG™ü=Św×Æ<Õåu,å½ųĆlP?Ż: ó®ėīĆ2É7ź+·)omā{n²Łģ6/Ś!ć P/R’@ŪūSŠ½æŖ’zxÓOy¾ģ¢üezĢ_¦ĒüezĢ_¦ĒüezĢ_¦Ē|ē’2=ę/Ócž2=ęŁ)™óļ§/’·›ó]Ó_žŲŌ™qĘ4~&cūŒéśŁƒ¢ōu*S÷ķĻ?§S*[QńĪķĻ=wż--ßąłg:ßģg
-U»4;łÉ¹H.&'č—fõZś•Ÿ©Ū$3fźT3?kG,­ēn‹Å-āl2×põŁg]E|˜§m°źkå°1ėgÆ»¢+S×<Š†ė?ź‡)e·ņk*–ž³jųR³VyŃzZµ>Ź¼­Vt-«nš–}—÷šėŁHß«¦“Ē(5įy}1¬ŪÜןøMž)‘ÉoŽ½{xūųźākĀ}­±Éē–vŃÉwžÉŸ={ō…}(q}|[ū[bšÓgā‘ĻÅSß7„WœĪöŹ_EÆDLˆÄ%ĆSD凊£ÄōŠ+Ÿ*U„I™q·ŻœŅŌP9ģ·aK|½śq-G4”õóAåŲe6čµØ'=JW?B‰Øķ˜“i Sšz«ōµ†Ņ6Łj²¤Čg|^—ćäStŸš[ź@“ɶėmĪÄę“ œżĘöQŁ.tļgk^Å<Ķ6KSā
-fŁeF?$ՎøTŗ9öXfĻžĘ³™CmÖŠOĆ¼ņI˜:7`õy˜ĢĮœbīå`³.õ”vė«‚Xe]¢±]ūœk–µļOŚż³’’ä!UšŠ†zš ’»G?ō…:~<Tč#ˆgńE}\Ÿ=RŌrŅlżćüOŻ÷ÓY4Už‡[cé$¹ž±<{¤Hļ’ܞ=öü°©Ŗß>õ[&£v—¤ņ4½"qŅBõ³©Øi"źģIɞfÜ|~ŖĪA=„d#Ļ½9Ÿ—˜g&ę¹‰mv¢Ķ;½9„I§%¦)ŚDÅÕŠŻ?zŁ©£)Mēø5Ćõ6›péfāf؉q£įŠ÷˜Nų­ó~īŚ¼Ÿ“†hß6õĒ'’ÄE)æ„óņ ŸtņėLb*Šuš$Ćāī,śśA‚’<Ź!
-ūW‡s}öƒlī»¢Æ?åē)Ś/3łO™Ōhķ—Oožįįāöé#?oRcµēĖ.Ęe\.Ź‹żXg!ĶMf!¬Kż¦Ÿ“ĢuõÖ³Š’NŃßżA›’©žóæׅ’t1_üüāļ’ŪpńJ—’ŻÆź_¾%6ü¶.źßźob‰æ›.é?@^”×{§ļ󋿬æ{ų’ū£~å;qšŲ>Ō·žĆśŃÖņbŪäMŹ‹µĘYśŁdŁ!K–½” }Å”=·Œė¬m™ĒSŪó:/mõš©/ė8ÉkÖŹ(}Ķ0č:u[ś.Ó“°ŹR)Y4ŹØķygi×CnÕUiė0ß.nl)¤é²b«õ©M¦¼¬Å6Q•MŽ•‰w4Ź¢E~ÜQŚóŗk»3Ö¶w©[ˆ/3Oć*Ėj<ŗėk*"„]ū]æÜR}›zNŚc •ėQ{fZm§ķ““+G6iņė™µ½Ģ›æOķńɖMĒQ×_LeØ_cÆßxY„½Č$i—U¶I_±o‹¼ą˜äļ£ŽQŁSż@ė¦ķzŠ“5¦ŗ{×C–•aŌ×#ō5sķÕŚ^ėž§ ūl²Ÿė”eˆņ’ķXō»ĶuKūXVŻÄRp[g~!s{õūNņęõ,5źk6³Œś|™&Ö©ėė¢ÉĖK¶”h{߬YĻ…ŗ—„ē|m™“7Ė²čV·Y;s,•ŒŅ”ļ*ķze•]~ĒS:S"i Š„ĆŁµ½nŚ®£®­³lŗŽ\v½HŪØg4m>ģ¶iŽOVc=Vkfdƙõ55Ʋ·ź·ć· ±Ī> zŌ~·Vģ@›ė ŅÆ3.EĶuˆ=0ĶKŻN+³¼OķĪś}¤]ĻS‡ ‰­žī„­}åƒfŁ}Ķ\tŌŌģģŠMŒŅEõļ”pµ]C‹± Ķ]ߦ~ńI^Sź9RŚĆ¶źatȌ2i—}åKĖņq9Aź!0ė½×±oķEŪ[UXĒz¬~ÅčkvŪ%uUcĻ¾Ł.ØCÄ )¦.Ŗ1¾¤F$Śž†CŪĖŗŠźč•uīėĖ\€„,§ 4ĖŲHWŪCĶ‰;ŅɲŃ>±č„ rz†tŅ.óšIWķėÜH'ķ²4ŅI{ZzŅ-rX®A:yÉø4Ņi{čH§[ŻéjŪ°ķ¤ÓöŅ“N–é¾sŅIūéd›óŌ“N¾ą<4ŅéN+tµ=:ł‚tuY…n#]m×ŅH·ČA¹v¤[–
-”Ņ-ņĆĮC#]mĻćѓ®.[‡£‘®¶÷qk¤[Vż‰čLŗŗh”_™†tµ]ūæ‘NžßžtuŁ¤„wŅÕö²­tµ½KOŗŗl?– Ż²½źž†tŅÜJO:Y¶ÆtŚ–ßĻvŌÕv‘/›Q'ĖW ®¶åc NŚĆŠ£NÖ9††ŗ“ A4§„G,+SC¾fkØÓöŚ£N—M uŅ^ʆ:łzõģP';rŪź“?KCŒ}źQWŅ”Sp uµ)5­@Ž»Ņ£NĘęVźj»Ę uµ­ėfŌÉ—õA²ekØÓöŽ£N— uzÜL uŹ¢”C°ę8ź“½7ŌI{?ž£®&&ótt¤“EĖŗź¤]Ļ:i—©tØ«Ė¦u˜uŅ†=PWŪ£Eō®.)’¢@:iמ ŅI{ŻŽtó
-ääŁårŚ¶ätQ™rŅ.(…œ“{ƒ€œ¤iĒQrņ‹ī”qŅÜ," ĘÉ7Ū¦#§{kŽƒqµ½Jš•W—ķƒ-SĘÕöQOTĮøYŽĘ:“ćź¢éųĶ W›«UW›5;ĘÕeGMżƒqó^wč°ćj{šę„ƒ\]¶xœ„«mŻ7@®¶×q\;ČÕe›³S!WŪõܾäfłŒūŽA®.Ē½Q®¶ėŖ”«ĶzŚŲ;ŹÉ²Ńč£”“v±””“¶|ŻL9YVƤ Ü,?Uo‘–RNŚÓ“u”“e£ÅJ¹“ ”œ4WxPNߌr „œ“7ƒ©RĪÖŁ:Źé2 =”rś6v)ådeķ('{²Ōń
-Ōmšø.ź¶.qŻ$o5l)é6Ķ[K&lTHé6”¶'²B:iū±¤Ūt×MA:9ŠG; ”t›ä­FĀ Ż¦yė¤Ū5om¤Ū5o;ŅI·ŲHI·KŽ:lAŗ]óÖ.Ø«xŖƒtu‡&®%P'„śĆ°Ø;$qŻ×@Ż”‰ėŅP7<Ė\ė¢.s­ķ.s•ēĻ3×2ō™kśĢµ Ļ3×2t™k)]ę*ĶóĢU–åĢUŪ)s-åyę*ĖręZŪ]ę*ķóĢUÖəkŚ†²®<Ļ\Ké3W}MŹ\µ}–¹ź²”¹J;g®„<Ļ\eGęĢUśS¾:¬;ž'®»ę­ŌI~5:*źöēyė.ńh9‚tZ.?¦ Ż®kĪéäÜ6ZȦ¤Ū5mƒtūó“U‘s
-čäؙĘŃķšµŽt»f­-¢Ū5kt»f­ó3ŠÉäP”—I7hŽ:·"Ż qŠ¶mĮŗA2×aļćŗAR׽它¤®ŗ{œvƒä®ƒaÜ <¶qm¼4}5
-šÖ6
-¾t
-^^½’xūZošüšž?­‹~ō’üųāņ‹rSą‹é½õž£r·D¹×ī/žOy‘õu „×^__½|łéķƞ>ź sż…WßŗµŸŻŚKžÆś_żßoNL&/m2yż‡Ģ_/~ōć‹æū/'&Mūļ?eĪōŁ<u9‚ĻŖ–²(W-õ _ÓźćyÕRˆµiµ\ŽŅźć¼j©(ŁŅź£ÆZJū¼ji‡äį Zö4{śxVµŌE©j©<KUKåßYÕRØē]fO]ÕRšēUKłf¹j©{+U-ogUKym®ZÖæ»Ŗ„°ź¬j©ØjUKłž‰ó²Ī«–²§rÕR(“«–ŚgUKéā\µŌ.NUK=ÖĪŖ–r@ęŖåVśŖ„Šą¼j)`‰Ŗ天 Wøt^µ”e¹j©ģJUKÅŪYÕR–ķ„Eø‚Ä\µ”öyÕR–åŖ„ncŒWšēUK}ė­EøŅĪUK[§ÆZź²z}›Tµ”=rNzåjŖZŹörÕR¾ÖYÕRöF"½ģ¼\µŌwVµŌĮ™Ŗ–ŅĪUKé³óŖ„öuŖZźx)iśōń¼j©Ė¦Fų„›>}<«Z*¬RÕRŪ©j)ķ\µüŸG÷ė–ī?ŁeŹÉ¢Ć„ ģ—
-“£Į} iq}ŽÖ-)®×bĢż²Īó’ū:¬Š]hkżøLsi¦D?[»Rå}»\ŽūśœönÄ÷£¤˜sŠļGOur|?›­CxæŽĀūuĪ
-‚’zą.9¼ß-^ˆš¾&–[¤č~Ŗē¶mOŃż2 ‡]t!ŗ_7Æ7äč~Ł,f¶śmMź•›-¶Æ ļė·ć¤Ų~,öč~/‡>G÷Ēa•īĒb[‚ūcš3Y ī×b§lķqåslÆEµ³ŲŽ‹6„öė²§«ń5ל­$œCū}¶šßCū™’ƇöėpĢē”=%ķĮ-²×ÆŚGöĖ“­)°_-·n½%Į}`æV‹õĄ¾^ĆŠĄ^ĢÓć,°—”TR`_nlŲ×uöż,°_ĘŃ<ģ—ŁC"ģ=YčC{O-“K™§ŚoS™ĻCū:&ÖŪļ«W€µ|[Gzy}hoi{‹ģq±sŖGöõt&…­>²—ŌŅ
-¬Öžd~“‡Śbß­^ķ\LäŖ±¶|r„ä`[–v&¶2ļųB‡Į¶“‡#>–NØĖÖƂY ¶Åųš- Ö`[”ÆéˆonĮ¶ŒŽ2¶`[bLKM,Ų–ø,-ž±eŪlūĖ‚m¦˜l×#g#v¶h»XĮ—p[N^iZ]ȶˆ“,Ühc ·‹m*Ām9ÆöĮ¶Č¤GiĮ6óg¶ėńU ¶ė²Ż®–X°-Æń+.l×våil×ē¦ĮBK" Ė§mÆa·ČY¢mé Ÿ—£Ń¶“×eŽh[śNj¤-Ś6oöčZ¢mķsŸB$ѶšŸs¬£Ń¶,ÓS:ŪØcm™[ø½Źō‘®“p[qņĻ"į¶ ß9Ö1½pŁ»h[Ž€ÅjÄmÆzEžˆh[Ė-ÖŃh».Ó=ŃvmדžB“½ŗ¦“£mY¶Ś°Ńp[6±ŁQ”į¶Č£2…(‡Ū«Ŗ;m×ę<ŚĮ¦Ń¶:ØkDčmĖ²éŲķ(óV#śÖv™¬ĆDܦˆ¶„}X*ŖѶ`h›śp[–I(Hø­Æ±ģMĆmi×±GEł_ŒŻļMÓ”Få YŽ†Q™¼¬Q™Ć<Ī=FÅū¶qbGŌ:%0*ńń6÷•u+£rźŽ§ĘQ™ÄfķĘQ9ģ¹ĮT“ļ£±Tšę±6–Ź²ak,¢„¢}ļcĻRŃ¾ķ7–
-K–©ĮT“ļµō0-Le—ĶcƒieČbĖLkˆ0»WÆ0ķ{9¦Ŗ}kSõ¾×=`*ßĖI©4•c{p‰šŹīŚ}wéaQĆ„@©ģ’Ķw^ˆāśAéŖÓĀ¦@©Ž`Ck T­o’š‚R=zŽ ©pūŲ‘T„ļķ’ŖõŻ@ŖF÷\:Ŗõ}ģR,p@Ŗīµķ”
-¼É6†e ¼IŪ~t"č&_p™JŠMw×p ö(ŪD¦ŻŚÄ·]–=Š&ž·kŚDćՑźhÕ{·ĄPѶų|±Œ¶e‰°NŃ&j°@„›ØŽeŚŻÄäÖ#Ōé&¦7–cF¬npƒnĖē$„›˜Žś Ž·Ś–I>™nuQ=ķlA7ńšåŖxS{īč&‹d®4tÓ¶„J7Q„G;uŻdŁZJŠMĢīi:oŅ–Lꛦß֙zĒ6œo˜ß™ośŽ–XéŽSmŪ²&园sd¾é¢5š¦ļāG˜ąM]÷aļų¦Ę¼Å^Ź7ķMc“ņMFȱŽßDō.öszģ/³tų¦#ož:¾éą“ĢQMż«,k
-įT®,kG8Ńŗ7ėÄM Ķ©€…qbBÆv† Ęɲb‘æBN­n‹)ōx‘önķ€œHŲÅĪž
-9µ®÷–kŪŲ”›}
-”“¶^‰wŹ©Č½ĶåT·P)§ś¼åŌP· .0'ßĒ驘Ó}65Ī‰;Y@ V¾¤éVk†N:ut-‰ Ņ©Ź»čD÷vˆ)čÄ÷ŽĖցN|oŗtbė¹ĀA'¾÷2%Ī‰ģ½Ķ[pNdļ}X‚s³NČ)ēDöž—58'²÷ča›pNåŁ€
-:m{šā ÓEĖ¤Ó·™ ŗģĆŲ‘Nö¤½J:ķĪiŅÉ™–„#Œ¤Å°¤¤›a®“NGŽ“u¤ÓŃ9Nš¶UŒēm™:Š‰ģ}Ģc€NŃr,:=&, Šé2Ė‘tŚ¶ķ*é”EćŲ‘N·[¶ ¶‡-H‡T~F:»·9“n’¹»sĖV'Ÿ’ė¤SAÓ‚Ä ,+ūī¤óŁkrB:”×>[ųé'ŖŻ[ĖV„=n}¶*Ŗ÷²·lU„k ł”tŚūtU·kŽ’nŅ9r[NŚ«å+A:•»·–®š0>éTS·ų.H§vūŲŅÕÉēHC:1a·”OWÕ µeJ:±n§%@§ŽīŽg«źņZF«¤Ż{[¶:Ė0#a#Ż˜‘N.: t2ÅÓbs÷UƒtC#²’®>柁Nft.-]×{å5ŗśfūÖē«āzļkĖWÕm¶*€®x3ƒNk+-aÕöŲVõ§ķDÖHW—ĶCKXgłLSi¤“.KO:±OĒ–²Ę6 ]‘łø}ŹŖļm)‘N^cł‘®“ŒŅÉ¢„„¬ś6sKYUnļ9Wwä6“ŒU;Ó„œ“²yęēd KĖX§ĆęŁĆ9wSŸ±źŲ“lYʌ4¹„¬“ü¶ĘŅ§¬2Ä'+(č”,GĖY'W2čt™U tŚ^Ę
-«ęźąY,_n˜“©v­Ā0'`Ł·†9i[¾Ü0W¼»Ą\ńÜ Ģæ@™9§_Ę9‘Į­8oœ+ž ÷œu^Āš97ņƤĪ9ik¶n SÓÅ
-“Ē k”y Š‰£+ó2éDå,¹UŅž3znŌ2Ż;Ņ‰š½7Š‰|ģ-qß{KŲŽ~9GA'¶7D–ƒGloCtb{ÆsĖ\Åöž·)@'Ž2×'
-ćāŻ÷~ŗJŃāāŒßvI9«č¹ĒÖOWWÜY'¦·ńKW¤źā׫A\]Fč„Œ«m9ćÄā.eNŒ[K^Ī8ń¾§½MW‘ēyĮø""[ĖYEóŽ=vÄÕvÅW¢H)ci³UTŚ¶™'ŠøBņ‘WØ°8ć
- Œ3NTéŃņӀœZŪ{›­R$·*k@NŚÅ/S9Y6 m¶JlĆ!'ķyķg«č{Ļc@N_ćWŃęX§›­¢‹¦6[E„ó©ĶV‘Æ'Nq†œ˜ŽūŽf«hŽc@N†Ča×rE~ tnÓU
-¼uŹéŠūé*2<w›)£ĆFŹéSĆ\mÓŌOW‘Q¾ŲEŜ‚ekÓU“mÉr`NĖd•sFš-8§$śé*ŗ”MW‘¶_ĄVĪi{x6]E³rėĀ9­lNspN~ØŌEįœŲ—ĒŽē­ā~{Én?ÓE>#ĀżņT ńŌ‰õ½ŲYEüŖ‚ē¤:9&ז¶n2svnؓö`ˆ
-ŌÉf÷Ņ®¶ŹA; ķ:ŖxF
-ēĒØÆ{ÜZŽŗÉ“ē„Ļ[åą¶€:ŁeemÕ9c÷µæąŖÅÅ„å­śs–S›™'GäfÜ
-ŌɼĀyl"Ł;-o•:=Ž>š;˜6ķؓ ĒNKCĢ8/łB„xÜŪŲņVń¼a²¢nšI=u¦¬5Öé=O–Ęŗ”]ØÖ ž3°Nj|V0ÖÆ5fÖ©>ŻņVm—–·–ā%ĻĢ:±¶÷–·ŠŁ½•„±®¶—aéY'æ¶5“¼5¶ėtfq:ykĖ‚ uņ’ĪŁ]ŽŖ‹¦–·Ŗr>µ¼µųOzu¤«;rÜ[Ž*Ż9­>'?¢8m=éźHņū%(č„Æ[iJdą•½Ļ[„:^¬'ƒFŠéĪS]Ļ›Ļš Šķr)ĪVQŠ Y¶­]pŻĶTī@§æ\ŚWEĶŅ@'(<³tu™Ļ!SĪķzM„]pŻõl?Ÿs.|ŗŠA ĪȞ½r°%$³Nu%& « rL‘¼† ’q§2ČaÄVŽ… šBÉÄ ęÉÜ’ešŚÅ×°A2õŠA ^č `/tĢ½ĮĢŁ6Ķ$tȇ’É×tG_ÓAœ}MIš „8„8$Ēyų Äylč!„ä@!„dօrY„–ĖŗB*‹B*‹’SY×AČdŃA(Ł”ƒä’:%;=¬Z&‹Ņ•ģ܉’ē5TģÜé*v/ģ§
-©„gų*6Ym'FĒØŽśĘgG¤’E³Ņ„g—Jź29_"•ČÜĘb? ¢c„½L!čiKēHš¢R‰“U±p©DēC®½V"Ėvsru¦ƒĢƚńimhäļėØV¢ÓČJ ­D^£æžåZ‰<?l”nØV¢{i «Dvāf–‰Z%²£×¶ŠZ%ŅrBĄ*Ń qc «DŚ2c…uÄщtö³dj•HŸ»Ÿ­V‰N½,±ŽVe,éUµmhd„U"ķµéj•,ņŪƒcH%2\XÅnä"cųzsžł«DŽ+¬v±JäŲ”k¾ŽZ%:¹r ©D&8ī&#ĶÅē&Nį"©TbwĶ™C*‘-ųļØT"Ó7åĆsÖūūģ³ż˜‚Z%2ė“ßdYģž?[i?į V‰ĢhŌ0Ż­’˜9źV‰Ģ,Ż¬ĆdŽR‰¼drAD¤yŗ‰e6sJ'­š#ŖR‰¾Ęn™|„@[¶«¤C©k%R×J„ī•t(uÆ$PźfI ŌՒŽ„®–K]- –ŗZ’YźfI
-¾š%HŻ,¤.–HM,é8źbIpŌŒąØ‹%ēÅ,ɔC-rØ%P·$S·Ź™]ćŠK2ć\/qč% ½$!»Äa—Ą8·K2ā°K@v ˆĆ.ɈĆ.qŲ% Īķ’L8ģēv‰Īå’Ģ7—KĄr xC.ÉxC.oČ%ą ¹$ć ¹¼!—@8ä’L8ō‡^įŠK2įŠK cĹ^’ ‡^ā„Ć.qn—dĀa—@8ģ‡]’ ‡į°K@vIfv ŒKŪPĘa—dĘa—8ćK`rIfœŪ% »Äa—dĘa—8ćK`rIfr ŒC.qČ%™qČ%@»Ča—dČa—@9ģ’4“:’)‡]ā”s¹Č!—dŹ!—@9ä(‡\rN9ģ’L9ō(‡^åšK2åšK † œC1ɜC1t(&€Å$ĆŠa˜
-ž›']Ļ­‘Øą¹y<7Oŗ
-ž›'QĮkŪ°
-ž›'] ĻĶjx.žD Ļœ®†ēęIŌšÜ<‰ž›'] ĻĶŅZO"Śsń¤‹ö\<‰hĻœˆö\<É
-öuŌ:ŃyHĒŚÉ|Ų-”\;‘I9ŅY>T;‘eK»•‰lĮoč£Ö‰Lt’©ż18ķg¹]¦Z'Ņ.{»s ĢóYŪ½ŒŌ:Ń¹?ó։L²š°PŠ¶ÕŠōu¬Ćź7ŌŸVsķDŚūŚī(Ŗ‹o`•‘Ē~eĮ­ioc»s Ālš’ ¬“£n€Q—N£.tué$0źŅI`Ō„“ĢQwN‚£īœGŻ9é8źŅ 05ē$XźĪIf©+'ĮRWN‚„®œt,uå$XźŹIĄŌ•“¦®œL]9 ˜ŗsŅĮŌ“€©;'Ąē$ĮåÄaŠqM1N2M1Nā×¼Å8„'„' ć”bœd”bœ€R7Nęö /jœd’bœ@RWN¤'¤'€ćbœdā‹
-r˜%@³$C³Č„mln€ˆX’!‡Xä0K€fI†f Ć,r˜%r˜%@µČ”–dȕV"Ča–@9Ģ’L9Ģ(‡Yę0K2ę0KĄf ˜Ć,ɘĆ,sØ%pµ$q³Īa–Ą9Ģ’sĪa–dĪa–8ēKąbI&b ¤C,tˆ% ux% ÆŌį•dŌ¹XāØĆ+ux% uh% ­Ō”•dŌ”•€:“P‡V’Q‡VźŠJ@^IF^ ØĆ+ux% uh%Ž:·J@VIFV ØĆ* Ō¹UŅ”Ī­’@ŻŠ˜¬Øs«¤C[%Į:·J‚uC»P¬s«$XēZ ¬s«¤c[%Į:·J‚un•t¬s#$XēVI°Ī­’Žun•ėŚ66ó?ŹŁŒ¤’@Y%AŗŅ’• [%A:·J‚tn•t¤s«Ņ”•@:“’L:×JtX%€«$ƒ«Ša•
-ÖŻ)‘ ß2Ó§DŚ2·ŪWŃs–,sDi«`įN‰^€wiĮ•Y“%”¹Ź'AJ‰|›kĮ•łĘć2‡R"ƑĀJ‰<?„¢F‰,ŚÜŸ£Dvāj†‰*%“O‹ńuT)‘¾Š;"ŗRRŪ:o„DŚqĆG×A¤;÷e „DŗÜoR¢JɤfE¬c%ĮĮx”’A.ymĶ)‘ ¹ę”HÜļŸe³ĖšÓėčAJčÅ*Ŗ”Č…ÄclJ‰\ÓŪŽ¦”ȁ¹ĒżRL))2#~jJIŻć‚z”’båcĄ”¹ö=%§¤HęhNI±3@ ĻY·³ĢI))6.”’tĒ³PJdĮp4„¤ųPJ<`÷uL)©ßSM•’b“C))Ł)óySu™ŽJ§D^s,Ķ)‘½öoa”$Œ"”€QŒĒ(BIĘ(B‰cŸŒā“dŽ"”ĄQ„8ŠP’9ŠPLŻ(„%™„®”K]) –ŗRŅ±Ō•’`©+%ĄŌ’¦n”LŻ( ˜ŗQŅĮŌ’€©%Ą£$Ć„˜ā”@Sœ’LSœhŖN (Å)I(E)„(% „$£„”ŗRIQJI1J ©%€„$ƒ„¢”
-:wJ2é\) Ņ d#)%č\) Š¹R s„$ƒĪ’
-§D®5ķksJj[0®« •ČNŚ&•;{„TRln›ÆcR‰\.k“JŠ…g!•Œ6ӄuT*!•HŸŪ=HL*©ļ/µ(_G‹‚²Lų¶ Ż>RImk¼ĮūhńµŽĻ±9%2x§XEßFO[¬"R‰
-ž‹'QĮkŪ°
-ž‹'] ĻÅjxīD Ļ½“®†ēāIŌš\<‰ž‹'] ĻÅŅZ÷N"Śsļ¤‹öÜ;‰hĻ½“ˆöÜ;É
-ńDŠ;Ū_Dœq2{ń¤¶kāæ…y2Ng·‹°ex3ŗ®_q5Ü<{éd¬h\vœ)™ŹŻ pN“ĢŹ]<Ü9ŃŹ¬ł"źœhŁU wN“šė·‡Ā9‘Ņ«D¹8'ćdI!Ī‰ÖYńÜ9‘/[»3¤y(H'ZĖZC½v„<F:‘øįœČ>^Ś*źœH?误9©mf8'ŅŽš×±[…W+b('ŅŪ~UNd4¬ķ#Zg7ęŪ6ģö"*œČ˜[Ö9ŽEµi~”¬Ā8‘Q«7ƒńÉÉZAœŚ:bœČČ?¦5Œ9ž6“nŌ8‘crn2Œ'R¬œ†-”“Qę¹˜Ŗ4ŪÓŚU>T9‘UŽ=Œ“Ń'}bœŌ¶Ā?†å¬›ŃU1NĘՎ6Œ“1Żķ ćDJ¦3aœHmZo ąĘIm«éėXw-6½åD^#£ åDžéĢfR „$ąĀ8‘—櫉LĘRĶ’÷1éźĘI Ō“@Ø'BŻ8 „ŗquć$3Ō…“`Ø 'ĮPN:†ŗqHM8 ŽŗpŅqŌ…“ąØ 'ĮQN:Žŗpuį$@źĀIRN¤.œHŻ8é@źĘI€Ō@Šq2ö²‰ƒ×’āšd’āš@RuMĄ(®IĘ(® Å5£ø&£ø&`Ōe(Šl’)ŠlE]6qˆāšdˆāš
-9Ō Wö”’"C9P@±Ča–
-9Ģ(‡Yå0K*å0K j ”C-©”C-å0K fI„f ”“Yä0K*ä0K€f Ć,©Ć,rØ%Pµ¤P³Źa–@9Ģ’OŹa–TŹa–@9Ģ(‡YR)‡Yå0K fI”b ”C,rˆ%•r˜%¢b ”C,©”C,rˆ%P±¤R±Ź!–@9ĒJ9Ä(‡Xå0K*å0KŹŅŲ–ä0K(Q.©D”Ć)IŹÉ)yQnĖfBČ)č9ä䔼('§$)·%‰ƒrį”¼ '§$!'©$!'©ä9I% 9I%@NNÉ rrJ’rrJ’rrJ*夃$夔$夔¼('„$)·ö”EM(yQNBIRNFIR®­JRNFIRNFIRĪ’ćd”$椔Ą8”’Ź8”1£Ęa”TĘa”Ą8Œ ‡QR!‡Qä0J€FI…F C)rRJ
-ć0J`F ŒĆ(ł†q2J^Œ“Q’Œ £$'£ä…8%‰8%‰8%qJqJqJ^ˆ“Qā$”$ā$”¼'”$BINBɋpJ’pJ’pJ^„“P’„“P’„k™ć]‹CxęrÉĒv£$F „“Lįś¢×ƒē–VĀį’@8\‡KR‡KāpI@.Ie. ŒC&qČ$•qČ$0™DŒĆ%©ŒĆ%qø$0—¤0 Ę”’Ą8T’Ź8TWö”$—ė~!N" „Ć$p˜$•p˜$“Āa’TĘa’Ą8©$‰8©$/ÄI%q°v-`į&É q2Iq2Iq2I^ˆ“I’ˆ“I’ˆ“IņBœL’DœT’DœT’Ź8™$É8™$Éø–}āćŗL’Źø.“Ęu™$@®ļŁFLČu™$@®Ė$r]&I\—HäŗD ×%’TČu™$‚\—HäŗD’
-¹" ŒėI€\—HR!×Ń_¹.‘Ču‰$r]" \@®Ė$©ė2Ir[™$@®Ė$éo‰D”ėrH \—CR)×]ųžZäŗ’
-¹.‡Čõ3IģļN—CR!×å
-āŠ?@
- ˆC!©ˆC!qeQFÄ’Ź8 ‡Aä0H*ä0H€ Ć ©Ć r]
- ”ėRH*åŗQ®‡Aäŗ ’
-¹.ƒČõ0H`\—AR×eĄø.ƒĘu$•q= ׄ€ø¾g…b‘yrˆė{Īƒ;āŗ ’OÄaTÄa€8 I €Ć p2H
-ßHą |C ©|“A"¼!€7’Š7ų†@ßH*ßHą |C ©|C o$š ƒ¤ņ ƒ¾aĄ7 ’ž–GÄ7¹#ą w¤ā w¾įŽĄ7ܑŹ7Üų†;ßpG*ß䎀7䚆<Rń†<ŽG„7ܑŠ7Üų†;ßpG
-ąŠ>
-8Ä
-9Œ ‡2äPF
-ädŒĄ8Œ‡1ņÉø4F*äŅriŒ€¹4F*ēŅtiŒ@ŗ4F
-źRu)Œ
-#•y)Œ
-#`/…ø—ĘHĘH¶ļdŒdūNĘķ;É"“ļ
-Õ¼'WäÕĄ“+’ŲpE²’*WäÕÕt_åŠd÷U®Č«ū®Hö^%‹äd‘םd‘¢“,Ā\‘ם\‘¢Sw†:¹"u„NšGŽŠIÉ:©"Æ:©"9B·öį„CBy ŃIÉ1:™"9F'Sä5F'S$ĒčdŠäL‘םL‘ģ¾JÉ֝T‘WėNŖ­;™"Łŗ“)RĮ‡)ųŅ|iŠTō„)ū0E`_š"~iŠ@?Tč—ŖHĮ_š"
-ņ–E|(„·”EģHŸ6qŹ"v6‰A²ˆõŁ¬¢²ˆżŪ†4EģßöØło$‹ŲU²{‚,bWŃę»°E¼/¹~ć¶ˆÅ†Œ›ķ¶›c"l»wżXUMFt[č"öoUq]Ğ‘ēÖņ“˜Ėīģć¶6óĀH³¼Į£åßńøĻóŁĘ*QboĻßDwŗüÄ|{ŽgS|{Ķ¬Ńƒ/bÆįܲŠæ’Ös·dU|‘6£®’|ėčßČ/ņE|0 žFlŖÓćĀˆ –Xg%ŸĻĆ÷sFµ÷EģŸ[¼lī‹Ų˜Ė±j¹/b±ē³”¾ˆ'tü‘ę’vQæa”Œfc;ų"öokį‹Ų8Æė%ŒXĢZ’#öļ+
-y†•ó¬’’ą‹TJIJ¤²E^ •-H%‹$H%‹¼H*[$I*[$I*[äERŁ"‰ÓŠE’¦ŅE^4•.’4•.’4•.ņ¢©t‘¤©tp*[ä…SŁ"‰SŁ"‰SŁ"/œŹIœŹ§Ų"§(#ągžāŒTžāŒĄSwF€)ĪH)Ź0E¦(#¦(#ĄTŹ,E©,E„RF@)ĪHE)Ī(Å„8#„( TŅ$•4RAŠ4Hs)ŚH)Ś E¤h#¤X#€kŹ©Å£X#pk¤‚k¢$H„¼@Ś•5’•5ņāhX#‰QY#‰QY#/ŒŹIŒŹIŒ†5ņ¢Ø“‘¤Ø“‘¤Ø“‘ŠbĘ!Ą8¤1g¤2gDŒ eĀ”ŒTĀÉp8#
-7Ģą†9ß0G
-ßGąā|C©|;פ€ó qĄI©|Coˆ#
-Śµōƒē.'ēd‘$čĀ"yqN*Ir®'ĆbĮz¹$•sø$p—Šį’Š”’
-:é$pĪ”“Īa“Ą9l8‡MRI‡Méd“$čd“¼@'›$Aqt’褓
-źJ@B ØC(©ØC(u% ”¤¢”Ō!”€:Œ’Š:%£ŅÉ(© Ct(%€„¤‚„Šå>:¤’J:¤P‡TźJ
-źpJ@N ØĆ)©ØĆ)u8%°§¤²§ÖI*uH%uH%BœH‡SRI‡SépJ NI%œ@‡SčpJ*čJ
-éK b‰H‡WRY‡W"Ö”•Ą:“’
-;¼`‡WģšJ*ģä•
-’,»J–ˆe2¼zVOĖÄ:ÉŪł¶L†×öźi™ŒŸ
-R,@ŗö Å2© Å2¤X&€Ė¤€4-4-Ė¤p4-“ąhJ&āhJ&¤)™¤)™
-RI&pĖŽb™TŽŹ2£X&`Ė¤bĖDE2£’L¶—d’ E2¢H&MĖ¤0.51.51.=“ĀøōLÄ8‰&"\Š&…pˆ&¢ €C4IĄa™
-8,
-įR2įR2įR2)„KÉDˆKĖDˆKĖ¤ .-1.-1.-“Āø“L‚qH&B\J&…q)™ˆq)™ˆq)™|0.-“ĀøŌLÄøŌLÄøōL
-ćŅ3ćR4åR4)”KŃD˜C4sˆ&‰9,0‡eę°L*ē°Lą– œĆ2©œĆ2sX&pĶ¤‚ĶD Ć2tX&uH&°ÉÖ!™TÖI2uH& ɤ¢ÉŌ!™€:I&•tX&ĖŅa™TŅa™@:,H‡eRQ‡eź°L@–Ie,P‡eź°L*ź0D@– ØĆ2©ØĆ2uk:,“Š:,P‡eź°L
-źŅ2ėŅ2ėŅ2)¬KĖ$X—’‰X—’Ia]J&b]J&b]J&…u)™ˆuX&B]Z&ui™ui™ui™Ō„eØKÉD¬KÉd{Éf!™ˆu)™ˆu)™|°.-“ĀŗŌLÄŗŌLÄŗōL
-ėŅ3ėR4ėR4)¬KŃD¬C4uˆ&É:,X‡eė°L*ė°L`– ¬Ć2©¬Ć2uX&°Ķ¤²ĶD¬Ć2uX&…uH&°N’ ØC2©ØC2uH&É:I&/ÖI2Iօd’Ø“dņb,“dŻ•Åŗ°L^ؓe’Ø“e’Ø“eņB,“D,“D,“źd™$ėd™$ėd™¼X'C$Y'Ė$Y'ĖäÅ:Y&Éŗ܇X'ĖäÅ:Y&É:Y&Éŗ–]–d– ¬Ć2ua™TŅa™ˆtH&"]J&…t)™ˆt)™ˆt)™Ņ„d"Ņ„e"Ō„eRP—–‰P—–‰P—–IA]Z&ŗ”L„:$“ķ„šŻźū ›’‰H—’É'é°L*éŠL ]£· Ć3© Ć3tˆ&€Ѥ‚ŃŠI4IŠI4Y “e’ “e’ “eņ,“]X&É9Y&/ĪÉ2IĪÉ2IĪI3yqNš œ“e’œ“eR9'É$9'É$A’É‹s’L’s†ēL*ēLą’  C2© Ć2tX&€Ė¤’ĖŅa™@:,“J:,H‡eé°L*é°L – ¤Ć2©¤ĆtX&Ė¤’ĖŅ­}éd™TŠÉ2sX&pĖ¤p.-q.-q.-“Bŗ“L‚tH&€ɤ‚ÉŠ!™
-:$@‡eč°L*č°L
-;,`‡eģ°L*ģ°L€– °C3©°C3ģ°L€–I’ °C2vH&vH&ĄN’ °C2©°C2vH& ;I&/ŲÉ2IŲmpY°“eņ‚,“¤,“¤ŻĘ”E”,“¤,“¤,“ķd™$ķd™$ķd™¼h'C$i'Ė$i'ĖäE;Y&I»Ü‡h–É v²Lva™$ėZöX’uX&°ĖÖa™TÖa™ėR2ėR2)¬C2źR2źR2)ØKÉDØKĖDØKĖ¤ .-”.-”.-“‚ŗ“Lu)™u)™Ō!™ˆt)™ˆt)™|nY&uK3ė–f"Ų-Ļ¤Šny&ĀŻMÄ»%šą-ŃDÄKŃä„h’ĢKĖź„eöŅ2©ÜĆ2{i™
-'Ļ1Œ;tG%!«ĆŪø{āSh=,
-—Ož-võüg²Oģj ļÅt-ŽoĻī«Č‰­ü(Šœ<߄­ß«ČÉ󿠐ˆŪ Vm¦Æńō¤--µ,rņó©ś%QćÄĘ2f[ŒŗB&I”1 B“Ÿķ“°ķ°AĶöC>é–3·­'ŻFē÷ud—æ_G¦”}bßUļ½Č>±ęŁRŒ‰õų,åŗR?±Ź`6"żd÷–Nƒ>ö”Ÿģ. ©Ÿģ¦’“w“=J²aŸŲ܁Š¹}bćüū™/Ū'»„©„mbæŁ</ ū¤»™/CŌs“ævĶBœh5”ŸX.ø=_
-¢€`
-¢€`
-¢TSSS„0˜‚(0XŖŹb°\•7ƒe«,KWIć«¼Œ°²„Ŗ’ĘU©ĘUI»¬’ĘVy]% ŒÆ’FX)ĘWIKXI
-ź²HŠP—ER„ŗ,’RP—N„ŗ,’¬Ė)vY#E°[»Ųe”;ģĮyŲ!ÆTŲÉ^uŲ+°{„Ā{Ų”Æ
-;ō`‡¾ģŠW*ģŠWD;ģh‡½Ri‡½ī°WĄöJÅö
-ø“¾ķŠW*īŠWĄś
-øC_łÄśJÅś
-øC_wč+wč+Āö
-ĄĆ^©ĄĆ^xŲ+{„ā¹°rÆv–I)ČĖ2)B^–Iņ²LJA^–Iņ²LŠ—uRó²LŠ˜‡Ą’ŠĆ`yQ‡%±‡Ä’ŲĆbyaOĖ–KRƒ„Rƒ%©‡Ā’Ō“Ćņ‚KB‹%”‡ĘR ‡Å’ŠCcIčį±¼Ø‡Č’ŌĖz)¢^ÖK)Ų£^ŠØ—õRD½¬—RؗµND½¬—ŌĖr)…zY.EŌ[»źe¹”B=DQźį±Tź!²€=D°‡ČR±‡Čö0YĄ&KÅ& ŲĆd{˜,{˜,ĀžD؇ČR©‡ČõY "K„" ŌĆd{˜,{˜,`“ģa²|b“„b“ģa²€=L–Š=La‘ģ!²Tģ!²€=D°‡ČR±‡ĖöÜ]ń –Ą^VL)ŲĖŠ)Ā^VLö²bJĮ^VLö²bŠ°—%Sö²bŠ°‡Ė’ŲCfya%±'Ÿ%©‡Šņ¢FĖ–.ĖĀžd–ö6Śd`/l–E=é,oģÉgYŲŪŃĀ^-•zZõd“,źIiySONĖ¢„S „S*õ(ö(ö(R±GŁ°GéaŹ){TN{¹ aŹ){rZĄž”–Ä^£KS°'§%±'§%±ēNĖ zrZz’Z€RK…R ŠCjzH-zH-‚N ŌĆi©ŌĆiz8-P§„R§ź!µ@=I-zH-@©č!µ|=I-/čIjIč…Ō’Ģ“Ōņbž¤˜'§%™'§åÅ<9-É<9-É<9-/ęIkIęŁ,÷ƒÄ<Š§TęQ<ę©x
-Č£xJEÅS@ÅS@ÕS
-ņ(žņ¤µ,äÉky#OfĖBžŌ–żp[ŽČ“ܲ„֒ČĆk©ČĆkIä!¶$ó0[^ĢCmIęį¶$ó[
-ōp[zČ- =ģ–ōŠ[zYEEŠĖ**zYEEŠĖ**‚^VQ)ŠĖ
-(‚^VQ če•½,¢"č­]ō(¢R˜'½EČĆnyŲ-yč- ½ä”·Tč”·
-ļ²¤Š€—%U¼,©R€—%U¼,©"ąeM•¼,©"ą!»$š°]^ĄĆwIą!¼$š0^^ĄCyŁ]’wŲ.•wŲ. <t—¾Ė x/ <Œ—ŹKžŒ—äŹKņēåÅ;¤—ä]ÖVļ²¶Jį]ÖVš²¶Š€—µU
-ń².ŹZA?<Ž ^–V)ÄĖŅ*"ŽŚE/K«ā!½ō,Ē¤Łø ĪK%Ņ ÄCzxH/•xH/ėāa½Tāa½@<¬‡õR‘‡õ"ä!½Ą<¤—Ź<¤˜‡ōó^*ó^€Ö ŠĆz©ŠĆzzX/@ėåzX/zX/@ėča½Tģa½{H/`é„réī!½Ą=¤—Ź=y/pĻR™#[!ø—ÕU
-÷²ŗŠø—ÕUĽ¬®Rø—ÕUĽ¬®"īey•Å½¬®"īį½$÷_^ÜC}Iīį¾$÷_^ÜĆ~Łš^’{ˆ/•{ˆ/É=Ģ—Å=©/oīÉ}YÜŪŅāžģ—Ź=É/ |²_ų¶œė(ą“’²ĄG™ĄG™•
->Ź¬
-³ĪŠøź¬«ĪJa 0!¢ ‚«ĪŠ(˜…VWa0 ˜ä`*0/"Į¬ ,˜Õ
-āĮä(Ÿ<˜×(ŸD˜å““£|a^£|a²æ+&[2a^­?™0Łś“ “­?™0•ƒ˜0ā`Š0€0E˜JĀa@”‹01•,L¦Āšy“„‹CĀ“`* ӂ…iĮ
-vģ1ˆw¶0Łžæć·õ9ÖiRŌóļ§A›‹Ž[v˜’éCėäožMįĪó O.™’ĄE™Ć/™aŲIŁs¢köü·Yy\3olÖf9ņšŁÄ‰qĶŗ%lļēėšukxX։®Y·7ś9®Y·¶ĢsĒź5ė6{l×Q×ĢĘ-E–kf3&—Ż«rĶl,ŻĘß¹f67c‹Äź’Łž>ēė’õ›rĶŗ Ž3ÆY·‘Ģ>źE{žr·¤ž³®“.še
-ök¼.š=±f—y*±ÕN܃Ƨ½;±4„%^šq·9KĖ[ŸŽ}ų±ĻhāžģÓ,vœc4GątCĖFecÅ~ŸĒ±YŸyęŖžžv’\ļ
-S‰j{hQ—\>ÓuN½żĻ6Ļ 髱Ł”\~ś~šŽ¤2“+ŖĢg–:Ł/ėFŚŸ>\śhŻśg=’'ž70 +ƒĪ!qbS5 3)§ēŲQp·ŪrµqsŃ¾Ä}ź‹ö\ö‡ ‹ęvWŹE³Œ{G¹h»ūĖ-/šåXģĒ|]“]hä¢ķ¦)ŧ_4›Š÷ģąrŃ,wįˆŁ<?”=tH.Śnļćqæ.š=D– ĪEŪļHLä¢ķ—žrŃ<Į†uўS=*ūEó
-÷ĮEsĪ>G浙ĶFægĢJūķõo’ū¢ł3;ć‚ŲȞet;få’“_¹{‹u«›DĪĖF$ÆXÄØSžČfµø¢M‚_6I YŻ¦oY6³Æˆśü`\ü䲬ƒēŻõ‚ Žö°bĻ^­/c›9¤”ūš>G2¬¢Įu1ōV­Gģ’¾Ę¹]«õ1O’Ū6ÕpĆU]o}ų$łƒ‡cś.…uKjŃ
-K‰Ę’¢>Ītęeó’_—ĶZHÖbē²y É>ĶŗlŸīl!=įŗyɗŠ‹ėęM¤Ł^×­«¹Éuó6’eEźŗYi¼®Śæżķ~Ųi’ž÷’ę?üžzz|÷tś~ś)ĀŽ=«’į‡óŁń_žō?žżśßŁĻæų‹ß’žÆ~ż›æž±’Ė¶’äł’īųūē’¾ścßļy_ģ§£?M/ĻU:vKŌ·Qé_ŸpsµŁžķ żŪ^¶Æ;łpīä÷~x’ć>Ę•ˆÆūiśų’°Ń_,cŸgxØūŻ}€ģyNļę7W^žŽŁ7GcķŽ=Œ×ŃX8–LߟĘĢf¹Ęß9%ŪÉמ֞ƒŃŃR”|{tų•°©Įī6d­`·“_ž
-¹!3ż¢?7G^Ŗ§Gavz³tDŠ†½bvß=Ŗ_ņp/o#ś0+…ƍĻ±‡m“NėĒ±ķĻ eAŽÜµēnCu_“˜Ś”?ęMh?2Ūśī|‰Cöś¶^žÜƄ·Ī²Õf"čuc|Śu
-źYpÓµ“šÜth^†W{ՕŌ¶ß=Śģ–£©]ŲH^ŚWyX+ŅļŅl>į+īó4|Ż;›^~}ę«āū^ćĻ)šč¶ń1ßė°™²ćĢ?×į4/UGpYB•÷Ȇńąƒį”ė°;t¶–¬‚Vc8‚sķąµöjƒ¤æhĒą{O÷§§ "ī EÄ„?ß¹(žl*vē- H{ĢĖā ŚÄļ»»¤qUģ½·Xö«Ģ4yv ąµåļ£éįćŗ9,ėh+øń=į“`ž§ēDÅ#ž
-<MĒ”˜O!<.÷‹JEŲ“ĮÆ
-ß'÷ŠV=ō]\nšĒ Ųś6³å%u± –…żWŗ±Ļ‹ōUįėężk·ū=³w
-^{>nÖfeæQ#0Æķ?±×±ö:fĖDqCæø¶JžŸ-²;ĝ[ē[^jiXųä#sÄwÖzI¾,„žķ“^[kš|»ó÷ *ćŽĶŹēŠ»ļ¾×³×›įmćAŲomėÉéŗ†AĆŪóQhŪÜ3;Ų/øgŅ|ՁĶä™o­my”}ī_žü­§“NĢ5Óx<¶c׶ž•ļ‡µ÷"čŽ½}1:vŠö#Ć: Ž·‚×\ĮxCFxxyoŪĮŪö|8Śv|Į LĘ’1ö$^·ŗĢ>kĶSŪ]ĄŒNp¼łÖ‚8¹ŠĆ¾œjDyīĻzɶqr}ē-?vbgÜń§±ŌÆ£l?ļ„8g°Ļ–‹÷ĪRpƒ¼¾VķxOÓūEæ÷ŃRķˆėb_E…Ds#” Mܗ½€kņ§Ģ7‰ąÓóÓµö§[Įć"hOaģ`{^ƦĖ•-ż·’ćŚÜĘŌżļsģłkŸbć‹čk+v’~ßīį¹ł±įPŅ£mć^Ū„MoĪHńķŗ¶ónküܓ /Ā3ž¼Įeh{×·ĄŚ­ń˜lz¬ÆÓ&óY‹÷źł˜<M!]’§QA“ß6kz±®=ä‹#Ųā+gćł£qśMpėS€8åéó/˜?Ō±{ž÷Ó<Ģ.—Ż@’D>_³ÉįūŽćs:cJ5žžWąœŃŒŸ«rŚ:ėķŁuYĪ‘£ ł§‚ō¹ós]Ŗu™nüųņ5sµ„½› +ŲōŽ×Ģ‚ŪĪ!]|Ÿl ¢l“ńöž–KÄ!m=Žąšµu'J+鰕79U’ī~U+Å׉Yo@ŻĢP3%>œą,Ž ŸžQQ8¾ē¤©gĖ+˜}vÅļmiäŠüˆł2²zƒ¼²#<¢[æ·ę'ę¶ėO}7ØŽ§.÷¶ē¢DƟģˆÆ¹-rž“‡-pc3ig™{±ƒ3]ŗ嗚Ē™MŲgėl’Ņ9bk³@œwrHĒp–gĘSęāĻõJ»č¶“Aöegt¬l+Usé<a_ąX‡÷ĖķPF/|¹ēh)ųlķß±‚_Łi9—ń×öl1Æļ†É¾9R¢¦„é™9Ø5(ń°ŽĘÕd¶&¾āūIļąĢw!ļ®OaĶ|o¢½æ†IŪ×T°1<é9äüœÆłĖ*Ųī^. ‚ģnÖqˆŽĶ杕ŻiąŃzņéē •Ļ·ū«ß­ŽVÆĶSĪ~ņ>×£ø(ņ°§>D‹ØėnćŹ¼eęq ī©®Ü^ |L:“ć ¾œZÄ|-«nq±<šīų5ó°Zé śü°¶K3Ŗµź#Ż}!yėMŠÓŻ®ŅCōž¹ļw³UÉr\óŅŁ>Ń=ĒV/[µÕ‚>_›…tM,®Ėģģö“¶3}“ĮlĆfĆ:]āļķfWŒfiŒā=-X(~Ļ–…wzhŽgˆ½¶˜hŽpP}Óāq\£\?·!ø/śłÓų¾8Łh«XšÜ¹ w“‰6[ö“!“Ėš±ƒ-’$ōxÅĢœŃ>Œmc
-īyćižĪØößŖK²µ “YÓėWĀłéó~‚‚2ūŚd?ż“]¾—Ż²Ó=;jfs
-bśėįåw?Ä©zÖī°¶¢/čÅĒꇵŻĒ‰Ćņ.žš–āŒĖgWÕ~äWź“iėļÖ²~>8lŚRĻ«%WŚ<žŸ€+G :ĖĘo\¬/ś½•©Šėā.Vœėv3~ļ™iž“Ķ-śN“ēķ߬gf³I~±xŚžcńŖSƦ‚}v#.—µŁķ.śdš>o=™ŽuFųšßžźüØyVžk·V€äźłąµ‡½”Ā¶Öf±)oŒ×żŒqtœŚYpDņ–óŃŅoćŲ}ń'ßPZ[œ§§,łyśŗœv”4åA³³ąŒnĀš‰h»»~õ|¦yų¶6vbCq„MJõ3š×Ņ‚=ʇ%ųņŌ_„<+œfįŻ…Éæ{eŒ°’Gy`ķ†zMŹęÆŌŸµēבĀģŸ5?ś'hĶĘqč³¦Våö|jŻż³uā)±OÕfce6Jļāš–įvm‘¤ļ­{‚÷}æ?&†ÄĘ«ń ÷Ł‘'h£¦¼Jž³gM«†vņŚ7ķ µ3Ģ÷ēÓµŌh“§øMeDūšŒwiŒXē)¾vOÓĒDó8³£ĒēśŽ4¾W_÷/ˆ°^›iźr|†wƒć¤|xęwjpū²~Mõ“P!/’“ū_ˆKӍmōkĄµW“¹Z¹Żué†u;g‹OŻ°ńŽSĻ£‰ēĀļćžś˜‘Ŗ÷ÄׄžøfŽ_›æč—īo\1ó>ŽķˆSšń¬æŌ·ĪµV?†vńŖŽęļļP†ß ”ŗ“#¤e?\×y÷x}jņ»OØē5zsŠg”ŸŻ·‡¼Efß>›gĀ=Ŗ„Łåķ±jś“’<żÉ¾šŻ`+ĖeK ų嵬ōēć*āÜŽ"Ū|Ńf‡šķéÄŪCѵ˜d{n63Žx][¬hhżĻķy·Ė{Łwüšś]ŃŠŗ¶AÕw`ó/žĶżœłh‘/ēa_8ī„7}ƾĄÜ_ź®yœoėõ'£į£Ė{Y2Äå$ó‰Ø+szĀųźF„Įf% ō<Ś3ęŻJŪÖĢN÷ˆųż*śü˜_ÅÓÖ°“™g`YĘ{\Ś£µŌ›&ŚŚm8Ģˆ×ay%ßĮœ1fk‰ÄŠфöŪ“Y­x'#¾€~Ē]&v
-A5mēˆqüh-ŗvø‚q
-3²Ź_ŪīS½ł×5ØA_lœ¾cü8‚ū<t½"7žŌ}kŖć²DWvą™#f"ŹÜ惩‚}Ī曠»“ģ`…}õ¬÷»bb&‚ĻÓ¹—`ģ@­¹÷¶žŁŅ•u ķ?Æ·ˆØ½E–æ§vøm"ž_ē­ķ<ĮM>ŽŠ}"Ģ ?uæ<}ƒžćŹ–-jęqĖžcóa7N÷Yc ŚēzöśVĖ¶yąq•ą/š…{šŚö¹³> dAępJš.’Wø1„e™ƒÓgѶ+ŸūS:,9ÆYł¹ķ„ėuEGO]-ż©/ŚŪ$Öx²Ÿ®„ƒyPßĻķ%’Ž5«_[ŽQÄÄc›^Ī;Öznļš”Žmh65ŚŗqœGÜB¹€žä½¶åļpK9:DfLBj7§& lžWq“^‚'\·½#ēĪ÷écŃń EGÕ’M‰Ē®hrx8²K<³ģńA± -ű‚ģĄ‡?·É¢+$æC¾dhž˜4„÷VżĪQfÓb^éė3x[n6?'lŅq½·ķ›r’żņŃŌWūõtA?·QˆwX–\(BÆ /ą<s+ÜŌ€>41ģ“x1’įĮ½łu²%6ŽćĪčłMŽ)„WŠĘ'¶}ēWŲ«Fp÷Į¼ˆž–HŠZĀ¼]ÓFé=F²Ÿ‚gōxŠKR¼ƒšMātW˜V‰ÆVäl±!ļ§•m‹ŁM›ĘĻÓ=|„¹ķ‘+ó~ˆ»Ä€ K±Ÿ|/o8;ķr掏˜U0·Å¬ėŚö܂Ķ‘¾ņ2 ‰?’
-_z:ŻßŠ+Ę#čkŖ¢ČĻ[¤kŲ<ģqń§¼)ąĮż¾æ‰ł®łū ūXŅēNżÉŅŸŸ<nž#žE;8Ē~źŃĢ3 ”µ;:HļąˆŁė<}…g“$#čstžń•7„yKßzÉ_€ė±×Ž¶/Šąy2ŻzVŃG2Nķ›‚tfŗ¤¼·lx]į[æ_ß+²ąāpb*©yMĒ'ĮH|…ź¶§™ƒb…Æ;AO,]A>ś>¶õŽ¶wR±2ŲęĻZŚó3ģsü
-žq—Ęb` nķ.GprGoŽ ¾CŹŅŠ˜/u›1~~ī‘)U·<"Ēv &lĶ¶”ĒÄ3ÅQ*™'-ūQi8ĪZHžXč ŸĪmkõDxĒ÷剰žÉyé~xžęO‘œrŗDŽŽŸ"f?ŽŁĒéA^Č2¶½'łł“ņu“‚O]#zūE£“ÓG™ė–> Į§ćˆ±`Ė»ŚņŃ5ć]A½Š3Éoc/mÓ%ź:¦Tn%}IĀ½Œ?¶Ē+Ń,!ģ¢/ė–“²?V‚œ[,Ī×+ŚVĘÖÅ ’źn¬±Æf9Äżc˜Čšj›€3L4ā¶lGH±q
-r]cäśc[=mÖb·‘ŽĻą•¤W˜Nŗķu÷Ėŗ Ēö%š`6»Źx˜¦”­Ų„ÓŚ”lšt$°ƒ×sO†F®Ł»ųC“)ŸöćæօŒYW{īģ¾’JtWģŅjä9ž“Ūū¼Ü­ym<Y’½ķµŹć»rę0"ĮŁŽģ`…}É‚ń,Ūz‰wīÕ^ĢąM?xpM. ąÉ“ÓSē‰0ž†ŪžŒ2ė ]į˜)ō Õ *­õ›+ų%¦‡ķž“!¾¶uik˜YAĖcÉņ#ßr[į2Žv¶+ßh_-ŅeĻ™YóT‹Ć—Qցł,{st¬RŹ¶3tÉh %+¬ąó)ŒO­0˜wp䠝ĻķyšČ¼ÓÉ·×®Ą­±“‚p\ZM½‚>€› šŠŚ~Z×—kŹ9÷ėāӁå‹K8”}cŃc<£V†uO ÕWįÜz ę
-%|‡'ūSŒ¤ē'Ōē#ø_}” Cń¾Vć{ŪM3|Ne ˆń˜-®Ņ
-Gp”šfń+xŠš{Ķ8‚§×«”ƒ~ĒĒĀzTzCät½Vł{Ū=ÖbŠ ÆiÉ#1°j5®öŽćžCU¶TJWOĻE½čn0$Xƒ{Tčų ū÷"‚ŪĶ°œ—ąŒæ4}B%ƒĄ½ŻēĒ¶ęŲü1_ ʃŒü(˜7lļŸŪnŒ°ŪƒŽ+hżÓ¾nį¦ ‰‘¦£Ż®;ŽUAv@_šµķŌõöj/Lé4 >)ųy
-Æm÷¶iŪ;§#VĢē‰ų} s [¬ˆļDō¶ńgp :7+lĘTóZ!Šå n7b–`>†>ƒö±ķč1™ą+
-o3LõŲ+³Ą÷ZµĻƦNŗŪ>¼Izā¶“aDs)ÆŹ ŗ[ēI°tFĀV„ wÖF&.ŠŁĒÖv„ķÄ{0¢Ķd-1Fć¶hŁł\×ÕfĖą øåd†ÖĮņp_č1 Šµ@LL ŽC¾ŃÅCd«˜FćQ¦Ńµ^tń57o¾ļ `õ7ė¶–źĒ Ćė%Ųc˜5v°Ā{
-–SØį©nŁīŅ¤½œā·ĪĒY‚Łģūł±­5”|>Ć­l˜ĆŖŠ­`¶£spe[®ģ‘„WŠW‘ż¼ 6ŒŚÉē¹Ē®·‹‘šW°Æ±†5šį‹clz»˜&³Ä|הš5ätoŹ¶¶÷ŌŪÅ
-`łpų‚qq^GvÆųc–«”é½ŻW5ž%Čč8¶ł±m§yøi§ĻąyÜė¶ĀLżŲ^gץ‰Ė}÷ps¬„ĮÅń¹ŗ_ ŸC­9{,Ų£Ļū
-öl÷]«‡¾fšŹ¶įžÖkšœÆļć,É ŪAot›5j¬ u
-öKXżęģÜžÕ_üį‰ĖĒGāņæžg%/[›Ż?Ī6;esīĪZšw…æ¾ĆMé_ß;łšŚÉļ’Hėćk†|ׂŻ^&r‰ MäŸųŽ1ėZ9`Žf&{ģd^xYżƒķA_ ź»<žT3Śukk/cé×ó’āéć =ŗõ ÕŶŃŖŲ®=ņ™<čŁ²_|G$°EųģmwĘF¢Q°]ØL>^²ÅźPOų,šÜ®;Śß#&Zż‚™Ór¶yrmŽ” åģū_±t‰‡Ÿ£½_Wg3‰Ŗ]^—f"ø«“3b9ÕŲļŒœ,]„0ž ¦ü¼HśÅz$‚5M—äĪ;ošŁµ­/+„ ‚ Ū@} qm¶ˆ¢²JćC\Į+xŠkśĘl±zƇł¦‰…_f“uĘbāl™×2b Ŗ=äA_#>öū„·KęCƱƒ T >,źĮɁ­ėQŒ!_;ķ 92įÕÜc§gŌöCŻbYą07uƒ†æwJŸ’}œ]×Čßū*šzdææfü~j!Aś"z“qļŲĮ%…Ē’–/±źA_ĪÕ·ŻåźŲŠ¶GŒoĶųŒ²ƒŃņ“)ŠŁŹA÷Ī4O×.n™q¶ŗŚwŒšĘć£la_K)ŽwlkæźW6-ŹĮXZĀß1_@÷ ŽČŠŅ _<IuR)Å ūšb·· *Õ.|Y5^ūĪ[Ž÷Üķõžk±[©Žė¢EpO:³T£iB»?īÖĘ„C˜7øzjf,’£;yź|ĶJŹŃZ¼ŗĆJŸ“ µ˜
-jĪĀ>²Gż¼Ļ+ĶaŽ›wŒI0 kŪv0—shSåé[š8aĒœg°ŅZ¬`[<5ĆüĢ¶³¦>4›ĢüćՉł Ī¶7žƒ€ż°œ±­—«;št¹aśˆXšąÕŃt֖:Ÿgd‚h9Œē}sa38›w7/Į­Ż&|=>š³qP^©QW€§h\ńØĒ^#Ā‚‹“Q6ƃāńS=®ÜĮ9›mćį¶uŽüŚ„/d[śdu\×Ō“-[ė™-µsY,E‡“ŠĻ„…ĪĪĆrgl±{Žė×ū$iG“«œė}ū†üŽuL±ƒōj“m;ļ‹ūæÓ¶w#™I+>™ÓŲś‘ģPL£t¶įqš Ī˜-×nó÷t)_ŸQ栞rĖę¶óĪŪdąO¾€VńG š<¶ŅÄMŽÓ®Y&‹Ž;ą“O4Ø\ĀiKwŽ½ 0v`/“&F-ų“³¹]‘c`Įc{}æŲsÖu™Ń‚õ?7OķBcžĒęäŹDæŅ÷{g‹Õ÷ŪxdłŽĻž­čÕ<²Õ>”{„œśŲ§”éÅv"čSCāƒ.ķ“”qiņõž]^fÜÜėŠ¶Ź³ņK ?ŪA§%ę2iģĄ²£ó«¦*öŒ½ŪG_ŪŽyĻNI·{ęŃĢČÜcæ>ȏĀ©ĖxX)om« z6›¾«k{ÄZ‚ėĄ"Č:i> «ŲóŒņłj4-|ļ\™Č!±ąś“+wŚwŗētŻ›SŽ\„ĻŲö$ßßw°ģćĪąuęÖ#ī °UX#“jŽElh6c’žhĮ«sĖ=—ŻzM ÷­Ńtģ ęż/AīķŃFīąjpāTŪĀ¬Ē½)č#?łŅg¾ō¢‚7·Ę†|·ƒÓ-ŲŪÅaÅ7e">ĒSp¹ƒć˜ŗ4sMzŚiĪŻœ‚‰ \ļ”ŖC>›Rq¶®÷ę&]Ōk&Œą?µē{sĖ ó”›ß”ß
-šŌSƒć•·iįćęc£1-ßVŻø™­A 6Ž„ŃhæOÆč«š‚ī†Ī{Õŗ)&©5¾!žYĀ–[ķQSx;A/_A_SVW–ʉēśńā]G`ąę QŽļuņ†\‡F&-ģ5gcæJ·3ł0a]»m„ÉćI¶Ž#·°“(m&&rŅŗÖLŽą~“ KĶ꘱¹‡½Ę«±_E)+6ŠVĢóxb·.cßtš÷ĪåÓB#Ķ“By€nšK–oi7P(ž¬:uōQ
-*‚×ų†›~LÉŲ<ųÅMcŃ· /lgÆõŁ=ŒÅ0éõūNĪžŠ8£ļ 6šF¾7EĻ •D³šą[ę7É[·ģ`t€ģ…]控2„*µé”ć®W!‚[¼1¾%—vTŽõEŠ‹ŻĘƒ} j9LŪ© ö±ƒ#ø£^Ķ'\ŹõŪöø©ļŪ°J³ņ|„|.]›=ŠµÆ»£ą}䣎cķ`Nå°w‰×Ū‘ł#"Ø7Ž')vpdŸhĘ²˜?łā#rŗśFOĖ‚G¾qw&łœ/8^…‰n÷8ųcŗą¹902v`‹‘ĀļsEpĖęļœe‰ó$jŻKż~ņI¹”/ł{¾x±č„}ŌV7a›¹ƒżbc(”Ģ”ĀsÆģŽ °éC•/Ž)£3ƶ;ķåŽ)ø³„Æ_ĮX1ŃŽ›]›jČŃīģuĘØņŹśJÄļµ½ĪVĻį©|Ā±®‹MK|莖æהéź'XŠ–nēoéżHēŅwzģ¹_W}Qę«Āćœ|=bŠd³FĘĪ«A’Ķ¾Œ ųOb·3rōAŠĆ9KŪĘ[£<£³k0w½62Žėu
-7­V×;c=3ż¬)Ø£µüÄ©ĘņD‘ ;Y›“e׶ēkw²%ßłėļ—ĶÜML#„{›fœśŖ+¾*|ģhV>ŖĮ>»5RoĮlNƕ(lŁŃ8õq5•ersNåU˜:¶½<s±{Ļötgz£ķ|®Ź›ggÉIÓmnh§tŃĒ’Ī¶_ ēÓĢܚ»‘ąS£Ü<7f\āaæ«åɇĮŽį¢SqkUiūS9ąčŸdv lguĢ€Łä÷Š, ?‚|!õ{WIkĖՂc£­¦ÆאŅ ā‹75n9O܏ü“éž[īųNĆēäkća¾3śŻTĘŗ?ø1ŌÖtįėa“ä‹QßSķu2LŅæ³uą
-Š*Sž„&.|Æ#A¢<ŪA¹„mķĄėė~O²
-Æ;ףłżÓXį
-¦“§ g„C­q”<10»ęņl·­ŹUų¼ µWH%¬ŌX'Ź‘GsČ÷Qsk]‰öž—Ž“\’,i¦!o|@R’Ÿń7kebÅö-ʓŲ)ׄ/øżł>K«E{šłó1Ė<¤Nk}_€vĖŌ–ē4³£3ę:§žƒ `ūŠ’h¤¹XÖÖ§/ć ß¼HŌö\‹ń‡¾XÆ7Ą‚ł®djŠ›ŖŁ[ä«lŗōäœZ.ś°™.ź•£gnėĖs"ŠzĶóśå1]śāžõl%615>o†Nž°Ü†k¼v Ę¹ƒm2ä}­Ŗ$ē<s
-ģ÷Æ!}šc-µ4C‡a«逊/Ģ]ŚTQ
-bęÕÕWČFō¾Ć„aĄMcä4œ ƒ(­ģŽŚo?XW!īćķ¶āÉ1½„5C1.'{XI$6ōuŸZł‘šń§
-¶å5dD­ēĖ*.–Å‚žęÓ3?Å ā 'S[ˆ”ѝż*Ļ6^ÖsDV± Ų¶‹óŹÄ'ņÕØĻ½†®öōŠģ£~3^¬Å“}ÆēZ’™†ĒżL Z}¼“e§ļ™~7+ĘzbkN–dó–zLZŽĢ8¹s]›!ÕB—÷ZSų“™LBꥣ¬w.īŗ3ćfKźFWŌ× ]‹Ć½˜¤µuu»Ķ±3›S>¹Ž‡Ŗ/ō£<4“ōlžzćqńék2—VĶ Jøs5Ę]…žcł8°īI]į]EK,Ż q
-x5wĢ[~U˜Å#ļXŽąŠ~éō3ģĢ9'Ė™P£īŽVŠOž“±Ž›\Å“„¤vÆ Ž'ŚsUó\‰{c×V²Õh¼u'$ĮŽ‘ÆJ*‹æ™±[Ķ(x·˜ z§Q±ŗÆĘÅX5ūĪ)BšPz¼NźBŅ1ø# "v0²orĒ
-N?½Ž;ŲŽ|ŗąœĆ…§ŽēssÆĘī¢g½Å®”ŅƤ½­
-›žŽ¶–Ęø×R²–w™7öAė¹¦ŁMēĪW˜ć=ČVC[’Č¹_>d^_™
-ŒīO.SĀĢ[xfś›ķ÷dæļŁĪt˜īČóŒąµóĮĻ_ģj-Œ­üü“Uåü1Öœ™æv—ņ$Ū‘Āé½\[Šˆ‰ójÖQå·jŪŹk¤„ˆ|Ū"Ģq
-¼Ķņe%óFĮy2²ŻM%ī# ™gWįŸüEæ§Öřfš× į£ĪWĄrĘ„7¾¦?;ø.¶ĶĀŅgfI_±ō¬ėŚėČć;Ģ¬‡²ķüYqŸ±UĒÉ^€°£Źy_r«¼"P¤`R_lfņ8·ŗ¦Ų cĮ9󻱑5¾|“µ¬¦%¾gīĘĪ©ūi*ģ$Xk;‚¾ƒ}«rŹŚV+'ų—RcœYBżUōa+Č¾V!#æŖśZ†÷ŽJŒŸk¬li4Šz)óęܳŗŠ
-N4\¶ņ»}T*K^½ ²äæ׶,9°jčŁPčĪxĄŖ”gIĖ“aŌŠóµ”IѤ†ž-žŪīśDŲ8}T’ŹŖŠ¶ 8YHĒŁiłü~’ų>#Įrwe)ū‹t4³W9Ī­EĆ{š9jµ Uģē]Qō^-5a·Šż” ņłį3Ģ‚³>I3vPfĮ&~˜Ł«$Ɲ«”mm%@&ņ2uik9°W8ąĪ ü; jæ‚ūŖ”ŗĀ×*ü|gvČ+ČHĄUVb·É­1ļmÆ(C}ĖXeń\U³Jx€¤ćĪ•ńrUž(W›KģŌÓ¶.Ę ƒm™avŃ#’]+FäXó‰–Åę#[YæZ±¼ęZžmmyEžę{—¶8ŃTpiõ«˜g;£;ö“fč²råX–.«`Ą©JōŪž•Aö,u0j˜‚C«š¦­õl‰ŃŌmūØĮ_˜kŅ`Ūż"5ĶV#>ŪGģUÅh…K£Į‚ųµ2ŃĢl˜ZÅČSö(GĢ¶–€„Ņ-«ŠQ Ö*F%¼×Ō“½7-²œ HūBt5§`|¢˜oŁvÓØVĻU',‘TƒxÆĪGŹĮ{dņż¤©Ä®EĖ³†č ±O›p@ė¶6„ÅbčŲ.+f«įpWt‹V’ūd·,€iż„h ž’=Ē DŁÖ*ŽŗZż%xÖŖÖ+<²=e™ˆ')–Y†i”fł"ŸTÓŖŪŚśų 9x“į3čĶ‹ŲA g9®ó³ŠŅgŠ;Ay&óÓ§8dīc×ZŽ+˜ßTł½eŪ¦4k‚żŖUJšF”F)H"S˜Ō½©5LXŚöżc[[Ę/žŖU§h·EŒh#ƚFöķW‹¢l{F
-uÖµS$€ŠŽ{ł€š† eüzTŁˆ Ėß“ČmŠ D$/ ßĪµƒŅģ¾|r<ŁJšńåé•:žŸZ›C±Ć¢wM5­"Ė<æ®a½ņ!!ųõƒDć;V ±©•½­tT•õ—“ŹNOkõˆ µ%^Įēå_„#<j¶„-Ö6V”hų;Kš 9iŁ§ŗķi­Ŗ®KCŽĪ
-^?ƒY+zÄTV¼²[ŒīŚŗVŖīįU‘.Üj…įKžŻd®FĀŠ¾r
-·\BĘ×rš®Ź¶±ü}Ō0:®ĢĮŹ+®TŁvū™²…ūĢāĖ„žoöŽu¹ŽŪZ}½’ø*©ZT
-Wq‰™K§Ŗ2”f»XN%ĒbŠ Ā $äÆĮŽŲ)+‘nB|øf„2t² Ÿ¤o¦ ž-”°ˆńD(¼ķŖÆ;m£KJ,Č$ ęĪā”ÓĢ
-(¬°hĶ!jU=
-\”Ŗń’©)žJ„ELĖ:,ŚJ"5 3X …颍@ERbĒÕF ž/B™Ōj :œUįō–M6 ^Å("W§Æk“ģ€Żo
-żØD|œ"&y[­ØĪDXә*­'k~-ż¾fSq•Ɔ±H„ūĘ’X€…emˆÉ8£Čõ]ņ2*aJ1wę¬Sx“Ś‡cĮ~ —ąK%«ž£‡G«C?Õ¬¤ųŒHŹņł“%Y‚EL@`œ7ļ’ćØÆ0!žė¤cŽÓąĪźņ^³yįfäØŽCēĢåĮ[ˆ
-kč źQsęŹ¢ØēĄ°=#rŅ=:2]Ź@ƒ€ķ•hp!ŠPŁĢźĀĀJĄkɹŌՀO?[ŠVE•É¶Ņčˆg‹Ż¦…‚°$ē‘¶lQŽk¤©ƒ¤I<t#āÓŹ7k ›0ŲL'‹°¬ćQ­˜T²3Yc9ō}DP”Ŗš³õŒx®WvŪŠ›uŃ«”tą @šuˆ·!2ņhKĄ;•£ķƒhĄ'Dé “!rX“„¤v–Ö`Œ„² FŽ4RIIRƒÅgĄh.x'ŹŪUYļ¬m•8?éUEB‡Lo“­¼8ÕŚg§ŗ(
-q(ę
-Æ+«ń&2VóķRˆ”pń!Jü!ŸaõŽĶˆœ÷ˆ:Łi”ŁÉe+‡@ĪcŻJ”ØĘs‹¶tY]YńšIÕ×Aäū^ń7¬6]ū'Q=Ƅ(ļŌ䚷՜6"jµ5ŽqĖ*XĮą4‘•FŠČqŖĢ
-¤1Ļd­ ­”õ‚ų܈Ż0TóɏƔh†į8ģk»Ÿ8ī֌ü«8VĢĖĆ“) ²³*Ā ‰ŹbĮZŖˆ“u%o–1G X$µ!äå×b‹eń‹¶e8*ćŅģsā Q~Ję],ÄäE0é—ŽpE&#"¦ŻD‹ †rčżē‡`Fäāź°™v2?TįŠŽlT·Įä§
-:ą+ģÄNāü£BK‹ļÄ:ńgĶČȵŠš T 
-^Ōøm#b­„lń¬ķ ßM+ÕÜ$kM>m˜+ŪęÅTMŽ68ŪÅŠ¬F„µQnøE[WFæÓ+ˆeŗŪÉŁ"
-
-ź p(ŹœØÅØŠČ£Õėˆš!y4ZÆ=jōk'b9ė€“Wk[-7ŗWö«ĄĀŚrĪąČi؈­ģߘQė”˜+5rŖAŻi£‚§HÓ":<Å»&m§D+S9%ö0Ź)Õkö§ŻŠ’ČķlGéP•ˆ%ČŁ
-Ē -§: *‚āÕNˆŸS„äIEK04NĒ1(oiK#¢ņb“ęYĖ8©žV‘);%rÜ/Žy²T֩׊ćB‹ÓķŹYMø©‘³N‰j_"*uŸŲ”Xc˜ģb#£²<Żź£dé¤^Y0’™ēWż“ķ0>o”‰s¢›tŠÉd0„›‹“$4F†o•öäp8æjgm5ó£<­mNŌņĖŲ…Jö£ 2"-¶ĖŽ\O:°ąņ¢m
--­ŹŽ’ėׅ‰.RQ¢ _S~¢ĢF¾P1©ézČ%1Nć¢‚lø`‰3¢ļ{eJ¶H}2āź¾5µiJœØa0§#¤' ™™&A`ėDœė&m½˜F„ØiŃ3"—nCŒšBĶ4xöfD.FmPrŸ@wtG“†#‡±ē ļ+FĻ“­C\ c`YX”;l_Ó½'mĶ Ą±„ī]„ssB¦ü…
-恦Črēˆ$
-A4Ž#Xéó¶„ ¼Į6³<H ·yö%‘{c¹°<ZŁ°·N§01_LŚV^}( b˜$HMČ¾’öĄ s”zxE@
-®Ėģ”ö’QūĪŚĀBĢ¹ c^“Ōö[“9 Ą{+ó*!W$Ö°Cćŗńxæ“Q/ˆ’CÅQAĪ,¬ŽFœ›§mc/!™,0uFdĢ_tŠÉ@Źšōš¬
-ƒõƒ »NGI±ńā:;UwYRĶk@q—N$®Ēʌ °3ŹåˆyŠĮAĖƔˆ“ĄĘ“- ‰#iéóāøōł¹;#‡4!’(1L‰Żé÷µŻKÜ;¬AEƙĪ+¢¬śtT„šÆÖ zä²­Z;¦ß`Jœ|ÆNž|Ū8I²]Šķ³Óu·ÉīJV7v¶A\ģYūö6)hr Ü
-Ö £
-#©`éM?˜ ”œVņ¢­IżŃvFģbšŒ<L+`gķĀõģ§ŗ ī Ž{"“ONmļ;q¦
-LČ]m øUv»‚Ał§ŃŒ5¦ŒP¶ŖEOŪFuõK1ɓœpo0SŠ’ćÓ¶]uŠX%D=rŽÓ^G[Dä§ Ø§;a}n@¹¾)Qµ?tŠÉŠ;‹šč”œį>Œ¢$Łž“¶Ī̉V;!N5ą ¹kĖ!*U¦ĄĪ²YA“xŲ±¬-Ā 9Éaš˜‚·t ŲkĮP!«)Ę;ģ.§•E•oe¤C0‡™yŪ¼­f§™ūKbŸĀŒ<*]gNK(—TżW‰`Ø\Ó`ÖÖ)ĄŻAÄąć„h‹(¼oŽ+;fIdadł¢@7±œĘĞ5#2˜ :čdżd
-ÆeqĄœ)š³ĆHØśm&m³%„tՄ8źt0%#YĘe)±2³3ĪˆŻ&9#Ć~Iö“\»ńSH.Ÿ‰=”\„Īe¤‰e fW墖ܑקd¤øŠf
-K1ŁŚt³NĶÉd+°·XŪĪؔ›\˜'Dé
-sŪ`1µŻOŪR¢ ęeźN R“ß<S*Ąt=°`ļÆJ–TšQÕ«8I“¦m Õr'm'§‚*•ø šgéė™ŻT8^ī#ĪÓMŒü£Ä0%v ļ°Æķ~ā8!.MĻ³e+SŠÓMØĄÉ
-åčJ“×+nŸŽ’
-v$D@:Ÿ‡Ičö„ģM‚2OŚ–EnŠ8©ü:%¢ądpQī°fl2D;O5ždB›iBīĆ'|»8,V$ Źp¶"Į’õ{K łš.ņ )ĖŅ&¢ü ŪLŖ˜ĢvŖBTt¦QŽzą^.ČżpzņócĢŞó”Ą®A_ó¶ØŪÕɜhLgFƒšõ
-VĘ ·_:ŪcŌÜ“`‘=;mĀL'Ä)材=•…“-ĀG‹¤"Øg ĢP¢t0¢²ć¤m°ĀUd‚öZN§µX¼|Ż)#ŻĮ‡I¶aÖ0Įa"-‡iM:Ø@¹n¾<Q9ž1»=¬ĮÄaŸéz¦™ōG):[§īģ QĆķ± wˆ;†=/źb¶Ź5Ž°Ŗ”ˆ’-ŚŚR‰>«5,IÄiLĄ„<įņ¤Ņ# ÷7ō„ŁŻį śuŚÖČäFŚ=Lēļ*³‘ˆųåŸn®’r{y}wyżĶ驐¹śĘōĮ“?½¢'ŽÉ£/žóææ¼jŻ<łżyņļO~ó?>’ÓĶ‹‹öēƾfāÆO~óÅ]ė曓_}’ņźŗ=;„’”Bæ>ł·'ūŸžėģź ?ö'æłė»łĆ»^ń³ß|v{{öĆņG/žöęö«7W×ē?ĆOŚŸŸ{yõāöāšŸ’öņüīņęśģv9>éį7æ¾<o”ŽÆī¦ł‘lÄįŃF<iŌöŹķåWoī.^S³öĄzœĢźöāõ›«»ĢkS3Āčēsŗ~óņĻēwg’¢f÷šWŲŌ¤¦ĆŸOģõŻåŻł·»¼ŗ÷Ģ®o¾ąw65ĮŁ4ę3üźģõÅļo/žūM;<?ÜsŽ”$¹©ł-&ńo?Öųß<šŁĶĖW7Æ/ļ6É/ÆļłI¾øys{~ń‡Ū³Wß^žoźŪ\^/·Üåõ;Ž’vŁ}>››W·gw7·÷œŅ»¾čĻ<”>śwšoŻo/.æłö¾÷œwī“M­ FæŲ¹/ŽĮ$~õŁ|łŁÕ«oĻ¾ō›š üßöHœm„w÷½Łn¾śæēwŸß¼¹~Ń}~óż¦f8›Ļ|ŖļhæÖ¶µæ_ĪćŽ×ó¶ęńĆrß]¾ø{‡|“]Ę ƒ’Qnł›ß^|}ņéQŪū©“½ÆoĻšø~õ§›Ė×G}ļ½Š÷ā¦&uŌ÷ŖļmKš9Ŗ{GuļØīÕ½ ©{ŪŗąŽźŽū§&Õ½-2†÷XŻūĆŁ›×Æ/Ļ®?æzósüųƒoŒ×w/~{ńÆĖ3zś^Ŗó  õ}%_h;3ŪÆõ=P:å‹qSÓŁōŹĀŸæžśõÅŻĻ‹ūŽŪ:2/vn‡÷½®76‘ėśA'ž†7ÖēŲ¹ß{š’āÕÅł›«³Ūē¤“Ń’ģ—ö_n.Æļž«*µ9®ó?÷ܧ`{6µžg­špš¶6•ÕšĀ©ßÜgy·łn…\łĆÕ}= WzĪOĻo®nn’ż»oÅ0øõŃÉ,¬Ōonæ>;æųāüģŽŻV¼Ī|‹É) ~vsżśīģśŽ&„mMpgū'ł»ļ_Ż\_Ü’Ū:¼»³8@$¢ĪžüꭏņŠĻ+mŚ[ņ±oÆ-Né÷ĻvĶ«č=b’ŽKA†=_ē¾c€Żī歉ų‰¼Ćb³Õ‰Ä‰Ü—mlkķp­]˜W—w9»|—hś~Ż˜fŲÄ6å€CļĢ³ŪĖ»o_^ÜmģC­¼;’xqūĶĻqoīłUś×-ŽŲ÷>µ5–?X™ś0¾Ę&ļ…{|‹Cć'‰Š }8 &źó‹]\}ńķŁ‹›ļ6Ÿso?@ŲŲ÷YļŲŚL ŚŚ¹yC†žxsūźŪ›«›o~Ųäõā¾¢ć¦öĆ.Ė¹={qłę¾q˜žé¶’å1ųTĖm}Ÿ:Ųé
-dą C®Ü÷VßÖÓ”ĻēsöāÅåŻåæī+FŽ^°qfSóźS˜Oķüģźü7/ī;µ«Ėė‹³mE€÷,>Śłł›—oŽķpž˜sÆ7öĶ&3XĢ M’ăæ_‚Ź½+}żÜӜOę±+QÜ_[ŚÖ±ĒłšZmŪŗ+ÆVŪ¶>Õ”
-ѶŌ»Ø@Ū½Ļžł¶Ź!Į0uS390fcßå s †łIõĀC¬½ēs$ ³±Ł<R0ĢĘjG£a>4ĢÅU÷ŅĘnŁ:ę|cŽ‘ƒĀa¶%‹ĆaŽį0=f*›dąėĀa6&÷Ća6õ9S6fg{äp’±„Ćlģ;Ź»·um=ę'”¼·„Śż?ļĶ§:”l‹Ÿ­+©ö3GĀ}(…Ü6¶“?Ņrzó'¤_a‹Õ.EyÜh–¶šį³››«Ļ·8|y½OLBŚÖ6\_“qkź°¢~c³9mü©@ZRl[öˆż%Į>ČbóQ5­uVœwÉæåłr[X$„#Gū9ZŽŌ†8r“#G{ GŪÖ>r“_œ£mėŠ;r“·Mķāöę]¹uCū-ČQB;ņ³£„väg ?ŪÖö}_łŁÖ#IVÄ_lĖ³æŒæųĪĖō¹­Õ9›Ķ›²©±µc³­Õy?Ķ‡QIėöāåĶ»ŹŅl ’Öƒāšż‰’4øŸŚ’ŗ“öĻ§ķļö’Ÿ¶'›šź[C™?ņ²a~c5öŌ ;VØśP+T½~E5Ŗ65³c…Ŗ{–e`AąÆļŗ)~éņTsf’źāģī·÷f—×/.¾¾¼¾Ü˜—k23÷ć«Ķįš?FöĒ¶…Ē«žµµõAƒ—>€<»yłźęõå6ÕŖ‡%a&~óŽWŽÆ­¶¹£sHŖų–ż,a9­:ZŽõE·źeyĀ?ßį…Žh% {¾Ī’|Ē
-ņ(ī=Šø·©q”ö>riļƒ
-0000000440 00000 f
-0000000441 00000 f
-0000000442 00000 f
-0000000443 00000 f
-0000000447 00000 f
-0000361103 00000 n
-0000370251 00000 n
-0000370283 00000 n
-0000000448 00000 f
-0000000449 00000 f
-0000000450 00000 f
-0000000451 00000 f
-0000000452 00000 f
-0000000453 00000 f
-0000000454 00000 f
-0000000455 00000 f
-0000000456 00000 f
-0000000457 00000 f
-0000000458 00000 f
-0000000459 00000 f
-0000000460 00000 f
-0000000461 00000 f
-0000000465 00000 f
-0000361175 00000 n
-0000370133 00000 n
-0000370165 00000 n
-0000000466 00000 f
-0000000467 00000 f
-0000000468 00000 f
-0000000469 00000 f
-0000000470 00000 f
-0000000471 00000 f
-0000000472 00000 f
-0000000473 00000 f
-0000000474 00000 f
-0000000475 00000 f
-0000000476 00000 f
-0000000477 00000 f
-0000000478 00000 f
-0000000479 00000 f
-0000000483 00000 f
-0000361249 00000 n
-0000370015 00000 n
-0000370047 00000 n
-0000000484 00000 f
-0000000485 00000 f
-0000000486 00000 f
-0000000487 00000 f
-0000000488 00000 f
-0000000489 00000 f
-0000000490 00000 f
-0000000491 00000 f
-0000000492 00000 f
-0000000493 00000 f
-0000000494 00000 f
-0000000495 00000 f
-0000000496 00000 f
-0000000497 00000 f
-0000000498 00000 f
-0000000499 00000 f
-0000000500 00000 f
-0000000501 00000 f
-0000000502 00000 f
-0000000503 00000 f
-0000000504 00000 f
-0000000505 00000 f
-0000000506 00000 f
-0000000507 00000 f
-0000000508 00000 f
-0000000509 00000 f
-0000000510 00000 f
-0000000511 00000 f
-0000000512 00000 f
-0000000513 00000 f
-0000000514 00000 f
-0000000518 00000 f
-0000361322 00000 n
-0000369897 00000 n
-0000369929 00000 n
-0000000519 00000 f
-0000000520 00000 f
-0000000521 00000 f
-0000000522 00000 f
-0000000523 00000 f
-0000000524 00000 f
-0000000525 00000 f
-0000000526 00000 f
-0000000527 00000 f
-0000000528 00000 f
-0000000529 00000 f
-0000000530 00000 f
-0000000531 00000 f
-0000000532 00000 f
-0000000533 00000 f
-0000000537 00000 f
-0000361394 00000 n
-0000369779 00000 n
-0000369811 00000 n
-0000000538 00000 f
-0000000539 00000 f
-0000000540 00000 f
-0000000541 00000 f
-0000000542 00000 f
-0000000543 00000 f
-0000000544 00000 f
-0000000545 00000 f
-0000000546 00000 f
-0000000547 00000 f
-0000000548 00000 f
-0000000549 00000 f
-0000000550 00000 f
-0000000551 00000 f
-0000000555 00000 f
-0000361466 00000 n
-0000369661 00000 n
-0000369693 00000 n
-0000000556 00000 f
-0000000557 00000 f
-0000000558 00000 f
-0000000559 00000 f
-0000000560 00000 f
-0000000561 00000 f
-0000000562 00000 f
-0000000563 00000 f
-0000000564 00000 f
-0000000565 00000 f
-0000000566 00000 f
-0000000567 00000 f
-0000000568 00000 f
-0000000569 00000 f
-0000000573 00000 f
-0000361540 00000 n
-0000369543 00000 n
-0000369575 00000 n
-0000000574 00000 f
-0000000575 00000 f
-0000000576 00000 f
-0000000577 00000 f
-0000000578 00000 f
-0000000579 00000 f
-0000000580 00000 f
-0000000581 00000 f
-0000000582 00000 f
-0000000583 00000 f
-0000000584 00000 f
-0000000585 00000 f
-0000000586 00000 f
-0000000587 00000 f
-0000000588 00000 f
-0000000589 00000 f
-0000000590 00000 f
-0000000591 00000 f
-0000000592 00000 f
-0000000593 00000 f
-0000000594 00000 f
-0000000595 00000 f
-0000000596 00000 f
-0000000597 00000 f
-0000000598 00000 f
-0000000599 00000 f
-0000000600 00000 f
-0000000601 00000 f
-0000000602 00000 f
-0000000603 00000 f
-0000000604 00000 f
-0000000608 00000 f
-0000361613 00000 n
-0000369425 00000 n
-0000369457 00000 n
-0000000609 00000 f
-0000000610 00000 f
-0000000611 00000 f
-0000000612 00000 f
-0000000613 00000 f
-0000000614 00000 f
-0000000615 00000 f
-0000000616 00000 f
-0000000617 00000 f
-0000000618 00000 f
-0000000619 00000 f
-0000000620 00000 f
-0000000621 00000 f
-0000000622 00000 f
-0000000623 00000 f
-0000000627 00000 f
-0000361685 00000 n
-0000369307 00000 n
-0000369339 00000 n
-0000000628 00000 f
-0000000629 00000 f
-0000000630 00000 f
-0000000631 00000 f
-0000000632 00000 f
-0000000633 00000 f
-0000000634 00000 f
-0000000635 00000 f
-0000000636 00000 f
-0000000637 00000 f
-0000000638 00000 f
-0000000639 00000 f
-0000000640 00000 f
-0000000641 00000 f
-0000000645 00000 f
-0000361757 00000 n
-0000369189 00000 n
-0000369221 00000 n
-0000000646 00000 f
-0000000647 00000 f
-0000000648 00000 f
-0000000649 00000 f
-0000000650 00000 f
-0000000651 00000 f
-0000000652 00000 f
-0000000653 00000 f
-0000000654 00000 f
-0000000655 00000 f
-0000000656 00000 f
-0000000657 00000 f
-0000000658 00000 f
-0000000659 00000 f
-0000000663 00000 f
-0000361831 00000 n
-0000369071 00000 n
-0000369103 00000 n
-0000000664 00000 f
-0000000665 00000 f
-0000000666 00000 f
-0000000667 00000 f
-0000000668 00000 f
-0000000669 00000 f
-0000000670 00000 f
-0000000671 00000 f
-0000000672 00000 f
-0000000673 00000 f
-0000000674 00000 f
-0000000675 00000 f
-0000000676 00000 f
-0000000677 00000 f
-0000000678 00000 f
-0000000679 00000 f
-0000000680 00000 f
-0000000681 00000 f
-0000000682 00000 f
-0000000683 00000 f
-0000000684 00000 f
-0000000685 00000 f
-0000000686 00000 f
-0000000687 00000 f
-0000000688 00000 f
-0000000689 00000 f
-0000000690 00000 f
-0000000691 00000 f
-0000000692 00000 f
-0000000693 00000 f
-0000000694 00000 f
-0000000698 00000 f
-0000361904 00000 n
-0000368953 00000 n
-0000368985 00000 n
-0000000702 00000 f
-0000361975 00000 n
-0000368835 00000 n
-0000368867 00000 n
-0000000703 00000 f
-0000000704 00000 f
-0000000705 00000 f
-0000000706 00000 f
-0000000707 00000 f
-0000000708 00000 f
-0000000709 00000 f
-0000000710 00000 f
-0000000711 00000 f
-0000000712 00000 f
-0000000713 00000 f
-0000000714 00000 f
-0000000715 00000 f
-0000000716 00000 f
-0000000717 00000 f
-0000000721 00000 f
-0000362047 00000 n
-0000368717 00000 n
-0000368749 00000 n
-0000000722 00000 f
-0000000723 00000 f
-0000000724 00000 f
-0000000725 00000 f
-0000000726 00000 f
-0000000727 00000 f
-0000000728 00000 f
-0000000729 00000 f
-0000000730 00000 f
-0000000731 00000 f
-0000000732 00000 f
-0000000733 00000 f
-0000000734 00000 f
-0000000738 00000 f
-0000362119 00000 n
-0000368599 00000 n
-0000368631 00000 n
-0000000739 00000 f
-0000000740 00000 f
-0000000741 00000 f
-0000000742 00000 f
-0000000743 00000 f
-0000000744 00000 f
-0000000745 00000 f
-0000000746 00000 f
-0000000747 00000 f
-0000000748 00000 f
-0000000749 00000 f
-0000000750 00000 f
-0000000751 00000 f
-0000000752 00000 f
-0000000756 00000 f
-0000362193 00000 n
-0000368481 00000 n
-0000368513 00000 n
-0000000757 00000 f
-0000000758 00000 f
-0000000759 00000 f
-0000000760 00000 f
-0000000761 00000 f
-0000000762 00000 f
-0000000763 00000 f
-0000000764 00000 f
-0000000765 00000 f
-0000000766 00000 f
-0000000767 00000 f
-0000000768 00000 f
-0000000769 00000 f
-0000000770 00000 f
-0000000771 00000 f
-0000000772 00000 f
-0000000773 00000 f
-0000000774 00000 f
-0000000775 00000 f
-0000000776 00000 f
-0000000777 00000 f
-0000000778 00000 f
-0000000779 00000 f
-0000000780 00000 f
-0000000781 00000 f
-0000000782 00000 f
-0000000783 00000 f
-0000000784 00000 f
-0000000788 00000 f
-0000362266 00000 n
-0000368363 00000 n
-0000368395 00000 n
-0000000792 00000 f
-0000362337 00000 n
-0000368245 00000 n
-0000368277 00000 n
-0000000793 00000 f
-0000000794 00000 f
-0000000795 00000 f
-0000000796 00000 f
-0000000797 00000 f
-0000000798 00000 f
-0000000799 00000 f
-0000000800 00000 f
-0000000801 00000 f
-0000000802 00000 f
-0000000803 00000 f
-0000000804 00000 f
-0000000805 00000 f
-0000000806 00000 f
-0000000807 00000 f
-0000000811 00000 f
-0000362409 00000 n
-0000368127 00000 n
-0000368159 00000 n
-0000000812 00000 f
-0000000813 00000 f
-0000000814 00000 f
-0000000815 00000 f
-0000000816 00000 f
-0000000817 00000 f
-0000000818 00000 f
-0000000819 00000 f
-0000000820 00000 f
-0000000821 00000 f
-0000000822 00000 f
-0000000823 00000 f
-0000000824 00000 f
-0000000828 00000 f
-0000362481 00000 n
-0000368009 00000 n
-0000368041 00000 n
-0000000829 00000 f
-0000000830 00000 f
-0000000831 00000 f
-0000000832 00000 f
-0000000833 00000 f
-0000000834 00000 f
-0000000835 00000 f
-0000000836 00000 f
-0000000837 00000 f
-0000000838 00000 f
-0000000839 00000 f
-0000000840 00000 f
-0000000841 00000 f
-0000000842 00000 f
-0000000846 00000 f
-0000362555 00000 n
-0000367891 00000 n
-0000367923 00000 n
-0000000847 00000 f
-0000000848 00000 f
-0000000849 00000 f
-0000000850 00000 f
-0000000851 00000 f
-0000000852 00000 f
-0000000853 00000 f
-0000000854 00000 f
-0000000855 00000 f
-0000000856 00000 f
-0000000857 00000 f
-0000000858 00000 f
-0000000859 00000 f
-0000000860 00000 f
-0000000861 00000 f
-0000000862 00000 f
-0000000863 00000 f
-0000000864 00000 f
-0000000865 00000 f
-0000000866 00000 f
-0000000867 00000 f
-0000000868 00000 f
-0000000869 00000 f
-0000000870 00000 f
-0000000871 00000 f
-0000000872 00000 f
-0000000873 00000 f
-0000000874 00000 f
-0000000875 00000 f
-0000000876 00000 f
-0000000877 00000 f
-0000000881 00000 f
-0000362628 00000 n
-0000367773 00000 n
-0000367805 00000 n
-0000000885 00000 f
-0000362699 00000 n
-0000367655 00000 n
-0000367687 00000 n
-0000000886 00000 f
-0000000887 00000 f
-0000000888 00000 f
-0000000889 00000 f
-0000000890 00000 f
-0000000891 00000 f
-0000000892 00000 f
-0000000893 00000 f
-0000000894 00000 f
-0000000895 00000 f
-0000000896 00000 f
-0000000897 00000 f
-0000000898 00000 f
-0000000899 00000 f
-0000000900 00000 f
-0000000904 00000 f
-0000362771 00000 n
-0000367537 00000 n
-0000367569 00000 n
-0000000905 00000 f
-0000000906 00000 f
-0000000907 00000 f
-0000000908 00000 f
-0000000909 00000 f
-0000000910 00000 f
-0000000911 00000 f
-0000000912 00000 f
-0000000913 00000 f
-0000000914 00000 f
-0000000915 00000 f
-0000000916 00000 f
-0000000917 00000 f
-0000000921 00000 f
-0000362843 00000 n
-0000367419 00000 n
-0000367451 00000 n
-0000000922 00000 f
-0000000923 00000 f
-0000000924 00000 f
-0000000925 00000 f
-0000000926 00000 f
-0000000927 00000 f
-0000000928 00000 f
-0000000929 00000 f
-0000000930 00000 f
-0000000931 00000 f
-0000000932 00000 f
-0000000933 00000 f
-0000000934 00000 f
-0000000935 00000 f
-0000000939 00000 f
-0000362917 00000 n
-0000367301 00000 n
-0000367333 00000 n
-0000000940 00000 f
-0000000941 00000 f
-0000000942 00000 f
-0000000943 00000 f
-0000000944 00000 f
-0000000945 00000 f
-0000000946 00000 f
-0000000947 00000 f
-0000000948 00000 f
-0000000949 00000 f
-0000000950 00000 f
-0000000951 00000 f
-0000000952 00000 f
-0000000953 00000 f
-0000000954 00000 f
-0000000955 00000 f
-0000000956 00000 f
-0000000957 00000 f
-0000000958 00000 f
-0000000959 00000 f
-0000000960 00000 f
-0000000961 00000 f
-0000000962 00000 f
-0000000963 00000 f
-0000000964 00000 f
-0000000965 00000 f
-0000000966 00000 f
-0000000967 00000 f
-0000000968 00000 f
-0000000969 00000 f
-0000000970 00000 f
-0000000974 00000 f
-0000362990 00000 n
-0000367183 00000 n
-0000367215 00000 n
-0000000978 00000 f
-0000363061 00000 n
-0000367065 00000 n
-0000367097 00000 n
-0000000979 00000 f
-0000000980 00000 f
-0000000981 00000 f
-0000000982 00000 f
-0000000983 00000 f
-0000000984 00000 f
-0000000985 00000 f
-0000000986 00000 f
-0000000987 00000 f
-0000000988 00000 f
-0000000989 00000 f
-0000000990 00000 f
-0000000991 00000 f
-0000000992 00000 f
-0000000993 00000 f
-0000000997 00000 f
-0000363133 00000 n
-0000366947 00000 n
-0000366979 00000 n
-0000000998 00000 f
-0000000999 00000 f
-0000001000 00000 f
-0000001001 00000 f
-0000001002 00000 f
-0000001003 00000 f
-0000001004 00000 f
-0000001005 00000 f
-0000001006 00000 f
-0000001007 00000 f
-0000001008 00000 f
-0000001009 00000 f
-0000001010 00000 f
-0000001014 00000 f
-0000363205 00000 n
-0000366827 00000 n
-0000366860 00000 n
-0000001015 00000 f
-0000001016 00000 f
-0000001017 00000 f
-0000001018 00000 f
-0000001019 00000 f
-0000001020 00000 f
-0000001021 00000 f
-0000001022 00000 f
-0000001023 00000 f
-0000001024 00000 f
-0000001025 00000 f
-0000001026 00000 f
-0000001027 00000 f
-0000001028 00000 f
-0000001032 00000 f
-0000363282 00000 n
-0000366707 00000 n
-0000366740 00000 n
-0000001033 00000 f
-0000001034 00000 f
-0000001035 00000 f
-0000001036 00000 f
-0000001037 00000 f
-0000001038 00000 f
-0000001039 00000 f
-0000001040 00000 f
-0000001041 00000 f
-0000001042 00000 f
-0000001043 00000 f
-0000001044 00000 f
-0000001045 00000 f
-0000001046 00000 f
-0000001047 00000 f
-0000001048 00000 f
-0000001049 00000 f
-0000001050 00000 f
-0000001051 00000 f
-0000001052 00000 f
-0000001053 00000 f
-0000001055 00000 f
-0000001397 00000 n
-0000001061 00000 f
-0000363358 00000 n
-0000363432 00000 n
-0000363507 00000 n
-0000363582 00000 n
-0000363659 00000 n
-0000001062 00000 f
-0000001063 00000 f
-0000001064 00000 f
-0000001065 00000 f
-0000001066 00000 f
-0000001067 00000 f
-0000001068 00000 f
-0000001069 00000 f
-0000001070 00000 f
-0000001071 00000 f
-0000001072 00000 f
-0000001073 00000 f
-0000001074 00000 f
-0000001075 00000 f
-0000001076 00000 f
-0000001077 00000 f
-0000001078 00000 f
-0000001079 00000 f
-0000001080 00000 f
-0000001081 00000 f
-0000001082 00000 f
-0000001083 00000 f
-0000001084 00000 f
-0000001085 00000 f
-0000001086 00000 f
-0000001087 00000 f
-0000001088 00000 f
-0000001089 00000 f
-0000001090 00000 f
-0000001091 00000 f
-0000001092 00000 f
-0000001093 00000 f
-0000001094 00000 f
-0000001095 00000 f
-0000001096 00000 f
-0000001097 00000 f
-0000001098 00000 f
-0000001099 00000 f
-0000001100 00000 f
-0000001101 00000 f
-0000001102 00000 f
-0000001103 00000 f
-0000001104 00000 f
-0000001105 00000 f
-0000001106 00000 f
-0000001107 00000 f
-0000001108 00000 f
-0000001109 00000 f
-0000001110 00000 f
-0000001111 00000 f
-0000001112 00000 f
-0000001113 00000 f
-0000001114 00000 f
-0000001115 00000 f
-0000001116 00000 f
-0000001117 00000 f
-0000001128 00000 f
-0000366587 00000 n
-0000366620 00000 n
-0000366467 00000 n
-0000366500 00000 n
-0000366347 00000 n
-0000366380 00000 n
-0000366227 00000 n
-0000366260 00000 n
-0000366107 00000 n
-0000366140 00000 n
-0000001129 00000 f
-0000001130 00000 f
-0000001131 00000 f
-0000001132 00000 f
-0000001133 00000 f
-0000001134 00000 f
-0000001135 00000 f
-0000001136 00000 f
-0000001137 00000 f
-0000001144 00000 f
-0000363735 00000 n
-0000363824 00000 n
-0000363898 00000 n
-0000363973 00000 n
-0000364048 00000 n
-0000364125 00000 n
-0000001145 00000 f
-0000001146 00000 f
-0000001147 00000 f
-0000001148 00000 f
-0000001149 00000 f
-0000001150 00000 f
-0000001151 00000 f
-0000001152 00000 f
-0000001153 00000 f
-0000001154 00000 f
-0000001155 00000 f
-0000001156 00000 f
-0000001157 00000 f
-0000001158 00000 f
-0000001159 00000 f
-0000001160 00000 f
-0000001161 00000 f
-0000001162 00000 f
-0000001163 00000 f
-0000001164 00000 f
-0000001165 00000 f
-0000001166 00000 f
-0000001167 00000 f
-0000001168 00000 f
-0000001169 00000 f
-0000001170 00000 f
-0000001171 00000 f
-0000001172 00000 f
-0000001173 00000 f
-0000001174 00000 f
-0000001175 00000 f
-0000001176 00000 f
-0000001177 00000 f
-0000001178 00000 f
-0000001179 00000 f
-0000001180 00000 f
-0000001181 00000 f
-0000001182 00000 f
-0000001183 00000 f
-0000001184 00000 f
-0000001185 00000 f
-0000001186 00000 f
-0000001187 00000 f
-0000001188 00000 f
-0000001189 00000 f
-0000001190 00000 f
-0000001191 00000 f
-0000001192 00000 f
-0000001193 00000 f
-0000001194 00000 f
-0000001195 00000 f
-0000001196 00000 f
-0000001197 00000 f
-0000001198 00000 f
-0000001199 00000 f
-0000001200 00000 f
-0000001213 00000 f
-0000365987 00000 n
-0000366020 00000 n
-0000365867 00000 n
-0000365900 00000 n
-0000365747 00000 n
-0000365780 00000 n
-0000365627 00000 n
-0000365660 00000 n
-0000365507 00000 n
-0000365540 00000 n
-0000365387 00000 n
-0000365420 00000 n
-0000001214 00000 f
-0000001215 00000 f
-0000001216 00000 f
-0000001217 00000 f
-0000001218 00000 f
-0000001219 00000 f
-0000001220 00000 f
-0000001221 00000 f
-0000001242 00000 f
-0000000000 00000 f
-0000364201 00000 n
-0000364290 00000 n
-0000364364 00000 n
-0000364439 00000 n
-0000364514 00000 n
-0000364591 00000 n
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000365267 00000 n
-0000365300 00000 n
-0000365147 00000 n
-0000365180 00000 n
-0000365027 00000 n
-0000365060 00000 n
-0000364907 00000 n
-0000364940 00000 n
-0000364787 00000 n
-0000364820 00000 n
-0000364667 00000 n
-0000364700 00000 n
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000000000 00000 f
-0000165196 00000 n
-0000164010 00000 n
-0000164099 00000 n
-0000164173 00000 n
-0000164248 00000 n
-0000164323 00000 n
-0000164400 00000 n
-0000372953 00000 n
-0000046731 00000 n
-0000167129 00000 n
-0000066385 00000 n
-0000066270 00000 n
-0000049827 00000 n
-0000050354 00000 n
-0000050894 00000 n
-0000051425 00000 n
-0000051967 00000 n
-0000052501 00000 n
-0000053037 00000 n
-0000053571 00000 n
-0000048764 00000 n
-0000049260 00000 n
-0000049312 00000 n
-0000149504 00000 n
-0000149568 00000 n
-0000137318 00000 n
-0000137382 00000 n
-0000122718 00000 n
-0000122782 00000 n
-0000110491 00000 n
-0000110555 00000 n
-0000095974 00000 n
-0000096038 00000 n
-0000083742 00000 n
-0000083806 00000 n
-0000069074 00000 n
-0000069138 00000 n
-0000054106 00000 n
-0000054170 00000 n
-0000054920 00000 n
-0000054984 00000 n
-0000066206 00000 n
-0000066424 00000 n
-0000069865 00000 n
-0000069929 00000 n
-0000083678 00000 n
-0000084551 00000 n
-0000084615 00000 n
-0000095910 00000 n
-0000096756 00000 n
-0000096820 00000 n
-0000110427 00000 n
-0000111301 00000 n
-0000111365 00000 n
-0000122654 00000 n
-0000123502 00000 n
-0000123566 00000 n
-0000137254 00000 n
-0000138131 00000 n
-0000138195 00000 n
-0000149440 00000 n
-0000150263 00000 n
-0000150327 00000 n
-0000163946 00000 n
-0000165076 00000 n
-0000165109 00000 n
-0000164956 00000 n
-0000164989 00000 n
-0000164836 00000 n
-0000164869 00000 n
-0000164716 00000 n
-0000164749 00000 n
-0000164596 00000 n
-0000164629 00000 n
-0000164476 00000 n
-0000164509 00000 n
-0000165488 00000 n
-0000165798 00000 n
-0000167207 00000 n
-0000167437 00000 n
-0000168418 00000 n
-0000175955 00000 n
-0000241545 00000 n
-0000307135 00000 n
-0000373027 00000 n
-trailer <</Size 1392/Root 1 0 R/Info 1391 0 R/ID[<DFAD564605F549E3ABF1FC747EBA5A34><483E7E53040A4057B79E46A3828ABB5E>]>> startxref 373165 %%EOF \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
deleted file mode 100644
index ef2615bacc..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif
deleted file mode 100644
index 2fcc77b2e8..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png
deleted file mode 100644
index d5ac639405..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
deleted file mode 100644
index 2a949311d7..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt b/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
deleted file mode 100644
index 17d1caeb66..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
+++ /dev/null
@@ -1 +0,0 @@
-jquery=1.4.2
diff --git a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala
deleted file mode 100644
index 9ba89146c0..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import base.comment._
-
-import reporters.Reporter
-import scala.collection._
-import scala.reflect.internal.util.{NoPosition, Position}
-import scala.language.postfixOps
-
-/** The comment parser transforms raw comment strings into `Comment` objects.
- * Call `parse` to run the parser. Note that the parser is stateless and
- * should only be built once for a given Scaladoc run.
- *
- * @param reporter The reporter on which user messages (errors, warnings) should be printed.
- *
- * @author Manohar Jonnalagedda
- * @author Gilles Dubochet */
-trait CommentFactory extends base.CommentFactoryBase {
- thisFactory: ModelFactory with CommentFactory with MemberLookup =>
-
- val global: Global
- import global.{ reporter, definitions, Symbol }
-
- protected val commentCache = mutable.HashMap.empty[(Symbol, TemplateImpl), Comment]
-
- def addCommentBody(sym: Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): Symbol = {
- commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None)
- sym
- }
-
- def comment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = {
- val key = (sym, inTpl)
- if (commentCache isDefinedAt key)
- Some(commentCache(key))
- else {
- val c = defineComment(sym, currentTpl, inTpl)
- if (c isDefined) commentCache += (sym, inTpl) -> c.get
- c
- }
- }
-
- /** A comment is usually created by the parser; however, for some special
- * cases we have to give some `inTpl` comments (parent class for example)
- * to the comment of the symbol.
- * This function manages some of those cases: param accessor and primary constructor */
- def defineComment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl):Option[Comment] = {
-
- //param accessor case
- // We just need the @param argument, we put it into the body
- if( sym.isParamAccessor &&
- inTpl.comment.isDefined &&
- inTpl.comment.get.valueParams.isDefinedAt(sym.encodedName)) {
- val comContent = Some(inTpl.comment.get.valueParams(sym.encodedName))
- Some(createComment(body0 = comContent))
- }
-
- // Primary constructor case
- // We need some content of the class definition: @constructor for the body,
- // @param and @deprecated, we can add some more if necessary
- else if (sym.isPrimaryConstructor && inTpl.comment.isDefined ) {
- val tplComment = inTpl.comment.get
- // If there is nothing to put into the comment there is no need to create it
- if(tplComment.constructor.isDefined ||
- tplComment.throws != Map.empty ||
- tplComment.valueParams != Map.empty ||
- tplComment.typeParams != Map.empty ||
- tplComment.deprecated.isDefined
- )
- Some(createComment( body0 = tplComment.constructor,
- throws0 = tplComment.throws,
- valueParams0 = tplComment.valueParams,
- typeParams0 = tplComment.typeParams,
- deprecated0 = tplComment.deprecated
- ))
- else None
- }
-
- //other comment cases
- // parse function will make the comment
- else {
- val rawComment = global.expandedDocComment(sym, inTpl.sym).trim
- if (rawComment != "") {
- val tplOpt = if (currentTpl.isDefined) currentTpl else Some(inTpl)
- val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), tplOpt)
- Some(c)
- }
- else None
- }
-
- }
-
- protected def parse(comment: String, src: String, pos: Position, inTplOpt: Option[DocTemplateImpl] = None): Comment = {
- assert(!inTplOpt.isDefined || inTplOpt.get != null)
- parseAtSymbol(comment, src, pos, inTplOpt map (_.sym))
- }
-
- /** Parses a string containing wiki syntax into a `Comment` object.
- * Note that the string is assumed to be clean:
- * - Removed Scaladoc start and end markers.
- * - Removed start-of-line star and one whitespace afterwards (if present).
- * - Removed all end-of-line whitespace.
- * - Only `endOfLine` is used to mark line endings. */
- def parseWiki(string: String, pos: Position, inTplOpt: Option[DocTemplateImpl]): Body = {
- assert(!inTplOpt.isDefined || inTplOpt.get != null)
- parseWikiAtSymbol(string,pos, inTplOpt map (_.sym))
- }
-}
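
As a side note, the memoized lookup performed by `comment` above (a cache keyed by the (symbol, template) pair, filled lazily from the parser) can be sketched in isolation roughly as follows; Sym, Tpl and ParsedComment are simplified stand-ins for the compiler's types, not the real API.

import scala.collection.mutable

// Simplified stand-ins for the compiler types used above (illustration only).
case class Sym(name: String)
case class Tpl(name: String)
case class ParsedComment(body: String)

object CommentCacheSketch {
  private val cache = mutable.HashMap.empty[(Sym, Tpl), ParsedComment]

  // Parse at most once per (symbol, template) pair, mirroring `comment` above.
  def comment(sym: Sym, inTpl: Tpl)(parse: => Option[ParsedComment]): Option[ParsedComment] =
    cache.get((sym, inTpl)).orElse {
      val parsed = parse
      parsed.foreach(c => cache((sym, inTpl)) = c)
      parsed
    }
}
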
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
deleted file mode 100644
index cbc1a23d44..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ /dev/null
@@ -1,631 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Manohar Jonnalagedda
- * @author Gilles Dubochet
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-import base.comment._
-import diagram._
-
-/** An entity in a Scaladoc universe. Entities are declarations in the program and correspond to symbols in the
- * compiler. Entities model the following Scala concepts:
- * - classes and traits;
- * - objects and packages;
- * - constructors;
- * - methods;
- * - values, lazy values, and variables;
- * - abstract type members and type aliases;
- * - type and value parameters;
- * - annotations. */
-trait Entity {
-
- /** Similar to symbols, so we can track entities */
- def id: Int
-
- /** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName`
- * instead. */
- def name : String
-
- /** The qualified name of the entity. This is this entity's name preceded by the qualified name of the template
- * of which this entity is a member. The qualified name is unique to this entity. */
- def qualifiedName: String
-
- /** The template of which this entity is a member. */
- def inTemplate: TemplateEntity
-
- /** The list of entities such that each is a member of the entity that follows it; the first entity is always this
- * entity, the last the root package entity. */
- def toRoot: List[Entity]
-
- /** The qualified name of this entity. */
- override def toString = qualifiedName
-
- /** The Scaladoc universe of which this entity is a member. */
- def universe: Universe
-
- /** The annotations attached to this entity, if any. */
- def annotations: List[Annotation]
-
- /** The kind of the entity */
- def kind: String
-
- /** Whether or not the template was defined in a package object */
- def inPackageObject: Boolean
-
- /** Indicates whether this entity lives in the types namespace (classes, traits, abstract/alias types) */
- def isType: Boolean
-
- /** Indicates whether this entity lives in the terms namespace (objects, packages, methods, values) */
- def isTerm: Boolean
-}
-
-object Entity {
- private def isDeprecated(x: Entity) = x match {
- case x: MemberEntity => x.deprecation.isDefined
- case _ => false
- }
- /** Ordering deprecated things last. */
- implicit lazy val EntityOrdering: Ordering[Entity] =
- Ordering[(Boolean, String)] on (x => (isDeprecated(x), x.name))
-}
-
-/** A template, which is either a class, trait, object or package. Depending on whether documentation is available
- * or not, the template will be modeled as a [scala.tools.nsc.doc.model.NoDocTemplate] or a
- * [scala.tools.nsc.doc.model.DocTemplateEntity]. */
-trait TemplateEntity extends Entity {
-
- /** Whether this template is a package (including the root package). */
- def isPackage: Boolean
-
- /** Whether this template is the root package. */
- def isRootPackage: Boolean
-
- /** Whether this template is a trait. */
- def isTrait: Boolean
-
- /** Whether this template is a class. */
- def isClass: Boolean
-
- /** Whether this template is an object. */
- def isObject: Boolean
-
- /** Whether documentation is available for this template. */
- def isDocTemplate: Boolean
-
- /** Whether documentation is available for this template. */
- def isNoDocMemberTemplate: Boolean
-
- /** Whether this template is a case class. */
- def isCaseClass: Boolean
-
- /** The self-type of this template, if it differs from the template type. */
- def selfType : Option[TypeEntity]
-}
-
-
-/** An entity that is a member of a template. All entities, including templates, are member of another entity
- * except for parameters and annotations. Note that all members of a template are modelled, including those that are
- * inherited and not declared locally. */
-trait MemberEntity extends Entity {
-
- /** The comment attached to this member, if any. */
- def comment: Option[Comment]
-
- /** The group this member is from */
- def group: String
-
- /** The template of which this entity is a member. */
- def inTemplate: DocTemplateEntity
-
- /** The list of entities such that each is a member of the entity that follows it; the first entity is always this
- * member, the last the root package entity. */
- def toRoot: List[MemberEntity]
-
- /** The templates in which this member has been declared. The first element of the list is the template that contains
- * the currently active declaration of this member; subsequent elements are declarations that have been overridden. If
- * the first element is equal to `inTemplate`, the member is declared locally, if not, it has been inherited. All
- * elements of this list are in the linearization of `inTemplate`. */
- def inDefinitionTemplates: List[TemplateEntity]
-
- /** The qualified name of the member in its currently active declaration template. */
- def definitionName: String
-
- /** The visibility of this member. Note that members with restricted visibility may not be modeled in some
- * universes. */
- def visibility: Visibility
-
- /** The flags that have been set for this entity. The following flags are supported: `implicit`, `sealed`, `abstract`,
- * and `final`. */
- def flags: List[Paragraph]
-
- /** Some deprecation message if this member is deprecated, or none otherwise. */
- def deprecation: Option[Body]
-
- /** Some migration warning if this member has a migration annotation, or none otherwise. */
- def migration: Option[Body]
-
- @deprecated("Use `inDefinitionTemplates` instead", "2.9.0")
- def inheritedFrom: List[TemplateEntity]
-
- /** For members representing values: the type of the value returned by this member; for members
- * representing types: the type itself. */
- def resultType: TypeEntity
-
- /** Whether this member is a method. */
- def isDef: Boolean
-
- /** Whether this member is a value (this excludes lazy values). */
- def isVal: Boolean
-
- /** Whether this member is a lazy value. */
- def isLazyVal: Boolean
-
- /** Whether this member is a variable. */
- def isVar: Boolean
-
- /** Whether this member is a constructor. */
- def isConstructor: Boolean
-
- /** Whether this member is an alias type. */
- def isAliasType: Boolean
-
- /** Whether this member is an abstract type. */
- def isAbstractType: Boolean
-
- /** Whether this member is a template. */
- def isTemplate: Boolean
-
- /** Whether this member is implicit. */
- def isImplicit: Boolean
-
- /** Whether this member is abstract. */
- def isAbstract: Boolean
-
- /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full
- * signature and the complete parameter descriptions. */
- def useCaseOf: Option[MemberEntity]
-
- /** If this member originates from an implicit conversion, we set the implicit information to the correct origin */
- def byConversion: Option[ImplicitConversion]
-
- /** The identity of this member, used for linking */
- def signature: String
-
- /** Compatibility signature, will be removed from future versions */
- def signatureCompat: String
-
- /** Indicates whether the member is inherited by implicit conversion */
- def isImplicitlyInherited: Boolean
-
- /** Indicates whether there is another member with the same name in the template that will take precedence */
- def isShadowedImplicit: Boolean
-
- /** Indicates whether there are other implicitly inherited members that have similar signatures (and thus they all
- * become ambiguous) */
- def isAmbiguousImplicit: Boolean
-
- /** Indicates whether the implicitly inherited member is shadowed or ambiguous in its template */
- def isShadowedOrAmbiguousImplicit: Boolean
-}
-
-object MemberEntity {
- // Oh contravariance, contravariance, wherefore art thou contravariance?
- // Note: the above works for both the commonly misunderstood meaning of the line and the real one.
- implicit lazy val MemberEntityOrdering: Ordering[MemberEntity] = Entity.EntityOrdering on (x => x)
-}
-
-/** An entity that is parameterized by types */
-trait HigherKinded {
-
- /** The type parameters of this entity. */
- def typeParams: List[TypeParam]
-}
-
-
-/** A template (class, trait, object or package) which is referenced in the universe, but for which no further
- * documentation is available. Only templates for which a source file is given are documented by Scaladoc. */
-trait NoDocTemplate extends TemplateEntity {
- def kind =
- if (isClass) "class"
- else if (isTrait) "trait"
- else if (isObject) "object"
- else ""
-}
-
-/** An inherited template that was not documented in its original owner - example:
- * in classpath: trait T { class C } -- T (and implicitly C) are not documented
- * in the source: trait U extends T -- C appears in U as a MemberTemplateImpl
- * -- that is, U has a member for it but C doesn't get its own page */
-trait MemberTemplateEntity extends TemplateEntity with MemberEntity with HigherKinded {
-
- /** The value parameters of this case class, or an empty list if this class is not a case class. As case class value
- * parameters cannot be curried, the outer list has exactly one element. */
- def valueParams: List[List[ValueParam]]
-
- /** The direct super-type of this template
- e.g.: {{{class A extends B[C[Int]] with D[E]}}} will have two direct parents: classes B and D
- NOTE: we are dropping the refinement here! */
- def parentTypes: List[(TemplateEntity, TypeEntity)]
-}
-
-/** A template (class, trait, object or package) for which documentation is available. Only templates for which
- * a source file is given are documented by Scaladoc. */
-trait DocTemplateEntity extends MemberTemplateEntity {
-
- /** The list of templates such that each is a member of the template that follows it; the first template is always
- * this template, the last the root package entity. */
- def toRoot: List[DocTemplateEntity]
-
- /** The source file in which the current template is defined and the line where the definition starts, if they exist.
- * A source file exists for all templates, except for those that are generated synthetically by Scaladoc. */
- def inSource: Option[(io.AbstractFile, Int)]
-
- /** An HTTP address at which the source of this template is available, if it is available. An address is available
- * only if the `docsourceurl` setting has been set. */
- def sourceUrl: Option[java.net.URL]
-
- /** All class, trait and object templates which are part of this template's linearization, in linearization order.
- * This template's linearization contains all of its direct and indirect super-classes and super-traits. */
- def linearizationTemplates: List[TemplateEntity]
-
- /** All instantiated types which are part of this template's linearization, in linearization order.
- * This template's linearization contains all of its direct and indirect super-types. */
- def linearizationTypes: List[TypeEntity]
-
- /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
- * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def allSubClasses: List[DocTemplateEntity]
-
- /** All class, trait and object templates for which this template is a *direct* super-class or super-trait.
- * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def directSubClasses: List[DocTemplateEntity]
-
- /** All members of this template. If this template is a package, only templates for which documentation is available
- * in the universe (`DocTemplateEntity`) are listed. */
- def members: List[MemberEntity]
-
- /** All templates that are members of this template. If this template is a package, only templates for which
- * documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def templates: List[TemplateEntity with MemberEntity]
-
- /** All methods that are members of this template. */
- def methods: List[Def]
-
- /** All values, lazy values and variables that are members of this template. */
- def values: List[Val]
-
- /** All abstract types that are members of this template. */
- def abstractTypes: List[AbstractType]
-
- /** All type aliases that are members of this template. */
- def aliasTypes: List[AliasType]
-
- /** The primary constructor of this class, if it has been defined. */
- def primaryConstructor: Option[Constructor]
-
- /** All constructors of this class, including the primary constructor. */
- def constructors: List[Constructor]
-
- /** The companion of this template, or none. If a class and an object are defined as a pair of the same name, the
- * other entity of the pair is the companion. */
- def companion: Option[DocTemplateEntity]
-
- /** The implicit conversions of this template (applies to classes and traits; objects and packages are not affected) */
- def conversions: List[ImplicitConversion]
-
- /** The shadowing information for the implicitly added members */
- def implicitsShadowing: Map[MemberEntity, ImplicitMemberShadowing]
-
- /** Classes that can be implicitly converted to this class */
- def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversion)]
-
- /** Classes to which this class can be implicitly converted
- NOTE: Some classes might not be included in the scaladoc run so they will be NoDocTemplateEntities */
- def outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversion)]
-
- /** If this template takes part in inheritance and implicit conversion relations, it will be shown in this diagram */
- def inheritanceDiagram: Option[Diagram]
-
- /** If this template contains other templates, such as classes and traits, they will be shown in this diagram */
- def contentDiagram: Option[Diagram]
-
- /** Returns the group description taken either from this template or its linearizationTypes */
- def groupDescription(group: String): Option[Body]
-
- /** Returns the group priority taken either from this template or its linearizationTypes */
- def groupPriority(group: String): Int
-
- /** Returns the group name taken either from this template or its linearizationTypes */
- def groupName(group: String): String
-}
-
-/** A trait template. */
-trait Trait extends MemberTemplateEntity {
- def kind = "trait"
-}
-
-/** A class template. */
-trait Class extends MemberTemplateEntity {
- override def kind = "class"
-}
-
-/** An object template. */
-trait Object extends MemberTemplateEntity {
- def kind = "object"
-}
-
-/** A package template. A package is in the universe if it is declared as a package object, or if it
- * contains at least one template. */
-trait Package extends DocTemplateEntity {
-
- /** The package of which this package is a member. */
- def inTemplate: Package
-
- /** The list of packages such that each is a member of the package that follows it; the first package is always this
- * package, the last the root package. */
- def toRoot: List[Package]
-
- /** All packages that are member of this package. */
- def packages: List[Package]
-
- override def kind = "package"
-}
-
-
-/** The root package, which contains directly or indirectly all members in the universe. A universe
- * contains exactly one root package. */
-trait RootPackage extends Package
-
-
-/** A non-template member (method, value, lazy value, variable, constructor, alias type, and abstract type). */
-trait NonTemplateMemberEntity extends MemberEntity {
-
- /** Whether this member is a use case. A use case is a member which does not exist in the documented code.
- * It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */
- def isUseCase: Boolean
-
- /** Whether this member is a bridge member. A bridge member does only exist for binary compatibility reasons
- * and should not appear in ScalaDoc. */
- def isBridge: Boolean
-}
-
-
-/** A method (`def`) of a template. */
-trait Def extends NonTemplateMemberEntity with HigherKinded {
-
- /** The value parameters of this method. Each parameter block of a curried method is an element of the list.
- * Each parameter block is a list of value parameters. */
- def valueParams : List[List[ValueParam]]
-
- def kind = "method"
-}
-
-
-/** A constructor of a class. */
-trait Constructor extends NonTemplateMemberEntity {
-
- /** Whether this is the primary constructor of a class. The primary constructor is defined syntactically as part of
- * the declaration of the class. */
- def isPrimary: Boolean
-
- /** The value parameters of this constructor. As constructors cannot be curried, the outer list has exactly one
- * element. */
- def valueParams : List[List[ValueParam]]
-
- def kind = "constructor"
-}
-
-
-/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a template. */
-trait Val extends NonTemplateMemberEntity {
- def kind = "[lazy] value/variable"
-}
-
-
-/** An abstract type member of a template. */
-trait AbstractType extends MemberTemplateEntity with HigherKinded {
-
- /** The lower bound for this abstract type, if it has been defined. */
- def lo: Option[TypeEntity]
-
- /** The upper bound for this abstract type, if it has been defined. */
- def hi: Option[TypeEntity]
-
- def kind = "abstract type"
-}
-
-
-/** A type alias of a template. */
-trait AliasType extends MemberTemplateEntity with HigherKinded {
-
- /** The type aliased by this type alias. */
- def alias: TypeEntity
-
- def kind = "type alias"
-}
-
-
-/** A parameter to an entity. */
-trait ParameterEntity {
-
- def name: String
-}
-
-
-/** A type parameter to a class, trait, or method. */
-trait TypeParam extends ParameterEntity with HigherKinded {
-
- /** The variance of this type parameter. Valid values are "+", "-", and the empty string. */
- def variance: String
-
- /** The lower bound for this type parameter, if it has been defined. */
- def lo: Option[TypeEntity]
-
- /** The upper bound for this type parameter, if it has been defined. */
- def hi: Option[TypeEntity]
-}
-
-
-/** A value parameter to a constructor or method. */
-trait ValueParam extends ParameterEntity {
-
- /** The type of this value parameter. */
- def resultType: TypeEntity
-
- /** The default value of this value parameter, if it has been defined. */
- def defaultValue: Option[TreeEntity]
-
- /** Whether this value parameter is implicit. */
- def isImplicit: Boolean
-}
-
-
-/** An annotation to an entity. */
-trait Annotation extends Entity {
-
- /** The class of this annotation. */
- def annotationClass: TemplateEntity
-
- /** The arguments passed to the constructor of the annotation class. */
- def arguments: List[ValueArgument]
-
- def kind = "annotation"
-}
-
-/** A trait that signals the member results from an implicit conversion */
-trait ImplicitConversion {
-
- /** The source of the implicit conversion */
- def source: DocTemplateEntity
-
- /** The result type after the conversion */
- def targetType: TypeEntity
-
- /** The template corresponding to the target type of the conversion, if any.
- * Note: not all targetTypes have a corresponding template. Examples include conversions resulting in refinement
- * types, so callers must handle the None case.
- */
- def targetTemplate: Option[TemplateEntity]
-
- /** The components of the implicit conversion type parents */
- def targetTypeComponents: List[(TemplateEntity, TypeEntity)]
-
- /** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */
- def convertorMethod: Either[MemberEntity, String]
-
- /** A short name of the conversion */
- def conversionShortName: String
-
- /** A qualified name uniquely identifying the conversion (currently: the conversion method's qualified name) */
- def conversionQualifiedName: String
-
- /** The entity that performed the conversion */
- def convertorOwner: TemplateEntity
-
- /** The constraints that the conversion puts on the type parameters */
- def constraints: List[Constraint]
-
- /** The members inherited by this implicit conversion */
- def members: List[MemberEntity]
-
- /** Is this a hidden implicit conversion (as specified in the settings) */
- def isHiddenConversion: Boolean
-}
-
-/** Shadowing captures the information that the member is shadowed by some other members.
- * There are two cases of implicitly added member shadowing:
- * 1) shadowing by an original class member (the class already has that member):
- * in this case it won't be possible to call the member directly; the type checker will fail attempting to adapt
- * the call arguments (or, if they fit, it will call the original class' method)
- * 2) shadowing by other possible implicit conversions:
- * this will result in an ambiguous implicit conversion error
- */
-trait ImplicitMemberShadowing {
- /** The members that shadow the current entry; use .inTemplate to get the template name */
- def shadowingMembers: List[MemberEntity]
-
- /** The members that make this implicit conversion ambiguous.
- Note: for ambiguatingMembers the following invariant holds:
- assert(ambiguatingMembers.forall(_.byConversion.isDefined)) */
- def ambiguatingMembers: List[MemberEntity]
-
- def isShadowed: Boolean = !shadowingMembers.isEmpty
- def isAmbiguous: Boolean = !ambiguatingMembers.isEmpty
-}
-
-/** A trait that encapsulates a constraint necessary for implicit conversion */
-trait Constraint
-
-/** A constraint involving a type parameter which must be in scope */
-trait ImplicitInScopeConstraint extends Constraint {
- /** The type of the implicit value required */
- def implicitType: TypeEntity
-
- /** toString for debugging */
- override def toString = "an implicit _: " + implicitType.name + " must be in scope"
-}
-
-trait TypeClassConstraint extends ImplicitInScopeConstraint with TypeParamConstraint {
- /** Type class name */
- def typeClassEntity: TemplateEntity
-
- /** toString for debugging */
- override def toString = typeParamName + " is a class of type " + typeClassEntity.qualifiedName + " (" +
- typeParamName + ": " + typeClassEntity.name + ")"
-}
-
-trait KnownTypeClassConstraint extends TypeClassConstraint {
- /** Type explanation, takes the type parameter name and generates the explanation */
- def typeExplanation: (String) => String
-
- /** toString for debugging */
- override def toString = typeExplanation(typeParamName) + " (" + typeParamName + ": " + typeClassEntity.name + ")"
-}
-
-/** A constraint involving a type parameter */
-trait TypeParamConstraint extends Constraint {
- /** The type parameter involved */
- def typeParamName: String
-}
-
-trait EqualTypeParamConstraint extends TypeParamConstraint {
- /** The rhs */
- def rhs: TypeEntity
- /** toString for debugging */
- override def toString = typeParamName + " is " + rhs.name + " (" + typeParamName + " =:= " + rhs.name + ")"
-}
-
-trait BoundedTypeParamConstraint extends TypeParamConstraint {
- /** The lower bound */
- def lowerBound: TypeEntity
-
- /** The upper bound */
- def upperBound: TypeEntity
-
- /** toString for debugging */
- override def toString = typeParamName + " is a superclass of " + lowerBound.name + " and a subclass of " +
- upperBound.name + " (" + typeParamName + " >: " + lowerBound.name + " <: " + upperBound.name + ")"
-}
-
-trait LowerBoundedTypeParamConstraint extends TypeParamConstraint {
- /** The lower bound */
- def lowerBound: TypeEntity
-
- /** toString for debugging */
- override def toString = typeParamName + " is a superclass of " + lowerBound.name + " (" + typeParamName + " >: " +
- lowerBound.name + ")"
-}
-
-trait UpperBoundedTypeParamConstraint extends TypeParamConstraint {
- /** The upper bound */
- def upperBound: TypeEntity
-
- /** toString for debugging */
- override def toString = typeParamName + " is a subclass of " + upperBound.name + " (" + typeParamName + " <: " +
- upperBound.name + ")"
-}
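
For illustration, the "deprecated entities sort last" trick used by `Entity.EntityOrdering` above relies on `false` ordering before `true`. A minimal, self-contained sketch, with a hypothetical Item standing in for MemberEntity:

case class Item(name: String, deprecated: Boolean)

object DeprecatedLastSketch {
  // Pair ordering: non-deprecated (false) first, then alphabetical by name.
  implicit val itemOrdering: Ordering[Item] =
    Ordering[(Boolean, String)].on(i => (i.deprecated, i.name))

  def main(args: Array[String]): Unit = {
    val items = List(Item("b", deprecated = true), Item("a", deprecated = false), Item("c", deprecated = false))
    println(items.sorted.map(_.name)) // prints List(a, c, b): the deprecated member comes last
  }
}
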
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
deleted file mode 100755
index 4ee6daf73e..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Pedro Furlanetto
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-
-object IndexModelFactory {
-
- def makeIndex(universe: Universe): Index = new Index {
-
- lazy val firstLetterIndex: Map[Char, SymbolMap] = {
-
- object result extends mutable.HashMap[Char,SymbolMap] {
-
- /* Owner template ordering */
- implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
- /* symbol name ordering */
- implicit def orderingMap = math.Ordering.String
-
- def addMember(d: MemberEntity) = {
- val firstLetter = {
- val ch = d.name.head.toLower
- if(ch.isLetterOrDigit) ch else '_'
- }
- val letter = this.get(firstLetter).getOrElse {
- immutable.SortedMap[String, SortedSet[MemberEntity]]()
- }
- val members = letter.get(d.name).getOrElse {
- SortedSet.empty[MemberEntity](Ordering.by { _.toString })
- } + d
- this(firstLetter) = letter + (d.name -> members)
- }
- }
-
- //@scala.annotation.tailrec // TODO
- def gather(owner: DocTemplateEntity): Unit =
- for(m <- owner.members if m.inDefinitionTemplates.isEmpty || m.inDefinitionTemplates.head == owner)
- m match {
- case tpl: DocTemplateEntity =>
- result.addMember(tpl)
- gather(tpl)
- case non: MemberEntity if !non.isConstructor =>
- result.addMember(non)
- case x @ _ =>
- }
-
- gather(universe.rootPackage)
-
- result.toMap
-
- }
-
- }
-
-}
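
The first-letter bucketing that `addMember` performs above can be shown standalone. This sketch groups plain names (standing in for member entities, a simplifying assumption) the same way, filing non-alphanumeric names under '_':

object FirstLetterIndexSketch {
  // Group names by their lower-cased first letter; anything that is not a letter
  // or digit (e.g. symbolic names like "++") goes into the '_' bucket.
  def firstLetterIndex(names: List[String]): Map[Char, List[String]] =
    names.groupBy { name =>
      val ch = name.head.toLower
      if (ch.isLetterOrDigit) ch else '_'
    }.map { case (letter, ns) => letter -> ns.sorted }

  def main(args: Array[String]): Unit =
    println(firstLetterIndex(List("map", "Max", "++", "filter")))
    // e.g. Map('m' -> List(Max, map), '_' -> List(++), 'f' -> List(filter)), bucket order unspecified
}
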
diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
deleted file mode 100644
index 23259a4ae8..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package scala.tools.nsc
-package doc
-package model
-
-import base._
-
-/** This trait extracts all required information for documentation from compilation units */
-trait MemberLookup extends base.MemberLookupBase {
- thisFactory: ModelFactory =>
-
- import global._
- import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
-
- override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] =
- findTemplateMaybe(sym) match {
- case Some(tpl) => Some(LinkToTpl(tpl))
- case None =>
- findTemplateMaybe(site) flatMap { inTpl =>
- inTpl.members find (_.asInstanceOf[EntityImpl].sym == sym) map (LinkToMember(_, inTpl))
- }
- }
-
- override def chooseLink(links: List[LinkTo]): LinkTo = {
- val mbrs = links.collect {
- case lm@LinkToMember(mbr: MemberEntity, _) => (mbr, lm)
- }
- if (mbrs.isEmpty)
- links.head
- else
- mbrs.min(Ordering[MemberEntity].on[(MemberEntity, LinkTo)](_._1))._2
- }
-
- override def toString(link: LinkTo) = link match {
- case LinkToTpl(tpl: EntityImpl) => tpl.sym.toString
- case LinkToMember(mbr: EntityImpl, inTpl: EntityImpl) =>
- mbr.sym.signatureString + " in " + inTpl.sym.toString
- case _ => link.toString
- }
-
- override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
- val sym1 =
- if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
- else if (sym.isPackage)
- /* Get package object which has associatedFile ne null */
- sym.info.member(newTermName("package"))
- else sym
- Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src =>
- val path = src.path
- settings.extUrlMapping get path map { url =>
- LinkToExternal(name, url + "#" + name)
- }
- } orElse {
- // Deprecated option.
- settings.extUrlPackageMapping find {
- case (pkg, _) => name startsWith pkg
- } map {
- case (_, url) => LinkToExternal(name, url + "#" + name)
- }
- }
- }
-
- override def warnNoLink = !settings.docNoLinkWarnings.value
-}
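
The link-selection policy of `chooseLink` above (member links beat template links; among member links, the smallest by the member ordering wins) can be rendered with simplified types. The Link ADT below is a hypothetical stand-in for the real LinkTo hierarchy:

sealed trait Link
case class ToTpl(name: String) extends Link
case class ToMember(name: String, inTpl: String) extends Link

object ChooseLinkSketch {
  // Prefer member links; among several, pick the smallest by name (the model uses
  // the MemberEntity ordering instead of plain string comparison).
  def chooseLink(links: List[Link]): Link = {
    val members = links.collect { case m: ToMember => m }
    if (members.isEmpty) links.head else members.minBy(_.name)
  }

  def main(args: Array[String]): Unit =
    println(chooseLink(List(ToTpl("List"), ToMember("map", "List"), ToMember("flatMap", "List"))))
    // prints ToMember(flatMap,List)
}
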
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
deleted file mode 100644
index d9b173bc43..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ /dev/null
@@ -1,1103 +0,0 @@
-/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
-
-package scala.tools.nsc
-package doc
-package model
-
-import base._
-import base.comment._
-import diagram._
-
-import scala.collection._
-import scala.util.matching.Regex
-
-import symtab.Flags
-
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
-
-/** This class extracts all required information for documentation from compilation units */
-class ModelFactory(val global: Global, val settings: doc.Settings) {
- thisFactory: ModelFactory
- with ModelFactoryImplicitSupport
- with ModelFactoryTypeSupport
- with DiagramFactory
- with CommentFactory
- with TreeFactory
- with MemberLookup =>
-
- import global._
- import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
- import rootMirror.{ RootPackage, RootClass, EmptyPackage }
-
- // Defaults for member grouping, that may be overridden by the template
- val defaultGroup = "Ungrouped"
- val defaultGroupName = "Ungrouped"
- val defaultGroupDesc = None
- val defaultGroupPriority = 1000
-
- def templatesCount = docTemplatesCache.count(_._2.isDocTemplate) - droppedPackages.size
-
- private var _modelFinished = false
- def modelFinished: Boolean = _modelFinished
- private var universe: Universe = null
-
- private def dbg(msg: String) = if (sys.props contains "scala.scaladoc.debug") println(msg)
- protected def closestPackage(sym: Symbol) = {
- if (sym.isPackage || sym.isPackageClass) sym
- else sym.enclosingPackage
- }
-
- private def printWithoutPrefix(memberSym: Symbol, templateSym: Symbol) = {
- dbg(
- "memberSym " + memberSym + " templateSym " + templateSym + " encls = " +
- closestPackage(memberSym) + ", " + closestPackage(templateSym)
- )
- memberSym.isOmittablePrefix || (closestPackage(memberSym) == closestPackage(templateSym))
- }
-
- def makeModel: Option[Universe] = {
- val universe = new Universe { thisUniverse =>
- thisFactory.universe = thisUniverse
- val settings = thisFactory.settings
- val rootPackage = modelCreation.createRootPackage
- }
- _modelFinished = true
- // complete the links between model entities, everything that couldn't have been done before
- universe.rootPackage.completeModel
-
- Some(universe) filter (_.rootPackage != null)
- }
-
- // state:
- var ids = 0
- private val droppedPackages = mutable.Set[PackageImpl]()
- protected val docTemplatesCache = new mutable.LinkedHashMap[Symbol, DocTemplateImpl]
- protected val noDocTemplatesCache = new mutable.LinkedHashMap[Symbol, NoDocTemplateImpl]
- def packageDropped(tpl: DocTemplateImpl) = tpl match {
- case p: PackageImpl => droppedPackages(p)
- case _ => false
- }
-
- def optimize(str: String): String =
- if (str.length < 16) str.intern else str
-
- /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
-
- abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity {
- val id = { ids += 1; ids }
- val name = optimize(sym.nameString)
- val universe = thisFactory.universe
-
- // Debugging:
- // assert(id != 36, sym + " " + sym.getClass)
- //println("Creating entity #" + id + " [" + kind + " " + qualifiedName + "] for sym " + sym.kindString + " " + sym.ownerChain.reverse.map(_.name).mkString("."))
-
- def inTemplate: TemplateImpl = inTpl
- def toRoot: List[EntityImpl] = this :: inTpl.toRoot
- def qualifiedName = name
- def annotations = sym.annotations.map(makeAnnotation)
- def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
- def isType = sym.name.isTypeName
- def isTerm = sym.name.isTermName
- }
-
- trait TemplateImpl extends EntityImpl with TemplateEntity {
- override def qualifiedName: String =
- if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
- def isPackage = sym.isPackage
- def isTrait = sym.isTrait
- def isClass = sym.isClass && !sym.isTrait
- def isObject = sym.isModule && !sym.isPackage
- def isCaseClass = sym.isCaseClass
- def isRootPackage = false
- def isNoDocMemberTemplate = false
- def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
- }
-
- abstract class MemberImpl(sym: Symbol, inTpl: DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
- lazy val comment = {
- // If the current tpl is a DocTemplate, we consider it to be the root for resolving link targets (instead of the
- // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]]
- // in the doc comment of MyClass
- val thisTpl = this match {
- case d: DocTemplateImpl => Some(d)
- case _ => None
- }
- if (inTpl != null) thisFactory.comment(sym, thisTpl, inTpl) else None
- }
- def group = if (comment.isDefined) comment.get.group.getOrElse(defaultGroup) else defaultGroup
- override def inTemplate = inTpl
- override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
- def inDefinitionTemplates = this match {
- case mb: NonTemplateMemberEntity if (mb.useCaseOf.isDefined) =>
- mb.useCaseOf.get.inDefinitionTemplates
- case _ =>
- if (inTpl == null)
- List(makeRootPackage)
- else
- makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
- }
- def visibility = {
- if (sym.isPrivateLocal) PrivateInInstance()
- else if (sym.isProtectedLocal) ProtectedInInstance()
- else {
- val qual =
- if (sym.hasAccessBoundary)
- Some(makeTemplate(sym.privateWithin))
- else None
- if (sym.isPrivate) PrivateInTemplate(inTpl)
- else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl)
- else if (qual.isDefined) PrivateInTemplate(qual.get)
- else Public()
- }
- }
- def flags = {
- val fgs = mutable.ListBuffer.empty[Paragraph]
- if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
- if (sym.isSealed) fgs += Paragraph(Text("sealed"))
- if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
- /* Resetting the DEFERRED flag is a little trick here for refined types: (example from scala.collection)
- * {{{
- * implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
- * def isParallel = ...
- * }}}
- * the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have
- * any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */
- if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (!isImplicitlyInherited)) fgs += Paragraph(Text("abstract"))
- if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
- fgs.toList
- }
- def deprecation =
- if (sym.isDeprecated)
- Some((sym.deprecationMessage, sym.deprecationVersion) match {
- case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
- case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
- case (None, Some(ver)) => parseWiki("''(Since version " + ver + ")''", NoPosition, Some(inTpl))
- case (None, None) => Body(Nil)
- })
- else
- comment flatMap { _.deprecated }
- def migration =
- if(sym.hasMigrationAnnotation)
- Some((sym.migrationMessage, sym.migrationVersion) match {
- case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
- case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
- case (None, Some(ver)) => parseWiki("''(Changed in version " + ver + ")''", NoPosition, Some(inTpl))
- case (None, None) => Body(Nil)
- })
- else
- None
- def inheritedFrom =
- if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else
- makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) })
- def resultType = {
- def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone
- case PolyType(_, res) => resultTpe(res)
- case MethodType(_, res) => resultTpe(res)
- case NullaryMethodType(res) => resultTpe(res)
- case _ => tpe
- }
- val tpe = if (!isImplicitlyInherited) sym.tpe else byConversion.get.toType memberInfo sym
- makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym)
- }
- def isDef = false
- def isVal = false
- def isLazyVal = false
- def isVar = false
- def isImplicit = sym.isImplicit
- def isConstructor = false
- def isAliasType = false
- def isAbstractType = false
- def isAbstract =
- // for the explanation of conversion == null see comment on flags
- ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) ||
- sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
- def isTemplate = false
- def signature = externalSignature(sym)
- lazy val signatureCompat = {
-
- def defParams(mbr: Any): String = mbr match {
- case d: MemberEntity with Def =>
- val paramLists: List[String] =
- if (d.valueParams.isEmpty) Nil
- else d.valueParams map (ps => ps map (_.resultType.name) mkString ("(",",",")"))
- paramLists.mkString
- case _ => ""
- }
-
- def tParams(mbr: Any): String = mbr match {
- case hk: HigherKinded if !hk.typeParams.isEmpty =>
- def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = {
- def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match {
- case None => ""
- case Some(tpe) => pre ++ tpe.toString
- }
- bound0(hi, "<:") ++ bound0(lo, ">:")
- }
- "[" + hk.typeParams.map(tp => tp.variance + tp.name + tParams(tp) + boundsToString(tp.hi, tp.lo)).mkString(", ") + "]"
- case _ => ""
- }
-
- (name + tParams(this) + defParams(this) +":"+ resultType.name).replaceAll("\\s","") // no spaces allowed, they break links
- }
- // these only apply for NonTemplateMemberEntities
- def useCaseOf: Option[MemberEntity] = None
- def byConversion: Option[ImplicitConversionImpl] = None
- def isImplicitlyInherited = false
- def isShadowedImplicit = false
- def isAmbiguousImplicit = false
- def isShadowedOrAmbiguousImplicit = false
- }
-
- /** A template that is not documented at all. The class is instantiated during lookups, to indicate that the class
- * exists, but should not be documented (either it's not included in the source or it's not visible)
- */
- class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate {
- assert(modelFinished)
- assert(!(noDocTemplatesCache isDefinedAt sym))
- noDocTemplatesCache += (sym -> this)
- def isDocTemplate = false
- }
-
- /** An inherited template that was not documented in its original owner - example:
- * in classpath: trait T { class C } -- T (and implicitly C) are not documented
- * in the source: trait U extends T -- C appears in U as a MemberTemplateImpl -- that is, U has a member for it
- * but C doesn't get its own page
- */
- abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity {
- // no templates cache for this class, each owner gets its own instance
- override def isTemplate = true
- def isDocTemplate = false
- override def isNoDocMemberTemplate = true
- lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
- def valueParams: List[List[ValueParam]] = Nil // TODO: these are now only computed for DocTemplates
-
- // Seems unused
- // def parentTemplates =
- // if (sym.isPackage || sym == AnyClass)
- // List()
- // else
- // sym.tpe.parents.flatMap { tpe: Type =>
- // val tSym = tpe.typeSymbol
- // if (tSym != NoSymbol)
- // List(makeTemplate(tSym))
- // else
- // List()
- // } filter (_.isInstanceOf[DocTemplateEntity])
-
- def parentTypes =
- if (sym.isPackage || sym == AnyClass) List() else {
- val tps = (this match {
- case a: AliasType => sym.tpe.dealias.parents
- case a: AbstractType => sym.info.bounds match {
- case TypeBounds(lo, RefinedType(parents, decls)) => parents
- case TypeBounds(lo, hi) => hi :: Nil
- case _ => Nil
- }
- case _ => sym.tpe.parents
- }) map { _.asSeenFrom(sym.thisType, sym) }
- makeParentTypes(RefinedType(tps, EmptyScope), Some(this), inTpl)
- }
- }
-
- /** The instantiation of a `DocTemplateImpl` triggers the creation of the following entities:
- * all ancestors of the template and all non-package members.
- */
- abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberTemplateImpl(sym, inTpl) with DocTemplateEntity {
- assert(!modelFinished)
- assert(!(docTemplatesCache isDefinedAt sym), sym)
- docTemplatesCache += (sym -> this)
-
- if (settings.verbose.value)
- inform("Creating doc template for " + sym)
-
- override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
-
- protected def inSourceFromSymbol(symbol: Symbol) =
- if (symbol.sourceFile != null && ! symbol.isSynthetic)
- Some((symbol.sourceFile, symbol.pos.line))
- else
- None
-
- def inSource = inSourceFromSymbol(sym)
-
- def sourceUrl = {
- def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
- val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
-
- if (!settings.docsourceurl.isDefault)
- inSource map { case (file, _) =>
- val filePath = fixPath(file.path).replaceFirst("^" + assumedSourceRoot, "").stripSuffix(".scala")
- val tplOwner = this.inTemplate.qualifiedName
- val tplName = this.name
- val patches = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""")
- def substitute(name: String): String = name match {
- case "FILE_PATH" => filePath
- case "TPL_OWNER" => tplOwner
- case "TPL_NAME" => tplName
- }
- val patchedString = patches.replaceAllIn(settings.docsourceurl.value, m => java.util.regex.Matcher.quoteReplacement(substitute(m.group(1))) )
- new java.net.URL(patchedString)
- }
- else None
- }
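The €{...} substitution that sourceUrl performs can be exercised on its own. A small sketch, with made-up values standing in for settings.docsourceurl and the computed file path:

import java.util.regex.Matcher
import scala.util.matching.Regex

object SourceUrlSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical -doc-source-url value; the real one comes from settings.docsourceurl.
    val docSourceUrl = "https://example.org/browse€{FILE_PATH}.scala#€{TPL_NAME}"
    val patches = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""")
    def substitute(name: String): String = name match {
      case "FILE_PATH" => "/scala/collection/immutable/List"
      case "TPL_OWNER" => "scala.collection.immutable"
      case "TPL_NAME"  => "List"
    }
    val patched = patches.replaceAllIn(docSourceUrl, m => Matcher.quoteReplacement(substitute(m.group(1))))
    println(patched) // https://example.org/browse/scala/collection/immutable/List.scala#List
  }
}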
-
- protected def linearizationFromSymbol(symbol: Symbol): List[(TemplateEntity, TypeEntity)] = {
- symbol.ancestors map { ancestor =>
- val typeEntity = makeType(symbol.info.baseType(ancestor), this)
- val tmplEntity = makeTemplate(ancestor) match {
- case tmpl: DocTemplateImpl => tmpl registerSubClass this ; tmpl
- case tmpl => tmpl
- }
- (tmplEntity, typeEntity)
- }
- }
-
- lazy val linearization = linearizationFromSymbol(sym)
- def linearizationTemplates = linearization map { _._1 }
- def linearizationTypes = linearization map { _._2 }
-
- /* Subclass cache */
- private lazy val subClassesCache = (
- if (sym == AnyRefClass) null
- else mutable.ListBuffer[DocTemplateEntity]()
- )
- def registerSubClass(sc: DocTemplateEntity): Unit = {
- if (subClassesCache != null)
- subClassesCache += sc
- }
- def allSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
- def directSubClasses = allSubClasses.filter(_.parentTypes.map(_._1).contains(this))
-
- /* Implicitly convertible class cache */
- private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null
- def registerImplicitlyConvertibleClass(dtpl: DocTemplateImpl, conv: ImplicitConversionImpl): Unit = {
- if (implicitlyConvertibleClassesCache == null)
- implicitlyConvertibleClassesCache = mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)]()
- implicitlyConvertibleClassesCache += ((dtpl, conv))
- }
-
- def incomingImplicitlyConvertedClasses: List[(DocTemplateImpl, ImplicitConversionImpl)] =
- if (implicitlyConvertibleClassesCache == null)
- List()
- else
- implicitlyConvertibleClassesCache.toList
-
- // the implicit conversions are generated eagerly, but the members generated by implicit conversions are added
- // lazily, on completeModel
- val conversions: List[ImplicitConversionImpl] =
- if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil
-
- // members as given by the compiler
- lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList
-
- // the inherited templates (classes, traits or objects)
- var memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this))
- // the direct members (methods, values, vars, types and directly contained templates)
- var memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_))
- // the members generated by the symbols in memberSymsEager
- val ownMembers = (memberSymsEager.flatMap(makeMember(_, None, this)))
-
- // all the members that are documented PLUS the members inherited by implicit conversions
- var members: List[MemberImpl] = ownMembers
-
- def templates = members collect { case c: TemplateEntity with MemberEntity => c }
- def methods = members collect { case d: Def => d }
- def values = members collect { case v: Val => v }
- def abstractTypes = members collect { case t: AbstractType => t }
- def aliasTypes = members collect { case t: AliasType => t }
-
- /**
- * This is the final point in the core model creation: no DocTemplates are created after the model has finished, but
- * inherited templates and implicit members are added to the members at this point.
- */
- def completeModel(): Unit = {
- // DFS completion
- // since alias types and abstract types have no own members, there's no reason for them to call completeModel
- if (!sym.isAliasType && !sym.isAbstractType)
- for (member <- members)
- member match {
- case d: DocTemplateImpl => d.completeModel
- case _ =>
- }
-
- members :::= memberSymsLazy.map(modelCreation.createLazyTemplateMember(_, this))
-
- // compute linearization to register subclasses
- linearization
- outgoingImplicitlyConvertedClasses
-
- // the members generated by the symbols in memberSymsEager PLUS the members from the usecases
- val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf.map(_.asInstanceOf[MemberImpl])).distinct
- implicitsShadowing = makeShadowingTable(allMembers, conversions, this)
- // finally, add the members generated by implicit conversions
- members :::= conversions.flatMap(_.memberImpls)
- }
-
- var implicitsShadowing = Map[MemberEntity, ImplicitMemberShadowing]()
-
- lazy val outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversionImpl)] =
- conversions flatMap (conv =>
- if (!implicitExcluded(conv.conversionQualifiedName))
- conv.targetTypeComponents map {
- case pair@(template, tpe) =>
- template match {
- case d: DocTemplateImpl if (d != this) => d.registerImplicitlyConvertibleClass(this, conv)
- case _ => // nothing
- }
- (pair._1, pair._2, conv)
- }
- else List()
- )
-
- override def isTemplate = true
- override def isDocTemplate = true
- private[this] lazy val companionSymbol =
- if (sym.isAliasType || sym.isAbstractType) {
- inTpl.sym.info.member(sym.name.toTermName) match {
- case NoSymbol => NoSymbol
- case s =>
- s.info match {
- case ot: OverloadedType =>
- NoSymbol
- case _ =>
- // that's to navigate from val Foo: FooExtractor to FooExtractor :)
- s.info.resultType.typeSymbol
- }
- }
- }
- else
- sym.companionSymbol
-
- def companion =
- companionSymbol match {
- case NoSymbol => None
- case comSym if !isEmptyJavaObject(comSym) && (comSym.isClass || comSym.isModule) =>
- makeTemplate(comSym) match {
- case d: DocTemplateImpl => Some(d)
- case _ => None
- }
- case _ => None
- }
-
- def constructors: List[MemberImpl with Constructor] = if (isClass) members collect { case d: Constructor => d } else Nil
- def primaryConstructor: Option[MemberImpl with Constructor] = if (isClass) constructors find { _.isPrimary } else None
- override def valueParams =
- // we don't want params on a class (non case class) signature
- if (isCaseClass) primaryConstructor match {
- case Some(const) => const.sym.paramss map (_ map (makeValueParam(_, this)))
- case None => List()
- }
- else List.empty
-
- // These are generated on-demand, make sure you don't call them more than once
- def inheritanceDiagram = makeInheritanceDiagram(this)
- def contentDiagram = makeContentDiagram(this)
-
- def groupSearch[T](extractor: Comment => Option[T]): Option[T] = {
- val comments = comment +: linearizationTemplates.collect { case dtpl: DocTemplateImpl => dtpl.comment }
- comments.flatten.map(extractor).flatten.headOption orElse {
- Option(inTpl) flatMap (_.groupSearch(extractor))
- }
- }
-
- def groupDescription(group: String): Option[Body] = groupSearch(_.groupDesc.get(group)) orElse { if (group == defaultGroup) defaultGroupDesc else None }
- def groupPriority(group: String): Int = groupSearch(_.groupPrio.get(group)) getOrElse { if (group == defaultGroup) defaultGroupPriority else 0 }
- def groupName(group: String): String = groupSearch(_.groupNames.get(group)) getOrElse { if (group == defaultGroup) defaultGroupName else group }
- }
-
- abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
- override def inTemplate = inTpl
- override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- override lazy val (inSource, linearization) = {
- val representive = sym.info.members.find {
- s => s.isPackageObject
- } getOrElse sym
- (inSourceFromSymbol(representive), linearizationFromSymbol(representive))
- }
- def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
- }
-
- abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
-
- abstract class NonTemplateMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
- override val useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
- extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
- override lazy val comment = {
- val inRealTpl =
- /* Variable precedence order for implicitly added members: Take the variable definitions from ...
- * 1. the target of the implicit conversion
- * 2. the definition template (owner)
- * 3. the current template
- */
- if (conversion.isDefined) findTemplateMaybe(conversion.get.toType.typeSymbol) match {
- case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
- case _ => findTemplateMaybe(sym.owner) match {
- case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
- case _ => inTpl
- }
- } else inTpl
- if (inRealTpl != null) thisFactory.comment(sym, None, inRealTpl) else None
- }
-
- override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
- lazy val definitionName = {
- // this contrived name is here just to satisfy some older tests -- if you decide to remove it, be my guest, and
- // also remove property("package object") from test/scaladoc/scalacheck/HtmlFactoryTest.scala so you don't break
- // the test suite...
- val packageObject = if (inPackageObject) ".package" else ""
- if (!conversion.isDefined) optimize(inDefinitionTemplates.head.qualifiedName + packageObject + "#" + name)
- else optimize(conversion.get.conversionQualifiedName + packageObject + "#" + name)
- }
- def isBridge = sym.isBridge
- def isUseCase = useCaseOf.isDefined
- override def byConversion: Option[ImplicitConversionImpl] = conversion
- override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined }
- override def isShadowedImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isShadowed).getOrElse(false)
- override def isAmbiguousImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isAmbiguous).getOrElse(false)
- override def isShadowedOrAmbiguousImplicit = isShadowedImplicit || isAmbiguousImplicit
- }
-
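The nested matches in the comment lookup above implement the 1-2-3 precedence listed in its comment. The same fallback, restated as an orElse chain over made-up stand-ins for the templates:

object CommentPrecedenceSketch {
  type Tpl = String
  val conversionTarget: Option[Tpl] = None          // 1. target of the implicit conversion (absent here)
  val definitionOwner:  Option[Tpl] = Some("owner") // 2. the definition template (owner)
  val currentTemplate:  Tpl         = "current"     // 3. the current template

  // Same precedence as the nested matches above, written as an orElse chain.
  def commentSource: Tpl =
    conversionTarget orElse definitionOwner getOrElse currentTemplate

  def main(args: Array[String]): Unit =
    println(commentSource) // owner
}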
- abstract class NonTemplateParamMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
- useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
- extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) {
- def valueParams = {
- val info = if (!isImplicitlyInherited) sym.info else conversion.get.toType memberInfo sym
- info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
- if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
- }}
- }
- }
-
- abstract class ParameterImpl(val sym: Symbol, val inTpl: TemplateImpl) extends ParameterEntity {
- val name = optimize(sym.nameString)
- }
-
- private trait AliasImpl {
- def sym: Symbol
- def inTpl: TemplateImpl
- def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym)
- }
-
- private trait TypeBoundsImpl {
- def sym: Symbol
- def inTpl: TemplateImpl
- def lo = sym.info.bounds match {
- case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass =>
- Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTpl, sym))
- case _ => None
- }
- def hi = sym.info.bounds match {
- case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass =>
- Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTpl, sym))
- case _ => None
- }
- }
-
- trait HigherKindedImpl extends HigherKinded {
- def sym: Symbol
- def inTpl: TemplateImpl
- def typeParams =
- sym.typeParams map (makeTypeParam(_, inTpl))
- }
- /* ============== MAKER METHODS ============== */
-
- /** This method makes it easier to work with the different kinds of symbols created by scalac by stripping down the
- * package object abstraction and placing members directly in the package.
- *
- * Here's the explanation of what we do. The code:
- *
- * package foo {
- * object `package` {
- * class Bar
- * }
- * }
- *
- * will yield this Symbol structure:
- * +---------+ (2)
- * | |
- * +---------------+ +---------- v ------- | ---+ +--------+ (2)
- * | package foo#1 <---(1)---- module class foo#2 | | | |
- * +---------------+ | +------------------ | -+ | +------------------- v ---+ |
- * | | package object foo#3 <-----(1)---- module class package#4 | |
- * | +----------------------+ | | +---------------------+ | |
- * +--------------------------+ | | class package$Bar#5 | | |
- * | +----------------- | -+ | |
- * +------------------- | ---+ |
- * | |
- * +--------+
- * (1) sourceModule
- * (2) you get out of owners with .owner
- *
- * and normalizeTemplate(Bar.owner) will get us the package, instead of the module class of the package object.
- */
- def normalizeTemplate(aSym: Symbol): Symbol = aSym match {
- case null | rootMirror.EmptyPackage | NoSymbol =>
- normalizeTemplate(RootPackage)
- case ObjectClass =>
- normalizeTemplate(AnyRefClass)
- case _ if aSym.isPackageObject =>
- normalizeTemplate(aSym.owner)
- case _ if aSym.isModuleClass =>
- normalizeTemplate(aSym.sourceModule)
- case _ =>
- aSym
- }
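A toy restatement of the flattening described above, using a hypothetical mini symbol hierarchy rather than the compiler's Symbol: skipping the package-object layer makes a member's owner appear to be the enclosing package.

object NormalizeSketch {
  sealed trait Sym { def owner: Sym; def name: String }
  case object Root extends Sym { def owner = this; def name = "_root_" }
  final case class Pkg(name: String, owner: Sym) extends Sym
  final case class PkgObjClass(name: String, owner: Sym) extends Sym // module class of a package object
  final case class Cls(name: String, owner: Sym) extends Sym

  // Same shape as normalizeTemplate: skip the package-object layer so members
  // appear to live directly in the enclosing package.
  def normalize(s: Sym): Sym = s match {
    case PkgObjClass(_, owner) => normalize(owner)
    case other                 => other
  }

  def main(args: Array[String]): Unit = {
    val foo    = Pkg("foo", Root)
    val pkgObj = PkgObjClass("package", foo)
    val bar    = Cls("Bar", pkgObj)
    println(normalize(bar.owner).name) // foo, not "package"
  }
}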
-
- /**
- * These are all model construction methods. Please do not use them directly: they call each other recursively,
- * starting from makeModel. On the other hand, makeTemplate, makeAnnotation, makeMember and makeType should only be
- * used after the model has been created (modelFinished == true), otherwise assertions will start failing.
- */
- object modelCreation {
-
- def createRootPackage: PackageImpl = docTemplatesCache.get(RootPackage) match {
- case Some(root: PackageImpl) => root
- case _ => modelCreation.createTemplate(RootPackage, null) match {
- case Some(root: PackageImpl) => root
- case _ => sys.error("Scaladoc: Unable to create root package!")
- }
- }
-
- /**
- * Create a template, either a package, class, trait or object
- */
- def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
- // don't call this after the model finished!
- assert(!modelFinished)
-
- def createRootPackageComment: Option[Comment] =
- if(settings.docRootContent.isDefault) None
- else {
- import Streamable._
- Path(settings.docRootContent.value) match {
- case f : File => {
- val rootComment = closing(f.inputStream)(is => parse(slurp(is), "", NoPosition, Option(inTpl)))
- Some(rootComment)
- }
- case _ => None
- }
- }
-
- def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = {
- assert(!modelFinished) // only created BEFORE the model is finished
- if (bSym.isAliasType && bSym != AnyRefClass)
- new DocTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true }
- else if (bSym.isAbstractType)
- new DocTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType { override def isAbstractType = true }
- else if (bSym.isModule)
- new DocTemplateImpl(bSym, inTpl) with Object {}
- else if (bSym.isTrait)
- new DocTemplateImpl(bSym, inTpl) with Trait {}
- else if (bSym.isClass || bSym == AnyRefClass)
- new DocTemplateImpl(bSym, inTpl) with Class {}
- else
- sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a documentable template.")
- }
-
- val bSym = normalizeTemplate(aSym)
- if (docTemplatesCache isDefinedAt bSym)
- return Some(docTemplatesCache(bSym))
-
- /* Three cases of templates:
- * (1) root package -- special cased for bootstrapping
- * (2) package
- * (3) class/object/trait
- */
- if (bSym == RootPackage) // (1)
- Some(new RootPackageImpl(bSym) {
- override lazy val comment = createRootPackageComment
- override val name = "root"
- override def inTemplate = this
- override def toRoot = this :: Nil
- override def qualifiedName = "_root_"
- override def inheritedFrom = Nil
- override def isRootPackage = true
- override lazy val memberSyms =
- (bSym.info.members ++ EmptyPackage.info.members).toList filter { s =>
- s != EmptyPackage && s != RootPackage
- }
- })
- else if (bSym.isPackage) // (2)
- if (settings.skipPackage(makeQualifiedName(bSym)))
- None
- else
- inTpl match {
- case inPkg: PackageImpl =>
- val pack = new PackageImpl(bSym, inPkg) {}
- // Used to check package pruning works:
- //println(pack.qualifiedName)
- if (pack.templates.filter(_.isDocTemplate).isEmpty && pack.memberSymsLazy.isEmpty) {
- droppedPackages += pack
- None
- } else
- Some(pack)
- case _ =>
- sys.error("'" + bSym + "' must be in a package")
- }
- else {
- // no class inheritance at this point
- assert(inOriginalOwner(bSym, inTpl), bSym + " in " + inTpl)
- Some(createDocTemplate(bSym, inTpl))
- }
- }
-
- /**
- * After the model is completed, no more DocTemplateEntities are created.
- * Therefore any symbol that still appears is:
- * - MemberTemplateEntity (created here)
- * - NoDocTemplateEntity (created in makeTemplate)
- */
- def createLazyTemplateMember(aSym: Symbol, inTpl: DocTemplateImpl): MemberImpl = {
-
- // Code is duplicated because the anonymous classes are created statically
- def createNoDocMemberTemplate(bSym: Symbol, inTpl: DocTemplateImpl): MemberTemplateImpl = {
- assert(modelFinished) // only created AFTER the model is finished
- if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule))
- new MemberTemplateImpl(bSym, inTpl) with Object {}
- else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait))
- new MemberTemplateImpl(bSym, inTpl) with Trait {}
- else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass))
- new MemberTemplateImpl(bSym, inTpl) with Class {}
- else
- sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a member template.")
- }
-
- assert(modelFinished)
- val bSym = normalizeTemplate(aSym)
-
- if (docTemplatesCache isDefinedAt bSym)
- docTemplatesCache(bSym)
- else
- docTemplatesCache.get(bSym.owner) match {
- case Some(inTpl) =>
- val mbrs = inTpl.members.collect({ case mbr: MemberImpl if mbr.sym == bSym => mbr })
- assert(mbrs.length == 1)
- mbrs.head
- case _ =>
- // move the class completely to the new location
- createNoDocMemberTemplate(bSym, inTpl)
- }
- }
- }
-
- /** Get the root package */
- def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl]
-
- // TODO: Should be able to override the type
- def makeMember(aSym: Symbol, conversion: Option[ImplicitConversionImpl], inTpl: DocTemplateImpl): List[MemberImpl] = {
-
- def makeMember0(bSym: Symbol, useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
- if (bSym.isGetter && bSym.isLazy)
- Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
- override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor.
- thisFactory.comment(bSym.accessed, None, inTpl.asInstanceOf[DocTemplateImpl]) // This hack should be removed after analyser is fixed.
- override def isLazyVal = true
- })
- else if (bSym.isGetter && bSym.accessed.isMutable)
- Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
- override def isVar = true
- })
- else if (bSym.isMethod && !bSym.hasAccessorFlag && !bSym.isConstructor && !bSym.isModule) {
- val cSym = { // This unsightly hack closes issue #4086.
- if (bSym == definitions.Object_synchronized) {
- val cSymInfo = (bSym.info: @unchecked) match {
- case PolyType(ts, MethodType(List(bp), mt)) =>
- val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info))
- PolyType(ts, MethodType(List(cp), mt))
- }
- bSym.cloneSymbol.setPos(bSym.pos).setInfo(cSymInfo)
- }
- else bSym
- }
- Some(new NonTemplateParamMemberImpl(cSym, conversion, useCaseOf, inTpl) with HigherKindedImpl with Def {
- override def isDef = true
- })
- }
- else if (bSym.isConstructor)
- if (conversion.isDefined)
- None // don't list constructors inherited by implicit conversion
- else
- Some(new NonTemplateParamMemberImpl(bSym, conversion, useCaseOf, inTpl) with Constructor {
- override def isConstructor = true
- def isPrimary = sym.isPrimaryConstructor
- })
- else if (bSym.isGetter) // Scala field accessor or Java field
- Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
- override def isVal = true
- })
- else if (bSym.isAbstractType && !typeShouldDocument(bSym, inTpl))
- Some(new MemberTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType {
- override def isAbstractType = true
- })
- else if (bSym.isAliasType && !typeShouldDocument(bSym, inTpl))
- Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType {
- override def isAliasType = true
- })
- else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl)))
- modelCreation.createTemplate(bSym, inTpl)
- else
- None
- }
-
- if (!localShouldDocument(aSym) || aSym.isModuleClass || aSym.isPackageObject || aSym.isMixinConstructor)
- Nil
- else {
- val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) =>
- docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
- bSym
- }
-
- val member = makeMember0(aSym, None)
- if (allSyms.isEmpty)
- member.toList
- else
- // Use cases replace the original definitions - SI-5054
- allSyms flatMap { makeMember0(_, member) }
- }
- }
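For reference, the use cases consumed above come from the @usecase scaladoc tag, which shows a simplified signature in place of the member's real one. A minimal example of that shape, with made-up names:

trait Coll[A] {
  /** Appends an element.
   *
   *  @usecase def append(elem: A): Coll[A]
   */
  def append[B >: A](elem: B): Coll[B]
}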
-
- def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
- val tplSym = normalizeTemplate(aSym.owner)
- inTpl.members.find(_.sym == aSym)
- }
-
- @deprecated("Use `findLinkTarget` instead.", "2.10.0")
- def findTemplate(query: String): Option[DocTemplateImpl] = {
- assert(modelFinished)
- docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject }
- }
-
- def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = {
- assert(modelFinished)
- docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_))
- }
-
- def makeTemplate(aSym: Symbol): TemplateImpl = makeTemplate(aSym, None)
-
- def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = {
- assert(modelFinished)
-
- def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = {
- val bSym = normalizeTemplate(aSym)
- noDocTemplatesCache.get(bSym) match {
- case Some(noDocTpl) => noDocTpl
- case None => new NoDocTemplateImpl(bSym, inTpl)
- }
- }
-
- findTemplateMaybe(aSym) match {
- case Some(dtpl) =>
- dtpl
- case None =>
- val bSym = normalizeTemplate(aSym)
- makeNoDocTemplate(bSym, if (inTpl.isDefined) inTpl.get else makeTemplate(bSym.owner))
- }
- }
-
- def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
- val aSym = annot.symbol
- new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
- lazy val annotationClass =
- makeTemplate(annot.symbol)
- val arguments = {
- val paramsOpt: Option[List[ValueParam]] = annotationClass match {
- case aClass: DocTemplateEntity with Class =>
- val constr = aClass.constructors collectFirst {
- case c: MemberImpl if c.sym == annot.original.symbol => c
- }
- constr flatMap (_.valueParams.headOption)
- case _ => None
- }
- val argTrees = annot.args map makeTree
- paramsOpt match {
- case Some (params) =>
- params zip argTrees map { case (param, tree) =>
- new ValueArgument {
- def parameter = Some(param)
- def value = tree
- }
- }
- case None =>
- argTrees map { tree =>
- new ValueArgument {
- def parameter = None
- def value = tree
- }
- }
- }
- }
- }
- }
-
- /** */
- def makeTypeParam(aSym: Symbol, inTpl: TemplateImpl): TypeParam =
- new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam {
- def variance: String = {
- if (sym hasFlag Flags.COVARIANT) "+"
- else if (sym hasFlag Flags.CONTRAVARIANT) "-"
- else ""
- }
- }
-
- /** */
- def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl): ValueParam = {
- makeValueParam(aSym, inTpl, aSym.nameString)
- }
-
-
- /** */
- def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl, newName: String): ValueParam =
- new ParameterImpl(aSym, inTpl) with ValueParam {
- override val name = newName
- def defaultValue =
- if (aSym.hasDefault) {
- // units.filter should return only one element
- (currentRun.units filter (_.source.file == aSym.sourceFile)).toList match {
- case List(unit) =>
- // SI-4922 `sym == aSym` is insufficient if `aSym` is a clone of the symbol
- // of the parameter in the tree, as can happen with type parametric methods.
- def isCorrespondingParam(sym: Symbol) = (
- sym != null &&
- sym != NoSymbol &&
- sym.owner == aSym.owner &&
- sym.name == aSym.name &&
- sym.isParamWithDefault
- )
- unit.body find (t => isCorrespondingParam(t.symbol)) collect {
- case ValDef(_,_,_,rhs) if rhs ne EmptyTree => makeTree(rhs)
- }
- case _ => None
- }
- }
- else None
- def resultType =
- makeTypeInTemplateContext(aSym.tpe, inTpl, aSym)
- def isImplicit = aSym.isImplicit
- }
-
- /** */
- def makeTypeInTemplateContext(aType: Type, inTpl: TemplateImpl, dclSym: Symbol): TypeEntity = {
- def ownerTpl(sym: Symbol): Symbol =
- if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
- val tpe =
- if (thisFactory.settings.useStupidTypes.value) aType else {
- def ownerTpl(sym: Symbol): Symbol =
- if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
- val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym
- aType.asSeenFrom(fixedSym.thisType, ownerTpl(dclSym))
- }
- makeType(tpe, inTpl)
- }
-
- /** Get the types of the parents of the current class, ignoring the refinements */
- def makeParentTypes(aType: Type, tpl: Option[MemberTemplateImpl], inTpl: TemplateImpl): List[(TemplateEntity, TypeEntity)] = aType match {
- case RefinedType(parents, defs) =>
- val ignoreParents = Set[Symbol](AnyClass, AnyRefClass, ObjectClass)
- val filtParents =
- // we don't want to expose too many links to AnyRef, that will just be redundant information
- if (tpl.isDefined && { val sym = tpl.get.sym; (!sym.isModule && parents.length < 2) || (sym == AnyValClass) || (sym == AnyRefClass) || (sym == AnyClass) })
- parents
- else
- parents.filterNot((p: Type) => ignoreParents(p.typeSymbol))
-
- /** Returns:
- * - a DocTemplate if the type's symbol is documented
- * - a NoDocTemplateMember if the type's symbol is not documented in its parent but in another template
- * - a NoDocTemplate if the type's symbol is not documented at all */
- def makeTemplateOrMemberTemplate(parent: Type): TemplateImpl = {
- def noDocTemplate = makeTemplate(parent.typeSymbol)
- findTemplateMaybe(parent.typeSymbol) match {
- case Some(tpl) => tpl
- case None => parent match {
- case TypeRef(pre, sym, args) =>
- findTemplateMaybe(pre.typeSymbol) match {
- case Some(tpl) => findMember(parent.typeSymbol, tpl).collect({case t: TemplateImpl => t}).getOrElse(noDocTemplate)
- case None => noDocTemplate
- }
- case _ => noDocTemplate
- }
- }
- }
-
- filtParents.map(parent => {
- val templateEntity = makeTemplateOrMemberTemplate(parent)
- val typeEntity = makeType(parent, inTpl)
- (templateEntity, typeEntity)
- })
- case _ =>
- List((makeTemplate(aType.typeSymbol), makeType(aType, inTpl)))
- }
-
- def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = {
- val stop = if (relativeTo.isDefined) relativeTo.get.ownerChain.toSet else Set[Symbol]()
- var sym1 = sym
- var path = new StringBuilder()
- // var path = List[Symbol]()
-
- while ((sym1 != NoSymbol) && (path.isEmpty || !stop(sym1))) {
- val sym1Norm = normalizeTemplate(sym1)
- if (!sym1.sourceModule.isPackageObject && sym1Norm != RootPackage) {
- if (path.length != 0)
- path.insert(0, ".")
- path.insert(0, sym1Norm.nameString)
- // path::= sym1Norm
- }
- sym1 = sym1.owner
- }
-
- optimize(path.toString)
- //path.mkString(".")
- }
-
- def inOriginalOwner(aSym: Symbol, inTpl: TemplateImpl): Boolean =
- normalizeTemplate(aSym.owner) == normalizeTemplate(inTpl.sym)
-
- def templateShouldDocument(aSym: Symbol, inTpl: DocTemplateImpl): Boolean =
- (aSym.isTrait || aSym.isClass || aSym.isModule || typeShouldDocument(aSym, inTpl)) &&
- localShouldDocument(aSym) &&
- !isEmptyJavaObject(aSym) &&
- // either it's inside the original owner or we can document it later:
- (!inOriginalOwner(aSym, inTpl) || (aSym.isPackageClass || (aSym.sourceFile != null)))
-
- def membersShouldDocument(sym: Symbol, inTpl: TemplateImpl) = {
- // pruning modules that shouldn't be documented
- // Why Symbol.isInitialized? Well, because we need to avoid exploring all the space available to scaladoc
- // from the classpath -- scaladoc is a hog, it will explore everything starting from the root package unless we
- // somehow prune the tree. And isInitialized is a good heuristic for pruning -- if the package was not explored
- // during typer and refchecks, it's not necessary for the current application and there's no need to explore it.
- (!sym.isModule || sym.moduleClass.isInitialized) &&
- // documenting only public and protected members
- localShouldDocument(sym) &&
- // Only this class's constructors are part of its members, inherited constructors are not.
- (!sym.isConstructor || sym.owner == inTpl.sym) &&
- // If the @bridge annotation overrides a normal member, show it
- !isPureBridge(sym)
- }
-
- def isEmptyJavaObject(aSym: Symbol): Boolean =
- aSym.isModule && aSym.isJavaDefined &&
- aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym))
-
- def localShouldDocument(aSym: Symbol): Boolean =
- !aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic
-
- /** Filter '@bridge' methods only if *they don't override non-bridge methods*. See SI-5373 for details */
- def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge)
-
- // the classes that are excluded from the index should also be excluded from the diagrams
- def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName)
-
- // the implicit conversions that are excluded from the pages should not appear in the diagram
- def implicitExcluded(convertorMethod: String): Boolean = settings.hiddenImplicits(convertorMethod)
-
- // whether or not to create a page for an {abstract,alias} type
- def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) =
- (settings.docExpandAllTypes.value && (bSym.sourceFile != null)) ||
- (bSym.isAliasType || bSym.isAbstractType) &&
- { val rawComment = global.expandedDocComment(bSym, inTpl.sym)
- rawComment.contains("@template") || rawComment.contains("@documentable") }
-}
-
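typeShouldDocument above gives an alias or abstract type its own page only when -doc-expand-all-types is set or its raw comment mentions @template or @documentable. A minimal example of a member that would qualify through the tag, with made-up names:

trait Repo {
  /** The identifier type used throughout this API.
   *
   *  @documentable
   */
  type Id = java.util.UUID
}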
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
deleted file mode 100644
index f88251b22e..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ /dev/null
@@ -1,609 +0,0 @@
-/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL
- *
- * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
- *
- * @author Vlad Ureche
- * @author Adriaan Moors
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-import scala.util.matching.Regex
-
-import symtab.Flags
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
-
-/**
- * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
- *
- * Let's take this as an example:
- * {{{
- * object Test {
- * class A
- *
- * class B {
- * def foo = 1
- * }
- *
- * class C extends B {
- * def bar = 2
- * class D
- * }
- *
- * implicit def conv(a: A) = new C
- * }
- * }}}
- *
- * Overview:
- * - scaladoc-ing the above classes, `A` will get two more methods: foo and bar, over its default methods
- * - the nested classes (specifically `D` above), abstract types, type aliases and constructor members are not added to
- * `A` (see makeMember0 in ModelFactory, last 3 cases)
- * - the members added by implicit conversion are always listed under the implicit conversion, not under the class they
- * actually come from (`foo` will be listed as coming from the implicit conversion to `C` instead of `B`) - see
- * `definitionName` in MemberImpl
- *
- * Internals:
- * TODO: Give an overview here
- */
-trait ModelFactoryImplicitSupport {
- thisFactory: ModelFactory with ModelFactoryTypeSupport with CommentFactory with TreeFactory =>
-
- import global._
- import global.analyzer._
- import global.definitions._
- import rootMirror.{RootPackage, RootClass, EmptyPackage, EmptyPackageClass}
- import settings.hardcoded
-
- // debugging:
- val DEBUG: Boolean = settings.docImplicitsDebug.value
- val ERROR: Boolean = true // currently we show all errors
- @inline final def debug(msg: => String) = if (DEBUG) settings.printMsg(msg)
- @inline final def error(msg: => String) = if (ERROR) settings.printMsg(msg)
-
- /** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope.
- * For example, if an implicit conversion requires that there is a Numeric[T] in scope:
- * {{{
- * class A[T]
- * class B extends A[Int]
- * class C extends A[String]
- * implicit def pimpA[T: Numeric](a: A[T]): D
- * }}}
- * For B, no constraints are generated as Numeric[Int] is already in the default scope. On the other hand, for the
- * conversion from C to D, depending on -implicits-show-all, the conversion can:
- * - not be generated at all, since there's no Numeric[String] in scope (if run without -implicits-show-all)
- * - be generated with a *weird* constraint, Numeric[String], as the user might add it by hand (if the flag is enabled)
- */
- class ImplicitNotFound(tpe: Type) extends Exception("No implicit of type " + tpe + " found in scope.")
-
- /* ============== MAKER METHODS ============== */
-
- /**
- * Make the implicit conversion objects
- *
- * A word about the scope of the implicit conversions: currently we look at a very basic context composed of the
- * default Scala imports (Predef._ for example) and the companion object of the current class, if one exists. In the
- * future we might want to extend this to more complex scopes.
- */
- def makeImplicitConversions(sym: Symbol, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
- // Nothing and Null are somewhat special -- they can be transformed by any implicit conversion available in scope.
- // But we don't want that, so we'll simply refuse to find implicit conversions for Nothing and Null
- if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil
- else {
- var context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
-
- val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams)
- var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
- // also keep empty conversions, so they appear in diagrams
- // conversions = conversions.filter(!_.members.isEmpty)
-
- // Filter out specialized conversions from array
- if (sym == ArrayClass)
- conversions = conversions.filterNot((conv: ImplicitConversionImpl) =>
- hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
-
- // Filter out nonsensical conversions from value types
- if (isPrimitiveValueType(sym.tpe))
- conversions = conversions.filter((ic: ImplicitConversionImpl) =>
- hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
-
- // Put the visible conversions in front
- val (ownConversions, commonConversions) =
- conversions.partition(!_.isHiddenConversion)
-
- ownConversions ::: commonConversions
- }
-
- /** makeImplicitConversion performs the heavier lifting to get the implicit listing:
- * - for each possible conversion function (also called view)
- * * figures out the final result of the view (to what is our class transformed?)
- * * figures out the necessary constraints on the type parameters (such as T <: Int) and the context (such as Numeric[T])
- * * lists all inherited members
- *
- * What, in detail:
- * - say we start from a class A[T1, T2, T3, T4]
- * - we have an implicit function (view) in scope:
- * def pimpA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): PimpedA
- * - A is converted to PimpedA ONLY if a couple of constraints are satisfied:
- * * T1 must be equal to Int
- * * T2 must be equal to Foo[Bar[X]]
- * * T3 must be upper bounded by Long
- * * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope
- * - the final type is PimpedA and A therefore inherits a couple of members from PimpedA
- *
- * How?
- * some notes:
- * - Scala's type inference will want to solve all type parameters down to actual types, but we only want constraints
- * to maintain generality
- * - therefore, allViewsFrom wraps type parameters into "untouchable" type variables that only gather constraints,
- * but are never solved down to a type
- * - these must be reverted back to the type parameters and the constraints must be extracted and simplified (this is
- * done by uniteConstraints and boundedTParamsConstraints -- be sure to check them out)
- * - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4]
- * appears as a constraint
- */
- def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
- if (result.tree == EmptyTree) Nil
- else {
- // `result` will contain the type of the view (= implicit conversion method)
- // the search introduces untouchable type variables, but we want to get back to type parameters
- val viewFullType = result.tree.tpe
- // set the previously implicit parameters to being explicit
-
- val (viewSimplifiedType, viewImplicitTypes) = removeImplicitParameters(viewFullType)
-
- // TODO: Isolate this corner case :) - Predef.<%< and put it in the testsuite
- if (viewSimplifiedType.params.length != 1) {
- // This is known to be caused by the `<%<` object in Predef:
- // {{{
- // sealed abstract class <%<[-From, +To] extends (From => To) with Serializable
- // object <%< {
- // implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
- // }
- // }}}
- // so we just won't generate an implicit conversion for implicit methods that only take implicit parameters
- return Nil
- }
-
- // type the view application so we get the exact type of the result (not the formal type)
- val viewTree = result.tree.setType(viewSimplifiedType)
- val appliedTree = new ApplyImplicitView(viewTree, List(Ident("<argument>") setType viewTree.tpe.paramTypes.head))
- val appliedTreeTyped: Tree = {
- val newContext = context.makeImplicit(context.ambiguousErrors)
- newContext.macrosEnabled = false
- val newTyper = global.analyzer.newTyper(newContext)
- newTyper.silent(_.typed(appliedTree, global.analyzer.EXPRmode, WildcardType), false) match {
-
- case global.analyzer.SilentResultValue(t: Tree) => t
- case global.analyzer.SilentTypeError(err) =>
- global.reporter.warning(sym.pos, err.toString)
- return Nil
- }
- }
-
- // now we have the final type:
- val toType = wildcardToNothing(typeVarToOriginOrWildcard(appliedTreeTyped.tpe.finalResultType))
-
- try {
- // Transform bound constraints into scaladoc constraints
- val implParamConstraints = makeImplicitConstraints(viewImplicitTypes, sym, context, inTpl)
- val boundsConstraints = makeBoundedConstraints(sym.typeParams, constrs, inTpl)
- // TODO: no substitution constraints appear in the library and compiler scaladoc. Maybe they can be removed?
- val substConstraints = makeSubstitutionConstraints(result.subst, inTpl)
- val constraints = implParamConstraints ::: boundsConstraints ::: substConstraints
-
- List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl))
- } catch {
- case i: ImplicitNotFound =>
- //println(" Eliminating: " + toType)
- Nil
- }
- }
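To make the constraint machinery above concrete, a self-contained conversion of the shape discussed in the comments, with made-up names: scaladoc would list total on A's page under the conversion via pimpA, constrained by the availability of an implicit Numeric[T].

object ConstraintSketch {
  class A[T](val values: List[T])
  class PimpedA[T](a: A[T]) { def total(implicit num: Numeric[T]): T = a.values.sum }
  implicit def pimpA[T: Numeric](a: A[T]): PimpedA[T] = new PimpedA(a)

  def main(args: Array[String]): Unit =
    println(new A(List(1, 2, 3)).total) // 6, via the pimpA view
}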
-
- def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: DocTemplateImpl): List[Constraint] =
- types.flatMap((tpe:Type) => {
- // TODO: Before creating constraints, map typeVarToOriginOrWildcard on the implicitTypes
- val implType = typeVarToOriginOrWildcard(tpe)
- val qualifiedName = makeQualifiedName(implType.typeSymbol)
-
- var available: Option[Boolean] = None
-
- // see: https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/gm_fr0RKzC4
- //
- // println(implType + " => " + implType.isTrivial)
- // var tpes: List[Type] = List(implType)
- // while (!tpes.isEmpty) {
- // val tpe = tpes.head
- // tpes = tpes.tail
- // tpe match {
- // case TypeRef(pre, sym, args) =>
- // tpes = pre :: args ::: tpes
- // println(tpe + " => " + tpe.isTrivial)
- // case _ =>
- // println(tpe + " (of type" + tpe.getClass + ") => " + tpe.isTrivial)
- // }
- // }
- // println("\n")
-
- // look for type variables in the type. If there are none, we can decide if the implicit is there or not
- if (implType.isTrivial) {
- try {
- context.flushBuffer() /* any errors here should not prevent future findings */
- // TODO: Not sure this is the right thing to do -- seems similar to what scalac should be doing
- val context2 = context.make(context.unit, context.tree, sym.owner, context.scope, context.imports)
- val search = inferImplicit(EmptyTree, tpe, false, false, context2, false)
- context.flushBuffer() /* any errors here should not prevent future findings */
-
- available = Some(search.tree != EmptyTree)
- } catch {
- case _: TypeError =>
- }
- }
-
- available match {
- case Some(true) =>
- Nil
- case Some(false) if (!settings.docImplicitsShowAll.value) =>
- // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String])
- throw new ImplicitNotFound(implType)
- case _ =>
- val typeParamNames = sym.typeParams.map(_.name)
-
- // TODO: This is maybe the worst hack I ever did - it's as dirty as hell, but it seems to work, so until I
- // learn more about symbols, it'll have to do.
- implType match {
- case TypeRef(pre, sym, List(TypeRef(NoPrefix, targ, Nil))) if (typeParamNames contains targ.name) =>
- hardcoded.knownTypeClasses.get(qualifiedName) match {
- case Some(explanation) =>
- List(new KnownTypeClassConstraint {
- val typeParamName = targ.nameString
- lazy val typeExplanation = explanation
- lazy val typeClassEntity = makeTemplate(sym)
- lazy val implicitType: TypeEntity = makeType(implType, inTpl)
- })
- case None =>
- List(new TypeClassConstraint {
- val typeParamName = targ.nameString
- lazy val typeClassEntity = makeTemplate(sym)
- lazy val implicitType: TypeEntity = makeType(implType, inTpl)
- })
- }
- case _ =>
- List(new ImplicitInScopeConstraint{
- lazy val implicitType: TypeEntity = makeType(implType, inTpl)
- })
- }
- }
- })
-
- def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: DocTemplateImpl): List[Constraint] =
- (subst.from zip subst.to) map {
- case (from, to) =>
- new EqualTypeParamConstraint {
- error("Scaladoc implicits: Unexpected type substitution constraint from: " + from + " to: " + to)
- val typeParamName = from.toString
- val rhs = makeType(to, inTpl)
- }
- }
-
- def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: DocTemplateImpl): List[Constraint] =
- (tparams zip constrs) flatMap {
- case (tparam, constr) => {
- uniteConstraints(constr) match {
- case (loBounds, upBounds) => (loBounds filter (_ != NothingClass.tpe), upBounds filter (_ != AnyClass.tpe)) match {
- case (Nil, Nil) =>
- Nil
- case (List(lo), List(up)) if (lo == up) =>
- List(new EqualTypeParamConstraint {
- val typeParamName = tparam.nameString
- lazy val rhs = makeType(lo, inTpl)
- })
- case (List(lo), List(up)) =>
- List(new BoundedTypeParamConstraint {
- val typeParamName = tparam.nameString
- lazy val lowerBound = makeType(lo, inTpl)
- lazy val upperBound = makeType(up, inTpl)
- })
- case (List(lo), Nil) =>
- List(new LowerBoundedTypeParamConstraint {
- val typeParamName = tparam.nameString
- lazy val lowerBound = makeType(lo, inTpl)
- })
- case (Nil, List(up)) =>
- List(new UpperBoundedTypeParamConstraint {
- val typeParamName = tparam.nameString
- lazy val upperBound = makeType(up, inTpl)
- })
- case other =>
- // this is likely an error on the lub/glb side
- error("Scaladoc implicits: Error computing lub/glb for: " + (tparam, constr) + ":\n" + other)
- Nil
- }
- }
- }
- }
-
- /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
-
- class ImplicitConversionImpl(
- val sym: Symbol,
- val convSym: Symbol,
- val toType: Type,
- val constrs: List[Constraint],
- inTpl: DocTemplateImpl)
- extends ImplicitConversion {
-
- def source: DocTemplateEntity = inTpl
-
- def targetType: TypeEntity = makeType(toType, inTpl)
-
- def convertorOwner: TemplateEntity =
- if (convSym != NoSymbol)
- makeTemplate(convSym.owner)
- else {
- error("Scaladoc implicits: " + toString + " = NoSymbol!")
- makeRootPackage
- }
-
- def targetTemplate: Option[TemplateEntity] = toType match {
- // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types
- // such as refinement types because the template can't represent the type correctly (a template corresponds to a
- // package, class, trait or object)
- case t: TypeRef => Some(makeTemplate(t.sym))
- case RefinedType(parents, decls) => None
- case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None
- }
-
- def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl)
-
- def convertorMethod: Either[MemberEntity, String] = {
- var convertor: MemberEntity = null
-
- convertorOwner match {
- case doc: DocTemplateImpl =>
- val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m }
- if (convertors.length == 1)
- convertor = convertors.head
- case _ =>
- }
- if (convertor ne null)
- Left(convertor)
- else
- Right(convSym.nameString)
- }
-
- def conversionShortName = convSym.nameString
-
- def conversionQualifiedName = makeQualifiedName(convSym)
-
- lazy val constraints: List[Constraint] = constrs
-
- lazy val memberImpls: List[MemberImpl] = {
- // Obtain the members inherited by the implicit conversion
- val memberSyms = toType.members.filter(implicitShouldDocument(_)).toList
- val existingSyms = sym.info.members
-
- // Debugging part :)
- debug(sym.nameString + "\n" + "=" * sym.nameString.length())
- debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType)
-
- debug(" -> full type: " + toType)
- if (constraints.length != 0) {
- debug(" -> constraints: ")
- constraints foreach { constr => debug(" - " + constr) }
- }
- debug(" -> members:")
- memberSyms foreach (sym => debug(" - "+ sym.decodedName +" : " + sym.info))
- debug("")
-
- memberSyms.flatMap({ aSym =>
- // we can't just pick up nodes from the original template, although that would be very convenient:
- // they need the byConversion field to be attached to themselves and the types to be transformed by
- // asSeenFrom
-
- // at the same time, the member itself is in the inTpl, not in the new template -- but should pick up
- // variables from the old template. Ugly huh? We'll always create the member inTpl, but it will change
- // the template when expanding variables in the comment :)
- makeMember(aSym, Some(this), inTpl)
- })
- }
-
- lazy val members: List[MemberEntity] = memberImpls
-
- def isHiddenConversion = settings.hiddenImplicits(conversionQualifiedName)
-
- override def toString = "Implcit conversion from " + sym.tpe + " to " + toType + " done by " + convSym
- }
-
- /* ========================= HELPER METHODS ========================== */
- /**
- * Computes the shadowing table for all the members in the implicit conversions
- * @param mbrs All template's members, including usecases and full signature members
- * @param convs All the conversions the template takes part in
- * @param inTpl the usual :)
- */
- def makeShadowingTable(mbrs: List[MemberImpl],
- convs: List[ImplicitConversionImpl],
- inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = {
- assert(modelFinished)
-
- var shadowingTable = Map[MemberEntity, ImplicitMemberShadowing]()
-
- for (conv <- convs) {
- val otherConvs = convs.filterNot(_ == conv)
-
- for (member <- conv.memberImpls) {
- // for each member in our list
- val sym1 = member.sym
- val tpe1 = conv.toType.memberInfo(sym1)
-
- // check if it's shadowed by a member in the original class
- var shadowedBySyms: List[Symbol] = List()
- for (mbr <- mbrs) {
- val sym2 = mbr.sym
- if (sym1.name == sym2.name) {
- val shadowed = !settings.docImplicitsSoundShadowing.value || {
- val tpe2 = inTpl.sym.info.memberInfo(sym2)
- !isDistinguishableFrom(tpe1, tpe2)
- }
- if (shadowed)
- shadowedBySyms ::= sym2
- }
- }
-
- val shadowedByMembers = mbrs.filter((mb: MemberImpl) => shadowedBySyms.contains(mb.sym))
-
- // check if it's shadowed by another member
- var ambiguousByMembers: List[MemberEntity] = List()
- for (conv <- otherConvs)
- for (member2 <- conv.memberImpls) {
- val sym2 = member2.sym
- if (sym1.name == sym2.name) {
- val tpe2 = conv.toType.memberInfo(sym2)
- // Ambiguity should be an equivalence relation
- val ambiguated = !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
- if (ambiguated)
- ambiguousByMembers ::= member2
- }
- }
-
- // we finally have the shadowing info
- val shadowing = new ImplicitMemberShadowing {
- def shadowingMembers: List[MemberEntity] = shadowedByMembers
- def ambiguatingMembers: List[MemberEntity] = ambiguousByMembers
- }
-
- shadowingTable += (member -> shadowing)
- }
- }
-
- shadowingTable
- }
-
-
- /**
- * uniteConstraints takes a TypeConstraint instance and simplifies the constraints inside
- *
- * Normally TypeConstraint contains multiple lower and upper bounds, and we want to reduce this to a lower and an
- * upper bound. Here are a few catches we need to be aware of:
- * - before finding a view (implicit method in scope that maps class A[T1,T2,.. Tn] to something else) the type
- * parameters are transformed into "untouchable" type variables so that type inference does not attempt to
- * fully solve them down to a type but rather constrains them on both sides just enough for the view to be
- * applicable -- now, we want to transform those type variables back to the original type parameters
- * - some of the bounds fail type inference and therefore refer to Nothing => when performing unification (lub, glb)
- * they start looking ugly => we (unsoundly) transform Nothing to WildcardType so we fool the unification algorithms
- * into thinking there's nothing there
- * - we don't want the wildcard types surviving the unification so we replace them back to Nothings
- */
- def uniteConstraints(constr: TypeConstraint): (List[Type], List[Type]) =
- try {
- (List(wildcardToNothing(lub(constr.loBounds map typeVarToOriginOrWildcard))),
- List(wildcardToNothing(glb(constr.hiBounds map typeVarToOriginOrWildcard))))
- } catch {
- // does this actually ever happen? (probably when type vars occur in the bounds)
- case x: Throwable => (constr.loBounds.distinct, constr.hiBounds.distinct)
- }
-
- /**
- * Make implicits explicit - Not used currently
- */
- object implicitToExplicit extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case MethodType(params, resultType) =>
- MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
- case other =>
- other
- }
- }
-
- /**
- * removeImplicitParameters transforms implicit parameters from the view result type into constraints and
- * returns the simplified type of the view
- *
- * for the example view:
- * implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
- * the implicit view result type is:
- * (a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
- * and the simplified type will be:
- * MyClass[T] => PimpedMyClass[T]
- */
- def removeImplicitParameters(viewType: Type): (Type, List[Type]) = {
-
- val params = viewType.paramss.flatten
- val (normalParams, implParams) = params.partition(!_.isImplicit)
- val simplifiedType = MethodType(normalParams, viewType.finalResultType)
- val implicitTypes = implParams.map(_.tpe)
-
- (simplifiedType, implicitTypes)
- }
-
- /**
- * typeVarToOriginOrWildcard transforms the "untouchable" type variables into either their origins (the original
- * type parameters) or into wildcard types if nothing matches
- */
- object typeVarToOriginOrWildcard extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case tv: TypeVar =>
- if (tv.constr.inst.typeSymbol == NothingClass)
- WildcardType
- else
- tv.origin //appliedType(tv.origin.typeConstructor, tv.typeArgs map this)
- case other =>
- if (other.typeSymbol == NothingClass)
- WildcardType
- else
- other
- }
- }
-
- /**
- * wildcardToNothing transforms wildcard types back to Nothing
- */
- object wildcardToNothing extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case WildcardType =>
- NothingClass.tpe
- case other =>
- other
- }
- }
-
- /** implicitShouldDocument decides whether a member inherited by implicit conversion should be documented */
- def implicitShouldDocument(aSym: Symbol): Boolean = {
- // We shouldn't document:
- // - constructors
- // - common methods (in Any, AnyRef, Object) as they are automatically removed
- // - private and protected members (not accessible following an implicit conversion)
- // - members starting with _ (usually reserved for internal stuff)
- localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != AnyValClass) &&
- (aSym.owner != AnyClass) && (aSym.owner != ObjectClass) &&
- (!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) &&
- (aSym.isMethod || aSym.isGetter || aSym.isSetter) &&
- (aSym.nameString != "getClass")
- }
-
- /* To put it very bluntly: checks if you can call an implicitly added method with t1 when t2 is already there in the
- * class. We assume the names of the two members coincide.
- *
- * The trick here is that the resultType does not matter - the condition for removal is that the paramss have the same
- * structure (A => B => C may not override (A, B) => C) and that all the parameter types of the implicit
- * conversion's member are subtypes of the corresponding parameter types of the parent member. */
- def isDistinguishableFrom(t1: Type, t2: Type): Boolean = {
- // Vlad: I tried using matches but it's not exactly what we need:
- // (p: AnyRef)AnyRef matches ((t: String)AnyRef returns false -- but we want that to be true
- // !(t1 matches t2)
- if (t1.paramss.map(_.length) == t2.paramss.map(_.length)) {
- for ((t1p, t2p) <- t1.paramss.flatten zip t2.paramss.flatten)
- if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p))
- return true // if on the corresponding parameter you give a type that is in t1 but not in t2
- // def foo(a: Either[Int, Double]): Int = 3
- // def foo(b: Left[T1]): Int = 6
- // a.foo(Right(4.5d)) prints out 3 :)
- false
- } else true // the member structure is different foo(3, 5) vs foo(3)(5)
- }
-}
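
For reference, here is a minimal standalone sketch of the isDistinguishableFrom logic deleted above, with plain Strings standing in for compiler Types and a caller-supplied subtype predicate in place of isSubType; the object, type alias and example inputs are illustrative only and not part of the scaladoc model API.

object DistinguishSketch {
  // a member's parameter lists, e.g. (A, B)(C) becomes List(List("A", "B"), List("C"))
  type ParamLists = List[List[String]]

  // distinguishable when the parameter-list shapes differ, or when some corresponding
  // parameter type of t1 is not a subtype of t2's (mirroring the deleted method)
  def isDistinguishableFrom(t1: ParamLists, t2: ParamLists)(isSubType: (String, String) => Boolean): Boolean =
    if (t1.map(_.length) == t2.map(_.length))
      (t1.flatten zip t2.flatten).exists { case (p1, p2) => !isSubType(p1, p2) }
    else
      true // different shapes, e.g. foo(3, 5) vs foo(3)(5)

  def main(args: Array[String]): Unit = {
    // toy subtype relation: String <: AnyRef, plus reflexivity
    val subtypes = Set("String" -> "AnyRef", "String" -> "String", "AnyRef" -> "AnyRef")
    val isSub = (a: String, b: String) => subtypes((a, b))
    println(isDistinguishableFrom(List(List("AnyRef")), List(List("String")))(isSub)) // true
    println(isDistinguishableFrom(List(List("String")), List(List("AnyRef")))(isSub)) // false
  }
}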
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
deleted file mode 100644
index 844a509b7e..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
+++ /dev/null
@@ -1,326 +0,0 @@
-/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
-
-package scala.tools.nsc
-package doc
-package model
-
-import base._
-import diagram._
-
-import scala.collection._
-import scala.util.matching.Regex
-
-import symtab.Flags
-
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
-
-/** This trait extracts all required information for documentation from compilation units */
-trait ModelFactoryTypeSupport {
- thisFactory: ModelFactory
- with ModelFactoryImplicitSupport
- with ModelFactoryTypeSupport
- with DiagramFactory
- with CommentFactory
- with TreeFactory
- with MemberLookup =>
-
- import global._
- import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
- import rootMirror.{ RootPackage, RootClass, EmptyPackage }
-
- protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
-
- /** */
- def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = {
- def templatePackage = closestPackage(inTpl.sym)
-
- def createTypeEntity = new TypeEntity {
- private var nameBuffer = new StringBuilder
- private var refBuffer = new immutable.TreeMap[Int, (LinkTo, Int)]
- private def appendTypes0(types: List[Type], sep: String): Unit = types match {
- case Nil =>
- case tp :: Nil =>
- appendType0(tp)
- case tp :: tps =>
- appendType0(tp)
- nameBuffer append sep
- appendTypes0(tps, sep)
- }
-
- private def appendType0(tpe: Type): Unit = tpe match {
- /* Type refs */
- case tp: TypeRef if definitions.isFunctionType(tp) =>
- val args = tp.normalize.typeArgs
- nameBuffer append '('
- appendTypes0(args.init, ", ")
- nameBuffer append ") ā‡’ "
- appendType0(args.last)
- case tp: TypeRef if definitions.isScalaRepeatedParamType(tp) =>
- appendType0(tp.args.head)
- nameBuffer append '*'
- case tp: TypeRef if definitions.isByNameParamType(tp) =>
- nameBuffer append "ā‡’ "
- appendType0(tp.args.head)
- case tp: TypeRef if definitions.isTupleType(tp) =>
- val args = tp.normalize.typeArgs
- nameBuffer append '('
- appendTypes0(args, ", ")
- nameBuffer append ')'
- case TypeRef(pre, aSym, targs) =>
- val preSym = pre.widen.typeSymbol
-
- // SI-3314/SI-4888: Classes, Traits and Types can be inherited from one template to another:
- // class Enum { abstract class Value }
- // class Day extends Enum { object Mon extends Value /*...*/ }
- // ===> in such cases we have several options:
- // (0) if there's no inheritance taking place (Enum#Value) we can link to the template directly
- // (1) if we generate the doc template for Day, we can link to the correct member
- // (2) If the symbol comes from an external library for which we know the documentation URL, point to it.
- // (3) if we don't generate the doc template, we should at least indicate the correct prefix in the tooltip
- val bSym = normalizeTemplate(aSym)
- val owner =
- if ((preSym != NoSymbol) && /* it needs a prefix */
- (preSym != bSym.owner) && /* prefix is different from owner */
- (aSym == bSym)) /* normalization doesn't play tricks on us */
- preSym
- else
- bSym.owner
-
- val link =
- findTemplateMaybe(bSym) match {
- case Some(bTpl) if owner == bSym.owner =>
- // (0) the owner's class is linked AND has a template - lovely
- bTpl match {
- case dtpl: DocTemplateEntity => new LinkToTpl(dtpl)
- case _ => new Tooltip(bTpl.qualifiedName)
- }
- case _ =>
- val oTpl = findTemplateMaybe(owner)
- (oTpl, oTpl flatMap (findMember(bSym, _))) match {
- case (Some(oTpl), Some(bMbr)) =>
- // (1) the owner's class
- LinkToMember(bMbr, oTpl)
- case _ =>
- val name = makeQualifiedName(bSym)
- if (!bSym.owner.isPackage)
- Tooltip(name)
- else
- findExternalLink(bSym, name).getOrElse (
- // (3) if we could find neither the owner nor an external URL to link to, show a tooltip with the qualified name
- Tooltip(name)
- )
- }
- }
-
- // SI-4360 Showing prefixes when necessary
- // We check whether there's any directly accessible type with the same name in the current template OR if the
- // type is inherited from one template to another. There may be multiple symbols with the same name in scope,
- // but we won't show the prefix if our symbol is among them, only if *it's not* -- that's equal to showing
- // the prefix only for ambiguous references, not for overloaded ones.
- def needsPrefix: Boolean = {
- if ((owner != bSym.owner || preSym.isRefinementClass) && (normalizeTemplate(owner) != inTpl.sym))
- return true
- // don't get tricked into prefixing method type params and existentials:
- // I tried several tricks BUT adding the method for which I'm creating the type => that simply won't scale,
- // as ValueParams are independent of their parent member, and I really don't want to add this information to
- // all terms, as we're already over the allowed memory footprint
- if (aSym.isTypeParameterOrSkolem || aSym.isExistentiallyBound /* existential or existential skolem */)
- return false
-
- for (tpl <- inTpl.sym.ownerChain) {
- tpl.info.member(bSym.name) match {
- case NoSymbol =>
- // No syms with that name, look further inside the owner chain
- case sym =>
- // Symbol found -- either the correct symbol, another one OR an overloaded alternative
- if (sym == bSym)
- return false
- else sym.info match {
- case OverloadedType(owner, alternatives) =>
- return alternatives.contains(bSym)
- case _ =>
- return true
- }
- }
- }
- // if it's not found in the owner chain, we can safely leave out the prefix
- false
- }
-
- val prefix =
- if (!settings.docNoPrefixes.value && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) {
- if (!owner.isRefinementClass) {
- val qName = makeQualifiedName(owner, Some(inTpl.sym))
- if (qName != "") qName + "." else ""
- }
- else {
- nameBuffer append "("
- appendType0(pre)
- nameBuffer append ")#"
- "" // we already appended the prefix
- }
- } else ""
-
- //DEBUGGING:
- //if (makeQualifiedName(bSym) == "pack1.A") println("needsPrefix(" + bSym + ", " + owner + ", " + inTpl.qualifiedName + ") => " + needsPrefix + " and prefix=" + prefix)
-
- val name = prefix + bSym.nameString
- val pos0 = nameBuffer.length
- refBuffer += pos0 -> ((link, name.length))
- nameBuffer append name
-
- if (!targs.isEmpty) {
- nameBuffer append '['
- appendTypes0(targs, ", ")
- nameBuffer append ']'
- }
- /* Refined types */
- case RefinedType(parents, defs) =>
- val ignoreParents = Set[Symbol](AnyClass, ObjectClass)
- val filtParents = parents filterNot (x => ignoreParents(x.typeSymbol)) match {
- case Nil => parents
- case ps => ps
- }
- appendTypes0(filtParents, " with ")
- // XXX Still todo: properly printing refinements.
- // Since I didn't know how to go about displaying a multi-line type, I went with
- // printing single method refinements (which should be the most common) and printing
- // the number of members if there are more.
- defs.toList match {
- case Nil => ()
- case x :: Nil => nameBuffer append (" { " + x.defString + " }")
- case xs => nameBuffer append (" { ... /* %d definitions in type refinement */ }" format xs.size)
- }
- /* Eval-by-name types */
- case NullaryMethodType(result) =>
- nameBuffer append '⇒'
- appendType0(result)
-
- /* Polymorphic types */
- case PolyType(tparams, result) => assert(tparams.nonEmpty)
- def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else
- tps.map{tparam =>
- tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams)
- }.mkString("[", ", ", "]")
- nameBuffer append typeParamsToString(tparams)
- appendType0(result)
-
- case et@ExistentialType(quantified, underlying) =>
-
- def appendInfoStringReduced(sym: Symbol, tp: Type): Unit = {
- if (sym.isType && !sym.isAliasType && !sym.isClass) {
- tp match {
- case PolyType(tparams, _) =>
- nameBuffer append "["
- appendTypes0(tparams.map(_.tpe), ", ")
- nameBuffer append "]"
- case _ =>
- }
- tp.resultType match {
- case rt @ TypeBounds(_, _) =>
- appendType0(rt)
- case rt =>
- nameBuffer append " <: "
- appendType0(rt)
- }
- } else {
- // fallback to the Symbol infoString
- nameBuffer append sym.infoString(tp)
- }
- }
-
- def appendClauses = {
- nameBuffer append " forSome {"
- var first = true
- val qset = quantified.toSet
- for (sym <- quantified) {
- if (!first) { nameBuffer append ", " } else first = false
- if (sym.isSingletonExistential) {
- nameBuffer append "val "
- nameBuffer append tpnme.dropSingletonName(sym.name)
- nameBuffer append ": "
- appendType0(dropSingletonType(sym.info.bounds.hi))
- } else {
- if (sym.flagString != "") nameBuffer append (sym.flagString + " ")
- if (sym.keyString != "") nameBuffer append (sym.keyString + " ")
- nameBuffer append sym.varianceString
- nameBuffer append sym.nameString
- appendInfoStringReduced(sym, sym.info)
- }
- }
- nameBuffer append "}"
- }
-
- underlying match {
- case TypeRef(pre, sym, args) if et.isRepresentableWithWildcards =>
- appendType0(typeRef(pre, sym, Nil))
- nameBuffer append "["
- var first = true
- val qset = quantified.toSet
- for (arg <- args) {
- if (!first) { nameBuffer append ", " } else first = false
- arg match {
- case TypeRef(_, sym, _) if (qset contains sym) =>
- nameBuffer append "_"
- appendInfoStringReduced(sym, sym.info)
- case arg =>
- appendType0(arg)
- }
- }
- nameBuffer append "]"
- case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
- nameBuffer append "("
- appendType0(underlying)
- nameBuffer append ")"
- appendClauses
- case _ =>
- appendType0(underlying)
- appendClauses
- }
-
- case tb@TypeBounds(lo, hi) =>
- if (tb.lo != TypeBounds.empty.lo) {
- nameBuffer append " >: "
- appendType0(lo)
- }
- if (tb.hi != TypeBounds.empty.hi) {
- nameBuffer append " <: "
- appendType0(hi)
- }
- // case tpen: ThisType | SingleType | SuperType =>
- // if (tpen.isInstanceOf[ThisType] && tpen.asInstanceOf[ThisType].sym.isEffectiveRoot) {
- // appendType0 typeRef(NoPrefix, sym, Nil)
- // } else {
- // val underlying =
- // val pre = underlying.typeSymbol.skipPackageObject
- // if (pre.isOmittablePrefix) pre.fullName + ".type"
- // else prefixString + "type"
- case tpen@ThisType(sym) =>
- appendType0(typeRef(NoPrefix, sym, Nil))
- nameBuffer append ".this"
- if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type"
- case tpen@SuperType(thistpe, supertpe) =>
- nameBuffer append "super["
- appendType0(supertpe)
- nameBuffer append "]"
- case tpen@SingleType(pre, sym) =>
- appendType0(typeRef(pre, sym, Nil))
- if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type"
- case tpen =>
- nameBuffer append tpen.toString
- }
- appendType0(aType)
- val refEntity = refBuffer
- val name = optimize(nameBuffer.toString)
- nameBuffer = null
- }
-
- // SI-4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the
- // same type based on the template the type is shown in.
- if (settings.docNoPrefixes.value)
- typeCache.getOrElseUpdate(aType, createTypeEntity)
- else createTypeEntity
- }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala b/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
deleted file mode 100644
index 5b4ec4a40b..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Chris James
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-
-
-/** A fragment of code. */
-abstract class TreeEntity {
-
- /** The human-readable representation of this abstract syntax tree. */
- def expression: String
-
- /** Maps which parts of this syntax tree's name reference entities. The map is indexed by the position of the first
- * character that references some entity, and contains the entity and the position of the last referenced
- * character. The referenced character ranges do not overlap or nest. The map is sorted by position. */
- def refEntity: SortedMap[Int, (Entity, Int)]
-
- /** The human-readable representation of this abstract syntax tree. */
- override def toString = expression
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
deleted file mode 100755
index fdad84d0bc..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-import scala.reflect.internal.util.{RangePosition, OffsetPosition, SourceFile}
-
- /** The goal of this trait is, using makeTree,
- * to browse a tree in order to
- * 1- obtain the String of the complete tree (tree.expression)
- * 2- fill in the references used to create hyperlinks later in html.pageTemplate
- *
- * It is applied in ModelFactory => makeTree
- *
- */
-
-trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
-
- val global: Global
- import global._
-
- def makeTree(rhs: Tree): TreeEntity = {
-
- var expr = new StringBuilder
- var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
-
- rhs.pos match {
- case pos: RangePosition => {
- val source: SourceFile = pos.source
- val firstIndex = pos.startOrPoint
- val lastIndex = pos.endOrPoint
-
- assert(firstIndex < lastIndex, "Invalid position indices for tree " + rhs + " (" + firstIndex + ", " + lastIndex + ")")
- expr.appendAll(source.content, firstIndex, lastIndex - firstIndex)
-
- val traverser = new Traverser {
-
- /** Finds the Entity on which we will later create a link,
- * and stores it in tree.refs with its position
- */
- def makeLink(rhs: Tree){
- var start = pos.startOrPoint - firstIndex
- val end = pos.endOrPoint - firstIndex
- if(start != end) {
- var asym = rhs.symbol
- if (asym.isClass) makeTemplate(asym) match{
- case docTmpl: DocTemplateImpl =>
- refs += ((start, (docTmpl,end)))
- case _ =>
- }
- else if (asym.isTerm && asym.owner.isClass){
- if (asym.isSetter) asym = asym.getter(asym.owner)
- makeTemplate(asym.owner) match {
- case docTmpl: DocTemplateImpl =>
- val mbrs: Option[MemberImpl] = findMember(asym, docTmpl)
- mbrs foreach { mbr => refs += ((start, (mbr,end))) }
- case _ =>
- }
- }
- }
- }
- /**
- * Goes through the tree and makes links when a Select occurs.
- * The case of New(_) is ignored because the object we want to create a link on
- * will be reached through recursion and we don't want a link on the "new" string.
- * If a link is not created, its case is probably not handled here.
- */
- override def traverse(tree: Tree) = tree match {
- case Select(qualifier, name) =>
- qualifier match {
- case New(_) =>
- case _ => makeLink(tree)
- }
- traverse(qualifier)
- case Ident(_) => makeLink(tree)
- case _ =>
- super.traverse(tree)
- }
- }
-
- traverser.traverse(rhs)
-
- new TreeEntity {
- val expression = expr.toString
- val refEntity = refs
- }
- }
- case _ =>
- new TreeEntity {
- val expression = rhs.toString
- val refEntity = new immutable.TreeMap[Int, (Entity, Int)]
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala b/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
deleted file mode 100644
index cf5c1fb3fb..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-import scala.collection._
-
-/** A type. Note that types and templates contain the same information only for the simplest types. For example, a type
- * defines how a template's type parameters are instantiated (as in `List[Cow]`), what the template's prefix is
- * (as in `johnsFarm.Cow`), and supports compound or structural types. */
-abstract class TypeEntity {
-
- /** The human-readable representation of this type. */
- def name: String
-
- /** Maps which parts of this type's name reference entities. The map is indexed by the position of the first
- * character that references some entity, and contains the entity and the position of the last referenced
- * character. The referenced character ranges do not overlap or nest. The map is sorted by position. */
- def refEntity: SortedMap[Int, (base.LinkTo, Int)]
-
- /** The human-readable representation of this type. */
- override def toString = name
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala b/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala
deleted file mode 100644
index f712869a4b..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Gilles Dubochet
- */
-
-package scala.tools.nsc
-package doc
-package model
-
-
-/** A value that is passed as an argument to a value parameter. */
-trait ValueArgument {
-
- /** The parameter as argument to which this value is passed, if it is known. */
- def parameter: Option[ValueParam]
-
- /** The expression that calculates the value. */
- def value: TreeEntity
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala b/src/compiler/scala/tools/nsc/doc/model/Visibility.scala
deleted file mode 100644
index 22580805aa..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Gilles Dubochet
- */
-
-package scala.tools.nsc
-package doc
-package model
-
- /** A type that represents the visibility of members. */
-sealed trait Visibility {
- def isProtected: Boolean = false
- def isPublic: Boolean = false
-}
-
-/** The visibility of `private[this]` members. */
-case class PrivateInInstance() extends Visibility
-
-/** The visibility of `protected[this]` members. */
-case class ProtectedInInstance() extends Visibility {
- override def isProtected = true
-}
-
- /** The visibility of `private[owner]` members. An unqualified private member
- * is encoded with `owner` equal to the member's `inTemplate`. */
-case class PrivateInTemplate(owner: TemplateEntity) extends Visibility
-
-/** The visibility of `protected[owner]` members. An unqualified protected
- * member is encoded with `owner` equal to the member's `inTemplate`.
- * Note that whilst the member is visible in any template owned by `owner`,
- * it is only visible in subclasses of the member's `inTemplate`. */
-case class ProtectedInTemplate(owner: TemplateEntity) extends Visibility {
- override def isProtected = true
-}
-
-/** The visibility of public members. */
-case class Public() extends Visibility {
- override def isPublic = true
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
deleted file mode 100644
index c2aa1f17f3..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
+++ /dev/null
@@ -1,146 +0,0 @@
-package scala.tools.nsc.doc
-package model
-package diagram
-
-import model._
-
-/**
- * The diagram base classes
- *
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-abstract class Diagram {
- def nodes: List[Node]
- def edges: List[(Node, List[Node])]
- def isContentDiagram = false // Implemented by ContentDiagram
- def isInheritanceDiagram = false // Implemented by InheritanceDiagram
- def depthInfo: DepthInfo
-}
-
-case class ContentDiagram(nodes:List[/*Class*/Node], edges:List[(Node, List[Node])]) extends Diagram {
- override def isContentDiagram = true
- lazy val depthInfo = new ContentDiagramDepth(this)
-}
-
-/** A class diagram */
-case class InheritanceDiagram(thisNode: ThisNode,
- superClasses: List[/*Class*/Node],
- subClasses: List[/*Class*/Node],
- incomingImplicits: List[ImplicitNode],
- outgoingImplicits: List[ImplicitNode]) extends Diagram {
- def nodes = thisNode :: superClasses ::: subClasses ::: incomingImplicits ::: outgoingImplicits
- def edges = (thisNode -> (superClasses ::: outgoingImplicits)) ::
- (subClasses ::: incomingImplicits).map(_ -> List(thisNode))
-
- override def isInheritanceDiagram = true
- lazy val depthInfo = new DepthInfo {
- def maxDepth = 3
- def nodeDepth(node: Node) =
- if (node == thisNode) 1
- else if (superClasses.contains(node)) 0
- else if (subClasses.contains(node)) 2
- else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1
- else -1
- }
-}
-
-trait DepthInfo {
- /** Gives the maximum depth */
- def maxDepth: Int
- /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */
- def nodeDepth(node: Node): Int
-}
-
-abstract class Node {
- def name = tpe.name
- def tpe: TypeEntity
- def tpl: Option[TemplateEntity]
- /** shortcut to get a DocTemplateEntity */
- def doctpl: Option[DocTemplateEntity] = tpl match {
- case Some(tpl) => tpl match {
- case d: DocTemplateEntity => Some(d)
- case _ => None
- }
- case _ => None
- }
- /* shortcuts to find the node type without matching */
- def isThisNode = false
- def isNormalNode = false
- def isClassNode = if (tpl.isDefined) (tpl.get.isClass || tpl.get.qualifiedName == "scala.AnyRef") else false
- def isTraitNode = if (tpl.isDefined) tpl.get.isTrait else false
- def isObjectNode= if (tpl.isDefined) tpl.get.isObject else false
- def isTypeNode = if (doctpl.isDefined) doctpl.get.isAbstractType || doctpl.get.isAliasType else false
- def isOtherNode = !(isClassNode || isTraitNode || isObjectNode || isTypeNode)
- def isImplicitNode = false
- def isOutsideNode = false
- def tooltip: Option[String]
-}
-
-// different matchers, allowing you to use the pattern matcher against any node
-// NOTE: A ThisNode or ImplicitNode can at the same time be ClassNode/TraitNode/OtherNode, not exactly according to
-// case class specification -- thus a complete match would be:
-// node match {
-// case ThisNode(tpe, _) => /* case for this node, you can still use .isClass, .isTrait and .isOther */
-// case ImplicitNode(tpe, _) => /* case for an implicit node, you can still use .isClass, .isTrait and .isOther */
-// case _ => node match {
-// case ClassNode(tpe, _) => /* case for a non-this, non-implicit Class node */
-// case TraitNode(tpe, _) => /* case for a non-this, non-implicit Trait node */
-// case OtherNode(tpe, _) => /* case for a non-this, non-implicit Other node */
-// }
-// }
-object Node { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = Some((n.tpe, n.tpl)) }
-object ClassNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isClassNode) Some((n.tpe, n.tpl)) else None }
-object TraitNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTraitNode) Some((n.tpe, n.tpl)) else None }
-object TypeNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTypeNode) Some((n.tpe, n.tpl)) else None }
-object ObjectNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isObjectNode) Some((n.tpe, n.tpl)) else None }
-object OutsideNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOutsideNode) Some((n.tpe, n.tpl)) else None }
-object OtherNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOtherNode) Some((n.tpe, n.tpl)) else None }
-
-
-
-/** The node for the current class */
-case class ThisNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isThisNode = true }
-
-/** The usual node */
-case class NormalNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isNormalNode = true }
-
- /** A class or trait the this-node can be converted to by an implicit conversion
- * TODO: I think it makes more sense to use the tpe links to templates instead of the TemplateEntity for implicit nodes
- * since some implicit conversions convert the class to complex types that cannot be represented as a single template
- */
-case class ImplicitNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isImplicitNode = true }
-
-/** An outside node is shown in packages when a class from a different package makes it to the package diagram due to
- * its relation to a class in the template (see @contentDiagram hideInheritedNodes annotation) */
-case class OutsideNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isOutsideNode = true }
-
-
-// Computing and offering node depth information
-class ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo {
- private[this] var _maxDepth = 0
- private[this] var _nodeDepth = Map[Node, Int]()
- private[this] var seedNodes = Set[Node]()
- private[this] val invertedEdges: Map[Node, List[Node]] =
- pack.edges.flatMap({case (node: Node, outgoing: List[Node]) => outgoing.map((_, node))}).groupBy(_._1).map({case (k, values) => (k, values.map(_._2))}).withDefaultValue(Nil)
- private[this] val directEdges: Map[Node, List[Node]] = pack.edges.toMap.withDefaultValue(Nil)
-
- // seed base nodes, to minimize noise - they can't all have parents, else there would only be cycles
- seedNodes ++= pack.nodes.filter(directEdges(_).isEmpty)
-
- while (!seedNodes.isEmpty) {
- var newSeedNodes = Set[Node]()
- for (node <- seedNodes) {
- val depth = 1 + (-1 :: directEdges(node).map(_nodeDepth.getOrElse(_, -1))).max
- if (depth != _nodeDepth.getOrElse(node, -1)) {
- _nodeDepth += (node -> depth)
- newSeedNodes ++= invertedEdges(node)
- if (depth > _maxDepth) _maxDepth = depth
- }
- }
- seedNodes = newSeedNodes
- }
-
- val maxDepth = _maxDepth
- def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1)
-} \ No newline at end of file
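
For reference, a standalone sketch of the fixed-point depth computation performed by ContentDiagramDepth above, with plain Strings in place of diagram nodes; all names here are illustrative and not part of the scaladoc model API.

object DepthSketch {
  // edges map a node to its parents, like the (node, superclasses) pairs above
  def depthsOf(nodes: Set[String], edges: Map[String, List[String]]): Map[String, Int] = {
    val direct = edges.withDefaultValue(Nil)
    val inverted = edges.toList
      .flatMap { case (child, parents) => parents.map(_ -> child) }
      .groupBy(_._1).map { case (k, vs) => k -> vs.map(_._2) }
      .withDefaultValue(Nil)

    var depth = Map.empty[String, Int]
    // seed with parentless nodes (depth 0) and propagate towards the children
    var seeds = nodes.filter(direct(_).isEmpty)
    while (seeds.nonEmpty) {
      var next = Set.empty[String]
      for (node <- seeds) {
        val d = 1 + (-1 :: direct(node).map(depth.getOrElse(_, -1))).max
        if (d != depth.getOrElse(node, -1)) {
          depth += node -> d
          next ++= inverted(node) // the children may need a deeper depth now
        }
      }
      seeds = next
    }
    depth
  }

  def main(args: Array[String]): Unit = {
    val edges = Map("B" -> List("A"), "C" -> List("A"), "D" -> List("B", "C"))
    println(depthsOf(Set("A", "B", "C", "D"), edges)) // A -> 0, B -> 1, C -> 1, D -> 2
  }
}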
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
deleted file mode 100644
index cd60865ce7..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
+++ /dev/null
@@ -1,261 +0,0 @@
-package scala.tools.nsc.doc
-package model
-package diagram
-
-import model._
-import java.util.regex.{Pattern, Matcher}
-import scala.util.matching.Regex
-
-// statistics
-import html.page.diagram.DiagramStats
-
-/**
- * This trait takes care of parsing @{inheritance, content}Diagram annotations
- *
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-trait DiagramDirectiveParser {
- this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory =>
-
- import this.global.definitions.AnyRefClass
-
- ///// DIAGRAM FILTERS //////////////////////////////////////////////////////////////////////////////////////////////
-
- /**
- * The DiagramFilter trait directs the diagram engine about the way the diagram should be displayed
- *
- * Vlad: There's an explanation I owe to people using diagrams and not finding a way to hide a specific class from
- * all diagrams at once. So why did I choose to allow you to control the diagrams only at class level? The
- * reason is that you would break separate scaladoc compilation:
- * If you have an "@diagram hideMyClass" annotation in class A and you run scaladoc on it along with its subclass B,
- * A will not appear in B's diagram. But if you run scaladoc only on B, A's comment will not be parsed and the
- * instructions to hide class A from all diagrams will not be available. Thus I prefer to force you to control the
- * diagrams of each class locally. The problem does not appear with scalac, as scalac stores all its necessary
- * information (like scala signatures) serialized in the .class file. But we couldn't store doc comments in the class
- * file, could we? (Turns out we could, but that's another story)
- *
- * Any flaming for this decision should go to scala-internals@googlegroups.com
- */
- trait DiagramFilter {
- /** A flag to hide the diagram completely */
- def hideDiagram: Boolean
- /** Hide incoming implicit conversions (for type hierarchy diagrams) */
- def hideIncomingImplicits: Boolean
- /** Hide outgoing implicit conversions (for type hierarchy diagrams) */
- def hideOutgoingImplicits: Boolean
- /** Hide superclasses (for type hierarchy diagrams) */
- def hideSuperclasses: Boolean
- /** Hide subclasses (for type hierarchy diagrams) */
- def hideSubclasses: Boolean
- /** Show related classes from other objects/traits/packages (for content diagrams) */
- def hideInheritedNodes: Boolean
- /** Hide a node from the diagram */
- def hideNode(clazz: Node): Boolean
- /** Hide an edge from the diagram */
- def hideEdge(clazz1: Node, clazz2: Node): Boolean
- }
-
- /** Main entry point into this trait: generate the filter for inheritance diagrams */
- def makeInheritanceDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
-
- val defaultFilter =
- if (template.isClass || template.isTrait || template.sym == AnyRefClass)
- FullDiagram
- else
- NoDiagramAtAll
-
- if (template.comment.isDefined)
- makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, true)
- else
- defaultFilter
- }
-
- /** Main entry point into this trait: generate the filter for content diagrams */
- def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
- val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll
- if (template.comment.isDefined)
- makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, false)
- else
- defaultFilter
- }
-
- protected var tFilter = 0l
- protected var tModel = 0l
-
- /** Show the entire diagram, no filtering */
- case object FullDiagram extends DiagramFilter {
- val hideDiagram: Boolean = false
- val hideIncomingImplicits: Boolean = false
- val hideOutgoingImplicits: Boolean = false
- val hideSuperclasses: Boolean = false
- val hideSubclasses: Boolean = false
- val hideInheritedNodes: Boolean = false
- def hideNode(clazz: Node): Boolean = false
- def hideEdge(clazz1: Node, clazz2: Node): Boolean = false
- }
-
- /** Hide the diagram completely, no need for special filtering */
- case object NoDiagramAtAll extends DiagramFilter {
- val hideDiagram: Boolean = true
- val hideIncomingImplicits: Boolean = true
- val hideOutgoingImplicits: Boolean = true
- val hideSuperclasses: Boolean = true
- val hideSubclasses: Boolean = true
- val hideInheritedNodes: Boolean = true
- def hideNode(clazz: Node): Boolean = true
- def hideEdge(clazz1: Node, clazz2: Node): Boolean = true
- }
-
- /** The AnnotationDiagramFilter trait directs the diagram engine according to an annotation
- * TODO: Should document the annotation, for now see parseDiagramAnnotation in ModelFactory.scala */
- case class AnnotationDiagramFilter(hideDiagram: Boolean,
- hideIncomingImplicits: Boolean,
- hideOutgoingImplicits: Boolean,
- hideSuperclasses: Boolean,
- hideSubclasses: Boolean,
- hideInheritedNodes: Boolean,
- hideNodesFilter: List[Pattern],
- hideEdgesFilter: List[(Pattern, Pattern)]) extends DiagramFilter {
-
- private[this] def getName(n: Node): String =
- if (n.tpl.isDefined)
- n.tpl.get.qualifiedName
- else
- n.name
-
- def hideNode(clazz: Node): Boolean = {
- val qualifiedName = getName(clazz)
- for (hideFilter <- hideNodesFilter)
- if (hideFilter.matcher(qualifiedName).matches) {
- // println(hideFilter + ".matcher(" + qualifiedName + ").matches = " + hideFilter.matcher(qualifiedName).matches)
- return true
- }
- false
- }
-
- def hideEdge(clazz1: Node, clazz2: Node): Boolean = {
- val clazz1Name = getName(clazz1)
- val clazz2Name = getName(clazz2)
- for ((clazz1Filter, clazz2Filter) <- hideEdgesFilter) {
- if (clazz1Filter.matcher(clazz1Name).matches &&
- clazz2Filter.matcher(clazz2Name).matches) {
- // println(clazz1Filter + ".matcher(" + clazz1Name + ").matches = " + clazz1Filter.matcher(clazz1Name).matches)
- // println(clazz2Filter + ".matcher(" + clazz2Name + ").matches = " + clazz2Filter.matcher(clazz2Name).matches)
- return true
- }
- }
- false
- }
- }
-
- // TODO: This could certainly be improved -- right now the only regex is *, but there's no way to match a single identifier
- private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\""
- private val NodeSpecPattern = Pattern.compile(NodeSpecRegex)
- private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)"
- private val EdgeSpecPattern = Pattern.compile(NodeSpecRegex)
- // And the composed regexes:
- private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$")
- private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$")
-
- private def makeDiagramFilter(template: DocTemplateImpl,
- directives: List[String],
- defaultFilter: DiagramFilter,
- isInheritanceDiagram: Boolean): DiagramFilter = directives match {
-
- // if there are no specific diagram directives, return the default filter (either FullDiagram or NoDiagramAtAll)
- case Nil =>
- defaultFilter
-
- // compute the exact filters. By including the annotation, the diagram is automatically added
- case _ =>
- tFilter -= System.currentTimeMillis
- var hideDiagram0: Boolean = false
- var hideIncomingImplicits0: Boolean = false
- var hideOutgoingImplicits0: Boolean = false
- var hideSuperclasses0: Boolean = false
- var hideSubclasses0: Boolean = false
- var hideInheritedNodes0: Boolean = false
- var hideNodesFilter0: List[Pattern] = Nil
- var hideEdgesFilter0: List[(Pattern, Pattern)] = Nil
-
- def warning(message: String) = {
- // we need the position from the package object (well, ideally its comment, but yeah ...)
- val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
- assert((sym != global.NoSymbol) || (sym == global.definitions.RootPackage))
- global.reporter.warning(sym.pos, message)
- }
-
- def preparePattern(className: String) =
- "^" + className.stripPrefix("\"").stripSuffix("\"").replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*") + "$"
-
- // separate entries:
- val entries = directives.foldRight("")(_ + " " + _).split(",").map(_.trim)
- for (entry <- entries)
- entry match {
- case "hideDiagram" =>
- hideDiagram0 = true
- case "hideIncomingImplicits" if isInheritanceDiagram =>
- hideIncomingImplicits0 = true
- case "hideOutgoingImplicits" if isInheritanceDiagram =>
- hideOutgoingImplicits0 = true
- case "hideSuperclasses" if isInheritanceDiagram =>
- hideSuperclasses0 = true
- case "hideSubclasses" if isInheritanceDiagram =>
- hideSubclasses0 = true
- case "hideInheritedNodes" if !isInheritanceDiagram =>
- hideInheritedNodes0 = true
- case HideNodesRegex(last) =>
- val matcher = NodeSpecPattern.matcher(entry)
- while (matcher.find()) {
- val classPattern = Pattern.compile(preparePattern(matcher.group()))
- hideNodesFilter0 ::= classPattern
- }
- case HideEdgesRegex(last) =>
- val matcher = NodeSpecPattern.matcher(entry)
- while (matcher.find()) {
- val class1Pattern = Pattern.compile(preparePattern(matcher.group()))
- assert(matcher.find()) // it's got to be there, just matched it!
- val class2Pattern = Pattern.compile(preparePattern(matcher.group()))
- hideEdgesFilter0 ::= ((class1Pattern, class2Pattern))
- }
- case "" =>
- // don't need to do anything about it
- case _ =>
- warning("Could not understand diagram annotation in " + template.kind + " " + template.qualifiedName +
- ": unmatched entry \"" + entry + "\".\n" +
- " This could be because:\n" +
- " - you forgot to separate entries by commas\n" +
- " - you used a tag that is not allowed in the current context (like @contentDiagram hideSuperclasses)\n"+
- " - you did not use one of the allowed tags (see docs.scala-lang.org for scaladoc annotations)")
- }
- val result =
- if (hideDiagram0)
- NoDiagramAtAll
- else if ((hideNodesFilter0.isEmpty) &&
- (hideEdgesFilter0.isEmpty) &&
- (hideIncomingImplicits0 == false) &&
- (hideOutgoingImplicits0 == false) &&
- (hideSuperclasses0 == false) &&
- (hideSubclasses0 == false) &&
- (hideInheritedNodes0 == false) &&
- (hideDiagram0 == false))
- FullDiagram
- else
- AnnotationDiagramFilter(
- hideDiagram = hideDiagram0,
- hideIncomingImplicits = hideIncomingImplicits0,
- hideOutgoingImplicits = hideOutgoingImplicits0,
- hideSuperclasses = hideSuperclasses0,
- hideSubclasses = hideSubclasses0,
- hideInheritedNodes = hideInheritedNodes0,
- hideNodesFilter = hideNodesFilter0,
- hideEdgesFilter = hideEdgesFilter0)
-
- if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram)
- settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result)
- tFilter += System.currentTimeMillis
-
- result
- }
-}
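
For reference, a small standalone sketch showing how a quoted node spec such as "scala.collection.*" is turned into an anchored regex by the preparePattern helper above; the wrapper object and the example inputs are illustrative only.

object PatternSketch {
  import java.util.regex.Pattern

  // same transformation as preparePattern above: strip the quotes, escape dots, expand *
  def preparePattern(className: String): String =
    "^" + className.stripPrefix("\"").stripSuffix("\"")
      .replaceAll("\\.", "\\\\.")
      .replaceAll("\\*", ".*") + "$"

  def main(args: Array[String]): Unit = {
    val p = Pattern.compile(preparePattern("\"scala.collection.*\""))
    println(p.matcher("scala.collection.immutable.List").matches) // true
    println(p.matcher("scala.Option").matches)                    // false
  }
}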
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
deleted file mode 100644
index cb54a739bf..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ /dev/null
@@ -1,271 +0,0 @@
-package scala.tools.nsc.doc
-package model
-package diagram
-
-import model._
-import scala.collection.mutable
-
-// statistics
-import html.page.diagram.DiagramStats
-
-import scala.collection.immutable.SortedMap
-
-/**
- * This trait takes care of generating the diagram for classes and packages
- *
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-trait DiagramFactory extends DiagramDirectiveParser {
- this: ModelFactory with ModelFactoryTypeSupport with DiagramFactory with CommentFactory with TreeFactory =>
-
- import this.global.definitions._
- import this.global._
-
- // the following can be used for hardcoding different relations into the diagram, for bootstrapping purposes
- def aggregationNode(text: String) =
- NormalNode(new TypeEntity { val name = text; val refEntity = SortedMap[Int, (base.LinkTo, Int)]() }, None)()
-
- /** Create the inheritance diagram for this template */
- def makeInheritanceDiagram(tpl: DocTemplateImpl): Option[Diagram] = {
-
- tFilter = 0
- tModel = -System.currentTimeMillis
-
- // the diagram filter
- val diagramFilter = makeInheritanceDiagramFilter(tpl)
-
- def implicitTooltip(from: DocTemplateEntity, to: TemplateEntity, conv: ImplicitConversion) =
- Some(from.qualifiedName + " can be implicitly converted to " + conv.targetType + " by the implicit method "
- + conv.conversionShortName + " in " + conv.convertorOwner.kind + " " + conv.convertorOwner.qualifiedName)
-
- val result =
- if (diagramFilter == NoDiagramAtAll)
- None
- else {
- // the main node
- val thisNode = ThisNode(tpl.resultType, Some(tpl))(Some(tpl.qualifiedName + " (this " + tpl.kind + ")"))
-
- // superclasses
- var superclasses: List[Node] =
- tpl.parentTypes.collect {
- case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))()
- }.reverse
-
- // incoming implicit conversions
- lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map {
- case (incomingTpl, conv) =>
- ImplicitNode(makeType(incomingTpl.sym.tpe, tpl), Some(incomingTpl))(implicitTooltip(from=incomingTpl, to=tpl, conv=conv))
- }
-
- // subclasses
- var subclasses: List[Node] =
- tpl.directSubClasses.collect {
- case d: TemplateImpl if !classExcluded(d) => NormalNode(makeType(d.sym.tpe, tpl), Some(d))()
- }.sortBy(_.tpl.get.name)(implicitly[Ordering[String]].reverse)
-
- // outgoing implicit conversions
- lazy val outgoingImplicitNodes = tpl.outgoingImplicitlyConvertedClasses.map {
- case (outgoingTpl, outgoingType, conv) =>
- ImplicitNode(outgoingType, Some(outgoingTpl))(implicitTooltip(from=tpl, to=tpl, conv=conv))
- }
-
- // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
- // Currently, it's possible to leave nodes and edges out, but there's no way to create new nodes and edges
- // The implementation would need to add the annotations and the logic to select nodes (or create new ones)
- // and add edges to the diagram -- I bet it wouldn't take too long for someone to do it (one or two days
- // at most) and it would be a great add to the diagrams.
- if (tpl.sym == AnyRefClass)
- subclasses = List(aggregationNode("All user-defined classes and traits"))
-
- val filteredSuperclasses = if (diagramFilter.hideSuperclasses) Nil else superclasses
- val filteredIncomingImplicits = if (diagramFilter.hideIncomingImplicits) Nil else incomingImplicitNodes
- val filteredSubclasses = if (diagramFilter.hideSubclasses) Nil else subclasses
- val filteredImplicitOutgoingNodes = if (diagramFilter.hideOutgoingImplicits) Nil else outgoingImplicitNodes
-
- // final diagram filter
- filterDiagram(InheritanceDiagram(thisNode, filteredSuperclasses.reverse, filteredSubclasses.reverse, filteredIncomingImplicits, filteredImplicitOutgoingNodes), diagramFilter)
- }
-
- tModel += System.currentTimeMillis
- DiagramStats.addFilterTime(tFilter)
- DiagramStats.addModelTime(tModel-tFilter)
-
- result
- }
-
- /** Create the content diagram for this template */
- def makeContentDiagram(pack: DocTemplateImpl): Option[Diagram] = {
-
- tFilter = 0
- tModel = -System.currentTimeMillis
-
- // the diagram filter
- val diagramFilter = makeContentDiagramFilter(pack)
-
- val result =
- if (diagramFilter == NoDiagramAtAll)
- None
- else {
- var mapNodes = Map[TemplateEntity, Node]()
- var nodesShown = Set[TemplateEntity]()
- var edgesAll = List[(TemplateEntity, List[TemplateEntity])]()
-
- // classes is the entire set of classes and traits in the package, they are the superset of nodes in the diagram
- // we collect classes, traits and objects without a companion, which are usually used as values (e.g. scala.None)
- val nodesAll = pack.members collect {
- case d: TemplateEntity if ((!diagramFilter.hideInheritedNodes) || (d.inTemplate == pack)) => d
- }
-
- def listSuperClasses(member: MemberTemplateImpl) = {
- // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to add nodes to diagrams.
- (pack.sym, member.sym) match {
- case (ScalaPackage, NullClass) =>
- List(makeTemplate(AnyRefClass))
- case (ScalaPackage, NothingClass) =>
- (List(NullClass) ::: ScalaValueClasses) map { makeTemplate(_) }
- case _ =>
- member.parentTypes map {
- case (template, tpe) => template
- } filter {
- nodesAll.contains(_)
- }
- }
- }
-
- // for each node, add its subclasses
- for (node <- nodesAll if !classExcluded(node)) {
- node match {
- case dnode: MemberTemplateImpl =>
- val superClasses = listSuperClasses(dnode)
-
- if (!superClasses.isEmpty) {
- nodesShown += dnode
- nodesShown ++= superClasses
- }
- edgesAll ::= dnode -> superClasses
- case _ =>
- }
-
- mapNodes += node -> (
- if (node.inTemplate == pack && (node.isDocTemplate || node.isAbstractType || node.isAliasType))
- NormalNode(node.resultType, Some(node))()
- else
- OutsideNode(node.resultType, Some(node))()
- )
- }
-
- if (nodesShown.isEmpty)
- None
- else {
- val nodes = nodesAll.filter(nodesShown.contains(_)).flatMap(mapNodes.get(_))
- val edges = edgesAll.map {
- case (entity, superClasses) => {
- (mapNodes(entity), superClasses flatMap { mapNodes.get(_) })
- }
- } filterNot {
- case (node, superClassNodes) => superClassNodes.isEmpty
- }
-
- val diagram =
- // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
- if (pack.sym == ScalaPackage) {
- // Tried it, but it doesn't look good:
- // var anyRefSubtypes: List[Node] = List(mapNodes(makeTemplate(AnyRefClass)))
- // var dirty = true
- // do {
- // val length = anyRefSubtypes.length
- // anyRefSubtypes :::= edges.collect { case p: (Node, List[Node]) if p._2.exists(anyRefSubtypes.contains(_)) => p._1 }
- // anyRefSubtypes = anyRefSubtypes.distinct
- // dirty = (anyRefSubtypes.length != length)
- // } while (dirty)
- // println(anyRefSubtypes)
- val anyRefSubtypes = Nil
- val allAnyRefTypes = aggregationNode("All AnyRef subtypes")
- val nullTemplate = makeTemplate(NullClass)
- if (nullTemplate.isDocTemplate)
- ContentDiagram(allAnyRefTypes::nodes, (mapNodes(nullTemplate), allAnyRefTypes::anyRefSubtypes)::edges.filterNot(_._1.tpl == Some(nullTemplate)))
- else
- ContentDiagram(nodes, edges)
- } else
- ContentDiagram(nodes, edges)
-
- filterDiagram(diagram, diagramFilter)
- }
- }
-
- tModel += System.currentTimeMillis
- DiagramStats.addFilterTime(tFilter)
- DiagramStats.addModelTime(tModel-tFilter)
-
- result
- }
-
- /** Diagram filtering logic */
- private def filterDiagram(diagram: Diagram, diagramFilter: DiagramFilter): Option[Diagram] = {
- tFilter -= System.currentTimeMillis
-
- val result =
- if (diagramFilter == FullDiagram)
- Some(diagram)
- else if (diagramFilter == NoDiagramAtAll)
- None
- else {
- // Final diagram, with the filtered nodes and edges
- diagram match {
- case InheritanceDiagram(thisNode, _, _, _, _) if diagramFilter.hideNode(thisNode) =>
- None
-
- case InheritanceDiagram(thisNode, superClasses, subClasses, incomingImplicits, outgoingImplicits) =>
-
- def hideIncoming(node: Node): Boolean =
- diagramFilter.hideNode(node) || diagramFilter.hideEdge(node, thisNode)
-
- def hideOutgoing(node: Node): Boolean =
- diagramFilter.hideNode(node) || diagramFilter.hideEdge(thisNode, node)
-
- // println(thisNode)
- // println(superClasses.map(cl => "super: " + cl + " " + hideOutgoing(cl)).mkString("\n"))
- // println(subClasses.map(cl => "sub: " + cl + " " + hideIncoming(cl)).mkString("\n"))
- Some(InheritanceDiagram(thisNode,
- superClasses.filterNot(hideOutgoing(_)),
- subClasses.filterNot(hideIncoming(_)),
- incomingImplicits.filterNot(hideIncoming(_)),
- outgoingImplicits.filterNot(hideOutgoing(_))))
-
- case ContentDiagram(nodes0, edges0) =>
- // Filter out all edges that:
- // (1) are sources of hidden classes
- // (2) are manually hidden by the user
- // (3) are destinations of hidden classes
- val edges: List[(Node, List[Node])] =
- diagram.edges.flatMap({
- case (source, dests) if !diagramFilter.hideNode(source) =>
- val dests2 = dests.collect({ case dest if (!(diagramFilter.hideEdge(source, dest) || diagramFilter.hideNode(dest))) => dest })
- if (dests2 != Nil)
- List((source, dests2))
- else
- Nil
- case _ => Nil
- })
-
- // Only show the non-isolated nodes
- // TODO: Decide if we really want to hide package members, I'm not sure that's a good idea (!!!)
- // TODO: Does .distinct cause any stability issues?
- val sourceNodes = edges.map(_._1)
- val sinkNodes = edges.map(_._2).flatten
- val nodes = (sourceNodes ::: sinkNodes).distinct
- Some(ContentDiagram(nodes, edges))
- }
- }
-
- tFilter += System.currentTimeMillis
-
- // eliminate all empty diagrams
- if (result.isDefined && result.get.edges.forall(_._2.isEmpty))
- None
- else
- result
- }
-
-}
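
The content diagram construction above boils down to two passes over the edge list: drop every edge whose source node is hidden, prune destinations that are hidden or whose edge is filtered out, and finally keep only the nodes that still take part in some edge. Below is a minimal standalone sketch of that filtering idea, using plain strings and predicate functions in place of the Scaladoc model's Node, DiagramFilter and template entities; all names in it (ContentDiagramFilterSketch, filterEdges, liveNodes) are illustrative, not part of the diagram factory's API.

// Standalone sketch of the ContentDiagram edge filtering above, with plain
// strings standing in for the Scaladoc model's diagram nodes.
object ContentDiagramFilterSketch {
  type Node  = String                      // placeholder for the model's diagram Node
  type Edges = List[(Node, List[Node])]

  def filterEdges(edges: Edges,
                  hideNode: Node => Boolean,
                  hideEdge: (Node, Node) => Boolean): Edges =
    edges.flatMap {
      case (source, dests) if !hideNode(source) =>
        // drop destinations that are hidden themselves or whose edge is hidden
        val kept = dests.filterNot(d => hideNode(d) || hideEdge(source, d))
        if (kept.nonEmpty) List(source -> kept) else Nil
      case _ => Nil                        // hidden source: drop the whole edge group
    }

  // only non-isolated nodes survive: the sources and sinks of the kept edges
  def liveNodes(edges: Edges): List[Node] =
    (edges.map(_._1) ::: edges.flatMap(_._2)).distinct

  def main(args: Array[String]): Unit = {
    val edges: Edges = List("C" -> List("B", "Hidden"), "Hidden" -> List("A"))
    val kept = filterEdges(edges, hideNode = _ == "Hidden", hideEdge = (_, _) => false)
    println(kept)            // List((C,List(B)))
    println(liveNodes(kept)) // List(C, B)
  }
}
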
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
deleted file mode 100644
index 3e7ac573e9..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Iulian Dragos
- * @author Hubert Plocinicak
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.FakePos
-
-import dependencies._
-import io.AbstractFile
-import scala.language.implicitConversions
-
-trait BuildManager {
-
- /** Add the given source files to the managed build process. */
- def addSourceFiles(files: Set[AbstractFile])
-
- /** Remove the given files from the managed build process. */
- def removeFiles(files: Set[AbstractFile])
-
- /** The given files have been modified by the user. Recompile
- * them and their dependent files.
- */
- def update(added: Set[AbstractFile], removed: Set[AbstractFile])
-
- /** Notification that the supplied set of files is being built */
- def buildingFiles(included: Set[AbstractFile]) {}
-
- /** Load saved dependency information. */
- def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean
-
- /** Save dependency information to `file`. */
- def saveTo(file: AbstractFile, fromFile: AbstractFile => String)
-
- def compiler: scala.tools.nsc.Global
-
- /** Delete classfiles derived from the supplied set of sources */
- def deleteClassfiles(sources : Set[AbstractFile]) {
- val targets = compiler.dependencyAnalysis.dependencies.targets
- for(source <- sources; cf <- targets(source))
- cf.delete
- }
-}
-
-
-/** Simple driver for testing the build manager. It presents
- * the user with a 'resident compiler' prompt. Each line is
- * interpreted as a set of files that have changed. The builder
- * then derives the dependent files and recompiles them.
- */
-object BuildManagerTest extends EvalLoop {
-
- def prompt = "builder > "
-
- private def buildError(msg: String) {
- println(msg + "\n scalac -help gives more information")
- }
-
- def main(args: Array[String]) {
- implicit def filesToSet(fs: List[String]): Set[AbstractFile] = {
- def partition(s: String, r: Tuple2[List[AbstractFile], List[String]])= {
- val v = AbstractFile.getFile(s)
- if (v == null) (r._1, s::r._2) else (v::r._1, r._2)
- }
- val result = fs.foldRight((List[AbstractFile](), List[String]()))(partition)
- if (!result._2.isEmpty)
- Console.err.println("No such file(s): " + result._2.mkString(","))
- Set.empty ++ result._1
- }
-
- val settings = new Settings(buildError)
- settings.Ybuildmanagerdebug.value = true
- val command = new CompilerCommand(args.toList, settings)
-// settings.make.value = "off"
-// val buildManager: BuildManager = new SimpleBuildManager(settings)
- val buildManager: BuildManager = new RefinedBuildManager(settings)
-
- buildManager.addSourceFiles(command.files)
-
- // enter resident mode
- loop { line =>
- val args = line.split(' ').toList
- val command = new CompilerCommand(args, settings)
- buildManager.update(command.files, Set.empty)
- }
-
- }
-}
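
The implicit filesToSet conversion in BuildManagerTest folds over the command-line arguments, splitting them into files that resolve and names that do not, and reports the latter before building the set. Below is a sketch of that partitioning idiom, assuming java.io.File can stand in for scala.tools.nsc.io.AbstractFile so the snippet runs without the compiler on the classpath; PartitionFilesSketch and partitionFiles are made-up names for illustration.

// Sketch of the filesToSet partitioning idiom used by BuildManagerTest,
// with java.io.File replacing AbstractFile so it runs standalone.
import java.io.File

object PartitionFilesSketch {
  /** Split path strings into files that exist and names that could not be resolved. */
  def partitionFiles(paths: List[String]): (Set[File], List[String]) = {
    val (found, missing) = paths.foldRight((List.empty[File], List.empty[String])) {
      case (path, (fs, bad)) =>
        val f = new File(path)
        if (f.exists) (f :: fs, bad) else (fs, path :: bad)
    }
    (found.toSet, missing)
  }

  def main(args: Array[String]): Unit = {
    val (ok, missing) = partitionFiles(List("build.sbt", "no/such/file.scala"))
    if (missing.nonEmpty)
      Console.err.println("No such file(s): " + missing.mkString(","))
    println("resolved: " + ok)
  }
}
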
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
deleted file mode 100644
index 8d12581c9c..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ /dev/null
@@ -1,481 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.util.control.ControlThrowable
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
-import scala.tools.nsc.util.FailedInterrupt
-import scala.tools.nsc.util.EmptyAction
-import scala.tools.nsc.util.WorkScheduler
-import scala.reflect.internal.util.{SourceFile, Position}
-import scala.tools.nsc.util.InterruptReq
-
-/** Interface of interactive compiler to a client such as an IDE
- * The model of the presentation compiler consists of the following parts:
- *
- * unitOfFile: The map from sourcefiles to loaded units. A sourcefile/unit is loaded if it occurs in that map.
- *
- * manipulated by: removeUnitOf, reloadSources.
- *
- * A call to reloadSources will add the given sources to the loaded units, and
- * start a new background compiler pass to compile all loaded units (with the indicated sources first).
- * Each background compiler pass has its own typer run.
- * The background compiler thread can be interrupted each time an AST node is
- * completely typechecked in the following ways:
- *
- * 1. by a new call to reloadSources. This starts a new background compiler pass with a new typer run.
- * 2. by a call to askTypeTree. This starts a new typer run if the forceReload parameter = true
- * 3. by a call to askTypeAt, askTypeCompletion, askScopeCompletion, askToDoFirst, askLinkPos, askLastType.
- * 4. by raising an exception in the scheduler.
- * 5. by passing a high-priority action wrapped in ask { ... }.
- *
- * Actions under 1-3 can themselves be interrupted if they involve typechecking
- * AST nodes. High-priority actions under 5 cannot; they always run to completion.
- * So these high-priority actions should be short.
- *
- * Normally, an interrupted action continues after the interrupting action is finished.
- * However, if the interrupting action created a new typer run, the interrupted
- * action is aborted. If there's an outstanding response, it will be set to
- * a Right value with a FreshRunReq exception.
- */
-trait CompilerControl { self: Global =>
-
- import syntaxAnalyzer.UnitParser
-
- type Response[T] = scala.tools.nsc.interactive.Response[T]
-
- /** The scheduler by which client and compiler communicate.
- * Must be initialized before starting compilerRunner.
- */
- @volatile protected[interactive] var scheduler = new WorkScheduler
-
- /** Return the compilation unit attached to a source file, or None
- * if source is not loaded.
- */
- def getUnitOf(s: SourceFile): Option[RichCompilationUnit] = getUnit(s)
-
- /** Run operation `op` on a compilation unit associated with given `source`.
- * If source has a loaded compilation unit, this one is passed to `op`.
- * Otherwise a new compilation unit is created, but not added to the set of loaded units.
- */
- def onUnitOf[T](source: SourceFile)(op: RichCompilationUnit => T): T =
- op(unitOfFile.getOrElse(source.file, new RichCompilationUnit(source)))
-
- /** The compilation unit corresponding to a source file;
- * if it does not yet exist, create a new one atomically.
- * Note: We want to get rid of this operation as it messes with compiler invariants.
- */
- @deprecated("use getUnitOf(s) or onUnitOf(s) instead", "2.10.0")
- def unitOf(s: SourceFile): RichCompilationUnit = getOrCreateUnitOf(s)
-
- /** The compilation unit corresponding to a position */
- @deprecated("use getUnitOf(pos.source) or onUnitOf(pos.source) instead", "2.10.0")
- def unitOf(pos: Position): RichCompilationUnit = getOrCreateUnitOf(pos.source)
-
- /** Removes the CompilationUnit corresponding to the given SourceFile
- * from consideration for recompilation.
- */
- def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file }
-
- /** Returns the top level classes and objects that were deleted
- * in the editor since last time recentlyDeleted() was called.
- */
- def recentlyDeleted(): List[Symbol] = deletedTopLevelSyms.synchronized {
- val result = deletedTopLevelSyms
- deletedTopLevelSyms.clear()
- result.toList
- }
-
- /** Locate smallest tree that encloses position
- * @pre Position must be loaded
- */
- def locateTree(pos: Position): Tree = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.body }
-
- /** Locates smallest context that encloses position as an optional value.
- */
- def locateContext(pos: Position): Option[Context] =
- for (unit <- getUnit(pos.source); cx <- locateContext(unit.contexts, pos)) yield cx
-
- /** Returns the smallest context that contains given `pos`, throws FatalError if none exists.
- */
- def doLocateContext(pos: Position): Context = locateContext(pos) getOrElse {
- throw new FatalError("no context found for "+pos)
- }
-
- private def postWorkItem(item: WorkItem) =
- if (item.onCompilerThread) item() else scheduler.postWorkItem(item)
-
- /** Makes sure a set of compilation units is loaded and parsed.
- * Returns () to syncvar `response` on completion.
- * Afterwards a new background compiler run is started with
- * the given sources at the head of the list of to-be-compiled sources.
- */
- def askReload(sources: List[SourceFile], response: Response[Unit]) = {
- val superseded = scheduler.dequeueAll {
- case ri: ReloadItem if ri.sources == sources => Some(ri)
- case _ => None
- }
- superseded.foreach(_.response.set())
- postWorkItem(new ReloadItem(sources, response))
- }
-
- /** Removes source files and toplevel symbols, and issues a new typer run.
- * Returns () to syncvar `response` on completion.
- */
- def askFilesDeleted(sources: List[SourceFile], response: Response[Unit]) = {
- postWorkItem(new FilesDeletedItem(sources, response))
- }
-
- /** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`.
- * Note: Unlike for most other ask... operations, the source file belonging to `pos` need not be loaded.
- */
- def askTypeAt(pos: Position, response: Response[Tree]) =
- postWorkItem(new AskTypeAtItem(pos, response))
-
- /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
- * @pre `source` needs to be loaded.
- *
- * @note Deprecated because of race conditions in the typechecker when the background compiler
- * is interrupted while typing the same `source`.
- * @see SI-6578
- */
- @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1")
- def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) =
- postWorkItem(new AskTypeItem(source, forceReload, response))
-
- /** Sets sync var `response` to the position of the definition of the given link in
- * the given sourcefile.
- *
- * @param sym The symbol referenced by the link (might come from a classfile)
- * @param source The source file that's supposed to contain the definition
- * @param response A response that will be set to the following:
- * If `source` contains a definition that is referenced by the given link
- * the position of that definition, otherwise NoPosition.
- * Note: This operation does not automatically load `source`. If `source`
- * is unloaded, it stays that way.
- */
- def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
- postWorkItem(new AskLinkPosItem(sym, source, response))
-
- /** Sets sync var `response` to doc comment information for a given symbol.
- *
- * @param sym The symbol whose doc comment should be retrieved (might come from a classfile)
- * @param source The source file that's supposed to contain the definition
- * @param site The symbol where 'sym' is observed
- * @param fragments All symbols that can contribute to the generated documentation
- * together with their source files.
- * @param response A response that will be set to the following:
- * If `source` contains a definition of a given symbol that has a doc comment,
- * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
- * Note: This operation does not automatically load sources that are not yet loaded.
- */
- def askDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]): Unit =
- postWorkItem(new AskDocCommentItem(sym, source, site, fragments, response))
-
- @deprecated("Use method that accepts fragments", "2.10.2")
- def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]): Unit =
- askDocComment(sym, source, site, (sym,source)::Nil, response)
-
- /** Sets sync var `response` to list of members that are visible
- * as members of the tree enclosing `pos`, possibly reachable by an implicit.
- * @pre source is loaded
- */
- def askTypeCompletion(pos: Position, response: Response[List[Member]]) =
- postWorkItem(new AskTypeCompletionItem(pos, response))
-
- /** Sets sync var `response` to list of members that are visible
- * as members of the scope enclosing `pos`.
- * @pre source is loaded
- */
- def askScopeCompletion(pos: Position, response: Response[List[Member]]) =
- postWorkItem(new AskScopeCompletionItem(pos, response))
-
- /** Asks that the unit corresponding to the given source file be processed first on the present and subsequent type checking passes.
- * If the file is in the 'crashedFiles' ignore list, it is removed from that list and typechecked normally.
- */
- def askToDoFirst(source: SourceFile) =
- postWorkItem(new AskToDoFirstItem(source))
-
- /** If source is not yet loaded, loads it, and starts a new run, otherwise
- * continues with current pass.
- * Waits until source is fully type checked and returns body in response.
- * @param source The source file that needs to be fully typed.
- * @param keepLoaded Whether to keep that file in the PC if it was not loaded before. If
- * the file is already loaded, this flag is ignored.
- * @param response The response, which is set to the fully attributed tree of `source`.
- * If the unit corresponding to `source` has been removed in the meantime,
- * a NoSuchUnitError is raised in the response.
- */
- def askLoadedTyped(source:SourceFile, keepLoaded: Boolean, response: Response[Tree]): Unit =
- postWorkItem(new AskLoadedTypedItem(source, keepLoaded, response))
-
- final def askLoadedTyped(source: SourceFile, response: Response[Tree]): Unit =
- askLoadedTyped(source, false, response)
-
- /** If source is not yet loaded, get an outline view with askParsedEntered.
- * If source is loaded, wait for it to be typechecked.
- * In both cases, set response to parsed (and possibly typechecked) tree.
- * @param keepSrcLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
- */
- def askStructure(keepSrcLoaded: Boolean)(source: SourceFile, response: Response[Tree]) = {
- getUnit(source) match {
- case Some(_) => askLoadedTyped(source, keepSrcLoaded, response)
- case None => askParsedEntered(source, keepSrcLoaded, response)
- }
- }
-
- /** Set sync var `response` to the parse tree of `source` with all top-level symbols entered.
- * @param source The source file to be analyzed
- * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
- * If keepLoaded is `false` the operation is run at low priority, only after
- * everything is brought up to date in a regular type checker run.
- * @param response The response.
- */
- def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) =
- postWorkItem(new AskParsedEnteredItem(source, keepLoaded, response))
-
- /** Set sync var `response` to a pair consisting of
- * - the fully qualified name of the first top-level object definition in the file,
- * or "" if there are no object definitions.
- * - the text of the instrumented program which, when run,
- * prints its output and all defined values in a comment column.
- *
- * @param source The source file to be analyzed
- * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
- * If keepLoaded is `false` the operation is run at low priority, only after
- * everything is brought up to date in a regular type checker run.
- * @param response The response.
- */
- @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- def askInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) =
- postWorkItem(new AskInstrumentedItem(source, line, response))
-
- /** Cancels the current compiler run and starts a fresh one where everything will be re-typechecked
- * (but not re-loaded).
- */
- def askReset() = scheduler raise (new FreshRunReq)
-
- /** Tells the compile server to shutdown, and not to restart again */
- def askShutdown() = scheduler raise ShutdownReq
-
- @deprecated("use parseTree(source) instead", "2.10.0") // deleted 2nd parameter, as this has to run on 2.8 also.
- def askParse(source: SourceFile, response: Response[Tree]) = respond(response) {
- parseTree(source)
- }
-
- /** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
- *
- * This method is thread-safe and as such can safely run outside of the presentation
- * compiler thread.
- */
- def parseTree(source: SourceFile): Tree = {
- new UnitParser(new CompilationUnit(source)).parse()
- }
-
- /** Asks for a computation to be done quickly on the presentation compiler thread */
- def ask[A](op: () => A): A = if (self.onCompilerThread) op() else scheduler doQuickly op
-
- /** Asks for a computation to be done on presentation compiler thread, returning
- * a response with the result or an exception
- */
- def askForResponse[A](op: () => A): Response[A] = {
- val r = new Response[A]
- if (self.onCompilerThread) {
- try { r set op() }
- catch { case exc: Throwable => r raise exc }
- r
- } else {
- val ir = scheduler askDoQuickly op
- ir onComplete {
- case Left(result) => r set result
- case Right(exc) => r raise exc
- }
- r
- }
- }
-
- def onCompilerThread = Thread.currentThread == compileRunner
-
- /** Info given for every member found by completion
- */
- abstract class Member {
- val sym: Symbol
- val tpe: Type
- val accessible: Boolean
- def implicitlyAdded = false
- }
-
- case class TypeMember(
- sym: Symbol,
- tpe: Type,
- accessible: Boolean,
- inherited: Boolean,
- viaView: Symbol) extends Member {
- override def implicitlyAdded = viaView != NoSymbol
- }
-
- case class ScopeMember(
- sym: Symbol,
- tpe: Type,
- accessible: Boolean,
- viaImport: Tree) extends Member
-
- // items that get sent to scheduler
-
- abstract class WorkItem extends (() => Unit) {
- val onCompilerThread = self.onCompilerThread
-
- /** Raise a MissingResponse if the work item carries a response. */
- def raiseMissing(): Unit
- }
-
- case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
- def apply() = reload(sources, response)
- override def toString = "reload "+sources
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- case class FilesDeletedItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
- def apply() = filesDeleted(sources, response)
- override def toString = "files deleted "+sources
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- case class AskTypeAtItem(val pos: Position, response: Response[Tree]) extends WorkItem {
- def apply() = self.getTypedTreeAt(pos, response)
- override def toString = "typeat "+pos.source+" "+pos.show
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- case class AskTypeItem(val source: SourceFile, val forceReload: Boolean, response: Response[Tree]) extends WorkItem {
- def apply() = self.getTypedTree(source, forceReload, response)
- override def toString = "typecheck"
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- case class AskTypeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
- def apply() = self.getTypeCompletion(pos, response)
- override def toString = "type completion "+pos.source+" "+pos.show
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- case class AskScopeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
- def apply() = self.getScopeCompletion(pos, response)
- override def toString = "scope completion "+pos.source+" "+pos.show
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- class AskToDoFirstItem(val source: SourceFile) extends WorkItem {
- def apply() = {
- moveToFront(List(source))
- enableIgnoredFile(source.file)
- }
- override def toString = "dofirst "+source
-
- def raiseMissing() = ()
- }
-
- case class AskLinkPosItem(val sym: Symbol, val source: SourceFile, response: Response[Position]) extends WorkItem {
- def apply() = self.getLinkPos(sym, source, response)
- override def toString = "linkpos "+sym+" in "+source
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- case class AskDocCommentItem(val sym: Symbol, val source: SourceFile, val site: Symbol, val fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
- def apply() = self.getDocComment(sym, source, site, fragments, response)
- override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")")
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- case class AskLoadedTypedItem(val source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
- def apply() = self.waitLoadedTyped(source, response, keepLoaded, this.onCompilerThread)
- override def toString = "wait loaded & typed "+source
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- case class AskParsedEnteredItem(val source: SourceFile, val keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
- def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread)
- override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- case class AskInstrumentedItem(val source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem {
- def apply() = self.getInstrumented(source, line, response)
- override def toString = "getInstrumented "+source
-
- def raiseMissing() =
- response raise new MissingResponse
- }
-
- /** A do-nothing work scheduler that responds immediately with MissingResponse.
- *
- * Used during compiler shutdown.
- */
- class NoWorkScheduler extends WorkScheduler {
-
- override def postWorkItem(action: Action) = synchronized {
- action match {
- case w: WorkItem => w.raiseMissing()
- case e: EmptyAction => // do nothing
- case _ => println("don't know what to do with this " + action.getClass)
- }
- }
-
- override def doQuickly[A](op: () => A): A = {
- throw new FailedInterrupt(new Exception("Posted a work item to a compiler that's shutting down"))
- }
-
- override def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = {
- val ir = new InterruptReq {
- type R = A
- val todo = () => throw new MissingResponse
- }
- ir.execute()
- ir
- }
-
- }
-
-}
-
- // ---------------- Interpreted exceptions -------------------
-
-/** Signals a request for a fresh background compiler run.
- * Note: This class has to stay top-level so that the PresentationCompilerThread may access it.
- */
-class FreshRunReq extends ControlThrowable
-
-/** Signals a request for a shutdown of the presentation compiler.
- * Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
- */
-object ShutdownReq extends ControlThrowable
-
-class NoSuchUnitError(file: AbstractFile) extends Exception("no unit found for file "+file)
-
-class MissingResponse extends Exception("response missing")
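
Every ask* method above follows the same handshake: the client allocates a Response, posts a work item, and later blocks on get, receiving either Left(result) set by the compiler thread or Right(exception) raised by it. The sketch below mimics only that set/raise/get surface on top of scala.concurrent.SyncVar; MiniResponse is a simplified stand-in for illustration, not the actual scala.tools.nsc.interactive.Response class.

// Minimal sketch of the ask/Response handshake used by the work items above.
import scala.concurrent.SyncVar

class MiniResponse[T] {
  private val data = new SyncVar[Either[T, Throwable]]
  def set(result: T): Unit        = data.put(Left(result))   // compiler thread answers
  def raise(exc: Throwable): Unit = data.put(Right(exc))     // or reports a failure
  def get: Either[T, Throwable]   = data.get                 // client blocks here
}

object MiniResponseDemo {
  def main(args: Array[String]): Unit = {
    val response = new MiniResponse[String]
    // the "compiler side" answers from another thread, as postWorkItem handlers do
    val worker = new Thread(new Runnable {
      def run(): Unit = response.set("typed tree for Foo.scala")
    })
    worker.start()
    // the client side blocks until the answer or the failure arrives
    response.get match {
      case Left(tree) => println("got: " + tree)
      case Right(exc) => println("failed: " + exc)
    }
  }
}
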
diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
deleted file mode 100644
index 4a61a98921..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection.mutable.ArrayBuffer
-import scala.annotation.tailrec
-
-trait ContextTrees { self: Global =>
-
- type Context = analyzer.Context
- lazy val NoContext = analyzer.NoContext
- type Contexts = ArrayBuffer[ContextTree]
-
- /** A context tree contains contexts that are indexed by positions.
- * It satisfies the following properties:
- * 1. All contexts come from compiling the same unit.
- * 2. Child contexts have parent contexts in their outer chain.
- * 3. The `pos` field of a context is the same as `context.tree.pos`, unless that
- * position is transparent. In that case, `pos` equals the position of
- * one of the solid descendants of `context.tree`.
- * 4. Children of a context have non-overlapping increasing positions.
- * 5. No context in the tree has a transparent position.
- */
- class ContextTree(val pos: Position, val context: Context, val children: ArrayBuffer[ContextTree]) {
- def this(pos: Position, context: Context) = this(pos, context, new ArrayBuffer[ContextTree])
- override def toString = "ContextTree("+pos+", "+children+")"
- }
-
- /** Returns the most precise context possible for the given `pos`.
- *
- * It looks for the finest ContextTree containing `pos`, and then looks inside
- * this ContextTree for a child ContextTree located immediately before `pos`.
- * If such a child exists, returns its context, otherwise returns the context of
- * the parent ContextTree.
- *
- * This is required to always return a context which contains all the imports
- * declared up to `pos` (see SI-7280 for a test case).
- *
- * Can return None if `pos` is before any valid Scala code.
- */
- def locateContext(contexts: Contexts, pos: Position): Option[Context] = synchronized {
- @tailrec
- def locateFinestContextTree(context: ContextTree): ContextTree = {
- if (context.pos includes pos) {
- locateContextTree(context.children, pos) match {
- case Some(x) =>
- locateFinestContextTree(x)
- case None =>
- context
- }
- } else {
- context
- }
- }
- locateContextTree(contexts, pos) map locateFinestContextTree map (_.context)
- }
-
- /** Returns the ContextTree containing `pos`, or the ContextTree positioned just before `pos`,
- * or None if `pos` is located before all ContextTrees.
- */
- def locateContextTree(contexts: Contexts, pos: Position): Option[ContextTree] = {
- if (contexts.isEmpty) None
- else {
- @tailrec
- def loop(lo: Int, hi: Int, previousSibling: Option[ContextTree]): Option[ContextTree] = {
- if (pos properlyPrecedes contexts(lo).pos)
- previousSibling
- else if (contexts(hi).pos properlyPrecedes pos)
- Some(contexts(hi))
- else {
- val mid = (lo + hi) / 2
- val midpos = contexts(mid).pos
- if (midpos includes pos)
- Some(contexts(mid))
- else if (midpos properlyPrecedes pos)
- loop(mid + 1, hi, Some(contexts(mid)))
- else
- loop(lo, mid, previousSibling)
- }
- }
- loop(0, contexts.length - 1, None)
- }
- }
-
- /** Insert a context at correct position into a buffer of context trees.
- * If the `context` has a transparent position, add it multiple times
- * at the positions of all its solid descendant trees.
- */
- def addContext(contexts: Contexts, context: Context): Unit = {
- val cpos = context.tree.pos
- if (cpos.isTransparent)
- for (t <- context.tree.children flatMap solidDescendants)
- addContext(contexts, context, t.pos)
- else
- addContext(contexts, context, cpos)
- }
-
- /** Insert a context with non-transparent position `cpos`
- * at correct position into a buffer of context trees.
- */
- def addContext(contexts: Contexts, context: Context, cpos: Position): Unit = synchronized {
- try {
- if (!cpos.isRange) {}
- else if (contexts.isEmpty) contexts += new ContextTree(cpos, context)
- else {
- val hi = contexts.length - 1
- if (contexts(hi).pos precedes cpos)
- contexts += new ContextTree(cpos, context)
- else if (contexts(hi).pos properlyIncludes cpos) // fast path w/o search
- addContext(contexts(hi).children, context, cpos)
- else if (cpos precedes contexts(0).pos)
- new ContextTree(cpos, context) +=: contexts
- else {
- def insertAt(idx: Int): Boolean = {
- val oldpos = contexts(idx).pos
- if (oldpos sameRange cpos) {
- contexts(idx) = new ContextTree(cpos, context, contexts(idx).children)
- true
- } else if (oldpos includes cpos) {
- addContext(contexts(idx).children, context, cpos)
- true
- } else if (cpos includes oldpos) {
- val start = contexts.indexWhere(cpos includes _.pos)
- val last = contexts.lastIndexWhere(cpos includes _.pos)
- contexts(start) = new ContextTree(cpos, context, contexts.slice(start, last + 1))
- contexts.remove(start + 1, last - start)
- true
- } else false
- }
- def loop(lo: Int, hi: Int) {
- if (hi - lo > 1) {
- val mid = (lo + hi) / 2
- val midpos = contexts(mid).pos
- if (cpos precedes midpos)
- loop(lo, mid)
- else if (midpos precedes cpos)
- loop(mid, hi)
- else
- addContext(contexts(mid).children, context, cpos)
- } else if (!insertAt(lo) && !insertAt(hi)) {
- val lopos = contexts(lo).pos
- val hipos = contexts(hi).pos
- if ((lopos precedes cpos) && (cpos precedes hipos))
- contexts.insert(hi, new ContextTree(cpos, context))
- else
- inform("internal error? skewed positions: "+lopos+" !< "+cpos+" !< "+hipos)
- }
- }
- loop(0, hi)
- }
- }
- } catch {
- case ex: Throwable =>
- println(ex)
- ex.printStackTrace()
- println("failure inserting "+cpos+" into "+contexts+"/"+contexts(contexts.length - 1).pos+"/"+
- (contexts(contexts.length - 1).pos includes cpos))
- throw ex
- }
- }
-}
-
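
locateContextTree relies on the ContextTree invariant that sibling trees have non-overlapping, increasing positions, so it can binary-search for the tree containing a position, or fall back to the closest tree before it. The sketch below reproduces that search with inclusive integer spans standing in for compiler Positions; Span, locate and LocateSketch are illustrative names only, assuming the same sorted, non-overlapping layout.

// Sketch of the locateContextTree binary search, with inclusive integer spans
// standing in for compiler Positions. The spans are assumed to be sorted and
// non-overlapping, as ContextTree property 4 guarantees for sibling contexts.
object LocateSketch {
  final case class Span(start: Int, end: Int) {
    def includes(p: Int): Boolean         = start <= p && p <= end
    def properlyPrecedes(p: Int): Boolean = end < p
  }

  /** The span containing `pos`, or the closest span before it, or None. */
  def locate(spans: Vector[Span], pos: Int): Option[Span] = {
    @annotation.tailrec
    def loop(lo: Int, hi: Int, previous: Option[Span]): Option[Span] =
      if (pos < spans(lo).start) previous                       // pos precedes the whole slice
      else if (spans(hi).properlyPrecedes(pos)) Some(spans(hi)) // pos follows the whole slice
      else {
        val mid = (lo + hi) / 2
        val m   = spans(mid)
        if (m.includes(pos)) Some(m)
        else if (m.properlyPrecedes(pos)) loop(mid + 1, hi, Some(m))
        else loop(lo, mid, previous)
      }
    if (spans.isEmpty) None else loop(0, spans.length - 1, None)
  }

  def main(args: Array[String]): Unit = {
    val spans = Vector(Span(0, 9), Span(20, 29), Span(40, 49))
    println(locate(spans, 25)) // Some(Span(20,29)) -- pos inside a span
    println(locate(spans, 35)) // Some(Span(20,29)) -- closest span before pos
    println(locate(spans, -1)) // None              -- pos before all spans
  }
}
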
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
deleted file mode 100644
index d6fa42b1cc..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ /dev/null
@@ -1,1214 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
-import scala.collection.mutable
-import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
-import scala.concurrent.SyncVar
-import scala.util.control.ControlThrowable
-import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer }
-import scala.tools.nsc.util.{ WorkScheduler, MultiHashMap }
-import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition }
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
-import scala.tools.nsc.io.Pickler._
-import scala.tools.nsc.typechecker.DivergentImplicit
-import scala.annotation.tailrec
-import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
-import scala.annotation.elidable
-import scala.language.implicitConversions
-
-/** The main class of the presentation compiler in an interactive environment such as an IDE
- */
-class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
- /* Is the compiler initializing? Early def, so that the field is true during the
- * execution of the super constructor.
- */
- private var initializing = true
-} with scala.tools.nsc.Global(settings, _reporter)
- with CompilerControl
- with RangePositions
- with ContextTrees
- with RichCompilationUnits
- with ScratchPadMaker
- with Picklers {
-
- import definitions._
-
- val debugIDE: Boolean = settings.YpresentationDebug.value
- val verboseIDE: Boolean = settings.YpresentationVerbose.value
-
- private def replayName = settings.YpresentationReplay.value
- private def logName = settings.YpresentationLog.value
- private def afterTypeDelay = settings.YpresentationDelay.value
- private final val SleepTime = 10
-
- val log =
- if (replayName != "") new Replayer(new FileReader(replayName))
- else if (logName != "") new Logger(new FileWriter(logName))
- else NullLogger
-
- import log.logreplay
- debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath)
- debugLog("classpath: "+classPath)
-
- private var curTime = System.nanoTime
- private def timeStep = {
- val last = curTime
- curTime = System.nanoTime
- ", delay = " + (curTime - last) / 1000000 + "ms"
- }
-
- /** Print msg only when debugIDE is true. */
- @inline final def debugLog(msg: => String) =
- if (debugIDE) println("[%s] %s".format(projectName, msg))
-
- /** Inform with msg only when verboseIDE is true. */
- @inline final def informIDE(msg: => String) =
- if (verboseIDE) println("[%s][%s]".format(projectName, msg))
-
- override def forInteractive = true
-
- /** A map of all loaded files to the rich compilation units that correspond to them.
- */
- val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
- SynchronizedMap[AbstractFile, RichCompilationUnit] {
- override def put(key: AbstractFile, value: RichCompilationUnit) = {
- val r = super.put(key, value)
- if (r.isEmpty) debugLog("added unit for "+key)
- r
- }
- override def remove(key: AbstractFile) = {
- val r = super.remove(key)
- if (r.nonEmpty) debugLog("removed unit for "+key)
- r
- }
- }
-
- /** A set containing all those files that need to be removed.
- * Units are removed by getUnit, typically once a unit has finished compiling.
- */
- protected val toBeRemoved: mutable.Set[AbstractFile] =
- new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
-
- /** A set containing all those files that need to be removed after a full background compiler run
- */
- protected val toBeRemovedAfterRun: mutable.Set[AbstractFile] =
- new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
-
- class ResponseMap extends MultiHashMap[SourceFile, Response[Tree]] {
- override def += (binding: (SourceFile, Set[Response[Tree]])) = {
- assert(interruptsEnabled, "delayed operation within an ask")
- super.+=(binding)
- }
- }
-
- /** A map that associates with each abstract file the set of responses that are waiting
- * (via waitLoadedTyped) for the unit associated with the abstract file to be loaded and completely typechecked.
- */
- protected val waitLoadedTypeResponses = new ResponseMap
-
- /** A map that associates with each abstract file the set of responses that are waiting
- * (via build) for the unit associated with the abstract file to be parsed and entered
- */
- protected var getParsedEnteredResponses = new ResponseMap
-
- private def cleanResponses(rmap: ResponseMap): Unit = {
- for ((source, rs) <- rmap.toList) {
- for (r <- rs) {
- if (getUnit(source).isEmpty)
- r raise new NoSuchUnitError(source.file)
- if (r.isComplete)
- rmap(source) -= r
- }
- if (rmap(source).isEmpty)
- rmap -= source
- }
- }
-
- private def cleanAllResponses() {
- cleanResponses(waitLoadedTypeResponses)
- cleanResponses(getParsedEnteredResponses)
- }
-
- private def checkNoOutstanding(rmap: ResponseMap): Unit =
- for ((_, rs) <- rmap.toList; r <- rs) {
- debugLog("ERROR: missing response, request will be discarded")
- r raise new MissingResponse
- }
-
- def checkNoResponsesOutstanding() {
- checkNoOutstanding(waitLoadedTypeResponses)
- checkNoOutstanding(getParsedEnteredResponses)
- }
-
- /** The compilation unit corresponding to a source file;
- * if it does not yet exist, create a new one atomically.
- * Note: We want to remove this.
- */
- protected[interactive] def getOrCreateUnitOf(source: SourceFile): RichCompilationUnit =
- unitOfFile.getOrElse(source.file, { println("precondition violated: "+source+" is not loaded"); new Exception().printStackTrace(); new RichCompilationUnit(source) })
-
- /** Work through toBeRemoved list to remove any units.
- * Then return optionally unit associated with given source.
- */
- protected[interactive] def getUnit(s: SourceFile): Option[RichCompilationUnit] = {
- toBeRemoved.synchronized {
- for (f <- toBeRemoved) {
- informIDE("removed: "+s)
- unitOfFile -= f
- allSources = allSources filter (_.file != f)
- }
- toBeRemoved.clear()
- }
- unitOfFile get s.file
- }
-
- /** A list giving all files to be typechecked in the order they should be checked.
- */
- protected var allSources: List[SourceFile] = List()
-
- private var lastException: Option[Throwable] = None
-
- /** A list of files that crashed the compiler. They will be ignored during background
- * compilation until they are removed from this list.
- */
- private var ignoredFiles: Set[AbstractFile] = Set()
-
- /** Flush the buffer of sources that are ignored during background compilation. */
- def clearIgnoredFiles() {
- ignoredFiles = Set()
- }
-
- /** Remove a crashed file from the ignore buffer. Background compilation will take it into account
- * and errors will be reported against it. */
- def enableIgnoredFile(file: AbstractFile) {
- ignoredFiles -= file
- debugLog("Removed crashed file %s. Still in the ignored buffer: %s".format(file, ignoredFiles))
- }
-
- /** The currently active typer run */
- private var currentTyperRun: TyperRun = _
- newTyperRun()
-
- /** Is a background compiler run needed?
- * Note: outOfDate is true as long as there is a background compile scheduled or going on.
- */
- private var outOfDate = false
-
- def isOutOfDate: Boolean = outOfDate
-
- def demandNewCompilerRun() = {
- if (outOfDate) throw new FreshRunReq // cancel background compile
- else outOfDate = true // proceed normally and enable new background compile
- }
-
- protected[interactive] var minRunId = 1
-
- private[interactive] var interruptsEnabled = true
-
- private val NoResponse: Response[_] = new Response[Any]
-
- /** The response that is currently pending, i.e. the compiler
- * is working on providing an answer for it.
- */
- private var pendingResponse: Response[_] = NoResponse
-
- // ----------- Overriding hooks in nsc.Global -----------------------
-
- /** Called from parser, which signals hereby that a method definition has been parsed.
- */
- override def signalParseProgress(pos: Position) {
- // We only want to be interruptible when running on the PC thread.
- if(onCompilerThread) {
- checkForMoreWork(pos)
- }
- }
-
- /** Called from typechecker, which signals hereby that a node has been completely typechecked.
- * If the node includes unit.targetPos, abandons run and returns newly attributed tree.
- * Otherwise, if there's some higher priority work to be done, also abandons run with a FreshRunReq.
- * @param context The context that typechecked the node
- * @param old The original node
- * @param result The transformed node
- */
- override def signalDone(context: Context, old: Tree, result: Tree) {
- val canObserveTree = (
- interruptsEnabled
- && analyzer.lockedCount == 0
- && !context.bufferErrors // SI-7558 look away during exploratory typing in "silent mode"
- )
- if (canObserveTree) {
- if (context.unit.exists &&
- result.pos.isOpaqueRange &&
- (result.pos includes context.unit.targetPos)) {
- var located = new TypedLocator(context.unit.targetPos) locateIn result
- if (located == EmptyTree) {
- println("something's wrong: no "+context.unit+" in "+result+result.pos)
- located = result
- }
- throw new TyperResult(located)
- }
- else {
- try {
- checkForMoreWork(old.pos)
- } catch {
- case ex: ValidateException => // Ignore, this will have been reported elsewhere
- debugLog("validate exception caught: "+ex)
- case ex: Throwable =>
- log.flush()
- throw ex
- }
- }
- }
- }
-
- /** Called from typechecker every time a context is created.
- * Registers the context in a context tree
- */
- override def registerContext(c: Context) = c.unit match {
- case u: RichCompilationUnit => addContext(u.contexts, c)
- case _ =>
- }
-
- /** The top level classes and objects currently seen in the presentation compiler
- */
- private val currentTopLevelSyms = new mutable.LinkedHashSet[Symbol]
-
- /** The top level classes and objects no longer seen in the presentation compiler
- */
- val deletedTopLevelSyms = new mutable.LinkedHashSet[Symbol] with mutable.SynchronizedSet[Symbol]
-
- /** Called from typechecker every time a top-level class or object is entered.
- */
- override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym }
-
- /** Symbol loaders in the IDE parse all source files loaded from a package for
- * top-level idents. Therefore, we can detect top-level symbols that have a name
- * different from their source file
- */
- override lazy val loaders = new BrowsingLoaders {
- val global: Global.this.type = Global.this
- }
-
- // ----------------- Polling ---------------------------------------
-
- case class WorkEvent(atNode: Int, atMillis: Long)
-
- private var moreWorkAtNode: Int = -1
- private var nodesSeen = 0
- private var lastWasReload = false
-
- /** The number of pollForWorks after which the presentation compiler yields.
- * Yielding improves responsiveness on systems with few cores because it
- * gives the UI thread a chance to get new tasks and interrupt the presentation
- * compiler with them.
- */
- private final val yieldPeriod = 10
-
- /** Called from runner thread and signalDone:
- * Poll for interrupts and execute them immediately.
- * Then, poll for exceptions and execute them.
- * Then, poll for work reload/typedTreeAt/doFirst commands during background checking.
- * @param pos The position of the tree if polling while typechecking, NoPosition otherwise
- *
- */
- private[interactive] def pollForWork(pos: Position) {
- if (!interruptsEnabled) return
- if (pos == NoPosition || nodesSeen % yieldPeriod == 0)
- Thread.`yield`()
-
- def nodeWithWork(): Option[WorkEvent] =
- if (scheduler.moreWork || pendingResponse.isCancelled) Some(new WorkEvent(nodesSeen, System.currentTimeMillis))
- else None
-
- nodesSeen += 1
- logreplay("atnode", nodeWithWork()) match {
- case Some(WorkEvent(id, _)) =>
- debugLog("some work at node "+id+" current = "+nodesSeen)
-// assert(id >= nodesSeen)
- moreWorkAtNode = id
- case None =>
- }
-
- if (nodesSeen >= moreWorkAtNode) {
-
- logreplay("asked", scheduler.pollInterrupt()) match {
- case Some(ir) =>
- try {
- interruptsEnabled = false
- debugLog("ask started"+timeStep)
- ir.execute()
- } finally {
- debugLog("ask finished"+timeStep)
- interruptsEnabled = true
- }
- pollForWork(pos)
- case _ =>
- }
-
- if (logreplay("cancelled", pendingResponse.isCancelled)) {
- throw CancelException
- }
-
- logreplay("exception thrown", scheduler.pollThrowable()) match {
- case Some(ex: FreshRunReq) =>
- newTyperRun()
- minRunId = currentRunId
- demandNewCompilerRun()
-
- case Some(ShutdownReq) =>
- scheduler.synchronized { // lock the work queue so no more items are posted while we clean it up
- val units = scheduler.dequeueAll {
- case item: WorkItem => Some(item.raiseMissing())
- case _ => Some(())
- }
-
- // don't forget to service interrupt requests
- val iqs = scheduler.dequeueAllInterrupts(_.execute())
-
- debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size))
- debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)"
- .format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
- checkNoResponsesOutstanding()
-
- log.flush();
- scheduler = new NoWorkScheduler
- throw ShutdownReq
- }
-
- case Some(ex: Throwable) => log.flush(); throw ex
- case _ =>
- }
-
- lastWasReload = false
-
- logreplay("workitem", scheduler.nextWorkItem()) match {
- case Some(action) =>
- try {
- debugLog("picked up work item at "+pos+": "+action+timeStep)
- action()
- debugLog("done with work item: "+action)
- } finally {
- debugLog("quitting work item: "+action+timeStep)
- }
- case None =>
- }
- }
- }
-
- protected def checkForMoreWork(pos: Position) {
- val typerRun = currentTyperRun
- pollForWork(pos)
- if (typerRun != currentTyperRun) demandNewCompilerRun()
- }
-
- def debugInfo(source : SourceFile, start : Int, length : Int): String = {
- println("DEBUG INFO "+source+"/"+start+"/"+length)
- val end = start+length
- val pos = rangePos(source, start, start, end)
-
- val tree = locateTree(pos)
- val sw = new StringWriter
- val pw = new PrintWriter(sw)
- newTreePrinter(pw).print(tree)
- pw.flush
-
- val typed = new Response[Tree]
- askTypeAt(pos, typed)
- val typ = typed.get.left.toOption match {
- case Some(tree) =>
- val sw = new StringWriter
- val pw = new PrintWriter(sw)
- newTreePrinter(pw).print(tree)
- pw.flush
- sw.toString
- case None => "<None>"
- }
-
- val completionResponse = new Response[List[Member]]
- askTypeCompletion(pos, completionResponse)
- val completion = completionResponse.get.left.toOption match {
- case Some(members) =>
- members mkString "\n"
- case None => "<None>"
- }
-
- source.content.view.drop(start).take(length).mkString+" : "+source.path+" ("+start+", "+end+
- ")\n\nlocateTree:\n"+sw.toString+"\n\naskTypeAt:\n"+typ+"\n\ncompletion:\n"+completion
- }
-
- // ----------------- The Background Runner Thread -----------------------
-
- private var threadId = 0
-
- /** The current presentation compiler runner */
- @volatile private[interactive] var compileRunner: Thread = newRunnerThread()
-
- /** Check that the currently executing thread is the presentation compiler thread.
- *
- * Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase)
- */
- @elidable(elidable.WARNING)
- override def assertCorrectThread() {
- assert(initializing || onCompilerThread,
- "Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) +
- " Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets")
- }
-
- /** Create a new presentation compiler runner.
- */
- private def newRunnerThread(): Thread = {
- threadId += 1
- compileRunner = new PresentationCompilerThread(this, projectName)
- compileRunner.setDaemon(true)
- compileRunner.start()
- compileRunner
- }
-
- private def ensureUpToDate(unit: RichCompilationUnit) =
- if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
-
- /** Compile all loaded source files in the order given by `allSources`.
- */
- private[interactive] final def backgroundCompile() {
- informIDE("Starting new presentation compiler type checking pass")
- reporter.reset()
-
- // remove any files in first that are no longer maintained by presentation compiler (i.e. closed)
- allSources = allSources filter (s => unitOfFile contains (s.file))
-
- // ensure all loaded units are parsed
- for (s <- allSources; unit <- getUnit(s)) {
- // checkForMoreWork(NoPosition) // disabled, as any work done here would be in an inconsistent state
- ensureUpToDate(unit)
- parseAndEnter(unit)
- serviceParsedEntered()
- }
-
- // sleep window
- if (afterTypeDelay > 0 && lastWasReload) {
- val limit = System.currentTimeMillis() + afterTypeDelay
- while (System.currentTimeMillis() < limit) {
- Thread.sleep(SleepTime)
- checkForMoreWork(NoPosition)
- }
- }
-
- // ensure all loaded units are typechecked
- for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) {
- try {
- if (!unit.isUpToDate)
- if (unit.problems.isEmpty || !settings.YpresentationStrict.value)
- typeCheck(unit)
- else debugLog("%s has syntax errors. Skipped typechecking".format(unit))
- else debugLog("already up to date: "+unit)
- for (r <- waitLoadedTypeResponses(unit.source))
- r set unit.body
- serviceParsedEntered()
- } catch {
- case ex: FreshRunReq => throw ex // propagate a new run request
- case ShutdownReq => throw ShutdownReq // propagate a shutdown request
- case ex: ControlThrowable => throw ex
- case ex: Throwable =>
- println("[%s]: exception during background compile: ".format(unit.source) + ex)
- ex.printStackTrace()
- for (r <- waitLoadedTypeResponses(unit.source)) {
- r.raise(ex)
- }
- serviceParsedEntered()
-
- lastException = Some(ex)
- ignoredFiles += unit.source.file
- println("[%s] marking unit as crashed (crashedFiles: %s)".format(unit, ignoredFiles))
-
- reporter.error(unit.body.pos, "Presentation compiler crashed while type checking this file: %s".format(ex.toString()))
- }
- }
-
- // move units removable after this run to the "to-be-removed" buffer
- toBeRemoved ++= toBeRemovedAfterRun
-
- // clean out stale waiting responses
- cleanAllResponses()
-
- // wind down
- if (waitLoadedTypeResponses.nonEmpty || getParsedEnteredResponses.nonEmpty) {
- // need another cycle to treat those
- newTyperRun()
- backgroundCompile()
- } else {
- outOfDate = false
- informIDE("Everything is now up to date")
- }
- }
-
- /** Service all pending getParsedEntered requests
- */
- private def serviceParsedEntered() {
- var atOldRun = true
- for ((source, rs) <- getParsedEnteredResponses; r <- rs) {
- if (atOldRun) { newTyperRun(); atOldRun = false }
- getParsedEnteredNow(source, r)
- }
- getParsedEnteredResponses.clear()
- }
-
- /** Reset unit to unloaded state */
- private def reset(unit: RichCompilationUnit): Unit = {
- unit.depends.clear()
- unit.defined.clear()
- unit.synthetics.clear()
- unit.toCheck.clear()
- unit.checkedFeatures = Set()
- unit.targetPos = NoPosition
- unit.contexts.clear()
- unit.problems.clear()
- unit.body = EmptyTree
- unit.status = NotLoaded
- }
-
- /** Parse unit and create a name index, unless this has already been done before */
- private def parseAndEnter(unit: RichCompilationUnit): Unit =
- if (unit.status == NotLoaded) {
- debugLog("parsing: "+unit)
- currentTyperRun.compileLate(unit)
- if (debugIDE && !reporter.hasErrors) validatePositions(unit.body)
- if (!unit.isJava) syncTopLevelSyms(unit)
- unit.status = JustParsed
- }
-
- /** Make sure unit is typechecked
- */
- private def typeCheck(unit: RichCompilationUnit) {
- debugLog("type checking: "+unit)
- parseAndEnter(unit)
- unit.status = PartiallyChecked
- currentTyperRun.typeCheck(unit)
- unit.lastBody = unit.body
- unit.status = currentRunId
- }
-
- /** Update deleted and current top-level symbols sets */
- def syncTopLevelSyms(unit: RichCompilationUnit) {
- val deleted = currentTopLevelSyms filter { sym =>
- /** We sync after namer phase and it resets all the top-level symbols
- * that survive the new parsing
- * round to NoPeriod.
- */
- sym.sourceFile == unit.source.file &&
- sym.validTo != NoPeriod &&
- runId(sym.validTo) < currentRunId
- }
- for (d <- deleted) {
- d.owner.info.decls unlink d
- deletedTopLevelSyms += d
- currentTopLevelSyms -= d
- }
- }
-
- /** Move list of files to front of allSources */
- def moveToFront(fs: List[SourceFile]) {
- allSources = fs ::: (allSources diff fs)
- }
-
- // ----------------- Implementations of client commands -----------------------
-
- def respond[T](result: Response[T])(op: => T): Unit =
- respondGradually(result)(Stream(op))
-
- def respondGradually[T](response: Response[T])(op: => Stream[T]): Unit = {
- val prevResponse = pendingResponse
- try {
- pendingResponse = response
- if (!response.isCancelled) {
- var results = op
- while (!response.isCancelled && results.nonEmpty) {
- val result = results.head
- results = results.tail
- if (results.isEmpty) {
- response set result
- debugLog("responded"+timeStep)
- } else response setProvisionally result
- }
- }
- } catch {
- case CancelException =>
- debugLog("cancelled")
- case ex: FreshRunReq =>
- if (debugIDE) {
- println("FreshRunReq thrown during response")
- ex.printStackTrace()
- }
- response raise ex
- throw ex
-
- case ex @ ShutdownReq =>
- if (debugIDE) {
- println("ShutdownReq thrown during response")
- ex.printStackTrace()
- }
- response raise ex
- throw ex
-
- case ex: Throwable =>
- if (debugIDE) {
- println("exception thrown during response: "+ex)
- ex.printStackTrace()
- }
- response raise ex
- } finally {
- pendingResponse = prevResponse
- }
- }
-
- private def reloadSource(source: SourceFile) {
- val unit = new RichCompilationUnit(source)
- unitOfFile(source.file) = unit
- toBeRemoved -= source.file
- toBeRemovedAfterRun -= source.file
- reset(unit)
- //parseAndEnter(unit)
- }
-
- /** Make sure a set of compilation units is loaded and parsed */
- private def reloadSources(sources: List[SourceFile]) {
- newTyperRun()
- minRunId = currentRunId
- sources foreach reloadSource
- moveToFront(sources)
- }
-
- /** Make sure a set of compilation units is loaded and parsed */
- private[interactive] def reload(sources: List[SourceFile], response: Response[Unit]) {
- informIDE("reload: " + sources)
- lastWasReload = true
- respond(response)(reloadSources(sources))
- demandNewCompilerRun()
- }
-
- private[interactive] def filesDeleted(sources: List[SourceFile], response: Response[Unit]) {
- informIDE("files deleted: " + sources)
- val deletedFiles = sources.map(_.file).toSet
- val deletedSyms = currentTopLevelSyms filter {sym => deletedFiles contains sym.sourceFile}
- for (d <- deletedSyms) {
- d.owner.info.decls unlink d
- deletedTopLevelSyms += d
- currentTopLevelSyms -= d
- }
- sources foreach (removeUnitOf(_))
- minRunId = currentRunId
- respond(response)(())
- demandNewCompilerRun()
- }
-
- /** Arrange for unit to be removed after run, to give a chance to typecheck the unit fully.
- * If we do just removeUnit, some problems with default parameters can ensue.
- * Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly.
- */
- private def afterRunRemoveUnitsOf(sources: List[SourceFile]) {
- toBeRemovedAfterRun ++= sources map (_.file)
- }
-
- /** A fully attributed tree located at position `pos` */
- private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
- case None =>
- reloadSources(List(pos.source))
- try typedTreeAt(pos)
- finally afterRunRemoveUnitsOf(List(pos.source))
- case Some(unit) =>
- informIDE("typedTreeAt " + pos)
- parseAndEnter(unit)
- val tree = locateTree(pos)
- debugLog("at pos "+pos+" was found: "+tree.getClass+" "+tree.pos.show)
- tree match {
- case Import(expr, _) =>
- debugLog("import found"+expr.tpe+(if (expr.tpe == null) "" else " "+expr.tpe.members))
- case _ =>
- }
- if (stabilizedType(tree) ne null) {
- debugLog("already attributed: "+tree.symbol+" "+tree.tpe)
- tree
- } else {
- unit.targetPos = pos
- try {
- debugLog("starting targeted type check")
- typeCheck(unit)
-// println("tree not found at "+pos)
- EmptyTree
- } catch {
- case ex: TyperResult => new Locator(pos) locateIn ex.tree
- } finally {
- unit.targetPos = NoPosition
- }
- }
- }
-
- /** A fully attributed tree corresponding to the entire compilation unit */
- private[interactive] def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
- informIDE("typedTree " + source + " forceReload: " + forceReload)
- val unit = getOrCreateUnitOf(source)
- if (forceReload) reset(unit)
- parseAndEnter(unit)
- if (unit.status <= PartiallyChecked) typeCheck(unit)
- unit.body
- }
-
- /** Set sync var `response` to a fully attributed tree located at position `pos` */
- private[interactive] def getTypedTreeAt(pos: Position, response: Response[Tree]) {
- respond(response)(typedTreeAt(pos))
- }
-
- /** Set sync var `response` to a fully attributed tree corresponding to the
- * entire compilation unit */
- private[interactive] def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]) {
- respond(response)(typedTree(source, forceReload))
- }
-
- private def withTempUnits[T](sources: List[SourceFile])(f: (SourceFile => RichCompilationUnit) => T): T = {
- val unitOfSrc: SourceFile => RichCompilationUnit = src => unitOfFile(src.file)
- sources filterNot (getUnit(_).isDefined) match {
- case Nil =>
- f(unitOfSrc)
- case unknown =>
- reloadSources(unknown)
- try {
- f(unitOfSrc)
- } finally
- afterRunRemoveUnitsOf(unknown)
- }
- }
-
- private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T =
- withTempUnits(List(source)){ srcToUnit =>
- f(srcToUnit(source))
- }
-
- /** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit` is loaded. */
- private def findMirrorSymbol(sym: Symbol, unit: RichCompilationUnit): Symbol = {
- val originalTypeParams = sym.owner.typeParams
- ensureUpToDate(unit)
- parseAndEnter(unit)
- val pre = adaptToNewRunMap(ThisType(sym.owner))
- val rawsym = pre.typeSymbol.info.decl(sym.name)
- val newsym = rawsym filter { alt =>
- sym.isType || {
- try {
- val tp1 = pre.memberType(alt) onTypeError NoType
- val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
- matchesType(tp1, tp2, false) || {
- debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed")
- val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
- matchesType(tp1, tp3, false) || {
- debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed")
- false
- }
- }
- }
- catch {
- case ex: ControlThrowable => throw ex
- case ex: Throwable =>
- debugLog("error in findMirrorSymbol: " + ex)
- ex.printStackTrace()
- false
- }
- }
- }
- if (newsym == NoSymbol) {
- if (rawsym.exists && !rawsym.isOverloaded) rawsym
- else {
- debugLog("mirror not found " + sym + " " + unit.source + " " + pre)
- NoSymbol
- }
- } else if (newsym.isOverloaded) {
- settings.uniqid.value = true
- debugLog("mirror ambiguous " + sym + " " + unit.source + " " + pre + " " + newsym.alternatives)
- NoSymbol
- } else {
- debugLog("mirror found for " + newsym + ": " + newsym.pos)
- newsym
- }
- }
-
- /** Implements CompilerControl.askLinkPos */
- private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
- informIDE("getLinkPos "+sym+" "+source)
- respond(response) {
- if (sym.owner.isClass) {
- withTempUnit(source){ u =>
- findMirrorSymbol(sym, u).pos
- }
- } else {
- debugLog("link not in class "+sym+" "+source+" "+sym.owner)
- NoPosition
- }
- }
- }
-
- private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
- unit.body foreachPartial {
- case DocDef(comment, defn) if defn.symbol == sym =>
- fillDocComment(defn.symbol, comment)
- EmptyTree
- case _: ValOrDefDef =>
- EmptyTree
- }
- }
-
- /** Implements CompilerControl.askDocComment */
- private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)],
- response: Response[(String, String, Position)]) {
- informIDE(s"getDocComment $sym at $source, site $site")
- respond(response) {
- withTempUnits(fragments.unzip._2){ units =>
- for((sym, src) <- fragments) {
- val mirror = findMirrorSymbol(sym, units(src))
- if (mirror ne NoSymbol) forceDocComment(mirror, units(src))
- }
- val mirror = findMirrorSymbol(sym, units(source))
- if (mirror eq NoSymbol)
- ("", "", NoPosition)
- else {
- (expandedDocComment(mirror, site), rawDocComment(mirror), docCommentPos(mirror))
- }
- }
- }
- // New typer run to remove temp units and drop per-run caches that might refer to symbols entered from temp units.
- newTyperRun()
- }
-
- def stabilizedType(tree: Tree): Type = tree match {
- case Ident(_) if tree.symbol.isStable =>
- singleType(NoPrefix, tree.symbol)
- case Select(qual, _) if qual.tpe != null && tree.symbol.isStable =>
- singleType(qual.tpe, tree.symbol)
- case Import(expr, selectors) =>
- tree.symbol.info match {
- case analyzer.ImportType(expr) => expr match {
- case s@Select(qual, name) => singleType(qual.tpe, s.symbol)
- case i : Ident => i.tpe
- case _ => tree.tpe
- }
- case _ => tree.tpe
- }
-
- case _ => tree.tpe
- }
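For intuition about the stability check above, a small standalone sketch (plain Scala, hypothetical Config object, no compiler internals): only stable paths such as vals and objects admit a singleton type, while a def does not, which is why stabilizedType falls back to tree.tpe in the other cases.

object StablePathDemo {
  object Config {
    val host: String = "localhost"   // a val: stable path, has a singleton type
    def pick(): String = "fallback"  // a def: not stable, no singleton type
  }
  val h: Config.host.type = Config.host
  // val p: Config.pick.type = Config.pick  // would not compile: stable identifier required
}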
-
- import analyzer.{SearchResult, ImplicitSearch}
-
- private[interactive] def getScopeCompletion(pos: Position, response: Response[List[Member]]) {
- informIDE("getScopeCompletion" + pos)
- respond(response) { scopeMembers(pos) }
- }
-
- private val Dollar = newTermName("$")
-
- private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] {
- override def default(key: Name) = Set()
-
- private def matching(sym: Symbol, symtpe: Type, ms: Set[M]): Option[M] = ms.find { m =>
- (m.sym.name == sym.name) && (m.sym.isType || (m.tpe matches symtpe))
- }
-
- private def keepSecond(m: M, sym: Symbol, implicitlyAdded: Boolean): Boolean =
- m.sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
- !sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
- (!implicitlyAdded || m.implicitlyAdded)
-
- def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
- if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
- add(sym.accessed, pre, implicitlyAdded)(toMember)
- } else if (!sym.name.decodedName.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
- val symtpe = pre.memberType(sym) onTypeError ErrorType
- matching(sym, symtpe, this(sym.name)) match {
- case Some(m) =>
- if (keepSecond(m, sym, implicitlyAdded)) {
- //print(" -+ "+sym.name)
- this(sym.name) = this(sym.name) - m + toMember(sym, symtpe)
- }
- case None =>
- //print(" + "+sym.name)
- this(sym.name) = this(sym.name) + toMember(sym, symtpe)
- }
- }
- }
-
- def addNonShadowed(other: Members[M]) = {
- for ((name, ms) <- other)
- if (ms.nonEmpty && this(name).isEmpty) this(name) = ms
- }
-
- def allMembers: List[M] = values.toList.flatten
- }
-
- /** Return all members visible without prefix in context enclosing `pos`. */
- private def scopeMembers(pos: Position): List[ScopeMember] = {
- typedTreeAt(pos) // to make sure context is entered
- val context = doLocateContext(pos)
- val locals = new Members[ScopeMember]
- val enclosing = new Members[ScopeMember]
- def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) =
- locals.add(sym, pre, false) { (s, st) =>
- // imported val and var are always marked as inaccessible, but they could be accessed through their getters. SI-7995
- if (s.hasGetter)
- new ScopeMember(s, st, context.isAccessible(s.getter, pre, superAccess = false), viaImport)
- else
- new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport)
- }
- def localsToEnclosing() = {
- enclosing.addNonShadowed(locals)
- locals.clear()
- }
- //print("add scope members")
- var cx = context
- while (cx != NoContext) {
- for (sym <- cx.scope)
- addScopeMember(sym, NoPrefix, EmptyTree)
- localsToEnclosing()
- if (cx == cx.enclClass) {
- val pre = cx.prefix
- for (sym <- pre.members)
- addScopeMember(sym, pre, EmptyTree)
- localsToEnclosing()
- }
- cx = cx.outer
- }
- //print("\nadd imported members")
- for (imp <- context.imports) {
- val pre = imp.qual.tpe
- for (sym <- imp.allImportedSymbols)
- addScopeMember(sym, pre, imp.qual)
- localsToEnclosing()
- }
- // println()
- val result = enclosing.allMembers
-// if (debugIDE) for (m <- result) println(m)
- result
- }
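A standalone sketch of the shadowing rule that addNonShadowed enforces above, assuming plain maps in place of the Members class: bindings from an inner scope hide same-named bindings from outer scopes and imports.

object ShadowingDemo extends App {
  // scopes listed innermost first, mirroring the walk from `context` to its outer contexts
  def visible(scopes: List[Map[String, String]]): Map[String, String] =
    scopes.reverse.foldLeft(Map.empty[String, String])((acc, scope) => acc ++ scope)

  val inner = Map("x" -> "local val x")
  val outer = Map("x" -> "field x", "y" -> "field y")
  println(visible(List(inner, outer))) // Map(x -> local val x, y -> field y)
}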
-
- private[interactive] def getTypeCompletion(pos: Position, response: Response[List[Member]]) {
- informIDE("getTypeCompletion " + pos)
- respondGradually(response) { typeMembers(pos) }
- //if (debugIDE) typeMembers(pos)
- }
-
- private def typeMembers(pos: Position): Stream[List[TypeMember]] = {
- var tree = typedTreeAt(pos)
-
- // if tree consists of just x. or x.fo where fo is not yet a full member name
- // ignore the selection and look in just x.
- tree match {
- case Select(qual, name) if tree.tpe == ErrorType => tree = qual
- case _ =>
- }
-
- val context = doLocateContext(pos)
-
- val shouldTypeQualifier = tree.tpe match {
- case null => true
- case mt: MethodType => mt.isImplicit
- case _ => false
- }
-
- if (shouldTypeQualifier)
- // TODO: guard with try/catch to deal with ill-typed qualifiers.
- tree = analyzer.newTyper(context).typedQualifier(tree)
-
- debugLog("typeMembers at "+tree+" "+tree.tpe)
-
- val superAccess = tree.isInstanceOf[Super]
- val members = new Members[TypeMember]
-
- def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) = {
- val implicitlyAdded = viaView != NoSymbol
- members.add(sym, pre, implicitlyAdded) { (s, st) =>
- new TypeMember(s, st,
- context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded),
- inherited,
- viaView)
- }
- }
-
- /** Create a function application of a given view function to `tree` and typecheck it.
- */
- def viewApply(view: SearchResult): Tree = {
- assert(view.tree != EmptyTree)
- analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false))
- .typed(Apply(view.tree, List(tree)) setPos tree.pos)
- .onTypeError(EmptyTree)
- }
-
- val pre = stabilizedType(tree)
-
- val ownerTpe = tree.tpe match {
- case analyzer.ImportType(expr) => expr.tpe
- case null => pre
- case MethodType(List(), rtpe) => rtpe
- case _ => tree.tpe
- }
-
- //print("add members")
- for (sym <- ownerTpe.members)
- addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol)
- members.allMembers #:: {
- //print("\nadd pimped")
- val applicableViews: List[SearchResult] =
- if (ownerTpe.isErroneous) List()
- else new ImplicitSearch(
- tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true,
- context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
- for (view <- applicableViews) {
- val vtree = viewApply(view)
- val vpre = stabilizedType(vtree)
- for (sym <- vtree.tpe.members) {
- addTypeMember(sym, vpre, false, view.tree.symbol)
- }
- }
- //println()
- Stream(members.allMembers)
- }
- }
-
- /** Implements CompilerControl.askLoadedTyped */
- private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], keepLoaded: Boolean = false, onSameThread: Boolean = true) {
- getUnit(source) match {
- case Some(unit) =>
- if (unit.isUpToDate) {
- debugLog("already typed");
- response set unit.body
- } else if (ignoredFiles(source.file)) {
- response.raise(lastException.getOrElse(CancelException))
- } else if (onSameThread) {
- getTypedTree(source, forceReload = false, response)
- } else {
- debugLog("wait for later")
- outOfDate = true
- waitLoadedTypeResponses(source) += response
- }
- case None =>
- debugLog("load unit and type")
- try reloadSources(List(source))
- finally {
- waitLoadedTyped(source, response, onSameThread)
- if (!keepLoaded) removeUnitOf(source)
- }
- }
- }
-
- /** Implements CompilerControl.askParsedEntered */
- private[interactive] def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree], onSameThread: Boolean = true) {
- getUnit(source) match {
- case Some(unit) =>
- getParsedEnteredNow(source, response)
- case None =>
- try {
- if (keepLoaded || outOfDate && onSameThread)
- reloadSources(List(source))
- } finally {
- if (keepLoaded || !outOfDate || onSameThread)
- getParsedEnteredNow(source, response)
- else
- getParsedEnteredResponses(source) += response
- }
- }
- }
-
- /** Parses and enters the given source file, storing the parse tree in response */
- private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) {
- respond(response) {
- onUnitOf(source) { unit =>
- parseAndEnter(unit)
- unit.body
- }
- }
- }
-
- @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- def getInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) =
- try {
- interruptsEnabled = false
- respond(response) {
- instrument(source, line)
- }
- } finally {
- interruptsEnabled = true
- }
-
- // ---------------- Helper classes ---------------------------
-
- /** A transformer that replaces tree `from` with tree `to` in a given tree */
- class TreeReplacer(from: Tree, to: Tree) extends Transformer {
- override def transform(t: Tree): Tree = {
- if (t == from) to
- else if ((t.pos includes from.pos) || t.pos.isTransparent) super.transform(t)
- else t
- }
- }
-
- /** The typer run */
- class TyperRun extends Run {
- // units is always empty
-
- /** canRedefine is used to detect double declarations of classes and objects
- * in multiple source files.
- * Since the IDE rechecks units several times in the same run, these tests
- * are disabled by always returning true here.
- */
- override def canRedefine(sym: Symbol) = true
-
- def typeCheck(unit: CompilationUnit): Unit = {
- applyPhase(typerPhase, unit)
- }
-
- /** Apply a phase to a compilation unit */
- private def applyPhase(phase: Phase, unit: CompilationUnit) {
- atPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
- }
- }
-
- def newTyperRun() {
- currentTyperRun = new TyperRun
- }
-
- class TyperResult(val tree: Tree) extends ControlThrowable
-
- assert(globalPhase.id == 0)
-
- implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x)
-
- // OnTypeError should still catch TypeError because of cyclic references,
- // but DivergentImplicit shouldn't leak anymore here
- class OnTypeError[T](op: => T) {
- def onTypeError(alt: => T) = try {
- op
- } catch {
- case ex: TypeError =>
- debugLog("type error caught: "+ex)
- alt
- case ex: DivergentImplicit =>
- if (settings.Xdivergence211.value) {
- debugLog("this shouldn't happen. DivergentImplicit exception has been thrown with -Xdivergence211 turned on: "+ex)
- alt
- } else {
- debugLog("divergent implicit caught: "+ex)
- alt
- }
- }
- }
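The onTypeError combinator above is an implicit-conversion fallback pattern. A minimal standalone sketch of the same pattern, assuming nothing from the compiler and catching NumberFormatException in place of the compiler's TypeError:

import scala.language.implicitConversions

object OnErrorDemo extends App {
  class OnError[T](op: => T) {
    def onError(alt: => T): T =
      try op
      catch { case _: NumberFormatException => alt } // stand-in for TypeError
  }
  implicit def addOnError[T](x: => T): OnError[T] = new OnError(x)

  println("42".toInt onError -1)    // 42: the operation succeeds
  println("boom".toInt onError -1)  // -1: the fallback is used
}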
-
- /** The compiler has been initialized. Constructors are evaluated in textual order,
- * so this flag is cleared only after all super constructors and the primary constructor
- * have been executed.
- */
- initializing = false
-}
-
-object CancelException extends Exception
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
deleted file mode 100644
index 013b152e96..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection.mutable.ArrayBuffer
-import scala.reflect.internal.util.Position
-import reporters.Reporter
-
-case class Problem(pos: Position, msg: String, severityLevel: Int)
-
-abstract class InteractiveReporter extends Reporter {
-
- def compiler: Global
-
- val otherProblems = new ArrayBuffer[Problem]
-
- override def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = try {
- severity.count += 1
- val problems =
- if (compiler eq null) {
- otherProblems
- } else if (pos.isDefined) {
- compiler.getUnit(pos.source) match {
- case Some(unit) =>
- compiler.debugLog(pos.source.file.name + ":" + pos.line + ": " + msg)
- unit.problems
- case None =>
- compiler.debugLog(pos.source.file.name + "[not loaded] :" + pos.line + ": " + msg)
- otherProblems
- }
- } else {
- compiler.debugLog("[no position] :" + msg)
- otherProblems
- }
- problems += Problem(pos, msg, severity.id)
- } catch {
- case ex: UnsupportedOperationException =>
- }
-
- override def reset() {
- super.reset()
- otherProblems.clear()
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
deleted file mode 100644
index 64e050e799..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ /dev/null
@@ -1,191 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import util.InterruptReq
-import scala.reflect.internal.util.{SourceFile, BatchSourceFile}
-import io.{AbstractFile, PlainFile}
-
-import util.EmptyAction
-import scala.reflect.internal.util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition}
-import io.{Pickler, CondPickler}
-import io.Pickler._
-import scala.collection.mutable
-import mutable.ListBuffer
-
-trait Picklers { self: Global =>
-
- lazy val freshRunReq =
- unitPickler
- .wrapped { _ => new FreshRunReq } { x => () }
- .labelled ("FreshRunReq")
- .cond (_.isInstanceOf[FreshRunReq])
-
- lazy val shutdownReq = singletonPickler(ShutdownReq)
-
- def defaultThrowable[T <: Throwable]: CondPickler[T] = javaInstancePickler[T] cond { _ => true }
-
- implicit lazy val throwable: Pickler[Throwable] =
- freshRunReq | shutdownReq | defaultThrowable
-
- implicit def abstractFile: Pickler[AbstractFile] =
- pkl[String]
- .wrapped[AbstractFile] { new PlainFile(_) } { _.path }
- .asClass (classOf[PlainFile])
-
- private val sourceFilesSeen = new mutable.HashMap[AbstractFile, Array[Char]] {
- override def default(key: AbstractFile) = Array()
- }
-
- type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/)
-
- def delta(f: AbstractFile, cs: Array[Char]): Diff = {
- val bs = sourceFilesSeen(f)
- var start = 0
- while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1
- var end = bs.length
- var end2 = cs.length
- while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 }
- sourceFilesSeen(f) = cs
- (start, end, cs.slice(start, end2).mkString(""))
- }
-
- def patch(f: AbstractFile, d: Diff): Array[Char] = {
- val (start, end, replacement) = d
- val patched = sourceFilesSeen(f).patch(start, replacement, end - start)
- sourceFilesSeen(f) = patched
- patched
- }
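delta and patch above implement a common-prefix/common-suffix diff so that only the changed slice of a source file is pickled. A standalone sketch on plain strings, assuming the same (start, end, replacement) triple:

object DeltaPatchDemo extends App {
  type Diff = (Int, Int, String) // (start, end in the old text, replacement)

  def delta(bs: String, cs: String): Diff = {
    var start = 0
    while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1
    var end = bs.length
    var end2 = cs.length
    while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 }
    (start, end, cs.substring(start, end2))
  }

  def patch(old: String, d: Diff): String = {
    val (start, end, replacement) = d
    old.substring(0, start) + replacement + old.substring(end)
  }

  val before = "val x = 1; println(x)"
  val after  = "val x = 42; println(x)"
  val d      = delta(before, after)
  println(d)                         // (8,9,42): only the changed slice is transmitted
  assert(patch(before, d) == after)  // applying the diff reconstructs the new text
}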
-
- implicit lazy val sourceFile: Pickler[SourceFile] =
- (pkl[AbstractFile] ~ pkl[Diff]).wrapped[SourceFile] {
- case f ~ d => new BatchSourceFile(f, patch(f, d))
- } {
- f => f.file ~ delta(f.file, f.content)
- }.asClass (classOf[BatchSourceFile])
-
- lazy val offsetPosition: CondPickler[OffsetPosition] =
- (pkl[SourceFile] ~ pkl[Int])
- .wrapped { case x ~ y => new OffsetPosition(x, y) } { p => p.source ~ p.point }
- .asClass (classOf[OffsetPosition])
-
- lazy val rangePosition: CondPickler[RangePosition] =
- (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
- .wrapped { case source ~ start ~ point ~ end => new RangePosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
- .asClass (classOf[RangePosition])
-
- lazy val transparentPosition: CondPickler[TransparentPosition] =
- (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
- .wrapped { case source ~ start ~ point ~ end => new TransparentPosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
- .asClass (classOf[TransparentPosition])
-
- lazy val noPosition = singletonPickler(NoPosition)
-
- implicit lazy val position: Pickler[Position] = transparentPosition | rangePosition | offsetPosition | noPosition
-
- implicit lazy val namePickler: Pickler[Name] =
- pkl[String] .wrapped[Name] {
- str => if ((str.length > 1) && (str endsWith "!")) newTypeName(str.init) else newTermName(str)
- } {
- name => if (name.isTypeName) name.toString+"!" else name.toString
- }
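A small sketch of the encoding used by namePickler, assuming plain strings in place of Name: a trailing "!" marks a type name, and decoding strips it again.

object NameCodecDemo extends App {
  def encode(name: String, isTypeName: Boolean): String =
    if (isTypeName) name + "!" else name
  def decode(s: String): (String, Boolean) =
    if (s.length > 1 && s.endsWith("!")) (s.init, true) else (s, false)

  println(decode(encode("List", isTypeName = true)))  // (List,true)
  println(decode(encode("map", isTypeName = false)))  // (map,false)
}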
-
- implicit lazy val symPickler: Pickler[Symbol] = {
- def ownerNames(sym: Symbol, buf: ListBuffer[Name]): ListBuffer[Name] = {
- if (!sym.isRoot) {
- ownerNames(sym.owner, buf)
- buf += (if (sym.isModuleClass) sym.sourceModule else sym).name
- if (!sym.isType && !sym.isStable) {
- val sym1 = sym.owner.info.decl(sym.name)
- if (sym1.isOverloaded) {
- val index = sym1.alternatives.indexOf(sym)
- assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives)
- buf += newTermName(index.toString)
- }
- }
- }
- buf
- }
- def makeSymbol(root: Symbol, names: List[Name]): Symbol = names match {
- case List() =>
- root
- case name :: rest =>
- val sym = root.info.decl(name)
- if (sym.isOverloaded) makeSymbol(sym.alternatives(rest.head.toString.toInt), rest.tail)
- else makeSymbol(sym, rest)
- }
- pkl[List[Name]] .wrapped { makeSymbol(rootMirror.RootClass, _) } { ownerNames(_, new ListBuffer).toList }
- }
-
- implicit def workEvent: Pickler[WorkEvent] = {
- (pkl[Int] ~ pkl[Long])
- .wrapped { case id ~ ms => WorkEvent(id, ms) } { w => w.atNode ~ w.atMillis }
- }
-
- implicit def interruptReq: Pickler[InterruptReq] = {
- val emptyIR: InterruptReq = new InterruptReq { type R = Unit; val todo = () => () }
- pkl[Unit] .wrapped { _ => emptyIR } { _ => () }
- }
-
- implicit def reloadItem: CondPickler[ReloadItem] =
- pkl[List[SourceFile]]
- .wrapped { ReloadItem(_, new Response) } { _.sources }
- .asClass (classOf[ReloadItem])
-
- implicit def askTypeAtItem: CondPickler[AskTypeAtItem] =
- pkl[Position]
- .wrapped { new AskTypeAtItem(_, new Response) } { _.pos }
- .asClass (classOf[AskTypeAtItem])
-
- implicit def askTypeItem: CondPickler[AskTypeItem] =
- (pkl[SourceFile] ~ pkl[Boolean])
- .wrapped { case source ~ forceReload => new AskTypeItem(source, forceReload, new Response) } { w => w.source ~ w.forceReload }
- .asClass (classOf[AskTypeItem])
-
- implicit def askTypeCompletionItem: CondPickler[AskTypeCompletionItem] =
- pkl[Position]
- .wrapped { new AskTypeCompletionItem(_, new Response) } { _.pos }
- .asClass (classOf[AskTypeCompletionItem])
-
- implicit def askScopeCompletionItem: CondPickler[AskScopeCompletionItem] =
- pkl[Position]
- .wrapped { new AskScopeCompletionItem(_, new Response) } { _.pos }
- .asClass (classOf[AskScopeCompletionItem])
-
- implicit def askToDoFirstItem: CondPickler[AskToDoFirstItem] =
- pkl[SourceFile]
- .wrapped { new AskToDoFirstItem(_) } { _.source }
- .asClass (classOf[AskToDoFirstItem])
-
- implicit def askLinkPosItem: CondPickler[AskLinkPosItem] =
- (pkl[Symbol] ~ pkl[SourceFile])
- .wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source }
- .asClass (classOf[AskLinkPosItem])
-
- implicit def askDocCommentItem: CondPickler[AskDocCommentItem] =
- (pkl[Symbol] ~ pkl[SourceFile] ~ pkl[Symbol] ~ pkl[List[(Symbol,SourceFile)]])
- .wrapped { case sym ~ source ~ site ~ fragments => new AskDocCommentItem(sym, source, site, fragments, new Response) } { item => item.sym ~ item.source ~ item.site ~ item.fragments }
- .asClass (classOf[AskDocCommentItem])
-
- implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
- pkl[SourceFile]
- .wrapped { source => new AskLoadedTypedItem(source, false, new Response) } { _.source }
- .asClass (classOf[AskLoadedTypedItem])
-
- implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] =
- (pkl[SourceFile] ~ pkl[Boolean])
- .wrapped { case source ~ keepLoaded => new AskParsedEnteredItem(source, keepLoaded, new Response) } { w => w.source ~ w.keepLoaded }
- .asClass (classOf[AskParsedEnteredItem])
-
- implicit def emptyAction: CondPickler[EmptyAction] =
- pkl[Unit]
- .wrapped { _ => new EmptyAction } { _ => () }
- .asClass (classOf[EmptyAction])
-
- implicit def action: Pickler[() => Unit] =
- reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem |
- askToDoFirstItem | askLinkPosItem | askDocCommentItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
deleted file mode 100644
index a2d8e5d49a..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- * @author Iulian Dragos
- */
-package scala.tools.nsc.interactive
-
-/** A presentation compiler thread. This is a lightweight class, delegating most
- * of its functionality to the compiler instance.
- */
-final class PresentationCompilerThread(var compiler: Global, name: String = "")
- extends Thread("Scala Presentation Compiler [" + name + "]") {
-
- /** The presentation compiler loop.
- */
- override def run() {
- compiler.debugLog("starting new runner thread")
- while (compiler ne null) try {
- compiler.checkNoResponsesOutstanding()
- compiler.log.logreplay("wait for more work", { compiler.scheduler.waitForMoreWork(); true })
- compiler.pollForWork(compiler.NoPosition)
- while (compiler.isOutOfDate) {
- try {
- compiler.backgroundCompile()
- } catch {
- case ex: FreshRunReq =>
- compiler.debugLog("fresh run req caught, starting new pass")
- }
- compiler.log.flush()
- }
- } catch {
- case ex @ ShutdownReq =>
- compiler.debugLog("exiting presentation compiler")
- compiler.log.close()
-
- // make sure we don't keep around stale instances
- compiler = null
- case ex: Throwable =>
- compiler.log.flush()
-
- ex match {
- case ex: FreshRunReq =>
- compiler.debugLog("fresh run req caught outside presentation compiler loop; ignored") // This shouldn't be reported
- case _ : Global#ValidateException => // This will have been reported elsewhere
- compiler.debugLog("validate exception caught outside presentation compiler loop; ignored")
- case _ => ex.printStackTrace(); compiler.informIDE("Fatal Error: "+ex)
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
deleted file mode 100644
index 4b64313e1b..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ /dev/null
@@ -1,222 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.concurrent.SyncVar
-import scala.reflect.internal.util._
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.io._
-import scala.tools.nsc.scratchpad.SourceInserter
-import scala.tools.nsc.interpreter.AbstractFileClassLoader
-import java.io.{File, FileWriter}
-
-/** Interface of interactive compiler to a client such as an IDE
- */
-object REPL {
-
- val versionMsg = "Scala compiler " +
- Properties.versionString + " -- " +
- Properties.copyrightString
-
- val prompt = "> "
-
- var reporter: ConsoleReporter = _
-
- private def replError(msg: String) {
- reporter.error(/*new Position */FakePos("scalac"),
- msg + "\n scalac -help gives more information")
- }
-
- def process(args: Array[String]) {
- val settings = new Settings(replError)
- reporter = new ConsoleReporter(settings)
- val command = new CompilerCommand(args.toList, settings)
- if (command.settings.version.value)
- reporter.echo(versionMsg)
- else {
- try {
- object compiler extends Global(command.settings, reporter) {
-// printTypings = true
- }
- if (reporter.hasErrors) {
- reporter.flush()
- return
- }
- if (command.shouldStopWithInfo) {
- reporter.echo(command.getInfoMessage(compiler))
- } else {
- run(compiler)
- }
- } catch {
- case ex @ FatalError(msg) =>
- if (true || command.settings.debug.value) // !!!
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
- }
- }
- }
-
- def main(args: Array[String]) {
- process(args)
- /*sys.*/exit(if (reporter.hasErrors) 1 else 0)// Don't use sys yet as this has to run on 2.8.2 also.
- }
-
- def loop(action: (String) => Unit) {
- Console.print(prompt)
- try {
- val line = Console.readLine
- if (line.length() > 0) {
- action(line)
- }
- loop(action)
- }
- catch {
- case _: java.io.EOFException => //nop
- }
- }
-
- /** Commands:
- *
- * reload file1 ... fileN
- * typeat file off1 off2?
- * complete file off1 off2?
- */
- def run(comp: Global) {
- val reloadResult = new Response[Unit]
- val typeatResult = new Response[comp.Tree]
- val completeResult = new Response[List[comp.Member]]
- val typedResult = new Response[comp.Tree]
- val structureResult = new Response[comp.Tree]
- @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- val instrumentedResult = new Response[(String, Array[Char])]
-
- def makePos(file: String, off1: String, off2: String) = {
- val source = toSourceFile(file)
- comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt)
- }
-
- def doTypeAt(pos: Position) {
- comp.askTypeAt(pos, typeatResult)
- show(typeatResult)
- }
-
- def doComplete(pos: Position) {
- comp.askTypeCompletion(pos, completeResult)
- show(completeResult)
- }
-
- def doStructure(file: String) {
- comp.askParsedEntered(toSourceFile(file), false, structureResult)
- show(structureResult)
- }
-
- /** Write instrumented source file to disk.
- * @param iFullName The full name of the first top-level object in source
- * @param iContents An Array[Char] containing the instrumented source
- * @return The name of the instrumented source file
- */
- @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- def writeInstrumented(iFullName: String, suffix: String, iContents: Array[Char]): String = {
- val iSimpleName = iFullName drop ((iFullName lastIndexOf '.') + 1)
- val iSourceName = iSimpleName + suffix
- val ifile = new FileWriter(iSourceName)
- ifile.write(iContents)
- ifile.close()
- iSourceName
- }
-
- /** The method for implementing worksheet functionality.
- * @param arguments a file name, followed by optional command line arguments that are passed
- * to the compiler that processes the instrumented source.
- * @param line A line number that controls up to which line results should be produced.
- * If line = -1, results are produced for all expressions in the worksheet.
- * @return The generated file content containing original source in the left column
- * and outputs in the right column, or None if the presentation compiler
- * does not respond to askInstrumented.
- */
- @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- def instrument(arguments: List[String], line: Int): Option[(String, String)] = {
- val source = toSourceFile(arguments.head)
- // strip right hand side comment column and any trailing spaces from all lines
- val strippedContents = SourceInserter.stripRight(source.content)
- val strippedSource = new BatchSourceFile(source.file, strippedContents)
- println("stripped source = "+strippedSource+":"+strippedContents.mkString)
- comp.askReload(List(strippedSource), reloadResult)
- comp.askInstrumented(strippedSource, line, instrumentedResult)
- using(instrumentedResult) {
- case (iFullName, iContents) =>
- println(s"instrumented source $iFullName = ${iContents.mkString}")
- val iSourceName = writeInstrumented(iFullName, "$instrumented.scala", iContents)
- val sSourceName = writeInstrumented(iFullName, "$stripped.scala", strippedContents)
- (iSourceName, sSourceName)
-/*
- * val vdirOpt = compileInstrumented(iSourceName, arguments.tail)
- runInstrumented(vdirOpt, iFullName, strippedSource.content)
- */
- }
- }
-
- loop { line =>
- (line split " ").toList match {
- case "reload" :: args =>
- comp.askReload(args map toSourceFile, reloadResult)
- show(reloadResult)
- case "reloadAndAskType" :: file :: millis :: Nil =>
- comp.askReload(List(toSourceFile(file)), reloadResult)
- Thread.sleep(millis.toInt)
- println("ask type now")
- comp.askLoadedTyped(toSourceFile(file), keepLoaded = true, typedResult)
- typedResult.get
- case List("typeat", file, off1, off2) =>
- doTypeAt(makePos(file, off1, off2))
- case List("typeat", file, off1) =>
- doTypeAt(makePos(file, off1, off1))
- case List("complete", file, off1, off2) =>
- doComplete(makePos(file, off1, off2))
- case List("complete", file, off1) =>
- doComplete(makePos(file, off1, off1))
- case "instrument" :: arguments =>
- println(instrument(arguments, -1))
- case "instrumentTo" :: line :: arguments =>
- println(instrument(arguments, line.toInt))
- case List("quit") =>
- comp.askShutdown()
- exit(1) // Don't use sys yet as this has to run on 2.8.2 also.
- case List("structure", file) =>
- doStructure(file)
- case _ =>
- print("""Available commands:
- | reload <file_1> ... <file_n>
- | reloadAndAskType <file> <sleep-ms>
- | typed <file>
- | typeat <file> <start-pos> <end-pos>
- | typeat <file> <pos>
- | complete <file> <start-pos> <end-pos>
- | compile <file> <pos>
- | instrument <file> <arg>*
- | instrumentTo <line-num> <file> <arg>*
- | structure <file>
- | quit
- |""".stripMargin)
- }
- }
- }
-
- def toSourceFile(name: String) = new BatchSourceFile(new PlainFile(new java.io.File(name)))
-
- def using[T, U](svar: Response[T])(op: T => U): Option[U] = {
- val res = svar.get match {
- case Left(result) => Some(op(result))
- case Right(exc) => exc.printStackTrace; println("ERROR: "+exc); None
- }
- svar.clear()
- res
- }
-
- def show[T](svar: Response[T]) = using(svar)(res => println("==> "+res))
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
deleted file mode 100644
index b95f1fa7ca..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ /dev/null
@@ -1,285 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import ast.Trees
-import ast.Positions
-import scala.reflect.internal.util.{SourceFile, Position, RangePosition, NoPosition}
-import scala.tools.nsc.util.WorkScheduler
-import scala.collection.mutable.ListBuffer
-
-/** Handling range positions
- * atPos, the main method in this trait, will add positions to a tree,
- * and will ensure the following properties:
- *
- * 1. All nodes between the root of the tree and nodes that already have positions
- * will be assigned positions.
- * 2. No node which already has a position will be assigned a different range; however
- * a RangePosition might become a TransparentPosition.
- * 3. The position of each assigned node includes the positions of each of its children.
- * 4. The positions of all solid descendants of children of an assigned node
- * are mutually non-overlapping.
- *
- * Here, the solid descendants of a node are:
- *
- * if the node has a TransparentPosition, the solid descendants of all its children;
- * otherwise, the singleton consisting of the node itself.
- */
-trait RangePositions extends Trees with Positions {
-self: scala.tools.nsc.Global =>
-
- case class Range(pos: Position, tree: Tree) {
- def isFree = tree == EmptyTree
- }
-
- override def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
- new RangePosition(source, start, point, end)
-
- /** A position that wraps a set of trees.
- * The point of the wrapping position is the point of the default position.
- * If some of the trees are ranges, returns a range position enclosing all ranges;
- * otherwise returns the default position, either focused or not.
- */
- override def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = {
- val ranged = trees filter (_.pos.isRange)
- if (ranged.isEmpty) if (focus) default.focus else default
- else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max)
- }
-
- /** A position that wraps a non-empty set of trees.
- * The point of the wrapping position is the point of the first tree's position.
- * If some of the trees are ranges, returns a range position enclosing all ranges;
- * otherwise returns the first tree's position.
- */
- override def wrappingPos(trees: List[Tree]): Position = {
- val headpos = trees.head.pos
- if (headpos.isDefined) wrappingPos(headpos, trees) else headpos
- }
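A plain-interval sketch of wrappingPos, assuming half-open ranges and a hypothetical Span type: the wrapper runs from the smallest child start to the largest child end, or falls back to the default when no child has a range.

object WrappingDemo extends App {
  case class Span(start: Int, end: Int) // half-open range, like a RangePosition

  def wrapping(default: Span, children: List[Span]): Span =
    if (children.isEmpty) default
    else Span(children.map(_.start).min, children.map(_.end).max)

  println(wrapping(Span(10, 10), List(Span(3, 7), Span(12, 20)))) // Span(3,20)
}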
-
- // -------------- ensuring no overlaps -------------------------------
-
- /** Ensure that the given tree has no positions that overlap with
- * any of the positions of `others`. This is done by
- * shortening the range, assigning TransparentPositions
- * to some of the nodes in `tree` or focusing on the position.
- */
- override def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {
- def isOverlapping(pos: Position) =
- pos.isRange && (others exists (pos overlaps _.pos))
- if (isOverlapping(tree.pos)) {
- val children = tree.children
- children foreach (ensureNonOverlapping(_, others, focus))
- if (tree.pos.isOpaqueRange) {
- val wpos = wrappingPos(tree.pos, children, focus)
- tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos)
- }
- }
- }
-
- def solidDescendants(tree: Tree): List[Tree] =
- if (tree.pos.isTransparent) tree.children flatMap solidDescendants
- else List(tree)
-
- /** A free range from `lo` to `hi` */
- private def free(lo: Int, hi: Int): Range =
- Range(new RangePosition(null, lo, lo, hi), EmptyTree)
-
- /** The maximal free range */
- private lazy val maxFree: Range = free(0, Int.MaxValue)
-
- /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */
- private def maybeFree(lo: Int, hi: Int) =
- if (lo < hi) List(free(lo, hi))
- else List()
-
- /** Insert the position of tree `t` into ranges `rs` if possible;
- * otherwise add conflicting trees to `conflicting`.
- */
- private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match {
- case List() =>
- assert(conflicting.nonEmpty)
- rs
- case r :: rs1 =>
- assert(!t.pos.isTransparent)
- if (r.isFree && (r.pos includes t.pos)) {
-// println("subdividing "+r+"/"+t.pos)
- maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1
- } else {
- if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree
- r :: insert(rs1, t, conflicting)
- }
- }
-
- /** Replace elem `t` of `ts` by `replacement` list. */
- private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] =
- if (ts.head == t) replacement ::: ts.tail
- else ts.head :: replace(ts.tail, t, replacement)
-
- /** Does the given list of trees have mutually non-overlapping positions?
- * pre: None of the trees is transparent
- */
- def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = {
- var ranges = List(maxFree)
- for (ct <- cts) {
- if (ct.pos.isOpaqueRange) {
- val conflicting = new ListBuffer[Tree]
- ranges = insert(ranges, ct, conflicting)
- if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct))
- }
- }
- List()
- }
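A standalone sketch of the overlap test that findOverlapping performs, assuming half-open intervals in place of opaque range positions: adjacent intervals merely touch and are not reported, genuinely overlapping pairs are.

object OverlapDemo extends App {
  case class Iv(start: Int, end: Int) { // half-open interval
    def overlaps(o: Iv): Boolean = start < o.end && o.start < end
  }

  def findOverlapping(ivs: List[Iv]): List[(Iv, Iv)] =
    for {
      (a, i) <- ivs.zipWithIndex
      b      <- ivs.drop(i + 1)
      if a overlaps b
    } yield (a, b)

  // Iv(3,7) overlaps both neighbours; Iv(0,5) and Iv(5,9) only touch and do not overlap.
  println(findOverlapping(List(Iv(0, 5), Iv(5, 9), Iv(3, 7))))
}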
-
- // -------------- setting positions -------------------------------
-
- /** Set position of all children of a node
- * @param pos A target position.
- * Uses the point of the position as the point of all positions it assigns.
- * Uses the start of this position as an offset position for unpositioned trees
- * without children.
- * @param trees The children to position. All children must be positionable.
- */
- private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try {
- for (tree <- trees) {
- if (!tree.isEmpty && tree.pos == NoPosition) {
- val children = tree.children
- if (children.isEmpty) {
- tree setPos pos.focus
- } else {
- setChildrenPos(pos, children)
- tree setPos wrappingPos(pos, children)
- }
- }
- }
- } catch {
- case ex: Exception =>
- println("error while set children pos "+pos+" of "+trees)
- throw ex
- }
-
- /** Position a tree.
- * This means: Set position of a node and position all its unpositioned children.
- */
- override def atPos[T <: Tree](pos: Position)(tree: T): T = {
- if (pos.isOpaqueRange) {
- if (!tree.isEmpty && tree.pos == NoPosition) {
- tree.setPos(pos)
- val children = tree.children
- if (children.nonEmpty) {
- if (children.tail.isEmpty) atPos(pos)(children.head)
- else setChildrenPos(pos, children)
- }
- }
- tree
- } else {
- super.atPos(pos)(tree)
- }
- }
-
- // ---------------- Validating positions ----------------------------------
-
- override def validatePositions(tree: Tree) {
- def reportTree(prefix : String, tree : Tree) {
- val source = if (tree.pos.isDefined) tree.pos.source else ""
- inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
- inform("")
- inform(treeStatus(tree))
- inform("")
- }
-
- def positionError(msg: String)(body : => Unit) {
- inform("======= Position error\n" + msg)
- body
- inform("\nWhile validating #" + tree.id)
- inform(treeStatus(tree))
- inform("\nChildren:")
- tree.children map (t => " " + treeStatus(t, tree)) foreach inform
- inform("=======")
- throw new ValidateException(msg)
- }
-
- def validate(tree: Tree, encltree: Tree): Unit = {
-
- if (!tree.isEmpty) {
- if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value))
- println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
-
- if (!tree.pos.isDefined)
- positionError("Unpositioned tree #"+tree.id) {
- inform("%15s %s".format("unpositioned", treeStatus(tree, encltree)))
- inform("%15s %s".format("enclosing", treeStatus(encltree)))
- encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree))))
- }
- if (tree.pos.isRange) {
- if (!encltree.pos.isRange)
- positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
- reportTree("Enclosing", encltree)
- reportTree("Enclosed", tree)
- }
- if (!(encltree.pos includes tree.pos))
- positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
- reportTree("Enclosing", encltree)
- reportTree("Enclosed", tree)
- }
-
- findOverlapping(tree.children flatMap solidDescendants) match {
- case List() => ;
- case xs => {
- positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
- reportTree("Ancestor", tree)
- for((x, y) <- xs) {
- reportTree("First overlapping", x)
- reportTree("Second overlapping", y)
- }
- }
- }
- }
- }
- for (ct <- tree.children flatMap solidDescendants) validate(ct, tree)
- }
- }
-
- if (phase.id <= currentRun.typerPhase.id)
- validate(tree, tree)
- }
-
- class ValidateException(msg : String) extends Exception(msg)
-
- // ---------------- Locating trees ----------------------------------
-
- /** A locator for trees with given positions.
- * Given a position `pos`, locator.apply returns
- * the smallest tree that encloses `pos`.
- */
- class Locator(pos: Position) extends Traverser {
- var last: Tree = _
- def locateIn(root: Tree): Tree = {
- this.last = EmptyTree
- traverse(root)
- this.last
- }
- protected def isEligible(t: Tree) = !t.pos.isTransparent
- override def traverse(t: Tree) {
- t match {
- case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) =>
- traverse(tt.original)
- case _ =>
- if (t.pos includes pos) {
- if (isEligible(t)) last = t
- super.traverse(t)
- } else t match {
- case mdef: MemberDef =>
- traverseTrees(mdef.mods.annotations)
- case _ =>
- }
- }
- }
- }
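A plain-interval sketch of the Locator above, assuming a hypothetical Node type: the deepest node whose range contains the position wins, because children are consulted before the enclosing node is accepted.

object LocatorDemo extends App {
  case class Node(start: Int, end: Int, children: List[Node] = Nil)

  // the first child whose range contains `pos` is followed; otherwise the node itself wins
  def locate(pos: Int, root: Node): Option[Node] =
    if (pos < root.start || pos >= root.end) None
    else Some(root.children.flatMap(locate(pos, _)).headOption.getOrElse(root))

  val tree = Node(0, 100, List(Node(10, 40, List(Node(12, 20))), Node(60, 90)))
  println(locate(15, tree)) // Some(Node(12,20,List()))
}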
-
- class TypedLocator(pos: Position) extends Locator(pos) {
- override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
deleted file mode 100644
index b2ef45a7d8..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ /dev/null
@@ -1,355 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Iulian Dragos
- * @author Hubert Plocinicak
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.util.control.Breaks._
-import scala.tools.nsc.symtab.Flags
-
-import dependencies._
-import scala.reflect.internal.util.FakePos
-import util.ClassPath
-import io.AbstractFile
-import scala.tools.util.PathResolver
-
-/** A more refined build manager, based on change sets. For each
- * updated source file, it computes the set of changes to its
- * definitions, then checks all dependent units to see if the
- * changes require a compilation. It repeats this process until
- * a fixpoint is reached.
- */
-@deprecated("Use sbt incremental compilation mechanism", "2.10.0")
-class RefinedBuildManager(val settings: Settings) extends Changes with BuildManager {
-
- class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) {
-
- def this(settings: Settings) =
- this(settings, new ConsoleReporter(settings))
-
- override def computeInternalPhases() {
- super.computeInternalPhases
- phasesSet += dependencyAnalysis
- }
- lazy val _classpath = new NoSourcePathPathResolver(settings).result
- override def classPath = _classpath.asInstanceOf[ClassPath[platform.BinaryRepr]]
- // See discussion in JavaPlatForm for why we need a cast here.
-
- def newRun() = new Run()
- }
-
- class NoSourcePathPathResolver(settings: Settings) extends PathResolver(settings) {
- override def containers = Calculated.basis.dropRight(1).flatten.distinct
- }
-
- protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
-
- val compiler = newCompiler(settings)
- import compiler.{ Symbol, Type, beforeErasure }
- import compiler.dependencyAnalysis.Inherited
-
- private case class SymWithHistory(sym: Symbol, befErasure: Type)
-
- /** Managed source files. */
- private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
-
- private val definitions: mutable.Map[AbstractFile, List[SymWithHistory]] =
- new mutable.HashMap[AbstractFile, List[SymWithHistory]] {
- override def default(key: AbstractFile) = Nil
- }
-
- /** External references used by source file. */
- private var references: mutable.Map[AbstractFile, immutable.Set[String]] = _
-
- /** External references for inherited members */
- private var inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] = _
-
- /** Reverse of definitions, used for caching */
- private var classes: mutable.Map[String, AbstractFile] =
- new mutable.HashMap[String, AbstractFile] {
- override def default(key: String) = null
- }
-
- /** Add the given source files to the managed build process. */
- def addSourceFiles(files: Set[AbstractFile]) {
- sources ++= files
- update(files)
- }
-
- /** Remove the given files from the managed build process. */
- def removeFiles(files: Set[AbstractFile]) {
- sources --= files
- deleteClassfiles(files)
- update(invalidatedByRemove(files))
- }
-
- /** Return the set of invalidated files caused by removing the given files.
- */
- private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
- val changes = new mutable.HashMap[Symbol, List[Change]]
- for (f <- files; SymWithHistory(sym, _) <- definitions(f))
- changes += sym -> List(Removed(Class(sym.fullName)))
- invalidated(files, changes)
- }
-
- def update(added: Set[AbstractFile], removed: Set[AbstractFile]) {
- sources --= removed
- deleteClassfiles(removed)
- update(added ++ invalidatedByRemove(removed))
- }
-
- /** The given files have been modified by the user. Recompile
- * them and all files that depend on them. Only files that
- * have been previously added as source files are recompiled.
- * Files that were already compiled are taken out from the result
- * of the dependency analysis.
- */
- private def update(files: Set[AbstractFile]) = {
- val coll: mutable.Map[AbstractFile, immutable.Set[AbstractFile]] =
- mutable.HashMap[AbstractFile, immutable.Set[AbstractFile]]()
- compiler.reporter.reset()
-
- // See if we really have corresponding symbols, not just those
- // which share the name
- def isCorrespondingSym(from: Symbol, to: Symbol): Boolean =
- (from.hasFlag(Flags.TRAIT) == to.hasFlag(Flags.TRAIT)) && // has to run in 2.8, so no hasTraitFlag
- (from.hasFlag(Flags.MODULE) == to.hasFlag(Flags.MODULE))
-
- // For testing purposes only, order irrelevant for compilation
- def toStringSet(set: Set[AbstractFile]): String =
- set.toList sortBy (_.name) mkString("Set(", ", ", ")")
-
- def update0(files: Set[AbstractFile]): Unit = if (!files.isEmpty) {
- deleteClassfiles(files)
- val run = compiler.newRun()
- if (settings.Ybuildmanagerdebug.value)
- compiler.inform("compiling " + toStringSet(files))
- buildingFiles(files)
-
- run.compileFiles(files.toList)
- if (compiler.reporter.hasErrors) {
- return
- }
-
- // Deterministic behaviour required by partest
- val changesOf = new mutable.HashMap[Symbol, List[Change]] {
- override def toString: String = {
- val changesOrdered =
- toList.map(e => {
- e._1.toString + " -> " +
- e._2.sortBy(_.toString).mkString("List(", ", ", ")")
- })
- changesOrdered.sorted.mkString("Map(", ", ", ")")
- }
- }
- val additionalDefs: mutable.HashSet[AbstractFile] = mutable.HashSet.empty
-
- val defs = compiler.dependencyAnalysis.definitions
- for (src <- files) {
- if (definitions(src).isEmpty)
- additionalDefs ++= compiler.dependencyAnalysis.
- dependencies.dependentFiles(1, mutable.Set(src))
- else {
- val syms = defs(src)
- for (sym <- syms) {
- definitions(src).find(
- s => (s.sym.fullName == sym.fullName) &&
- isCorrespondingSym(s.sym, sym)) match {
- case Some(SymWithHistory(oldSym, info)) =>
- val changes = changeSet(oldSym.info, sym)
- val changesErasure = beforeErasure(changeSet(info, sym))
-
- changesOf(oldSym) = (changes ++ changesErasure).distinct
- case _ =>
- // a new top level definition
- changesOf(sym) = sym.parentSymbols filter (_.isSealed) map (p =>
- changeChangeSet(p, sym+" extends a sealed "+p))
- }
- }
- // Create a change for the top level classes that were removed
- val removed = definitions(src) filterNot ((s:SymWithHistory) =>
- syms.find(_.fullName == (s.sym.fullName)) != None)
- for (s <- removed) {
- changesOf(s.sym) = List(removeChangeSet(s.sym))
- }
- }
- }
- if (settings.Ybuildmanagerdebug.value)
- compiler.inform("Changes: " + changesOf)
- updateDefinitions(files)
- val invalid = invalidated(files, changesOf, additionalDefs)
- update0(checkCycles(invalid, files, coll))
- }
-
- update0(files)
- // remove the current run in order to save some memory
- compiler.dropRun()
- }
-
- // Attempt to break cyclic reference dependencies as soon as possible and reduce
- // the number of compilations to a minimum without overly coarse-grained rules.
- private def checkCycles(files: Set[AbstractFile], initial: Set[AbstractFile],
- collect: mutable.Map[AbstractFile, immutable.Set[AbstractFile]]):
- Set[AbstractFile] = {
- def followChain(set: Set[AbstractFile], rest: immutable.Set[AbstractFile]):
- immutable.Set[AbstractFile] = {
- val deps:Set[AbstractFile] = set.flatMap(
- s => collect.get(s) match {
- case Some(x) => x
- case _ => Set[AbstractFile]()
- })
- val newDeps = deps -- rest
- if (newDeps.isEmpty) rest else followChain(newDeps, rest ++ newDeps)
- }
- var res:Set[AbstractFile] = mutable.Set()
- files.foreach( f =>
- if (collect contains f) {
- val chain = followChain(Set(f), immutable.Set()) ++ files
- chain.foreach((fc: AbstractFile) => collect += fc -> chain)
- res ++= chain
- } else
- res += f
- )
-
- initial.foreach((f: AbstractFile) => collect += (f -> (collect.getOrElse(f, immutable.Set()) ++ res)))
- if (res.subsetOf(initial)) Set() else res
- }
-
- /** Return the set of source files that are invalidated by the given changes. */
- def invalidated(files: Set[AbstractFile], changesOf: scala.collection.Map[Symbol, List[Change]],
- processed: Set[AbstractFile] = Set.empty):
- Set[AbstractFile] = {
- val buf = new mutable.HashSet[AbstractFile]
- val newChangesOf = new mutable.HashMap[Symbol, List[Change]]
- var directDeps =
- compiler.dependencyAnalysis.dependencies.dependentFiles(1, files)
-
- def invalidate(file: AbstractFile, reason: String, change: Change) = {
- if (settings.Ybuildmanagerdebug.value)
- compiler.inform("invalidate " + file + " because " + reason + " [" + change + "]")
- buf += file
- directDeps -= file
- for (syms <- definitions(file)) // fixes #2557
- newChangesOf(syms.sym) = List(change, parentChangeSet(syms.sym))
- break
- }
-
- for ((oldSym, changes) <- changesOf; change <- changes) {
- def checkParents(cls: Symbol, file: AbstractFile) {
- val parentChange = cls.parentSymbols exists (_.fullName == oldSym.fullName)
- // if (settings.buildmanagerdebug.value)
- // compiler.inform("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
- change match {
- case Changed(Class(_)) if parentChange =>
- invalidate(file, "parents have changed", change)
-
- case Changed(Definition(_)) if parentChange =>
- invalidate(file, "inherited method changed", change)
-
- case Added(Definition(_)) if parentChange =>
- invalidate(file, "inherited new method", change)
-
- case Removed(Definition(_)) if parentChange =>
- invalidate(file, "inherited method removed", change)
-
- case _ => ()
- }
- }
-
- def checkInterface(cls: Symbol, file: AbstractFile) {
- change match {
- case Added(Definition(name)) =>
- if (cls.info.decls.iterator.exists(_.fullName == name))
- invalidate(file, "of new method with existing name", change)
- case Changed(Class(name)) =>
- if (cls.info.typeSymbol.fullName == name)
- invalidate(file, "self type changed", change)
- case _ =>
- ()
- }
- }
-
- def checkReferences(file: AbstractFile) {
- //if (settings.buildmanagerdebug.value)
- // compiler.inform(file + ":" + references(file))
- val refs = references(file)
- if (refs.isEmpty)
- invalidate(file, "it is a direct dependency and we don't yet have finer-grained dependency information", change)
- else {
- change match {
- case Removed(Definition(name)) if refs(name) =>
- invalidate(file, "it references deleted definition", change)
- case Removed(Class(name)) if (refs(name)) =>
- invalidate(file, "it references deleted class", change)
- case Changed(Class(name)) if (refs(name)) =>
- invalidate(file, "it references changed class", change)
- case Changed(Definition(name)) if (refs(name)) =>
- invalidate(file, "it references changed definition", change)
- case Added(Definition(name)) if (refs(name)) =>
- invalidate(file, "it references added definition", change)
- case _ => ()
- }
- }
- }
-
- def checkInheritedReferences(file: AbstractFile) {
- val refs = inherited(file)
- if (!refs.isEmpty)
- change match {
- case ParentChanged(Class(name)) =>
- for (Inherited(q, member) <- refs.find(p => (p != null && p.qualifier == name));
- classFile <- classes.get(q);
- defs <- definitions.get(classFile);
- s <- defs.find(p => p.sym.fullName == q)
- if ((s.sym).tpe.nonPrivateMember(member) == compiler.NoSymbol))
- invalidate(file, "it references invalid (no longer inherited) definition", change)
- ()
- case _ => ()
- }
- }
-
- for (file <- directDeps) {
- breakable {
- for (cls <- definitions(file)) checkParents(cls.sym, file)
- for (cls <- definitions(file)) checkInterface(cls.sym, file)
- checkReferences(file)
- checkInheritedReferences(file)
- }
- }
- }
- if (buf.isEmpty)
- processed
- else
- invalidated(buf.clone() --= processed, newChangesOf, processed ++ buf)
- }
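A minimal sketch of the invalidation fixpoint described in the class comment, assuming a hypothetical string-keyed reverse-dependency map: starting from the changed files, dependents are added until nothing new is invalidated.

object FixpointDemo extends App {
  // hypothetical reverse-dependency map: an entry f -> gs means the files gs depend on f
  val dependents = Map("A.scala" -> Set("B.scala"), "B.scala" -> Set("C.scala"))

  def invalidated(changed: Set[String]): Set[String] = {
    val next = changed ++ changed.flatMap(f => dependents.getOrElse(f, Set.empty[String]))
    if (next == changed) changed else invalidated(next)
  }

  println(invalidated(Set("A.scala"))) // Set(A.scala, B.scala, C.scala)
}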
-
- /** Update the map of definitions per source file */
- private def updateDefinitions(files: Set[AbstractFile]) {
- for (src <- files; localDefs = compiler.dependencyAnalysis.definitions(src)) {
- definitions(src) = (localDefs map (s => {
- this.classes += s.fullName -> src
- SymWithHistory(s.cloneSymbol, beforeErasure(s.info.cloneInfo(s)))
- }))
- }
- this.references = compiler.dependencyAnalysis.references
- this.inherited = compiler.dependencyAnalysis.inherited
- }
-
- /** Load saved dependency information. */
- def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = {
- val success = compiler.dependencyAnalysis.loadFrom(file, toFile)
- if (success)
- sources ++= compiler.dependencyAnalysis.managedFiles
- success
- }
-
- /** Save dependency information to `file`. */
- def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
- compiler.dependencyAnalysis.dependenciesFile = file
- compiler.dependencyAnalysis.saveDependencies(fromFile)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/Response.scala b/src/compiler/scala/tools/nsc/interactive/Response.scala
deleted file mode 100644
index f36f769ec9..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/Response.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-/** Typical interaction, given a predicate <user-input>, a function <display>,
- * and an exception handler <handle>:
- *
- * val TIMEOUT = 100 // (milliseconds) or something like that
- * val r = new Response()
- * while (!r.isComplete && !r.isCancelled) {
- * if (<user-input>) r.cancel()
- * else r.get(TIMEOUT) match {
- * case Some(Left(data)) => <display>(data)
- * case Some(Right(exc)) => <handle>(exc)
- * case None =>
- * }
- * }
- */
-class Response[T] {
-
- private var data: Option[Either[T, Throwable]] = None
- private var complete = false
- private var cancelled = false
-
- /** Set provisional data, more to come
- */
- def setProvisionally(x: T) = synchronized {
- data = Some(Left(x))
- }
-
- /** Set final data, and mark response as complete.
- */
- def set(x: T) = synchronized {
- data = Some(Left(x))
- complete = true
- notifyAll()
- }
-
- /** Store raised exception in data, and mark response as complete.
- */
- def raise(exc: Throwable) = synchronized {
- data = Some(Right(exc))
- complete = true
- notifyAll()
- }
-
- /** Get final data, waiting as long as necessary.
- * When interrupted, returns Right(InterruptedException).
- */
- def get: Either[T, Throwable] = synchronized {
- while (!complete) {
- try {
- wait()
- } catch {
- case exc: InterruptedException => raise(exc)
- }
- }
- data.get
- }
-
- /** Optionally get data within `timeout` milliseconds.
- * When interrupted will return with Some(Right(InterruptedException))
- * When timeout ends, will return last stored provisional result,
- * or else None if no provisional result was stored.
- */
- def get(timeout: Long): Option[Either[T, Throwable]] = synchronized {
- val start = System.currentTimeMillis
- var current = start
- while (!complete && start + timeout > current) {
- try {
- wait(timeout - (current - start))
- } catch {
- case exc: InterruptedException => raise(exc)
- }
- current = System.currentTimeMillis
- }
- data
- }
-
- /** Whether the final data has been stored
- */
- def isComplete = synchronized { complete }
-
- /** Cancel action computing this response (Only the
- * party that calls get on a response may cancel).
- */
- def cancel() = synchronized { cancelled = true }
-
- /** A cancel request for this response has been issued
- */
- def isCancelled = synchronized { cancelled }
-
- def clear() = synchronized {
- data = None
- complete = false
- cancelled = false
- }
-}
-
-
-
-
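For reference, the polling pattern described in Response's doc comment can be packaged as a small helper. This is an illustrative sketch, not part of the removed sources; `poll`, `handleData`, `handleError`, and the default timeout are assumptions.

    // Sketch: poll a Response until it completes or is cancelled.
    def poll[T](r: Response[T], timeoutMs: Long = 100)
               (handleData: T => Unit, handleError: Throwable => Unit): Unit =
      while (!r.isComplete && !r.isCancelled) {
        r.get(timeoutMs) match {
          case Some(Left(data)) => handleData(data)  // provisional or final data
          case Some(Right(exc)) => handleError(exc)  // the computation raised an exception
          case None             =>                   // timed out, keep polling
        }
      }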
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
deleted file mode 100644
index b83c2cd095..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.reflect.internal.util.{SourceFile, Position, NoPosition}
-import scala.collection.mutable.ArrayBuffer
-
-trait RichCompilationUnits { self: Global =>
-
- /** The status value of a unit that has not yet been loaded */
- final val NotLoaded = -2
-
- /** The status value of a unit that has not yet been typechecked */
- final val JustParsed = -1
-
- /** The status value of a unit that has been partially typechecked */
- final val PartiallyChecked = 0
-
- class RichCompilationUnit(source: SourceFile) extends CompilationUnit(source) {
-
- /** The runid of the latest compiler run that typechecked this unit,
- * or else @see NotLoaded, JustParsed
- */
- var status: Int = NotLoaded
-
- /** Unit has been parsed */
- def isParsed: Boolean = status >= JustParsed
-
- /** Unit has been typechecked, but maybe not in latest runs */
- def isTypeChecked: Boolean = status > JustParsed
-
- /** Unit has been typechecked and is up to date */
- def isUpToDate: Boolean = status >= minRunId
-
- /** the current edit point offset */
- var editPoint: Int = -1
-
- /** The problems reported for this unit */
- val problems = new ArrayBuffer[Problem]
-
- /** The position of a targeted type check
- * If this is different from NoPosition, the type checking
- * will stop once a tree that contains this position range
- * is fully attributed.
- */
- var _targetPos: Position = NoPosition
- override def targetPos: Position = _targetPos
- def targetPos_=(p: Position) { _targetPos = p }
-
- var contexts: Contexts = new Contexts
-
- /** The last fully type-checked body of this unit */
- var lastBody: Tree = EmptyTree
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
deleted file mode 100644
index 7f0265bf4f..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ /dev/null
@@ -1,200 +0,0 @@
-package scala.tools.nsc
-package interactive
-
-import scala.reflect.internal.util.{SourceFile, BatchSourceFile, RangePosition}
-import scala.collection.mutable.ArrayBuffer
-import scala.reflect.internal.Chars.{isLineBreakChar, isWhitespace}
-import ast.parser.Tokens._
-
-@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-trait ScratchPadMaker { self: Global =>
-
- import definitions._
-
- private case class Patch(offset: Int, text: String)
-
- private class Patcher(contents: Array[Char], lex: LexicalStructure, endOffset: Int) extends Traverser {
- var objectName: String = ""
-
- private val patches = new ArrayBuffer[Patch]
- private val toPrint = new ArrayBuffer[String]
- private var skipped = 0
- private var resNum: Int = -1
-
- private def nextRes(): String = {
- resNum += 1
- "res$"+resNum
- }
-
- private def nameType(name: String, tpe: Type): String = {
- // if name ends in symbol character, add a space to separate it from the following ':'
- val pad = if (Character.isLetter(name.last) || Character.isDigit(name.last)) "" else " "
- name+pad+": "+tpe
- }
-
- private def nameType(sym: Symbol): String = nameType(sym.name.decoded, sym.tpe)
-
- private def literal(str: String) = "\"\"\""+str+"\"\"\""
-
- private val prologue = ";import scala.runtime.WorksheetSupport._; def main(args: Array[String])=$execute{"
-
- private val epilogue = "}"
-
- private def applyPendingPatches(offset: Int) = {
- if (skipped == 0) patches += Patch(offset, prologue)
- for (msg <- toPrint) patches += Patch(offset, ";System.out.println("+msg+")")
- toPrint.clear()
- }
-
- /** The position at which to insert an instrumentation statement in front of a given statement.
- * This is at the latest `stat.pos.start`. But in order not to mess with column numbers
- * in positions, we try to insert it at the end of the previous token instead.
- * Furthermore, `(' tokens have to be skipped because they do not show up
- * in statement range positions.
- */
- private def instrumentPos(start: Int): Int = {
- val (prevToken, prevStart, prevEnd) = lex.locate(start - 1)
- if (prevStart >= start) start
- else if (prevToken == LPAREN) instrumentPos(prevStart)
- else prevEnd
- }
-
- private def addSkip(stat: Tree): Unit = {
- val ipos = instrumentPos(stat.pos.start)
- if (stat.pos.start > skipped) applyPendingPatches(ipos)
- if (stat.pos.start >= endOffset)
- patches += Patch(ipos, ";$stop()")
- var end = stat.pos.end
- if (end > skipped) {
- while (end < contents.length && !isLineBreakChar(contents(end))) end += 1
- patches += Patch(ipos, ";$skip("+(end-skipped)+"); ")
- skipped = end
- }
- }
-
- private def addSandbox(expr: Tree) = {}
-// patches += (Patch(expr.pos.start, "sandbox("), Patch(expr.pos.end, ")"))
-
- private def resultString(prefix: String, expr: String) =
- literal(prefix + " = ") + " + $show(" + expr + ")"
-
- private def traverseStat(stat: Tree) =
- if (stat.pos.isInstanceOf[RangePosition]) {
- stat match {
- case ValDef(_, _, _, rhs) =>
- addSkip(stat)
- if (stat.symbol.isLazy)
- toPrint += literal(nameType(stat.symbol) + " = <lazy>")
- else if (!stat.symbol.isSynthetic) {
- addSandbox(rhs)
- toPrint += resultString(nameType(stat.symbol), stat.symbol.name.toString)
- }
- case DefDef(_, _, _, _, _, _) =>
- addSkip(stat)
- toPrint += literal(nameType(stat.symbol))
- case Annotated(_, arg) =>
- traverse(arg)
- case DocDef(_, defn) =>
- traverse(defn)
- case _ =>
- if (stat.isTerm) {
- addSkip(stat)
- if (stat.tpe.typeSymbol == UnitClass) {
- addSandbox(stat)
- } else {
- val resName = nextRes()
- val dispResName = resName filter ('$' != _)
- val offset = instrumentPos(stat.pos.start)
- patches += Patch(offset, "val " + resName + " = ")
- addSandbox(stat)
- toPrint += resultString(nameType(dispResName, stat.tpe), resName)
- }
- }
- }
- }
-
- override def traverse(tree: Tree): Unit = tree match {
- case PackageDef(_, _) =>
- super.traverse(tree)
- case ModuleDef(_, name, Template(_, _, body)) =>
- val topLevel = objectName.isEmpty
- if (topLevel) {
- objectName = tree.symbol.fullName
- body foreach traverseStat
- if (skipped != 0) { // don't issue prologue and epilogue if there are no instrumented statements
- applyPendingPatches(skipped)
- patches += Patch(skipped, epilogue)
- }
- }
- case _ =>
- }
-
- /** The patched text.
- * @require traverse is run first
- */
- def result: Array[Char] = {
- val reslen = contents.length + (patches map (_.text.length)).sum
- val res = Array.ofDim[Char](reslen)
- var lastOffset = 0
- var from = 0
- var to = 0
- for (Patch(offset, text) <- patches) {
- val delta = offset - lastOffset
- assert(delta >= 0)
- Array.copy(contents, from, res, to, delta)
- from += delta
- to += delta
- lastOffset = offset
- text.copyToArray(res, to)
- to += text.length
- }
- assert(contents.length - from == reslen - to)
- Array.copy(contents, from, res, to, contents.length - from)
- res
- }
- }
-
- class LexicalStructure(source: SourceFile) {
- val token = new ArrayBuffer[Int]
- val startOffset = new ArrayBuffer[Int]
- val endOffset = new ArrayBuffer[Int]
- private val scanner = new syntaxAnalyzer.UnitScanner(new CompilationUnit(source))
- scanner.init()
- while (scanner.token != EOF) {
- startOffset += scanner.offset
- token += scanner.token
- scanner.nextToken
- endOffset += scanner.lastOffset
- }
-
- /** @return token that starts before or at offset, its startOffset, its endOffset
- */
- def locate(offset: Int): (Int, Int, Int) = {
- var lo = 0
- var hi = token.length - 1
- while (lo < hi) {
- val mid = (lo + hi + 1) / 2
- if (startOffset(mid) <= offset) lo = mid
- else hi = mid - 1
- }
- (token(lo), startOffset(lo), endOffset(lo))
- }
- }
-
- /** Compute an instrumented version of a sourcefile.
- * @param source The given sourcefile.
- * @param line The line up to which results should be printed, -1 = whole document.
- * @return A pair consisting of
- * - the fully qualified name of the first top-level object definition in the file,
- * or "" if there are no object definitions.
- * - the text of the instrumented program which, when run,
- * prints its output and all defined values in a comment column.
- */
- protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = {
- val tree = typedTree(source, true)
- val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1)
- val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset)
- patcher.traverse(tree)
- (patcher.objectName, patcher.result)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
deleted file mode 100644
index 465dcaaf1c..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import dependencies._
-
-import scala.reflect.internal.util.FakePos
-import io.AbstractFile
-
-/** A simple build manager, using the default scalac dependency tracker.
- * The transitive closure of all dependent files on a modified file
- * is recompiled at once.
- *
- * It is equivalent to using a resident compiler mode with the
- * '-make:transitive' option.
- */
-class SimpleBuildManager(val settings: Settings) extends BuildManager {
-
- class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) {
-
- def this(settings: Settings) =
- this(settings, new ConsoleReporter(settings))
-
- def newRun() = new Run()
- }
-
- protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
-
- val compiler = newCompiler(settings)
-
- /** Managed source files. */
- private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
-
- /** Add the given source files to the managed build process. */
- def addSourceFiles(files: Set[AbstractFile]) {
- sources ++= files
- update(files)
- }
-
- /** Remove the given files from the managed build process. */
- def removeFiles(files: Set[AbstractFile]) {
- sources --= files
- deleteClassfiles(files)
- update(invalidatedByRemove(files))
- }
-
-
- /** Return the set of invalidated files caused by removing the given files. */
- private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
- val deps = compiler.dependencyAnalysis.dependencies
- deps.dependentFiles(Int.MaxValue, files)
- }
-
- def update(added: Set[AbstractFile], removed: Set[AbstractFile]) {
- sources --= removed
- deleteClassfiles(removed)
- update(added ++ invalidatedByRemove(removed))
- }
-
- /** The given files have been modified by the user. Recompile
- * them and all files that depend on them. Only files that
- * have been previously added as source files are recompiled.
- */
- def update(files: Set[AbstractFile]) {
- deleteClassfiles(files)
-
- val deps = compiler.dependencyAnalysis.dependencies
- val run = compiler.newRun()
- compiler.inform("compiling " + files)
-
- val toCompile =
- (files ++ deps.dependentFiles(Int.MaxValue, files)) intersect sources
-
-
- compiler.inform("Recompiling " +
- (if(settings.debug.value) toCompile.mkString(", ")
- else toCompile.size + " files"))
-
- buildingFiles(toCompile)
-
- run.compileFiles(files.toList)
- }
-
- /** Load saved dependency information. */
- def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = {
- val success = compiler.dependencyAnalysis.loadFrom(file, toFile)
- if (success)
- sources ++= compiler.dependencyAnalysis.managedFiles
- success
- }
-
- /** Save dependency information to `file`. */
- def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
- compiler.dependencyAnalysis.dependenciesFile = file
- compiler.dependencyAnalysis.saveDependencies(fromFile)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
deleted file mode 100644
index 1c722ea3a0..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-package tests
-
-import core._
-
-import java.io.File.pathSeparatorChar
-import java.io.File.separatorChar
-
-import scala.annotation.migration
-import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.SourceFile
-
-import scala.collection.mutable.ListBuffer
-
-/** A base class for writing interactive compiler tests.
- *
- * This class tries to cover common functionality needed when testing the presentation
- * compiler: instantiating source files, reloading, creating positions, instantiating
- * the presentation compiler, random stress testing.
- *
- * By default, this class loads all scala and java classes found under `src/`, going
- * recursively into subfolders. Loaded sources are found in `sourceFiles` (provided by trait `TestResources`).
- * The presentation compiler is available through `compiler`.
- *
- * It is easy to test member completion, type and hyperlinking at a given position. Source
- * files are searched for `TextMarkers`. By default, the completion marker is `/*!*/`, the
- * typedAt marker is `/*?*/` and the hyperlinking marker is `/*#*/`. Place these markers in
- * your source files, and the test framework will automatically pick them up and test the
- * corresponding actions. Sources are reloaded by `askReload(sourceFiles)` (blocking
- * call). All ask operations are placed on the work queue without waiting for each one to
- * complete before asking the next. After all asks, it waits for each response in turn and
- * prints the result. The default timeout is 1 second per operation.
- *
- * To define a custom operation you have to:
- *
- * (1) Define a new marker by extending `TestMarker`
- * (2) Provide an implementation for the operation you want to check by extending `PresentationCompilerTestDef`
- * (3) Add an instance of the class defined in (2) to the set of executed test actions by calling `++` on `InteractiveTest`.
- *
- * Then you can simply use the newly defined `marker` in your test sources and the testing
- * framework will automatically pick it up.
- *
- * @see Check existing tests under test/files/presentation
- *
- * @author Iulian Dragos
- * @author Mirco Dotta
- */
-abstract class InteractiveTest
- extends AskParse
- with AskShutdown
- with AskReload
- with AskLoadedTyped
- with PresentationCompilerInstance
- with CoreTestDefs
- with InteractiveTestSettings { self =>
-
- protected val runRandomTests = false
-
- /** Should askAllSources wait for each ask to finish before issuing the next? */
- override protected val synchronousRequests = true
-
- /** The core set of test actions that are executed during each test run are
- * `CompletionAction`, `TypeAction` and `HyperlinkAction`.
- * Override this member if you need to change the default set of executed test actions.
- */
- protected lazy val testActions: ListBuffer[PresentationCompilerTestDef] = {
- ListBuffer(new TypeCompletionAction(compiler), new ScopeCompletionAction(compiler), new TypeAction(compiler), new HyperlinkAction(compiler))
- }
-
- /** Add new presentation compiler actions to test. Presentation compiler tests
- * need to extend trait `PresentationCompilerTestDef`.
- */
- protected def ++(tests: PresentationCompilerTestDef*) {
- testActions ++= tests
- }
-
- /** Test's entry point */
- def main(args: Array[String]) {
- try execute()
- finally shutdown()
- }
-
- protected def execute(): Unit = {
- loadSources()
- runDefaultTests()
- }
-
- /** Load all sources before executing the test. */
- protected def loadSources() {
- // ask the presentation compiler to track all sources. We do
- // not wait for the file to be entirely typed because we do want
- // to exercise the presentation compiler on scoped type requests.
- askReload(sourceFiles)
- // make sure all sources are parsed before running the test. This
- // is because tests may depend on the sources having been parsed at
- // least once
- askParse(sourceFiles)
- }
-
- /** Run all defined `PresentationCompilerTestDef` */
- protected def runDefaultTests() {
- //TODO: integrate random tests!, i.e.: if (runRandomTests) randomTests(20, sourceFiles)
- testActions.foreach(_.runTest())
- }
-
- /** Perform n random tests with random changes. */
- private def randomTests(n: Int, files: Array[SourceFile]) {
- val tester = new Tester(n, files, settings) {
- override val compiler = self.compiler
- override val reporter = new reporters.StoreReporter
- }
- tester.run()
- }
-
- /** shutdown the presentation compiler. */
- protected def shutdown() {
- askShutdown()
-
- // this is actually needed to force exit on test completion.
- // Note: May be a bug on either the testing framework or (less likely)
- // the presentation compiler
- sys.exit(0)
- }
-}
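Steps (1)-(3) in the InteractiveTest comment above fit together roughly as follows. This is a hypothetical sketch: `DocMarker`, `DocTestDefs`, and `DocAction` are invented names, and the action is assumed to live in a trait mixing in `CoreTestDefs` so that `askAllSources`, `format`, and the implicit `Reporter` are in scope.

    // (1) a new marker to place in test sources
    object DocMarker extends TestMarker("/*doc*/")

    // (2) an action that asks the type at every DocMarker position, mirroring TypeAction
    trait DocTestDefs extends CoreTestDefs {
      import scala.tools.nsc.interactive.Global

      class DocAction(override val compiler: Global)
        extends PresentationCompilerTestDef with AskTypeAt {
        override def runTest() {
          askAllSources(DocMarker) { pos =>
            askTypeAt(pos)
          } { (pos, tree) =>
            reporter.println("[doc] " + format(pos) + ": " + tree)
          }
        }
      }
    }

    // (3) in a concrete InteractiveTest subclass that also mixes in DocTestDefs:
    //   this ++ (new DocAction(compiler))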
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
deleted file mode 100644
index 4d85ab9d88..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-package scala.tools.nsc
-package interactive
-package tests
-
-import java.io.File.pathSeparatorChar
-import java.io.File.separatorChar
-import scala.tools.nsc.interactive.tests.core.PresentationCompilerInstance
-import scala.tools.nsc.io.{File,Path}
-import core.Reporter
-import core.TestSettings
-
-trait InteractiveTestSettings extends TestSettings with PresentationCompilerInstance {
- /** Character delimiter for comments in the .flags file */
- private final val CommentStartDelimiter = "#"
-
- private final val TestOptionsFileExtension = "flags"
-
- /** Prepare the settings object. Load the .flags file and adjust all paths from the
- * Unix-like syntax to the platform specific syntax. This is necessary so that a
- * single .opts file can be used on all platforms.
- *
- * @note Bootclasspath is treated specially. If there is a -bootclasspath option in
- * the file, the 'usejavacp' setting is set to false. This ensures that the
- * bootclasspath takes precedence over the scala-library used to run the current
- * test.
- */
- override protected def prepareSettings(settings: Settings) {
- import java.io.File._
- def adjustPaths(paths: settings.PathSetting*) {
- for (p <- paths if argsString.contains(p.name)) p.value = p.value.map {
- case '/' => separatorChar
- case ':' => pathSeparatorChar
- case c => c
- }
- }
-
- // need this so that the classpath comes from what partest sets up
- // instead of from scala.home
- settings.usejavacp.value = !argsString.contains("-bootclasspath")
-
- // pass any options coming from outside
- settings.processArgumentString(argsString) match {
- case (false, rest) =>
- println("error processing arguments (unprocessed: %s)".format(rest))
- case _ => ()
- }
-
- // Resolve the -sourcepath provided in the .flags file (if any) against the test's base directory
- if(settings.sourcepath.isSetByUser)
- settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path
-
- adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath)
- }
-
- /** If there's a file ending in .flags, read it and parse it for cmd line arguments. */
- protected val argsString = {
- val optsFile = outDir / "%s.%s".format(System.getProperty("partest.testname"), TestOptionsFileExtension)
- val str = try File(optsFile).slurp() catch {
- case e: java.io.IOException => ""
- }
- str.lines.filter(!_.startsWith(CommentStartDelimiter)).mkString(" ")
- }
-
- override protected def printClassPath(implicit reporter: Reporter) {
- reporter.println("\toutDir: %s".format(outDir.path))
- reporter.println("\tbaseDir: %s".format(baseDir.path))
- reporter.println("\targsString: %s".format(argsString))
- super.printClassPath(reporter)
- }
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
deleted file mode 100644
index 26aabbd3e6..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-package tests
-
-import scala.reflect.internal.util._
-import reporters._
-import io.AbstractFile
-import scala.collection.mutable.ArrayBuffer
-
-class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
-
- val reporter = new StoreReporter
- val compiler = new Global(settings, reporter)
-
- def askAndListen[T, U](msg: String, arg: T, op: (T, Response[U]) => Unit) {
- if (settings.verbose.value) print(msg+" "+arg+": ")
- val TIMEOUT = 10 // ms
- val limit = System.currentTimeMillis() + randomDelayMillis
- val res = new Response[U]
- op(arg, res)
- while (!res.isComplete && !res.isCancelled) {
- if (System.currentTimeMillis() > limit) {
- print("c"); res.cancel()
- } else res.get(TIMEOUT) match {
- case Some(Left(t)) =>
- /**/
- if (settings.verbose.value) println(t)
- case Some(Right(ex)) =>
- ex.printStackTrace()
- println(ex)
- case None =>
- }
- }
- }
-
- def askReload(sfs: SourceFile*) = askAndListen("reload", sfs.toList, compiler.askReload)
- def askTypeAt(pos: Position) = askAndListen("type at", pos, compiler.askTypeAt)
- def askTypeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askTypeCompletion)
- def askScopeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askScopeCompletion)
-
- val rand = new java.util.Random()
-
- private def randomInverse(n: Int) = n / (rand.nextInt(n) + 1)
-
- private def randomDecreasing(n: Int) = {
- var r = rand.nextInt((1 to n).sum)
- var limit = n
- var result = 0
- while (r > limit) {
- result += 1
- r -= limit
- limit -= 1
- }
- result
- }
-
- def randomSourceFileIdx() = rand.nextInt(inputs.length)
-
- def randomBatchesPerSourceFile(): Int = randomDecreasing(100)
-
- def randomChangesPerBatch(): Int = randomInverse(50)
-
- def randomPositionIn(sf: SourceFile) = rand.nextInt(sf.content.length)
-
- def randomNumChars() = randomInverse(100)
-
- def randomDelayMillis = randomInverse(10000)
-
- class Change(sfidx: Int, start: Int, nchars: Int, toLeft: Boolean) {
-
- private var pos = start
- private var deleted: List[Char] = List()
-
- override def toString =
- "In "+inputs(sfidx)+" at "+start+" take "+nchars+" to "+
- (if (toLeft) "left" else "right")
-
- def deleteOne() {
- val sf = inputs(sfidx)
- deleted = sf.content(pos) :: deleted
- val sf1 = new BatchSourceFile(sf.file, sf.content.take(pos) ++ sf.content.drop(pos + 1))
- inputs(sfidx) = sf1
- askReload(sf1)
- }
-
- def deleteAll() {
- print("/"+nchars)
- for (i <- 0 until nchars) {
- if (toLeft) {
- if (pos > 0 && pos <= inputs(sfidx).length) {
- pos -= 1
- deleteOne()
- }
- } else {
- if (pos < inputs(sfidx).length) {
- deleteOne()
- }
- }
- }
- }
-
- def insertAll() {
- for (chr <- if (toLeft) deleted else deleted.reverse) {
- val sf = inputs(sfidx)
- val (pre, post) = sf./**/content splitAt pos
- pos += 1
- val sf1 = new BatchSourceFile(sf.file, pre ++ (chr +: post))
- inputs(sfidx) = sf1
- askReload(sf1)
- }
- }
- }
-
- val testComment = "/**/"
-
- def testFileChanges(sfidx: Int) = {
- lazy val testPositions: Seq[Int] = {
- val sf = inputs(sfidx)
- val buf = new ArrayBuffer[Int]
- var pos = sf.content.indexOfSlice(testComment)
- while (pos > 0) {
- buf += pos
- pos = sf.content.indexOfSlice(testComment, pos + 1)
- }
- buf
- }
- def otherTest() {
- if (testPositions.nonEmpty) {
- val pos = new OffsetPosition(inputs(sfidx), rand.nextInt(testPositions.length))
- rand.nextInt(3) match {
- case 0 => askTypeAt(pos)
- case 1 => askTypeCompletion(pos)
- case 2 => askScopeCompletion(pos)
- }
- }
- }
- for (i <- 0 until randomBatchesPerSourceFile()) {
- val changes = Vector.fill(/**/randomChangesPerBatch()) {
- /**/
- new Change(sfidx, randomPositionIn(inputs(sfidx)), randomNumChars(), rand.nextBoolean())
- }
- doTest(sfidx, changes, testPositions, otherTest) match {
- case Some(errortrace) =>
- println(errortrace)
- minimize(errortrace)
- case None =>
- }
- }
- }
-
- def doTest(sfidx: Int, changes: Seq[Change], testPositions: Seq[Int], otherTest: () => Unit): Option[ErrorTrace] = {
- print("new round with "+changes.length+" changes:")
- changes foreach (_.deleteAll())
- otherTest()
- def errorCount() = compiler.ask(() => reporter.ERROR.count)
-// println("\nhalf test round: "+errorCount())
- changes.view.reverse foreach (_.insertAll())
- otherTest()
- println("done test round: "+errorCount())
- if (errorCount() != 0)
- Some(ErrorTrace(sfidx, changes, reporter.infos, inputs(sfidx).content))
- else
- None
- }
-
- case class ErrorTrace(
- sfidx: Int, changes: Seq[Change], infos: scala.collection.Set[reporter.Info], content: Array[Char]) {
- override def toString =
- "Sourcefile: "+inputs(sfidx)+
- "\nChanges:\n "+changes.mkString("\n ")+
- "\nErrors:\n "+infos.mkString("\n ")+
- "\nContents:\n"+content.mkString
- }
-
- def minimize(etrace: ErrorTrace) {}
-
- /**/
- def run() {
- askReload(inputs: _*)
- for (i <- 0 until ntests)
- testFileChanges(randomSourceFileIdx())
- }
-}
-
-/* A program to do presentation compiler stress tests.
- * Usage:
- *
- * scala scala.tools.nsc.interactive.tests.Tester <n> <files>
- *
- * where <n> is the number of tests to be run and <files> is the set of files to test.
- * This will do random deletions and re-insertions in any of the files.
- * At places where an empty comment /**/ appears it will in addition randomly
- * do ask-types, type-completions, or scope-completions.
- */
-object Tester {
- def main(args: Array[String]) {
- val settings = new Settings()
- val (_, filenames) = settings.processArguments(args.toList.tail, true)
- println("filenames = "+filenames)
- val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile)
- new Tester(args(0).toInt, files, settings).run()
- sys.exit(0)
- }
-}
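Following the usage comment above, an illustrative invocation (the test count and file names are assumptions):

    scala scala.tools.nsc.interactive.tests.Tester 20 src/A.scala src/B.scala

This runs 20 rounds of random deletions and re-insertions over the two listed sources, issuing ask-types and completions wherever the empty comment /**/ occurs.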
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
deleted file mode 100644
index d5da52bc13..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-package tests.core
-
-import scala.tools.nsc.interactive.Response
-import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.SourceFile
-
-/**
- * A trait for defining commands that can be sent to the
- * presentation compiler.
- * */
-trait AskCommand {
-
- /** presentation compiler's instance. */
- protected val compiler: Global
-
- /**
- * Presentation compiler's `askXXX` actions work by doing side-effects
- * on a `Response` instance passed as an argument during the `askXXX`
- * call.
- * The `ask` method defined below encapsulates this pattern.
- * */
- protected def ask[T](op: Response[T] => Unit): Response[T] = {
- val r = new Response[T]
- op(r)
- r
- }
-}
-
-/** Ask the presentation compiler to shut-down. */
-trait AskShutdown extends AskCommand {
- def askShutdown() = compiler.askShutdown()
-}
-
-/** Ask the presentation compiler to parse a sequence of `sources` */
-trait AskParse extends AskCommand {
- import compiler.Tree
-
- /** `sources` need to be entirely parsed before running the test
- * (else commands such as `AskTypeCompletionAt` may fail simply because
- * the source's AST is not yet loaded).
- */
- def askParse(sources: Seq[SourceFile]) {
- val responses = sources map (askParse(_))
- responses.foreach(_.get) // force source files parsing
- }
-
- private def askParse(src: SourceFile, keepLoaded: Boolean = true): Response[Tree] = {
- ask {
- compiler.askParsedEntered(src, keepLoaded, _)
- }
- }
-}
-
-/** Ask the presentation compiler to reload a sequence of `sources` */
-trait AskReload extends AskCommand {
-
- /** Reload the given source files and wait for them to be reloaded. */
- protected def askReload(sources: Seq[SourceFile])(implicit reporter: Reporter): Response[Unit] = {
- val sortedSources = (sources map (_.file.name)).sorted
- reporter.println("reload: " + sortedSources.mkString(", "))
-
- ask {
- compiler.askReload(sources.toList, _)
- }
- }
-}
-
-/** Ask the presentation compiler for completion at a given position. */
-trait AskTypeCompletionAt extends AskCommand {
- import compiler.Member
-
- private[tests] def askTypeCompletionAt(pos: Position)(implicit reporter: Reporter): Response[List[Member]] = {
- reporter.println("\naskTypeCompletion at " + pos.source.file.name + ((pos.line, pos.column)))
-
- ask {
- compiler.askTypeCompletion(pos, _)
- }
- }
-}
-
-/** Ask the presentation compiler for scope completion at a given position. */
-trait AskScopeCompletionAt extends AskCommand {
- import compiler.Member
-
- private[tests] def askScopeCompletionAt(pos: Position)(implicit reporter: Reporter): Response[List[Member]] = {
- reporter.println("\naskScopeCompletion at " + pos.source.file.name + ((pos.line, pos.column)))
-
- ask {
- compiler.askScopeCompletion(pos, _)
- }
- }
-}
-
-/** Ask the presentation compiler for type info at a given position. */
-trait AskTypeAt extends AskCommand {
- import compiler.Tree
-
- private[tests] def askTypeAt(pos: Position)(implicit reporter: Reporter): Response[Tree] = {
- reporter.println("\naskType at " + pos.source.file.name + ((pos.line, pos.column)))
-
- ask {
- compiler.askTypeAt(pos, _)
- }
- }
-}
-
-trait AskLoadedTyped extends AskCommand {
- import compiler.Tree
-
- protected def askLoadedTyped(source: SourceFile, keepLoaded: Boolean = false)(implicit reporter: Reporter): Response[Tree] = {
- ask {
- compiler.askLoadedTyped(source, keepLoaded, _)
- }
- }
-
-}
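As an example of extending AskCommand, a hypothetical trait could wrap `compiler.askLinkPos` (called directly by the hyperlink test in CoreTestDefs below) on top of the same `ask` helper; the trait and method names are invented for illustration.

    trait AskLinkPosAt extends AskCommand {
      import compiler.Symbol
      import scala.reflect.internal.util.{Position, SourceFile}

      def askLinkPosOf(sym: Symbol, source: SourceFile): Response[Position] =
        ask {
          compiler.askLinkPos(sym, source, _)
        }
    }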
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
deleted file mode 100644
index 214f7a4553..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-package scala.tools.nsc
-package interactive
-package tests.core
-
-import scala.reflect.internal.util.Position
-import scala.tools.nsc.interactive.tests.core._
-
-/** Set of core test definitions that are executed for each test run. */
-private[tests] trait CoreTestDefs
- extends PresentationCompilerRequestsWorkingMode {
-
- import scala.tools.nsc.interactive.Global
-
- /** Ask the presentation compiler for completion at all locations
- * (in all sources) where the defined `marker` is found. */
- class TypeCompletionAction(override val compiler: Global)
- extends PresentationCompilerTestDef
- with AskTypeCompletionAt {
-
- def memberPrinter(member: compiler.Member): String =
- "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`"
-
- override def runTest() {
- askAllSources(TypeCompletionMarker) { pos =>
- askTypeCompletionAt(pos)
- } { (pos, members) =>
- withResponseDelimiter {
- reporter.println("[response] askTypeCompletion at " + format(pos))
- // we skip getClass because it changed signature between 1.5 and 1.6, so there is no
- // universal check file that we can provide for this to work
- reporter.println("retrieved %d members".format(members.size))
- compiler ask { () =>
- val filtered = members.filterNot(member => member.sym.name.toString == "getClass" || member.sym.isConstructor)
- reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n"))
- }
- }
- }
- }
- }
-
- /** Ask the presentation compiler for completion at all locations
- * (in all sources) where the defined `marker` is found. */
- class ScopeCompletionAction(override val compiler: Global)
- extends PresentationCompilerTestDef
- with AskScopeCompletionAt {
-
- def memberPrinter(member: compiler.Member): String =
- "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`"
-
- override def runTest() {
- askAllSources(ScopeCompletionMarker) { pos =>
- askScopeCompletionAt(pos)
- } { (pos, members) =>
- withResponseDelimiter {
- reporter.println("[response] askScopeCompletion at " + format(pos))
- try {
- // exclude members not defined in source (they have no position), for more focused and self-contained tests.
- def eligible(sym: compiler.Symbol) = sym.pos != compiler.NoPosition
- val filtered = members.filter(member => eligible(member.sym))
- reporter.println("retrieved %d members".format(filtered.size))
- compiler ask { () =>
- reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n"))
- }
- } catch {
- case t: Throwable =>
- t.printStackTrace()
- }
-
- }
- }
- }
- }
-
- /** Ask the presentation compiler for type info at all locations
- * (in all sources) where the defined `marker` is found. */
- class TypeAction(override val compiler: Global)
- extends PresentationCompilerTestDef
- with AskTypeAt {
-
- override def runTest() {
- askAllSources(TypeMarker) { pos =>
- askTypeAt(pos)
- } { (pos, tree) =>
- withResponseDelimiter {
- reporter.println("[response] askTypeAt at " + format(pos))
- compiler.ask(() => reporter.println(tree))
- }
- }
- }
- }
-
- /** Ask the presentation compiler for hyperlink at all locations
- * (in all sources) where the defined `marker` is found. */
- class HyperlinkAction(override val compiler: Global)
- extends PresentationCompilerTestDef
- with AskTypeAt
- with AskTypeCompletionAt {
-
- override def runTest() {
- askAllSources(HyperlinkMarker) { pos =>
- askTypeAt(pos)(NullReporter)
- } { (pos, tree) =>
- if(tree.symbol == compiler.NoSymbol) {
- reporter.println("\nNo symbol is associated with tree: "+tree)
- }
- else {
- reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name)
- val r = new Response[Position]
- // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int`
- // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile!
- val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null
- val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null
- val sourceFile = sourceFiles.find(_.path == treePath) match {
- case Some(source) =>
- compiler.askLinkPos(tree.symbol, source, r)
- r.get match {
- case Left(pos) =>
- val resolvedPos = if (tree.symbol.pos.isDefined) tree.symbol.pos else pos
- withResponseDelimiter {
- reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + tree.symbol.sourceFile.name)
- }
- case Right(ex) =>
- ex.printStackTrace()
- }
- case None =>
- reporter.println("[error] could not locate sourcefile `" + treeName + "`." +
- "Hint: Does the looked up definition come form a binary?")
- }
- }
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
deleted file mode 100644
index f304eda753..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package scala.tools.nsc
-package interactive
-package tests.core
-
-import reporters.{Reporter => CompilerReporter}
-import scala.reflect.internal.util.Position
-
-/** Trait encapsulating the creation of a presentation compiler's instance.*/
-private[tests] trait PresentationCompilerInstance extends TestSettings {
- protected val settings = new Settings
- protected val withDocComments = false
-
- protected val compilerReporter: CompilerReporter = new InteractiveReporter {
- override def compiler = PresentationCompilerInstance.this.compiler
- }
-
- protected lazy val compiler: Global = {
- prepareSettings(settings)
- new Global(settings, compilerReporter) {
- override def forScaladoc = withDocComments
- }
- }
-
- /**
- * Called before instantiating the presentation compiler's instance.
- * You should provide an implementation of this method if you need
- * to customize the `settings` used to instantiate the presentation compiler.
- * */
- protected def prepareSettings(settings: Settings) {}
-
- protected def printClassPath(implicit reporter: Reporter) {
- reporter.println("\tbootClassPath: %s".format(settings.bootclasspath.value))
- reporter.println("\tverbose: %b".format(settings.verbose.value))
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
deleted file mode 100644
index b5ae5f2d75..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-package scala.tools.nsc
-package interactive
-package tests.core
-
-import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.SourceFile
-
-trait PresentationCompilerRequestsWorkingMode extends TestResources {
-
- protected def synchronousRequests: Boolean
-
- protected def askAllSources[T] = if (synchronousRequests) askAllSourcesSync[T] _ else askAllSourcesAsync[T] _
-
- /** Perform an operation on all sources at all positions that match the given
- * `marker`. For instance, askAllSources(TypeMarker)(askTypeAt)(println) would
- * ask the type at all positions marked with `TypeMarker.marker` and println the result.
- */
- private def askAllSourcesAsync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
- val positions = allPositionsOf(str = marker.marker)
- val responses = for (pos <- positions) yield askAt(pos)
-
- for ((pos, r) <- positions zip responses) withResponse(pos, r)(f)
- }
-
- /** Synchronous version of askAllSources. Each position is treated in turn, waiting for the
- * response before going to the next one.
- */
- private def askAllSourcesSync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
- val positions = allPositionsOf(str = marker.marker)
- for (pos <- positions) withResponse(pos, askAt(pos))(f)
- }
-
- /** All positions of the given string in all source files. */
- private def allPositionsOf(srcs: Seq[SourceFile] = sourceFiles, str: String): Seq[Position] =
- for (s <- srcs; p <- positionsOf(s, str)) yield p
-
- /** Return all positions of the given str in the given source file. */
- private def positionsOf(source: SourceFile, str: String): Seq[Position] = {
- val buf = new scala.collection.mutable.ListBuffer[Position]
- var pos = source.content.indexOfSlice(str)
- while (pos >= 0) {
- buf += source.position(pos - 1) // we need the position before the first character of this marker
- pos = source.content.indexOfSlice(str, pos + 1)
- }
- buf.toList
- }
-
- private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) {
- /** Return the filename:line:col version of this position. */
- def showPos(pos: Position): String =
- "%s:%d:%d".format(pos.source.file.name, pos.line, pos.column)
-
- response.get(TIMEOUT) match {
- case Some(Left(t)) =>
- f(pos, t)
- case None =>
- println("TIMEOUT: " + showPos(pos))
- case Some(r) =>
- println("ERROR: " + r)
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
deleted file mode 100644
index 9cf2aa4fe4..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package scala.tools.nsc.interactive.tests.core
-
-import scala.tools.nsc.interactive.Global
-import scala.reflect.internal.util.Position
-
-trait PresentationCompilerTestDef {
-
- private[tests] def runTest(): Unit
-
- protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter) {
- def printDelimiter() = reporter.println("=" * 80)
- printDelimiter()
- block
- printDelimiter()
- }
-
- protected def format(pos: Position): String =
- (if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "<no position>")
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala
deleted file mode 100644
index 631504cda5..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.tools.nsc.interactive.tests.core
-
-private[tests] trait Reporter {
- def println(msg: Any): Unit
-}
-
-/** Reporter that simply prints all messages to the standard output.*/
-private[tests] object ConsoleReporter extends Reporter {
- def println(msg: Any) { Console.println(msg) }
-}
-
-/** Reporter that swallows all passed messages. */
-private[tests] object NullReporter extends Reporter {
- def println(msg: Any) {}
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
deleted file mode 100644
index e80b741a8d..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package scala.tools.nsc.interactive.tests.core
-
-import scala.reflect.internal.util.{SourceFile,BatchSourceFile}
-import scala.tools.nsc.io.{AbstractFile,Path}
-
-private[tests] object SourcesCollector {
- import Path._
- type SourceFilter = Path => Boolean
-
- /**
- * All files below `base` directory that pass the `filter`.
- * With the default `filter` only .scala and .java files are collected.
- * */
- def apply(base: Path, filter: SourceFilter): Array[SourceFile] = {
- assert(base.isDirectory)
- base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name)
- }
-
- private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile))
- private def source(filename: String): SourceFile = source(AbstractFile.getFile(filename))
- private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file)
-} \ No newline at end of file
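A hedged usage sketch, mirroring how trait TestResources (below) calls this object; the base path and filter are assumptions.

    val sources: Array[SourceFile] =
      SourcesCollector(Path("src"), p => p.extension == "scala" || p.extension == "java")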
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
deleted file mode 100644
index 8698ada4ad..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-package scala.tools.nsc.interactive.tests.core
-
-case class DuplicateTestMarker(msg: String) extends Exception(msg)
-
-object TestMarker {
- import scala.collection.mutable.Map
- private val markers: Map[String, TestMarker] = Map.empty
-
- private def checkForDuplicate(marker: TestMarker) {
- markers.get(marker.marker) match {
- case None => markers(marker.marker) = marker
- case Some(otherMarker) =>
- val msg = "Marker `%s` is already used by %s. Please choose a different marker for %s".format(marker.marker, marker, otherMarker)
- throw new DuplicateTestMarker(msg)
- }
- }
-}
-
-abstract case class TestMarker(val marker: String) {
- TestMarker.checkForDuplicate(this)
-}
-
-object TypeCompletionMarker extends TestMarker("/*!*/")
-
-object ScopeCompletionMarker extends TestMarker("/*_*/")
-
-object TypeMarker extends TestMarker("/*?*/")
-
-object HyperlinkMarker extends TestMarker("/*#*/")
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala
deleted file mode 100644
index 887c3cf29b..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package scala.tools.nsc.interactive.tests.core
-
-import scala.tools.nsc.io.Path
-import scala.reflect.internal.util.SourceFile
-
-/** Resources used by the test. */
-private[tests] trait TestResources extends TestSettings {
- /** collected source files that are to be used by the test runner */
- protected lazy val sourceFiles: Array[SourceFile] = SourcesCollector(baseDir / sourceDir, isScalaOrJavaSource)
-
- private def isScalaOrJavaSource(file: Path): Boolean = file.extension == "scala" || file.extension == "java"
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala
deleted file mode 100644
index 681204172b..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package scala.tools.nsc.interactive.tests.core
-
-import scala.tools.nsc.io.Path
-
-/** Common settings for the test. */
-private[tests] trait TestSettings {
- protected final val TIMEOUT = 10000 // timeout in milliseconds
-
- /** The root directory for this test suite, usually the test kind ("test/files/presentation"). */
- protected val outDir = Path(Option(System.getProperty("partest.cwd")).getOrElse("."))
-
- /** The base directory for this test, usually a subdirectory of "test/files/presentation/" */
- protected val baseDir = Option(System.getProperty("partest.testname")).map(outDir / _).getOrElse(Path("."))
-
- /** Where source files are placed. */
- protected val sourceDir = "src"
-
- protected implicit val reporter: Reporter = ConsoleReporter
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
deleted file mode 100644
index 59508fa951..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.nsc.io.{ File, AbstractFile }
-import util.ScalaClassLoader
-import java.net.{ URL, URLConnection, URLStreamHandler }
-import scala.collection.{ mutable, immutable }
-
-/**
- * A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
- *
- * @author Lex Spoon
- */
-class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
- extends ClassLoader(parent)
- with ScalaClassLoader
-{
- protected def classNameToPath(name: String): String =
- if (name endsWith ".class") name
- else name.replace('.', '/') + ".class"
-
- protected def findAbstractFile(name: String): AbstractFile = {
- var file: AbstractFile = root
- val pathParts = classNameToPath(name) split '/'
-
- for (dirPart <- pathParts.init) {
- file = file.lookupName(dirPart, true)
- if (file == null)
- return null
- }
-
- file.lookupName(pathParts.last, false) match {
- case null => null
- case file => file
- }
- }
-
- protected def dirNameToPath(name: String): String =
- name.replace('.', '/')
-
- protected def findAbstractDir(name: String): AbstractFile = {
- var file: AbstractFile = root
- val pathParts = dirNameToPath(name) split '/'
-
- for (dirPart <- pathParts) {
- file = file.lookupName(dirPart, true)
- if (file == null)
- return null
- }
-
- return file
- }
-
- // parent delegation in JCL uses getResource; so either add parent.getResAsStream
- // or implement findResource, which we do here as a study in scarlet (my complexion
- // after looking at CLs and URLs)
- override def findResource(name: String): URL = findAbstractFile(name) match {
- case null => null
- case file => new URL(null, "repldir:" + file.path, new URLStreamHandler {
- override def openConnection(url: URL): URLConnection = new URLConnection(url) {
- override def connect() { }
- override def getInputStream = file.input
- }
- })
- }
- // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail
- override def getResourceAsStream(name: String) = findAbstractFile(name) match {
- case null => super.getResourceAsStream(name)
- case file => file.input
- }
- // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating
- override def classBytes(name: String): Array[Byte] = findAbstractFile(name) match {
- case null => super.classBytes(name)
- case file => file.toByteArray
- }
- override def findClass(name: String): JClass = {
- val bytes = classBytes(name)
- if (bytes.length == 0)
- throw new ClassNotFoundException(name)
- else
- defineClass(name, bytes, 0, bytes.length)
- }
-
- private val packages = mutable.Map[String, Package]()
-
- override def definePackage(name: String, specTitle: String, specVersion: String, specVendor: String, implTitle: String, implVersion: String, implVendor: String, sealBase: URL): Package = {
- throw new UnsupportedOperationException()
- }
-
- override def getPackage(name: String): Package = {
- findAbstractDir(name) match {
- case null => super.getPackage(name)
- case file => packages.getOrElseUpdate(name, {
- val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader])
- ctor.setAccessible(true)
- ctor.newInstance(name, null, null, null, null, null, null, null, this)
- })
- }
- }
-
- override def getPackages(): Array[Package] =
- root.iterator.filter(_.isDirectory).map(dir => getPackage(dir.name)).toArray
-}
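A minimal usage sketch, assuming classes were already compiled into an in-memory directory (for example by a Global configured with `settings.outputDirs.setSingleOutput`); the class name `Example` is illustrative.

    import scala.reflect.io.VirtualDirectory

    val out = new VirtualDirectory("(memory)", None)
    // ... compile sources into `out` ...
    val loader = new AbstractFileClassLoader(out, getClass.getClassLoader)
    val clazz  = loader.loadClass("Example")  // parent delegation first, then findClass on `out`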
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
deleted file mode 100644
index e66e4eff29..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-class AbstractOrMissingHandler[T](onError: String => Unit, value: T) extends PartialFunction[Throwable, T] {
- def isDefinedAt(t: Throwable) = t match {
- case _: AbstractMethodError => true
- case _: NoSuchMethodError => true
- case _: MissingRequirementError => true
- case _: NoClassDefFoundError => true
- case _ => false
- }
- def apply(t: Throwable) = t match {
- case x @ (_: AbstractMethodError | _: NoSuchMethodError | _: NoClassDefFoundError) =>
- onError("""
- |Failed to initialize compiler: %s.
- |This is most often remedied by a full clean and recompile.
- |Otherwise, your classpath may contain bytecode compiled by
- |different and incompatible versions of scala.
- |""".stripMargin.format(x.getClass.getName split '.' last)
- )
- x.printStackTrace()
- value
- case x: MissingRequirementError =>
- onError("""
- |Failed to initialize compiler: %s not found.
- |** Note that as of 2.8 scala does not assume use of the java classpath.
- |** For the old behavior pass -usejavacp to scala, or if using a Settings
- |** object programmatically, settings.usejavacp.value = true.""".stripMargin.format(x.req)
- )
- value
- }
-}
-
-object AbstractOrMissingHandler {
- def apply[T]() = new AbstractOrMissingHandler[T](Console println _, null.asInstanceOf[T])
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
deleted file mode 100644
index 40e9d3d600..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import java.lang.reflect
-import java.util.concurrent.ConcurrentHashMap
-import util.ScalaClassLoader
-import ScalaClassLoader.appLoader
-import scala.reflect.NameTransformer._
-
-object ByteCode {
- /** Until I figure out why I can't get scalap onto the classpath such
- * that the compiler will bootstrap, we have to use reflection.
- */
- private lazy val DECODER: Option[AnyRef] =
- for (clazz <- appLoader.tryToLoadClass[AnyRef]("scala.tools.scalap.Decode$")) yield
- clazz.getField(MODULE_INSTANCE_NAME).get(null)
-
- private def decoderMethod(name: String, args: JClass*): Option[reflect.Method] = {
- for (decoder <- DECODER ; m <- Option(decoder.getClass.getMethod(name, args: _*))) yield m
- }
-
- private lazy val aliasMap = {
- for (module <- DECODER ; method <- decoderMethod("typeAliases", classOf[String])) yield
- method.invoke(module, _: String).asInstanceOf[Option[Map[String, String]]]
- }
-
- /** Scala sig bytes.
- */
- def scalaSigBytesForPath(path: String) =
- for {
- module <- DECODER
- method <- decoderMethod("scalaSigAnnotationBytes", classOf[String])
- names <- method.invoke(module, path).asInstanceOf[Option[Array[Byte]]]
- }
- yield names
-
- /** Attempts to retrieve case parameter names for given class name.
- */
- def caseParamNamesForPath(path: String) =
- for {
- module <- DECODER
- method <- decoderMethod("caseParamNames", classOf[String])
- names <- method.invoke(module, path).asInstanceOf[Option[List[String]]]
- }
- yield names
-
- def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg))
-
- /** Attempts to find type aliases in package objects.
- */
- def aliasForType(path: String): Option[String] = {
- val (pkg, name) = (path lastIndexOf '.') match {
- case -1 => return None
- case idx => (path take idx, path drop (idx + 1))
- }
- aliasesForPackage(pkg) flatMap (_ get name)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
deleted file mode 100644
index 1741a82775..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import CodeHandlers.NoSuccess
-import scala.util.control.ControlThrowable
-
-/**
- * The start of a simpler interface for utilizing the compiler with piecemeal
- * code strings. The "T" here could potentially be a Tree, a Type, a Symbol,
- * a Boolean, or something even more exotic.
- */
-trait CodeHandlers[T] {
- self =>
-
- // Expressions are composed of operators and operands.
- def expr(code: String): T
-
- // Statements occur as parts of blocks and templates.
- // A statement can be an import, a definition or an expression, or it can be empty.
- // Statements used in the template of a class definition can also be declarations.
- def stmt(code: String): T
- def stmts(code: String): Seq[T]
-
- object opt extends CodeHandlers[Option[T]] {
- val handler: PartialFunction[Throwable, Option[T]] = {
- case _: NoSuccess => None
- }
- val handlerSeq: PartialFunction[Throwable, Seq[Option[T]]] = {
- case _: NoSuccess => Nil
- }
-
- def expr(code: String) = try Some(self.expr(code)) catch handler
- def stmt(code: String) = try Some(self.stmt(code)) catch handler
- def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq
- }
-}
-
-object CodeHandlers {
- def incomplete() = throw CodeIncomplete
- def fail(msg: String) = throw new CodeException(msg)
-
- trait NoSuccess extends ControlThrowable
- class CodeException(msg: String) extends RuntimeException(msg) with NoSuccess { }
- object CodeIncomplete extends CodeException("CodeIncomplete")
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
deleted file mode 100644
index 8042f0aee2..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Lex Spoon
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** A command line for the interpreter.
- */
-class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) {
- override def cmdName = "scala"
- override lazy val fileEndings = List(".scalaint")
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
deleted file mode 100644
index 1dfccbfbf7..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import Completion._
-
-/** An implementation-agnostic completion interface which makes no
- * reference to the jline classes.
- */
-trait Completion {
- type ExecResult
- def resetVerbosity(): Unit
- def completer(): ScalaCompleter
-}
-object NoCompletion extends Completion {
- type ExecResult = Nothing
- def resetVerbosity() = ()
- def completer() = NullCompleter
-}
-
-object Completion {
- def empty: Completion = NoCompletion
-
- case class Candidates(cursor: Int, candidates: List[String]) { }
- val NoCandidates = Candidates(-1, Nil)
-
- object NullCompleter extends ScalaCompleter {
- def complete(buffer: String, cursor: Int): Candidates = NoCandidates
- }
- trait ScalaCompleter {
- def complete(buffer: String, cursor: Int): Candidates
- }
-
- def looksLikeInvocation(code: String) = (
- (code != null)
- && (code startsWith ".")
- && !(code == ".")
- && !(code startsWith "./")
- && !(code startsWith "..")
- )
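- // e.g. ".toUpperCase" and ".map(_ + 1)" look like invocations on the previous
- // result, whereas ".", "./run.sh" and "../dir" do not.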
- object Forwarder {
- def apply(forwardTo: () => Option[CompletionAware]): CompletionAware = new CompletionAware {
- def completions(verbosity: Int) = forwardTo() map (_ completions verbosity) getOrElse Nil
- override def follow(s: String) = forwardTo() flatMap (_ follow s)
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
deleted file mode 100644
index ab96f415db..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.NameTransformer
-
-/** An interface for objects which are aware of tab completion and
- * will supply their own candidates and resolve their own paths.
- */
-trait CompletionAware {
- /** The complete list of unqualified Strings to which this
- * object will complete.
- */
- def completions(verbosity: Int): List[String]
-
- /** The next completor in the chain.
- */
- def follow(id: String): Option[CompletionAware] = None
-
- /** A list of useful information regarding a specific uniquely
- * identified completion. This is specifically written for the
- * following situation, but should be useful elsewhere too:
- *
- * x.y.z.methodName<tab>
- *
- * If "methodName" is among z's completions, and verbosity > 0
- * indicating tab has been pressed twice consecutively, then we
- * call alternativesFor and show a list of overloaded method
- * signatures.
- */
- def alternativesFor(id: String): List[String] = Nil
-
- /** Given string 'buf', return a list of all the strings
- * to which it can complete. This may involve delegating
- * to other CompletionAware objects.
- */
- def completionsFor(parsed: Parsed): List[String] = {
- import parsed.{ buffer, verbosity }
- val comps = completions(verbosity) filter (_ startsWith buffer)
- val exact = comps contains buffer
-
- val results =
- if (parsed.isEmpty) comps
- else if (parsed.isUnqualified && !parsed.isLastDelimiter)
- if (verbosity > 0 && exact) alternativesFor(buffer)
- else comps
- else follow(parsed.bufferHead) map (_ completionsFor parsed.bufferTail) getOrElse Nil
-
- results.sorted
- }
-}
-
-object CompletionAware {
- val Empty = new CompletionAware { def completions(verbosity: Int) = Nil }
-
- def unapply(that: Any): Option[CompletionAware] = that match {
- case x: CompletionAware => Some((x))
- case _ => None
- }
-
- /** Create a CompletionAware object from the given functions.
- * The first should generate the list of completions whenever queried,
- * and the second should return a Some(CompletionAware) if
- * subcompletions are possible.
- */
- def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware =
- new CompletionAware {
- def completions = terms()
- def completions(verbosity: Int) = completions
- override def follow(id: String) = followFunction(id)
- }
-
- /** Convenience factories.
- */
- def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
- def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware =
- apply(() => map.keys.toList, map.get _)
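-
- // e.g. CompletionAware(Map("foo" -> CompletionAware.Empty)) completes "foo" at the
- // top level and delegates any deeper completion to the mapped object.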
-}
-
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
deleted file mode 100644
index d14b5c79e0..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** This has a lot of duplication with other methods in Symbols and Types,
- * but the repl completion utility is very sensitive to precise output. The best
- * thing to do would be to abstract an interface for how such things are printed,
- * as is also in progress with error messages.
- */
-trait CompletionOutput {
- val global: Global
-
- import global._
- import definitions.{ isTupleType, isFunctionType, isRepeatedParamType }
-
- /** Reduces fully-qualified-name noise for some common packages.
- */
- val typeTransforms = List(
- "java.lang." -> "",
- "scala.collection.immutable." -> "immutable.",
- "scala.collection.mutable." -> "mutable.",
- "scala.collection.generic." -> "generic."
- )
-
- def quietString(tp: String): String =
- typeTransforms.foldLeft(tp) {
- case (str, (prefix, replacement)) =>
- if (str startsWith prefix) replacement + (str stripPrefix prefix)
- else str
- }
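-
- // e.g. quietString("java.lang.String") yields "String", and
- // quietString("scala.collection.immutable.List[Int]") yields "immutable.List[Int]".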
-
- class MethodSymbolOutput(method: Symbol) {
- val pkg = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse ""
-
- def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
- def relativize(tp: Type): String = relativize(tp.dealiasWiden.toString)
- def relativize(sym: Symbol): String = relativize(sym.info)
-
- def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
- def parenList(params: List[Any]) = params.mkString("(", ", ", ")")
-
- def methodTypeToString(mt: MethodType) =
- (mt.paramss map paramsString mkString "") + ": " + relativize(mt.finalResultType)
-
- def typeToString(tp: Type): String = relativize(
- tp match {
- case x if isFunctionType(x) => functionString(x)
- case x if isTupleType(x) => tupleString(x)
- case x if isRepeatedParamType(x) => typeToString(x.typeArgs.head) + "*"
- case mt @ MethodType(_, _) => methodTypeToString(mt)
- case x => x.toString
- }
- )
-
- def tupleString(tp: Type) = parenList(tp.dealiasWiden.typeArgs map relativize)
- def functionString(tp: Type) = tp.dealiasWiden.typeArgs match {
- case List(t, r) => t + " => " + r
- case xs => parenList(xs.init) + " => " + xs.last
- }
-
- def tparamsString(tparams: List[Symbol]) = braceList(tparams map (_.defString))
- def paramsString(params: List[Symbol]) = {
- def paramNameString(sym: Symbol) = if (sym.isSynthetic) "" else sym.nameString + ": "
- def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.dealiasWiden)
-
- val isImplicit = params.nonEmpty && params.head.isImplicit
- val strs = (params map paramString) match {
- case x :: xs if isImplicit => ("implicit " + x) :: xs
- case xs => xs
- }
- parenList(strs)
- }
-
- def methodString() =
- method.keyString + " " + method.nameString + (method.info.normalize match {
- case NullaryMethodType(resType) => ": " + typeToString(resType)
- case PolyType(tparams, resType) => tparamsString(tparams) + typeToString(resType)
- case mt @ MethodType(_, _) => methodTypeToString(mt)
- case x => x.toString
- })
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
deleted file mode 100644
index 07e36f4f27..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline.console.{ ConsoleReader, CursorBuffer }
-import scala.tools.jline.console.completer.CompletionHandler
-import Completion._
-
-trait ConsoleReaderHelper extends ConsoleReader {
- def currentLine = "" + getCursorBuffer.buffer
- def currentPos = getCursorBuffer.cursor
- def terminal = getTerminal()
- def width = terminal.getWidth()
- def height = terminal.getHeight()
- def paginate = isPaginationEnabled()
- def paginate_=(value: Boolean) = setPaginationEnabled(value)
-
- def goBack(num: Int): Unit
- def readOneKey(prompt: String): Int
- def eraseLine(): Unit
-
- private val marginSize = 3
- private def morePrompt = "--More--"
- private def emulateMore(): Int = {
- val key = readOneKey(morePrompt)
- try key match {
- case '\r' | '\n' => 1
- case 'q' => -1
- case _ => height - 1
- }
- finally {
- eraseLine()
- // TODO: still not quite managing to erase --More-- and get
- // back to a scala prompt without another keypress.
- if (key == 'q') {
- putString(getPrompt())
- redrawLine()
- flush()
- }
- }
- }
-
- override def printColumns(items: JCollection[_ <: CharSequence]): Unit =
- printColumns(items: List[String])
-
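- // Lays the items out in fixed-width columns: each column is as wide as the
- // longest item plus a small margin, the column count is whatever fits the
- // terminal width (at least one), and output pauses on --More-- when
- // pagination is enabled and the screen fills.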
- def printColumns(items: List[String]): Unit = {
- if (items forall (_ == ""))
- return
-
- val longest = items map (_.length) max
- var linesLeft = if (isPaginationEnabled()) height - 1 else Int.MaxValue
- val columnSize = longest + marginSize
- val padded = items map ("%-" + columnSize + "s" format _)
- val groupSize = 1 max (width / columnSize) // make sure it doesn't divide to 0
-
- padded grouped groupSize foreach { xs =>
- println(xs.mkString)
- linesLeft -= 1
- if (linesLeft <= 0) {
- linesLeft = emulateMore()
- if (linesLeft < 0)
- return
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
deleted file mode 100644
index 80debfacb9..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
-
-class JLineDelimiter extends ArgumentDelimiter {
- def toJLine(args: List[String], cursor: Int) = args match {
- case Nil => new ArgumentList(new Array[String](0), 0, 0, cursor)
- case xs => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor)
- }
-
- def delimit(buffer: CharSequence, cursor: Int) = {
- val p = Parsed(buffer.toString, cursor)
- toJLine(p.args, cursor)
- }
- def isDelimiter(buffer: CharSequence, cursor: Int) = Parsed(buffer.toString, cursor).isDelimiter
-}
-
-trait Delimited {
- self: Parsed =>
-
- def delimited: Char => Boolean
- def escapeChars: List[Char] = List('\\')
- def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"'))
-
- /** Break String into args based on delimiting function.
- */
- protected def toArgs(s: String): List[String] =
- if (s == "") Nil
- else (s indexWhere isDelimiterChar) match {
- case -1 => List(s)
- case idx => (s take idx) :: toArgs(s drop (idx + 1))
- }
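-
- // For instance, with whitespace as the delimiting function, toArgs("a b  c")
- // yields List("a", "b", "", "c"): consecutive delimiters produce empty segments.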
-
- def isDelimiterChar(ch: Char) = delimited(ch)
- def isEscapeChar(ch: Char): Boolean = escapeChars contains ch
- def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch
- def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
deleted file mode 100644
index 827ebe1678..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.internal.util.BatchSourceFile
-import scala.tools.nsc.ast.parser.Tokens.EOF
-
-trait ExprTyper {
- val repl: IMain
-
- import repl._
- import global.{ reporter => _, Import => _, _ }
- import definitions._
- import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
- import naming.freshInternalVarName
-
- object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] {
- def applyRule[T](code: String, rule: UnitParser => T): T = {
- reporter.reset()
- val scanner = newUnitParser(code)
- val result = rule(scanner)
-
- if (!reporter.hasErrors)
- scanner.accept(EOF)
-
- result
- }
-
- def defns(code: String) = stmts(code) collect { case x: DefTree => x }
- def expr(code: String) = applyRule(code, _.expr())
- def stmts(code: String) = applyRule(code, _.templateStats())
- def stmt(code: String) = stmts(code).last // guaranteed nonempty
- }
-
- /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
- def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""") {
- var isIncomplete = false
- reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
- val trees = codeParser.stmts(line)
- if (reporter.hasErrors) Some(Nil)
- else if (isIncomplete) None
- else Some(trees)
- }
- }
- // def parsesAsExpr(line: String) = {
- // import codeParser._
- // (opt expr line).isDefined
- // }
-
- def symbolOfLine(code: String): Symbol = {
- def asExpr(): Symbol = {
- val name = freshInternalVarName()
- // Typing it with a lazy val would give us the right type, but runs
- // into compiler bugs with things like existentials, so we compile it
- // behind a def and strip the NullaryMethodType which wraps the expr.
- val line = "def " + name + " = " + code
-
- interpretSynthetic(line) match {
- case IR.Success =>
- val sym0 = symbolOfTerm(name)
- // drop NullaryMethodType
- sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
- case _ => NoSymbol
- }
- }
- def asDefn(): Symbol = {
- val old = repl.definedSymbolList.toSet
-
- interpretSynthetic(code) match {
- case IR.Success =>
- repl.definedSymbolList filterNot old match {
- case Nil => NoSymbol
- case sym :: Nil => sym
- case syms => NoSymbol.newOverloaded(NoPrefix, syms)
- }
- case _ => NoSymbol
- }
- }
- def asError(): Symbol = {
- interpretSynthetic(code)
- NoSymbol
- }
- beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
- }
-
- private var typeOfExpressionDepth = 0
- def typeOfExpression(expr: String, silent: Boolean = true): Type = {
- if (typeOfExpressionDepth > 2) {
- repldbg("Terminating typeOfExpression recursion for expression: " + expr)
- return NoType
- }
- typeOfExpressionDepth += 1
- // We don't presently have a good way to suppress undesirable success output
- // while letting errors through, so we first try it silently: if there
- // is an error, and errors are desired, we re-evaluate non-silently
- // to induce the error message.
- try beSilentDuring(symbolOfLine(expr).tpe) match {
- case NoType if !silent => symbolOfLine(expr).tpe // generate error
- case tpe => tpe
- }
- finally typeOfExpressionDepth -= 1
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala b/src/compiler/scala/tools/nsc/interpreter/Formatting.scala
deleted file mode 100644
index 43e653edfd..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import util.stringFromWriter
-
-trait Formatting {
- def prompt: String
-
- def spaces(code: String): String = {
- /** Heuristic to avoid indenting and thereby corrupting """-strings and XML literals. */
- val tokens = List("\"\"\"", "</", "/>")
- val noIndent = (code contains "\n") && (tokens exists code.contains)
-
- if (noIndent) ""
- else prompt drop 1 map (_ => ' ')
- }
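- // e.g. with a prompt of the form "\nscala> ", `prompt drop 1` strips the leading
- // newline and turns each remaining character into a blank, yielding an indent as
- // wide as the visible prompt.
-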
- /** Indent some code by the width of the scala> prompt.
- * This way, compiler error messages read better.
- */
- def indentCode(code: String) = {
- val indent = spaces(code)
- stringFromWriter(str =>
- for (line <- code.lines) {
- str print indent
- str print (line + "\n")
- str.flush()
- }
- )
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
deleted file mode 100644
index ee45dc558a..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ /dev/null
@@ -1,966 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Alexander Spoon
- */
-
-package scala.tools.nsc
-package interpreter
-
-import Predef.{ println => _, _ }
-import java.io.{ BufferedReader, FileReader }
-import java.util.concurrent.locks.ReentrantLock
-import scala.sys.process.Process
-import session._
-import scala.util.Properties.{ envOrNone, javaHome, jdkHome, javaVersion }
-import scala.tools.util.{ Javap }
-import scala.annotation.tailrec
-import scala.collection.mutable.ListBuffer
-import scala.concurrent.ops
-import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
-import interpreter._
-import io.{ File, Directory, Path }
-import scala.reflect.NameTransformer._
-import util.ScalaClassLoader
-import ScalaClassLoader._
-import scala.tools.util._
-import scala.language.{implicitConversions, existentials}
-import scala.reflect.{ClassTag, classTag}
-import scala.tools.reflect.StdRuntimeTags._
-
-/** The Scala interactive shell. It provides a read-eval-print loop
- * around the Interpreter class.
- * After instantiation, clients should call the process() method (main() is deprecated).
- *
- * If no in0 is specified, then input will come from the console, and
- * the class will attempt to provide input editing features such as
- * input history.
- *
- * @author Moez A. Abdel-Gawad
- * @author Lex Spoon
- * @version 1.2
- */
-class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
- extends AnyRef
- with LoopCommands
- with ILoopInit
-{
- def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out)
- def this() = this(None, new JPrintWriter(Console.out, true))
-
- var in: InteractiveReader = _ // the input stream from which commands come
- var settings: Settings = _
- var intp: IMain = _
-
- @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
- @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i
-
- /** Having inherited the difficult "var-ness" of the repl instance,
- * I'm trying to work around it by moving operations into a class from
- * which it will appear to be a stable prefix.
- */
- private def onIntp[T](f: IMain => T): T = f(intp)
-
- class IMainOps[T <: IMain](val intp: T) {
- import intp._
- import global._
-
- def printAfterTyper(msg: => String) =
- intp.reporter printUntruncatedMessage afterTyper(msg)
-
- /** Strip NullaryMethodType artifacts. */
- private def replInfo(sym: Symbol) = {
- sym.info match {
- case NullaryMethodType(restpe) if sym.isAccessor => restpe
- case info => info
- }
- }
- def echoTypeStructure(sym: Symbol) =
- printAfterTyper("" + deconstruct.show(replInfo(sym)))
-
- def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
- if (verbose) ILoop.this.echo("// Type signature")
- printAfterTyper("" + replInfo(sym))
-
- if (verbose) {
- ILoop.this.echo("\n// Internal Type structure")
- echoTypeStructure(sym)
- }
- }
- }
- implicit def stabilizeIMain(intp: IMain) = new IMainOps[intp.type](intp)
-
- /** TODO -
- * -n normalize
- * -l label with case class parameter names
- * -c complete - leave nothing out
- */
- private def typeCommandInternal(expr: String, verbose: Boolean): Result = {
- onIntp { intp =>
- val sym = intp.symbolOfLine(expr)
- if (sym.exists) intp.echoTypeSignature(sym, verbose)
- else ""
- }
- }
-
- override def echoCommandMessage(msg: String) {
- intp.reporter printUntruncatedMessage msg
- }
-
- def isAsync = !settings.Yreplsync.value
- lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals])
- def history = in.history
-
- /** The context class loader at the time this object was created */
- protected val originalClassLoader = Thread.currentThread.getContextClassLoader
-
- // classpath entries added via :cp
- var addedClasspath: String = ""
-
- /** A reverse list of commands to replay if the user requests a :replay */
- var replayCommandStack: List[String] = Nil
-
- /** A list of commands to replay if the user requests a :replay */
- def replayCommands = replayCommandStack.reverse
-
- /** Record a command for replay should the user request a :replay */
- def addReplay(cmd: String) = replayCommandStack ::= cmd
-
- def savingReplayStack[T](body: => T): T = {
- val saved = replayCommandStack
- try body
- finally replayCommandStack = saved
- }
- def savingReader[T](body: => T): T = {
- val saved = in
- try body
- finally in = saved
- }
-
- /** Close the interpreter and set the var to null. */
- def closeInterpreter() {
- if (intp ne null) {
- intp.close()
- intp = null
- }
- }
-
- class ILoopInterpreter extends IMain(settings, out) {
- outer =>
-
- override lazy val formatting = new Formatting {
- def prompt = ILoop.this.prompt
- }
- override protected def parentClassLoader =
- settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader )
- }
-
- /** Create a new interpreter. */
- def createInterpreter() {
- if (addedClasspath != "")
- settings.classpath append addedClasspath
-
- intp = new ILoopInterpreter
- }
-
- /** print a friendly help message */
- def helpCommand(line: String): Result = {
- if (line == "") helpSummary()
- else uniqueCommand(line) match {
- case Some(lc) => echo("\n" + lc.longHelp)
- case _ => ambiguousError(line)
- }
- }
- private def helpSummary() = {
- val usageWidth = commands map (_.usageMsg.length) max
- val formatStr = "%-" + usageWidth + "s %s %s"
-
- echo("All commands can be abbreviated, e.g. :he instead of :help.")
- echo("Those marked with a * have more detailed help, e.g. :help imports.\n")
-
- commands foreach { cmd =>
- val star = if (cmd.hasLongHelp) "*" else " "
- echo(formatStr.format(cmd.usageMsg, star, cmd.help))
- }
- }
- private def ambiguousError(cmd: String): Result = {
- matchingCommands(cmd) match {
- case Nil => echo(cmd + ": no such command. Type :help for help.")
- case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
- }
- Result(true, None)
- }
- private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
- private def uniqueCommand(cmd: String): Option[LoopCommand] = {
- // this lets us add commands willy-nilly and only requires enough of the command to disambiguate
- matchingCommands(cmd) match {
- case List(x) => Some(x)
- // exact match OK even if otherwise appears ambiguous
- case xs => xs find (_.name == cmd)
- }
- }
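-
- // e.g. ":he" expands unambiguously to :help, while ":h" could be :help,
- // :history or :h? and is reported as ambiguous (unless it matches a
- // command name exactly).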
-
- /** Show the history */
- lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") {
- override def usage = "[num]"
- def defaultLines = 20
-
- def apply(line: String): Result = {
- if (history eq NoHistory)
- return "No history available."
-
- val xs = words(line)
- val current = history.index
- val count = try xs.head.toInt catch { case _: Exception => defaultLines }
- val lines = history.asStrings takeRight count
- val offset = current - lines.size + 1
-
- for ((line, index) <- lines.zipWithIndex)
- echo("%3d %s".format(index + offset, line))
- }
- }
-
- // When you know you are most likely breaking into the middle
- // of a line being typed. This softens the blow.
- protected def echoAndRefresh(msg: String) = {
- echo("\n" + msg)
- in.redrawLine()
- }
- protected def echo(msg: String) = {
- out println msg
- out.flush()
- }
- protected def echoNoNL(msg: String) = {
- out print msg
- out.flush()
- }
-
- /** Search the history */
- def searchHistory(_cmdline: String) {
- val cmdline = _cmdline.toLowerCase
- val offset = history.index - history.size + 1
-
- for ((line, index) <- history.asStrings.zipWithIndex ; if line.toLowerCase contains cmdline)
- echo("%d %s".format(index + offset, line))
- }
-
- private var currentPrompt = Properties.shellPromptString
- def setPrompt(prompt: String) = currentPrompt = prompt
- /** Prompt to print when awaiting input */
- def prompt = currentPrompt
-
- import LoopCommand.{ cmd, nullary }
-
- /** Standard commands **/
- lazy val standardCommands = List(
- cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath),
- cmd("help", "[command]", "print this summary or command-specific help", helpCommand),
- historyCommand,
- cmd("h?", "<string>", "search the history", searchHistory),
- cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
- cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand),
- cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
- cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
- nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
- nullary("power", "enable power user mode", powerCmd),
- nullary("quit", "exit the interpreter", () => Result(false, None)),
- nullary("replay", "reset execution and replay all previous commands", replay),
- nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
- shCommand,
- nullary("silent", "disable/enable automatic printing of results", verbosity),
- cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
- nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand)
- )
-
- /** Power user commands */
- lazy val powerCommands: List[LoopCommand] = List(
- cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
- )
-
- private def dumpCommand(): Result = {
- echo("" + power)
- history.asStrings takeRight 30 foreach echo
- in.redrawLine()
- }
- private def valsCommand(): Result = power.valsDescription
-
- private val typeTransforms = List(
- "scala.collection.immutable." -> "immutable.",
- "scala.collection.mutable." -> "mutable.",
- "scala.collection.generic." -> "generic.",
- "java.lang." -> "jl.",
- "scala.runtime." -> "runtime."
- )
-
- private def importsCommand(line: String): Result = {
- val tokens = words(line)
- val handlers = intp.languageWildcardHandlers ++ intp.importHandlers
- val isVerbose = tokens contains "-v"
-
- handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach {
- case (handler, idx) =>
- val (types, terms) = handler.importedSymbols partition (_.name.isTypeName)
- val imps = handler.implicitSymbols
- val found = tokens filter (handler importsSymbolNamed _)
- val typeMsg = if (types.isEmpty) "" else types.size + " types"
- val termMsg = if (terms.isEmpty) "" else terms.size + " terms"
- val implicitMsg = if (imps.isEmpty) "" else imps.size + " are implicit"
- val foundMsg = if (found.isEmpty) "" else found.mkString(" // imports: ", ", ", "")
- val statsMsg = List(typeMsg, termMsg, implicitMsg) filterNot (_ == "") mkString ("(", ", ", ")")
-
- intp.reporter.printMessage("%2d) %-30s %s%s".format(
- idx + 1,
- handler.importString,
- statsMsg,
- foundMsg
- ))
- }
- }
-
- private def implicitsCommand(line: String): Result = onIntp { intp =>
- import intp._
- import global._
-
- def p(x: Any) = intp.reporter.printMessage("" + x)
-
- // If an argument is given, only show a source with that
- // in its name somewhere.
- val args = line split "\\s+"
- val filtered = intp.implicitSymbolsBySource filter {
- case (source, syms) =>
- (args contains "-v") || {
- if (line == "") (source.fullName.toString != "scala.Predef")
- else (args exists (source.name.toString contains _))
- }
- }
-
- if (filtered.isEmpty)
- return "No implicits have been imported other than those in Predef."
-
- filtered foreach {
- case (source, syms) =>
- p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
-
- // This groups the members by where the symbol is defined
- val byOwner = syms groupBy (_.owner)
- val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) }
-
- sortedOwners foreach {
- case (owner, members) =>
- // Within each owner, we cluster results based on the final result type
- // if there are more than a couple, and sort each cluster based on name.
- // This is really just trying to make the 100 or so implicits imported
- // by default into something readable.
- val memberGroups: List[List[Symbol]] = {
- val groups = members groupBy (_.tpe.finalResultType) toList
- val (big, small) = groups partition (_._2.size > 3)
- val xss = (
- (big sortBy (_._1.toString) map (_._2)) :+
- (small flatMap (_._2))
- )
-
- xss map (xs => xs sortBy (_.name.toString))
- }
-
- val ownerMessage = if (owner == source) " defined in " else " inherited from "
- p(" /* " + members.size + ownerMessage + owner.fullName + " */")
-
- memberGroups foreach { group =>
- group foreach (s => p(" " + intp.symbolDefString(s)))
- p("")
- }
- }
- p("")
- }
- }
-
- private[this] lazy val platformTools: Option[File] = {
- val jarName = "tools.jar"
- def jarPath(path: Path) = (path / "lib" / jarName).toFile
- def jarAt(path: Path) = {
- val f = jarPath(path)
- if (f.isFile) Some(f) else None
- }
- val jdkDir = {
- val d = Directory(jdkHome)
- if (d.isDirectory) Some(d) else None
- }
- def deeply(dir: Directory) = dir.deepFiles find (_.name == jarName)
-
- val home = envOrNone("JDK_HOME") orElse envOrNone("JAVA_HOME") map (p => Path(p))
- val install = Some(Path(javaHome))
-
- (home flatMap jarAt) orElse
- (install flatMap jarAt) orElse
- (install map (_.parent) flatMap jarAt) orElse
- (jdkDir flatMap deeply)
- }
- private def addToolsJarToLoader() = (
- if (Javap isAvailable intp.classLoader) {
- repldbg(":javap available on interpreter class path.")
- intp.classLoader
- } else {
- val cl = platformTools match {
- case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
- case _ => intp.classLoader
- }
- if (Javap isAvailable cl) {
- repldbg(":javap available on extended class path.")
- cl
- } else {
- repldbg(s":javap unavailable: no tools.jar at $jdkHome")
- intp.classLoader
- }
- }
- )
-
- protected def newJavap() = new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) {
- override def tryClass(path: String): Array[Byte] = {
- val hd :: rest = path split '.' toList;
- // If there are dots in the name, the first segment is the
- // key to finding it.
- if (rest.nonEmpty) {
- intp optFlatName hd match {
- case Some(flat) =>
- val clazz = flat :: rest mkString NAME_JOIN_STRING
- val bytes = super.tryClass(clazz)
- if (bytes.nonEmpty) bytes
- else super.tryClass(clazz + MODULE_SUFFIX_STRING)
- case _ => super.tryClass(path)
- }
- }
- else {
- // Look for Foo first, then Foo$, but if Foo$ is given explicitly,
- // we have to drop the $ to find object Foo, then tack it back onto
- // the end of the flattened name.
- def className = intp flatName path
- def moduleName = (intp flatName path.stripSuffix(MODULE_SUFFIX_STRING)) + MODULE_SUFFIX_STRING
-
- val bytes = super.tryClass(className)
- if (bytes.nonEmpty) bytes
- else super.tryClass(moduleName)
- }
- }
- }
- private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
-
- // Still todo: modules.
- private def typeCommand(line0: String): Result = {
- line0.trim match {
- case "" => ":type [-v] <expression>"
- case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true)
- case s => typeCommandInternal(s, false)
- }
- }
-
- private def warningsCommand(): Result = {
- if (intp.lastWarnings.isEmpty)
- "Can't find any cached warnings."
- else
- intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
- }
-
- private def javapCommand(line: String): Result = {
- if (javap == null)
- ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
- else if (javaVersion startsWith "1.7")
- ":javap not yet working with java 1.7"
- else if (line == "")
- ":javap [-lcsvp] [path1 path2 ...]"
- else
- javap(words(line)) foreach { res =>
- if (res.isError) return "Failed: " + res.value
- else res.show()
- }
- }
-
- private def wrapCommand(line: String): Result = {
- def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T"
- onIntp { intp =>
- import intp._
- import global._
-
- words(line) match {
- case Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => "Current execution wrapper: " + s
- }
- case "clear" :: Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
- }
- case wrapper :: Nil =>
- intp.typeOfExpression(wrapper) match {
- case PolyType(List(targ), MethodType(List(arg), restpe)) =>
- intp setExecutionWrapper intp.pathToTerm(wrapper)
- "Set wrapper to '" + wrapper + "'"
- case tp =>
- failMsg + "\nFound: <unknown>"
- }
- case _ => failMsg
- }
- }
- }
-
- private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent"
- private def phaseCommand(name: String): Result = {
- val phased: Phased = power.phased
- import phased.NoPhaseName
-
- if (name == "clear") {
- phased.set(NoPhaseName)
- intp.clearExecutionWrapper()
- "Cleared active phase."
- }
- else if (name == "") phased.get match {
- case NoPhaseName => "Usage: :phase <expr> (e.g. typer, erasure.next, erasure+3)"
- case ph => "Active phase is '%s'. (To clear, :phase clear)".format(phased.get)
- }
- else {
- val what = phased.parse(name)
- if (what.isEmpty || !phased.set(what))
- "'" + name + "' does not appear to represent a valid phase."
- else {
- intp.setExecutionWrapper(pathToPhaseWrapper)
- val activeMessage =
- if (what.toString.length == name.length) "" + what
- else "%s (%s)".format(what, name)
-
- "Active phase is now: " + activeMessage
- }
- }
- }
-
- /** Available commands */
- def commands: List[LoopCommand] = standardCommands ++ (
- if (isReplPower) powerCommands else Nil
- )
-
- val replayQuestionMessage =
- """|That entry seems to have slain the compiler. Shall I replay
- |your session? I can re-run each line except the last one.
- |[y/n]
- """.trim.stripMargin
-
- private val crashRecovery: PartialFunction[Throwable, Boolean] = {
- case ex: Throwable =>
- echo(intp.global.throwableAsString(ex))
-
- ex match {
- case _: NoSuchMethodError | _: NoClassDefFoundError =>
- echo("\nUnrecoverable error.")
- throw ex
- case _ =>
- def fn(): Boolean =
- try in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() })
- catch { case _: RuntimeException => false }
-
- if (fn()) replay()
- else echo("\nAbandoning crashed session.")
- }
- true
- }
-
- /** The main read-eval-print loop for the repl. It calls
- * command() for each line of input, and stops when
- * command() signals that the repl should stop.
- */
- def loop() {
- def readOneLine() = {
- out.flush()
- in readLine prompt
- }
- // return false if repl should exit
- def processLine(line: String): Boolean = {
- if (isAsync) {
- if (!awaitInitialized()) return false
- runThunks()
- }
- if (line eq null) false // assume null means EOF
- else command(line) match {
- case Result(false, _) => false
- case Result(_, Some(finalLine)) => addReplay(finalLine) ; true
- case _ => true
- }
- }
- def innerLoop() {
- if ( try processLine(readOneLine()) catch crashRecovery )
- innerLoop()
- }
- innerLoop()
- }
-
- /** interpret all lines from a specified file */
- def interpretAllFrom(file: File) {
- savingReader {
- savingReplayStack {
- file applyReader { reader =>
- in = SimpleReader(reader, out, false)
- echo("Loading " + file + "...")
- loop()
- }
- }
- }
- }
-
- /** create a new interpreter and replay the given commands */
- def replay() {
- reset()
- if (replayCommandStack.isEmpty)
- echo("Nothing to replay.")
- else for (cmd <- replayCommands) {
- echo("Replaying: " + cmd) // flush because maybe cmd will have its own output
- command(cmd)
- echo("")
- }
- }
- def resetCommand() {
- echo("Resetting interpreter state.")
- if (replayCommandStack.nonEmpty) {
- echo("Forgetting this session history:\n")
- replayCommands foreach echo
- echo("")
- replayCommandStack = Nil
- }
- if (intp.namedDefinedTerms.nonEmpty)
- echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
- if (intp.definedTypes.nonEmpty)
- echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
-
- reset()
- }
- def reset() {
- intp.reset()
- unleashAndSetPhase()
- }
-
- /** fork a shell and run a command */
- lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") {
- override def usage = "<command line>"
- def apply(line: String): Result = line match {
- case "" => showUsage()
- case _ =>
- val toRun = classOf[ProcessResult].getName + "(" + string2codeQuoted(line) + ")"
- intp interpret toRun
- ()
- }
- }
-
- def withFile(filename: String)(action: File => Unit) {
- val f = File(filename)
-
- if (f.exists) action(f)
- else echo("That file does not exist")
- }
-
- def loadCommand(arg: String) = {
- var shouldReplay: Option[String] = None
- withFile(arg)(f => {
- interpretAllFrom(f)
- shouldReplay = Some(":load " + arg)
- })
- Result(true, shouldReplay)
- }
-
- def addClasspath(arg: String): Unit = {
- val f = File(arg).normalize
- if (f.exists) {
- addedClasspath = ClassPath.join(addedClasspath, f.path)
- val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
- echo("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, totalClasspath))
- replay()
- }
- else echo("The path '" + f + "' doesn't seem to exist.")
- }
-
- def powerCmd(): Result = {
- if (isReplPower) "Already in power mode."
- else enablePowerMode(false)
- }
- def enablePowerMode(isDuringInit: Boolean) = {
- replProps.power setValue true
- unleashAndSetPhase()
- asyncEcho(isDuringInit, power.banner)
- }
- private def unleashAndSetPhase() {
- if (isReplPower) {
- power.unleash()
- // Set the phase to "typer"
- intp beSilentDuring phaseCommand("typer")
- }
- }
-
- def asyncEcho(async: Boolean, msg: => String) {
- if (async) asyncMessage(msg)
- else echo(msg)
- }
-
- def verbosity() = {
- val old = intp.printResults
- intp.printResults = !old
- echo("Switched " + (if (old) "off" else "on") + " result printing.")
- }
-
- /** Run one command submitted by the user. Two values are returned:
- * (1) whether to keep running, (2) the line to record for replay,
- * if any. */
- def command(line: String): Result = {
- if (line startsWith ":") {
- val cmd = line.tail takeWhile (x => !x.isWhitespace)
- uniqueCommand(cmd) match {
- case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace))
- case _ => ambiguousError(cmd)
- }
- }
- else if (intp.global == null) Result(false, None) // Notice failure to create compiler
- else Result(true, interpretStartingWith(line))
- }
-
- private def readWhile(cond: String => Boolean) = {
- Iterator continually in.readLine("") takeWhile (x => x != null && cond(x))
- }
-
- def pasteCommand(): Result = {
- echo("// Entering paste mode (ctrl-D to finish)\n")
- val code = readWhile(_ => true) mkString "\n"
- echo("\n// Exiting paste mode, now interpreting.\n")
- intp interpret code
- ()
- }
-
- private object paste extends Pasted {
- val ContinueString = " | "
- val PromptString = "scala> "
-
- def interpret(line: String): Unit = {
- echo(line.trim)
- intp interpret line
- echo("")
- }
-
- def transcript(start: String) = {
- echo("\n// Detected repl transcript paste: ctrl-D to finish.\n")
- apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim))
- }
- }
- import paste.{ ContinueString, PromptString }
-
- /** Interpret expressions starting with the first line.
- * Read lines until a complete compilation unit is available
- * or until a syntax error has been seen. If a full unit is
- * read, go ahead and interpret it. Return the full string
- * to be recorded for replay, if any.
- */
- def interpretStartingWith(code: String): Option[String] = {
- // signal to the completion machinery that non-completion input has been received
- in.completion.resetVerbosity()
-
- def reallyInterpret = {
- val reallyResult = intp.interpret(code)
- (reallyResult, reallyResult match {
- case IR.Error => None
- case IR.Success => Some(code)
- case IR.Incomplete =>
- if (in.interactive && code.endsWith("\n\n")) {
- echo("You typed two blank lines. Starting a new command.")
- None
- }
- else in.readLine(ContinueString) match {
- case null =>
- // we know compilation is going to fail since we're at EOF and the
- // parser thinks the input is still incomplete, but since this is
- // a file being read non-interactively we want to fail. So we send
- // it straight to the compiler for the nice error message.
- intp.compileString(code)
- None
-
- case line => interpretStartingWith(code + "\n" + line)
- }
- })
- }
-
- /** Here we place ourselves between the user and the interpreter and examine
- * the input they are ostensibly submitting. We intervene in several cases:
- *
- * 1) If the line starts with "scala> " it is assumed to be an interpreter paste.
- * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
- * on the previous result.
- * 3) If the Completion object's execute returns Some(_), we inject that value
- * and avoid the interpreter, as it's likely not valid scala code.
- */
- if (code == "") None
- else if (!paste.running && code.trim.startsWith(PromptString)) {
- paste.transcript(code)
- None
- }
- else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") {
- interpretStartingWith(intp.mostRecentVar + code)
- }
- else if (code.trim startsWith "//") {
- // line comment, do nothing
- None
- }
- else
- reallyInterpret._2
- }
-
- // runs :load `file` on any files passed via -i
- def loadFiles(settings: Settings) = settings match {
- case settings: GenericRunnerSettings =>
- for (filename <- settings.loadfiles.value) {
- val cmd = ":load " + filename
- command(cmd)
- addReplay(cmd)
- echo("")
- }
- case _ =>
- }
-
- /** Tries to create a JLineReader, falling back to SimpleReader,
- * unless settings or properties are such that it should start
- * with SimpleReader.
- */
- def chooseReader(settings: Settings): InteractiveReader = {
- if (settings.Xnojline.value || Properties.isEmacsShell)
- SimpleReader()
- else try new JLineReader(
- if (settings.noCompletion.value) NoCompletion
- else new JLineCompletion(intp)
- )
- catch {
- case ex @ (_: Exception | _: NoClassDefFoundError) =>
- echo("Failed to create JLineReader: " + ex + "\nFalling back to SimpleReader.")
- SimpleReader()
- }
- }
- def process(settings: Settings): Boolean = savingContextLoader {
- this.settings = settings
- createInterpreter()
-
- // sets in to some kind of reader depending on environmental cues
- in = in0 match {
- case Some(reader) => SimpleReader(reader, out, true)
- case None =>
- // some post-initialization
- chooseReader(settings) match {
- case x: JLineReader => addThunk(x.consoleReader.postInit) ; x
- case x => x
- }
- }
- // Bind intp somewhere out of the regular namespace where
- // we can get at it in generated code.
- addThunk(intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])))
- addThunk({
- import scala.tools.nsc.io._
- import Properties.userHome
- import scala.compat.Platform.EOL
- val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
- if (autorun.isDefined) intp.quietRun(autorun.get)
- })
-
- loadFiles(settings)
- // if the compiler is broken on startup, go ahead and exit
- if (intp.reporter.hasErrors)
- return false
-
- // This is about the illusion of snappiness. We call initialize()
- // which spins off a separate thread, then print the prompt and try
- // our best to look ready. The interlocking lazy vals tend to
- // inter-deadlock, so we break the cycle with a single asynchronous
- // signal to a listener thread.
- if (isAsync) {
- intp initialize initializedCallback()
- createAsyncListener() // listens for signal to run postInitialization
- }
- else {
- intp.initializeSynchronous()
- postInitialization()
- }
- printWelcome()
-
- try loop()
- catch AbstractOrMissingHandler()
- finally closeInterpreter()
-
- true
- }
-
- /** process command-line arguments and do as they request */
- def process(args: Array[String]): Boolean = {
- val command = new CommandLine(args.toList, echo)
- def neededHelp(): String =
- (if (command.settings.help.value) command.usageMsg + "\n" else "") +
- (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
-
- // if they asked for no help and command is valid, we call the real main
- neededHelp() match {
- case "" => command.ok && process(command.settings)
- case help => echoNoNL(help) ; true
- }
- }
-
- @deprecated("Use `process` instead", "2.9.0")
- def main(settings: Settings): Unit = process(settings)
-}
-
-object ILoop {
- implicit def loopToInterpreter(repl: ILoop): IMain = repl.intp
- private def echo(msg: String) = Console println msg
-
- // Designed primarily for use by test code: takes a String with a
- // bunch of code and prints out a transcript of what it would look
- // like if you'd just typed it into the repl.
- def runForTranscript(code: String, settings: Settings): String = {
- import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
-
- stringFromStream { ostream =>
- Console.withOut(ostream) {
- val output = new JPrintWriter(new OutputStreamWriter(ostream), true) {
- override def write(str: String) = {
- // completely skip continuation lines
- if (str forall (ch => ch.isWhitespace || ch == '|')) ()
- // print a newline on empty scala prompts
- else if ((str contains '\n') && (str.trim == "scala> ")) super.write("\n")
- else super.write(str)
- }
- }
- val input = new BufferedReader(new StringReader(code)) {
- override def readLine(): String = {
- val s = super.readLine()
- // helping out by printing the line being interpreted.
- if (s != null)
- output.println(s)
- s
- }
- }
- val repl = new ILoop(input, output)
- if (settings.classpath.isDefault)
- settings.classpath.value = sys.props("java.class.path")
-
- repl process settings
- }
- }
- }
-
- /** Creates an interpreter loop with default settings and feeds
- * the given code to it as input.
- */
- def run(code: String, sets: Settings = new Settings): String = {
- import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
-
- stringFromStream { ostream =>
- Console.withOut(ostream) {
- val input = new BufferedReader(new StringReader(code))
- val output = new JPrintWriter(new OutputStreamWriter(ostream), true)
- val repl = new ILoop(input, output)
-
- if (sets.classpath.isDefault)
- sets.classpath.value = sys.props("java.class.path")
-
- repl process sets
- }
- }
- }
- def run(lines: List[String]): String = run(lines map (_ + "\n") mkString)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
deleted file mode 100644
index e3c0494fa3..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.internal.util.Position
-import scala.util.control.Exception.ignoring
-import scala.tools.nsc.util.stackTraceString
-
-/**
- * Machinery for the asynchronous initialization of the repl.
- */
-trait ILoopInit {
- self: ILoop =>
-
- /** Print a welcome message */
- def printWelcome() {
- import Properties._
- val welcomeMsg =
- """|Welcome to Scala %s (%s, Java %s).
- |Type in expressions to have them evaluated.
- |Type :help for more information.""" .
- stripMargin.format(versionString, javaVmName, javaVersion)
- echo(welcomeMsg)
- replinfo("[info] started at " + new java.util.Date)
- }
-
- protected def asyncMessage(msg: String) {
- if (isReplInfo || isReplPower)
- echoAndRefresh(msg)
- }
-
- private val initLock = new java.util.concurrent.locks.ReentrantLock()
- private val initCompilerCondition = initLock.newCondition() // signal the compiler is initialized
- private val initLoopCondition = initLock.newCondition() // signal the whole repl is initialized
- private val initStart = System.nanoTime
-
- private def withLock[T](body: => T): T = {
- initLock.lock()
- try body
- finally initLock.unlock()
- }
- // a condition used to ensure serial access to the compiler.
- @volatile private var initIsComplete = false
- @volatile private var initError: String = null
- private def elapsed() = "%.3f".format((System.nanoTime - initStart).toDouble / 1000000000L)
-
- // the method to be called when the interpreter is initialized.
- // Very important this method does nothing synchronous (i.e. do
- // not try to use the interpreter) because until it returns, the
- // repl's lazy val `global` is still locked.
- protected def initializedCallback() = withLock(initCompilerCondition.signal())
-
- // Spins off a thread which awaits a single message once the interpreter
- // has been initialized.
- protected def createAsyncListener() = {
- io.spawn {
- withLock(initCompilerCondition.await())
- asyncMessage("[info] compiler init time: " + elapsed() + " s.")
- postInitialization()
- }
- }
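-
- // The async handshake, roughly: initializedCallback() signals
- // initCompilerCondition once the compiler future completes; the listener
- // spawned above wakes up, reports the compiler init time and runs
- // postInitialization(), which in turn signals initLoopCondition so that
- // awaitInitialized() in the main repl loop can proceed.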
-
- // called from main repl loop
- protected def awaitInitialized(): Boolean = {
- if (!initIsComplete)
- withLock { while (!initIsComplete) initLoopCondition.await() }
- if (initError != null) {
- println("""
- |Failed to initialize the REPL due to an unexpected error.
- |This is a bug; please report it along with the error diagnostics printed below.
- |%s.""".stripMargin.format(initError)
- )
- false
- } else true
- }
- // private def warningsThunks = List(
- // () => intp.bind("lastWarnings", "" + typeTag[List[(Position, String)]], intp.lastWarnings _),
- // )
-
- protected def postInitThunks = List[Option[() => Unit]](
- Some(intp.setContextClassLoader _),
- if (isReplPower) Some(() => enablePowerMode(true)) else None
- ).flatten
- // ++ (
- // warningsThunks
- // )
- // called once after init condition is signalled
- protected def postInitialization() {
- try {
- postInitThunks foreach (f => addThunk(f()))
- runThunks()
- } catch {
- case ex: Throwable =>
- initError = stackTraceString(ex)
- throw ex
- } finally {
- initIsComplete = true
-
- if (isAsync) {
- asyncMessage("[info] total init time: " + elapsed() + " s.")
- withLock(initLoopCondition.signal())
- }
- }
- }
- // code to be executed only after the interpreter is initialized
- // and the lazy val `global` can be accessed without risk of deadlock.
- private var pendingThunks: List[() => Unit] = Nil
- protected def addThunk(body: => Unit) = synchronized {
- pendingThunks :+= (() => body)
- }
- protected def runThunks(): Unit = synchronized {
- if (pendingThunks.nonEmpty)
- repldbg("Clearing " + pendingThunks.size + " thunks.")
-
- while (pendingThunks.nonEmpty) {
- val thunk = pendingThunks.head
- pendingThunks = pendingThunks.tail
- thunk()
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
deleted file mode 100644
index bed8570bd0..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ /dev/null
@@ -1,1235 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package interpreter
-
-import Predef.{ println => _, _ }
-import util.stringFromWriter
-import scala.reflect.internal.util._
-import java.net.URL
-import scala.sys.BooleanProp
-import io.VirtualDirectory
-import scala.tools.nsc.io.AbstractFile
-import reporters._
-import symtab.Flags
-import scala.reflect.internal.Names
-import scala.tools.util.PathResolver
-import scala.tools.nsc.util.ScalaClassLoader
-import ScalaClassLoader.URLClassLoader
-import scala.tools.nsc.util.Exceptional.unwrap
-import scala.collection.{ mutable, immutable }
-import scala.util.control.Exception.{ ultimately }
-import IMain._
-import java.util.concurrent.Future
-import typechecker.Analyzer
-import scala.language.implicitConversions
-import scala.reflect.runtime.{ universe => ru }
-import scala.reflect.{ ClassTag, classTag }
-import scala.tools.reflect.StdRuntimeTags._
-
-/** directory to save .class files to */
-private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("(memory)", None) {
- private def pp(root: AbstractFile, indentLevel: Int) {
- val spaces = " " * indentLevel
- out.println(spaces + root.name)
- if (root.isDirectory)
- root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
- }
- // print the contents hierarchically
- def show() = pp(this, 0)
-}
-
-/** An interpreter for Scala code.
- *
- * The main public entry points are compile(), interpret(), and bind().
- * The compile() method loads a complete Scala file. The interpret() method
- * executes one line of Scala code at the request of the user. The bind()
- * method binds an object to a variable that can then be used by later
- * interpreted code.
- *
- * The overall approach is based on compiling the requested code and then
- * using a Java classloader and Java reflection to run the code
- * and access its results.
- *
- * In more detail, a single compiler instance is used
- * to accumulate all successfully compiled or interpreted Scala code. To
- * "interpret" a line of code, the compiler generates a fresh object that
- * includes the line of code and which has public member(s) to export
- * all variables defined by that code. To extract the result of an
- * interpreted line to show the user, a second "result object" is created
- * which imports the variables exported by the above object and then
- * exports members called "$eval" and "$print". To accommodate user expressions
- * that read from variables or methods defined in previous statements, "import"
- * statements are used.
- *
- * This interpreter shares the strengths and weaknesses of using the
- * full compiler-to-Java. The main strength is that interpreted code
- * behaves exactly as does compiled code, including running at full speed.
- * The main weakness is that redefining classes and methods is not handled
- * properly, because rebinding at the Java level is technically difficult.
- *
- * @author Moez A. Abdel-Gawad
- * @author Lex Spoon
- */
-class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports {
- imain =>
-
- /** Leading with the eagerly evaluated.
- */
- val virtualDirectory: VirtualDirectory = new ReplVirtualDirectory(out) // "directory" for classfiles
- private var currentSettings: Settings = initialSettings
- private[nsc] var printResults = true // whether to print result lines
- private[nsc] var totalSilence = false // whether to print anything
- private var _initializeComplete = false // compiler is initialized
- private var _isInitialized: Future[Boolean] = null // set up initialization future
- private var bindExceptions = true // whether to bind the lastException variable
- private var _executionWrapper = "" // code to be wrapped around all lines
-
- /** We're going to go to some trouble to initialize the compiler asynchronously.
- * It's critical that nothing call into it until it's been initialized or we will
- * run into unrecoverable issues, but the perceived repl startup time goes
- * through the roof if we wait for it. So we initialize it with a future and
- * use a lazy val to ensure that any attempt to use the compiler object waits
- * on the future.
- */
- private var _classLoader: AbstractFileClassLoader = null // active classloader
- private val _compiler: Global = newCompiler(settings, reporter) // our private compiler
-
- private val nextReqId = {
- var counter = 0
- () => { counter += 1 ; counter }
- }
-
- def compilerClasspath: Seq[URL] = (
- if (isInitializeComplete) global.classPath.asURLs
- else new PathResolver(settings).result.asURLs // the compiler's classpath
- )
- def settings = currentSettings
- def mostRecentLine = prevRequestList match {
- case Nil => ""
- case req :: _ => req.originalLine
- }
- // Run the code body with the given boolean settings flipped to true.
- def withoutWarnings[T](body: => T): T = beQuietDuring {
- val saved = settings.nowarn.value
- if (!saved)
- settings.nowarn.value = true
-
- try body
- finally if (!saved) settings.nowarn.value = false
- }
-
- /** construct an interpreter that reports to Console */
- def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
- def this() = this(new Settings())
-
- lazy val repllog: Logger = new Logger {
- val out: JPrintWriter = imain.out
- val isInfo: Boolean = BooleanProp keyExists "scala.repl.info"
- val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug"
- val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace"
- }
- lazy val formatting: Formatting = new Formatting {
- val prompt = Properties.shellPromptString
- }
- lazy val reporter: ReplReporter = new ReplReporter(this)
-
- import formatting._
- import reporter.{ printMessage, withoutTruncating }
-
- // This exists mostly because using the reporter too early leads to deadlock.
- private def echo(msg: String) { Console println msg }
- private def _initSources = List(new BatchSourceFile("<init>", "class $repl_$init { }"))
- private def _initialize() = {
- try {
- // todo. if this crashes, REPL will hang
- new _compiler.Run() compileSources _initSources
- _initializeComplete = true
- true
- }
- catch AbstractOrMissingHandler()
- }
- private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
-
- // argument is a thunk to execute after init is done
- def initialize(postInitSignal: => Unit) {
- synchronized {
- if (_isInitialized == null) {
- _isInitialized = io.spawn {
- try _initialize()
- finally postInitSignal
- }
- }
- }
- }
- def initializeSynchronous(): Unit = {
- if (!isInitializeComplete) {
- _initialize()
- assert(global != null, global)
- }
- }
- def isInitializeComplete = _initializeComplete
-
- /** The public accessor for the compiler: it goes through the initialization future. */
- lazy val global: Global = {
- if (isInitializeComplete) _compiler
- else {
- // If init hasn't been called yet you're on your own.
- if (_isInitialized == null) {
- repldbg("Warning: compiler accessed before init set up. Assuming no postInit code.")
- initialize(())
- }
- // blocks until initialization is complete; false means catastrophic failure
- if (_isInitialized.get()) _compiler
- else null
- }
- }
- @deprecated("Use `global` for access to the compiler instance.", "2.9.0")
- lazy val compiler: global.type = global
-
- import global._
- import definitions.{ScalaPackage, JavaLangPackage, termMember, typeMember}
- import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass}
-
- implicit class ReplTypeOps(tp: Type) {
- def orElse(other: => Type): Type = if (tp ne NoType) tp else other
- def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
- }
-
- // TODO: If we try to make naming a lazy val, we run into big time
- // scalac unhappiness with what look like cycles. It has not been easy to
- // reduce, but name resolution clearly takes different paths.
- object naming extends {
- val global: imain.global.type = imain.global
- } with Naming {
- // make sure we don't overwrite their unwisely named res3 etc.
- def freshUserTermName(): TermName = {
- val name = newTermName(freshUserVarName())
- if (definedNameMap contains name) freshUserTermName()
- else name
- }
- def isUserTermName(name: Name) = isUserVarName("" + name)
- def isInternalTermName(name: Name) = isInternalVarName("" + name)
- }
- import naming._
-
- object deconstruct extends {
- val global: imain.global.type = imain.global
- } with StructuredTypeStrings
-
- lazy val memberHandlers = new {
- val intp: imain.type = imain
- } with MemberHandlers
- import memberHandlers._
-
- /** Temporarily be quiet */
- def beQuietDuring[T](body: => T): T = {
- val saved = printResults
- printResults = false
- try body
- finally printResults = saved
- }
- def beSilentDuring[T](operation: => T): T = {
- val saved = totalSilence
- totalSilence = true
- try operation
- finally totalSilence = saved
- }
-
- def quietRun[T](code: String) = beQuietDuring(interpret(code))
-
- /** takes AnyRef because it may be binding a Throwable or an Exceptional */
- private def withLastExceptionLock[T](body: => T, alt: => T): T = {
- assert(bindExceptions, "withLastExceptionLock called incorrectly.")
- bindExceptions = false
-
- try beQuietDuring(body)
- catch logAndDiscard("withLastExceptionLock", alt)
- finally bindExceptions = true
- }
-
- def executionWrapper = _executionWrapper
- def setExecutionWrapper(code: String) = _executionWrapper = code
- def clearExecutionWrapper() = _executionWrapper = ""
-
- /** interpreter settings */
- lazy val isettings = new ISettings(this)
-
- /** Instantiate a compiler. Overridable. */
- protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
- settings.outputDirs setSingleOutput virtualDirectory
- settings.exposeEmptyPackage.value = true
- new Global(settings, reporter) with ReplGlobal {
- override def toString: String = "<global>"
- }
- }
-
- /** Parent classloader. Overridable. */
- protected def parentClassLoader: ClassLoader =
- settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
-
- /* A single class loader is used for all commands interpreted by this Interpreter.
- It would also be possible to create a new class loader for each command
- to interpret. The advantages of the current approach are:
-
- - Expressions are only evaluated one time. This is especially
- significant for I/O, e.g. "val x = Console.readLine"
-
- The main disadvantage is:
-
- - Objects, classes, and methods cannot be rebound. Instead, definitions
- shadow the old ones, and old code objects refer to the old
- definitions.
- */
- def resetClassLoader() = {
- repldbg("Setting new classloader: was " + _classLoader)
- _classLoader = null
- ensureClassLoader()
- }
- final def ensureClassLoader() {
- if (_classLoader == null)
- _classLoader = makeClassLoader()
- }
- def classLoader: AbstractFileClassLoader = {
- ensureClassLoader()
- _classLoader
- }
- private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) {
- /** Overridden here to try translating a simple name to the generated
- * class name if the original attempt fails. This method is used by
- * getResourceAsStream as well as findClass.
- */
- override protected def findAbstractFile(name: String): AbstractFile = {
- super.findAbstractFile(name) match {
- // deadlocks on startup if we try to translate names too early
- case null if isInitializeComplete =>
- generatedName(name) map (x => super.findAbstractFile(x)) orNull
- case file =>
- file
- }
- }
- }
- private def makeClassLoader(): AbstractFileClassLoader =
- new TranslatingClassLoader(parentClassLoader match {
- case null => ScalaClassLoader fromURLs compilerClasspath
- case p => new URLClassLoader(compilerClasspath, p)
- })
-
- def getInterpreterClassLoader() = classLoader
-
- // Set the current Java "context" class loader to this interpreter's class loader
- def setContextClassLoader() = classLoader.setAsContext()
-
- /** Given a simple repl-defined name, returns the real name of
- * the class representing it, e.g. for "Bippy" it may return
- * {{{
- * $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy
- * }}}
- */
- def generatedName(simpleName: String): Option[String] = {
- if (simpleName endsWith nme.MODULE_SUFFIX_STRING) optFlatName(simpleName.init) map (_ + nme.MODULE_SUFFIX_STRING)
- else optFlatName(simpleName)
- }
- def flatName(id: String) = optFlatName(id) getOrElse id
- def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
-
- def allDefinedNames = definedNameMap.keys.toList.sorted
- def pathToType(id: String): String = pathToName(newTypeName(id))
- def pathToTerm(id: String): String = pathToName(newTermName(id))
- def pathToName(name: Name): String = {
- if (definedNameMap contains name)
- definedNameMap(name) fullPath name
- else name.toString
- }
-
- /** Most recent tree handled which wasn't wholly synthetic. */
- private def mostRecentlyHandledTree: Option[Tree] = {
- prevRequests.reverse foreach { req =>
- req.handlers.reverse foreach {
- case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member)
- case _ => ()
- }
- }
- None
- }
-
- /** Stubs for work in progress. */
- def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = {
- for (t1 <- old.simpleNameOfType(name) ; t2 <- req.simpleNameOfType(name)) {
- repldbg("Redefining type '%s'\n %s -> %s".format(name, t1, t2))
- }
- }
-
- def handleTermRedefinition(name: TermName, old: Request, req: Request) = {
- for (t1 <- old.compilerTypeOf get name ; t2 <- req.compilerTypeOf get name) {
- // Printing the types here has a tendency to cause assertion errors, like
- // assertion failed: fatal: <refinement> has owner value x, but a class owner is required
- // so the debug message is passed by-name now to keep it in the family. (It also traps the assertion error,
- // but we don't want to unnecessarily risk hosing the compiler's internal state.)
- repldbg("Redefining term '%s'\n %s -> %s".format(name, t1, t2))
- }
- }
-
- def recordRequest(req: Request) {
- if (req == null || referencedNameMap == null)
- return
-
- prevRequests += req
- req.referencedNames foreach (x => referencedNameMap(x) = req)
-
- // warning about serially defining companions. It'd be easy
- // enough to just redefine them together but that may not always
- // be what people want so I'm waiting until I can do it better.
- for {
- name <- req.definedNames filterNot (x => req.definedNames contains x.companionName)
- oldReq <- definedNameMap get name.companionName
- newSym <- req.definedSymbols get name
- oldSym <- oldReq.definedSymbols get name.companionName
- if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }
- } {
- afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
- replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
- }
-
- // Updating the defined name map
- req.definedNames foreach { name =>
- if (definedNameMap contains name) {
- if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req)
- else handleTermRedefinition(name.toTermName, definedNameMap(name), req)
- }
- definedNameMap(name) = req
- }
- }
-
- private[nsc] def replwarn(msg: => String) {
- if (!settings.nowarnings.value)
- printMessage(msg)
- }
-
- def isParseable(line: String): Boolean = {
- beSilentDuring {
- try parse(line) match {
- case Some(xs) => xs.nonEmpty // parses as-is
- case None => true // incomplete
- }
- catch { case x: Exception => // crashed the compiler
- replwarn("Exception in isParseable(\"" + line + "\"): " + x)
- false
- }
- }
- }
-
- def compileSourcesKeepingRun(sources: SourceFile*) = {
- val run = new Run()
- reporter.reset()
- run compileSources sources.toList
- (!reporter.hasErrors, run)
- }
-
- /** Compile an nsc SourceFile. Returns true if there are
- * no compilation errors, or false otherwise.
- */
- def compileSources(sources: SourceFile*): Boolean =
- compileSourcesKeepingRun(sources: _*)._1
-
- /** Compile a string. Returns true if there are no
- * compilation errors, or false otherwise.
- */
- def compileString(code: String): Boolean =
- compileSources(new BatchSourceFile("<script>", code))
-
- /** Build a request from the user. `trees` is `line` after being parsed.
- */
- private def buildRequest(line: String, trees: List[Tree]): Request = {
- executingRequest = new Request(line, trees)
- executingRequest
- }
-
- // rewriting "5 // foo" to "val x = { 5 // foo }" creates broken code because
- // the close brace is commented out. Strip single-line comments.
- // ... but for error message output reasons this is not used, and rather than
- // enclosing in braces it is constructed like "val x =\n5 // foo".
- private def removeComments(line: String): String = {
- showCodeIfDebugging(line) // as we're about to lose our // show
- line.lines map (s => s indexOf "//" match {
- case -1 => s
- case idx => s take idx
- }) mkString "\n"
- }
-
- private def safePos(t: Tree, alt: Int): Int =
- try t.pos.startOrPoint
- catch { case _: UnsupportedOperationException => alt }
-
- // Given an expression like 10 * 10 * 10 we receive the parent tree positioned
- // at a '*'. So look at each subtree and find the earliest of all positions.
- private def earliestPosition(tree: Tree): Int = {
- var pos = Int.MaxValue
- tree foreach { t =>
- pos = math.min(pos, safePos(t, Int.MaxValue))
- }
- pos
- }
-
- private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
- val content = indentCode(line)
- val trees = parse(content) match {
- case None => return Left(IR.Incomplete)
- case Some(Nil) => return Left(IR.Error) // parse error or empty input
- case Some(trees) => trees
- }
- repltrace(
- trees map (t => {
- // [Eugene to Paul] previously it just said `t map ...`
- // because there was an implicit conversion from Tree to a list of Trees
- // however Martin and I have removed the conversion
- // (it was conflicting with the new reflection API),
- // so I had to rewrite this a bit
- val subs = t collect { case sub => sub }
- subs map (t0 =>
- " " + safePos(t0, -1) + ": " + t0.shortClass + "\n"
- ) mkString ""
- }) mkString "\n"
- )
- // If the last tree is a bare expression, pinpoint where it begins using the
- // AST node position and snap the line off there. Rewrite the code embodied
- // by the last tree as a ValDef instead, so we can access the value.
- trees.last match {
- case _:Assign => // we don't want to include assignments
- case _:TermTree | _:Ident | _:Select => // ... but do want other unnamed terms.
- val varName = if (synthetic) freshInternalVarName() else freshUserVarName()
- val rewrittenLine = (
- // In theory this would come out the same without the 1-specific test, but
- // it's a cushion against any more sneaky parse-tree position vs. code mismatches:
- // this way such issues will only arise on multiple-statement repl input lines,
- // which most people don't use.
- if (trees.size == 1) "val " + varName + " =\n" + content
- else {
- // The position of the last tree
- val lastpos0 = earliestPosition(trees.last)
- // Oh boy, the parser throws away parens so "(2+2)" is mispositioned,
- // with increasingly hard to decipher positions as we move on to "() => 5",
- // (x: Int) => x + 1, and more. So I abandon attempts to finesse and just
- // look for semicolons and newlines, which I'm sure is also buggy.
- val (raw1, raw2) = content splitAt lastpos0
- repldbg("[raw] " + raw1 + " <---> " + raw2)
-
- val adjustment = (raw1.reverse takeWhile (ch => (ch != ';') && (ch != '\n'))).size
- val lastpos = lastpos0 - adjustment
-
- // the source code split at the laboriously determined position.
- val (l1, l2) = content splitAt lastpos
- repldbg("[adj] " + l1 + " <---> " + l2)
-
- val prefix = if (l1.trim == "") "" else l1 + ";\n"
- // Note to self: val source needs to have this precise structure so that
- // error messages print the user-submitted part without the "val res0 = " part.
- val combined = prefix + "val " + varName + " =\n" + l2
-
- repldbg(List(
- " line" -> line,
- " content" -> content,
- " was" -> l2,
- "combined" -> combined) map {
- case (label, s) => label + ": '" + s + "'"
- } mkString "\n"
- )
- combined
- }
- )
- // Rewriting "foo ; bar ; 123"
- // to "foo ; bar ; val resXX = 123"
- requestFromLine(rewrittenLine, synthetic) match {
- case Right(req) => return Right(req withOriginalLine line)
- case x => return x
- }
- case _ =>
- }
- Right(buildRequest(line, trees))
- }
-
- // normalize non-public types so we don't see protected aliases like Self
- def normalizeNonPublic(tp: Type) = tp match {
- case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
- case _ => tp
- }
-
- /**
- * Interpret one line of input. All feedback, including parse errors
- * and evaluation results, is printed via the supplied compiler's
- * reporter. Values defined are available for future interpreted strings.
- *
- * The return value indicates whether the line was interpreted successfully,
- * e.g. that there were no parse errors.
- */
- def interpret(line: String): IR.Result = interpret(line, false)
- def interpretSynthetic(line: String): IR.Result = interpret(line, true)
- def interpret(line: String, synthetic: Boolean): IR.Result = {
- def loadAndRunReq(req: Request) = {
- classLoader.setAsContext()
- val (result, succeeded) = req.loadAndRun
-
- /** To our displeasure, ConsoleReporter offers only printMessage,
- * which tacks a newline on the end. Since that breaks all the
- * output checking, we have to take one off to balance.
- */
- if (succeeded) {
- if (printResults && result != "")
- printMessage(result stripSuffix "\n")
- else if (isReplDebug) // show quiet-mode activity
- printMessage(result.trim.lines map ("[quiet] " + _) mkString "\n")
-
- // Book-keeping. Have to record synthetic requests too,
- // as they may have been issued for information, e.g. :type
- recordRequest(req)
- IR.Success
- }
- else {
- // don't truncate stack traces
- withoutTruncating(printMessage(result))
- IR.Error
- }
- }
-
- if (global == null) IR.Error
- else requestFromLine(line, synthetic) match {
- case Left(result) => result
- case Right(req) =>
- // null indicates a disallowed statement type; otherwise compile and
- // fail if false (implying e.g. a type error)
- if (req == null || !req.compile) IR.Error
- else loadAndRunReq(req)
- }
- }
-
- /** Bind a specified name to a specified value. The name may
- * later be used by expressions passed to interpret.
- *
- * @param name the variable name to bind
- * @param boundType the type of the variable, as a string
- * @param value the object value to bind to it
- * @return an indication of whether the binding succeeded
- */
- def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = {
- val bindRep = new ReadEvalPrint()
- val run = bindRep.compile("""
- |object %s {
- | var value: %s = _
- | def set(x: Any) = value = x.asInstanceOf[%s]
- |}
- """.stripMargin.format(bindRep.evalName, boundType, boundType)
- )
- bindRep.callEither("set", value) match {
- case Left(ex) =>
- repldbg("Set failed in bind(%s, %s, %s)".format(name, boundType, value))
- repldbg(util.stackTraceString(ex))
- IR.Error
-
- case Right(_) =>
- val line = "%sval %s = %s.value".format(modifiers map (_ + " ") mkString, name, bindRep.evalPath)
- repldbg("Interpreting: " + line)
- interpret(line)
- }
- }
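- // Usage sketch (illustrative; `intp` stands for some IMain instance):
- //   intp.bind("answer", "Int", 42)
- // makes `answer: Int` visible to later interpreted lines, e.g. intp.interpret("answer + 1").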
- def directBind(name: String, boundType: String, value: Any): IR.Result = {
- val result = bind(name, boundType, value)
- if (result == IR.Success)
- directlyBoundNames += newTermName(name)
- result
- }
- def directBind(p: NamedParam): IR.Result = directBind(p.name, p.tpe, p.value)
- def directBind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = directBind((name, value))
-
- def rebind(p: NamedParam): IR.Result = {
- val name = p.name
- val oldType = typeOfTerm(name) orElse { return IR.Error }
- val newType = p.tpe
- val tempName = freshInternalVarName()
-
- quietRun("val %s = %s".format(tempName, name))
- quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType))
- }
- def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*))
- def addImports(ids: String*): IR.Result =
- if (ids.isEmpty) IR.Success
- else interpret("import " + ids.mkString(", "))
-
- def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
- def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
- def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value))
- def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x)
- def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x)
- def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
-
- /** Reset this interpreter, forgetting all user-specified requests. */
- def reset() {
- clearExecutionWrapper()
- resetClassLoader()
- resetAllCreators()
- prevRequests.clear()
- referencedNameMap.clear()
- definedNameMap.clear()
- virtualDirectory.clear()
- }
-
- /** This instance is no longer needed, so release any resources
- * it is using. The reporter's output gets flushed.
- */
- def close() {
- reporter.flush()
- }
-
- /** Here is where we:
- *
- * 1) Read some source code, and put it in the "read" object.
- * 2) Evaluate the read object, and put the result in the "eval" object.
- * 3) Create a String for human consumption, and put it in the "print" object.
- *
- * Read! Eval! Print! Some of that not yet centralized here.
- */
- class ReadEvalPrint(lineId: Int) {
- def this() = this(freshLineId())
-
- private var lastRun: Run = _
- private var evalCaught: Option[Throwable] = None
- private var conditionalWarnings: List[ConditionalWarning] = Nil
-
- val packageName = sessionNames.line + lineId
- val readName = sessionNames.read
- val evalName = sessionNames.eval
- val printName = sessionNames.print
- val resultName = sessionNames.result
-
- def bindError(t: Throwable) = {
- if (!bindExceptions) // avoid looping if already binding
- throw t
-
- val unwrapped = unwrap(t)
- withLastExceptionLock[String]({
- directBind[Throwable]("lastException", unwrapped)(tagOfThrowable, classTag[Throwable])
- util.stackTraceString(unwrapped)
- }, util.stackTraceString(unwrapped))
- }
-
- // TODO: split it out into a package object and a regular
- // object and we can do that much less wrapping.
- def packageDecl = "package " + packageName
-
- def pathTo(name: String) = packageName + "." + name
- def packaged(code: String) = packageDecl + "\n\n" + code
-
- def readPath = pathTo(readName)
- def evalPath = pathTo(evalName)
- def printPath = pathTo(printName)
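- // For example (illustrative, assuming the default session names): with lineId 5,
- // packageName is "$line5", evalPath is "$line5.$eval" and readPath is "$line5.$read".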
-
- def call(name: String, args: Any*): AnyRef = {
- val m = evalMethod(name)
- repldbg("Invoking: " + m)
- if (args.nonEmpty)
- repldbg(" with args: " + args.mkString(", "))
-
- m.invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*)
- }
-
- def callEither(name: String, args: Any*): Either[Throwable, AnyRef] =
- try Right(call(name, args: _*))
- catch { case ex: Throwable => Left(ex) }
-
- def callOpt(name: String, args: Any*): Option[AnyRef] =
- try Some(call(name, args: _*))
- catch { case ex: Throwable => bindError(ex) ; None }
-
- class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { }
-
- private def evalError(path: String, ex: Throwable) =
- throw new EvalException("Failed to load '" + path + "': " + ex.getMessage, ex)
-
- private def load(path: String): Class[_] = {
- try Class.forName(path, true, classLoader)
- catch { case ex: Throwable => evalError(path, unwrap(ex)) }
- }
-
- lazy val evalClass = load(evalPath)
- lazy val evalValue = callEither(resultName) match {
- case Left(ex) => evalCaught = Some(ex) ; None
- case Right(result) => Some(result)
- }
-
- def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
-
- /** The innermost object inside the wrapper, found by
- * following accessPath into the outer one.
- */
- def resolvePathToSymbol(accessPath: String): Symbol = {
- val readRoot = getRequiredModule(readPath) // the outermost wrapper
- (accessPath split '.').foldLeft(readRoot: Symbol) {
- case (sym, "") => sym
- case (sym, name) => afterTyper(termMember(sym, name))
- }
- }
- /** We get a bunch of repeated warnings for reasons I haven't
- * entirely figured out yet. For now, squash.
- */
- private def updateRecentWarnings(run: Run) {
- def loop(xs: List[(Position, String)]): List[(Position, String)] = xs match {
- case Nil => Nil
- case ((pos, msg)) :: rest =>
- val filtered = rest filter { case (pos0, msg0) =>
- (msg != msg0) || (pos.lineContent.trim != pos0.lineContent.trim) || {
- // same messages and same line content after whitespace removal
- // but we want to let through multiple warnings on the same line
- // from the same run. The untrimmed line will be the same since
- // there's no whitespace indenting blowing it.
- (pos.lineContent == pos0.lineContent)
- }
- }
- ((pos, msg)) :: loop(filtered)
- }
- val warnings = loop(run.allConditionalWarnings flatMap (_.warnings))
- if (warnings.nonEmpty)
- mostRecentWarnings = warnings
- }
- private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
- case Array(method) => method
- case xs => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
- }
- private def compileAndSaveRun(label: String, code: String) = {
- showCodeIfDebugging(code)
- val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
- updateRecentWarnings(run)
- lastRun = run
- success
- }
- }
-
- /** One line of code submitted by the user for interpretation */
- // private
- class Request(val line: String, val trees: List[Tree]) {
- val reqId = nextReqId()
- val lineRep = new ReadEvalPrint()
-
- private var _originalLine: String = null
- def withOriginalLine(s: String): this.type = { _originalLine = s ; this }
- def originalLine = if (_originalLine == null) line else _originalLine
-
- /** handlers for each tree in this request */
- val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
- def defHandlers = handlers collect { case x: MemberDefHandler => x }
-
- /** all (public) names defined by these statements */
- val definedNames = handlers flatMap (_.definedNames)
-
- /** list of names used by this expression */
- val referencedNames: List[Name] = handlers flatMap (_.referencedNames)
-
- /** def and val names */
- def termNames = handlers flatMap (_.definesTerm)
- def typeNames = handlers flatMap (_.definesType)
- def definedOrImported = handlers flatMap (_.definedOrImported)
- def definedSymbolList = defHandlers flatMap (_.definedSymbols)
-
- def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name))
- def definedTermSymbol(name: String) = definedSymbols(newTermName(name))
-
- /** Code to import bound names from previous lines - accessPath is code to
- * append to objectName to access anything bound by request.
- */
- val ComputedImports(importsPreamble, importsTrailer, accessPath) =
- importsCode(referencedNames.toSet)
-
- /** Code to access a variable with the specified name */
- def fullPath(vname: String) = (
- lineRep.readPath + accessPath + ".`%s`".format(vname)
- )
- /** Same as fullPath, but after it has been flattened, so:
- * $line5.$iw.$iw.$iw.Bippy // fullPath
- * $line5.$iw$$iw$$iw$Bippy // fullFlatName
- */
- def fullFlatName(name: String) =
- lineRep.readPath + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
-
- /** The unmangled symbol name, but supplemented with line info. */
- def disambiguated(name: Name): String = name + " (in " + lineRep + ")"
-
- /** Code to access a variable with the specified name */
- def fullPath(vname: Name): String = fullPath(vname.toString)
-
- /** the line of code to compute */
- def toCompute = line
-
- /** generate the source code for the object that computes this request */
- private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
- def path = pathToTerm("$intp")
- def envLines = {
- if (!isReplPower) Nil // power mode only for now
- // $intp is not bound; punt, but include the line.
- else if (path == "$intp") List(
- "def $line = " + tquoted(originalLine),
- "def $trees = Nil"
- )
- else List(
- "def $line = " + tquoted(originalLine),
- "def $req = %s.requestForReqId(%s).orNull".format(path, reqId),
- "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId)
- )
- }
-
- val preamble = """
- |object %s {
- |%s%s%s
- """.stripMargin.format(lineRep.readName, envLines.map(" " + _ + ";\n").mkString, importsPreamble, indentCode(toCompute))
- val postamble = importsTrailer + "\n}"
- val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
- }
-
- private object ResultObjectSourceCode extends CodeAssembler[MemberHandler] {
- /** We only want to generate this code when the result
- * is a value which can be referred to as-is.
- */
- val evalResult =
- if (!handlers.last.definesValue) ""
- else handlers.last.definesTerm match {
- case Some(vname) if typeOf contains vname =>
- "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
- case _ => ""
- }
- // the first line evaluates the object to make sure its constructor is run
- // the initial "" is there so later code can uniformly be appended with "+ ..."
- val preamble = """
- |object %s {
- | %s
- | val %s: String = %s {
- | %s
- | (""
- """.stripMargin.format(
- lineRep.evalName, evalResult, lineRep.printName,
- executionWrapper, lineRep.readName + accessPath
- )
-
- val postamble = """
- | )
- | }
- |}
- """.stripMargin
- val generate = (m: MemberHandler) => m resultExtractionCode Request.this
- }
-
- // get it
- def getEvalTyped[T] : Option[T] = getEval map (_.asInstanceOf[T])
- def getEval: Option[AnyRef] = {
- // ensure it has been compiled
- compile
- // try to load it and call the value method
- lineRep.evalValue filterNot (_ == null)
- }
-
- /** Compile the object file. Returns whether the compilation succeeded.
- * If all goes well, the "types" map is computed. */
- lazy val compile: Boolean = {
- // error counting is wrong, hence interpreter may overlook failure - so we reset
- reporter.reset()
-
- // compile the object containing the user's code
- lineRep.compile(ObjectSourceCode(handlers)) && {
- // extract and remember types
- typeOf
- typesOfDefinedTerms
-
- // Assign symbols to the original trees
- // TODO - just use the new trees.
- defHandlers foreach { dh =>
- val name = dh.member.name
- definedSymbols get name foreach { sym =>
- dh.member setSymbol sym
- repldbg("Set symbol of " + name + " to " + sym.defString)
- }
- }
-
- // compile the result-extraction object
- withoutWarnings(lineRep compile ResultObjectSourceCode(handlers))
- }
- }
-
- lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
- def applyToResultMember[T](name: Name, f: Symbol => T) = afterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
-
- /* typeOf lookup with encoding */
- def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
- def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbol.simpleName)
-
- private def typeMap[T](f: Type => T) =
- mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))
-
- /** Types of variables defined by this request. */
- lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType
- /** String representations of same. */
- lazy val typeOf = typeMap[String](tp => afterTyper(tp.toString))
-
- // lazy val definedTypes: Map[Name, Type] = {
- // typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
- // }
- lazy val definedSymbols = (
- termNames.map(x => x -> applyToResultMember(x, x => x)) ++
- typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
- ).toMap[Name, Symbol] withDefaultValue NoSymbol
-
- lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))
-
- /** load and run the code using reflection */
- def loadAndRun: (String, Boolean) = {
- try { ("" + (lineRep call sessionNames.print), true) }
- catch { case ex: Throwable => (lineRep.bindError(ex), false) }
- }
-
- override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
- }
-
- /** Returns the name of the most recent interpreter result.
- * Mostly this exists so you can conveniently invoke methods on
- * the previous result.
- */
- def mostRecentVar: String =
- if (mostRecentlyHandledTree.isEmpty) ""
- else "" + (mostRecentlyHandledTree.get match {
- case x: ValOrDefDef => x.name
- case Assign(Ident(name), _) => name
- case ModuleDef(_, name, _) => name
- case _ => naming.mostRecentVar
- })
-
- private var mostRecentWarnings: List[(global.Position, String)] = Nil
- def lastWarnings = mostRecentWarnings
-
- def treesForRequestId(id: Int): List[Tree] =
- requestForReqId(id).toList flatMap (_.trees)
-
- def requestForReqId(id: Int): Option[Request] =
- if (executingRequest != null && executingRequest.reqId == id) Some(executingRequest)
- else prevRequests find (_.reqId == id)
-
- def requestForName(name: Name): Option[Request] = {
- assert(definedNameMap != null, "definedNameMap is null")
- definedNameMap get name
- }
-
- def requestForIdent(line: String): Option[Request] =
- requestForName(newTermName(line)) orElse requestForName(newTypeName(line))
-
- def requestHistoryForName(name: Name): List[Request] =
- prevRequests.toList.reverse filter (_.definedNames contains name)
-
- def definitionForName(name: Name): Option[MemberHandler] =
- requestForName(name) flatMap { req =>
- req.handlers find (_.definedNames contains name)
- }
-
- def valueOfTerm(id: String): Option[AnyRef] =
- requestForName(newTermName(id)) flatMap (_.getEval)
-
- def classOfTerm(id: String): Option[JClass] =
- valueOfTerm(id) map (_.getClass)
-
- def typeOfTerm(id: String): Type = newTermName(id) match {
- case nme.ROOTPKG => RootClass.tpe
- case name => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name)
- }
-
- def symbolOfType(id: String): Symbol =
- requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id)
-
- def symbolOfTerm(id: String): Symbol =
- requestForIdent(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id)
-
- def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
- classOfTerm(id) flatMap { clazz =>
- clazz.supers find (!_.isScalaAnonymous) map { nonAnon =>
- (nonAnon, runtimeTypeOfTerm(id))
- }
- }
- }
-
- def runtimeTypeOfTerm(id: String): Type = {
- typeOfTerm(id) andAlso { tpe =>
- val clazz = classOfTerm(id) getOrElse { return NoType }
- val staticSym = tpe.typeSymbol
- val runtimeSym = getClassIfDefined(clazz.getName)
-
- if ((runtimeSym != NoSymbol) && (runtimeSym != staticSym) && (runtimeSym isSubClass staticSym))
- runtimeSym.info
- else NoType
- }
- }
- def cleanMemberDecl(owner: Symbol, member: Name): Type = afterTyper {
- normalizeNonPublic {
- owner.info.nonPrivateDecl(member).tpe match {
- case NullaryMethodType(tp) => tp
- case tp => tp
- }
- }
- }
-
- object exprTyper extends {
- val repl: IMain.this.type = imain
- } with ExprTyper { }
-
- def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
-
- def symbolOfLine(code: String): Symbol =
- exprTyper.symbolOfLine(code)
-
- def typeOfExpression(expr: String, silent: Boolean = true): Type =
- exprTyper.typeOfExpression(expr, silent)
-
- protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
- protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
-
- def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName
- def definedTypes = onlyTypes(allDefinedNames)
- def definedSymbols = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol]
- def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) filterNot (s => isInternalTermName(s.name))
-
- // Terms with user-given names (i.e. not res0 and not synthetic)
- def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))
-
- private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol
-
- /** Translate a repl-defined identifier into a Symbol.
- */
- def apply(name: String): Symbol =
- types(name) orElse terms(name)
-
- def types(name: String): Symbol = {
- val tpname = newTypeName(name)
- findName(tpname) orElse getClassIfDefined(tpname)
- }
- def terms(name: String): Symbol = {
- val termname = newTermName(name)
- findName(termname) orElse getModuleIfDefined(termname)
- }
- // [Eugene to Paul] possibly you could make use of TypeTags here
- def types[T: ClassTag] : Symbol = types(classTag[T].runtimeClass.getName)
- def terms[T: ClassTag] : Symbol = terms(classTag[T].runtimeClass.getName)
- def apply[T: ClassTag] : Symbol = apply(classTag[T].runtimeClass.getName)
-
- def classSymbols = allDefSymbols collect { case x: ClassSymbol => x }
- def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x }
-
- /** the previous requests this interpreter has processed */
- private var executingRequest: Request = _
- private val prevRequests = mutable.ListBuffer[Request]()
- private val referencedNameMap = mutable.Map[Name, Request]()
- private val definedNameMap = mutable.Map[Name, Request]()
- private val directlyBoundNames = mutable.Set[Name]()
-
- def allHandlers = prevRequestList flatMap (_.handlers)
- def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x }
- def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol)
-
- def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last
- def prevRequestList = prevRequests.toList
- def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct
- def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
- def importHandlers = allHandlers collect { case x: ImportHandler => x }
-
- def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct
-
- /** Another entry point for tab-completion, ids in scope */
- def unqualifiedIds = visibleTermNames map (_.toString) filterNot (_ contains "$") sorted
-
- /** Parse the ScalaSig to find type aliases */
- def aliasForType(path: String) = ByteCode.aliasForType(path)
-
- def withoutUnwrapping(op: => Unit): Unit = {
- val saved = isettings.unwrapStrings
- isettings.unwrapStrings = false
- try op
- finally isettings.unwrapStrings = saved
- }
-
- def symbolDefString(sym: Symbol) = {
- TypeStrings.quieter(
- afterTyper(sym.defString),
- sym.owner.name + ".this.",
- sym.owner.fullName + "."
- )
- }
-
- def showCodeIfDebugging(code: String) {
- /** Secret bookcase entrance for repl debuggers: end the line
- * with "// show" and see what's going on.
- */
- def isShow = code.lines exists (_.trim endsWith "// show")
- def isShowRaw = code.lines exists (_.trim endsWith "// raw")
-
- // old style
- beSilentDuring(parse(code)) foreach { ts =>
- ts foreach { t =>
- withoutUnwrapping(repldbg(asCompactString(t)))
- }
- }
- }
-
- // debugging
- def debugging[T](msg: String)(res: T) = {
- repldbg(msg + " " + res)
- res
- }
-}
-
-/** Utility methods for the Interpreter. */
-object IMain {
- // The two name forms this is catching are the two sides of this assignment:
- //
- // $line3.$read.$iw.$iw.Bippy =
- // $line3.$read$$iw$$iw$Bippy@4a6a00ca
- private def removeLineWrapper(s: String) = s.replaceAll("""\$line\d+[./]\$(read|eval|print)[$.]""", "")
- private def removeIWPackages(s: String) = s.replaceAll("""\$(iw|read|eval|print)[$.]""", "")
- def stripString(s: String) = removeIWPackages(removeLineWrapper(s))
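- // Illustrative check (hypothetical helper, not in the original file): the flattened
- // wrapper form reduces to the bare member name; the dotted form is handled analogously.
- private def stripStringExample: Boolean =
- stripString("$line3.$read$$iw$$iw$Bippy@4a6a00ca") == "Bippy@4a6a00ca"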
-
- trait CodeAssembler[T] {
- def preamble: String
- def generate: T => String
- def postamble: String
-
- def apply(contributors: List[T]): String = stringFromWriter { code =>
- code println preamble
- contributors map generate foreach (code println _)
- code println postamble
- }
- }
-
- trait StrippingWriter {
- def isStripping: Boolean
- def stripImpl(str: String): String
- def strip(str: String): String = if (isStripping) stripImpl(str) else str
- }
- trait TruncatingWriter {
- def maxStringLength: Int
- def isTruncating: Boolean
- def truncate(str: String): String = {
- if (isTruncating && (maxStringLength != 0 && str.length > maxStringLength))
- (str take maxStringLength - 3) + "..."
- else str
- }
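- // e.g. (illustrative) with maxStringLength = 10, a 20-character string is truncated to
- // its first 7 characters plus "...", keeping the result within the limit.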
- }
- abstract class StrippingTruncatingWriter(out: JPrintWriter)
- extends JPrintWriter(out)
- with StrippingWriter
- with TruncatingWriter {
- self =>
-
- def clean(str: String): String = truncate(strip(str))
- override def write(str: String) = super.write(clean(str))
- }
- class ReplStrippingWriter(intp: IMain) extends StrippingTruncatingWriter(intp.out) {
- import intp._
- def maxStringLength = isettings.maxPrintString
- def isStripping = isettings.unwrapStrings
- def isTruncating = reporter.truncationOK
-
- def stripImpl(str: String): String = naming.unmangle(str)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
deleted file mode 100644
index a8f77afcdf..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Alexander Spoon
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** Settings for the interpreter
- *
- * @version 1.0
- * @author Lex Spoon, 2007/3/24
- **/
-class ISettings(intp: IMain) {
- /** A list of paths where :load should look */
- var loadPath = List(".")
-
- /** Set this to true to see repl machinery under -Yrich-exceptions.
- */
- var showInternalStackTraces = false
-
- /** The maximum length of toString to use when printing the result
- * of an evaluation. 0 means no maximum. If a printout requires
- * more than this number of characters, then the printout is
- * truncated.
- */
- var maxPrintString = replProps.maxPrintString.option.getOrElse(800)
-
- /** The maximum number of completion candidates to print for tab
- * completion without requiring confirmation.
- */
- var maxAutoprintCompletion = 250
-
- /** String unwrapping can be disabled if it is causing issues.
- * Setting this to false means you will see Strings like "$iw.$iw.".
- */
- var unwrapStrings = true
-
- def deprecation_=(x: Boolean) = {
- val old = intp.settings.deprecation.value
- intp.settings.deprecation.value = x
- if (!old && x) println("Enabled -deprecation output.")
- else if (old && !x) println("Disabled -deprecation output.")
- }
- def deprecation: Boolean = intp.settings.deprecation.value
-
- def allSettings = Map(
- "maxPrintString" -> maxPrintString,
- "maxAutoprintCompletion" -> maxAutoprintCompletion,
- "unwrapStrings" -> unwrapStrings,
- "deprecation" -> deprecation
- )
-
- private def allSettingsString =
- allSettings.toList sortBy (_._1) map { case (k, v) => " " + k + " = " + v + "\n" } mkString
-
- override def toString = """
- | ISettings {
- | %s
- | }""".stripMargin.format(allSettingsString)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
deleted file mode 100644
index 73d962b5b0..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ /dev/null
@@ -1,195 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-
-trait Imports {
- self: IMain =>
-
- import global._
- import definitions.{ ScalaPackage, JavaLangPackage, PredefModule }
- import memberHandlers._
-
- def isNoImports = settings.noimports.value
- def isNoPredef = settings.nopredef.value
-
- /** Synthetic import handlers for the language defined imports. */
- private def makeWildcardImportHandler(sym: Symbol): ImportHandler = {
- val hd :: tl = sym.fullName.split('.').toList map newTermName
- val tree = Import(
- tl.foldLeft(Ident(hd): Tree)((x, y) => Select(x, y)),
- ImportSelector.wildList
- )
- tree setSymbol sym
- new ImportHandler(tree)
- }
-
- /** Symbols whose contents are language-defined to be imported. */
- def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule)
- def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
- def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
-
- def allImportedNames = importHandlers flatMap (_.importedNames)
- def importedTerms = onlyTerms(allImportedNames)
- def importedTypes = onlyTypes(allImportedNames)
-
- /** Types which have been wildcard imported, such as:
- * val x = "abc" ; import x._ // type java.lang.String
- * import java.lang.String._ // object java.lang.String
- *
- * Used by tab completion.
- *
- * XXX right now this gets import x._ and import java.lang.String._,
- * but doesn't figure out import String._. There's a lot of ad hoc
- * scope twiddling which should be swept away in favor of digging
- * into the compiler scopes.
- */
- def sessionWildcards: List[Type] = {
- importHandlers filter (_.importsWildcard) map (_.targetType) distinct
- }
- def wildcardTypes = languageWildcards ++ sessionWildcards
-
- def languageSymbols = languageWildcardSyms flatMap membersAtPickler
- def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols)
- def importedSymbols = languageSymbols ++ sessionImportedSymbols
- def importedTermSymbols = importedSymbols collect { case x: TermSymbol => x }
- def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
- def implicitSymbols = importedSymbols filter (_.isImplicit)
-
- def importedTermNamed(name: String): Symbol =
- importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
-
- /** Tuples of (source, imported symbols) in the order they were imported.
- */
- def importedSymbolsBySource: List[(Symbol, List[Symbol])] = {
- val lang = languageWildcardSyms map (sym => (sym, membersAtPickler(sym)))
- val session = importHandlers filter (_.targetType != NoType) map { mh =>
- (mh.targetType.typeSymbol, mh.importedSymbols)
- }
-
- lang ++ session
- }
- def implicitSymbolsBySource: List[(Symbol, List[Symbol])] = {
- importedSymbolsBySource map {
- case (k, vs) => (k, vs filter (_.isImplicit))
- } filterNot (_._2.isEmpty)
- }
-
- /** Compute imports that allow definitions from previous
- * requests to be visible in a new request. Returns
- * three pieces of related code:
- *
- * 1. An initial code fragment that should go before
- * the code of the new request.
- *
- * 2. A code fragment that should go after the code
- * of the new request.
- *
- * 3. An access path which can be traversed to access
- * any bindings inside code wrapped by #1 and #2 .
- *
- * The argument is a set of Names that need to be imported.
- *
- * Limitations: This method is not as precise as it could be.
- * (1) It does not process wildcard imports to see what exactly
- * they import.
- * (2) If it imports any names from a request, it imports all
- * of them, which is not really necessary.
- * (3) It imports multiple same-named implicits, but only the
- * last one imported is actually usable.
- */
- case class ComputedImports(prepend: String, append: String, access: String)
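- // Schematic example (illustrative, simplified to a single wrapper level and assuming
- // the import wrapper is named $iw): if an earlier request defined `x`, the pieces
- // computed below might look like
- //   prepend: "object $iw {\nimport $line1.$read.$iw.`x`\n"
- //   append:  "}\n"
- //   access:  ".$iw"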
- protected def importsCode(wanted: Set[Name]): ComputedImports = {
- /** Narrow down the list of requests from which imports
- * should be taken. Removes requests which cannot contribute
- * useful imports for the specified set of wanted names.
- */
- case class ReqAndHandler(req: Request, handler: MemberHandler) { }
-
- def reqsToUse: List[ReqAndHandler] = {
- /** Loop through a list of MemberHandlers and select which ones to keep.
- * 'wanted' is the set of names that need to be imported.
- */
- def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
- // Single symbol imports might be implicits! See bug #1752. Rather than
- // try to finesse this, we will mimic all imports for now.
- def keepHandler(handler: MemberHandler) = handler match {
- case _: ImportHandler => true
- case x => x.definesImplicit || (x.definedNames exists wanted)
- }
-
- reqs match {
- case Nil => Nil
- case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
- case rh :: rest =>
- import rh.handler._
- val newWanted = wanted ++ referencedNames -- definedNames -- importedNames
- rh :: select(rest, newWanted)
- }
- }
-
- /** Flatten the handlers out and pair each with the original request */
- select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
- }
-
- val code, trailingBraces, accessPath = new StringBuilder
- val currentImps = mutable.HashSet[Name]()
-
- // add code for a new object to hold some imports
- def addWrapper() {
- val impname = nme.INTERPRETER_IMPORT_WRAPPER
- code append "object %s {\n".format(impname)
- trailingBraces append "}\n"
- accessPath append ("." + impname)
-
- currentImps.clear
- }
-
- addWrapper()
-
- // loop through previous requests, adding imports for each one
- for (ReqAndHandler(req, handler) <- reqsToUse) {
- handler match {
- // If the user entered an import, then just use it; add an import wrapping
- // level if the import might conflict with some other import
- case x: ImportHandler =>
- if (x.importsWildcard || currentImps.exists(x.importedNames contains _))
- addWrapper()
-
- code append (x.member + "\n")
-
- // give wildcard imports an import wrapper all to their own
- if (x.importsWildcard) addWrapper()
- else currentImps ++= x.importedNames
-
- // For other requests, import each defined name.
- // import them explicitly instead of with _, so that
- // ambiguity errors will not be generated. Also, quote
- // the name of the variable, so that we don't need to
- // handle quoting keywords separately.
- case x =>
- for (imv <- x.definedNames) {
- if (currentImps contains imv) addWrapper()
-
- code append ("import " + (req fullPath imv) + "\n")
- currentImps += imv
- }
- }
- }
- // add one extra wrapper, to prevent warnings in the common case of
- // redefining the value bound in the last interpreter request.
- addWrapper()
- ComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
- }
-
- private def allReqAndHandlers =
- prevRequestList flatMap (req => req.handlers map (req -> _))
-
- private def membersAtPickler(sym: Symbol): List[Symbol] =
- beforePickler(sym.info.nonPrivateMembers.toList)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
deleted file mode 100644
index 8331fddca6..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stepan Koltsov
- */
-
-package scala.tools.nsc
-package interpreter
-
-import java.io.IOException
-import java.nio.channels.ClosedByInterruptException
-import scala.util.control.Exception._
-import session.History
-import InteractiveReader._
-import Properties.isMac
-
-/** Reads lines from an input stream */
-trait InteractiveReader {
- val interactive: Boolean
-
- def init(): Unit
- def reset(): Unit
-
- def history: History
- def completion: Completion
- def eraseLine(): Unit
- def redrawLine(): Unit
- def currentLine: String
-
- def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match {
- case 'y' => true
- case 'n' => false
- case _ => alt
- }
- def readAssumingNo(prompt: String) = readYesOrNo(prompt, false)
- def readAssumingYes(prompt: String) = readYesOrNo(prompt, true)
-
- protected def readOneLine(prompt: String): String
- protected def readOneKey(prompt: String): Int
-
- def readLine(prompt: String): String =
- // hack necessary for OSX jvm suspension because read calls are not restarted after SIGTSTP
- if (isMac) restartSysCalls(readOneLine(prompt), reset())
- else readOneLine(prompt)
-}
-
-object InteractiveReader {
- val msgEINTR = "Interrupted system call"
- def restartSysCalls[R](body: => R, reset: => Unit): R =
- try body catch {
- case e: IOException if e.getMessage == msgEINTR => reset ; body
- }
-
- def apply(): InteractiveReader = SimpleReader()
- @deprecated("Use `apply` instead.", "2.9.0")
- def createDefault(): InteractiveReader = apply()
-}
-
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
deleted file mode 100644
index 219cb35242..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ /dev/null
@@ -1,372 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline._
-import scala.tools.jline.console.completer._
-import Completion._
-import scala.collection.mutable.ListBuffer
-
- // REPL completer - queries the supplied interpreter for valid
- // completions based on the current contents of the buffer.
-class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
- val global: intp.global.type = intp.global
- import global._
- import definitions.{ PredefModule, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
- import rootMirror.{ RootClass, getModuleIfDefined }
- type ExecResult = Any
- import intp.{ debugging }
-
- // verbosity goes up with consecutive tabs
- private var verbosity: Int = 0
- def resetVerbosity() = verbosity = 0
-
- def getSymbol(name: String, isModule: Boolean) = (
- if (isModule) getModuleIfDefined(name)
- else getModuleIfDefined(name)
- )
- def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe
- def typeOf(name: String) = getType(name, false)
- def moduleOf(name: String) = getType(name, true)
-
- trait CompilerCompletion {
- def tp: Type
- def effectiveTp = tp match {
- case MethodType(Nil, resType) => resType
- case NullaryMethodType(resType) => resType
- case _ => tp
- }
-
- // for some reason Any's members don't show up in subclasses, which
- // we need so that 5.<tab> offers asInstanceOf etc.
- private def anyMembers = AnyClass.tpe.nonPrivateMembers
- def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
-
- def tos(sym: Symbol): String = sym.decodedName
- def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s))
- def hasMethod(s: String) = memberNamed(s).isMethod
-
- // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
- // compiler to crash for reasons not yet known.
- def members = afterTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
- def methods = members.toList filter (_.isMethod)
- def packages = members.toList filter (_.isPackage)
- def aliases = members.toList filter (_.isAliasType)
-
- def memberNames = members map tos
- def methodNames = methods map tos
- def packageNames = packages map tos
- def aliasNames = aliases map tos
- }
-
- object NoTypeCompletion extends TypeMemberCompletion(NoType) {
- override def memberNamed(s: String) = NoSymbol
- override def members = Nil
- override def follow(s: String) = None
- override def alternativesFor(id: String) = Nil
- }
-
- object TypeMemberCompletion {
- def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
- new TypeMemberCompletion(tp) {
- var upgraded = false
- lazy val upgrade = {
- intp rebind param
- intp.reporter.printMessage("\nRebinding stable value %s from %s to %s".format(param.name, tp, param.tpe))
- upgraded = true
- new TypeMemberCompletion(runtimeType)
- }
- override def completions(verbosity: Int) = {
- super.completions(verbosity) ++ (
- if (verbosity == 0) Nil
- else upgrade.completions(verbosity)
- )
- }
- override def follow(s: String) = super.follow(s) orElse {
- if (upgraded) upgrade.follow(s)
- else None
- }
- override def alternativesFor(id: String) = super.alternativesFor(id) ++ (
- if (upgraded) upgrade.alternativesFor(id)
- else Nil
- ) distinct
- }
- }
- def apply(tp: Type): TypeMemberCompletion = {
- if (tp eq NoType) NoTypeCompletion
- else if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
- else new TypeMemberCompletion(tp)
- }
- def imported(tp: Type) = new ImportCompletion(tp)
- }
-
- class TypeMemberCompletion(val tp: Type) extends CompletionAware
- with CompilerCompletion {
- def excludeEndsWith: List[String] = Nil
- def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
- def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_"
-
- def methodSignatureString(sym: Symbol) = {
- IMain stripString afterTyper(new MethodSymbolOutput(sym).methodString())
- }
-
- def exclude(name: String): Boolean = (
- (name contains "$") ||
- (excludeNames contains name) ||
- (excludeEndsWith exists (name endsWith _)) ||
- (excludeStartsWith exists (name startsWith _))
- )
- def filtered(xs: List[String]) = xs filterNot exclude distinct
-
- def completions(verbosity: Int) =
- debugging(tp + " completions ==> ")(filtered(memberNames))
-
- override def follow(s: String): Option[CompletionAware] =
- debugging(tp + " -> '" + s + "' ==> ")(Some(TypeMemberCompletion(memberNamed(s).tpe)) filterNot (_ eq NoTypeCompletion))
-
- override def alternativesFor(id: String): List[String] =
- debugging(id + " alternatives ==> ") {
- val alts = members filter (x => x.isMethod && tos(x) == id) map methodSignatureString
-
- if (alts.nonEmpty) "" :: alts else Nil
- }
-
- override def toString = "%s (%d members)".format(tp, members.size)
- }
-
- class PackageCompletion(tp: Type) extends TypeMemberCompletion(tp) {
- override def excludeNames = anyref.methodNames
- }
-
- class LiteralCompletion(lit: Literal) extends TypeMemberCompletion(lit.value.tpe) {
- override def completions(verbosity: Int) = verbosity match {
- case 0 => filtered(memberNames)
- case _ => memberNames
- }
- }
-
- class ImportCompletion(tp: Type) extends TypeMemberCompletion(tp) {
- override def completions(verbosity: Int) = verbosity match {
- case 0 => filtered(members filterNot (_.isSetter) map tos)
- case _ => super.completions(verbosity)
- }
- }
-
- // not for completion but for excluding
- object anyref extends TypeMemberCompletion(AnyRefClass.tpe) { }
-
- // the unqualified vals/defs/etc visible in the repl
- object ids extends CompletionAware {
- override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
- // now we use the compiler for everything.
- override def follow(id: String): Option[CompletionAware] = {
- if (!completions(0).contains(id))
- return None
-
- val tpe = intp typeOfExpression id
- if (tpe == NoType)
- return None
-
- def default = Some(TypeMemberCompletion(tpe))
-
- // only rebinding vals in power mode for now.
- if (!isReplPower) default
- else intp runtimeClassAndTypeOfTerm id match {
- case Some((clazz, runtimeType)) =>
- val sym = intp.symbolOfTerm(id)
- if (sym.isStable) {
- val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
- Some(TypeMemberCompletion(tpe, runtimeType, param))
- }
- else default
- case _ =>
- default
- }
- }
- override def toString = "<repl ids> (%s)".format(completions(0).size)
- }
-
- // user-issued wildcard imports like "import global._" or "import String._"
- private def imported = intp.sessionWildcards map TypeMemberCompletion.imported
-
- // literal Ints, Strings, etc.
- object literals extends CompletionAware {
- def simpleParse(code: String): Tree = newUnitParser(code).templateStats().last
- def completions(verbosity: Int) = Nil
-
- override def follow(id: String) = simpleParse(id) match {
- case x: Literal => Some(new LiteralCompletion(x))
- case _ => None
- }
- }
-
- // top level packages
- object rootClass extends TypeMemberCompletion(RootClass.tpe) {
- override def completions(verbosity: Int) = super.completions(verbosity) :+ "_root_"
- override def follow(id: String) = id match {
- case "_root_" => Some(this)
- case _ => super.follow(id)
- }
- }
- // members of Predef
- object predef extends TypeMemberCompletion(PredefModule.tpe) {
- override def excludeEndsWith = super.excludeEndsWith ++ List("Wrapper", "ArrayOps")
- override def excludeStartsWith = super.excludeStartsWith ++ List("wrap")
- override def excludeNames = anyref.methodNames
-
- override def exclude(name: String) = super.exclude(name) || (
- (name contains "2")
- )
-
- override def completions(verbosity: Int) = verbosity match {
- case 0 => Nil
- case _ => super.completions(verbosity)
- }
- }
- // members of scala.*
- object scalalang extends PackageCompletion(ScalaPackage.tpe) {
- def arityClasses = List("Product", "Tuple", "Function")
- def skipArity(name: String) = arityClasses exists (x => name != x && (name startsWith x))
- override def exclude(name: String) = super.exclude(name) || (
- skipArity(name)
- )
-
- override def completions(verbosity: Int) = verbosity match {
- case 0 => filtered(packageNames ++ aliasNames)
- case _ => super.completions(verbosity)
- }
- }
- // members of java.lang.*
- object javalang extends PackageCompletion(JavaLangPackage.tpe) {
- override lazy val excludeEndsWith = super.excludeEndsWith ++ List("Exception", "Error")
- override lazy val excludeStartsWith = super.excludeStartsWith ++ List("CharacterData")
-
- override def completions(verbosity: Int) = verbosity match {
- case 0 => filtered(packageNames)
- case _ => super.completions(verbosity)
- }
- }
-
- // the list of completion-aware objects consulted for top-level
- // unqualified identifiers; it's too noisy to let much in.
- lazy val topLevelBase: List[CompletionAware] = List(ids, rootClass, predef, scalalang, javalang, literals)
- def topLevel = topLevelBase ++ imported
- def topLevelThreshold = 50
-
- // the first tier of top level objects (doesn't include file completion)
- def topLevelFor(parsed: Parsed): List[String] = {
- val buf = new ListBuffer[String]
- topLevel foreach { ca =>
- buf ++= (ca completionsFor parsed)
-
- if (buf.size > topLevelThreshold)
- return buf.toList.sorted
- }
- buf.toList
- }
-
- // the most recent result
- def lastResult = Forwarder(() => ids follow intp.mostRecentVar)
-
- def lastResultFor(parsed: Parsed) = {
- /** The logic is a little tortured right now because normally '.' is
- * ignored as a delimiter, but on .<tab> it needs to be propagated.
- */
- val xs = lastResult completionsFor parsed
- if (parsed.isEmpty) xs map ("." + _) else xs
- }
-
- // generic interface for querying (e.g. interpreter loop, testing)
- def completions(buf: String): List[String] =
- topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
-
- def completer(): ScalaCompleter = new JLineTabCompletion
-
- /** This gets a little bit hairy. It's no small feat delegating everything
- * and also keeping track of exactly where the cursor is and where it's supposed
- * to end up. The alternatives mechanism is a little hacky: if there is an empty
- * string in the list of completions, that means we are expanding a unique
- * completion, so don't update the "last" buffer because it'll be wrong.
- */
- class JLineTabCompletion extends ScalaCompleter {
- // For recording the buffer on the last tab hit
- private var lastBuf: String = ""
- private var lastCursor: Int = -1
-
- // Does this represent two consecutive tabs?
- def isConsecutiveTabs(buf: String, cursor: Int) =
- cursor == lastCursor && buf == lastBuf
-
- // Longest common prefix
- def commonPrefix(xs: List[String]): String = {
- if (xs.isEmpty || xs.contains("")) ""
- else xs.head.head match {
- case ch =>
- if (xs.tail forall (_.head == ch)) "" + ch + commonPrefix(xs map (_.tail))
- else ""
- }
- }
-
- // This is jline's entry point for completion.
- override def complete(buf: String, cursor: Int): Candidates = {
- verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
- repldbg("\ncomplete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
-
- // we don't try lower priority completions unless higher ones return no results.
- def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Candidates] = {
- val winners = completionFunction(p)
- if (winners.isEmpty)
- return None
- val newCursor =
- if (winners contains "") p.cursor
- else {
- val advance = commonPrefix(winners)
- lastCursor = p.position + advance.length
- lastBuf = (buf take p.position) + advance
- repldbg("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(
- p, lastBuf, lastCursor, p.position))
- p.position
- }
-
- Some(Candidates(newCursor, winners))
- }
-
- def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
- def mkUndelimited = Parsed.undelimited(buf, cursor) withVerbosity verbosity
-
- // a single dot is special cased to completion on the previous result
- def lastResultCompletion =
- if (!looksLikeInvocation(buf)) None
- else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
-
- def tryAll = (
- lastResultCompletion
- orElse tryCompletion(mkDotted, topLevelFor)
- getOrElse Candidates(cursor, Nil)
- )
-
- /**
- * This is the kickoff point for all manner of theoretically
- * possible compiler unhappiness. The fault may be here or
- * elsewhere, but we don't want to crash the repl regardless.
- * The compiler makes it impossible to avoid catching Throwable
- * with its unfortunate tendency to throw java.lang.Errors and
- * AssertionErrors as the hats drop. We take two swings at it
- * because there are some spots which like to throw an assertion
- * once, then work after that. Yeah, what can I say.
- */
- try tryAll
- catch { case ex: Throwable =>
- repldbg("Error: complete(%s, %s) provoked".format(buf, cursor) + ex)
- Candidates(cursor,
- if (isReplDebug) List("<error:" + ex + ">")
- else Nil
- )
- }
- }
- }
-}
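The completion machinery above raises verbosity on consecutive tabs and advances the buffer by the candidates' longest common prefix. Below is a minimal, standalone sketch of that prefix step; the object name and the sample strings are illustrative and not part of the deleted file.

object CommonPrefixSketch {
  // Longest common prefix of a list of candidate completions.
  def commonPrefix(xs: List[String]): String =
    if (xs.isEmpty || xs.contains("")) ""
    else {
      val ch = xs.head.head
      if (xs.tail forall (_.head == ch)) "" + ch + commonPrefix(xs map (_.tail))
      else ""
    }

  def main(args: Array[String]): Unit =
    // "toString", "toSeq" and "toSet" share the prefix "toS",
    // so a tab press would advance the buffer to "toS".
    println(commonPrefix(List("toString", "toSeq", "toSet")))
}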
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
deleted file mode 100644
index 5fd5b41625..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stepan Koltsov
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline.console.ConsoleReader
-import scala.tools.jline.console.completer._
-import session._
-import scala.collection.JavaConverters._
-import Completion._
-import io.Streamable.slurp
-
-/**
- * Reads from the console using JLine.
- */
-class JLineReader(_completion: => Completion) extends InteractiveReader {
- val interactive = true
- val consoleReader = new JLineConsoleReader()
-
- lazy val completion = _completion
- lazy val history: JLineHistory = JLineHistory()
-
- private def term = consoleReader.getTerminal()
- def reset() = term.reset()
- def init() = term.init()
-
- def scalaToJline(tc: ScalaCompleter): Completer = new Completer {
- def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
- val buf = if (_buf == null) "" else _buf
- val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
- newCandidates foreach (candidates add _)
- newCursor
- }
- }
-
- class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
- if ((history: History) ne NoHistory)
- this setHistory history
-
- // working around protected/trait/java insufficiencies.
- def goBack(num: Int): Unit = back(num)
- def readOneKey(prompt: String) = {
- this.print(prompt)
- this.flush()
- this.readVirtualKey()
- }
- def eraseLine() = consoleReader.resetPromptLine("", "", 0)
- def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
- // override def readLine(prompt: String): String
-
- // A hook for running code after the repl is done initializing.
- lazy val postInit: Unit = {
- this setBellEnabled false
-
- if (completion ne NoCompletion) {
- val argCompletor: ArgumentCompleter =
- new ArgumentCompleter(new JLineDelimiter, scalaToJline(completion.completer()))
- argCompletor setStrict false
-
- this addCompleter argCompletor
- this setAutoprintThreshold 400 // max completion candidates without warning
- }
- }
- }
-
- def currentLine = consoleReader.getCursorBuffer.buffer.toString
- def redrawLine() = consoleReader.redrawLineAndFlush()
- def eraseLine() = consoleReader.eraseLine()
- // Alternate implementation, not sure if/when I need this.
- // def eraseLine() = while (consoleReader.delete()) { }
- def readOneLine(prompt: String) = consoleReader readLine prompt
- def readOneKey(prompt: String) = consoleReader readOneKey prompt
-}
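The heart of the reader above is the adapter from the repl's ScalaCompleter (buffer and cursor in, cursor and candidates out) to jline's mutate-a-list Completer. Here is a dependency-free sketch of that shape; Candidates and the other names are stand-ins, not the real jline or repl types.

import java.util.{ ArrayList, List => JList }

object CompleterAdapterSketch {
  final case class Candidates(cursor: Int, candidates: List[String])

  // Scala-side completion: a pure function from buffer/cursor to candidates.
  trait ScalaCompleter { def complete(buf: String, cursor: Int): Candidates }

  // jline-style completion: fill the mutable list, return the new cursor.
  def toJlineStyle(tc: ScalaCompleter)(buf: String, cursor: Int, out: JList[CharSequence]): Int = {
    val safeBuf = if (buf == null) "" else buf   // jline may pass null
    val Candidates(newCursor, cs) = tc.complete(safeBuf, cursor)
    cs foreach (out add _)
    newCursor
  }

  def main(args: Array[String]): Unit = {
    val tc = new ScalaCompleter {
      def complete(buf: String, cursor: Int) = Candidates(0, List("toString", "toSeq"))
    }
    val sink = new ArrayList[CharSequence]()
    val newCursor = toJlineStyle(tc)("to", 2, sink)
    println((newCursor, sink))                   // (0,[toString, toSeq])
  }
}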
diff --git a/src/compiler/scala/tools/nsc/interpreter/Logger.scala b/src/compiler/scala/tools/nsc/interpreter/Logger.scala
deleted file mode 100644
index aeb25fc688..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Logger.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-trait Logger {
- def isInfo: Boolean
- def isDebug: Boolean
- def isTrace: Boolean
- def out: JPrintWriter
-
- def info(msg: => Any): Unit = if (isInfo) out println msg
- def debug(msg: => Any): Unit = if (isDebug) out println msg
- def trace(msg: => Any): Unit = if (isTrace) out println msg
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
deleted file mode 100644
index 60325ece30..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import mutable.ListBuffer
-import scala.language.implicitConversions
-
-class ProcessResult(val line: String) {
- import scala.sys.process._
- private val buffer = new ListBuffer[String]
-
- val builder = Process(line)
- val logger = ProcessLogger(buffer += _)
- val exitCode = builder ! logger
- def lines = buffer.toList
-
- def show() = lines foreach println
- override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode)
-}
-object ProcessResult {
- implicit def processResultToOutputLines(pr: ProcessResult): List[String] = pr.lines
- def apply(line: String): ProcessResult = new ProcessResult(line)
-}
-
-trait LoopCommands {
- protected def out: JPrintWriter
-
- // So outputs can be suppressed.
- def echoCommandMessage(msg: String): Unit = out println msg
-
- // a single interpreter command
- abstract class LoopCommand(val name: String, val help: String) extends (String => Result) {
- private var _longHelp: String = null
- final def defaultHelp = usageMsg + " (no extended help available.)"
- def hasLongHelp = _longHelp != null || longHelp != defaultHelp
- def withLongHelp(text: String): this.type = { _longHelp = text ; this }
- def longHelp = _longHelp match {
- case null => defaultHelp
- case text => text
- }
- def usage: String = ""
- def usageMsg: String = ":" + name + (
- if (usage == "") "" else " " + usage
- )
- def apply(line: String): Result
-
- // called if no args are given
- def showUsage(): Result = {
- "usage is " + usageMsg
- Result(true, None)
- }
-
- def onError(msg: String) = {
- out.println("error: " + msg)
- showUsage()
- }
- }
- object LoopCommand {
- def nullary(name: String, help: String, f: () => Result): LoopCommand =
- new NullaryCmd(name, help, _ => f())
-
- def cmd(name: String, usage: String, help: String, f: String => Result): LoopCommand =
- if (usage == "") new NullaryCmd(name, help, f)
- else new LineCmd(name, usage, help, f)
-
- def varargs(name: String, usage: String, help: String, f: List[String] => Result): LoopCommand =
- new VarArgsCmd(name, usage, help, f)
- }
-
- class NullaryCmd(name: String, help: String, f: String => Result) extends LoopCommand(name, help) {
- def apply(line: String): Result = f(line)
- }
-
- class LineCmd(name: String, argWord: String, help: String, f: String => Result) extends LoopCommand(name, help) {
- override def usage = argWord
- def apply(line: String): Result = f(line)
- }
-
- class VarArgsCmd(name: String, argWord: String, help: String, f: List[String] => Result)
- extends LoopCommand(name, help) {
- override def usage = argWord
- def apply(line: String): Result = apply(words(line))
- def apply(args: List[String]) = f(args)
- }
-
- // the result of a single command
- case class Result(val keepRunning: Boolean, val lineToRecord: Option[String])
-
- object Result {
- // the default result means "keep running, and don't record that line"
- val default = Result(true, None)
-
- // most commands do not want to micromanage the Result, but they might want
- // to print something to the console, so we accommodate Unit and String returns.
- implicit def resultFromUnit(x: Unit): Result = default
- implicit def resultFromString(msg: String): Result = {
- echoCommandMessage(msg)
- default
- }
- }
-}
-
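The Result implicits above are what let a command body return a plain String (echoed, then treated as "keep running, record nothing") or Unit. A self-contained sketch of that convenience follows, with simplified stand-ins for the repl's Result and command types.

import scala.language.implicitConversions

object LoopCommandSketch {
  case class Result(keepRunning: Boolean, lineToRecord: Option[String])
  object Result {
    val default = Result(true, None)
    // A command body may return Unit or a String; both become the default Result.
    implicit def fromUnit(u: Unit): Result = default
    implicit def fromString(msg: String): Result = { println(msg); default }
  }

  final class NullaryCmd(val name: String, body: () => Result) {
    def apply(): Result = body()
  }

  def main(args: Array[String]): Unit = {
    // The body returns a String; the implicit turns it into Result(true, None).
    val help = new NullaryCmd("help", () => "no extended help available")
    println(help())
  }
}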
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
deleted file mode 100644
index 67519cf90c..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ /dev/null
@@ -1,228 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.cond
-import scala.reflect.internal.Chars
-import scala.reflect.internal.Flags._
-import scala.language.implicitConversions
-
-trait MemberHandlers {
- val intp: IMain
-
- import intp.{ Request, global, naming }
- import global._
- import naming._
-
- private def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
- private def codegenln(xs: String*): String = codegenln(true, xs: _*)
-
- private def codegen(xs: String*): String = codegen(true, xs: _*)
- private def codegen(leadingPlus: Boolean, xs: String*): String = {
- val front = if (leadingPlus) "+ " else ""
- front + (xs map string2codeQuoted mkString " + ")
- }
- private implicit def name2string(name: Name) = name.toString
-
- /** A traverser that finds all mentioned identifiers, i.e. things
- * that need to be imported. It might return extra names.
- */
- private class ImportVarsTraverser extends Traverser {
- val importVars = new mutable.HashSet[Name]()
-
- override def traverse(ast: Tree) = ast match {
- case Ident(name) =>
- // XXX this is obviously inadequate but it's going to require some effort
- // to get right.
- if (name.toString startsWith "x$") ()
- else importVars += name
- case _ => super.traverse(ast)
- }
- }
- private object ImportVarsTraverser {
- def apply(member: Tree) = {
- val ivt = new ImportVarsTraverser()
- ivt traverse member
- ivt.importVars.toList
- }
- }
-
- def chooseHandler(member: Tree): MemberHandler = member match {
- case member: DefDef => new DefHandler(member)
- case member: ValDef => new ValHandler(member)
- case member: Assign => new AssignHandler(member)
- case member: ModuleDef => new ModuleHandler(member)
- case member: ClassDef => new ClassHandler(member)
- case member: TypeDef => new TypeAliasHandler(member)
- case member: Import => new ImportHandler(member)
- case DocDef(_, documented) => chooseHandler(documented)
- case member => new GenericHandler(member)
- }
-
- sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
- def symbol = if (member.symbol eq null) NoSymbol else member.symbol
- def name: Name = member.name
- def mods: Modifiers = member.mods
- def keyword = member.keyword
- def prettyName = name.decode
-
- override def definesImplicit = member.mods.isImplicit
- override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
- override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
- override def definedSymbols = if (symbol eq NoSymbol) Nil else List(symbol)
- }
-
- /** Class to handle one member among all the members included
- * in a single interpreter request.
- */
- sealed abstract class MemberHandler(val member: Tree) {
- def definesImplicit = false
- def definesValue = false
- def isLegalTopLevel = false
-
- def definesTerm = Option.empty[TermName]
- def definesType = Option.empty[TypeName]
-
- lazy val referencedNames = ImportVarsTraverser(member)
- def importedNames = List[Name]()
- def definedNames = definesTerm.toList ++ definesType.toList
- def definedOrImported = definedNames ++ importedNames
- def definedSymbols = List[Symbol]()
-
- def extraCodeToEvaluate(req: Request): String = ""
- def resultExtractionCode(req: Request): String = ""
-
- private def shortName = this.getClass.toString split '.' last
- override def toString = shortName + referencedNames.mkString(" (refs: ", ", ", ")")
- }
-
- class GenericHandler(member: Tree) extends MemberHandler(member)
-
- class ValHandler(member: ValDef) extends MemberDefHandler(member) {
- val maxStringElements = 1000 // no need to mkString billions of elements
- override def definesValue = true
-
- override def resultExtractionCode(req: Request): String = {
- val isInternal = isUserVarName(name) && req.lookupTypeOf(name) == "Unit"
- if (!mods.isPublic || isInternal) ""
- else {
- // if this is a lazy val we avoid evaluating it here
- val resultString =
- if (mods.isLazy) codegenln(false, "<lazy>")
- else any2stringOf(req fullPath name, maxStringElements)
-
- val vidString =
- if (replProps.vids) """" + " @ " + "%%8x".format(System.identityHashCode(%s)) + " """.trim.format(req fullPath name)
- else ""
-
- """ + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
- }
- }
- }
-
- class DefHandler(member: DefDef) extends MemberDefHandler(member) {
- private def vparamss = member.vparamss
- private def isMacro = member.symbol hasFlag MACRO
- // true if not a macro and 0-arity
- override def definesValue = !isMacro && flattensToEmpty(vparamss)
- override def resultExtractionCode(req: Request) =
- if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
- }
-
- class AssignHandler(member: Assign) extends MemberHandler(member) {
- val Assign(lhs, rhs) = member
- val name = newTermName(freshInternalVarName())
-
- override def definesTerm = Some(name)
- override def definesValue = true
- override def extraCodeToEvaluate(req: Request) =
- """val %s = %s""".format(name, lhs)
-
- /** Print out lhs instead of the generated varName */
- override def resultExtractionCode(req: Request) = {
- val lhsType = string2code(req lookupTypeOf name)
- val res = string2code(req fullPath name)
- """ + "%s: %s = " + %s + "\n" """.format(string2code(lhs.toString), lhsType, res) + "\n"
- }
- }
-
- class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) {
- override def definesTerm = Some(name)
- override def definesValue = true
- override def isLegalTopLevel = true
-
- override def resultExtractionCode(req: Request) = codegenln("defined module ", name)
- }
-
- class ClassHandler(member: ClassDef) extends MemberDefHandler(member) {
- override def definesType = Some(name.toTypeName)
- override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase)
- override def isLegalTopLevel = true
-
- override def resultExtractionCode(req: Request) =
- codegenln("defined %s %s".format(keyword, name))
- }
-
- class TypeAliasHandler(member: TypeDef) extends MemberDefHandler(member) {
- private def isAlias = mods.isPublic && treeInfo.isAliasTypeDef(member)
- override def definesType = Some(name.toTypeName) filter (_ => isAlias)
-
- override def resultExtractionCode(req: Request) =
- codegenln("defined type alias ", name) + "\n"
- }
-
- class ImportHandler(imp: Import) extends MemberHandler(imp) {
- val Import(expr, selectors) = imp
- def targetType: Type = intp.typeOfExpression("" + expr)
- override def isLegalTopLevel = true
-
- def createImportForName(name: Name): String = {
- selectors foreach {
- case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel)
- case _ => ()
- }
- "import %s.%s".format(expr, name)
- }
- // TODO: Need to track these specially to honor Predef masking attempts,
- // because they must be the leading imports in the code generated for each
- // line. We can use the same machinery as Contexts now, anyway.
- def isPredefImport = isReferenceToPredef(expr)
-
- // wildcard imports, e.g. import foo._
- private def selectorWild = selectors filter (_.name == nme.USCOREkw)
- // renamed imports, e.g. import foo.{ bar => baz }
- private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
-
- /** Whether this import includes a wildcard import */
- val importsWildcard = selectorWild.nonEmpty
-
- /** Whether anything imported is implicit. */
- def importsImplicit = implicitSymbols.nonEmpty
-
- def implicitSymbols = importedSymbols filter (_.isImplicit)
- def importedSymbols = individualSymbols ++ wildcardSymbols
-
- lazy val individualSymbols: List[Symbol] =
- beforePickler(individualNames map (targetType nonPrivateMember _))
-
- lazy val wildcardSymbols: List[Symbol] =
- if (importsWildcard) beforePickler(targetType.nonPrivateMembers.toList)
- else Nil
-
- /** Complete list of names imported by a wildcard */
- lazy val wildcardNames: List[Name] = wildcardSymbols map (_.name)
- lazy val individualNames: List[Name] = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames)
-
- /** The names imported by this statement */
- override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
- lazy val importsSymbolNamed: Set[String] = importedNames map (_.toString) toSet
-
- def importString = imp.toString
- override def resultExtractionCode(req: Request) = codegenln(importString) + "\n"
- }
-}
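chooseHandler above is a plain dispatch on the shape of the parsed tree, and each handler then knows what it defines and how to report it. Below is a toy version of the same dispatch, with made-up tree and handler types standing in for the compiler's.

object HandlerDispatchSketch {
  sealed trait Tree
  case class ValDef(name: String)   extends Tree
  case class DefDef(name: String)   extends Tree
  case class Import(expr: String)   extends Tree
  case class Other(show: String)    extends Tree

  sealed trait Handler { def summary: String }
  case class ValHandler(v: ValDef)     extends Handler { def summary = s"value ${v.name}" }
  case class DefHandler(d: DefDef)     extends Handler { def summary = s"method ${d.name}" }
  case class ImportHandler(i: Import)  extends Handler { def summary = s"import ${i.expr}" }
  case class GenericHandler(t: Tree)   extends Handler { def summary = s"statement $t" }

  // One handler per tree shape; anything unrecognized falls through to the generic case.
  def chooseHandler(member: Tree): Handler = member match {
    case m: ValDef => ValHandler(m)
    case m: DefDef => DefHandler(m)
    case m: Import => ImportHandler(m)
    case m         => GenericHandler(m)
  }

  def main(args: Array[String]): Unit =
    List(ValDef("x"), Import("scala.math._"), Other("1 + 1")) map chooseHandler foreach (h => println(h.summary))
}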
diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
deleted file mode 100644
index eff0ef59c5..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import NamedParam._
-import scala.language.implicitConversions
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.{ClassTag, classTag}
-
-trait NamedParamCreator {
- protected def freshName: () => String
-
- def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value)
- def apply[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = new Typed[T](name, x)
- def apply[T: ru.TypeTag : ClassTag](x: T): NamedParam = apply(freshName(), x)
-
- def clazz(name: String, x: Any): NamedParam = new Untyped(name, x)
- def clazz(x: Any): NamedParam = clazz(freshName(), x)
-
- implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x)
- implicit def tuple[T: ru.TypeTag : ClassTag](pair: (String, T)): NamedParam = apply(pair._1, pair._2)
-}
-
-object NamedParam extends NamedParamCreator {
- class Typed[T: ru.TypeTag : ClassTag](val name: String, val value: T) extends NamedParam {
- val tpe = TypeStrings.fromTag[T]
- }
- class Untyped(val name: String, val value: Any) extends NamedParam {
- val tpe = TypeStrings.fromValue(value)
- }
-
- protected val freshName = {
- var counter = 0
- () => { counter += 1; "p" + counter }
- }
-}
-
-case class NamedParamClass(name: String, tpe: String, value: Any) extends NamedParam { }
-
-trait NamedParam {
- def name: String
- def tpe: String
- def value: Any
- override def toString = name + ": " + tpe
-}
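A NamedParam bundles a name, a rendered type string and a value so the repl can rebind it later; the Typed variant derives the type string from a TypeTag. The sketch below is a self-contained approximation that uses typeOf[T].toString and getClass.getName where the original delegates to TypeStrings.

import scala.reflect.runtime.{ universe => ru }

object NamedParamSketch {
  trait NamedParam {
    def name: String; def tpe: String; def value: Any
    override def toString = name + ": " + tpe
  }

  class Typed[T: ru.TypeTag](val name: String, val value: T) extends NamedParam {
    val tpe = ru.typeOf[T].toString                                  // stand-in for TypeStrings.fromTag[T]
  }
  class Untyped(val name: String, val value: Any) extends NamedParam {
    val tpe = if (value == null) "Any" else value.getClass.getName   // stand-in for TypeStrings.fromValue
  }

  def main(args: Array[String]): Unit = {
    println(new Typed("xs", List(1, 2, 3)))   // xs: List[Int]
    println(new Untyped("s", "hi"))           // s: java.lang.String
  }
}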
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
deleted file mode 100644
index 0d03a8669a..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** This is for name logic which is independent of the compiler (notice there's no Global.)
- * That includes at least generating, metaquoting, mangling, and unmangling.
- */
-trait Naming {
- def unmangle(str: String): String = {
- val ESC = '\u001b'
- val cleaned = removeIWPackages(removeLineWrapper(str))
- // Looking to exclude binary data which hoses the terminal, but
- // let through the subset of it we need, like whitespace and also
- // <ESC> for ansi codes.
- val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
- // Lots of binary chars - translate all supposed whitespace into spaces
- if (binaryChars > 5)
- cleaned map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch)
- // Not lots - preserve whitespace and ESC
- else
- cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
- }
-
- // The two name forms this is catching are the two sides of this assignment:
- //
- // $line3.$read.$iw.$iw.Bippy =
- // $line3.$read$$iw$$iw$Bippy@4a6a00ca
-
- private def noMeta(s: String) = "\\Q" + s + "\\E"
- private lazy val lineRegex = {
- val sn = sessionNames
- val members = List(sn.read, sn.eval, sn.print) map noMeta mkString ("(?:", "|", ")")
- debugging("lineRegex")(noMeta(sn.line) + """\d+[./]""" + members + """[$.]""")
- }
-
- private def removeLineWrapper(s: String) = s.replaceAll(lineRegex, "")
- private def removeIWPackages(s: String) = s.replaceAll("""\$iw[$.]""", "")
-
- trait SessionNames {
- // All values are configurable by passing e.g. -Dscala.repl.name.read=XXX
- final def propOr(name: String): String = propOr(name, "$" + name)
- final def propOr(name: String, default: String): String =
- sys.props.getOrElse("scala.repl.name." + name, default)
-
- // Prefixes used in repl machinery. Default to $line, $read, etc.
- def line = propOr("line")
- def read = propOr("read")
- def eval = propOr("eval")
- def print = propOr("print")
- def result = propOr("result")
-
- // The prefix for unnamed results: by default res0, res1, etc.
- def res = propOr("res", "res") // INTERPRETER_VAR_PREFIX
- // Internal ones
- def ires = propOr("ires")
- }
- lazy val sessionNames: SessionNames = new SessionNames { }
-
- /** Generates names pre0, pre1, etc. via calls to the apply method. */
- class NameCreator(pre: String) {
- private var x = -1
- var mostRecent: String = ""
-
- def apply(): String = {
- x += 1
- mostRecent = pre + x
- mostRecent
- }
- def reset(): Unit = x = -1
- def didGenerate(name: String) =
- (name startsWith pre) && ((name drop pre.length) forall (_.isDigit))
- }
-
- private lazy val userVar = new NameCreator(sessionNames.res) // var name, like res0
- private lazy val internalVar = new NameCreator(sessionNames.ires) // internal var name, like $ires0
-
- def isLineName(name: String) = (name startsWith sessionNames.line) && (name stripPrefix sessionNames.line forall (_.isDigit))
- def isUserVarName(name: String) = userVar didGenerate name
- def isInternalVarName(name: String) = internalVar didGenerate name
-
- val freshLineId = {
- var x = 0
- () => { x += 1 ; x }
- }
- def freshUserVarName() = userVar()
- def freshInternalVarName() = internalVar()
-
- def resetAllCreators() {
- userVar.reset()
- internalVar.reset()
- }
-
- def mostRecentVar = userVar.mostRecent
-}
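Two details of Naming are worth seeing in isolation: generated names are simply a prefix plus a counter (res0, res1, ...), and didGenerate recognizes exactly those names. The following sketch copies that small generator into a runnable object; the object name and demo values are illustrative.

object NameCreatorSketch {
  class NameCreator(pre: String) {
    private var x = -1
    var mostRecent: String = ""
    def apply(): String = { x += 1; mostRecent = pre + x; mostRecent }
    def reset(): Unit = x = -1
    def didGenerate(name: String) =
      (name startsWith pre) && ((name drop pre.length) forall (_.isDigit))
  }

  def main(args: Array[String]): Unit = {
    val userVar = new NameCreator("res")
    println(List(userVar(), userVar(), userVar()))   // List(res0, res1, res2)
    println(userVar didGenerate "res2")              // true
    println(userVar didGenerate "result")            // false: suffix is not all digits
  }
}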
diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
deleted file mode 100644
index b0be956df8..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.tools.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
-import util.returning
-
-/** One instance of a command buffer.
- */
-class Parsed private (
- val buffer: String,
- val cursor: Int,
- val delimited: Char => Boolean
-) extends Delimited {
- def isEmpty = args.isEmpty
- def isUnqualified = args.size == 1
- def isQualified = args.size > 1
- def isAtStart = cursor <= 0
-
- private var _verbosity = 0
-
- def verbosity = _verbosity
- def withVerbosity(v: Int): this.type = returning[this.type](this)(_ => _verbosity = v)
-
- def args = toArgs(buffer take cursor).toList
- def bufferHead = args.head
- def headLength = bufferHead.length + 1
- def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited) withVerbosity verbosity
-
- def prev = new Parsed(buffer, cursor - 1, delimited) withVerbosity verbosity
- def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity
- def currentChar = buffer(cursor)
- def currentArg = args.last
- def position =
- if (isEmpty) 0
- else if (isLastDelimiter) cursor
- else cursor - currentArg.length
-
- def isFirstDelimiter = !isEmpty && isDelimiterChar(buffer.head)
- def isLastDelimiter = !isEmpty && isDelimiterChar(buffer.last)
- def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else ""
- def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else ""
-
- def isQuoted = false // TODO
- def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar)
- def isDelimiter = !isQuoted && !isEscaped && isDelimiterChar(currentChar)
-
- override def toString = "Parsed(%s / %d)".format(buffer, cursor)
-}
-
-object Parsed {
- val DefaultDelimiters = "[]{},`; \t".toSet
-
- private def onull(s: String) = if (s == null) "" else s
-
- def apply(s: String): Parsed = apply(onull(s), onull(s).length)
- def apply(s: String, cursor: Int): Parsed = apply(onull(s), cursor, DefaultDelimiters)
- def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed =
- new Parsed(onull(s), cursor, delimited)
-
- def dotted(s: String): Parsed = dotted(onull(s), onull(s).length)
- def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.')
-
- def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false)
-}
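Parsed splits the buffer up to the cursor into delimiter-separated arguments and reports where the argument under completion starts; for dotted completion the only delimiter is '.'. The rough standalone illustration below assumes toArgs behaves like a plain split on the delimiter (the real Delimited trait is not shown in this diff).

object ParsedSketch {
  // Approximation of Parsed.dotted: split the buffer (up to the cursor) on '.'.
  def args(buffer: String, cursor: Int): List[String] =
    (buffer take cursor).split('.').toList match {
      case Nil => List("")
      case xs  => xs
    }

  // Where the argument currently being completed starts.
  def position(buffer: String, cursor: Int): Int =
    if ((buffer take cursor).isEmpty) 0
    else if ((buffer take cursor) endsWith ".") cursor
    else cursor - args(buffer, cursor).last.length

  def main(arguments: Array[String]): Unit = {
    println(args("scala.collection.im", 19))       // List(scala, collection, im)
    println(position("scala.collection.im", 19))   // 17 -> completion replaces "im"
    println(position("scala.collection.", 17))     // 17 -> cursor sits right after the dot
  }
}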
diff --git a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala b/src/compiler/scala/tools/nsc/interpreter/Pasted.scala
deleted file mode 100644
index f5db3d9e3a..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** If it looks like they're pasting in a scala interpreter
- * transcript, remove all the formatting we inserted so we
- * can make some sense of it.
- *
- * Most of the interesting code in here is due to my goal of
- * "paste idempotence" i.e. the transcript resulting from pasting
- * a transcript should itself be pasteable and should achieve
- * the same result.
- */
-abstract class Pasted {
- def ContinueString: String
- def PromptString: String
- def interpret(line: String): Unit
-
- def matchesPrompt(line: String) = matchesString(line, PromptString)
- def matchesContinue(line: String) = matchesString(line, ContinueString)
- def running = isRunning
-
- private def matchesString(line: String, target: String): Boolean = (
- (line startsWith target) ||
- (line.nonEmpty && " \t".toSet(line.head) && matchesString(line.tail, target))
- )
- private def stripString(line: String, target: String) = line indexOf target match {
- case -1 => line
- case idx => line drop (idx + target.length)
- }
- private var isRunning = false
- private val resReference = """(?<!^)(res\d+)""".r
- private val resCreation = """^\s*(res\d+):.*""".r
- private val resAssign = """^val (res\d+).*""".r
-
- private class PasteAnalyzer(val lines: List[String]) {
- val referenced = lines flatMap (resReference findAllIn _.trim.stripPrefix("res")) toSet
- val cmds = lines reduceLeft append split PromptString filterNot (_.trim == "") toList
-
- /** If it's a prompt or continuation line, strip the formatting bits and
- * assemble the code. Otherwise ship it off to be analyzed for res references
- * and discarded.
- */
- def append(code: String, line: String): String =
- if (matchesPrompt(line)) code + "\n" + line
- else if (matchesContinue(line)) code + "\n" + stripString(line, ContinueString)
- else fixResRefs(code, line)
-
- /** If the line looks like
- * res15: Int
- *
- * and the additional conditions hold that:
- * 1) res15 is referenced from elsewhere in the transcript
- * 2) the preceding repl line is not "val res15 = ..." because that
- * indicates it has already been "val-ified" on a previous paste
- *
- * then we go back in time to the preceding scala> prompt and
- * rewrite the line containing <expr> as
- * val res15 = { <expr> }
- * and the rest as they say is rewritten history.
- *
- * In all other cases, discard the line.
- */
- def fixResRefs(code: String, line: String) = line match {
- case resCreation(resName) if referenced(resName) =>
- code.lastIndexOf(PromptString) match {
- case -1 => code
- case idx =>
- val (str1, str2) = code splitAt (idx + PromptString.length)
- str2 match {
- case resAssign(`resName`) => code
- case _ => "%sval %s = { %s }".format(str1, resName, str2)
- }
- }
- case _ => code
- }
-
- def run() {
- println("// Replaying %d commands from transcript.\n" format cmds.size)
- cmds foreach { cmd =>
- print(PromptString)
- interpret(cmd)
- }
- }
- }
-
- /** Commands start on lines beginning with "scala>" and each successive
- * line which begins with the continuation string is appended to that command.
- * Everything else is discarded. When the end of the transcript is spotted,
- * all the commands are replayed.
- */
- def apply(lines: TraversableOnce[String]) = {
- isRunning = true
- try new PasteAnalyzer(lines.toList) run()
- finally isRunning = false
- }
-}
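The interesting rewrite in Pasted turns a pasted result line such as res15: Int = 5 back into code when res15 is referenced later, by wrapping the preceding prompt line as val res15 = { <expr> }. The snippet below only demonstrates the detection regexes quoted above; the full rewrite also needs the surrounding transcript.

object PastedRegexSketch {
  private val resCreation = """^\s*(res\d+):.*""".r   // a printed result line, e.g. "res15: Int = 5"
  private val resAssign   = """^val (res\d+).*""".r   // a line that has already been val-ified

  def main(args: Array[String]): Unit = {
    "res15: Int = 5" match {
      case resCreation(name) => println("created " + name)
      case _                 => println("not a result line")
    }
    "val res15 = { 2 + 3 }" match {
      case resAssign(name) => println("already val-ified " + name)
      case _               => println("not an assignment")
    }
  }
}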
diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
deleted file mode 100644
index 638944713a..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import scala.language.implicitConversions
-
-/** Mix this into an object and use it as a phasing
- * swiss army knife.
- */
-trait Phased {
- val global: Global
- import global._
-
- private var active: PhaseName = NoPhaseName
- private var multi: Seq[PhaseName] = Nil
-
- def get = active
- def set(phase: PhaseName): Boolean = phase match {
- case NoPhaseName => false
- case name => active = name ; true
- }
- def getMulti = multi
- def setMulti(phases: Seq[PhaseName]): Boolean = {
- if (phases contains NoPhaseName) false
- else {
- multi = phases
- true
- }
- }
-
- private def parsePhaseChange(str: String): Option[Int] = {
- if (str == "") Some(0)
- else if (str startsWith ".prev") parsePhaseChange(str drop 5) map (_ - 1)
- else if (str startsWith ".next") parsePhaseChange(str drop 5) map (_ + 1)
- else str.head match {
- case '+' | '-' =>
- val (num, rest) = str.tail.span(_.isDigit)
- val diff = if (str.head == '+') num.toInt else -num.toInt
- parsePhaseChange(rest) map (_ + diff)
- case _ =>
- None
- }
- }
-
- /** Takes a string like 4, typer+2, typer.next, etc.
- * and turns it into a PhaseName instance.
- */
- private def parseInternal(str: String): PhaseName = {
- if (str == "") NoPhaseName
- else if (str forall (_.isDigit)) PhaseName(str.toInt)
- else {
- val (name, rest) = str.toLowerCase span (_.isLetter)
- val start = PhaseName(name)
- val change = parsePhaseChange(rest)
-
- if (start.isEmpty || change.isEmpty) NoPhaseName
- else PhaseName(start.id + change.get)
- }
- }
- def parse(str: String): PhaseName =
- try parseInternal(str)
- catch { case _: Exception => NoPhaseName }
-
- def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*)
-
- def atCurrent[T](body: => T): T = atPhase(get)(body)
- def multi[T](body: => T): Seq[T] = multi map (ph => at(ph)(body))
- def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body)
- def show[T](body: => T): Seq[T] = {
- val pairs = atMap(PhaseName.all)(body)
- pairs foreach { case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) }
- pairs map (_._2)
- }
-
- def at[T](ph: PhaseName)(body: => T): T = {
- val saved = get
- set(ph)
- try atCurrent(body)
- finally set(saved)
- }
- def atMulti[T](phs: Seq[PhaseName])(body: => T): Seq[T] = {
- val saved = multi
- setMulti(phs)
- try multi(body)
- finally setMulti(saved)
- }
-
- def showAt[T](phs: Seq[PhaseName])(body: => T): Unit =
- atMap[T](phs)(body) foreach {
- case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240))
- }
-
- def atMap[T](phs: Seq[PhaseName])(body: => T): Seq[(PhaseName, T)] =
- phs zip atMulti(phs)(body)
-
- object PhaseName {
- implicit lazy val phaseNameOrdering: Ordering[PhaseName] = Ordering[Int] on (_.id)
-
- lazy val all = List(
- Parser, Namer, Packageobjects, Typer, Superaccessors, Pickler, Refchecks,
- Selectiveanf, Liftcode, Selectivecps, Uncurry, Tailcalls, Specialize,
- Explicitouter, Erasure, Lazyvals, Lambdalift, Constructors, Flatten, Mixin,
- Cleanup, Icode, Inliner, Closelim, Dce, Jvm, Terminal
- )
- lazy val nameMap = all.map(x => x.name -> x).toMap withDefaultValue NoPhaseName
- multi = all
-
- def apply(id: Int): PhaseName = all find (_.id == id) getOrElse NoPhaseName
- implicit def apply(s: String): PhaseName = nameMap(s)
- implicit def defaultPhaseName: PhaseName = active
- }
- sealed abstract class PhaseName {
- lazy val id = phase.id
- lazy val name = toString.toLowerCase
- def phase = currentRun.phaseNamed(name)
- def isEmpty = this eq NoPhaseName
-
- // Execute some code during this phase.
- def apply[T](body: => T): T = atPhase(phase)(body)
- }
-
- case object Parser extends PhaseName
- case object Namer extends PhaseName
- case object Packageobjects extends PhaseName
- case object Typer extends PhaseName
- case object Superaccessors extends PhaseName
- case object Pickler extends PhaseName
- case object Refchecks extends PhaseName
- case object Selectiveanf extends PhaseName
- case object Liftcode extends PhaseName
- case object Selectivecps extends PhaseName
- case object Uncurry extends PhaseName
- case object Tailcalls extends PhaseName
- case object Specialize extends PhaseName
- case object Explicitouter extends PhaseName
- case object Erasure extends PhaseName
- case object Lazyvals extends PhaseName
- case object Lambdalift extends PhaseName
- case object Constructors extends PhaseName
- case object Flatten extends PhaseName
- case object Mixin extends PhaseName
- case object Cleanup extends PhaseName
- case object Icode extends PhaseName
- case object Inliner extends PhaseName
- case object Closelim extends PhaseName
- case object Dce extends PhaseName
- case object Jvm extends PhaseName
- case object Terminal extends PhaseName
- case object NoPhaseName extends PhaseName {
- override lazy val id = -1
- override lazy val name = phase.name
- override def phase = NoPhase
- }
-
- implicit def phaseEnumToPhase(name: PhaseName): Phase = name.phase
- implicit def phaseNameToPhase(name: String): Phase = currentRun.phaseNamed(name)
-}
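Phase specs accepted by parse combine a phase name or number with relative offsets, e.g. typer+2 or erasure.prev, and parsePhaseChange folds the suffix into an integer delta. A standalone sketch of that parsing over a hard-coded phase list follows; the list and its ordering are illustrative, not the compiler's.

object PhaseSpecSketch {
  val phases = List("parser", "namer", "typer", "erasure", "jvm")   // demo subset only

  // Parse the suffix of a phase spec ("+2", ".next", ".prev.prev", ...) into an offset.
  def parseChange(str: String): Option[Int] =
    if (str == "") Some(0)
    else if (str startsWith ".prev") parseChange(str drop 5) map (_ - 1)
    else if (str startsWith ".next") parseChange(str drop 5) map (_ + 1)
    else str.head match {
      case '+' | '-' =>
        val (num, rest) = str.tail.span(_.isDigit)
        val diff = if (str.head == '+') num.toInt else -num.toInt
        parseChange(rest) map (_ + diff)
      case _ => None
    }

  def parse(spec: String): Option[String] = {
    val (name, rest) = spec.toLowerCase span (_.isLetter)
    for {
      start  <- Some(phases indexOf name) filter (_ >= 0)
      change <- parseChange(rest)
      result <- phases.lift(start + change)
    } yield result
  }

  def main(args: Array[String]): Unit = {
    println(parse("typer+1"))       // Some(erasure) in this demo ordering
    println(parse("erasure.prev"))  // Some(typer)
    println(parse("bogus"))         // None
  }
}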
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
deleted file mode 100644
index 5e6bf8824d..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ /dev/null
@@ -1,430 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import scala.util.matching.Regex
-import scala.reflect.internal.util.{ BatchSourceFile }
-import session.{ History }
-import scala.io.Codec
-import java.net.{ URL, MalformedURLException }
-import io.{ Path }
-import scala.language.implicitConversions
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.{ClassTag, classTag}
-
-/** Collecting some power mode examples.
-
-scala> trait F[@specialized(Int) T] { def f: T = ??? }
-defined trait F
-
-scala> trait G[@specialized(Long, Int) T] extends F[T] { override def f: T = super.f }
-defined trait G
-
-scala> changesAfterEachPhase(intp("G").info.members filter (_.name.toString contains "super")) >
-Gained after 1/parser {
- method super$f
-}
-
-Gained after 12/specialize {
- method super$f$mcJ$sp
- method super$f$mcI$sp
-}
-
-Lost after 18/flatten {
- method super$f$mcJ$sp
- method super$f$mcI$sp
- method super$f
-}
-*/
-
-/** A class for methods to be injected into the intp in power mode.
- */
-class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, replVals: ReplValsImpl) {
- import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
- import intp.global._
- import definitions.{ compilerTypeFromTag, compilerSymbolFromTag}
- import rootMirror.{ getClassIfDefined, getModuleIfDefined }
-
- abstract class SymSlurper {
- def isKeep(sym: Symbol): Boolean
- def isIgnore(sym: Symbol): Boolean
- def isRecur(sym: Symbol): Boolean
- def isFinished(): Boolean
-
- val keep = mutable.HashSet[Symbol]()
- val seen = mutable.HashSet[Symbol]()
- def processed = keep.size + seen.size
- def discarded = seen.size - keep.size
-
- def members(x: Symbol): List[Symbol] =
- if (x.rawInfo.isComplete) x.info.members.toList
- else Nil
-
- var lastCount = -1
- var pass = 0
- val unseenHistory = new mutable.ListBuffer[Int]
-
- def loop(todo: Set[Symbol]): Set[Symbol] = {
- pass += 1
- val (repeats, unseen) = todo partition seen
- unseenHistory += unseen.size
- if (opt.verbose) {
- println("%3d %s accumulated, %s discarded. This pass: %s unseen, %s repeats".format(
- pass, keep.size, discarded, unseen.size, repeats.size))
- }
- if (lastCount == processed || unseen.isEmpty || isFinished())
- return keep.toSet
-
- lastCount = processed
- keep ++= (unseen filter isKeep filterNot isIgnore)
- seen ++= unseen
- loop(unseen filter isRecur flatMap members)
- }
-
- def apply(sym: Symbol): Set[Symbol] = {
- keep.clear()
- seen.clear()
- loop(Set(sym))
- }
- }
-
- class PackageSlurper(packageClass: Symbol) extends SymSlurper {
- /** Looking for dwindling returns */
- def droppedEnough() = unseenHistory.size >= 4 && {
- unseenHistory takeRight 4 sliding 2 forall { it =>
- val List(a, b) = it.toList
- a > b
- }
- }
-
- def isRecur(sym: Symbol) = true
- def isIgnore(sym: Symbol) = sym.isAnonOrRefinementClass || (sym.name.toString contains "$mc")
- def isKeep(sym: Symbol) = sym.hasTransOwner(packageClass)
- def isFinished() = droppedEnough()
- def slurp() = {
- if (packageClass.isPackageClass)
- apply(packageClass)
- else {
- repldbg("Not a package class! " + packageClass)
- Set()
- }
- }
- }
-
- private def customBanner = replProps.powerBanner.option flatMap (f => io.File(f).safeSlurp())
- private def customInit = replProps.powerInitCode.option flatMap (f => io.File(f).safeSlurp())
-
- def banner = customBanner getOrElse """
- |** Power User mode enabled - BEEP WHIR GYVE **
- |** :phase has been set to 'typer'. **
- |** scala.tools.nsc._ has been imported **
- |** global._, definitions._ also imported **
- |** Try :help, :vals, power.<tab> **
- """.stripMargin.trim
-
- private def initImports = List(
- "scala.tools.nsc._",
- "scala.collection.JavaConverters._",
- "intp.global.{ error => _, _ }",
- "definitions.{ getClass => _, _ }",
- "power.rutil._",
- "replImplicits._",
- "treedsl.CODE._"
- )
-
- def init = customInit match {
- case Some(x) => x
- case _ => initImports.mkString("import ", ", ", "")
- }
-
- /** Starts up power mode and runs whatever is in init.
- */
- def unleash(): Unit = beQuietDuring {
- // First we create the ReplVals instance and bind it to $r
- intp.bind("$r", replVals)
- // Then we import everything from $r.
- intp interpret ("import " + intp.pathToTerm("$r") + "._")
- // And whatever else there is to do.
- init.lines foreach (intp interpret _)
- }
- def valsDescription: String = {
- def to_str(m: Symbol) = "%12s %s".format(
- m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.")
-
- ( rutil.info[ReplValsImpl].membersDeclared
- filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor)
- sortBy (_.decodedName)
- map to_str
- mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "")
- )
- }
-
- trait LowPriorityInternalInfo {
- implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfo[T] = new InternalInfo[T](None)
- }
- object InternalInfo extends LowPriorityInternalInfo { }
-
- /** Deals with the problem of accidentally calling a method on Type
- * when you're holding a Symbol: the Symbol gets converted to the type
- * of Symbol rather than the type of the thing the symbol represents.
- * So only one method, "?", is installed implicitly, and the rest of
- * the conveniences live on the wrapper it returns.
- */
- trait LowPriorityInternalInfoWrapper {
- implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
- }
- object InternalInfoWrapper extends LowPriorityInternalInfoWrapper {
-
- }
- class InternalInfoWrapper[T: ru.TypeTag : ClassTag](value: Option[T] = None) {
- def ? : InternalInfo[T] = new InternalInfo[T](value)
- }
-
- /** Todos...
- * translate tag type arguments into applied types
- * customizable symbol filter (had to hardcode no-spec to reduce noise)
- */
- class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) {
- private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
- private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
- private def isImplClass(s: Symbol) = s.name.toString endsWith "$class"
-
- /** Standard noise reduction filter. */
- def excludeMember(s: Symbol) = (
- isSpecialized(s)
- || isImplClass(s)
- || s.isAnonOrRefinementClass
- || s.isAnonymousFunction
- )
- def symbol = compilerSymbolFromTag(tag)
- def tpe = compilerTypeFromTag(tag)
- def name = symbol.name
- def companion = symbol.companionSymbol
- def info = symbol.info
- def moduleClass = symbol.moduleClass
- def owner = symbol.owner
- def owners = symbol.ownerChain drop 1
- def signature = symbol.defString
-
- def decls = info.decls
- def declsOverride = membersDeclared filter (_.isOverride)
- def declsOriginal = membersDeclared filterNot (_.isOverride)
-
- def members = membersUnabridged filterNot excludeMember
- def membersUnabridged = tpe.members.toList
- def membersDeclared = members filterNot excludeMember
- def membersInherited = members filterNot (membersDeclared contains _)
- def memberTypes = members filter (_.name.isTypeName)
- def memberMethods = members filter (_.isMethod)
-
- def pkg = symbol.enclosingPackage
- def pkgName = pkg.fullName
- def pkgClass = symbol.enclosingPackageClass
- def pkgMembers = pkg.info.members filterNot excludeMember
- def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage)
- def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember
-
- def tag = typeEvidence
- def runtimeClass = runtimeClassEvidence.runtimeClass
- def shortClass = runtimeClass.getName split "[$.]" last
-
- def baseClasses = tpe.baseClasses
- def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name))
- def ancestors = baseClasses drop 1
- def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol)
- def baseTypes = tpe.baseTypeSeq.toList
-
- def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe
- def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe))
- def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe))
-
- override def toString = value match {
- case Some(x) => "%s (%s)".format(x, shortClass)
- case _ => runtimeClass.getName
- }
- }
-
- trait LowPriorityPrettifier {
- implicit object AnyPrettifier extends Prettifier[Any] {
- def show(x: Any): Unit = prettify(x) foreach println
- def prettify(x: Any): TraversableOnce[String] = x match {
- case x: Name => List(x.decode)
- case Tuple2(k, v) => List(prettify(k).toIterator ++ Iterator("->") ++ prettify(v) mkString " ")
- case xs: Array[_] => xs.iterator flatMap prettify
- case xs: TraversableOnce[_] => xs flatMap prettify
- case x => List(Prettifier.stringOf(x))
- }
- }
- }
- object StringPrettifier extends Prettifier[String] {
- def show(x: String) = println(x)
- def prettify(x: String) = List(Prettifier stringOf x)
- }
- object Prettifier extends LowPriorityPrettifier {
- def stringOf(x: Any): String = scala.runtime.ScalaRunTime.stringOf(x)
- def prettify[T](value: T): TraversableOnce[String] = default[T] prettify value
- def default[T] = new Prettifier[T] {
- def prettify(x: T): TraversableOnce[String] = AnyPrettifier prettify x
- def show(x: T): Unit = AnyPrettifier show x
- }
- }
- trait Prettifier[T] {
- def show(x: T): Unit
- def prettify(x: T): TraversableOnce[String]
-
- def show(xs: TraversableOnce[T]): Unit = prettify(xs) foreach println
- def prettify(xs: TraversableOnce[T]): TraversableOnce[String] = xs flatMap (x => prettify(x))
- }
-
- abstract class PrettifierClass[T: Prettifier]() {
- val pretty = implicitly[Prettifier[T]]
- import pretty._
-
- def value: Seq[T]
-
- def pp(f: Seq[T] => Seq[T]): Unit =
- pretty prettify f(value) foreach (StringPrettifier show _)
-
- def freq[U](p: T => U) = (value.toSeq groupBy p mapValues (_.size)).toList sortBy (-_._2) map (_.swap)
- def ppfreq[U](p: T => U): Unit = freq(p) foreach { case (count, key) => println("%5d %s".format(count, key)) }
-
- def |[U](f: Seq[T] => Seq[U]): Seq[U] = f(value)
- def ^^[U](f: T => U): Seq[U] = value map f
- def ^?[U](pf: PartialFunction[T, U]): Seq[U] = value collect pf
-
- def >>!(implicit ord: Ordering[T]): Unit = pp(_.sorted.distinct)
- def >>(implicit ord: Ordering[T]): Unit = pp(_.sorted)
- def >!(): Unit = pp(_.distinct)
- def >(): Unit = pp(identity)
-
- def >#(): Unit = this ># (identity[T] _)
- def >#[U](p: T => U): Unit = this ppfreq p
-
- def >?(p: T => Boolean): Unit = pp(_ filter p)
- def >?(s: String): Unit = pp(_ filter (_.toString contains s))
- def >?(r: Regex): Unit = pp(_ filter (_.toString matches fixRegex(r)))
-
- private def fixRegex(r: scala.util.matching.Regex): String = {
- val s = r.pattern.toString
- val prefix = if (s startsWith "^") "" else """^.*?"""
- val suffix = if (s endsWith "$") "" else """.*$"""
-
- prefix + s + suffix
- }
- }
-
- class MultiPrettifierClass[T: Prettifier](val value: Seq[T]) extends PrettifierClass[T]() { }
- class SinglePrettifierClass[T: Prettifier](single: T) extends PrettifierClass[T]() {
- val value = List(single)
- }
-
- class RichReplString(s: String) {
- // make a URL out of the string
- def u: URL = (
- if (s contains ":") new URL(s)
- else if (new JFile(s) exists) new JFile(s).toURI.toURL
- else new URL("http://" + s)
- )
- }
- class RichInputStream(in: InputStream)(implicit codec: Codec) {
- def bytes(): Array[Byte] = io.Streamable.bytes(in)
- def slurp(): String = io.Streamable.slurp(in)
- def <<(): String = slurp()
- }
- class RichReplURL(url: URL)(implicit codec: Codec) {
- def slurp(): String = io.Streamable.slurp(url)
- }
- class RichSymbolList(syms: List[Symbol]) {
- def sigs = syms map (_.defString)
- def infos = syms map (_.info)
- }
-
- trait Implicits1 {
- // fallback
- implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
- new SinglePrettifierClass[T](x)
-
- implicit def liftToTypeName(s: String): TypeName = newTypeName(s)
- }
- trait Implicits2 extends Implicits1 {
- class RichSymbol(sym: Symbol) {
- // convenient type application
- def apply(targs: Type*): Type = typeRef(NoPrefix, sym, targs.toList)
- }
- object symbolSubtypeOrdering extends Ordering[Symbol] {
- def compare(s1: Symbol, s2: Symbol) =
- if (s1 eq s2) 0
- else if (s1 isLess s2) -1
- else 1
- }
- implicit lazy val powerSymbolOrdering: Ordering[Symbol] = Ordering[Name] on (_.name)
- implicit lazy val powerTypeOrdering: Ordering[Type] = Ordering[Symbol] on (_.typeSymbol)
-
- implicit def replInternalInfo[T: ru.TypeTag : ClassTag](x: T): InternalInfoWrapper[T] = new InternalInfoWrapper[T](Some(x))
- implicit def replEnhancedStrings(s: String): RichReplString = new RichReplString(s)
- implicit def replMultiPrinting[T: Prettifier](xs: TraversableOnce[T]): MultiPrettifierClass[T] =
- new MultiPrettifierClass[T](xs.toSeq)
- implicit def replPrettifier[T] : Prettifier[T] = Prettifier.default[T]
- implicit def replTypeApplication(sym: Symbol): RichSymbol = new RichSymbol(sym)
-
- implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
- implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
-
- implicit def liftToTermName(s: String): TermName = newTermName(s)
- implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs)
- }
-
- trait ReplUtilities {
- // [Eugene to Paul] needs review!
- // def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
- // def clazz[T: Manifest] = getClassIfDefined(manifest[T].erasure.getName)
- def module[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isPackage)
- def clazz[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isClass)
- def info[T: ru.TypeTag : ClassTag] = InternalInfo[T]
- def ?[T: ru.TypeTag : ClassTag] = InternalInfo[T]
- def url(s: String) = {
- try new URL(s)
- catch { case _: MalformedURLException =>
- if (Path(s).exists) Path(s).toURL
- else new URL("http://" + s)
- }
- }
- def sanitize(s: String): String = sanitize(s.getBytes())
- def sanitize(s: Array[Byte]): String = (s map {
- case x if x.toChar.isControl => '?'
- case x => x.toChar
- }).mkString
-
- def strings(s: Seq[Byte]): List[String] = {
- if (s.length == 0) Nil
- else s dropWhile (_.toChar.isControl) span (x => !x.toChar.isControl) match {
- case (next, rest) => next.map(_.toChar).mkString :: strings(rest)
- }
- }
- }
-
- lazy val rutil: ReplUtilities = new ReplUtilities { }
- lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { }
-
- def context(code: String) = analyzer.rootContext(unit(code))
- def source(code: String) = newSourceFile(code)
- def unit(code: String) = newCompilationUnit(code)
- def trees(code: String) = parse(code) getOrElse Nil
- def typeOf(id: String) = intp.typeOfExpression(id)
-
- override def toString = """
- |** Power mode status **
- |Default phase: %s
- |Names: %s
- |Identifiers: %s
- """.stripMargin.format(
- phased.get,
- intp.allDefinedNames mkString " ",
- intp.unqualifiedIds mkString " "
- )
-}
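
To illustrate the regex filtering used by the prettifier operators above, here is a minimal standalone sketch of the fixRegex idea: it widens a pattern so that a full-string `matches` behaves like a substring search. The demo object and sample data are made up for illustration.

    import scala.util.matching.Regex

    object FixRegexDemo {
      // Widen a pattern so that `matches`, which must cover the whole string,
      // behaves like a "contains" search -- same logic as fixRegex above.
      private def fixRegex(r: Regex): String = {
        val s      = r.pattern.toString
        val prefix = if (s startsWith "^") "" else """^.*?"""
        val suffix = if (s endsWith "$") "" else """.*$"""
        prefix + s + suffix
      }

      def main(args: Array[String]): Unit = {
        val names = List("scala.Option", "scala.util.Try", "java.io.File")
        println(names filter (_ matches fixRegex("util".r)))   // List(scala.util.Try)
      }
    }
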
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
deleted file mode 100644
index 7cd0f436c4..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.util.control.ControlThrowable
-import util.Exceptional.unwrap
-import util.stackTraceString
-
-trait ReplConfig {
- lazy val replProps = new ReplProps
-
- class TapMaker[T](x: T) {
- def tapInfo(msg: => String): T = tap(x => replinfo(parens(x)))
- def tapDebug(msg: => String): T = tap(x => repldbg(parens(x)))
- def tapTrace(msg: => String): T = tap(x => repltrace(parens(x)))
- def tap[U](f: T => U): T = {
- f(x)
- x
- }
- }
-
- private def parens(x: Any) = "(" + x + ")"
- private def echo(msg: => String) =
- try Console println msg
- catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) }
-
- private[nsc] def repldbgex(ex: Throwable): Unit = {
- if (isReplDebug) {
- echo("Caught/suppressing: " + ex)
- ex.printStackTrace
- }
- }
- private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg)
- private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg)
- private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg)
-
- private[nsc] def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
- case t: ControlThrowable => throw t
- case t: Throwable =>
- repldbg(label + ": " + unwrap(t))
- repltrace(stackTraceString(unwrap(t)))
- alt
- }
- private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T =
- substituteAndLog("" + alt, alt)(body)
- private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = {
- try body
- catch logAndDiscard(label, alt)
- }
- private[nsc] def squashAndLog(label: String)(body: => Unit): Unit =
- substituteAndLog(label, ())(body)
-
- def isReplTrace: Boolean = replProps.trace
- def isReplDebug: Boolean = replProps.debug || isReplTrace
- def isReplInfo: Boolean = replProps.info || isReplDebug
- def isReplPower: Boolean = replProps.power
-}
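
A minimal standalone sketch of the "tap" and "substitute and log" patterns used above. The demo object and values are made up; the real code additionally rethrows ControlThrowable instead of swallowing it.

    object ReplConfigSketch {
      // Run a side effect on a value, then return the value unchanged.
      def tap[T, U](x: T)(f: T => U): T = { f(x); x }

      // Evaluate body; on failure, log the error and fall back to an alternative.
      def substituteAndLog[T](label: String, alt: => T)(body: => T): T =
        try body
        catch { case t: Throwable => Console.err.println(label + ": " + t); alt }

      def main(args: Array[String]): Unit = {
        val n    = tap(21 * 2)(v => println("computed " + v))          // prints, yields 42
        val safe = substituteAndLog("parse", 0)("not a number".toInt)  // logs, yields 0
        println((n, safe))                                             // (42,0)
      }
    }
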
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
deleted file mode 100644
index 7c698a2f3e..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import reporters._
-import typechecker.Analyzer
-
-/** A layer on top of Global so I can guarantee some extra
- * functionality for the repl. It doesn't do much yet.
- */
-trait ReplGlobal extends Global {
- // This exists mostly because using the reporter too early leads to deadlock.
- private def echo(msg: String) { Console println msg }
-
- override def abort(msg: String): Nothing = {
- echo("ReplGlobal.abort: " + msg)
- super.abort(msg)
- }
-
- override lazy val analyzer = new {
- val global: ReplGlobal.this.type = ReplGlobal.this
- } with Analyzer {
- override def newTyper(context: Context): Typer = new Typer(context) {
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
- val res = super.typed(tree, mode, pt)
- tree match {
- case Ident(name) if !tree.symbol.hasPackageFlag && !name.toString.startsWith("$") =>
- repldbg("typed %s: %s".format(name, res.tpe))
- case _ =>
- }
- res
- }
- }
- }
-
- object replPhase extends SubComponent {
- val global: ReplGlobal.this.type = ReplGlobal.this
- val phaseName = "repl"
- val runsAfter = List[String]("typer")
- val runsRightAfter = None
- def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
- def apply(unit: CompilationUnit) {
- repldbg("Running replPhase on " + unit.body)
- // newNamer(rootContext(unit)).enterSym(unit.body)
- }
- }
- }
-
- override protected def computePhaseDescriptors: List[SubComponent] = {
- addToPhasesSet(replPhase, "repl")
- super.computePhaseDescriptors
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
deleted file mode 100644
index bc3e7a10d7..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.sys._
-import Prop._
-
-class ReplProps {
- private def bool(name: String) = BooleanProp.keyExists(name)
- private def int(name: String) = IntProp(name)
-
- val jlineDebug = bool("scala.tools.jline.internal.Log.debug")
- val jlineTrace = bool("scala.tools.jline.internal.Log.trace")
-
- val info = bool("scala.repl.info")
- val debug = bool("scala.repl.debug")
- val trace = bool("scala.repl.trace")
- val power = bool("scala.repl.power")
-
- val replInitCode = Prop[JFile]("scala.repl.initcode")
- val replAutorunCode = Prop[JFile]("scala.repl.autoruncode")
- val powerInitCode = Prop[JFile]("scala.repl.power.initcode")
- val powerBanner = Prop[JFile]("scala.repl.power.banner")
-
- val vids = bool("scala.repl.vids")
- val maxPrintString = int("scala.repl.maxprintstring")
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala b/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
deleted file mode 100644
index b20166d070..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2002-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import reporters._
-import IMain._
-
-/** Like ReplGlobal, a layer for ensuring extra functionality.
- */
-class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.in, new ReplStrippingWriter(intp)) {
- def printUntruncatedMessage(msg: String) = withoutTruncating(printMessage(msg))
-
- override def printMessage(msg: String) {
- // Avoiding deadlock if the compiler starts logging before
- // the lazy val is complete.
- if (intp.isInitializeComplete) {
- if (intp.totalSilence) {
- if (isReplTrace)
- super.printMessage("[silent] " + msg)
- }
- else super.printMessage(msg)
- }
- else Console.println("[init] " + msg)
- }
-
- override def displayPrompt() {
- if (intp.totalSilence) ()
- else super.displayPrompt()
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
deleted file mode 100644
index f8ecc6c6fe..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.cond
-import scala.reflect.internal.Chars
-
-trait ReplStrings {
- /** Convert a string into code that can recreate the string.
- * This requires replacing all special characters by escape
- * codes. It does not add the surrounding " marks. */
- def string2code(str: String): String = {
- val res = new StringBuilder
- for (c <- str) c match {
- case '"' | '\'' | '\\' => res += '\\' ; res += c
- case _ if c.isControl => res ++= Chars.char2uescape(c)
- case _ => res += c
- }
- res.toString
- }
-
- def string2codeQuoted(str: String) =
- "\"" + string2code(str) + "\""
-
- def any2stringOf(x: Any, maxlen: Int) =
- "scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen)
-
- def words(s: String) = s.trim split "\\s+" filterNot (_ == "") toList
- def isQuoted(s: String) = (s.length >= 2) && (s.head == s.last) && ("\"'" contains s.head)
-}
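
For reference, the escaping idea behind string2code in standalone form; control characters are rendered as \uXXXX directly rather than through the compiler-internal Chars helper, and the demo object is made up.

    object ReplStringsSketch {
      def string2code(str: String): String = {
        val res = new StringBuilder
        for (c <- str) c match {
          case '"' | '\'' | '\\'  => res += '\\' += c
          case ch if ch.isControl => res ++= "\\u%04x".format(ch.toInt)
          case ch                 => res += ch
        }
        res.toString
      }

      def main(args: Array[String]): Unit =
        println("\"" + string2code("say \"hi\"\n") + "\"")   // "say \"hi\"\u000a"
    }
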
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
deleted file mode 100644
index 53478bdc5d..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.language.implicitConversions
-import scala.reflect.api.{Universe => ApiUniverse}
-import scala.reflect.runtime.{universe => ru}
-
-/** A class which the repl utilizes to expose predefined objects.
- * The base implementation is empty; the standard repl implementation
- * is StdReplVals.
- */
-abstract class ReplVals { }
-
-class StdReplVals(final val r: ILoop) extends ReplVals {
- final lazy val repl = r
- final lazy val intp = r.intp
- final lazy val power = r.power
- final lazy val reader = r.in
- final lazy val vals = this
- final lazy val global: intp.global.type = intp.global
- final lazy val isettings = intp.isettings
- final lazy val completion = reader.completion
- final lazy val history = reader.history
- final lazy val phased = power.phased
- final lazy val analyzer = global.analyzer
-
- object treedsl extends { val global: intp.global.type = intp.global } with ast.TreeDSL { }
-
- final lazy val typer = analyzer.newTyper(
- analyzer.rootContext(
- power.unit("").asInstanceOf[analyzer.global.CompilationUnit]
- )
- )
- def lastRequest = intp.lastRequest
-
- class ReplImplicits extends power.Implicits2 {
- import intp.global._
-
- private val tagFn = ReplVals.mkCompilerTypeFromTag[intp.global.type](global)
- implicit def mkCompilerTypeFromTag(sym: Symbol) = tagFn(sym)
- }
-
- final lazy val replImplicits = new ReplImplicits
-
- def typed[T <: analyzer.global.Tree](tree: T): T = typer.typed(tree).asInstanceOf[T]
-}
-
-object ReplVals {
- /** Latest attempt to work around the challenge of foo.global.Type
- * not being seen as the same type as bar.global.Type even though
- * the globals are the same. Dependent method types to the rescue.
- */
- def mkCompilerTypeFromTag[T <: Global](global: T) = {
- import global._
- import definitions._
-
- /** We can't use definitions.compilerTypeFromTag directly because we're passing
- * it to map and the compiler refuses to perform eta expansion on a method
- * with a dependent return type. (Can this be relaxed?) To get around this
- * I have this forwarder which widens the type and then casts the result back

- * to the dependent type.
- */
- def compilerTypeFromTag(t: ApiUniverse # WeakTypeTag[_]): Global#Type =
- definitions.compilerTypeFromTag(t)
-
- class AppliedTypeFromTags(sym: Symbol) {
- def apply[M](implicit m1: ru.TypeTag[M]): Type =
- if (sym eq NoSymbol) NoType
- else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type])
-
- def apply[M1, M2](implicit m1: ru.TypeTag[M1], m2: ru.TypeTag[M2]): Type =
- if (sym eq NoSymbol) NoType
- else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type], compilerTypeFromTag(m2).asInstanceOf[Type])
- }
-
- (sym: Symbol) => new AppliedTypeFromTags(sym)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Results.scala b/src/compiler/scala/tools/nsc/interpreter/Results.scala
deleted file mode 100644
index e400906a58..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/Results.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package interpreter
-
-object Results {
- /** A result from the Interpreter interpreting one line of input. */
- abstract sealed class Result
-
- /** The line was interpreted successfully. */
- case object Success extends Result
-
- /** The line was erroneous in some way. */
- case object Error extends Result
-
- /** The input was incomplete. The caller should request more input.
- */
- case object Incomplete extends Result
-}
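
A minimal sketch of how a caller dispatches on this result type; the report helper is made up and assumes the scala.tools.nsc.interpreter package is on the classpath.

    object ResultsSketch {
      import scala.tools.nsc.interpreter.Results._

      def report(r: Result): String = r match {
        case Success    => "ok"
        case Error      => "failed"
        case Incomplete => "need more input"   // the caller should request a continuation line
      }
    }
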
diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
deleted file mode 100644
index 4371f7fe05..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.{ ClassTag, classTag }
-
-class RichClass[T](val clazz: Class[T]) {
- def toTag: ClassTag[T] = ClassTag[T](clazz)
- def toTypeString: String = TypeStrings.fromClazz(clazz)
-
- // Sadly isAnonymousClass does not return true for scala anonymous
- // classes because our naming scheme is not doing well against the
- // jvm's many assumptions.
- def isScalaAnonymous = (
- try clazz.isAnonymousClass || (clazz.getName contains "$anon$")
- catch { case _: java.lang.InternalError => false } // good ol' "Malformed class name"
- )
-
- /** It's not easy... to be... me... */
- def supermans: List[ClassTag[_]] = supers map (_.toTag)
- def superNames: List[String] = supers map (_.getName)
- def interfaces: List[JClass] = supers filter (_.isInterface)
-
- def hasAncestorName(f: String => Boolean) = superNames exists f
- def hasAncestor(f: JClass => Boolean) = supers exists f
- def hasAncestorInPackage(pkg: String) = hasAncestorName(_ startsWith (pkg + "."))
-
- def supers: List[JClass] = {
- def loop(x: JClass): List[JClass] = x.getSuperclass match {
- case null => List(x)
- case sc => x :: (x.getInterfaces.toList flatMap loop) ++ loop(sc)
- }
- loop(clazz).distinct
- }
-}
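
The superclass/interface linearisation used by supers above, written against plain java.lang.Class with a small usage example; the demo object is made up.

    object RichClassSketch {
      def supers(clazz: Class[_]): List[Class[_]] = {
        def loop(x: Class[_]): List[Class[_]] = x.getSuperclass match {
          case null => List(x)   // interfaces and Object have no superclass
          case sc   => x :: (x.getInterfaces.toList flatMap loop) ++ loop(sc)
        }
        loop(clazz).distinct
      }

      def main(args: Array[String]): Unit =
        supers(classOf[java.util.ArrayList[_]]) map (_.getName) foreach println
    }
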
diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
deleted file mode 100644
index bccd8158ec..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stepan Koltsov
- */
-
-package scala.tools.nsc
-package interpreter
-
-import java.io.{ BufferedReader }
-import session.NoHistory
-
-/** Reads using standard JDK API */
-class SimpleReader(
- in: BufferedReader,
- out: JPrintWriter,
- val interactive: Boolean)
-extends InteractiveReader
-{
- val history = NoHistory
- val completion = NoCompletion
-
- def init() = ()
- def reset() = ()
- def eraseLine() = ()
- def redrawLine() = ()
- def currentLine = ""
- def readOneLine(prompt: String): String = {
- if (interactive) {
- out.print(prompt)
- out.flush()
- }
- in.readLine()
- }
- def readOneKey(prompt: String) = sys.error("No char-based input in SimpleReader")
-}
-
-object SimpleReader {
- def defaultIn = Console.in
- def defaultOut = new JPrintWriter(Console.out)
-
- def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader =
- new SimpleReader(in, out, interactive)
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
deleted file mode 100644
index e3440c9f8b..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-
-import scala.language.implicitConversions
-
-/** The main REPL related classes and values are as follows.
- * In addition to standard compiler classes Global and Settings, there are:
- *
- * History: an interface for session history.
- * Completion: an interface for tab completion.
- * ILoop (formerly InterpreterLoop): The umbrella class for a session.
- * IMain (formerly Interpreter): Handles the evolving state of the session
- * and handles submitting code to the compiler and handling the output.
- * InteractiveReader: how ILoop obtains input.
- * History: an interface for session history.
- * Completion: an interface for tab completion.
- * Power: a repository for more advanced/experimental features.
- *
- * ILoop contains { in: InteractiveReader, intp: IMain, settings: Settings, power: Power }
- * InteractiveReader contains { history: History, completion: Completion }
- * IMain contains { global: Global }
- */
-package object interpreter extends ReplConfig with ReplStrings {
- type JFile = java.io.File
- type JClass = java.lang.Class[_]
- type JList[T] = java.util.List[T]
- type JCollection[T] = java.util.Collection[T]
- type JPrintWriter = java.io.PrintWriter
- type InputStream = java.io.InputStream
- type OutputStream = java.io.OutputStream
-
- val IR = Results
-
- implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
-
- private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
- import scala.collection.JavaConverters._
- xs.asScala.toList map ("" + _)
- }
-
- private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz)
- private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x)
- private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg)
- private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
deleted file mode 100644
index dddfb1b8f6..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-package session
-
-import scala.tools.nsc.io._
-import FileBackedHistory._
-
-/** TODO: file locking.
- */
-trait FileBackedHistory extends JLineHistory with JPersistentHistory {
- def maxSize: Int
- protected lazy val historyFile: File = defaultFile
- private var isPersistent = true
-
- locally {
- load()
- }
-
- def withoutSaving[T](op: => T): T = {
- val saved = isPersistent
- isPersistent = false
- try op
- finally isPersistent = saved
- }
- def addLineToFile(item: CharSequence): Unit = {
- if (isPersistent)
- append(item + "\n")
- }
-
- /** Overwrites the history file with the current memory. */
- protected def sync(): Unit = {
- val lines = asStrings map (_ + "\n")
- historyFile.writeAll(lines: _*)
- }
- /** Append one or more lines to the history file. */
- protected def append(lines: String*): Unit = {
- historyFile.appendAll(lines: _*)
- }
-
- def load(): Unit = {
- if (!historyFile.canRead)
- historyFile.createFile()
-
- val lines: IndexedSeq[String] = {
- try historyFile.lines().toIndexedSeq
- catch {
- // It seems that control characters in the history file combined
- // with the default codec can lead to nio spewing exceptions. Rather
- // than abandon hope we'll try to read it as ISO-8859-1
- case _: Exception =>
- try historyFile.lines("ISO-8859-1").toIndexedSeq
- catch { case _: Exception => Vector() }
- }
- }
-
- repldbg("Loading " + lines.size + " into history.")
-
- // avoid writing to the history file
- withoutSaving(lines takeRight maxSize foreach add)
- // truncate the history file if it's too big.
- if (lines.size > maxSize) {
- repldbg("File exceeds maximum size: truncating to " + maxSize + " entries.")
- sync()
- }
- moveToEnd()
- }
-
- def flush(): Unit = ()
- def purge(): Unit = historyFile.truncate()
-}
-
-object FileBackedHistory {
- // val ContinuationChar = '\003'
- // val ContinuationNL: String = Array('\003', '\n').mkString
- import Properties.userHome
-
- def defaultFileName = ".scala_history"
- def defaultFile: File = File(Path(userHome) / defaultFileName)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/History.scala b/src/compiler/scala/tools/nsc/interpreter/session/History.scala
deleted file mode 100644
index daa05b86db..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/session/History.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-package session
-
-/** An implementation-agnostic history interface which makes no
- * reference to the jline classes. Very sparse right now.
- */
-trait History {
- def asStrings: List[String]
- def index: Int
- def size: Int
- def grep(s: String): List[String]
-}
-object NoHistory extends History {
- def asStrings = Nil
- def grep(s: String) = Nil
- def index = 0
- def size = 0
-}
-
-object History {
- def empty: History = NoHistory
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
deleted file mode 100644
index 18e0ee7c85..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-package session
-
-/** A straight scalification of the jline interface which mixes
- * in the sparse jline-independent one too.
- */
-trait JLineHistory extends JHistory with History {
- def size: Int
- def isEmpty: Boolean
- def index: Int
- def clear(): Unit
- def get(index: Int): CharSequence
- def add(line: CharSequence): Unit
- def replace(item: CharSequence): Unit
-
- def entries(index: Int): JListIterator[JEntry]
- def entries(): JListIterator[JEntry]
- def iterator: JIterator[JEntry]
-
- def current(): CharSequence
- def previous(): Boolean
- def next(): Boolean
- def moveToFirst(): Boolean
- def moveToLast(): Boolean
- def moveTo(index: Int): Boolean
- def moveToEnd(): Unit
-}
-
-object JLineHistory {
- class JLineFileHistory extends SimpleHistory with FileBackedHistory {
- override def add(item: CharSequence): Unit = {
- if (!isEmpty && last == item)
- repldbg("Ignoring duplicate entry '" + item + "'")
- else {
- super.add(item)
- addLineToFile(item)
- }
- }
- override def toString = "History(size = " + size + ", index = " + index + ")"
- }
-
- def apply(): JLineHistory = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
deleted file mode 100644
index 9f4e2b9df3..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-package session
-
-import scala.collection.mutable.{ Buffer, ListBuffer }
-import scala.collection.JavaConverters._
-
-class SimpleHistory extends JLineHistory {
- private var _index: Int = 0
- private val buf: Buffer[String] = new ListBuffer[String]
- private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x) }
- private def setTo(num: Int) = { _index = num ; true }
- private def minusOne = { _index -= 1 ; true }
- private def plusOne = { _index += 1 ; true }
- private def lastIndex = size - 1
- private def fail(msg: String): String = {
- repldbg("Internal error in history(size %d, index %d): %s".format(
- size, index, msg)
- )
- ""
- }
-
- case class Entry(index: Int, value: CharSequence) extends JEntry {
- override def toString = value
- }
-
- def maxSize: Int = 2500
- def last = if (isEmpty) fail("last") else buf.last
-
- def size = buf.size
- def index = _index
- def isEmpty = buf.isEmpty
- def clear() = buf.clear()
- def get(idx: Int): CharSequence = buf(idx)
- def add(item: CharSequence): Unit = buf += item
- def replace(item: CharSequence): Unit = {
- buf trimEnd 1
- add(item)
- }
- def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx)
- def entries(): JListIterator[JEntry] = toEntries().asJava.listIterator()
- def iterator: JIterator[JEntry] = toEntries().iterator.asJava
-
- def current() = if (index >= 0 && index < buf.size) buf(index) else fail("current()")
- def previous() = (index > 0) && minusOne
- def next() = (index <= lastIndex) && plusOne
- def moveToFirst() = (size > 0) && (index != 0) && setTo(0)
- def moveToLast() = (size > 0) && (index < lastIndex) && setTo(lastIndex)
- def moveTo(idx: Int) = (idx > 0) && (idx <= lastIndex) && setTo(idx)
- def moveToEnd(): Unit = setTo(size)
-
- // scala legacy interface
- def asList: List[JEntry] = toEntries().toList
- def asJavaList = entries()
- def asStrings = buf.toList
- def grep(s: String) = buf.toList filter (_ contains s)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/package.scala b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
deleted file mode 100644
index c62cf21151..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/session/package.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-import scala.language.implicitConversions
-
-/** Files having to do with the state of a repl session:
- * lines of text entered, types and terms defined, etc.
- */
-package object session {
- type JIterator[T] = java.util.Iterator[T]
- type JListIterator[T] = java.util.ListIterator[T]
-
- type JEntry = scala.tools.jline.console.history.History.Entry
- type JHistory = scala.tools.jline.console.history.History
- type JMemoryHistory = scala.tools.jline.console.history.MemoryHistory
- type JPersistentHistory = scala.tools.jline.console.history.PersistentHistory
-
- private[interpreter] implicit def charSequenceFix(x: CharSequence): String = x.toString
-}
diff --git a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala b/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
deleted file mode 100644
index 98c3d27202..0000000000
--- a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.util.concurrent._
-
-class DaemonThreadFactory extends ThreadFactory {
- def newThread(r: Runnable): Thread = {
- val thread = new Thread(r)
- thread setDaemon true
- thread
- }
-}
-
-object DaemonThreadFactory {
- def newPool() = Executors.newCachedThreadPool(new DaemonThreadFactory)
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/io/Fileish.scala b/src/compiler/scala/tools/nsc/io/Fileish.scala
deleted file mode 100644
index 7b4e385dd8..0000000000
--- a/src/compiler/scala/tools/nsc/io/Fileish.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.io.{ InputStream }
-import java.util.jar.JarEntry
-
-/** A common interface for File-based things and Stream-based things.
- * (In particular, io.File and JarEntry.)
- */
-class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars {
- def inputStream() = input()
-
- def parent = path.parent
- def name = path.name
- def isSourceFile = path.hasExtension("java", "scala")
-
- private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim }
- lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".")
- lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "."
-
- override def toString = path.path
-}
-
-object Fileish {
- def apply(f: File): Fileish = new Fileish(f, () => f.inputStream())
- def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in)
- def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in)
-}
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index e919621338..2967f67e9c 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -10,8 +10,7 @@ import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException,
import java.util.jar._
import scala.collection.JavaConverters._
import Attributes.Name
-import util.ClassPath
-import scala.language.implicitConversions
+import scala.language.{ implicitConversions, postfixOps }
// Attributes.Name instances:
//
@@ -37,9 +36,6 @@ class Jar(file: File) extends Iterable[JarEntry] {
def this(jfile: JFile) = this(File(jfile))
def this(path: String) = this(File(path))
- protected def errorFn(msg: String): Unit = Console println msg
-
- lazy val jarFile = new JarFile(file.jfile)
lazy val manifest = withJarInput(s => Option(s.getManifest))
def mainClass = manifest map (f => f(Name.MAIN_CLASS))
@@ -51,6 +47,20 @@ class Jar(file: File) extends Iterable[JarEntry] {
case _ => Nil
}
+ /** Invoke f with input for named jar entry (or None). */
+ def withEntryStream[A](name: String)(f: Option[InputStream] => A) = {
+ val jarFile = new JarFile(file.jfile)
+ def apply() =
+ jarFile getEntry name match {
+ case null => f(None)
+ case entry =>
+ val in = Some(jarFile getInputStream entry)
+ try f(in)
+ finally in map (_.close())
+ }
+ try apply() finally jarFile.close()
+ }
+
def withJarInput[T](f: JarInputStream => T): T = {
val in = new JarInputStream(file.inputStream())
try f(in)
@@ -64,12 +74,6 @@ class Jar(file: File) extends Iterable[JarEntry] {
Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f
}
override def iterator: Iterator[JarEntry] = this.toList.iterator
- def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x)))
-
- private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match {
- case null => errorFn("No such entry: " + entry) ; null
- case x => x
- }
override def toString = "" + file
}
@@ -111,9 +115,9 @@ class JarWriter(val file: File, val manifest: Manifest) {
val buf = new Array[Byte](10240)
def loop(): Unit = in.read(buf, 0, buf.length) match {
case -1 => in.close()
- case n => out.write(buf, 0, n) ; loop
+ case n => out.write(buf, 0, n) ; loop()
}
- loop
+ loop()
}
def close() = out.close()
@@ -131,7 +135,6 @@ object Jar {
m
}
def apply(manifest: JManifest): WManifest = new WManifest(manifest)
- implicit def unenrichManifest(x: WManifest): JManifest = x.underlying
}
class WManifest(manifest: JManifest) {
for ((k, v) <- initialMainAttrs)
@@ -148,12 +151,7 @@ object Jar {
}
def apply(name: Attributes.Name): String = attrs(name)
- def apply(name: String): String = apply(new Attributes.Name(name))
def update(key: Attributes.Name, value: String) = attrs.put(key, value)
- def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value)
-
- def mainClass: String = apply(Name.MAIN_CLASS)
- def mainClass_=(value: String) = update(Name.MAIN_CLASS, value)
}
// See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html
@@ -161,7 +159,7 @@ object Jar {
private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
- def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true)
+ def isJarOrZip(f: Path): Boolean = isJarOrZip(f, examineFile = true)
def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala
deleted file mode 100644
index 5ffb5b4d4f..0000000000
--- a/src/compiler/scala/tools/nsc/io/Lexer.scala
+++ /dev/null
@@ -1,301 +0,0 @@
-package scala.tools.nsc.io
-
-import java.io.{Reader, Writer, StringReader, StringWriter}
-import scala.collection.mutable.{Buffer, ArrayBuffer}
-import scala.math.BigInt
-
-/** Companion object of class `Lexer` which defines tokens and some utility concepts
- * used for tokens and lexers
- */
-object Lexer {
-
- /** An exception raised if input does not correspond to what's expected
- * @param rdr the lexer from which the bad input is read
- * @param msg the error message
- */
- class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg)
-
- /** The class of tokens, i.e. descriptions of input words (or: lexemes).
- * @param str the characters making up this token
- */
- class Token(val str: String) {
- override def toString = str
- }
-
- /** A subclass of `Token` representing single-character delimiters
- * @param char the delimiter character making up this token
- */
- case class Delim(char: Char) extends Token("'"+char.toString+"'")
-
- /** A subclass of token representing integer literals */
- case class IntLit(override val str: String) extends Token(str)
-
- /** A subclass of token representing floating point literals */
- case class FloatLit(override val str: String) extends Token(str)
-
- /** A subclass of token representing string literals */
- case class StringLit(override val str: String) extends Token(str) {
- override def toString = quoted(str)
- }
-
- /** The `true` token */
- val TrueLit = new Token("true")
-
- /** The `false` token */
- val FalseLit = new Token("false")
-
- /** The `null` token */
- val NullLit = new Token("null")
-
- /** The '`(`' token */
- val LParen = new Delim('(')
-
- /** The '`)`' token */
- val RParen = new Delim(')')
-
- /** The '`{`' token */
- val LBrace = new Delim('{')
-
- /** The '`}`' token */
- val RBrace = new Delim('}')
-
- /** The '`[`' token */
- val LBracket = new Delim('[')
-
- /** The '`]`' token */
- val RBracket = new Delim(']')
-
- /** The '`,`' token */
- val Comma = new Delim(',')
-
- /** The '`:`' token */
- val Colon = new Delim(':')
-
- /** The token representing end of input */
- val EOF = new Token("<end of input>")
-
- private def toUDigit(ch: Int): Char = {
- val d = ch & 0xF
- (if (d < 10) d + '0' else d - 10 + 'A').toChar
- }
-
- private def addToStr(buf: StringBuilder, ch: Char) {
- ch match {
- case '"' => buf ++= "\\\""
- case '\b' => buf ++= "\\b"
- case '\f' => buf ++= "\\f"
- case '\n' => buf ++= "\\n"
- case '\r' => buf ++= "\\r"
- case '\t' => buf ++= "\\t"
- case '\\' => buf ++= "\\\\"
- case _ =>
- if (' ' <= ch && ch < 128) buf += ch
- else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch)
- }
- }
-
- /** Returns given string enclosed in `"`-quotes with all string characters escaped
- * so that they correspond to the JSON standard.
- * Characters that are escaped are: `"`, `\b`, `\f`, `\n`, `\r`, `\t`, `\`.
- * Furthermore, every other character which is not in the ASCII range 32-127 is
- * escaped as a four hex-digit unicode character of the form `\ u x x x x`.
- * @param str the string to be quoted
- */
- def quoted(str: String): String = {
- val buf = new StringBuilder += '\"'
- str foreach (addToStr(buf, _))
- buf += '\"'
- buf.toString
- }
-
- private val BUF_SIZE = 2 << 16
-}
-
-import Lexer._
-
-/** A simple lexer for tokens as they are used in JSON, plus parens `(`, `)`
- * Tokens understood are:
- *
- * `(`, `)`, `[`, `]`, `{`, `}`, `:`, `,`, `true`, `false`, `null`,
- * strings (syntax as in JSON),
- * integer numbers (syntax as in JSON: -?(0|\d+))
- * floating point numbers (syntax as in JSON: -?(0|\d+)(\.\d+)?((e|E)(+|-)?\d+)?)
- * The end of input is represented as its own token, EOF.
- * Lexers can keep one token lookahead
- *
- * @param rd the reader from which characters are read.
- */
-class Lexer(rd: Reader) {
-
- /** The last-read character */
- var ch: Char = 0
-
- /** The number of characters read so far */
- var pos: Long = 0
-
- /** The last-read token */
- var token: Token = _
-
- /** The number of characters read before the start of the last-read token */
- var tokenPos: Long = 0
-
- private var atEOF: Boolean = false
- private val buf = new Array[Char](BUF_SIZE)
- private var nread: Int = 0
- private var bp = 0
-
- /** Reads next character into `ch` */
- def nextChar() {
- assert(!atEOF)
- if (bp == nread) {
- nread = rd.read(buf)
- bp = 0
- if (nread <= 0) { ch = 0; atEOF = true; return }
- }
- ch = buf(bp)
- bp += 1
- pos += 1
- }
-
- /** If last-read character equals given character, reads next character,
- * otherwise raises an error
- * @param c the given character to compare with last-read character
- * @throws MalformedInput if character does not match
- */
- def acceptChar(c: Char) = if (ch == c) nextChar() else error("'"+c+"' expected")
-
- private val sb = new StringBuilder
-
- private def putChar() {
- sb += ch; nextChar()
- }
-
- private def putAcceptString(str: String) {
- str foreach acceptChar
- sb ++= str
- }
-
- /** Skips whitespace and reads next lexeme into `token`
- * @throws MalformedInput if lexeme not recognized as a valid token
- */
- def nextToken() {
- sb.clear()
- while (!atEOF && ch <= ' ') nextChar()
- tokenPos = pos - 1
- if (atEOF) token = EOF
- else ch match {
- case '(' => putChar(); token = LParen
- case ')' => putChar(); token = RParen
- case '{' => putChar(); token = LBrace
- case '}' => putChar(); token = RBrace
- case '[' => putChar(); token = LBracket
- case ']' => putChar(); token = RBracket
- case ',' => putChar(); token = Comma
- case ':' => putChar(); token = Colon
- case 't' => putAcceptString("true"); token = TrueLit
- case 'f' => putAcceptString("false"); token = FalseLit
- case 'n' => putAcceptString("null"); token = NullLit
- case '"' => getString()
- case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => getNumber()
- case _ => error("unrecognized start of token: '"+ch+"'")
- }
- //println("["+token+"]")
- }
-
- /** Reads a string literal, and forms a `StringLit` token from it.
- * Last-read input character `ch` must be opening `"`-quote.
- * @throws MalformedInput if lexeme not recognized as a string literal.
- */
- def getString() {
- def udigit() = {
- nextChar()
- if ('0' <= ch && ch <= '9') ch - '9'
- else if ('A' <= ch && ch <= 'F') ch - 'A' + 10
- else if ('a' <= ch && ch <= 'f') ch - 'a' + 10
- else error("illegal unicode escape character: '"+ch+"'")
- }
- val delim = ch
- nextChar()
- while (ch != delim && ch >= ' ') {
- if (ch == '\\') {
- nextChar()
- ch match {
- case '\'' => sb += '\''
- case '"' => sb += '"'
- case '\\' => sb += '\\'
- case '/' => sb += '/'
- case 'b' => sb += '\b'
- case 'f' => sb += '\f'
- case 'n' => sb += '\n'
- case 'r' => sb += '\r'
- case 't' => sb += '\t'
- case 'u' => sb += (udigit() << 12 | udigit() << 8 | udigit() << 4 | udigit()).toChar
- case _ => error("illegal escape character: '"+ch+"'")
- }
- nextChar()
- } else {
- putChar()
- }
- }
- acceptChar(delim)
- token = StringLit(sb.toString)
- }
-
- /** Reads a numeric literal, and forms an `IntLit` or `FloatLit` token from it.
- * Last-read input character `ch` must be either `-` or a digit.
- * @throws MalformedInput if lexeme not recognized as a numeric literal.
- */
- def getNumber() {
- def digit() =
- if ('0' <= ch && ch <= '9') putChar()
- else error("<digit> expected")
- def digits() =
- do { digit() } while ('0' <= ch && ch <= '9')
- var isFloating = false
- if (ch == '-') putChar()
- if (ch == '0') digit()
- else digits()
- if (ch == '.') {
- isFloating = true
- putChar()
- digits()
- }
- if (ch == 'e' || ch == 'E') {
- isFloating = true
- putChar()
- if (ch == '+' || ch == '-') putChar()
- digits()
- }
- token = if (isFloating) FloatLit(sb.toString) else IntLit(sb.toString)
- }
-
- /** If current token equals given token, reads next token, otherwise raises an error.
- * @param t the given token to compare current token with
- * @throws MalformedInput if the two tokens do not match.
- */
- def accept(t: Token) {
- if (token == t) nextToken()
- else error(t+" expected, but "+token+" found")
- }
-
- /** If the current token is a delimiter consisting of the given character, reads the next token,
- * otherwise raises an error.
- * @param c the given delimiter character to compare current token with
- * @throws MalformedInput if the current token `token` is not a delimiter, or
- * consists of a character different from `c`.
- */
- def accept(ch: Char) {
- token match {
- case Delim(`ch`) => nextToken()
- case _ => accept(Delim(ch))
- }
- }
-
- /** Always throws a `MalformedInput` exception with given error message.
- * @param msg the error message
- */
- def error(msg: String) = throw new MalformedInput(this, msg)
-
- nextChar()
- nextToken()
-}
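
To illustrate the token stream described in the class comment, here is a minimal sketch that drives this lexer over a small JSON fragment. It assumes the Lexer class being deleted here, so it only compiles against a tree that still contains it; the demo object is made up.

    import java.io.StringReader
    import scala.tools.nsc.io.Lexer
    import scala.tools.nsc.io.Lexer.EOF

    object LexerSketch {
      def main(args: Array[String]): Unit = {
        val lexer = new Lexer(new StringReader("""{ "answer": 42, "ok": true }"""))
        // The constructor has already read the first token; walk until EOF.
        while (lexer.token != EOF) {
          println(lexer.token)   // prints each token up to the end of input
          lexer.nextToken()
        }
      }
    }
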
diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala
deleted file mode 100644
index 2f0a71fc60..0000000000
--- a/src/compiler/scala/tools/nsc/io/MsilFile.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import ch.epfl.lamp.compiler.msil.{ Type => MsilType, _ }
-
-/** This class wraps an MsilType. It exists only so
- * ClassPath can treat all of JVM/MSIL/bin/src files
- * uniformly, as AbstractFiles.
- */
-class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) {
-}
-
-object NoMsilFile extends MsilFile(null) { }
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
deleted file mode 100644
index b03a921e87..0000000000
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ /dev/null
@@ -1,454 +0,0 @@
-package scala.tools.nsc.io
-
-import scala.annotation.unchecked
-import Lexer._
-import java.io.Writer
-import scala.language.implicitConversions
-import scala.reflect.ClassTag
-
-/** An abstract class for writing and reading Scala objects to and
- * from a legible representation. The representation follows this grammar:
- * {{{
- * Pickled = `true` | `false` | `null` | NumericLit | StringLit |
- * Labelled | Pickled `,` Pickled
- * Labelled = StringLit `(` Pickled? `)`
- * }}}
- *
- * All ...Lit classes are as in JSON. @see scala.tools.nsc.io.Lexer
- *
- * Subclasses of `Pickler` each can write and read individual classes
- * of values.
- *
- * @param T the type of values handled by this pickler.
- *
- * These Picklers build on the work of Andrew Kennedy. They are most closely inspired by
- * Iulian Dragos' picklers for Scala to XML. See:
- *
- * <a href="http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide">
- * http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide
- * </a>
- */
-abstract class Pickler[T] {
-
- import Pickler._
-
- /** Writes value in pickled form
- * @param wr the writer to which pickled form is written
- * @param x the value to write
- */
- def pickle(wr: Writer, x: T)
-
- /** Reads value from pickled form.
- *
- * @param rd the lexer from which lexemes are read
- * @return An `UnpickleSuccess` value if the current input corresponds to the
- * kind of value that is unpickled by the current subclass of `Pickler`,
- * an `UnpickleFailure` value otherwise.
- * @throws `Lexer.MalformedInput` if input is invalid, or if
- * an `Unpickle
- */
- def unpickle(rd: Lexer): Unpickled[T]
-
- /** A pickler representing a `~`-pair of values as two consecutive pickled
- * strings, separated by a comma.
- * @param that the second pickler which together with the current pickler makes
- * up the pair `this ~ that` to be pickled.
- */
- def ~ [U] (that: => Pickler[U]): Pickler[T ~ U] = seqPickler(this, that)
-
- /** A pickler that adds a label to the current pickler, using the representation
- * `label ( <current pickler> )`
- *
- * @param label the string to be added as a label.
- */
- def labelled(label: String): Pickler[T] = labelledPickler(label, this)
-
- /** A pickler obtained from the current pickler by a pair of transformer functions
- * @param in the function that maps values handled by the current pickler to
- * values handled by the wrapped pickler.
- * @param out the function that maps values handled by the wrapped pickler to
- * values handled by the current pickler.
- */
- def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out)
-
- /** A pickler obtained from the current pickler by also admitting `null` as
- * a handled value, represented as the token `null`.
- *
- * @param fromNull an implicit evidence parameter ensuring that the type of values
- * handled by this pickler contains `null`.
- */
- def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this)
-
- /** A conditional pickler obtained from the current pickler.
- * @param cond the condition to test to find out whether pickler can handle
- * some Scala value.
- */
- def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p)
-
- /** A conditional pickler handling values of some Scala class. It adds the
- * class name as a label to the representation of the current pickler and
- * @param c the class of values handled by this pickler.
- */
- def asClass[U <: T](c: Class[U]): CondPickler[T] = this.labelled(c.getName).cond(c isInstance _)
-}
-
-object Pickler {
-
- var picklerDebugMode = false
-
- /** A base class representing unpickler result. It has two subclasses:
- * `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
- * where a value of the given type `T` could not be unpickled from input.
- * @param T the type of unpickled values in case of success.
- */
- abstract class Unpickled[+T] {
- /** Transforms success values to success values using given function,
- * leaves failures alone
- * @param f the function to apply.
- */
- def map[U](f: T => U): Unpickled[U] = this match {
- case UnpickleSuccess(x) => UnpickleSuccess(f(x))
- case f: UnpickleFailure => f
- }
- /** Transforms success values to successes or failures using given function,
- * leaves failures alone.
- * @param f the function to apply.
- */
- def flatMap[U](f: T => Unpickled[U]): Unpickled[U] = this match {
- case UnpickleSuccess(x) => f(x)
- case f: UnpickleFailure => f
- }
- /** Tries alternate expression if current result is a failure
- * @param alt the alternate expression to be tried in case of failure
- */
- def orElse[U >: T](alt: => Unpickled[U]): Unpickled[U] = this match {
- case UnpickleSuccess(x) => this
- case f: UnpickleFailure => alt
- }
-
- /** Transforms failures into thrown `MalformedInput` exceptions.
- * @throws MalformedInput if current result is a failure
- */
- def requireSuccess: UnpickleSuccess[T] = this match {
- case s @ UnpickleSuccess(x) => s
- case f: UnpickleFailure =>
- throw new MalformedInput(f.rd, "Unrecoverable unpickle failure:\n"+f.errMsg)
- }
- }
-
- /** A class representing successful unpicklings
- * @param T the type of the unpickled value
- * @param result the unpickled value
- */
- case class UnpickleSuccess[+T](result: T) extends Unpickled[T]
-
- /** A class representing unpickle failures
- * @param msg an error message describing what failed.
- * @param rd the lexer unpickled values were read from (can be used to get
- * error position, for instance).
- */
- class UnpickleFailure(msg: => String, val rd: Lexer) extends Unpickled[Nothing] {
- def errMsg = msg
- override def toString = "Failure at "+rd.tokenPos+":\n"+msg
- }
-
- private def errorExpected(rd: Lexer, msg: => String) =
- new UnpickleFailure("expected: "+msg+"\n" +
- "found : "+rd.token,
- rd)
-
- private def nextSuccess[T](rd: Lexer, result: T) = {
- rd.nextToken()
- UnpickleSuccess(result)
- }
-
- /** The implicit `Pickler` value for type `T`. Equivalent to `implicitly[Pickler[T]]`.
- */
- def pkl[T: Pickler] = implicitly[Pickler[T]]
-
- /** A class representing `~`-pairs */
- case class ~[+S, +T](fst: S, snd: T)
-
- /** A wrapper class to be able to use `~` as an infix method */
- implicit class TildeDecorator[S](x: S) {
- /** Infix method that forms a `~`-pair. */
- def ~ [T](y: T): S ~ T = new ~ (x, y)
- }
-
- /** A converter from binary functions to functions over `~`-pairs
- */
- implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) }
-
- /** A converter from functions returning Options over pairs to functions returning `~`-pairs.
- * The converted function will raise a `MatchError` where the original function returned
- * a `None`. This converter is useful for turning `unapply` methods of case classes
- * into wrapper methods that can be passed as second argument to `wrap`.
- */
- implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } }
-
- /** Same as `p.labelled(label)`.
- */
- def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
- def pickle(wr: Writer, x: T) = {
- wr.write(quoted(label));
- wr.write("(")
- p.pickle(wr, x)
- wr.write(")")
- }
- def unpickle(rd: Lexer): Unpickled[T] =
- rd.token match {
- case StringLit(`label`) =>
- rd.nextToken()
- rd.accept('(')
- val result = p.unpickle(rd).requireSuccess
- rd.accept(')')
- result
- case _ =>
- errorExpected(rd, quoted(label)+"(...)")
- }
- }
-
- /** Same as `p.wrapped(in)(out)`
- */
- def wrappedPickler[S, T](p: Pickler[S])(in: S => T)(out: T => S) = new Pickler[T] {
- def pickle(wr: Writer, x: T) = p.pickle(wr, out(x))
- def unpickle(rd: Lexer) = p.unpickle(rd) map in
- }
-
- /** Same as `p.cond(condition)`
- */
- def conditionalPickler[T](p: Pickler[T], condition: Any => Boolean) = new CondPickler[T](condition) {
- def pickle(wr: Writer, x: T) = p.pickle(wr, x)
- def unpickle(rd: Lexer) = p.unpickle(rd)
- }
-
- /** Same as `p ~ q`
- */
- def seqPickler[T, U](p: Pickler[T], q: => Pickler[U]) = new Pickler[T ~ U] {
- lazy val qq = q
- def pickle(wr: Writer, x: T ~ U) = {
- p.pickle(wr, x.fst)
- wr.write(',')
- q.pickle(wr, x.snd)
- }
- def unpickle(rd: Lexer) =
- for (x <- p.unpickle(rd); y <- { rd.accept(','); qq.unpickle(rd).requireSuccess })
- yield x ~ y
- }
-
- /** Same as `p | q`
- */
- def eitherPickler[T, U <: T, V <: T](p: CondPickler[U], q: => CondPickler[V]) =
- new CondPickler[T](x => p.canPickle(x) || q.canPickle(x)) {
- lazy val qq = q
- override def tryPickle(wr: Writer, x: Any): Boolean =
- p.tryPickle(wr, x) || qq.tryPickle(wr, x)
- def pickle(wr: Writer, x: T) =
- require(tryPickle(wr, x),
- "no pickler found for "+x+" of class "+x.getClass.getName)
- def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
- }
-
- /** Same as `p.orNull`
- */
- def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] {
- def pickle(wr: Writer, x: T) =
- if (x == null) wr.write("null") else p.pickle(wr, x)
- def unpickle(rd: Lexer): Unpickled[T] =
- if (rd.token == NullLit) nextSuccess(rd, fromNull(null))
- else p.unpickle(rd)
- }
-
- /** A conditional pickler for singleton objects. It represents these
- * with the object's underlying class as a label.
- * Example: Object scala.None would be represented as `scala.None$()`.
- */
- def singletonPickler[T <: AnyRef](x: T): CondPickler[T] =
- unitPickler
- .wrapped { _ => x } { x => () }
- .labelled (x.getClass.getName)
- .cond (x eq _.asInstanceOf[AnyRef])
-
- /** A pickler that handles instances of classes that have an empty constructor.
- * It represents them as `$new ( <name of class> )`.
- * When unpickling, a new instance of the class is created using the empty
- * constructor of the class via `Class.forName(<name of class>).newInstance()`.
- */
- def javaInstancePickler[T <: AnyRef]: Pickler[T] =
- (stringPickler labelled "$new")
- .wrapped { name => Class.forName(name).newInstance().asInstanceOf[T] } { _.getClass.getName }
-
- /** A pickler that handles iterators. It pickles all values
- * returned by an iterator separated by commas.
- * When unpickling, it always returns an `UnpickleSuccess` containing an iterator.
- * This iterator returns 0 or more values that are obtained by unpickling
- * until a closing parenthesis, bracket or brace or the end of input is encountered.
- *
- * This means that iterator picklers should not be directly followed by `~`
- * because the pickler would also read any values belonging to the second
- * part of the `~`-pair.
- *
- * What's usually done instead is that the iterator pickler is wrapped and labelled
- * to handle other kinds of sequences.
- */
- implicit def iterPickler[T: Pickler]: Pickler[Iterator[T]] = new Pickler[Iterator[T]] {
- lazy val p = pkl[T]
- def pickle(wr: Writer, xs: Iterator[T]) {
- var first = true
- for (x <- xs) {
- if (first) first = false else wr.write(',')
- p.pickle(wr, x)
- }
- }
- def unpickle(rd: Lexer): Unpickled[Iterator[T]] = UnpickleSuccess(new Iterator[T] {
- var first = true
- def hasNext = {
- val t = rd.token
- t != EOF && t != RParen && t != RBrace && t != RBracket
- }
- def next(): T = {
- if (first) first = false else rd.accept(',')
- p.unpickle(rd).requireSuccess.result
- }
- })
- }
-
- /** A pickler that handles values that can be represented as a single token.
- * @param kind the kind of token representing the value, used in error messages
- * for unpickling.
- * @param matcher A partial function from tokens to handled values. Unpickling
- * succeeds if the matcher function is defined on the current token.
- */
- private def tokenPickler[T](kind: String)(matcher: PartialFunction[Token, T]) = new Pickler[T] {
- def pickle(wr: Writer, x: T) = wr.write(x.toString)
- def unpickle(rd: Lexer) =
- if (matcher isDefinedAt rd.token) nextSuccess(rd, matcher(rd.token))
- else errorExpected(rd, kind)
- }
-
- /** A pickler for values of type `Long`, represented as integer literals */
- implicit val longPickler: Pickler[Long] =
- tokenPickler("integer literal") { case IntLit(s) => s.toLong }
-
- /** A pickler for values of type `Double`, represented as floating point literals */
- implicit val doublePickler: Pickler[Double] =
- tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble }
-
- /** A pickler for values of type `Byte`, represented as integer literals */
- implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong }
-
- /** A pickler for values of type `Short`, represented as integer literals */
- implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong }
-
- /** A pickler for values of type `Int`, represented as integer literals */
- implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong }
-
- /** A pickler for values of type `Float`, represented as floating point literals */
- implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toLong }
-
- /** A conditional pickler for the boolean value `true` */
- private val truePickler =
- tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true }
-
- /** A conditional pickler for the boolean value `false` */
- private val falsePickler =
- tokenPickler("boolean literal") { case FalseLit => false } cond { _ == false }
-
- /** A pickler for values of type `Boolean`, represented as the literals `true` or `false`. */
- implicit def booleanPickler: Pickler[Boolean] = truePickler | falsePickler
-
- /** A pickler for values of type `Unit`, represented by the empty character string */
- implicit val unitPickler: Pickler[Unit] = new Pickler[Unit] {
- def pickle(wr: Writer, x: Unit) {}
- def unpickle(rd: Lexer): Unpickled[Unit] = UnpickleSuccess(())
- }
-
- /** A pickler for values of type `String`, represented as string literals */
- implicit val stringPickler: Pickler[String] = new Pickler[String] {
- def pickle(wr: Writer, x: String) = wr.write(if (x == null) "null" else quoted(x))
- def unpickle(rd: Lexer) = rd.token match {
- case StringLit(s) => nextSuccess(rd, s)
- case NullLit => nextSuccess(rd, null)
- case _ => errorExpected(rd, "string literal")
- }
- }
-
- /** A pickler for values of type `Char`, represented as string literals of length 1 */
- implicit val charPickler: Pickler[Char] =
- stringPickler
- .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString }
-
- /** A pickler for pairs, represented as `~`-pairs */
- implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] =
- (pkl[T1] ~ pkl[T2])
- .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 }
- .labelled ("tuple2")
-
- /** A pickler for 3-tuples, represented as `~`-tuples */
- implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] =
- (p1 ~ p2 ~ p3)
- .wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 }
- .labelled ("tuple3")
-
- /** A pickler for 4-tuples, represented as `~`-tuples */
- implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] =
- (p1 ~ p2 ~ p3 ~ p4)
- .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 }
- .labelled ("tuple4")
-
- /** A conditional pickler for the `scala.None` object */
- implicit val nonePickler = singletonPickler(None)
-
- /** A conditional pickler for instances of class `scala.Some` */
- implicit def somePickler[T: Pickler]: CondPickler[Some[T]] =
- pkl[T]
- .wrapped { Some(_) } { _.get }
- .asClass (classOf[Some[T]])
-
- /** A pickler for optional values */
- implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T]
-
- /** A pickler for list values */
- implicit def listPickler[T: Pickler]: Pickler[List[T]] =
- iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List")
-
- /** A pickler for vector values */
- implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] =
- iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector")
-
- /** A pickler for array values */
- implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] =
- iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array")
-}
-
-/** A subclass of Pickler can indicate whether a particular value can be pickled by instances
- * of this class.
- * @param canPickle The predicate that indicates whether a given value
- * can be pickled by instances of this class.
- */
-abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T] {
- import Pickler._
-
- /** Pickles given value `x` if possible, as indicated by `canPickle(x)`.
- */
- def tryPickle(wr: Writer, x: Any): Boolean = {
- val result = canPickle(x)
- if (result) pickle(wr, x.asInstanceOf[T])
- result
- }
-
- /** A pickler obtained from this pickler and an alternative pickler.
- * To pickle a value, this pickler is tried first. If it cannot handle
- * the object (as indicated by its `canPickle` test), then the
- * alternative pickler is tried.
- * To unpickle a value, this unpickler is tried first. If it cannot read
- * the input (as indicated by an `UnpickleFailure` result), then the
- * alternative pickler is tried.
- * @param V The handled type of the returned pickler.
- * @param U The handled type of the alternative pickler.
- * @param that The alternative pickler.
- */
- def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] =
- eitherPickler[V, T, U](this, that)
-}
-
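
    For reference, a composite pickler built from the combinators above looked roughly like the
    following sketch. It is illustrative only: the `Person` case class and its fields are invented,
    while `pkl`, `~`, `wrapped` and `labelled` are the combinators defined in the file being
    removed, assumed to live in the `scala.tools.nsc.io` package like the neighbouring files.

    import scala.tools.nsc.io.Pickler
    import Pickler._

    case class Person(name: String, age: Int)

    // Pair the field picklers with `~`, then map between the `~`-pair and the case class.
    implicit val personPickler: Pickler[Person] =
      (pkl[String] ~ pkl[Int])
        .wrapped { case name ~ age => Person(name, age) } { p => p.name ~ p.age }
        .labelled("Person")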
diff --git a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
deleted file mode 100644
index acd4847469..0000000000
--- a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package scala.tools.nsc.io
-
-import java.io.Writer
-
-class PrettyWriter(wr: Writer) extends Writer {
- protected val indentStep = " "
- private var indent = 0
- private def newLine() {
- wr.write('\n')
- wr.write(indentStep * indent)
- }
- def close() = wr.close()
- def flush() = wr.flush()
- def write(str: Array[Char], off: Int, len: Int): Unit = {
- if (off < str.length && off < len) {
- str(off) match {
- case '{' | '[' | '(' =>
- indent += 1
- wr.write(str(off))
- newLine()
- wr.write(str, off + 1, len - 1)
- case '}' | ']' | ')' =>
- wr.write(str, off, len)
- indent -= 1
- case ',' =>
- wr.write(',')
- newLine()
- wr.write(str, off + 1, len - 1)
- case ':' =>
- wr.write(':')
- wr.write(' ')
- wr.write(str, off + 1, len - 1)
- case _ =>
- wr.write(str, off, len)
- }
- } else {
- wr.write(str, off, len)
- }
- }
- override def toString = wr.toString
-}
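
    PrettyWriter reacts only to the first character of each write call, so callers fed it one
    token at a time. A hedged sketch of the effect (the written values are arbitrary):

    import java.io.StringWriter
    import scala.tools.nsc.io.PrettyWriter

    val out = new StringWriter
    val pw  = new PrettyWriter(out)
    pw.write("Person")
    pw.write("(")        // opening bracket: indentation goes up, then a line break
    pw.write("\"Kim\"")
    pw.write(",")        // comma: line break at the current indentation
    pw.write("42")
    pw.write(")")        // closing bracket: indentation goes back down
    pw.flush()
    // out.toString is now spread over indented lines rather than one long line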
diff --git a/src/compiler/scala/tools/nsc/io/Replayer.scala b/src/compiler/scala/tools/nsc/io/Replayer.scala
deleted file mode 100644
index 5cb61b6cb1..0000000000
--- a/src/compiler/scala/tools/nsc/io/Replayer.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-package scala.tools.nsc.io
-
-import java.io.{Reader, Writer}
-
-import Pickler._
-import Lexer.{Token, EOF}
-
-abstract class LogReplay {
- def logreplay(event: String, x: => Boolean): Boolean
- def logreplay[T: Pickler](event: String, x: => Option[T]): Option[T]
- def close()
- def flush()
-}
-
-class Logger(wr0: Writer) extends LogReplay {
- val wr = new PrettyWriter(wr0)
- private var first = true
- private def insertComma() = if (first) first = false else wr.write(",")
-
- def logreplay(event: String, x: => Boolean) = {
- val xx = x
- if (xx) { insertComma(); pkl[Unit].labelled(event).pickle(wr, ()) }
- xx
- }
- def logreplay[T: Pickler](event: String, x: => Option[T]) = {
- val xx = x
- xx match {
- case Some(y) => insertComma(); pkl[T].labelled(event).pickle(wr, y)
- case None =>
- }
- xx
- }
- def close() { wr.close() }
- def flush() { wr.flush() }
-}
-
-object NullLogger extends LogReplay {
- def logreplay(event: String, x: => Boolean) = x
- def logreplay[T: Pickler](event: String, x: => Option[T]) = x
- def close() {}
- def flush() {}
-}
-
-class Replayer(raw: Reader) extends LogReplay {
- private val rd = new Lexer(raw)
- private var nextComma = false
-
- private def eatComma() =
- if (nextComma) { rd.accept(','); nextComma = false }
-
- def logreplay(event: String, x: => Boolean) =
- if (rd.token == EOF) NullLogger.logreplay(event, x)
- else {
- eatComma()
- pkl[Unit].labelled(event).unpickle(rd) match {
- case UnpickleSuccess(_) => nextComma = true; true
- case _ => false
- }
- }
-
- def logreplay[T: Pickler](event: String, x: => Option[T]) =
- if (rd.token == EOF) NullLogger.logreplay(event, x)
- else {
- eatComma()
- pkl[T].labelled(event).unpickle(rd) match {
- case UnpickleSuccess(y) => nextComma = true; Some(y)
- case _ => None
- }
- }
-
- def close() { raw.close() }
- def flush() {}
-}
-
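
    The Logger/Replayer pair implemented record-and-replay over the pickler format: the same call
    site either evaluates its by-name argument and logs the result, or reads the previously logged
    result back. A hedged sketch of a call site (the file name and `expensiveCheck` are invented):

    import java.io.{ FileReader, FileWriter }
    import scala.tools.nsc.io.{ Logger, Pickler, Replayer }
    import Pickler._   // primitive picklers such as intPickler

    def expensiveCheck(): Option[Int] = Some(42)   // stand-in for real work

    // Recording: the argument is evaluated and its result pickled to the log.
    val logger = new Logger(new FileWriter("session.log"))
    val recorded = logger.logreplay("check", expensiveCheck())
    logger.close()

    // Replaying: the logged result is read back instead; the argument is by-name,
    // so it is not re-evaluated while matching log entries are found.
    val replayer = new Replayer(new FileReader("session.log"))
    val replayed = replayer.logreplay("check", expensiveCheck())
    replayer.close()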
diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala
index e766c1b2fd..a803e4121a 100644
--- a/src/compiler/scala/tools/nsc/io/Socket.scala
+++ b/src/compiler/scala/tools/nsc/io/Socket.scala
@@ -9,18 +9,11 @@ package io
import java.io.{ IOException, InputStreamReader, BufferedReader, PrintWriter, Closeable }
import java.io.{ BufferedOutputStream, BufferedReader }
import java.net.{ ServerSocket, SocketException, SocketTimeoutException, InetAddress, Socket => JSocket }
-import scala.sys.SystemProperties._
import scala.io.Codec
/** A skeletal only-as-much-as-I-need Socket wrapper.
*/
object Socket {
- def preferringIPv4[T](body: => T): T = exclusively {
- val saved = preferIPv4Stack.value
- try { preferIPv4Stack.enable() ; body }
- finally preferIPv4Stack setValue saved
- }
-
class Box[+T](f: () => T) {
private def handlerFn[U](f: Throwable => U): PartialFunction[Throwable, U] = {
case x @ (_: IOException | _: SecurityException) => f(x)
@@ -28,13 +21,10 @@ object Socket {
private val optHandler = handlerFn[Option[T]](_ => None)
private val eitherHandler = handlerFn[Either[Throwable, T]](x => Left(x))
- def getOrElse[T1 >: T](alt: T1): T1 = opt getOrElse alt
def either: Either[Throwable, T] = try Right(f()) catch eitherHandler
def opt: Option[T] = try Some(f()) catch optHandler
}
- def newIPv4Server(port: Int = 0) = new Box(() => preferringIPv4(new ServerSocket(0)))
- def newServer(port: Int = 0) = new Box(() => new ServerSocket(0))
def localhost(port: Int) = apply(InetAddress.getLocalHost(), port)
def apply(host: InetAddress, port: Int) = new Box(() => new Socket(new JSocket(host, port)))
def apply(host: String, port: Int) = new Box(() => new Socket(new JSocket(host, port)))
@@ -62,4 +52,4 @@ class Socket(jsocket: JSocket) extends Streamable.Bytes with Closeable {
out.close()
}
}
-} \ No newline at end of file
+}
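
    The surviving Box wrapper turns connection failures (IOException, SecurityException) into
    values instead of thrown exceptions. A hedged usage sketch (host and port are arbitrary):

    import scala.tools.nsc.io.Socket

    Socket("127.0.0.1", 4444).either match {
      case Right(sock) =>
        // connected: use the socket, then release it
        sock.close()
      case Left(err) =>
        Console.err.println("connection failed: " + err.getMessage)
    }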
diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala
index 569270f530..3220c2e2b2 100644
--- a/src/compiler/scala/tools/nsc/io/SourceReader.scala
+++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala
@@ -9,7 +9,7 @@ package io
import java.io.{ FileInputStream, InputStream, IOException }
import java.nio.{ByteBuffer, CharBuffer}
-import java.nio.channels.{FileChannel, ReadableByteChannel, Channels}
+import java.nio.channels.{ ReadableByteChannel, Channels }
import java.nio.charset.{CharsetDecoder, CoderResult}
import scala.tools.nsc.reporters._
@@ -33,9 +33,6 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
"Please try specifying another one using the -encoding option")
}
- /** Reads the file with the specified name. */
- def read(filename: String): Array[Char]= read(new JFile(filename))
-
/** Reads the specified file. */
def read(file: JFile): Array[Char] = {
val c = new FileInputStream(file).getChannel
@@ -77,7 +74,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
protected def read(bytes: ByteBuffer): Array[Char] = {
val decoder: CharsetDecoder = this.decoder.reset()
val chars: CharBuffer = this.chars; chars.clear()
- terminate(flush(decoder, decode(decoder, bytes, chars, true)))
+ terminate(flush(decoder, decode(decoder, bytes, chars, endOfInput = true)))
}
//########################################################################
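
    The `endOfInput = true` argument above follows the standard java.nio contract: it tells the
    decoder that no further bytes will arrive, so an incomplete trailing character is reported
    rather than silently held back. A self-contained sketch of that step using plain JDK classes
    (not the compiler's own helpers):

    import java.nio.{ ByteBuffer, CharBuffer }
    import java.nio.charset.StandardCharsets

    val decoder = StandardCharsets.UTF_8.newDecoder()
    val bytes   = ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8))
    val chars   = CharBuffer.allocate(16)
    decoder.decode(bytes, chars, true)   // endOfInput = true: this is the final chunk
    decoder.flush(chars)                 // drain any internal decoder state
    chars.flip()                         // make the decoded characters readable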
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 711696bb6e..5f2f90c284 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -5,9 +5,6 @@
package scala.tools.nsc
-import java.util.concurrent.{ Future, Callable }
-import java.util.{ Timer, TimerTask }
-import java.util.jar.{ Attributes }
import scala.language.implicitConversions
package object io {
@@ -21,41 +18,13 @@ package object io {
type Path = scala.reflect.io.Path
val Path = scala.reflect.io.Path
type PlainFile = scala.reflect.io.PlainFile
- val PlainFile = scala.reflect.io.PlainFile
val Streamable = scala.reflect.io.Streamable
type VirtualDirectory = scala.reflect.io.VirtualDirectory
type VirtualFile = scala.reflect.io.VirtualFile
- val ZipArchive = scala.reflect.io.ZipArchive
type ZipArchive = scala.reflect.io.ZipArchive
-
- implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
type JManifest = java.util.jar.Manifest
type JFile = java.io.File
implicit def enrichManifest(m: JManifest): Jar.WManifest = Jar.WManifest(m)
- private lazy val daemonThreadPool = DaemonThreadFactory.newPool()
-
- def runnable(body: => Unit): Runnable = new Runnable { override def run() = body }
- def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
- def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body)
- def submit(runnable: Runnable) = daemonThreadPool submit runnable
-
- // Create, start, and return a daemon thread
- def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body)
- def newThread(f: Thread => Unit)(body: => Unit): Thread = {
- val thread = new Thread(runnable(body))
- f(thread)
- thread.start
- thread
- }
-
- // Set a timer to execute the given code.
- def timer(seconds: Int)(body: => Unit): Timer = {
- val alarm = new Timer(true) // daemon
- val tt = new TimerTask { def run() = body }
-
- alarm.schedule(tt, seconds * 1000)
- alarm
- }
}
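
    With the thread and timer helpers gone from this package object, call sites fall back to
    java.util.concurrent directly. A hedged sketch of what a former `spawn { ... }` becomes
    (the pool is now the caller's responsibility):

    import java.util.concurrent.{ Callable, Executors }

    val pool   = Executors.newCachedThreadPool()
    val future = pool.submit(new Callable[Int] { def call() = 1 + 1 })   // runs off-thread
    println(future.get())                                                // blocks for the result
    pool.shutdown()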
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 0779e648cd..9875d27047 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -8,11 +8,11 @@
package scala.tools.nsc
package javac
-import scala.reflect.internal.util.OffsetPosition
import scala.collection.mutable.ListBuffer
import symtab.Flags
import JavaTokens._
import scala.language.implicitConversions
+import scala.reflect.internal.util.Position
trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val global : Global
@@ -27,7 +27,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
def deprecationWarning(off: Int, msg: String) = unit.deprecationWarning(off, msg)
- implicit def i2p(offset : Int) : Position = new OffsetPosition(unit.source, offset)
+ implicit def i2p(offset : Int) : Position = Position.offset(unit.source, offset)
def warning(pos : Int, msg : String) : Unit = unit.warning(pos, msg)
def syntaxError(pos: Int, msg: String) : Unit = unit.error(pos, msg)
}
@@ -35,7 +35,6 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
abstract class JavaParser extends ParserCommon {
val in: JavaScanner
- protected def posToReport: Int = in.currentPos
def freshName(prefix : String): Name
protected implicit def i2p(offset : Int) : Position
private implicit def p2i(pos : Position): Int = if (pos.isDefined) pos.point else -1
@@ -75,7 +74,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
nbraces += 1
case _ =>
}
- in.nextToken
+ in.nextToken()
}
}
@@ -94,11 +93,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (skipIt)
skip()
}
- def warning(msg: String) : Unit = warning(in.currentPos, msg)
-
def errorTypeTree = TypeTree().setType(ErrorType) setPos in.currentPos
- def errorTermTree = Literal(Constant(null)) setPos in.currentPos
- def errorPatternTree = blankExpr setPos in.currentPos
// --------- tree building -----------------------------
@@ -123,14 +118,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def makeTemplate(parents: List[Tree], stats: List[Tree]) =
Template(
parents,
- emptyValDef,
+ noSelfType,
if (treeInfo.firstConstructor(stats) == EmptyTree) makeConstructor(List()) :: stats
else stats)
def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
makeParam(nme.syntheticParamName(count), tpt)
def makeParam(name: String, tpt: Tree): ValDef =
- makeParam(newTypeName(name), tpt)
+ makeParam(name: TermName, tpt)
def makeParam(name: TermName, tpt: Tree): ValDef =
ValDef(Modifiers(Flags.JAVA | Flags.PARAM), name, tpt, EmptyTree)
@@ -153,7 +148,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
nbraces += 1
case _ =>
}
- in.nextToken
+ in.nextToken()
in.token match {
case RPAREN =>
nparens -= 1
@@ -168,7 +163,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
while (!(tokens contains in.token) && in.token != EOF) {
if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
else if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
- else in.nextToken
+ else in.nextToken()
}
}
@@ -178,18 +173,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def accept(token: Int): Int = {
val pos = in.currentPos
if (in.token != token) {
- val posToReport =
- //if (in.currentPos.line(unit.source).get(0) > in.lastPos.line(unit.source).get(0))
- // in.lastPos
- //else
- in.currentPos
+ val posToReport = in.currentPos
val msg =
JavaScannerConfiguration.token2string(token) + " expected but " +
JavaScannerConfiguration.token2string(in.token) + " found."
- syntaxError(posToReport, msg, true)
+ syntaxError(posToReport, msg, skipIt = true)
}
- if (in.token == token) in.nextToken
+ if (in.token == token) in.nextToken()
pos
}
@@ -209,7 +200,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def ident(): Name =
if (in.token == IDENTIFIER) {
val name = in.name
- in.nextToken
+ in.nextToken()
name
} else {
accept(IDENTIFIER)
@@ -219,7 +210,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def repsep[T <: Tree](p: () => T, sep: Int): List[T] = {
val buf = ListBuffer[T](p())
while (in.token == sep) {
- in.nextToken
+ in.nextToken()
buf += p()
}
buf.toList
@@ -233,7 +224,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case AppliedTypeTree(_, _) | ExistentialTypeTree(_, _) | SelectFromTypeTree(_, _) =>
tree
case _ =>
- syntaxError(tree.pos, "identifier expected", false)
+ syntaxError(tree.pos, "identifier expected", skipIt = false)
errorTypeTree
}
}
@@ -243,7 +234,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def qualId(): RefTree = {
var t: RefTree = atPos(in.currentPos) { Ident(ident()) }
while (in.token == DOT) {
- in.nextToken
+ in.nextToken()
t = atPos(in.currentPos) { Select(t, ident()) }
}
t
@@ -252,7 +243,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def optArrayBrackets(tpt: Tree): Tree =
if (in.token == LBRACKET) {
val tpt1 = atPos(in.pos) { arrayOf(tpt) }
- in.nextToken
+ in.nextToken()
accept(RBRACKET)
optArrayBrackets(tpt1)
} else tpt
@@ -260,21 +251,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def basicType(): Tree =
atPos(in.pos) {
in.token match {
- case BYTE => in.nextToken; TypeTree(ByteClass.tpe)
- case SHORT => in.nextToken; TypeTree(ShortClass.tpe)
- case CHAR => in.nextToken; TypeTree(CharClass.tpe)
- case INT => in.nextToken; TypeTree(IntClass.tpe)
- case LONG => in.nextToken; TypeTree(LongClass.tpe)
- case FLOAT => in.nextToken; TypeTree(FloatClass.tpe)
- case DOUBLE => in.nextToken; TypeTree(DoubleClass.tpe)
- case BOOLEAN => in.nextToken; TypeTree(BooleanClass.tpe)
- case _ => syntaxError("illegal start of type", true); errorTypeTree
+ case BYTE => in.nextToken(); TypeTree(ByteTpe)
+ case SHORT => in.nextToken(); TypeTree(ShortTpe)
+ case CHAR => in.nextToken(); TypeTree(CharTpe)
+ case INT => in.nextToken(); TypeTree(IntTpe)
+ case LONG => in.nextToken(); TypeTree(LongTpe)
+ case FLOAT => in.nextToken(); TypeTree(FloatTpe)
+ case DOUBLE => in.nextToken(); TypeTree(DoubleTpe)
+ case BOOLEAN => in.nextToken(); TypeTree(BooleanTpe)
+ case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree
}
}
def typ(): Tree =
optArrayBrackets {
- if (in.token == FINAL) in.nextToken
+ if (in.token == FINAL) in.nextToken()
if (in.token == IDENTIFIER) {
var t = typeArgs(atPos(in.currentPos)(Ident(ident())))
// typeSelect generates Select nodes if the lhs is an Ident or Select,
@@ -287,7 +278,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case _ => SelectFromTypeTree(t, name.toTypeName)
}
while (in.token == DOT) {
- in.nextToken
+ in.nextToken()
t = typeArgs(atPos(in.currentPos)(typeSelect(t, ident())))
}
convertToTypeId(t)
@@ -301,16 +292,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeArg(): Tree =
if (in.token == QMARK) {
val pos = in.currentPos
- in.nextToken
- var lo: Tree = TypeTree(NothingClass.tpe)
- var hi: Tree = TypeTree(AnyClass.tpe)
- if (in.token == EXTENDS) {
- in.nextToken
- hi = typ()
- } else if (in.token == SUPER) {
- in.nextToken
- lo = typ()
- }
+ in.nextToken()
+ val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else EmptyTree
+ val lo = if (in.token == SUPER) { in.nextToken() ; typ() } else EmptyTree
val tdef = atPos(pos) {
TypeDef(
Modifiers(Flags.JAVA | Flags.DEFERRED),
@@ -324,7 +308,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
typ()
}
if (in.token == LT) {
- in.nextToken
+ in.nextToken()
val t1 = convertToTypeId(t)
val args = repsep(typeArg, COMMA)
acceptClosingAngle()
@@ -339,7 +323,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def annotations(): List[Tree] = {
//var annots = new ListBuffer[Tree]
while (in.token == AT) {
- in.nextToken
+ in.nextToken()
annotation()
}
List() // don't pass on annotations for now
@@ -348,46 +332,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
/** Annotation ::= TypeName [`(` AnnotationArgument {`,` AnnotationArgument} `)`]
*/
def annotation() {
- val pos = in.currentPos
- var t = qualId()
+ qualId()
if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
else if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
}
-/*
- def annotationArg() = {
- val pos = in.token
- if (in.token == IDENTIFIER && in.lookaheadToken == ASSIGN) {
- val name = ident()
- accept(ASSIGN)
- atPos(pos) {
- ValDef(Modifiers(Flags.JAVA), name, TypeTree(), elementValue())
- }
- } else {
- elementValue()
- }
- }
-
- def elementValue(): Tree =
- if (in.token == AT) annotation()
- else if (in.token == LBRACE) elementValueArrayInitializer()
- else expression1()
-
- def elementValueArrayInitializer() = {
- accept(LBRACE)
- val buf = new ListBuffer[Tree]
- def loop() =
- if (in.token != RBRACE) {
- buf += elementValue()
- if (in.token == COMMA) {
- in.nextToken
- loop()
- }
- }
- loop()
- accept(RBRACE)
- buf.toList
- }
- */
def modifiers(inInterface: Boolean): Modifiers = {
var flags: Long = Flags.JAVA
@@ -399,41 +347,41 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
while (true) {
in.token match {
case AT if (in.lookaheadToken != INTERFACE) =>
- in.nextToken
+ in.nextToken()
annotation()
case PUBLIC =>
isPackageAccess = false
- in.nextToken
+ in.nextToken()
case PROTECTED =>
flags |= Flags.PROTECTED
- in.nextToken
+ in.nextToken()
case PRIVATE =>
isPackageAccess = false
flags |= Flags.PRIVATE
- in.nextToken
+ in.nextToken()
case STATIC =>
flags |= Flags.STATIC
- in.nextToken
+ in.nextToken()
case ABSTRACT =>
flags |= Flags.ABSTRACT
- in.nextToken
+ in.nextToken()
case FINAL =>
flags |= Flags.FINAL
- in.nextToken
+ in.nextToken()
case DEFAULT =>
flags |= Flags.DEFAULTMETHOD
in.nextToken()
case NATIVE =>
addAnnot(NativeAttr)
- in.nextToken
+ in.nextToken()
case TRANSIENT =>
addAnnot(TransientAttr)
- in.nextToken
+ in.nextToken()
case VOLATILE =>
addAnnot(VolatileAttr)
- in.nextToken
+ in.nextToken()
case SYNCHRONIZED | STRICTFP =>
- in.nextToken
+ in.nextToken()
case _ =>
val privateWithin: TypeName =
if (isPackageAccess && !inInterface) thisPackageName
@@ -447,7 +395,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeParams(): List[TypeDef] =
if (in.token == LT) {
- in.nextToken
+ in.nextToken()
val tparams = repsep(typeParam, COMMA)
acceptClosingAngle()
tparams
@@ -456,27 +404,20 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeParam(): TypeDef =
atPos(in.currentPos) {
val name = identForType()
- val hi =
- if (in.token == EXTENDS) {
- in.nextToken
- bound()
- } else {
- scalaDot(tpnme.Any)
- }
- TypeDef(Modifiers(Flags.JAVA | Flags.DEFERRED | Flags.PARAM), name, List(),
- TypeBoundsTree(scalaDot(tpnme.Nothing), hi))
+ val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else EmptyTree
+ TypeDef(Modifiers(Flags.JAVA | Flags.DEFERRED | Flags.PARAM), name, Nil, TypeBoundsTree(EmptyTree, hi))
}
def bound(): Tree =
atPos(in.currentPos) {
val buf = ListBuffer[Tree](typ())
while (in.token == AMP) {
- in.nextToken
+ in.nextToken()
buf += typ()
}
val ts = buf.toList
if (ts.tail.isEmpty) ts.head
- else CompoundTypeTree(Template(ts, emptyValDef, List()))
+ else CompoundTypeTree(Template(ts, noSelfType, List()))
}
def formalParams(): List[ValDef] = {
@@ -487,21 +428,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
}
def formalParam(): ValDef = {
- if (in.token == FINAL) in.nextToken
+ if (in.token == FINAL) in.nextToken()
annotations()
var t = typ()
if (in.token == DOTDOTDOT) {
- in.nextToken
+ in.nextToken()
t = atPos(t.pos) {
AppliedTypeTree(scalaDot(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME), List(t))
}
}
- varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident())
+ varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident().toTermName)
}
def optThrows() {
if (in.token == THROWS) {
- in.nextToken
+ in.nextToken()
repsep(typ, COMMA)
}
}
@@ -520,8 +461,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val isVoid = in.token == VOID
var rtpt =
if (isVoid) {
- in.nextToken
- TypeTree(UnitClass.tpe) setPos in.pos
+ in.nextToken()
+ TypeTree(UnitTpe) setPos in.pos
} else typ()
var pos = in.currentPos
val rtptName = rtpt match {
@@ -555,9 +496,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), ListOfNil)
+ New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
}
- mods1 = mods1 withAnnotations List(annot)
+ mods1 = mods1 withAnnotations annot :: Nil
skipTo(SEMI)
accept(SEMI)
blankExpr
@@ -569,7 +510,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (inInterface) mods1 |= Flags.DEFERRED
List {
atPos(pos) {
- DefDef(mods1, name, tparams, List(vparams), rtpt, body)
+ DefDef(mods1, name.toTermName, tparams, List(vparams), rtpt, body)
}
}
} else {
@@ -591,18 +532,18 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
* these potential definitions are real or not.
*/
def fieldDecls(pos: Position, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = {
- val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name))
+ val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name.toTermName))
val maybe = new ListBuffer[Tree] // potential variable definitions.
while (in.token == COMMA) {
- in.nextToken
+ in.nextToken()
if (in.token == IDENTIFIER) { // if there's an ident after the comma ...
val name = ident()
if (in.token == ASSIGN || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
buf ++= maybe
- buf += varDecl(in.currentPos, mods, tpt.duplicate, name)
+ buf += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName)
maybe.clear()
} else if (in.token == COMMA) { // ... if there's a comma after the ident, it could be a real vardef or not.
- maybe += varDecl(in.currentPos, mods, tpt.duplicate, name)
+ maybe += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName)
} else { // ... if there's something else we were still in the initializer of the
// previous var def; skip to next comma or semicolon.
skipTo(COMMA, SEMI)
@@ -675,25 +616,25 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def collectIdents() : Int = {
if (in.token == ASTERISK) {
val starOffset = in.pos
- in.nextToken
+ in.nextToken()
buf += nme.WILDCARD
starOffset
} else {
val nameOffset = in.pos
buf += ident()
if (in.token == DOT) {
- in.nextToken
+ in.nextToken()
collectIdents()
} else nameOffset
}
}
- if (in.token == STATIC) in.nextToken
+ if (in.token == STATIC) in.nextToken()
else buf += nme.ROOTPKG
val lastnameOffset = collectIdents()
accept(SEMI)
val names = buf.toList
if (names.length < 2) {
- syntaxError(pos, "illegal import", false)
+ syntaxError(pos, "illegal import", skipIt = false)
List()
} else {
val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _))
@@ -708,7 +649,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def interfacesOpt() =
if (in.token == IMPLEMENTS) {
- in.nextToken
+ in.nextToken()
repsep(typ, COMMA)
} else {
List()
@@ -721,7 +662,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val tparams = typeParams()
val superclass =
if (in.token == EXTENDS) {
- in.nextToken
+ in.nextToken()
typ()
} else {
javaLangObject()
@@ -740,10 +681,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val tparams = typeParams()
val parents =
if (in.token == EXTENDS) {
- in.nextToken
+ in.nextToken()
repsep(typ, COMMA)
} else {
- List(javaLangObject)
+ List(javaLangObject())
}
val (statics, body) = typeBody(INTERFACE, name)
addCompanionObject(statics, atPos(pos) {
@@ -770,7 +711,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
skipAhead() // skip init block, we just assume we have seen only static
accept(RBRACE)
} else if (in.token == SEMI) {
- in.nextToken
+ in.nextToken()
} else {
if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.STATIC
val decls = memberDecl(mods, parentToken)
@@ -822,7 +763,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (in.token != RBRACE && in.token != SEMI && in.token != EOF) {
buf += enumConst(enumType)
if (in.token == COMMA) {
- in.nextToken
+ in.nextToken()
parseEnumConsts()
}
}
@@ -831,7 +772,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val consts = buf.toList
val (statics, body) =
if (in.token == SEMI) {
- in.nextToken
+ in.nextToken()
typeBodyDecls(ENUM, name)
} else {
(List(), List())
@@ -844,14 +785,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
blankExpr),
DefDef(
Modifiers(Flags.JAVA | Flags.STATIC), nme.valueOf, List(),
- List(List(makeParam("x", TypeTree(StringClass.tpe)))),
+ List(List(makeParam("x", TypeTree(StringTpe)))),
enumType,
blankExpr))
accept(RBRACE)
val superclazz =
AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
addCompanionObject(consts ::: statics ::: predefs, atPos(pos) {
- ClassDef(mods, name, List(),
+ ClassDef(mods | Flags.ENUM, name, List(),
makeTemplate(superclazz :: interfaces, body))
})
}
@@ -870,10 +811,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
skipAhead()
accept(RBRACE)
}
- // The STABLE flag is to signal to namer that this was read from a
- // java enum, and so should be given a Constant type (thereby making
- // it usable in annotations.)
- ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name, enumType, blankExpr)
+ ValDef(Modifiers(Flags.ENUM | Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr)
}
}
@@ -882,13 +820,13 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case INTERFACE => interfaceDecl(mods)
case AT => annotationDecl(mods)
case CLASS => classDecl(mods)
- case _ => in.nextToken; syntaxError("illegal start of type declaration", true); List(errorTypeTree)
+ case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree)
}
/** CompilationUnit ::= [package QualId semi] TopStatSeq
*/
def compilationUnit(): Tree = {
- var pos = in.currentPos;
+ var pos = in.currentPos
val pkg: RefTree =
if (in.token == AT || in.token == PACKAGE) {
annotations()
@@ -908,9 +846,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
while (in.token == IMPORT)
buf ++= importDecl()
while (in.token != EOF && in.token != RBRACE) {
- while (in.token == SEMI) in.nextToken
+ while (in.token == SEMI) in.nextToken()
if (in.token != EOF)
- buf ++= typeDecl(modifiers(false))
+ buf ++= typeDecl(modifiers(inInterface = false))
}
accept(EOF)
atPos(pos) {
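
    Many call sites in this file switch from bare boolean literals to named arguments
    (`skipIt = true`, `inInterface = false`). A tiny self-contained illustration of why
    (the `report` helper is invented):

    def report(msg: String, fatal: Boolean, skipIt: Boolean): Unit =
      println(s"$msg (fatal=$fatal, skip=$skipIt)")

    report("';' expected", fatal = false, skipIt = true)   // the flags read at the call site
    // versus: report("';' expected", false, true)         // which flag is which?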
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index e230585a8b..b7ea70e2c7 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -10,7 +10,7 @@ import scala.tools.nsc.util.JavaCharArrayReader
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import JavaTokens._
-import scala.annotation.switch
+import scala.annotation.{ switch, tailrec }
import scala.language.implicitConversions
// Todo merge these better with Scanners
@@ -57,23 +57,14 @@ trait JavaScanners extends ast.parser.ScannersCommon {
/** ...
*/
abstract class AbstractJavaScanner extends AbstractJavaTokenData {
- implicit def p2g(pos: Position): ScanPosition
implicit def g2p(pos: ScanPosition): Position
- /** the last error position
- */
- var errpos: ScanPosition
- var lastPos: ScanPosition
- def skipToken: ScanPosition
def nextToken(): Unit
def next: AbstractJavaTokenData
def intVal(negated: Boolean): Long
def floatVal(negated: Boolean): Double
- def intVal: Long = intVal(false)
- def floatVal: Double = floatVal(false)
- //def token2string(token : Int) : String = configuration.token2string(token)
- /** return recent scala doc, if any */
- def flushDoc: DocComment
+ def intVal: Long = intVal(negated = false)
+ def floatVal: Double = floatVal(negated = false)
def currentPos: Position
}
@@ -227,17 +218,9 @@ trait JavaScanners extends ast.parser.ScannersCommon {
abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon {
override def intVal = super.intVal// todo: needed?
override def floatVal = super.floatVal
- override var errpos: Int = NoPos
def currentPos: Position = g2p(pos - 1)
-
var in: JavaCharArrayReader = _
- def dup: JavaScanner = {
- val dup = clone().asInstanceOf[JavaScanner]
- dup.in = in.dup
- dup
- }
-
/** character buffer for literals
*/
val cbuf = new StringBuilder()
@@ -252,22 +235,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
cbuf.setLength(0)
}
- /** buffer for the documentation comment
- */
- var docBuffer: StringBuilder = null
-
- def flushDoc: DocComment = {
- val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null
- docBuffer = null
- ret
- }
-
- /** add the given character to the documentation buffer
- */
- protected def putDocChar(c: Char) {
- if (docBuffer ne null) docBuffer.append(c)
- }
-
private class JavaTokenData0 extends JavaTokenData
/** we need one token lookahead
@@ -277,13 +244,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
// Get next token ------------------------------------------------------------
- /** read next token and return last position
- */
- def skipToken: Int = {
- val p = pos; nextToken
- p - 1
- }
-
def nextToken() {
if (next.token == EMPTY) {
fetchToken()
@@ -296,7 +256,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def lookaheadToken: Int = {
prev copyFrom this
- nextToken
+ nextToken()
val t = token
next copyFrom this
this copyFrom prev
@@ -308,11 +268,10 @@ trait JavaScanners extends ast.parser.ScannersCommon {
private def fetchToken() {
if (token == EOF) return
lastPos = in.cpos - 1
- //var index = bp
while (true) {
in.ch match {
case ' ' | '\t' | CR | LF | FF =>
- in.next
+ in.next()
case _ =>
pos = in.cpos
(in.ch: @switch) match {
@@ -329,47 +288,47 @@ trait JavaScanners extends ast.parser.ScannersCommon {
'u' | 'v' | 'w' | 'x' | 'y' |
'z' =>
putChar(in.ch)
- in.next
- getIdentRest
+ in.next()
+ getIdentRest()
return
case '0' =>
putChar(in.ch)
- in.next
+ in.next()
if (in.ch == 'x' || in.ch == 'X') {
- in.next
+ in.next()
base = 16
} else {
base = 8
}
- getNumber
+ getNumber()
return
case '1' | '2' | '3' | '4' |
'5' | '6' | '7' | '8' | '9' =>
base = 10
- getNumber
+ getNumber()
return
case '\"' =>
- in.next
+ in.next()
while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) {
getlitch()
}
if (in.ch == '\"') {
token = STRINGLIT
setName()
- in.next
+ in.next()
} else {
syntaxError("unclosed string literal")
}
return
case '\'' =>
- in.next
+ in.next()
getlitch()
if (in.ch == '\'') {
- in.next
+ in.next()
token = CHARLIT
setName()
} else {
@@ -379,31 +338,31 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case '=' =>
token = ASSIGN
- in.next
+ in.next()
if (in.ch == '=') {
token = EQEQ
- in.next
+ in.next()
}
return
case '>' =>
token = GT
- in.next
+ in.next()
if (in.ch == '=') {
token = GTEQ
- in.next
+ in.next()
} else if (in.ch == '>') {
token = GTGT
- in.next
+ in.next()
if (in.ch == '=') {
token = GTGTEQ
- in.next
+ in.next()
} else if (in.ch == '>') {
token = GTGTGT
- in.next
+ in.next()
if (in.ch == '=') {
token = GTGTGTEQ
- in.next
+ in.next()
}
}
}
@@ -411,145 +370,145 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case '<' =>
token = LT
- in.next
+ in.next()
if (in.ch == '=') {
token = LTEQ
- in.next
+ in.next()
} else if (in.ch == '<') {
token = LTLT
- in.next
+ in.next()
if (in.ch == '=') {
token = LTLTEQ
- in.next
+ in.next()
}
}
return
case '!' =>
token = BANG
- in.next
+ in.next()
if (in.ch == '=') {
token = BANGEQ
- in.next
+ in.next()
}
return
case '~' =>
token = TILDE
- in.next
+ in.next()
return
case '?' =>
token = QMARK
- in.next
+ in.next()
return
case ':' =>
token = COLON
- in.next
+ in.next()
return
case '@' =>
token = AT
- in.next
+ in.next()
return
case '&' =>
token = AMP
- in.next
+ in.next()
if (in.ch == '&') {
token = AMPAMP
- in.next
+ in.next()
} else if (in.ch == '=') {
token = AMPEQ
- in.next
+ in.next()
}
return
case '|' =>
token = BAR
- in.next
+ in.next()
if (in.ch == '|') {
token = BARBAR
- in.next
+ in.next()
} else if (in.ch == '=') {
token = BAREQ
- in.next
+ in.next()
}
return
case '+' =>
token = PLUS
- in.next
+ in.next()
if (in.ch == '+') {
token = PLUSPLUS
- in.next
+ in.next()
} else if (in.ch == '=') {
token = PLUSEQ
- in.next
+ in.next()
}
return
case '-' =>
token = MINUS
- in.next
+ in.next()
if (in.ch == '-') {
token = MINUSMINUS
- in.next
+ in.next()
} else if (in.ch == '=') {
token = MINUSEQ
- in.next
+ in.next()
}
return
case '*' =>
token = ASTERISK
- in.next
+ in.next()
if (in.ch == '=') {
token = ASTERISKEQ
- in.next
+ in.next()
}
return
case '/' =>
- in.next
+ in.next()
if (!skipComment()) {
token = SLASH
- in.next
+ in.next()
if (in.ch == '=') {
token = SLASHEQ
- in.next
+ in.next()
}
return
}
case '^' =>
token = HAT
- in.next
+ in.next()
if (in.ch == '=') {
token = HATEQ
- in.next
+ in.next()
}
return
case '%' =>
token = PERCENT
- in.next
+ in.next()
if (in.ch == '=') {
token = PERCENTEQ
- in.next
+ in.next()
}
return
case '.' =>
token = DOT
- in.next
+ in.next()
if ('0' <= in.ch && in.ch <= '9') {
- putChar('.'); getFraction
+ putChar('.'); getFraction()
} else if (in.ch == '.') {
- in.next
+ in.next()
if (in.ch == '.') {
- in.next
+ in.next()
token = DOTDOTDOT
} else syntaxError("`.' character expected")
}
@@ -557,60 +516,60 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case ';' =>
token = SEMI
- in.next
+ in.next()
return
case ',' =>
token = COMMA
- in.next
+ in.next()
return
case '(' =>
token = LPAREN
- in.next
+ in.next()
return
case '{' =>
token = LBRACE
- in.next
+ in.next()
return
case ')' =>
token = RPAREN
- in.next
+ in.next()
return
case '}' =>
token = RBRACE
- in.next
+ in.next()
return
case '[' =>
token = LBRACKET
- in.next
+ in.next()
return
case ']' =>
token = RBRACKET
- in.next
+ in.next()
return
case SU =>
if (!in.hasNext) token = EOF
else {
syntaxError("illegal character")
- in.next
+ in.next()
}
return
case _ =>
if (Character.isUnicodeIdentifierStart(in.ch)) {
putChar(in.ch)
- in.next
- getIdentRest
+ in.next()
+ getIdentRest()
} else {
syntaxError("illegal character: "+in.ch.toInt)
- in.next
+ in.next()
}
return
}
@@ -618,33 +577,20 @@ trait JavaScanners extends ast.parser.ScannersCommon {
}
}
- private def skipComment(): Boolean = {
- if (in.ch == '/') {
- do {
- in.next
- } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
- true
- } else if (in.ch == '*') {
- docBuffer = null
- in.next
- val scalaDoc = ("/**", "*/")
- if (in.ch == '*' && forScaladoc)
- docBuffer = new StringBuilder(scalaDoc._1)
- do {
- do {
- if (in.ch != '*' && in.ch != SU) {
- in.next; putDocChar(in.ch)
- }
- } while (in.ch != '*' && in.ch != SU)
- while (in.ch == '*') {
- in.next; putDocChar(in.ch)
- }
- } while (in.ch != '/' && in.ch != SU)
- if (in.ch == '/') in.next
- else incompleteInputError("unclosed comment")
- true
- } else {
- false
+ protected def skipComment(): Boolean = {
+ @tailrec def skipLineComment(): Unit = in.ch match {
+ case CR | LF | SU =>
+ case _ => in.next; skipLineComment()
+ }
+ @tailrec def skipJavaComment(): Unit = in.ch match {
+ case SU => incompleteInputError("unclosed comment")
+ case '*' => in.next; if (in.ch == '/') in.next else skipJavaComment()
+ case _ => in.next; skipJavaComment()
+ }
+ in.ch match {
+ case '/' => in.next ; skipLineComment() ; true
+ case '*' => in.next ; skipJavaComment() ; true
+ case _ => false
}
}
@@ -668,12 +614,12 @@ trait JavaScanners extends ast.parser.ScannersCommon {
'0' | '1' | '2' | '3' | '4' |
'5' | '6' | '7' | '8' | '9' =>
putChar(in.ch)
- in.next
+ in.next()
case '_' =>
putChar(in.ch)
- in.next
- getIdentRest
+ in.next()
+ getIdentRest()
return
case SU =>
setName()
@@ -682,7 +628,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case _ =>
if (Character.isUnicodeIdentifierPart(in.ch)) {
putChar(in.ch)
- in.next
+ in.next()
} else {
setName()
token = JavaScannerConfiguration.name2token(name)
@@ -698,17 +644,17 @@ trait JavaScanners extends ast.parser.ScannersCommon {
*/
protected def getlitch() =
if (in.ch == '\\') {
- in.next
+ in.next()
if ('0' <= in.ch && in.ch <= '7') {
val leadch: Char = in.ch
var oct: Int = digit2int(in.ch, 8)
- in.next
+ in.next()
if ('0' <= in.ch && in.ch <= '7') {
oct = oct * 8 + digit2int(in.ch, 8)
- in.next
+ in.next()
if (leadch <= '3' && '0' <= in.ch && in.ch <= '7') {
oct = oct * 8 + digit2int(in.ch, 8)
- in.next
+ in.next()
}
}
putChar(oct.asInstanceOf[Char])
@@ -726,11 +672,11 @@ trait JavaScanners extends ast.parser.ScannersCommon {
syntaxError(in.cpos - 1, "invalid escape character")
putChar(in.ch)
}
- in.next
+ in.next()
}
} else {
putChar(in.ch)
- in.next
+ in.next()
}
/** read fractional part and exponent of floating point number
@@ -740,35 +686,35 @@ trait JavaScanners extends ast.parser.ScannersCommon {
token = DOUBLELIT
while ('0' <= in.ch && in.ch <= '9') {
putChar(in.ch)
- in.next
+ in.next()
}
if (in.ch == 'e' || in.ch == 'E') {
val lookahead = in.copy
- lookahead.next
+ lookahead.next()
if (lookahead.ch == '+' || lookahead.ch == '-') {
- lookahead.next
+ lookahead.next()
}
if ('0' <= lookahead.ch && lookahead.ch <= '9') {
putChar(in.ch)
- in.next
+ in.next()
if (in.ch == '+' || in.ch == '-') {
putChar(in.ch)
- in.next
+ in.next()
}
while ('0' <= in.ch && in.ch <= '9') {
putChar(in.ch)
- in.next
+ in.next()
}
}
token = DOUBLELIT
}
if (in.ch == 'd' || in.ch == 'D') {
putChar(in.ch)
- in.next
+ in.next()
token = DOUBLELIT
} else if (in.ch == 'f' || in.ch == 'F') {
putChar(in.ch)
- in.next
+ in.next()
token = FLOATLIT
}
setName()
@@ -778,7 +724,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
*/
def intVal(negated: Boolean): Long = {
if (token == CHARLIT && !negated) {
- if (name.length > 0) name.charAt(0) else 0
+ if (name.length > 0) name.charAt(0).toLong else 0
} else {
var value: Long = 0
val divider = if (base == 10) 1 else 2
@@ -828,23 +774,23 @@ trait JavaScanners extends ast.parser.ScannersCommon {
protected def getNumber() {
while (digit2int(in.ch, if (base < 10) 10 else base) >= 0) {
putChar(in.ch)
- in.next
+ in.next()
}
token = INTLIT
if (base <= 10 && in.ch == '.') {
val lookahead = in.copy
- lookahead.next
+ lookahead.next()
lookahead.ch match {
case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' |
'8' | '9' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' =>
putChar(in.ch)
- in.next
- return getFraction
+ in.next()
+ return getFraction()
case _ =>
if (!isIdentifierStart(lookahead.ch)) {
putChar(in.ch)
- in.next
- return getFraction
+ in.next()
+ return getFraction()
}
}
}
@@ -852,11 +798,11 @@ trait JavaScanners extends ast.parser.ScannersCommon {
(in.ch == 'e' || in.ch == 'E' ||
in.ch == 'f' || in.ch == 'F' ||
in.ch == 'd' || in.ch == 'D')) {
- return getFraction
+ return getFraction()
}
setName()
if (in.ch == 'l' || in.ch == 'L') {
- in.next
+ in.next()
token = LONGLIT
}
}
@@ -868,7 +814,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def syntaxError(pos: Int, msg: String) {
error(pos, msg)
token = ERROR
- errpos = pos
}
/** generate an error at the current token position
@@ -879,7 +824,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def incompleteInputError(msg: String) {
incompleteInputError(pos, msg)
token = EOF
- errpos = pos
}
override def toString() = token match {
@@ -908,21 +852,17 @@ trait JavaScanners extends ast.parser.ScannersCommon {
/** INIT: read lookahead character and token.
*/
def init() {
- in.next
- nextToken
+ in.next()
+ nextToken()
}
}
- /** ...
- */
class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner {
in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError)
- init
- def warning(pos: Int, msg: String) = unit.warning(pos, msg)
+ init()
def error (pos: Int, msg: String) = unit. error(pos, msg)
def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg)
def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg)
- implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1
- implicit def g2p(pos: Int): Position = new OffsetPosition(unit.source, pos)
+ implicit def g2p(pos: Int): Position = Position.offset(unit.source, pos)
}
}
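
    The rewritten `skipComment` above replaces nested do/while loops with small @tailrec helpers.
    The same shape on a plain string, as a self-contained sketch (the real scanner advances a
    JavaCharArrayReader rather than an index):

    import scala.annotation.tailrec

    def skipLineComment(s: String, start: Int): Int = {
      @tailrec def loop(i: Int): Int =
        if (i >= s.length || s.charAt(i) == '\n') i   // stop at end of line or end of input
        else loop(i + 1)
      loop(start)
    }

    skipLineComment("// a comment\nval x = 1", 0)   // == 12, the index of the newline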
diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
index a562de291d..953a3c6d82 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
@@ -68,9 +68,6 @@ object JavaTokens extends ast.parser.Tokens {
final val VOLATILE = 68
final val WHILE = 69
- def isKeyword(code : Int) =
- code >= ABSTRACT && code <= WHILE
-
/** special symbols */
final val COMMA = 70
final val SEMI = 71
@@ -115,9 +112,6 @@ object JavaTokens extends ast.parser.Tokens {
final val GTGTEQ = 113
final val GTGTGTEQ = 114
- def isSymbol(code : Int) =
- code >= COMMA && code <= GTGTGTEQ
-
/** parenthesis */
final val LPAREN = 115
final val RPAREN = 116
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
deleted file mode 100644
index 5ca9fd5062..0000000000
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import ast.{ Printers, Trees }
-import java.io.{ StringWriter, PrintWriter }
-import scala.annotation.elidable
-import scala.language.postfixOps
-
-/** Ancillary bits of ParallelMatching which are better off
- * out of the way.
- */
-trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
-
- import global.{ typer => _, _ }
- import CODE._
-
- /** Debugging support: enable with -Ypmat-debug **/
- private final def trace = settings.Ypmatdebug.value
-
- def impossible: Nothing = abort("this never happens")
-
- def treeCollect[T](tree: Tree, pf: PartialFunction[Tree, T]): List[T] =
- tree filter (pf isDefinedAt _) map (x => pf(x))
-
- object Types {
- import definitions._
-
- val subrangeTypes = Set[Symbol](ByteClass, ShortClass, CharClass, IntClass)
-
- implicit class RichType(undecodedTpe: Type) {
- def tpe = decodedEqualsType(undecodedTpe)
- def isAnyRef = tpe <:< AnyRefClass.tpe
-
- // These tests for final classes can inspect the typeSymbol
- private def is(s: Symbol) = tpe.typeSymbol eq s
- def isByte = is(ByteClass)
- def isShort = is(ShortClass)
- def isInt = is(IntClass)
- def isChar = is(CharClass)
- def isBoolean = is(BooleanClass)
- def isNothing = is(NothingClass)
- def isArray = is(ArrayClass)
- }
- }
-
- object Debug {
- def typeToString(t: Type): String = t match {
- case NoType => "x"
- case x => x.toString
- }
- def symbolToString(s: Symbol): String = s match {
- case x => x.toString
- }
- def treeToString(t: Tree): String = treeInfo.unbind(t) match {
- case EmptyTree => "?"
- case WILD() => "_"
- case Literal(Constant(x)) => "LIT(%s)".format(x)
- case Apply(fn, args) => "%s(%s)".format(treeToString(fn), args map treeToString mkString ",")
- case Typed(expr, tpt) => "%s: %s".format(treeToString(expr), treeToString(tpt))
- case x => x.toString + " (" + x.getClass + ")"
- }
-
- // Formatting for some error messages
- private val NPAD = 15
- def pad(s: String): String = "%%%ds" format (NPAD-1) format s
- def pad(s: Any): String = pad(s match {
- case x: Tree => treeToString(x)
- case x => x.toString
- })
-
- // pretty print for debugging
- def pp(x: Any): String = pp(x, false)
- def pp(x: Any, newlines: Boolean): String = {
- val stripStrings = List("""java\.lang\.""", """\$iw\.""")
-
- def clean(s: String): String =
- stripStrings.foldLeft(s)((s, x) => s.replaceAll(x, ""))
-
- def pplist(xs: List[Any]): String =
- if (newlines) (xs map (" " + _ + "\n")).mkString("\n", "", "")
- else xs.mkString("(", ", ", ")")
-
- pp(x match {
- case s: String => return clean(s)
- case x: Tree => asCompactString(x)
- case xs: List[_] => pplist(xs map pp)
- case x: Tuple2[_,_] => "%s -> %s".format(pp(x._1), pp(x._2))
- case x => x.toString
- })
- }
-
- @elidable(elidable.FINE) def TRACE(f: String, xs: Any*): Unit = {
- if (trace) {
- val msg = if (xs.isEmpty) f else f.format(xs map pp: _*)
- println(msg)
- }
- }
- @elidable(elidable.FINE) def traceCategory(cat: String, f: String, xs: Any*) = {
- if (trace)
- TRACE("[" + """%10s""".format(cat) + "] " + f, xs: _*)
- }
- def tracing[T](s: String)(x: T): T = {
- if (trace)
- println(("[" + """%10s""".format(s) + "] %s") format pp(x))
-
- x
- }
- private[nsc] def printing[T](fmt: String, xs: Any*)(x: T): T = {
- println(fmt.format(xs: _*) + " == " + x)
- x
- }
- private[nsc] def debugging[T](fmt: String, xs: Any*)(x: T): T = {
- if (settings.debug.value) printing(fmt, xs: _*)(x)
- else x
- }
-
- def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n"
- def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString
- }
-
- /** Drops the 'i'th element of a list.
- */
- def dropIndex[T](xs: List[T], n: Int) = {
- val (l1, l2) = xs splitAt n
- l1 ::: (l2 drop 1)
- }
-
- /** Extract the nth element of a list and return it and the remainder.
- */
- def extractIndex[T](xs: List[T], n: Int): (T, List[T]) =
- (xs(n), dropIndex(xs, n))
-}
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
deleted file mode 100644
index daefe4c545..0000000000
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ /dev/null
@@ -1,259 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import symtab.Flags
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-trait Matrix extends MatrixAdditions {
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import analyzer.Typer
- import CODE._
- import Debug._
- import Flags.{ SYNTHETIC, MUTABLE }
-
- private[matching] val NO_EXHAUSTIVE = Flags.TRANS_FLAG
-
- /** Translation of match expressions.
- *
- * `p`: pattern
- * `g`: guard
- * `bx`: body index
- *
- * internal representation is (tvars:List[Symbol], rows:List[Row])
- *
- * tmp1 tmp_n
- * Row( p_11 ... p_1n g_1 b_1 ) + subst
- *
- * Row( p_m1 ... p_mn g_m b_m ) + subst
- *
- * Implementation based on the algorithm described in
- *
- * "A Term Pattern-Match Compiler Inspired by Finite Automata Theory"
- * Mikael Pettersson
- * ftp://ftp.ida.liu.se/pub/labs/pelab/papers/cc92pmc.ps.gz
- *
- * @author Burak Emir
- */
-
- /** "The Mixture Rule"
-
- {v=pat1, pats1 .. } {q1}
- match {.. } {..}
- {v=patn, patsn .. } {qn}
-
- This is the real work-horse of the algorithm. There is some column whose top-most pattern is a
- constructor. (For simplicity, it is depicted above as the left-most column, but any column will do.)
- The goal is to build a test state with the variable v and some outgoing arcs (one for each
- constructor and possibly a default arc). For each constructor in the selected column, its arc is
- defined as follows:
-
- Let {i1,...,ij} be the row indices of the patterns in the column that match c. Since the
- patterns are viewed as regular expressions, this will be the indices of the patterns that either
- have the same constructor c, or are wildcards.
-
- Let {pat1,...,patj} be the patterns in the column corresponding to the indices computed
- above, and let n be the arity of the constructor c, i.e. the number of sub-patterns it has. For
- each pati, its n sub-patterns are extracted; if pati is a wildcard, n wildcards are produced
- instead, each tagged with the right path variable. This results in a pattern matrix with n
- columns and j rows. This matrix is then appended to the result of selecting, from each column
- in the rest of the original matrix, those rows whose indices are in {i1,...,ij}. Finally
- the indices are used to select the corresponding final states that go with these rows. Note
- that the order of the indices is significant; selected rows do not change their relative orders.
- The arc for the constructor c is now defined as (c', state), where c' is c with any
- immediate sub-patterns replaced by their path variables (thus c' is a simple pattern), and
- state is the result of recursively applying match to the new matrix and the new sequence
- of final states.
-
- Finally, the possibility for matching failure is considered. If the set of constructors is exhaustive,
- then no more arcs are computed. Otherwise, a default arc (_, state) is the last arc. If there are
- any wildcard patterns in the selected column, then their rows are selected from the rest of the
- matrix and the final states, and the state is the result of applying match to the new matrix and
- states. Otherwise, the error state is used after its reference count has been incremented.
- **/
-
- /** Handles all translation of pattern matching.
- */
- def handlePattern(
- selector: Tree, // tree being matched upon (called scrutinee after this)
- cases: List[CaseDef], // list of cases in the match
- isChecked: Boolean, // whether exhaustiveness checking is enabled (disabled with @unchecked)
- context: MatrixContext): Tree =
- {
- import context._
- TRACE("handlePattern", "(%s: %s) match { %s cases }", selector, selector.tpe, cases.size)
-
- val matrixInit: MatrixInit = {
- val v = copyVar(selector, isChecked, selector.tpe, "temp")
- MatrixInit(List(v), cases, atPos(selector.pos)(MATCHERROR(v.ident)))
- }
- val matrix = new MatchMatrix(context) { lazy val data = matrixInit }
- val mch = typer typed matrix.expansion.toTree
- val dfatree = typer typed Block(matrix.data.valDefs, mch)
-
- // redundancy check
- matrix.targets filter (_.unreached) foreach (cs => cunit.error(cs.body.pos, "unreachable code"))
- // optimize performs squeezing and resets any remaining NO_EXHAUSTIVE
- tracing("handlePattern")(matrix optimize dfatree)
- }
-
- case class MatrixContext(
- cunit: CompilationUnit, // current unit
- handleOuter: Tree => Tree, // for outer pointer
- typer: Typer, // a local typer
- owner: Symbol, // the current owner
- matchResultType: Type) // the expected result type of the whole match
- extends Squeezer
- {
- private def ifNull[T](x: T, alt: T) = if (x == null) alt else x
-
- // NO_EXHAUSTIVE communicates there should be no exhaustiveness checking
- private def flags(checked: Boolean) = if (checked) Nil else List(NO_EXHAUSTIVE)
-
- // Recording the symbols of the synthetics we create so we don't go clearing
- // anyone else's mutable flags.
- private val _syntheticSyms = mutable.HashSet[Symbol]()
- def clearSyntheticSyms() = {
- _syntheticSyms foreach (_ resetFlag (NO_EXHAUSTIVE|MUTABLE))
- debuglog("Cleared NO_EXHAUSTIVE/MUTABLE on " + _syntheticSyms.size + " synthetic symbols.")
- _syntheticSyms.clear()
- }
- def recordSyntheticSym(sym: Symbol): Symbol = {
- _syntheticSyms += sym
- if (_syntheticSyms.size > 25000) {
- cunit.error(owner.pos, "Sanity check failed: over 25000 symbols created for pattern match.")
- abort("This is a bug in the pattern matcher.")
- }
- sym
- }
-
- case class MatrixInit(
- roots: List[PatternVar],
- cases: List[CaseDef],
- default: Tree
- ) {
- def tvars = roots map (_.lhs)
- def valDefs = roots map (_.valDef)
- override def toString() = "MatrixInit(roots = %s, %d cases)".format(pp(roots), cases.size)
- }
-
- implicit def pvlist2pvgroup(xs: List[PatternVar]): PatternVarGroup =
- PatternVarGroup(xs)
-
- object PatternVarGroup {
- def apply(xs: PatternVar*) = new PatternVarGroup(xs.toList)
- def apply(xs: List[PatternVar]) = new PatternVarGroup(xs)
-
- // XXX - transitional
- def fromBindings(vlist: List[Binding], freeVars: List[Symbol] = Nil) = {
- def vmap(v: Symbol): Option[Binding] = vlist find (_.pvar eq v)
- val info =
- if (freeVars.isEmpty) vlist
- else (freeVars map vmap).flatten
-
- val xs =
- for (Binding(lhs, rhs) <- info) yield
- new PatternVar(lhs, Ident(rhs) setType lhs.tpe, !(rhs hasFlag NO_EXHAUSTIVE))
-
- new PatternVarGroup(xs)
- }
- }
-
- val emptyPatternVarGroup = PatternVarGroup()
- class PatternVarGroup(val pvs: List[PatternVar]) {
- def syms = pvs map (_.sym)
- def valDefs = pvs map (_.valDef)
- def idents = pvs map (_.ident)
-
- def extractIndex(index: Int): (PatternVar, PatternVarGroup) = {
- val (t, ts) = self.extractIndex(pvs, index)
- (t, PatternVarGroup(ts))
- }
-
- def isEmpty = pvs.isEmpty
- def size = pvs.size
- def head = pvs.head
- def ::(t: PatternVar) = PatternVarGroup(t :: pvs)
- def :::(ts: List[PatternVar]) = PatternVarGroup(ts ::: pvs)
- def ++(other: PatternVarGroup) = PatternVarGroup(pvs ::: other.pvs)
-
- def apply(i: Int) = pvs(i)
- def zipWithIndex = pvs.zipWithIndex
- def indices = pvs.indices
- def map[T](f: PatternVar => T) = pvs map f
- def filter(p: PatternVar => Boolean) = PatternVarGroup(pvs filter p)
-
- override def toString() = pp(pvs)
- }
-
- /** Every temporary variable allocated is put in a PatternVar.
- */
- class PatternVar(val lhs: Symbol, val rhs: Tree, val checked: Boolean) {
- def sym = lhs
- def tpe = lhs.tpe
- if (checked)
- lhs resetFlag NO_EXHAUSTIVE
- else
- lhs setFlag NO_EXHAUSTIVE
-
- // See #1427 for an example of a crash which occurs unless we retype:
- // in that instance there is an existential in the pattern.
- lazy val ident = typer typed Ident(lhs)
- lazy val valDef = typer typedValDef ValDef(lhs, rhs)
-
- override def toString() = "%s: %s = %s".format(lhs, tpe, rhs)
- }
-
- /** Given a tree, creates a new synthetic variable of the same type
- * and assigns the tree to it.
- */
- def copyVar(
- root: Tree,
- checked: Boolean,
- _tpe: Type = null,
- label: String = "temp"): PatternVar =
- {
- val tpe = ifNull(_tpe, root.tpe)
- val name = cunit.freshTermName(label)
- val sym = newVar(root.pos, tpe, flags(checked), name)
-
- tracing("copy")(new PatternVar(sym, root, checked))
- }
-
- /** Creates a new synthetic variable of the specified type and
- * assigns the result of f(symbol) to it.
- */
- def createVar(tpe: Type, f: Symbol => Tree, checked: Boolean) = {
- val lhs = newVar(owner.pos, tpe, flags(checked))
- val rhs = f(lhs)
-
- tracing("create")(new PatternVar(lhs, rhs, checked))
- }
- def createLazy(tpe: Type, f: Symbol => Tree, checked: Boolean) = {
- val lhs = newVar(owner.pos, tpe, Flags.LAZY :: flags(checked))
- val rhs = f(lhs)
-
- tracing("createLazy")(new PatternVar(lhs, rhs, checked))
- }
-
- private def newVar(
- pos: Position,
- tpe: Type,
- flags: List[Long] = Nil,
- name: TermName = null): Symbol =
- {
- val n = if (name == null) cunit.freshTermName("temp") else name
- // careful: pos has special meaning
- val flagsLong = (SYNTHETIC.toLong /: flags)(_|_)
- recordSyntheticSym(owner.newVariable(n, pos, flagsLong) setInfo tpe)
- }
- }
-}
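
The MatrixContext deleted above allocates a fresh synthetic temp (copyVar/newVar) for the scrutinee and for every extracted sub-value, records each synthetic symbol, and aborts past a 25000-symbol sanity limit (recordSyntheticSym). Purely as an illustration, here is a minimal standalone sketch of that bookkeeping; the names are made up and only the standard library is used, not the compiler API:

object TempAllocSketch {
  import scala.collection.mutable.ListBuffer

  final case class Temp(name: String, rhs: String)

  // Allocates fresh, uniquely named temps and enforces the same kind of
  // sanity limit as recordSyntheticSym above.
  final class Allocator(limit: Int = 25000) {
    private var counter = 0
    private val created = ListBuffer[Temp]()

    def fresh(label: String, rhs: String): Temp = {
      require(created.size < limit, s"over $limit synthetic temps for one pattern match")
      counter += 1
      val t = Temp(s"$label$$$counter", rhs)
      created += t
      t
    }
    def allTemps: List[Temp] = created.toList
  }

  def main(args: Array[String]): Unit = {
    val alloc = new Allocator()
    val scrut = alloc.fresh("temp", "xs")                 // copy of the scrutinee
    val head  = alloc.fresh("temp", scrut.name + ".head") // accessor temp
    alloc.allTemps foreach println
  }
}
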
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
deleted file mode 100644
index 7220253003..0000000000
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ /dev/null
@@ -1,193 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import PartialFunction._
-
-/** Traits which are mixed into MatchMatrix, but separated out as
- * (somewhat) independent components to keep them on the sidelines.
- */
-trait MatrixAdditions extends ast.TreeDSL {
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import symtab.Flags
- import CODE._
- import Debug._
- import treeInfo._
- import definitions.{ isPrimitiveValueClass }
-
- /** The Squeezer, responsible for all the squeezing.
- */
- private[matching] trait Squeezer {
- self: MatrixContext =>
-
- private val settings_squeeze = !settings.Ynosqueeze.value
-
- class RefTraverser(vd: ValDef) extends Traverser {
- private val targetSymbol = vd.symbol
- private var safeRefs = 0
- private var isSafe = true
-
- def canDrop = isSafe && safeRefs == 0
- def canInline = isSafe && safeRefs == 1
-
- override def traverse(tree: Tree): Unit = tree match {
- case t: Ident if t.symbol eq targetSymbol =>
- // target symbol's owner should match currentOwner
- if (targetSymbol.owner == currentOwner) safeRefs += 1
- else isSafe = false
-
- case LabelDef(_, params, rhs) =>
- if (params exists (_.symbol eq targetSymbol)) // cannot substitute this one
- isSafe = false
-
- traverse(rhs)
- case _ if safeRefs > 1 => ()
- case _ =>
- super.traverse(tree)
- }
- }
-
- /** Compresses multiple Blocks. */
- private def combineBlocks(stats: List[Tree], expr: Tree): Tree = expr match {
- case Block(stats1, expr1) if stats.isEmpty => combineBlocks(stats1, expr1)
- case _ => Block(stats, expr)
- }
- def squeezedBlock(vds: List[Tree], exp: Tree): Tree =
- if (settings_squeeze) combineBlocks(Nil, squeezedBlock1(vds, exp))
- else combineBlocks(vds, exp)
-
- private def squeezedBlock1(vds: List[Tree], exp: Tree): Tree = {
- lazy val squeezedTail = squeezedBlock(vds.tail, exp)
- def default = squeezedTail match {
- case Block(vds2, exp2) => Block(vds.head :: vds2, exp2)
- case exp2 => Block(vds.head :: Nil, exp2)
- }
-
- if (vds.isEmpty) exp
- else vds.head match {
- case vd: ValDef =>
- val rt = new RefTraverser(vd)
- rt.atOwner(owner)(rt traverse squeezedTail)
-
- if (rt.canDrop)
- squeezedTail
- else if (isConstantType(vd.symbol.tpe) || rt.canInline)
- new TreeSubstituter(List(vd.symbol), List(vd.rhs)) transform squeezedTail
- else
- default
- case _ => default
- }
- }
- }
-
- /** The Optimizer, responsible for some of the optimizing.
- */
- private[matching] trait MatchMatrixOptimizer {
- self: MatchMatrix =>
-
- import self.context._
-
- final def optimize(tree: Tree): Tree = {
- // Uses treeInfo extractors rather than looking at trees directly
- // because the many Blocks obscure our vision.
- object lxtt extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Block(stats, ld @ LabelDef(_, _, body)) if targets exists (_ shouldInline ld.symbol) =>
- squeezedBlock(transformStats(stats, currentOwner), body)
- case IsIf(cond, IsTrue(), IsFalse()) =>
- transform(cond)
- case IsIf(cond1, IsIf(cond2, thenp, elsep1), elsep2) if elsep1 equalsStructure elsep2 =>
- transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, elsep2))
- case If(cond1, IsIf(cond2, thenp, Apply(jmp, Nil)), ld: LabelDef) if jmp.symbol eq ld.symbol =>
- transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, ld))
- case _ =>
- super.transform(tree)
- }
- }
- try lxtt transform tree
- finally clearSyntheticSyms()
- }
- }
-
- /** The Exhauster.
- */
- private[matching] trait MatrixExhaustiveness {
- self: MatchMatrix =>
-
- import self.context._
-
- /** Exhaustiveness checking requires looking for sealed classes
- * and if found, making sure all children are covered by a pattern.
- */
- class ExhaustivenessChecker(rep: Rep, matchPos: Position) {
- val Rep(tvars, rows) = rep
-
- import Flags.{ MUTABLE, ABSTRACT, SEALED }
-
- private case class Combo(index: Int, sym: Symbol) { }
-
-    /* True if the patterns in 'row' cover the given type symbol combination and the row has no guard. */
- private def rowCoversCombo(row: Row, combos: List[Combo]) =
- row.guard.isEmpty && combos.forall(c => row.pats(c.index) covers c.sym)
-
- private def requiresExhaustive(sym: Symbol) = {
-        (sym.isMutable) &&                 // indicates that we have not yet checked exhaustivity
- !(sym hasFlag NO_EXHAUSTIVE) && // indicates @unchecked
- (sym.tpe.typeSymbol.isSealed) &&
- !isPrimitiveValueClass(sym.tpe.typeSymbol) // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- }
-
- private lazy val inexhaustives: List[List[Combo]] = {
- // let's please not get too clever side-effecting the mutable flag.
- val toCollect = tvars.zipWithIndex filter { case (pv, i) => requiresExhaustive(pv.sym) }
- val collected = toCollect map { case (pv, i) =>
- // okay, now reset the flag
- pv.sym resetFlag MUTABLE
-
- i -> (
- pv.tpe.typeSymbol.sealedDescendants.toList sortBy (_.sealedSortName)
- // symbols which are both sealed and abstract need not be covered themselves, because
- // all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
- // have to filter out children which cannot match: see ticket #3683 for an example
- filter (_.tpe matchesPattern pv.tpe)
- )
- }
-
- val folded =
- collected.foldRight(List[List[Combo]]())((c, xs) => {
- val (i, syms) = c match { case (i, set) => (i, set.toList) }
- xs match {
- case Nil => syms map (s => List(Combo(i, s)))
- case _ => for (s <- syms ; rest <- xs) yield Combo(i, s) :: rest
- }
- })
-
- folded filterNot (combo => rows exists (r => rowCoversCombo(r, combo)))
- }
-
- private def mkPad(xs: List[Combo], i: Int): String = xs match {
- case Nil => pad("*")
- case Combo(j, sym) :: rest => if (j == i) pad(sym.name.toString) else mkPad(rest, i)
- }
- private def mkMissingStr(open: List[Combo]) =
- "missing combination %s\n" format tvars.indices.map(mkPad(open, _)).mkString
-
- /** The only public method. */
- def check = {
- def errMsg = (inexhaustives map mkMissingStr).mkString
- if (inexhaustives.nonEmpty)
- cunit.warning(matchPos, "match is not exhaustive!\n" + errMsg)
-
- rep
- }
- }
- }
-} \ No newline at end of file
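
The ExhaustivenessChecker deleted above enumerates, for each scrutinee column whose static type is sealed, the possible constructors, folds them into a cross product of combinations, and warns about any combination no row covers. A self-contained sketch of that cross-product check, using a simplified and hypothetical row/coverage model rather than real compiler symbols:

object ExhaustSketch {
  // One constructor name per scrutinee column.
  type Combo = List[String]

  // Cross product of the sealed children of every column.
  def combinations(columns: List[List[String]]): List[Combo] =
    columns.foldRight(List(List.empty[String])) { (col, rest) =>
      for (c <- col; r <- rest) yield c :: r
    }

  // Combinations that no row covers become "missing combination" warnings.
  def missing(columns: List[List[String]], covered: Combo => Boolean): List[Combo] =
    combinations(columns).filterNot(covered)

  def main(args: Array[String]): Unit = {
    val cols = List(List("Some", "None"), List("Nil", "::"))
    // rows cover everything except (None, ::)
    val rows = Set(List("Some", "Nil"), List("Some", "::"), List("None", "Nil"))
    println(missing(cols, rows))   // List(List(None, ::))
  }
}
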
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
deleted file mode 100644
index dbb9b7a003..0000000000
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ /dev/null
@@ -1,870 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Copyright 2007 Google Inc. All Rights Reserved.
- * Author: bqe@google.com (Burak Emir)
- */
-
-package scala.tools.nsc
-package matching
-
-import PartialFunction._
-import scala.collection.{ mutable }
-import scala.reflect.internal.util.Position
-import transform.ExplicitOuter
-import symtab.Flags
-import mutable.ListBuffer
-import scala.annotation.elidable
-import scala.language.postfixOps
-import scala.tools.nsc.settings.ScalaVersion
-
-trait ParallelMatching extends ast.TreeDSL
- with MatchSupport
- with Matrix
- with Patterns
- with PatternBindings
-{
- self: ExplicitOuter =>
-
- import global.{ typer => _, _ }
- import definitions.{
- AnyRefClass, IntClass, BooleanClass, SomeClass, OptionClass,
- getProductArgs, productProj, Object_eq, Any_asInstanceOf
- }
- import CODE._
- import Types._
- import Debug._
-
- /** Transition **/
- def toPats(xs: List[Tree]): List[Pattern] = xs map Pattern.apply
-
- /** The umbrella matrix class. **/
- abstract class MatchMatrix(val context: MatrixContext) extends MatchMatrixOptimizer with MatrixExhaustiveness {
- import context._
-
- def data: MatrixContext#MatrixInit
-
- lazy val MatrixInit(roots, cases, failTree) = data
- lazy val (rows, targets) = expand(roots, cases).unzip
- lazy val expansion: Rep = make(roots, rows)
-
- private val shortCuts = perRunCaches.newMap[Int, Symbol]()
-
- final def createShortCut(theLabel: Symbol): Int = {
- val key = shortCuts.size + 1
- shortCuts(key) = theLabel
- -key
- }
- def createLabelDef(namePrefix: String, body: Tree, params: List[Symbol] = Nil, restpe: Type = matchResultType) = {
- val labelName = cunit.freshTermName(namePrefix)
- val labelSym = owner.newLabel(labelName, owner.pos)
- val labelInfo = MethodType(params, restpe)
-
- LabelDef(labelSym setInfo labelInfo, params, body setType restpe)
- }
-
-    /** This is the recursive focal point for translating the current
- * list of pattern variables and a list of pattern match rows into
- * a tree suitable for entering erasure.
- *
- * The first time it is called, the variables are (copies of) the
- * original pattern matcher roots, and the rows correspond to the
- * original casedefs.
- */
- final def make(roots1: PatternVarGroup, rows1: List[Row]): Rep = {
- traceCategory("New Match", "%sx%s (%s)", roots1.size, rows1.size, roots1.syms.mkString(", "))
- def classifyPat(opat: Pattern, j: Int): Pattern = opat simplify roots1(j)
-
- val newRows = rows1 flatMap (_ expandAlternatives classifyPat)
- if (rows1.length != newRows.length) make(roots1, newRows) // recursive call if any change
- else {
- val rep = Rep(roots1, newRows)
- new ExhaustivenessChecker(rep, roots.head.sym.pos).check
- rep
- }
- }
-
- override def toString() = "MatchMatrix(%s) { %s }".format(matchResultType, indentAll(targets))
-
- /**
- * Encapsulates a symbol being matched on. It is created from a
- * PatternVar, which encapsulates the symbol's creation and assignment.
- *
- * We never match on trees directly - a temporary variable is created
- * (in a PatternVar) for any expression being matched on.
- */
- class Scrutinee(val pv: PatternVar) {
- import definitions._
-
- // presenting a face of our symbol
- def sym = pv.sym
- def tpe = sym.tpe
- def pos = sym.pos
- def id = ID(sym) setPos pos // attributed ident
-
- def accessors = if (isCaseClass) sym.caseFieldAccessors else Nil
- def accessorTypes = accessors map (x => (tpe memberType x).resultType)
-
- lazy val accessorPatternVars = PatternVarGroup(
- for ((accessor, tpe) <- accessors zip accessorTypes) yield
- createVar(tpe, _ => fn(id, accessor))
- )
-
- private def extraValDefs = if (pv.rhs.isEmpty) Nil else List(pv.valDef)
- def allValDefs = extraValDefs ::: accessorPatternVars.valDefs
-
- // tests
- def isDefined = sym ne NoSymbol
- def isSubrangeType = subrangeTypes(tpe.typeSymbol)
- def isCaseClass = tpe.typeSymbol.isCase
-
- // sequences
- def seqType = tpe.widen baseType SeqClass
- def elemType = tpe typeArgs 0
-
- private def elemAt(i: Int) = (id DOT (tpe member nme.apply))(LIT(i))
- private def createElemVar(i: Int) = createVar(elemType, _ => elemAt(i))
- private def createSeqVar(drop: Int) = createVar(seqType, _ => id DROP drop)
-
- def createSequenceVars(count: Int): List[PatternVar] =
- (0 to count).toList map (i => if (i < count) createElemVar(i) else createSeqVar(i))
-
- // for propagating "unchecked" to synthetic vars
- def isChecked = !(sym hasFlag NO_EXHAUSTIVE)
- def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _)
-
- // this is probably where this actually belongs
- def createVar(tpe: Type, f: Symbol => Tree) = context.createVar(tpe, f, isChecked)
-
- def castedTo(headType: Type) =
- if (tpe =:= headType) this
- else new Scrutinee(createVar(headType, lhs => gen.mkAsInstanceOf(id, lhs.tpe)))
-
- override def toString() = "(%s: %s)".format(id, tpe)
- }
-
- def isPatternSwitch(scrut: Scrutinee, ps: List[Pattern]): Option[PatternSwitch] = {
- def isSwitchableConst(x: Pattern) = cond(x) { case x: LiteralPattern if x.isSwitchable => true }
- def isSwitchableDefault(x: Pattern) = isSwitchableConst(x) || x.isDefault
-
- // TODO - scala> (5: Any) match { case 5 => 5 ; case 6 => 7 }
- // ... should compile to a switch. It doesn't because the scrut isn't Int/Char, but
-      // that could be handled in an if/else since every pattern requires an Int.
- // More immediately, Byte and Short scruts should also work.
- if (!scrut.isSubrangeType) None
- else {
- val (_lits, others) = ps span isSwitchableConst
- val lits = _lits collect { case x: LiteralPattern => x }
-
- condOpt(others) {
- case Nil => new PatternSwitch(scrut, lits, None)
- // TODO: This needs to also allow the case that the last is a compatible type pattern.
- case List(x) if isSwitchableDefault(x) => new PatternSwitch(scrut, lits, Some(x))
- }
- }
- }
-
- class PatternSwitch(
- scrut: Scrutinee,
- override val ps: List[LiteralPattern],
- val defaultPattern: Option[Pattern]
- ) extends PatternMatch(scrut, ps) {
- require(scrut.isSubrangeType && (ps forall (_.isSwitchable)))
- }
-
- case class PatternMatch(scrut: Scrutinee, ps: List[Pattern]) {
- def head = ps.head
- def tail = ps.tail
- def size = ps.length
-
- def headType = head.necessaryType
- private val dummyCount = if (head.isCaseClass) headType.typeSymbol.caseFieldAccessors.length else 0
- def dummies = emptyPatterns(dummyCount)
-
- def apply(i: Int): Pattern = ps(i)
- def pzip() = ps.zipWithIndex
- def pzip[T](others: List[T]) = {
- assert(ps.size == others.size, "Internal error: ps = %s, others = %s".format(ps, others))
- ps zip others
- }
-
- // Any unapply - returns Some(true) if a type test is needed before the unapply can
- // be called (e.g. def unapply(x: Foo) = { ... } but our scrutinee is type Any.)
- object AnyUnapply {
- def unapply(x: Pattern): Option[Boolean] = condOpt(x.tree) {
- case UnapplyParamType(tpe) => !(scrut.tpe <:< tpe)
- }
- }
-
- def mkRule(rest: Rep): RuleApplication = {
- tracing("Rule")(head match {
- case x if isEquals(x.tree.tpe) => new MixEquals(this, rest)
- case x: SequencePattern => new MixSequence(this, rest, x)
- case AnyUnapply(false) => new MixUnapply(this, rest)
- case _ =>
- isPatternSwitch(scrut, ps) match {
- case Some(x) => new MixLiteralInts(x, rest)
- case _ => new MixTypes(this, rest)
- }
- })
- }
- override def toString() = "%s match {%s}".format(scrut, indentAll(ps))
- } // PatternMatch
-
- /***** Rule Applications *****/
-
- sealed abstract class RuleApplication {
- def pmatch: PatternMatch
- def rest: Rep
- def cond: Tree
- def success: Tree
- def failure: Tree
-
- lazy val PatternMatch(scrut, patterns) = pmatch
- lazy val head = pmatch.head
- lazy val codegen: Tree = IF (cond) THEN (success) ELSE (failure)
-
- def mkFail(xs: List[Row]): Tree =
- if (xs.isEmpty) failTree
- else remake(xs).toTree
-
- def remake(
- rows: List[Row],
- pvgroup: PatternVarGroup = emptyPatternVarGroup,
- includeScrut: Boolean = true): Rep =
- {
- val scrutpvs = if (includeScrut) List(scrut.pv) else Nil
- make(pvgroup.pvs ::: scrutpvs ::: rest.tvars, rows)
- }
-
-      /** translate outcome of the rule application into code (possibly involving recursive application of rewriting) */
- def tree(): Tree
-
- override def toString =
- "Rule/%s (%s =^= %s)".format(getClass.getSimpleName, scrut, head)
- }
-
- /** {case ... if guard => bx} else {guardedRest} */
-    /** VariableRule: The top-most row has only variable (non-constructor) patterns. */
- case class VariableRule(subst: Bindings, guard: Tree, guardedRest: Rep, bx: Int) extends RuleApplication {
- def pmatch: PatternMatch = impossible
- def rest: Rep = guardedRest
-
- private lazy val (valDefs, successTree) = targets(bx) applyBindings subst.toMap
- lazy val cond = guard
- lazy val success = successTree
- lazy val failure = guardedRest.toTree
-
- final def tree(): Tree =
- if (bx < 0) REF(shortCuts(-bx))
- else squeezedBlock(
- valDefs,
- if (cond.isEmpty) success else codegen
- )
-
- override def toString = "(case %d) {\n Bindings: %s\n\n if (%s) { %s }\n else { %s }\n}".format(
- bx, subst, guard, success, guardedRest
- )
- }
-
- class MixLiteralInts(val pmatch: PatternSwitch, val rest: Rep) extends RuleApplication {
- val literals = pmatch.ps
- val defaultPattern = pmatch.defaultPattern
-
- private lazy val casted: Tree =
- if (!scrut.tpe.isInt) scrut.id DOT nme.toInt else scrut.id
-
- // creates a row transformer for injecting the default case bindings at a given index
- private def addDefaultVars(index: Int): Row => Row =
- if (defaultVars.isEmpty) identity
- else rebindAll(_, pmatch(index).boundVariables, scrut.sym)
-
- // add bindings for all the given vs to the given tvar
- private def rebindAll(r: Row, vs: Iterable[Symbol], tvar: Symbol) =
- r rebind r.subst.add(vs, tvar)
-
- private def bindVars(Tag: Int, orig: Bindings): Bindings = {
- def myBindVars(rest: List[(Int, List[Symbol])], bnd: Bindings): Bindings = rest match {
- case Nil => bnd
- case (Tag,vs)::xs => myBindVars(xs, bnd.add(vs, scrut.sym))
- case (_, vs)::xs => myBindVars(xs, bnd)
- }
- myBindVars(varMap, orig)
- }
-
- // bound vars and rows for default pattern (only one row, but a list is easier to use later)
- lazy val (defaultVars, defaultRows) = defaultPattern match {
- case None => (Nil, Nil)
- case Some(p) => (p.boundVariables, List(rebindAll(rest rows literals.size, p.boundVariables, scrut.sym)))
- }
-
- // literalMap is a map from each literal to a list of row indices.
-      // varMap pairs each literal's tag with the list of vars it binds.
- lazy val (litPairs, varMap) = (
- literals.zipWithIndex map {
- case (lit, index) =>
- val tag = lit.intValue
- (tag -> index, tag -> lit.boundVariables)
- } unzip
- )
- def literalMap = litPairs groupBy (_._1) map {
- case (k, vs) => (k, vs map (_._2))
- }
-
- lazy val cases =
- for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield {
- val newRows = indices map (i => addDefaultVars(i)(rest rows i))
- val r = remake(newRows ++ defaultRows, includeScrut = false)
- val r2 = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst)))
-
- CASE(Literal(Constant(tag))) ==> r2.toTree
- }
-
- lazy val defaultTree = remake(defaultRows, includeScrut = false).toTree
- def defaultCase = CASE(WILD(IntClass.tpe)) ==> defaultTree
-
- // cond/success/failure only used if there is exactly one case.
- lazy val cond = scrut.id MEMBER_== cases.head.pat
- lazy val success = cases.head.body
- lazy val failure = defaultTree
-
- // only one case becomes if/else, otherwise match
- def tree() =
- if (cases.size == 1) codegen
- else casted MATCH (cases :+ defaultCase: _*)
- }
-
- /** mixture rule for unapply pattern
- */
- class MixUnapply(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- val Pattern(UnApply(unMethod, unArgs)) = head
- val Apply(unTarget, _ :: trailing) = unMethod
-
- object SameUnapplyCall {
- def isSame(t: Tree) = isEquivalentTree(unTarget, t)
- def unapply(x: Pattern) = /*tracing("SameUnapplyCall (%s vs. %s)".format(unTarget, x))*/(x match {
- case Pattern(UnApply(Apply(fn, _), args)) if isSame(fn) => Some(args)
- case _ => None
- })
- }
- object SameUnapplyPattern {
- def isSame(t: Tree) = isEquivalentTree(unMethod, t)
- def apply(x: Pattern) = unapply(x).isDefined
- def unapply(x: Pattern) = /*tracing("SameUnapplyPattern (%s vs. %s)".format(unMethod, x))*/(x match {
- case Pattern(UnApply(t, _)) if isSame(t) => Some(unArgs)
- case _ => None
- })
- }
-
- private lazy val zipped = pmatch pzip rest.rows
-
- lazy val unapplyResult: PatternVar =
- scrut.createVar(unMethod.tpe, Apply(unTarget, scrut.id :: trailing) setType _.tpe)
-
- lazy val cond: Tree = unapplyResult.tpe.normalize match {
- case TypeRef(_, BooleanClass, _) => unapplyResult.ident
- case TypeRef(_, SomeClass, _) => TRUE
- case _ => NOT(unapplyResult.ident DOT nme.isEmpty)
- }
-
- lazy val failure =
- mkFail(zipped.tail filterNot (x => SameUnapplyPattern(x._1)) map { case (pat, r) => r insert pat })
-
- private def doSuccess: (List[PatternVar], List[PatternVar], List[Row]) = {
- // pattern variable for the unapply result of Some(x).get
- def unMethodTypeArg = unMethod.tpe.baseType(OptionClass).typeArgs match {
- case Nil => log("No type argument for unapply result! " + unMethod.tpe) ; NoType
- case arg :: _ => arg
- }
- lazy val pv = scrut.createVar(unMethodTypeArg, _ => fn(ID(unapplyResult.lhs), nme.get))
- def tuple = pv.lhs
-
- // at this point it's Some[T1,T2...]
- lazy val tpes = getProductArgs(tuple.tpe)
-
- // one pattern variable per tuple element
- lazy val tuplePVs =
- for ((tpe, i) <- tpes.zipWithIndex) yield
- scrut.createVar(tpe, _ => fn(ID(tuple), productProj(tuple, i + 1)))
-
- // the filter prevents infinite unapply recursion
- def mkNewRows(sameFilter: (List[Tree]) => List[Tree]) = {
- val dum = if (unArgs.length <= 1) unArgs.length else tpes.size
- for ((pat, r) <- zipped) yield pat match {
- case SameUnapplyCall(xs) => r.insert2(toPats(sameFilter(xs)) :+ NoPattern, pat.boundVariables, scrut.sym)
- case _ => r insert (emptyPatterns(dum) :+ pat)
- }
- }
-
- // 0 is Boolean, 1 is Option[T], 2+ is Option[(T1,T2,...)]
- unArgs.length match {
- case 0 => (Nil, Nil, mkNewRows((xs) => Nil))
- case 1 => (List(pv), List(pv), mkNewRows(xs => List(xs.head)))
- case _ => (pv :: tuplePVs, tuplePVs, mkNewRows(identity))
- }
- }
-
- lazy val success = {
- val (squeezePVs, pvs, rows) = doSuccess
- val srep = remake(rows, pvs).toTree
-
- squeezedBlock(squeezePVs map (_.valDef), srep)
- }
-
- final def tree() =
- squeezedBlock(List(handleOuter(unapplyResult.valDef)), codegen)
- }
-
- /** Handle Sequence patterns (including Star patterns.)
- * Note: pivot == head, just better typed.
- */
- sealed class MixSequence(val pmatch: PatternMatch, val rest: Rep, pivot: SequencePattern) extends RuleApplication {
- require(scrut.tpe <:< head.tpe)
-
- def hasStar = pivot.hasStar
- private def pivotLen = pivot.nonStarLength
- private def seqDummies = emptyPatterns(pivot.elems.length + 1)
-
- // Should the given pattern join the expanded pivot in the success matrix? If so,
- // this partial function will be defined for the pattern, and the result of the apply
- // is the expanded sequence of new patterns.
- lazy val successMatrixFn = new PartialFunction[Pattern, List[Pattern]] {
- private def seqIsDefinedAt(x: SequenceLikePattern) = (hasStar, x.hasStar) match {
- case (true, true) => true
- case (true, false) => pivotLen <= x.nonStarLength
- case (false, true) => pivotLen >= x.nonStarLength
- case (false, false) => pivotLen == x.nonStarLength
- }
-
- def isDefinedAt(pat: Pattern) = pat match {
- case x: SequenceLikePattern => seqIsDefinedAt(x)
- case WildcardPattern() => true
- case _ => false
- }
-
- def apply(pat: Pattern): List[Pattern] = pat match {
- case x: SequenceLikePattern =>
- def isSameLength = pivotLen == x.nonStarLength
- def rebound = x.nonStarPatterns :+ (x.elemPatterns.last rebindTo WILD(scrut.seqType))
-
- (pivot.hasStar, x.hasStar, isSameLength) match {
- case (true, true, true) => rebound :+ NoPattern
- case (true, true, false) => (seqDummies drop 1) :+ x
- case (true, false, true) => x.elemPatterns ++ List(NilPattern, NoPattern)
- case (false, true, true) => rebound
- case (false, false, true) => x.elemPatterns :+ NoPattern
- case _ => seqDummies
- }
-
- case _ => seqDummies
- }
- }
-
- // Should the given pattern be in the fail matrix? This is true of any sequences
- // as long as the result of the length test on the pivot doesn't make it impossible:
- // for instance if neither sequence is right ignoring and they are of different
- // lengths, the later one cannot match since its length must be wrong.
- def failureMatrixFn(c: Pattern) = (pivot ne c) && (c match {
- case x: SequenceLikePattern =>
- (hasStar, x.hasStar) match {
- case (_, true) => true
- case (true, false) => pivotLen > x.nonStarLength
- case (false, false) => pivotLen != x.nonStarLength
- }
- case WildcardPattern() => true
- case _ => false
- })
-
- // divide the remaining rows into success/failure branches, expanding subsequences of patterns
- val successRows = pmatch pzip rest.rows collect {
- case (c, row) if successMatrixFn isDefinedAt c => row insert successMatrixFn(c)
- }
- val failRows = pmatch pzip rest.rows collect {
- case (c, row) if failureMatrixFn(c) => row insert c
- }
-
- // the discrimination test for sequences is a call to lengthCompare. Note that
-      // this logic must be fully consistent with successMatrixFn and failureMatrixFn above:
-      // any inconsistency will manifest (and frequently has manifested) as pattern matcher crashes.
- lazy val cond = {
- // the method call symbol
- val methodOp: Symbol = head.tpe member nme.lengthCompare
-
- // the comparison to perform. If the pivot is right ignoring, then a scrutinee sequence
- // of >= pivot length could match it; otherwise it must be exactly equal.
- val compareOp: (Tree, Tree) => Tree = if (hasStar) _ INT_>= _ else _ INT_== _
-
- // scrutinee.lengthCompare(pivotLength) [== | >=] 0
- val compareFn: Tree => Tree = (t: Tree) => compareOp((t DOT methodOp)(LIT(pivotLen)), ZERO)
-
- // wrapping in a null check on the scrutinee
- // XXX this needs to use the logic in "def condition"
- nullSafe(compareFn, FALSE)(scrut.id)
- // condition(head.tpe, scrut.id, head.boundVariables.nonEmpty)
- }
- lazy val success = {
- // one pattern var per sequence element up to elemCount, and one more for the rest of the sequence
- lazy val pvs = scrut createSequenceVars pivotLen
-
- squeezedBlock(pvs map (_.valDef), remake(successRows, pvs, hasStar).toTree)
- }
- lazy val failure = remake(failRows).toTree
-
- final def tree(): Tree = codegen
- }
-
- class MixEquals(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- private lazy val rhs =
- decodedEqualsType(head.tpe) match {
- case SingleType(pre, sym) => REF(pre, sym)
- case PseudoType(o) => o
- }
- private lazy val labelDef =
- createLabelDef("fail%", remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree)
-
- lazy val cond = handleOuter(rhs MEMBER_== scrut.id)
- lazy val successOne = rest.rows.head.insert2(List(NoPattern), head.boundVariables, scrut.sym)
- lazy val successTwo = Row(emptyPatterns(1 + rest.tvars.size), NoBinding, EmptyTree, createShortCut(labelDef.symbol))
- lazy val success = remake(List(successOne, successTwo)).toTree
- lazy val failure = labelDef
-
- final def tree() = codegen
- override def toString() = "MixEquals(%s == %s)".format(scrut, head)
- }
-
- /** Mixture rule for type tests.
- * moreSpecific: more specific patterns
- * subsumed: more general patterns (subsuming current), rows index and subpatterns
- * remaining: remaining, rows index and pattern
- */
- class MixTypes(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- case class Yes(bx: Int, moreSpecific: Pattern, subsumed: List[Pattern])
- case class No(bx: Int, remaining: Pattern)
-
- val (yeses, noes) = {
- val _ys = new ListBuffer[Yes]
- val _ns = new ListBuffer[No]
-
- for ((pattern, j) <- pmatch.pzip()) {
- // scrutinee, head of pattern group
- val (s, p) = (pattern.tpe, head.necessaryType)
-
- def isEquivalent = head.necessaryType =:= pattern.tpe
- def isObjectTest = pattern.isObject && (p =:= pattern.necessaryType)
-
- def sMatchesP = matches(s, p)
- def pMatchesS = matches(p, s)
-
- def ifEquiv(yes: Pattern): Pattern = if (isEquivalent) yes else pattern
-
- def passl(p: Pattern = NoPattern, ps: List[Pattern] = pmatch.dummies) = Some(Yes(j, p, ps))
- def passr() = Some( No(j, pattern))
-
- def typed(pp: Tree) = passl(ifEquiv(Pattern(pp)))
- def subs() = passl(ifEquiv(NoPattern), pattern subpatterns pmatch)
-
- val (oneY, oneN) = pattern match {
- case Pattern(LIT(null)) if !(p =:= s) => (None, passr) // (1)
- case x if isObjectTest => (passl(), None) // (2)
- case Pattern(Typed(pp, _)) if sMatchesP => (typed(pp), None) // (4)
- // The next line used to be this which "fixed" 1697 but introduced
- // numerous regressions including #3136.
- // case Pattern(_: UnApply, _) => (passl(), passr)
- case Pattern(_: UnApply) => (None, passr)
- case x if !x.isDefault && sMatchesP => (subs(), None)
- case x if x.isDefault || pMatchesS => (passl(), passr)
- case _ => (None, passr)
- }
- oneY map (_ys +=)
- oneN map (_ns +=)
- }
- (_ys.toList, _ns.toList)
- }
-
- val moreSpecific = yeses map (_.moreSpecific)
- val subsumed = yeses map (x => (x.bx, x.subsumed))
- val remaining = noes map (x => (x.bx, x.remaining))
-
- private def mkZipped =
- for (Yes(j, moreSpecific, subsumed) <- yeses) yield
- j -> (moreSpecific :: subsumed)
-
- lazy val casted = scrut castedTo pmatch.headType
- lazy val cond = condition(casted.tpe, scrut, head.boundVariables.nonEmpty)
-
- private def isAnyMoreSpecific = yeses exists (x => !x.moreSpecific.isEmpty)
- lazy val (subtests, subtestVars) =
- if (isAnyMoreSpecific) (mkZipped, List(casted.pv))
- else (subsumed, Nil)
-
- lazy val newRows =
- for ((j, ps) <- subtests) yield
- (rest rows j).insert2(ps, pmatch(j).boundVariables, casted.sym)
-
- lazy val success = {
- val srep = remake(newRows, subtestVars ::: casted.accessorPatternVars, includeScrut = false)
- squeezedBlock(casted.allValDefs, srep.toTree)
- }
-
- lazy val failure =
- mkFail(remaining map { case (p1, p2) => rest rows p1 insert p2 })
-
- final def tree(): Tree = codegen
- }
-
- /*** States, Rows, Etc. ***/
-
- case class Row(pats: List[Pattern], subst: Bindings, guard: Tree, bx: Int) {
- private def nobindings = subst.get().isEmpty
- private def bindstr = if (nobindings) "" else pp(subst)
-
- /** Extracts the 'i'th pattern. */
- def extractColumn(i: Int) = {
- val (x, xs) = extractIndex(pats, i)
- (x, copy(pats = xs))
- }
-
- /** Replaces the 'i'th pattern with the argument. */
- def replaceAt(i: Int, p: Pattern) = {
- val newps = (pats take i) ::: p :: (pats drop (i + 1))
- copy(pats = newps)
- }
-
- def insert(h: Pattern) = copy(pats = h :: pats)
- def insert(hs: List[Pattern]) = copy(pats = hs ::: pats) // prepends supplied pattern
- def rebind(b: Bindings) = copy(subst = b) // substitutes for bindings
-
- def insert2(hs: List[Pattern], vs: Iterable[Symbol], tvar: Symbol) =
- tracing("insert2")(copy(pats = hs ::: pats, subst = subst.add(vs, tvar)))
-
-      // returns this row with alternatives expanded
- def expandAlternatives(classifyPat: (Pattern, Int) => Pattern): List[Row] = {
- def isNotAlternative(p: Pattern) = !cond(p.tree) { case _: Alternative => true }
-
- // classify all the top level patterns - alternatives come back unaltered
- val newPats: List[Pattern] = pats.zipWithIndex map classifyPat.tupled
- // see if any alternatives were in there
- val (ps, others) = newPats span isNotAlternative
- // make a new row for each alternative, with it spliced into the original position
- if (others.isEmpty) List(copy(pats = ps))
- else extractBindings(others.head) map (x => replaceAt(ps.size, x))
- }
- override def toString() = {
- val bs = if (nobindings) "" else "\n" + bindstr
- "Row(%d)(%s%s)".format(bx, pp(pats), bs)
- }
- }
- abstract class State {
- def bx: Int // index into the list of rows
- def params: List[Symbol] // bound names to be supplied as arguments to labeldef
- def body: Tree // body to execute upon match
- def label: Option[LabelDef] // label definition for this state
-
- // Called with a bindings map when a match is achieved.
- // Returns a list of variable declarations based on the labeldef parameters
- // and the given substitution, and the body to execute.
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]): (List[ValDef], Tree)
-
- final def applyBindings(subst: Map[Symbol, Symbol]): (List[ValDef], Tree) = {
- _referenceCount += 1
- applyBindingsImpl(subst)
- }
-
- private var _referenceCount = 0
- def referenceCount = _referenceCount
- def unreached = referenceCount == 0
- def shouldInline(sym: Symbol) = referenceCount == 1 && label.exists(_.symbol == sym)
-
- // Creates a simple Ident if the symbol's type conforms to
- // the val definition's type, or a casted Ident if not.
- private def newValIdent(lhs: Symbol, rhs: Symbol) =
- if (rhs.tpe <:< lhs.tpe) Ident(rhs)
- else gen.mkTypeApply(Ident(rhs), Any_asInstanceOf, List(lhs.tpe))
-
- protected def newValDefinition(lhs: Symbol, rhs: Symbol) =
- typer typedValDef ValDef(lhs, newValIdent(lhs, rhs))
-
- protected def newValReference(lhs: Symbol, rhs: Symbol) =
- typer typed newValIdent(lhs, rhs)
-
- protected def valDefsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValDefinition)
- protected def identsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValReference)
-
- protected def mapSubst[T](subst: Map[Symbol, Symbol])(f: (Symbol, Symbol) => T): List[T] =
- params flatMap { lhs =>
- subst get lhs map (rhs => f(lhs, rhs)) orElse {
- // This should not happen; the code should be structured so it is
- // impossible, but that still lies ahead.
- cunit.warning(lhs.pos, "No binding")
- None
- }
- }
-
- // typer is not able to digest a body of type Nothing being assigned result type Unit
- protected def caseResultType =
- if (body.tpe.isNothing) body.tpe else matchResultType
- }
-
- case class LiteralState(bx: Int, params: List[Symbol], body: Tree) extends State {
- def label = None
-
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) =
- (valDefsFor(subst), body.duplicate setType caseResultType)
- }
-
- case class FinalState(bx: Int, params: List[Symbol], body: Tree) extends State {
- traceCategory("Final State", "(%s) => %s", paramsString, body)
- def label = Some(labelDef)
-
- private lazy val labelDef = createLabelDef("body%" + bx, body, params, caseResultType)
-
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) = {
- val tree =
- if (referenceCount > 1) ID(labelDef.symbol) APPLY identsFor(subst)
- else labelDef
-
- (valDefsFor(subst), tree)
- }
-
- private def paramsString = params map (s => s.name + ": " + s.tpe) mkString ", "
- override def toString() = pp("(%s) => %s".format(pp(params), body))
- }
-
- case class Rep(val tvars: PatternVarGroup, val rows: List[Row]) {
- lazy val Row(pats, subst, guard, index) = rows.head
- lazy val guardedRest = if (guard.isEmpty) Rep(Nil, Nil) else make(tvars, rows.tail)
- lazy val (defaults, others) = pats span (_.isDefault)
-
- /** Cut out the column containing the non-default pattern. */
- class Cut(index: Int) {
- /** The first two separate out the 'i'th pattern in each row from the remainder. */
- private val (_column, _rows) = rows map (_ extractColumn index) unzip
-
- /** Now the 'i'th tvar is separated out and used as a new Scrutinee. */
- private val (_pv, _tvars) = tvars extractIndex index
-
- /** The non-default pattern (others.head) replaces the column head. */
- private val (_ncol, _nrep) =
- (others.head :: _column.tail, make(_tvars, _rows))
-
- def mix() = {
- val newScrut = new Scrutinee(new PatternVar(_pv.sym, EmptyTree, _pv.checked))
- PatternMatch(newScrut, _ncol) mkRule _nrep
- }
- }
-
- /** Converts this to a tree - recursively acquires subreps. */
- final def toTree(): Tree = tracing("toTree")(typer typed applyRule())
-
- /** The VariableRule. */
- private def variable() = {
- val binding = (defaults map (_.boundVariables) zip tvars.pvs) .
- foldLeft(subst)((b, pair) => b.add(pair._1, pair._2.lhs))
-
- VariableRule(binding, guard, guardedRest, index)
- }
- /** The MixtureRule: picks a rewrite rule to apply. */
- private def mixture() = new Cut(defaults.size) mix()
-
- /** Applying the rule will result in one of:
- *
- * VariableRule - if all patterns are default patterns
- * MixtureRule - if one or more patterns are not default patterns
- * Error - no rows remaining
- */
- final def applyRule(): Tree =
- if (rows.isEmpty) failTree
- else if (others.isEmpty) variable.tree()
- else mixture.tree()
-
- def ppn(x: Any) = pp(x, newlines = true)
- override def toString() =
- if (tvars.isEmpty) "Rep(%d) = %s".format(rows.size, ppn(rows))
- else "Rep(%dx%d)%s%s".format(tvars.size, rows.size, ppn(tvars), ppn(rows))
- }
-
- /** Expands the patterns recursively. */
- final def expand(roots: List[PatternVar], cases: List[CaseDef]) = tracing("expand") {
- for ((CaseDef(pat, guard, body), bx) <- cases.zipWithIndex) yield {
- val subtrees = pat match {
- case x if roots.length <= 1 => List(x)
- case Apply(_, args) => args
- case WILD() => emptyTrees(roots.length)
- }
- val params = pat filter (_.isInstanceOf[Bind]) map (_.symbol) distinct
- val row = Row(toPats(subtrees), NoBinding, guard, bx)
- val state = body match {
- case x: Literal => LiteralState(bx, params, body)
- case _ => FinalState(bx, params, body)
- }
-
- row -> state
- }
- }
-
- /** returns the condition in "if (cond) k1 else k2"
- */
- final def condition(tpe: Type, scrut: Scrutinee, isBound: Boolean): Tree = {
- assert(scrut.isDefined)
- val cond = handleOuter(condition(tpe, scrut.id, isBound))
-
- if (!needsOuterTest(tpe, scrut.tpe, owner)) cond
- else addOuterCondition(cond, tpe, scrut.id)
- }
-
- final def condition(tpe: Type, scrutTree: Tree, isBound: Boolean): Tree = {
- assert((tpe ne NoType) && (scrutTree.tpe ne NoType))
- def isMatchUnlessNull = scrutTree.tpe <:< tpe && tpe.isAnyRef
- def isRef = scrutTree.tpe.isAnyRef
-
- // See ticket #1503 for the motivation behind checking for a binding.
- // The upshot is that it is unsound to assume equality means the right
- // type, but if the value doesn't appear on the right hand side of the
- // match that's unimportant; so we add an instance check only if there
- // is a binding.
- def bindingWarning() = {
- if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
- cunit.warning(scrutTree.pos,
- "A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
- }
- }
-
- def genEquals(sym: Symbol): Tree = {
- val t1: Tree = REF(sym) MEMBER_== scrutTree
-
- if (isBound) {
- bindingWarning()
- t1 AND (scrutTree IS tpe.widen)
- }
- else t1
- }
-
- typer typed {
- tpe match {
- case ConstantType(Constant(null)) if isRef => scrutTree OBJ_EQ NULL
- case ConstantType(const) => scrutTree MEMBER_== Literal(const)
- case SingleType(NoPrefix, sym) => genEquals(sym)
- case SingleType(pre, sym) if sym.isStable => genEquals(sym)
- case ThisType(sym) if sym.isModule => genEquals(sym)
- case _ if isMatchUnlessNull => scrutTree OBJ_NE NULL
- case _ => scrutTree IS tpe
- }
- }
- }
-
- /** adds a test comparing the dynamic outer to the static outer */
- final def addOuterCondition(cond: Tree, tpe2test: Type, scrut: Tree) = {
- val TypeRef(prefix, _, _) = tpe2test
- val theRef = handleOuter(prefix match {
- case NoPrefix => abort("assertion failed: NoPrefix")
- case ThisType(clazz) => THIS(clazz)
- case pre => REF(pre.prefix, pre.termSymbol)
- })
- outerAccessor(tpe2test.typeSymbol) match {
- case NoSymbol => ifDebug(cunit.warning(scrut.pos, "no outer acc for " + tpe2test.typeSymbol)) ; cond
- case outerAcc =>
- val casted = gen.mkAsInstanceOf(scrut, tpe2test, any = true, wrapInApply = true)
- cond AND ((casted DOT outerAcc)() OBJ_EQ theRef)
- }
- }
- }
-}
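
MixSequence.cond in the file deleted above discriminates sequence patterns with a single lengthCompare call wrapped in a null check: a fixed-length pattern needs lengthCompare(n) == 0, while a pattern ending in _* only needs lengthCompare(n) >= 0. A minimal sketch of that test on ordinary collections, illustrative only and not the generated tree:

object SeqCondSketch {
  // Fixed-length pattern: lengthCompare(n) == 0; star pattern: lengthCompare(n) >= 0.
  // The whole test is guarded by a null check on the scrutinee.
  def seqMatchesLength[A](xs: Seq[A], nonStarLen: Int, hasStar: Boolean): Boolean =
    (xs != null) && {
      val cmp = xs.lengthCompare(nonStarLen)
      if (hasStar) cmp >= 0 else cmp == 0
    }

  def main(args: Array[String]): Unit = {
    println(seqMatchesLength(List(1, 2, 3), 2, hasStar = true))   // true:  case Seq(a, b, _*)
    println(seqMatchesLength(List(1, 2, 3), 2, hasStar = false))  // false: case Seq(a, b)
    val noSeq: List[Int] = null
    println(seqMatchesLength(noSeq, 0, hasStar = true))           // false: null-safe
  }
}
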
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
deleted file mode 100644
index 7b2fcf0e9b..0000000000
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import PartialFunction._
-import scala.language.postfixOps
-
-trait PatternBindings extends ast.TreeDSL
-{
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import definitions.{ EqualsPatternClass }
- import CODE._
- import Debug._
-
- /** EqualsPattern **/
- def isEquals(tpe: Type) = tpe.typeSymbol == EqualsPatternClass
- def mkEqualsRef(tpe: Type) = typeRef(NoPrefix, EqualsPatternClass, List(tpe))
- def decodedEqualsType(tpe: Type) =
- if (tpe.typeSymbol == EqualsPatternClass) tpe.typeArgs.head else tpe
-
- // A subtype test which creates fresh existentials for type
- // parameters on the right hand side.
- def matches(arg1: Type, arg2: Type) = decodedEqualsType(arg1) matchesPattern decodedEqualsType(arg2)
-
- // For spotting duplicate unapplies
- def isEquivalentTree(t1: Tree, t2: Tree) = (t1.symbol == t2.symbol) && (t1 equalsStructure t2)
-
-  // Reproduce the Bind trees that wrapped oldTree, around newTree
- def moveBindings(oldTree: Tree, newTree: Tree): Tree = oldTree match {
- case b @ Bind(x, body) => Bind(b.symbol, moveBindings(body, newTree))
- case _ => newTree
- }
-
- // used as argument to `EqualsPatternClass`
- case class PseudoType(o: Tree) extends SimpleTypeProxy {
- override def underlying: Type = o.tpe
- override def safeToString: String = "PseudoType("+o+")"
- }
-
- // If the given pattern contains alternatives, return it as a list of patterns.
- // Makes typed copies of any bindings found so all alternatives point to final state.
- def extractBindings(p: Pattern): List[Pattern] =
- toPats(_extractBindings(p.boundTree, identity))
-
- private def _extractBindings(p: Tree, prevBindings: Tree => Tree): List[Tree] = {
- def newPrev(b: Bind) = (x: Tree) => treeCopy.Bind(b, b.name, x) setType x.tpe
-
- p match {
- case b @ Bind(_, body) => _extractBindings(body, newPrev(b))
- case Alternative(ps) => ps map prevBindings
- }
- }
-
- trait PatternBindingLogic {
- self: Pattern =>
-
- // This is for traversing the pattern tree - pattern types which might have
- // bound variables beneath them return a list of said patterns for flatMapping.
- def subpatternsForVars: List[Pattern] = Nil
-
- // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree.
- private var _boundTree: Tree = tree
- def boundTree = _boundTree
- def setBound(x: Bind): Pattern = {
- _boundTree = x
- this
- }
- def boundVariables = strip(boundTree)
-
- // If a tree has bindings, boundTree looks something like
- // Bind(v3, Bind(v2, Bind(v1, tree)))
- // This takes the given tree and creates a new pattern
- // using the same bindings.
- def rebindTo(t: Tree): Pattern = Pattern(moveBindings(boundTree, t))
-
- // Wrap this pattern's bindings around (_: Type)
- def rebindToType(tpe: Type, ascription: Type = null): Pattern = {
- val aType = if (ascription == null) tpe else ascription
- rebindTo(Typed(WILD(tpe), TypeTree(aType)) setType tpe)
- }
-
- // Wrap them around _
- def rebindToEmpty(tpe: Type): Pattern =
- rebindTo(Typed(EmptyTree, TypeTree(tpe)) setType tpe)
-
- // Wrap them around a singleton type for an EqualsPattern check.
- def rebindToEqualsCheck(): Pattern =
- rebindToType(equalsCheck)
-
- // Like rebindToEqualsCheck, but subtly different. Not trying to be
- // mysterious -- I haven't sorted it all out yet.
- def rebindToObjectCheck(): Pattern =
- rebindToType(mkEqualsRef(sufficientType), sufficientType)
-
- /** Helpers **/
- private def wrapBindings(vs: List[Symbol], pat: Tree): Tree = vs match {
- case Nil => pat
- case x :: xs => Bind(x, wrapBindings(xs, pat)) setType pat.tpe
- }
- private def strip(t: Tree): List[Symbol] = t match {
- case b @ Bind(_, pat) => b.symbol :: strip(pat)
- case _ => Nil
- }
- private def deepstrip(t: Tree): List[Symbol] =
- treeCollect(t, { case x: Bind => x.symbol })
- }
-
- case class Binding(pvar: Symbol, tvar: Symbol) {
- override def toString() = pvar.name + " -> " + tvar.name
- }
-
- class Bindings(private val vlist: List[Binding]) {
- // if (!vlist.isEmpty)
- // traceCategory("Bindings", this.toString)
-
- def get() = vlist
- def toMap = vlist map (x => (x.pvar, x.tvar)) toMap
-
- def add(vs: Iterable[Symbol], tvar: Symbol): Bindings = {
- val newBindings = vs.toList map (v => Binding(v, tvar))
- new Bindings(newBindings ++ vlist)
- }
-
- override def toString() =
- if (vlist.isEmpty) "<none>"
- else vlist.mkString(", ")
- }
-
- val NoBinding: Bindings = new Bindings(Nil)
-}
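
moveBindings in the file deleted above rebuilds the chain of Bind(x1, Bind(x2, ...)) wrappers taken from one pattern around a replacement pattern, which is how rebindTo and alternative expansion keep bound names attached. A standalone sketch of the same recursion over a small, hypothetical pattern ADT:

object BindSketch {
  sealed trait Pat
  final case class Bind(name: String, body: Pat) extends Pat
  final case class Typed(tpe: String) extends Pat
  case object Wild extends Pat

  // Rebuild the Bind(x1, Bind(x2, ...)) wrappers of oldPat around newPat.
  def moveBindings(oldPat: Pat, newPat: Pat): Pat = oldPat match {
    case Bind(name, body) => Bind(name, moveBindings(body, newPat))
    case _                => newPat
  }

  def main(args: Array[String]): Unit = {
    val original = Bind("x", Bind("y", Wild))
    println(moveBindings(original, Typed("Int")))   // Bind(x,Bind(y,Typed(Int)))
  }
}
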
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
deleted file mode 100644
index ef41246af9..0000000000
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ /dev/null
@@ -1,499 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import symtab.Flags
-import PartialFunction._
-
-/** Patterns are wrappers for Trees with enhanced semantics.
- *
- * @author Paul Phillips
- */
-
-trait Patterns extends ast.TreeDSL {
- self: transform.ExplicitOuter =>
-
- import global.{ typer => _, _ }
- import definitions._
- import CODE._
- import Debug._
- import treeInfo.{ unbind, isStar, isVarPattern }
-
- type PatternMatch = MatchMatrix#PatternMatch
- private type PatternVar = MatrixContext#PatternVar
-
- // Fresh patterns
- def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern)
- def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree)
-
- // An empty pattern
- def NoPattern = WildcardPattern()
-
- // The constant null pattern
- def NullPattern = LiteralPattern(NULL)
-
- // The Nil pattern
- def NilPattern = Pattern(gen.mkNil)
-
- // 8.1.1
- case class VariablePattern(tree: Ident) extends NamePattern {
- lazy val Ident(name) = tree
- require(isVarPattern(tree) && name != nme.WILDCARD)
- override def covers(sym: Symbol) = true
- override def description = "%s".format(name)
- }
-
- // 8.1.1 (b)
- case class WildcardPattern() extends Pattern {
- def tree = EmptyTree
- override def covers(sym: Symbol) = true
- override def isDefault = true
- override def description = "_"
- }
-
- // 8.1.2
- case class TypedPattern(tree: Typed) extends Pattern {
- lazy val Typed(expr, tpt) = tree
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe)
- override def sufficientType = tpt.tpe
- override def subpatternsForVars: List[Pattern] = List(Pattern(expr))
- override def simplify(pv: PatternVar) = Pattern(expr) match {
- case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr
- case _ => this
- }
- override def description = "%s: %s".format(Pattern(expr), tpt)
- }
-
- // 8.1.3
- case class LiteralPattern(tree: Literal) extends Pattern {
- lazy val Literal(const @ Constant(value)) = tree
-
- def isSwitchable = cond(const.tag) { case ByteTag | ShortTag | IntTag | CharTag => true }
- def intValue = const.intValue
- override def description = {
- val s = if (value == null) "null" else value.toString
- "Lit(%s)".format(s)
- }
- }
-
- // 8.1.4 (a)
- case class ApplyIdentPattern(tree: Apply) extends ApplyPattern with NamePattern {
- // XXX - see bug 3411 for code which violates this assumption
- // require (!isVarPattern(fn) && args.isEmpty)
- lazy val ident @ Ident(name) = fn
-
- override def sufficientType = Pattern(ident).equalsCheck
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = "Id(%s)".format(name)
- }
- // 8.1.4 (b)
- case class ApplySelectPattern(tree: Apply) extends ApplyPattern with SelectPattern {
- require (args.isEmpty)
- lazy val Apply(select: Select, _) = tree
-
- override lazy val sufficientType = qualifier.tpe match {
- case t: ThisType => singleType(t, sym) // this.X
- case _ =>
- qualifier match {
- case _: Apply => PseudoType(tree)
- case _ => singleType(Pattern(qualifier).necessaryType, sym)
- }
- }
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = backticked match {
- case Some(s) => "this." + s
- case _ => "Sel(%s.%s)".format(Pattern(qualifier), name)
- }
-
- }
- // 8.1.4 (c)
- case class StableIdPattern(tree: Select) extends SelectPattern {
- def select = tree
- override def description = "St(%s)".format(printableSegments.mkString(" . "))
- private def printableSegments =
- pathSegments filter (x => !x.isEmpty && (x.toString != "$iw"))
- }
- // 8.1.4 (d)
- case class ObjectPattern(tree: Apply) extends ApplyPattern { // NamePattern?
- require(!fn.isType && isModule)
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
- override def sufficientType = tpe.narrow
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = "Obj(%s)".format(fn)
- }
- // 8.1.4 (e)
- case class SimpleIdPattern(tree: Ident) extends NamePattern {
- val Ident(name) = tree
- override def covers(sym: Symbol) = newMatchesPattern(sym, tpe.narrow)
- override def description = "Id(%s)".format(name)
- }
-
- // 8.1.5
- case class ConstructorPattern(tree: Apply) extends ApplyPattern with NamePattern {
- require(fn.isType && this.isCaseClass, "tree: " + tree + " fn: " + fn)
- def name = tpe.typeSymbol.name
- def cleanName = tpe.typeSymbol.decodedName
- def hasPrefix = tpe.prefix.prefixString != ""
- def prefixedName =
- if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName)
- else cleanName
-
- private def isColonColon = cleanName == "::"
-
- override def subpatterns(pm: MatchMatrix#PatternMatch) =
- if (pm.head.isCaseClass) toPats(args)
- else super.subpatterns(pm)
-
- override def simplify(pv: PatternVar) =
- if (args.isEmpty) this rebindToEmpty tree.tpe
- else this
-
- override def covers(sym: Symbol) = {
- debugging("[constructor] Does " + this + " cover " + sym + " ? ") {
- sym.tpe.typeSymbol == this.tpe.typeSymbol
- }
- }
- override def description = {
- if (isColonColon) "%s :: %s".format(Pattern(args(0)), Pattern(args(1)))
- else "%s(%s)".format(name, toPats(args).mkString(", "))
- }
- }
- // 8.1.6
- case class TuplePattern(tree: Apply) extends ApplyPattern {
- override def description = "((%s))".format(args.size, toPats(args).mkString(", "))
- }
-
- // 8.1.7 / 8.1.8 (unapply and unapplySeq calls)
- case class ExtractorPattern(tree: UnApply) extends UnapplyPattern {
- private def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe
-
- override def simplify(pv: PatternVar) = {
- if (pv.tpe <:< arg.tpe) this
- else this rebindTo uaTyped
- }
- override def description = "Unapply(%s => %s)".format(necessaryType, resTypesString)
- }
-
- // Special List handling. It was like that when I got here.
- case class ListExtractorPattern(tree: UnApply, tpt: Tree, elems: List[Tree]) extends UnapplyPattern with SequenceLikePattern {
- // As yet I can't testify this is doing any good relative to using
- // tpt.tpe, but it doesn't seem to hurt either.
- private lazy val packedType = global.typer.computeType(tpt, tpt.tpe)
- private lazy val consRef = appliedType(ConsClass, packedType)
- private lazy val listRef = appliedType(ListClass, packedType)
- private lazy val seqRef = appliedType(SeqClass, packedType)
-
- private def thisSeqRef = {
- val tc = (tree.tpe baseType SeqClass).typeConstructor
- if (tc.typeParams.size == 1) appliedType(tc, List(packedType))
- else seqRef
- }
-
- // Fold a list into a well-typed x :: y :: etc :: tree.
- private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
- case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
- case _ =>
- val dummyMethod = NoSymbol.newTermSymbol(newTermName("matching$dummy"))
- val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
-
- Apply(TypeTree(consType), List(hd, tl)) setType consRef
- }
- private def foldedPatterns = elems.foldRight(gen.mkNil)((x, y) => listFolder(x, y))
- override def necessaryType = if (nonStarPatterns.nonEmpty) consRef else listRef
-
- override def simplify(pv: PatternVar) = {
- if (pv.tpe <:< necessaryType)
- Pattern(foldedPatterns)
- else
- this rebindTo (Typed(tree, TypeTree(necessaryType)) setType necessaryType)
- }
- override def description = "List(%s => %s)".format(packedType, resTypesString)
- }
-
- trait SequenceLikePattern extends Pattern {
- def elems: List[Tree]
- override def hasStar = elems.nonEmpty && isStar(elems.last)
-
- def elemPatterns = toPats(elems)
- def nonStarElems = if (hasStar) elems.init else elems
- def nonStarPatterns = toPats(nonStarElems)
- def nonStarLength = nonStarElems.length
- }
-
- // 8.1.8 (b) (literal ArrayValues)
- case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern {
- lazy val ArrayValue(elemtpt, elems) = tree
-
- override def subpatternsForVars: List[Pattern] = elemPatterns
- override def description = "Seq(%s)".format(elemPatterns mkString ", ")
- }
-
- // 8.1.8 (c)
- case class StarPattern(tree: Star) extends Pattern {
- lazy val Star(elem) = tree
- override def description = "_*"
- }
- // XXX temporary?
- case class ThisPattern(tree: This) extends NamePattern {
- lazy val This(name) = tree
- override def description = "this"
- }
-
- // 8.1.9
- // InfixPattern ... subsumed by Constructor/Extractor Patterns
-
- // 8.1.10
- case class AlternativePattern(tree: Alternative) extends Pattern {
- private lazy val Alternative(subtrees) = tree
- private def alts = toPats(subtrees)
- override def description = "Alt(%s)".format(alts mkString " | ")
- }
-
- // 8.1.11
- // XMLPattern ... for now, subsumed by SequencePattern, but if we want
- // to make it work right, it probably needs special handling.
-
- private def abortUnknownTree(tree: Tree) =
- abort("Unknown Tree reached pattern matcher: %s/%s".format(tree, tree.getClass))
-
- object Pattern {
- // a small tree -> pattern cache
- private val cache = perRunCaches.newMap[Tree, Pattern]()
-
- def apply(tree: Tree): Pattern = {
- if (cache contains tree)
- return cache(tree)
-
- val p = tree match {
- case x: Bind => apply(unbind(tree)) setBound x
- case EmptyTree => WildcardPattern()
- case Ident(nme.WILDCARD) => WildcardPattern()
- case x @ Alternative(ps) => AlternativePattern(x)
- case x: Apply => ApplyPattern(x)
- case x: Typed => TypedPattern(x)
- case x: Literal => LiteralPattern(x)
- case x: UnApply => UnapplyPattern(x)
- case x: Ident => if (isVarPattern(x)) VariablePattern(x) else SimpleIdPattern(x)
- case x: ArrayValue => SequencePattern(x)
- case x: Select => StableIdPattern(x)
- case x: Star => StarPattern(x)
- case x: This => ThisPattern(x) // XXX ?
- case _ => abortUnknownTree(tree)
- }
- cache(tree) = p
-
- // limiting the trace output
- p match {
- case WildcardPattern() => p
- case _: LiteralPattern => p
- case _ => tracing("Pattern")(p)
- }
- }
- // matching on Pattern(...) always skips the bindings.
- def unapply(other: Any): Option[Tree] = other match {
- case x: Tree => unapply(Pattern(x))
- case x: Pattern => Some(x.tree)
- case _ => None
- }
- }
-
- object UnapplyPattern {
- private object UnapplySeq {
- def unapply(x: UnApply) = x match {
- case UnApply(
- Apply(TypeApply(Select(qual, nme.unapplySeq), List(tpt)), _),
- List(ArrayValue(_, elems))) =>
- Some((qual.symbol, tpt, elems))
- case _ =>
- None
- }
- }
-
- def apply(x: UnApply): Pattern = x match {
- case UnapplySeq(ListModule, tpt, elems) =>
- ListExtractorPattern(x, tpt, elems)
- case _ =>
- ExtractorPattern(x)
- }
- }
-
- // right now a tree like x @ Apply(fn, Nil) where !fn.isType
- // is handled by creating a singleton type:
- //
- // val stype = Types.singleType(x.tpe.prefix, x.symbol)
- //
- // and then passing that as a type argument to EqualsPatternClass:
- //
- // val tpe = typeRef(NoPrefix, EqualsPatternClass, List(stype))
- //
- // then creating a Typed pattern and rebinding.
- //
- //   val newpat = Typed(EmptyTree, TypeTree(tpe)) setType tpe
- //
- // This is also how Select(qual, name) is handled.
- object ApplyPattern {
- def apply(x: Apply): Pattern = {
- val Apply(fn, args) = x
- def isModule = x.symbol.isModule || x.tpe.termSymbol.isModule
-
- if (fn.isType) {
- if (isTupleType(fn.tpe)) TuplePattern(x)
- else ConstructorPattern(x)
- }
- else if (args.isEmpty) {
- if (isModule) ObjectPattern(x)
- else fn match {
- case _: Ident => ApplyIdentPattern(x)
- case _: Select => ApplySelectPattern(x)
- }
- }
- else abortUnknownTree(x)
- }
- }
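For illustration, a standalone example of the surface behaviour that the comment above encodes internally: a pattern naming a stable identifier is compiled to an equality test against that value, which the old matcher expressed through a singleton type wrapped in EqualsPatternClass. The object and names below are invented.

// Hypothetical, self-contained: stable-identifier patterns behave as equality checks.
object StableIdMatch {
  case object Red
  def describe(x: Any): String = x match {
    case Red => "the Red singleton"   // an equality check against the stable value Red
    case _   => "something else"
  }
}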
-
- /** Some intermediate pattern classes with shared structure **/
-
- sealed trait SelectPattern extends NamePattern {
- def select: Select
- lazy val Select(qualifier, name) = select
- def pathSegments = getPathSegments(tree)
- def backticked: Option[String] = qualifier match {
- case _: This if nme.isVariableName(name) => Some("`%s`".format(name))
- case _ => None
- }
- override def covers(sym: Symbol) = newMatchesPattern(sym, tree.tpe)
- protected def getPathSegments(t: Tree): List[Name] = t match {
- case Select(q, name) => name :: getPathSegments(q)
- case Apply(f, Nil) => getPathSegments(f)
- case _ => Nil
- }
- }
-
- sealed trait NamePattern extends Pattern {
- def name: Name
- override def sufficientType = tpe.narrow
- override def simplify(pv: PatternVar) = this.rebindToEqualsCheck()
- override def description = name.toString
- }
-
- sealed trait UnapplyPattern extends Pattern {
- lazy val UnApply(unfn, args) = tree
- lazy val Apply(fn, _) = unfn
- lazy val MethodType(List(arg, _*), _) = fn.tpe
-
- // Covers if the symbol matches the unapply method's argument type,
- // and the return type of the unapply is Some.
- override def covers(sym: Symbol) = newMatchesPattern(sym, arg.tpe)
-
- // TODO: for alwaysCovers:
- // fn.tpe.finalResultType.typeSymbol == SomeClass
-
- override def necessaryType = arg.tpe
- override def subpatternsForVars = args match {
- case List(ArrayValue(elemtpe, elems)) => toPats(elems)
- case _ => toPats(args)
- }
-
- def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args)
- def resTypesString = resTypes match {
- case Nil => "Boolean"
- case xs => xs.mkString(", ")
- }
- }
-
- sealed trait ApplyPattern extends Pattern {
- lazy val Apply(fn, args) = tree
- override def subpatternsForVars: List[Pattern] = toPats(args)
-
- override def dummies =
- if (!this.isCaseClass) Nil
- else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size)
-
- def isConstructorPattern = fn.isType
- override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe)
- }
-
- sealed abstract class Pattern extends PatternBindingLogic {
- def tree: Tree
-
- // returns either a simplification of this pattern or identity.
- def simplify(pv: PatternVar): Pattern = this
-
- // the right number of dummies for this pattern
- def dummies: List[Pattern] = Nil
-
- // Is this a default pattern (untyped "_" or an EmptyTree inserted by the matcher)
- def isDefault = false
-
- // what type must a scrutinee have to have any chance of matching this pattern?
- def necessaryType = tpe
-
- // what type could a scrutinee have which would automatically indicate a match?
- // (nullness and guards will still be checked.)
- def sufficientType = tpe
-
- // the subpatterns for this pattern (at the moment, that means constructor arguments)
- def subpatterns(pm: MatchMatrix#PatternMatch): List[Pattern] = pm.dummies
-
- // if this pattern should be considered to cover the given symbol
- def covers(sym: Symbol): Boolean = newMatchesPattern(sym, sufficientType)
- def newMatchesPattern(sym: Symbol, pattp: Type) = {
- debugging("[" + kindString + "] Does " + pattp + " cover " + sym + " ? ") {
- (sym.isModuleClass && (sym.tpe.typeSymbol eq pattp.typeSymbol)) ||
- (sym.tpe.baseTypeSeq exists (_ matchesPattern pattp))
- }
- }
-
- def sym = tree.symbol
- def tpe = tree.tpe
- def isEmpty = tree.isEmpty
-
- def isModule = sym.isModule || tpe.termSymbol.isModule
- def isCaseClass = tpe.typeSymbol.isCase
- def isObject = (sym != null) && (sym != NoSymbol) && tpe.prefix.isStable // XXX not entire logic
-
- def hasStar = false
-
- def setType(tpe: Type): this.type = {
- tree setType tpe
- this
- }
-
- def equalsCheck =
- tracing("equalsCheck")(
- if (sym.isValue) singleType(NoPrefix, sym)
- else tpe.narrow
- )
-
- /** Standard methods **/
- override def equals(other: Any) = other match {
- case x: Pattern => this.boundTree == x.boundTree
- case _ => super.equals(other)
- }
- override def hashCode() = boundTree.hashCode()
- def description = super.toString
-
- final override def toString = description
-
- def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType)
- def kindString = ""
- }
-
- /*** Extractors ***/
-
- object UnapplyParamType {
- def unapply(x: Tree): Option[Type] = condOpt(unbind(x)) {
- case UnApply(Apply(fn, _), _) => fn.tpe match {
- case m: MethodType => m.paramTypes.head
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala
index 00a9f3b39c..817a4a5c88 100644
--- a/src/compiler/scala/tools/nsc/package.scala
+++ b/src/compiler/scala/tools/nsc/package.scala
@@ -6,14 +6,23 @@
package scala.tools
package object nsc {
+ type Mode = scala.reflect.internal.Mode
+ val Mode = scala.reflect.internal.Mode
+
+ def EXPRmode = Mode.EXPRmode
+
type Phase = scala.reflect.internal.Phase
val NoPhase = scala.reflect.internal.NoPhase
+ type Variance = scala.reflect.internal.Variance
+ val Variance = scala.reflect.internal.Variance
+
type FatalError = scala.reflect.internal.FatalError
val FatalError = scala.reflect.internal.FatalError
type MissingRequirementError = scala.reflect.internal.MissingRequirementError
val MissingRequirementError = scala.reflect.internal.MissingRequirementError
- val ListOfNil = List(Nil)
+ @deprecated("Use scala.reflect.internal.util.ListOfNil", "2.11.0")
+ lazy val ListOfNil = scala.reflect.internal.util.ListOfNil
}
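For illustration, a minimal sketch that relies only on the forwarders introduced above; the object name TypecheckHint is invented and not part of this change.

// Hypothetical usage of the new package-object forwarders; only the aliases are exercised.
import scala.tools.nsc.{ Mode, EXPRmode }

object TypecheckHint {
  // Mode and EXPRmode now forward to scala.reflect.internal.Mode
  val defaultMode: Mode = EXPRmode
}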
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 2050ce7ffd..7837f9a11a 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -6,14 +6,15 @@
package scala.tools.nsc
package plugins
-import io.{ File, Path, Jar }
-import java.net.URLClassLoader
-import java.util.jar.JarFile
+import scala.tools.nsc.io.{ Jar }
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.reflect.io.{ Directory, File, Path }
+import java.io.InputStream
import java.util.zip.ZipException
import scala.collection.mutable
import mutable.ListBuffer
-import scala.xml.XML
+import scala.util.{ Try, Success, Failure }
/** Information about a plugin loaded from a jar file.
*
@@ -37,14 +38,35 @@ abstract class Plugin {
val description: String
/** The compiler that this plugin uses. This is normally equated
- * to a constructor parameter in the concrete subclass. */
+ * to a constructor parameter in the concrete subclass.
+ */
val global: Global
- /** Handle any plugin-specific options. The `-P:plugname:` part
- * will not be present. */
- def processOptions(options: List[String], error: String => Unit) {
- if (!options.isEmpty)
- error("Error: " + name + " has no options")
+ def options: List[String] = {
+ // Process plugin options of form plugin:option
+ def namec = name + ":"
+ global.settings.pluginOptions.value filter (_ startsWith namec) map (_ stripPrefix namec)
+ }
+
+ /** Handle any plugin-specific options.
+ * The user writes `-P:plugname:opt1,opt2`,
+ * but the plugin sees `List(opt1, opt2)`.
+ * The plugin can opt out of further processing
+ * by returning false. For example, if the plugin
+ * has an "enable" flag, now would be a good time
+ * to sit on the bench.
+ * @param options plugin arguments
+ * @param error error function
+ * @return true to continue, or false to opt out
+ */
+ def init(options: List[String], error: String => Unit): Boolean = {
+ processOptions(options, error)
+ true
+ }
+
+ @deprecated("use Plugin#init instead", since="2.11")
+ def processOptions(options: List[String], error: String => Unit): Unit = {
+ if (!options.isEmpty) error(s"Error: $name takes no options")
}
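For illustration, a hypothetical plugin built against the new options/init contract; the names DemoPlugin, "demo" and the "enable" option are invented, and the component list is left empty.

// Hypothetical plugin using the new init hook; returning false asks the compiler to drop it.
import scala.tools.nsc.Global
import scala.tools.nsc.plugins.{ Plugin, PluginComponent }

class DemoPlugin(val global: Global) extends Plugin {
  val name = "demo"
  val description = "illustrates the init/options contract"
  val components: List[PluginComponent] = Nil

  override def init(options: List[String], error: String => Unit): Boolean = {
    val unknown = options filterNot (_ == "enable")
    unknown foreach (o => error(s"Option not understood: $o"))
    options contains "enable"   // sit on the bench unless explicitly enabled
  }
}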
/** A description of this plugin's options, suitable as a response
@@ -63,90 +85,116 @@ object Plugin {
private val PluginXML = "scalac-plugin.xml"
- /** Create a class loader with the specified file plus
+ /** Create a class loader with the specified locations plus
* the loader that loaded the Scala compiler.
*/
- private def loaderFor(jarfiles: Seq[Path]): ClassLoader = {
+ private def loaderFor(locations: Seq[Path]): ScalaClassLoader = {
val compilerLoader = classOf[Plugin].getClassLoader
- val jarurls = jarfiles map (_.toURL)
+ val urls = locations map (_.toURL)
- new URLClassLoader(jarurls.toArray, compilerLoader)
+ ScalaClassLoader fromURLs (urls, compilerLoader)
}
- /** Try to load a plugin description from the specified
- * file, returning <code>None</code> if it does not work.
+ /** Try to load a plugin description from the specified location.
*/
- private def loadDescription(jarfile: Path): Option[PluginDescription] =
- // XXX Return to this once we have some ARM support
- if (!jarfile.exists) None
- else try {
- val jar = new JarFile(jarfile.jfile)
-
- try {
- jar getEntry PluginXML match {
- case null => None
- case entry =>
- val in = jar getInputStream entry
- val packXML = XML load in
- in.close()
-
- PluginDescription fromXML packXML
- }
- }
- finally jar.close()
- }
- catch {
- case _: ZipException => None
+ private def loadDescriptionFromJar(jarp: Path): Try[PluginDescription] = {
+ // XXX Return to this once we have more ARM support
+ def read(is: Option[InputStream]) = is match {
+ case None => throw new PluginLoadException(jarp.path, s"Missing $PluginXML in $jarp")
+ case Some(is) => PluginDescription.fromXML(is)
}
+ Try(new Jar(jarp.jfile).withEntryStream(PluginXML)(read))
+ }
+
+ private def loadDescriptionFromFile(f: Path): Try[PluginDescription] =
+ Try(PluginDescription.fromXML(new java.io.FileInputStream(f.jfile)))
type AnyClass = Class[_]
- /** Loads a plugin class from the named jar file.
- *
- * @return `None` if the jar file has no plugin in it or
- * if the plugin is badly formed.
+ /** Use a class loader to load the plugin class.
*/
- def loadFrom(jarfile: Path, loader: ClassLoader): Option[AnyClass] =
- loadDescription(jarfile) match {
- case None =>
- println("Warning: could not load descriptor for plugin %s".format(jarfile))
- None
- case Some(pdesc) =>
- try Some(loader loadClass pdesc.classname) catch {
- case _: Exception =>
- println("Warning: class not found for plugin in %s (%s)".format(jarfile, pdesc.classname))
- None
- }
+ def load(classname: String, loader: ClassLoader): Try[AnyClass] = {
+ import scala.util.control.NonFatal
+ try {
+ Success[AnyClass](loader loadClass classname)
+ } catch {
+ case NonFatal(e) =>
+ Failure(new PluginLoadException(classname, s"Error: unable to load class: $classname"))
+ case e: NoClassDefFoundError =>
+ Failure(new PluginLoadException(classname, s"Error: class not found: ${e.getMessage} required by $classname"))
}
+ }
- /** Load all plugins found in the argument list, both in the
- * jar files explicitly listed, and in the jar files in the
- * directories specified. Skips all plugins in `ignoring`.
- * A single classloader is created and used to load all of them.
+ /** Load all plugins specified by the arguments.
+ * Each location of `paths` must be a valid plugin archive or exploded archive.
+ * Each of `paths` must define one plugin.
+ * Each of `dirs` may be a directory containing arbitrary plugin archives.
+ * Skips all plugins named in `ignoring`.
+ * A classloader is created to load each plugin.
*/
def loadAllFrom(
- jars: List[Path],
+ paths: List[List[Path]],
dirs: List[Path],
- ignoring: List[String]): List[AnyClass] =
+ ignoring: List[String]): List[Try[AnyClass]] =
{
- val alljars = (jars ::: (for {
- dir <- dirs if dir.isDirectory
- entry <- dir.toDirectory.files.toList sortBy (_.name)
-// was: if Path.isJarOrZip(entry)
- if Jar.isJarOrZip(entry)
- pdesc <- loadDescription(entry)
- if !(ignoring contains pdesc.name)
- } yield entry)).distinct
-
- val loader = loaderFor(alljars)
- (alljars map (loadFrom(_, loader))).flatten
+ // List[(jar, Try(descriptor))] in dir
+ def scan(d: Directory) =
+ d.files.toList sortBy (_.name) filter (Jar isJarOrZip _) map (j => (j, loadDescriptionFromJar(j)))
+
+ type PDResults = List[Try[(PluginDescription, ScalaClassLoader)]]
+
+ // scan plugin dirs for jars containing plugins, ignoring dirs with none and other jars
+ val fromDirs: PDResults = dirs filter (_.isDirectory) flatMap { d =>
+ scan(d.toDirectory) collect {
+ case (j, Success(pd)) => Success((pd, loaderFor(Seq(j))))
+ }
+ }
+
+ // scan jar paths for plugins, taking the first plugin you find.
+ // a path element can be either a plugin.jar or an exploded dir.
+ def findDescriptor(ps: List[Path]) = {
+ def loop(qs: List[Path]): Try[PluginDescription] = qs match {
+ case Nil => Failure(new MissingPluginException(ps))
+ case p :: rest =>
+ if (p.isDirectory) loadDescriptionFromFile(p.toDirectory / PluginXML)
+ else if (p.isFile) loadDescriptionFromJar(p.toFile)
+ else loop(rest)
+ }
+ loop(ps)
+ }
+ val fromPaths: PDResults = paths map (p => (p, findDescriptor(p))) map {
+ case (p, Success(pd)) => Success((pd, loaderFor(p)))
+ case (_, Failure(e)) => Failure(e)
+ }
+
+ val seen = mutable.HashSet[String]()
+ val enabled = (fromPaths ::: fromDirs) map {
+ case Success((pd, loader)) if seen(pd.classname) =>
+ // a nod to SI-7494, take the plugin classes distinctly
+ Failure(new PluginLoadException(pd.name, s"Ignoring duplicate plugin ${pd.name} (${pd.classname})"))
+ case Success((pd, loader)) if ignoring contains pd.name =>
+ Failure(new PluginLoadException(pd.name, s"Disabling plugin ${pd.name}"))
+ case Success((pd, loader)) =>
+ seen += pd.classname
+ Plugin.load(pd.classname, loader)
+ case Failure(e) =>
+ Failure(e)
+ }
+ enabled // distinct and not disabled
}
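For illustration, a hedged sketch of consuming the Try results returned by loadAllFrom; the jar name and the helper classesIn are placeholders.

import scala.reflect.io.Path
import scala.tools.nsc.plugins.Plugin

object LoadSketch {
  // Partition successes from failures instead of aborting on the first bad archive.
  def classesIn(jar: String): List[Class[_]] = {
    val results = Plugin.loadAllFrom(paths = List(List(Path(jar))), dirs = Nil, ignoring = Nil)
    val (ok, failed) = results partition (_.isSuccess)
    failed foreach (f => Console.err.println(f.failed.get.getMessage))
    ok map (_.get)
  }
}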
/** Instantiate a plugin class, given the class and
* the compiler it is to be used in.
*/
def instantiate(clazz: AnyClass, global: Global): Plugin = {
- val constructor = clazz getConstructor classOf[Global]
- (constructor newInstance global).asInstanceOf[Plugin]
+ (clazz getConstructor classOf[Global] newInstance global).asInstanceOf[Plugin]
}
}
+
+class PluginLoadException(val path: String, message: String, cause: Exception) extends Exception(message, cause) {
+ def this(path: String, message: String) = this(path, message, null)
+}
+
+class MissingPluginException(path: String) extends PluginLoadException(path, s"No plugin in path $path") {
+ def this(paths: List[Path]) = this(paths mkString File.pathSeparator)
+}
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
index 4d98b2563c..a6df08c331 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
@@ -15,11 +15,13 @@ package plugins
*/
abstract class PluginComponent extends SubComponent {
- /** Internal flag to tell external from internal phases */
+ /** By definition, plugin phases are externally provided. */
final override val internal = false
- /** Phases supplied by plugins should not be given the runsRightAfter constraint,
- * but can override it */
+ /** Only plugins are granted a reprieve from specifying whether they follow. */
val runsRightAfter: Option[String] = None
+ /** Useful for -Xshow-phases. */
+ def description: String = ""
+
}
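For illustration, a hypothetical component that fills in the new description hook; real components are normally nested inside their Plugin so that global can be pinned to the enclosing plugin's instance, and every name below is invented.

import scala.tools.nsc.Phase
import scala.tools.nsc.plugins.PluginComponent

abstract class DemoComponent extends PluginComponent {
  val phaseName = "demo-phase"
  override val description = "does nothing, but shows up in -Xshow-phases"
  val runsAfter = List("typer")

  def newPhase(prev: Phase): Phase = new StdPhase(prev) {
    def apply(unit: global.CompilationUnit): Unit = ()   // no-op per compilation unit
  }
}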
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
index bd567400fb..bf78c93fcc 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
@@ -6,70 +6,50 @@
package scala.tools.nsc
package plugins
-import scala.xml.{Node,NodeSeq}
+import scala.reflect.internal.util.StringContextStripMarginOps
/** A description of a compiler plugin, suitable for serialization
* to XML for inclusion in the plugin's .jar file.
*
* @author Lex Spoon
* @version 1.0, 2007-5-21
+ * @author Adriaan Moors
+ * @version 2.0, 2013
+ * @param name A short name of the plugin, used to identify it in
+ * various contexts. The phase defined by the plugin
+ * should have the same name.
+ * @param classname The name of the main Plugin class.
*/
-abstract class PluginDescription {
-
- /** A short name of the compiler, used to identify it in
- * various contexts. The phase defined by the plugin
- * should have the same name.
- */
- val name: String
-
- /** The name of the main class for the plugin */
- val classname: String
-
- /** An XML representation of this description. It can be
- * read back using <code>PluginDescription.fromXML</code>.
+case class PluginDescription(name: String, classname: String) {
+ /** An XML representation of this description.
* It should be stored inside the jar archive file.
*/
- def toXML: Node = {
- <plugin>
- <name>{name}</name>
- <classname>{classname}</classname>
- </plugin>
- }
+ def toXML: String =
+ sm"""<plugin>
+ | <name>${name}</name>
+ | <classname>${classname}</classname>
+ |</plugin>"""
}
/** Utilities for the PluginDescription class.
*
- * @author Lex Spoon
- * @version 1.0, 2007-5-21
+ * @author Lex Spoon
+ * @version 1.0, 2007-5-21
+ * @author Adriaan Moors
+ * @version 2.0, 2013
*/
object PluginDescription {
-
- def fromXML(xml: Node): Option[PluginDescription] = {
- // check the top-level tag
- xml match {
- case <plugin>{_*}</plugin> => ()
- case _ => return None
- }
- // extract one field
- def getField(field: String): Option[String] = {
- val text = (xml \\ field).text.trim
- if (text == "") None else Some(text)
- }
-
- // extract the required fields
- val name1 = getField("name") match {
- case None => return None
- case Some(str) => str
- }
- val classname1 = getField("classname") match {
- case None => return None
- case Some(str) => str
- }
-
- Some(new PluginDescription {
- val name = name1
- val classname = classname1
- })
+ private def text(ns: org.w3c.dom.NodeList): String =
+ if (ns.getLength == 1) ns.item(0).getTextContent.trim
+ else throw new RuntimeException("Bad plugin descriptor.")
+
+ def fromXML(xml: java.io.InputStream): PluginDescription = {
+ import javax.xml.parsers.DocumentBuilderFactory
+ val root = DocumentBuilderFactory.newInstance.newDocumentBuilder.parse(xml).getDocumentElement
+ root.normalize()
+ if (root.getNodeName != "plugin")
+ throw new RuntimeException("Plugin descriptor root element must be <plugin>.")
+
+ PluginDescription(text(root.getElementsByTagName("name")), text(root.getElementsByTagName("classname")))
}
-
}
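For illustration, a round-trip sketch: the string produced by toXML parses back with fromXML; the plugin and class names are placeholders.

import java.io.ByteArrayInputStream
import scala.tools.nsc.plugins.PluginDescription

object DescriptorRoundTrip {
  def main(args: Array[String]): Unit = {
    val pd = PluginDescription(name = "demo", classname = "demo.DemoPlugin")
    val in = new ByteArrayInputStream(pd.toXML.getBytes("UTF-8"))
    assert(PluginDescription.fromXML(in) == pd)   // case-class equality after the round trip
  }
}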
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala b/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
deleted file mode 100644
index c5da24993e..0000000000
--- a/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Lex Spoon
- */
-
-package scala.tools.nsc
-package plugins
-
-/** ...
- *
- * @author Lex Spoon
- * @version 1.0, 2007-5-21
- */
-class PluginLoadException(filename: String, cause: Exception)
-extends Exception(cause)
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 736bd826e4..12f9aeba27 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -7,7 +7,9 @@
package scala.tools.nsc
package plugins
-import io.{ File, Path }
+import scala.reflect.io.{ File, Path }
+import scala.tools.nsc.util.ClassPath
+import scala.tools.util.PathResolver.Defaults
/** Support for run-time loading of compiler plugins.
*
@@ -15,8 +17,7 @@ import io.{ File, Path }
* @version 1.1, 2009/1/2
* Updated 2009/1/2 by Anders Bach Nielsen: Added features to implement SIP 00002
*/
-trait Plugins {
- self: Global =>
+trait Plugins { global: Global =>
/** Load a rough list of the plugins. For speed, it
* does not instantiate a compiler run. Therefore it cannot
@@ -24,9 +25,21 @@ trait Plugins {
* filtered from the final list of plugins.
*/
protected def loadRoughPluginsList(): List[Plugin] = {
- val jars = settings.plugin.value map Path.apply
- val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply
- val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
+ def asPath(p: String) = ClassPath split p
+ val paths = settings.plugin.value filter (_ != "") map (s => asPath(s) map Path.apply)
+ val dirs = {
+ def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s
+ asPath(settings.pluginsDir.value) map injectDefault map Path.apply
+ }
+ val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value)
+ val (goods, errors) = maybes partition (_.isSuccess)
+ // Explicit parameterization of recover to suppress -Xlint warning about inferred Any
+ errors foreach (_.recover[Any] {
+ // legacy behavior ignores altogether, so at least warn devs
+ case e: MissingPluginException => if (global.isDeveloper) warning(e.getMessage)
+ case e: Exception => inform(e.getMessage)
+ })
+ val classes = goods map (_.get) // flatten
// Each plugin must only be instantiated once. A common pattern
// is to register annotation checkers during object construction, so
@@ -34,7 +47,7 @@ trait Plugins {
classes map (Plugin.instantiate(_, this))
}
- protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList
+ protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList()
/** Load all available plugins. Skips plugins that
* either have the same name as another one, or which
@@ -55,7 +68,7 @@ trait Plugins {
def withPlug = plug :: pick(tail, plugNames + plug.name, phaseNames ++ plugPhaseNames)
lazy val commonPhases = phaseNames intersect plugPhaseNames
- def note(msg: String): Unit = if (settings.verbose.value) inform(msg format plug.name)
+ def note(msg: String): Unit = if (settings.verbose) inform(msg format plug.name)
def fail(msg: String) = { note(msg) ; withoutPlug }
if (plugNames contains plug.name)
@@ -72,30 +85,21 @@ trait Plugins {
val plugs = pick(roughPluginsList, Set(), (phasesSet map (_.phaseName)).toSet)
- /** Verify requirements are present. */
+ // Verify required plugins are present.
for (req <- settings.require.value ; if !(plugs exists (_.name == req)))
globalError("Missing required plugin: " + req)
- /** Process plugin options. */
- def namec(plug: Plugin) = plug.name + ":"
- def optList(xs: List[String], p: Plugin) = xs filter (_ startsWith namec(p))
- def doOpts(p: Plugin): List[String] =
- optList(settings.pluginOptions.value, p) map (_ stripPrefix namec(p))
-
- for (p <- plugs) {
- val opts = doOpts(p)
- if (!opts.isEmpty)
- p.processOptions(opts, globalError)
- }
-
- /** Verify no non-existent plugin given with -P */
- for (opt <- settings.pluginOptions.value ; if plugs forall (p => optList(List(opt), p).isEmpty))
- globalError("bad option: -P:" + opt)
+ // Verify no non-existent plugin given with -P
+ for {
+ opt <- settings.pluginOptions.value
+ if !(plugs exists (opt startsWith _.name + ":"))
+ } globalError("bad option: -P:" + opt)
- plugs
+ // Plugins may opt out, unless we just want to show info
+ plugs filter (p => p.init(p.options, globalError) || (settings.debug && settings.isInfo))
}
- lazy val plugins: List[Plugin] = loadPlugins
+ lazy val plugins: List[Plugin] = loadPlugins()
/** A description of all the plugins that are loaded */
def pluginDescriptions: String =
@@ -106,7 +110,7 @@ trait Plugins {
* @see phasesSet
*/
protected def computePluginPhases(): Unit =
- phasesSet ++= (plugins flatMap (_.components))
+ for (p <- plugins; c <- p.components) addToPhasesSet(c, c.description)
/** Summary of the options for all loaded plugins */
def pluginOptionsHelp: String =
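For illustration, a standalone sketch (plain strings, no compiler types) of the option routing above: each plugin claims the -P options carrying its own name prefix, and unclaimed options are reported as bad -P options. The plugin names and options are invented.

object PluginOptionRouting {
  def optionsFor(plugin: String, pluginOptions: List[String]): List[String] = {
    val prefix = plugin + ":"
    pluginOptions filter (_ startsWith prefix) map (_ stripPrefix prefix)
  }

  def main(args: Array[String]): Unit = {
    val plugins = List("demo", "divbyzero")
    val opts    = List("demo:enable", "typo:oops")          // as given via -P:...
    println(optionsFor("demo", opts))                       // List(enable)
    val bad = opts filterNot (o => plugins exists (p => o startsWith p + ":"))
    bad foreach (o => println(s"bad option: -P:$o"))        // bad option: -P:typo:oops
  }
}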
diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
index c7ee11dec0..16d432438a 100644
--- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
@@ -18,22 +18,20 @@ abstract class AbstractReporter extends Reporter {
def display(pos: Position, msg: String, severity: Severity): Unit
def displayPrompt(): Unit
- private val positions = new mutable.HashMap[Position, Severity]
+ private val positions = mutable.Map[Position, Severity]() withDefaultValue INFO
+ private val messages = mutable.Map[Position, List[String]]() withDefaultValue Nil
override def reset() {
- super.reset
- positions.clear
+ super.reset()
+ positions.clear()
+ messages.clear()
}
private def isVerbose = settings.verbose.value
private def noWarnings = settings.nowarnings.value
private def isPromptSet = settings.prompt.value
- protected def info0(pos: Position, msg: String, _severity: Severity, force: Boolean) {
- val severity =
- if (settings.fatalWarnings.value && _severity == WARNING) ERROR
- else _severity
-
+ protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) {
if (severity == INFO) {
if (isVerbose || force) {
severity.count += 1
@@ -41,19 +39,20 @@ abstract class AbstractReporter extends Reporter {
}
}
else {
- val hidden = testAndLog(pos, severity)
+ val hidden = testAndLog(pos, severity, msg)
if (severity == WARNING && noWarnings) ()
else {
if (!hidden || isPromptSet) {
severity.count += 1
display(pos, msg, severity)
- } else if (settings.debug.value) {
+ }
+ else if (settings.debug) {
severity.count += 1
display(pos, "[ suppressed ] " + msg, severity)
}
if (isPromptSet)
- displayPrompt
+ displayPrompt()
}
}
}
@@ -61,12 +60,20 @@ abstract class AbstractReporter extends Reporter {
/** Logs a position and returns true if it was already logged.
* @note Two positions are considered identical for logging if they have the same point.
*/
- private def testAndLog(pos: Position, severity: Severity): Boolean =
+ private def testAndLog(pos: Position, severity: Severity, msg: String): Boolean =
pos != null && pos.isDefined && {
val fpos = pos.focus
- (positions get fpos) match {
- case Some(level) if level >= severity => true
- case _ => positions += (fpos -> severity) ; false
+ val suppress = positions(fpos) match {
+ case ERROR => true // already error at position
+ case highest if highest > severity => true // already message higher than present severity
+ case `severity` => messages(fpos) contains msg // already issued this exact message
+ case _ => false // good to go
+ }
+
+ suppress || {
+ positions(fpos) = severity
+ messages(fpos) ::= msg
+ false
}
}
}
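For illustration, a standalone sketch of the suppression rule above, with severities reduced to plain Ints (0 = info, 1 = warning, 2 = error): keep the strongest severity seen at a position, drop anything weaker, and drop exact repeats at the same level.

import scala.collection.mutable

object SuppressionSketch {
  private val positions = mutable.Map[Int, Int]() withDefaultValue 0
  private val messages  = mutable.Map[Int, List[String]]() withDefaultValue Nil

  // Returns true if the message should be hidden; otherwise records it.
  def suppress(pos: Int, severity: Int, msg: String): Boolean = {
    val hidden = positions(pos) match {
      case 2                       => true                        // already an error here
      case prev if prev > severity => true                        // something stronger already shown
      case `severity`              => messages(pos) contains msg  // exact repeat
      case _                       => false
    }
    if (!hidden) { positions(pos) = severity; messages(pos) ::= msg }
    hidden
  }
}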
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index e847fb5b86..3f210a543c 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -3,11 +3,13 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package reporters
import java.io.{ BufferedReader, IOException, PrintWriter }
import scala.reflect.internal.util._
+import StringOps._
/**
* This class implements a Reporter that displays messages on a text
@@ -34,15 +36,15 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
}
/** Returns the number of errors issued totally as a string.
- *
- * @param severity ...
- * @return ...
*/
private def getCountString(severity: Severity): String =
StringOps.countElementsAsString((severity).count, label(severity))
/** Prints the message. */
- def printMessage(msg: String) { writer.print(msg + "\n"); writer.flush() }
+ def printMessage(msg: String) {
+ writer print trimAllTrailingSpace(msg) + "\n"
+ writer.flush()
+ }
/** Prints the message with the given position indication. */
def printMessage(posIn: Position, msg: String) {
@@ -52,17 +54,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
printMessage(pos, clabel(severity) + msg)
}
- /**
- * @param pos ...
- */
- def printSourceLine(pos: Position) {
- printMessage(pos.lineContent.stripLineEnd)
- printColumnMarker(pos)
- }
-
/** Prints the column marker of the given position.
- *
- * @param pos ...
*/
def printColumnMarker(pos: Position) =
if (pos.isDefined) { printMessage(" " * (pos.column - 1) + "^") }
@@ -94,6 +86,5 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
}
}
- private def abort(msg: String) = throw new Error(msg)
override def flush() { writer.flush() }
}
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 8871ae6555..68362c066d 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package reporters
import scala.reflect.internal.util._
-import scala.reflect.internal.util.StringOps._
/**
* This interface provides methods to issue information, warning and
@@ -59,15 +58,15 @@ abstract class Reporter {
/** For sending a message which should not be labeled as a warning/error,
* but also shouldn't require -verbose to be visible.
*/
- def echo(msg: String): Unit = info(NoPosition, msg, true)
- def echo(pos: Position, msg: String): Unit = info(pos, msg, true)
+ def echo(msg: String): Unit = info(NoPosition, msg, force = true)
+ def echo(pos: Position, msg: String): Unit = info(pos, msg, force = true)
/** Informational messages, suppressed unless -verbose or force=true. */
def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
/** Warnings and errors. */
- def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false))
- def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false))
+ def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, force = false))
+ def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, force = false))
def incompleteInputError(pos: Position, msg: String): Unit = {
if (incompleteHandled) incompleteHandler(pos, msg)
else error(pos, msg)
@@ -81,10 +80,4 @@ abstract class Reporter {
WARNING.count = 0
cancelled = false
}
-
- // sbt compat
- @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
- def countElementsAsString(n: Int, elements: String): String = StringOps.countElementsAsString(n, elements)
- @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
- def countAsString(n: Int): String = StringOps.countAsString(n)
}
diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
index 34e2a8a96a..04c5bdf824 100644
--- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
@@ -14,7 +14,7 @@ import scala.reflect.internal.util.Position
* console.
*/
class StoreReporter extends Reporter {
- class Info(val pos: Position, val msg: String, val severity: Severity) {
+ case class Info(pos: Position, msg: String, severity: Severity) {
override def toString() = "pos: " + pos + " " + msg + " " + severity
}
val infos = new mutable.LinkedHashSet[Info]
diff --git a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
deleted file mode 100644
index 10e9982594..0000000000
--- a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-package scala.tools.nsc.scratchpad
-
-import java.io.{FileInputStream, InputStreamReader, IOException}
-
-import scala.runtime.ScalaRunTime.stringOf
-import java.lang.reflect.InvocationTargetException
-import scala.reflect.runtime.ReflectionUtils._
-import scala.collection.mutable.ArrayBuffer
-
-@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-class Mixer {
-
- protected val stdSeparator = "//> "
- protected val ctdSeparator = "//| "
- protected val sepColumn = 50
- protected val tabInc = 8
-
- type Comments = Seq[(Int, Array[Char])]
-
- def parseComments(comments: Array[Char]): Iterator[(Int, Array[Char])] = new Iterator[(Int, Array[Char])] {
- var idx = 0
- def hasNext = idx < comments.length
- def next() = {
- val nextSpace = comments indexOf (' ', idx)
- var nextNL = comments indexOf ('\n', nextSpace + 1)
- if (nextNL < 0) nextNL = comments.length
- val result =
- (new String(comments.slice(idx, nextSpace)).toInt, comments.slice(nextSpace + 1, nextNL))
- idx = nextNL + 1
- result
- }
- }
-
- def mix(source: Array[Char], comments: Array[Char]): Array[Char] = {
- val mixed = new ArrayBuffer[Char]
- var written = 0
- def align() = {
- var idx = mixed.lastIndexOf('\n') + 1
- var col = 0
- while (idx < mixed.length) {
- col =
- if (mixed(idx) == '\t') (col / tabInc) * tabInc + tabInc
- else col + 1
- idx += 1
- }
- if (col > sepColumn) {
- mixed += '\n'
- col = 0
- }
- while (col < sepColumn) {
- mixed += ' '
- col += 1
- }
- }
- for ((offset, cs) <- parseComments(comments)) {
- val sep =
- if (written < offset) {
- for (i <- written until offset) mixed += source(i)
- written = offset
- stdSeparator
- } else {
- mixed += '\n'
- ctdSeparator
- }
- align()
- mixed ++= sep ++= cs
- }
- mixed ++= source.view(written, source.length)
- mixed.toArray
- }
-
-}
-
-object Mixer extends Mixer {
-
- def contents(name: String): Array[Char] = {
- val page = new Array[Char](2 << 14)
- val buf = new ArrayBuffer[Char]
- val in = new FileInputStream(name)
- val rdr = new InputStreamReader(in)
- var nread = 0
- do {
- nread = rdr.read(page, 0, page.length)
- buf ++= (if (nread == page.length) page else page.take(nread))
- } while (nread >= 0)
- buf.toArray
- }
-
- def main(args: Array[String]) {
- val mixer = new Mixer
- try {
- require(args.length == 2, "required arguments: file1 file2")
- val source = contents(args(0))
- val comments = contents(args(1))
- val mixed = mixer.mix(source, comments)
- println(mixed.mkString)
- } catch {
- case ex: IOException =>
- println("error: "+ ex.getMessage)
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
deleted file mode 100644
index 01dccd7521..0000000000
--- a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package scala.tools.nsc
-package scratchpad
-
-import java.io.Writer
-import scala.reflect.internal.util.SourceFile
-import scala.reflect.internal.Chars._
-
-@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
-object SourceInserter {
- def stripRight(cs: Array[Char]): Array[Char] = {
- val lines =
- new String(cs) split "\n"
- def leftPart(str: String) =
- (str split """//>|//\|""").head
- def isContinuation(str: String) =
- ((str contains "//>") || (str contains "//|")) && (leftPart(str) forall isWhitespace)
- def stripTrailingWS(str: String) =
- str take (str lastIndexWhere (!isWhitespace(_))) + 1
- val prefixes =
- lines filterNot isContinuation map leftPart map stripTrailingWS
- (prefixes mkString "\n").toArray
- }
-}
diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
index 783e249931..8b897b83b2 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package settings
trait AbsScalaSettings {
@@ -32,11 +33,4 @@ trait AbsScalaSettings {
def PhasesSetting(name: String, descr: String, default: String): PhasesSetting
def StringSetting(name: String, helpArg: String, descr: String, default: String): StringSetting
def PrefixSetting(name: String, prefix: String, descr: String): PrefixSetting
-
- /** **/
- abstract class SettingGroup(val prefix: String) extends AbsSetting {
- def name = prefix
- def helpDescription: String = sys.error("todo")
- def unparse: List[String] = List(name)
- }
}
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index adabeb02a3..4727e6d867 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -47,8 +47,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
}
})
- implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered
-
trait AbsSetting extends Ordered[Setting] with AbsSettingValue {
def name: String
def helpDescription: String
@@ -83,14 +81,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
this
}
- /** If the appearance of the setting should halt argument processing. */
- private var isTerminatorSetting = false
- def shouldStopProcessing = isTerminatorSetting
- def stopProcessing(): this.type = {
- isTerminatorSetting = true
- this
- }
-
/** Issue error and return */
def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x }
@@ -110,6 +100,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
/** Attempt to set from a properties file style property value.
* Currently used by Eclipse SDT only.
+ * !!! Needs test.
*/
def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil)
@@ -133,7 +124,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
case _ => false
}
override def hashCode() = name.hashCode + value.hashCode
- override def toString() = name + " = " + value
+ override def toString() = name + " = " + (if (value == "") "\"\"" else value)
}
trait InternalSetting extends AbsSetting {
diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
deleted file mode 100644
index 0bec113743..0000000000
--- a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-trait AdvancedScalaSettings {
- self: AbsScalaSettings =>
-
- abstract class X extends SettingGroup("-X") {
- val assemextdirs: StringSetting
- val assemname: StringSetting
- val assempath: StringSetting
- val checkinit: BooleanSetting
- val disableassertions: BooleanSetting
- val elidebelow: IntSetting
- val experimental: BooleanSetting
- val future: BooleanSetting
- val generatephasegraph: StringSetting
- val logimplicits: BooleanSetting
- val mainClass: StringSetting
- val migration: BooleanSetting
- val noforwarders: BooleanSetting
- val nojline: BooleanSetting
- val nouescape: BooleanSetting
- val plugin: MultiStringSetting
- val plugindisable: MultiStringSetting
- val pluginlist: BooleanSetting
- val pluginrequire: MultiStringSetting
- val pluginsdir: StringSetting
- val print: PhasesSetting
- val printicode: BooleanSetting
- val printpos: BooleanSetting
- val printtypes: BooleanSetting
- val prompt: BooleanSetting
- val resident: BooleanSetting
- val script: StringSetting
- val showclass: StringSetting
- val showobject: StringSetting
- val showphases: BooleanSetting
- val sourcedir: StringSetting
- val sourcereader: StringSetting
- }
- // def Xexperimental = X.experimental
- // def Xmigration28 = X.migration
- // def Xnojline = X.nojline
- // def Xprint = X.print
- // def Xprintpos = X.printpos
- // def Xshowcls = X.showclass
- // def Xshowobj = X.showobject
- // def assemextdirs = X.assemextdirs
- // def assemname = X.assemname
- // def assemrefs = X.assempath
- // def checkInit = X.checkinit
- // def disable = X.plugindisable
- // def elideLevel = X.elidelevel
- // def future = X.future
- // def genPhaseGraph = X.generatephasegraph
- // def logimplicits = X.logimplicits
- // def noForwarders = X.noforwarders
- // def noassertions = X.disableassertions
- // def nouescape = X.nouescape
- // def plugin = X.plugin
- // def pluginsDir = X.pluginsdir
- // def printtypes = X.printtypes
- // def prompt = X.prompt
- // def require = X.require
- // def resident = X.resident
- // def script = X.script
- // def showPhases = X.showphases
- // def showPlugins = X.pluginlist
- // def sourceReader = X.sourcereader
- // def sourcedir = X.sourcedir
- // def writeICode = X.printicode
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
deleted file mode 100644
index da2c89d707..0000000000
--- a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-/** Taking flag checking to a somewhat higher level. */
-trait AestheticSettings {
- def settings: Settings
-
- // Some(value) if setting has been set by user, None otherwise.
- def optSetting[T](s: Settings#Setting): Option[T] =
- if (s.isDefault) None else Some(s.value.asInstanceOf[T])
-
- def script = optSetting[String](settings.script)
- def encoding = optSetting[String](settings.encoding)
- def sourceReader = optSetting[String](settings.sourceReader)
-
- def debug = settings.debug.value
- def declsOnly = false
- def deprecation = settings.deprecation.value
- def experimental = settings.Xexperimental.value
- def fatalWarnings = settings.fatalWarnings.value
- def feature = settings.feature.value
- def future = settings.future.value
- def logClasspath = settings.Ylogcp.value
- def printStats = settings.Ystatistics.value
- def target = settings.target.value
- def unchecked = settings.unchecked.value
- def verbose = settings.verbose.value
- def virtPatmat = !settings.XoldPatmat.value
-
- /** Derived values */
- def jvm = target startsWith "jvm"
- def msil = target == "msil"
- def verboseDebug = debug && verbose
-}
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 5c852ae07c..8c2b510bfd 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -8,7 +8,7 @@ package nsc
package settings
import util.ClassPath
-import io.{ Directory, Path, AbstractFile }
+import io.{ Path, AbstractFile }
class FscSettings(error: String => Unit) extends Settings(error) {
outer =>
@@ -38,13 +38,13 @@ class FscSettings(error: String => Unit) extends Settings(error) {
private def holdsPath = Set[Settings#Setting](
d, dependencyfile, pluginsDir, Ygenjavap
)
-
+
override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
val (r, args) = super.processArguments(arguments, processAll)
// we need to ensure the files specified with relative locations are absolutized based on the currentDir
(r, args map {a => absolutizePath(a)})
}
-
+
/**
* Take an individual path and if it's not absolute turns it into an absolute path based on currentDir.
* If it's already absolute then it's left alone.
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index e4f99474e1..0536be92cf 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -9,9 +9,9 @@ package nsc
package settings
import io.{ AbstractFile, Jar, Path, PlainFile, VirtualDirectory }
-import scala.reflect.internal.util.StringOps
-import scala.collection.mutable.ListBuffer
+import scala.collection.generic.Clearable
import scala.io.Source
+import scala.reflect.internal.util.StringOps
import scala.reflect.{ ClassTag, classTag }
/** A mutable Settings object.
@@ -63,40 +63,33 @@ class MutableSettings(val errorFn: String => Unit)
(checkDependencies, residualArgs)
case "--" :: xs =>
(checkDependencies, xs)
+ // discard empties, sometimes they appear because of ant or etc.
+ // but discard carefully, because an empty string is valid as an argument
+ // to an option, e.g. -cp "" . So we discard them only when they appear
+ // where an option should be, not where an argument to an option should be.
+ case "" :: xs =>
+ loop(xs, residualArgs)
case x :: xs =>
- val isOpt = x startsWith "-"
- if (isOpt) {
- val newArgs = parseParams(args)
- if (args eq newArgs) {
- errorFn(s"bad option: '$x'")
- (false, args)
- }
- // discard empties, sometimes they appear because of ant or etc.
- // but discard carefully, because an empty string is valid as an argument
- // to an option, e.g. -cp "" . So we discard them only when they appear
- // in option position.
- else if (x == "") {
- loop(xs, residualArgs)
+ if (x startsWith "-") {
+ parseParams(args) match {
+ case newArgs if newArgs eq args => errorFn(s"bad option: '$x'") ; (false, args)
+ case newArgs => loop(newArgs, residualArgs)
}
- else lookupSetting(x) match {
- case Some(s) if s.shouldStopProcessing => (checkDependencies, newArgs)
- case _ => loop(newArgs, residualArgs)
- }
- }
- else {
- if (processAll) loop(xs, residualArgs :+ x)
- else (checkDependencies, args)
}
+ else if (processAll)
+ loop(xs, residualArgs :+ x)
+ else
+ (checkDependencies, args)
}
loop(arguments, Nil)
}
- def processArgumentString(params: String) = processArguments(splitParams(params), true)
+ def processArgumentString(params: String) = processArguments(splitParams(params), processAll = true)
/** Create a new Settings object, copying all user-set values.
*/
def copy(): Settings = {
val s = new Settings()
- s.processArguments(recreateArgs, true)
+ s.processArguments(recreateArgs, processAll = true)
s
}
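For illustration, a simplified standalone sketch of the argument loop above (options are assumed to take no arguments and non-option arguments are always collected): empties are dropped only in option position, "--" ends option processing, and an unrecognized "-" token is an error.

object ArgLoopSketch {
  def loop(args: List[String], isOption: String => Boolean): (Boolean, List[String]) = args match {
    case Nil          => (true, Nil)
    case "--" :: rest => (true, rest)                 // stop; the rest are residual args
    case "" :: rest   => loop(rest, isOption)         // empty token from a build tool
    case a :: rest if a startsWith "-" =>
      if (isOption(a)) loop(rest, isOption)
      else { println(s"bad option: '$a'"); (false, args) }
    case a :: rest =>
      val (ok, residual) = loop(rest, isOption)
      (ok, a :: residual)
  }
}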
@@ -115,7 +108,7 @@ class MutableSettings(val errorFn: String => Unit)
/** Split the given line into parameters.
*/
- def splitParams(line: String) = cmd.Parser.tokenize(line, errorFn)
+ def splitParams(line: String) = cmd.CommandLineParser.tokenize(line, errorFn)
/** Returns any unprocessed arguments.
*/
@@ -134,7 +127,7 @@ class MutableSettings(val errorFn: String => Unit)
// if arg is of form -Xfoo:bar,baz,quux
def parseColonArg(s: String): Option[List[String]] = {
- val (p, args) = StringOps.splitWhere(s, _ == ':', true) getOrElse (return None)
+ val (p, args) = StringOps.splitWhere(s, _ == ':', doDropIndex = true) getOrElse (return None)
// any non-Nil return value means failure and we return s unmodified
tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
@@ -184,7 +177,7 @@ class MutableSettings(val errorFn: String => Unit)
* The class loader defining `T` should provide resources `app.class.path`
* and `boot.class.path`. These resources should contain the application
* and boot classpaths in the same form as would be passed on the command line.*/
- def embeddedDefaults[T: ClassTag]: Unit =
+ def embeddedDefaults[T: ClassTag]: Unit = // called from sbt and repl
embeddedDefaults(classTag[T].runtimeClass.getClassLoader)
/** Initializes these settings for embedded use by a class from the given class loader.
@@ -248,7 +241,7 @@ class MutableSettings(val errorFn: String => Unit)
/** Add a destination directory for sources found under srcdir.
* Both directories should exist.
*/
- def add(srcDir: String, outDir: String): Unit =
+ def add(srcDir: String, outDir: String): Unit = // used in ide?
add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
checkDir(AbstractFile.getDirectory(outDir), outDir))
@@ -256,8 +249,7 @@ class MutableSettings(val errorFn: String => Unit)
private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = (
if (dir != null && dir.isDirectory)
dir
-// was: else if (allowJar && dir == null && Path.isJarOrZip(name, false))
- else if (allowJar && dir == null && Jar.isJarOrZip(name, false))
+ else if (allowJar && dir == null && Jar.isJarOrZip(name, examineFile = false))
new PlainFile(Path(name))
else
throw new FatalError(name + " does not exist or is not a directory")
@@ -268,7 +260,7 @@ class MutableSettings(val errorFn: String => Unit)
*/
def setSingleOutput(outDir: String) {
val dst = AbstractFile.getDirectory(outDir)
- setSingleOutput(checkDir(dst, outDir, true))
+ setSingleOutput(checkDir(dst, outDir, allowJar = true))
}
def getSingleOutput: Option[AbstractFile] = singleOutDir
@@ -331,12 +323,12 @@ class MutableSettings(val errorFn: String => Unit)
case Some(d) =>
d match {
case _: VirtualDirectory | _: io.ZipArchive => Nil
- case _ => List(d.lookupPathUnchecked(srcPath, false))
+ case _ => List(d.lookupPathUnchecked(srcPath, directory = false))
}
case None =>
(outputs filter (isBelow _).tupled) match {
case Nil => Nil
- case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false))
+ case matches => matches.map(_._1.lookupPathUnchecked(srcPath, directory = false))
}
}
}
@@ -390,7 +382,7 @@ class MutableSettings(val errorFn: String => Unit)
def max = range map (_._2) getOrElse IntMax
override def value_=(s: Int) =
- if (isInputValid(s)) super.value_=(s) else errorMsg
+ if (isInputValid(s)) super.value_=(s) else errorMsg()
// Validate that min and max are consistent
assert(min <= max)
@@ -422,7 +414,7 @@ class MutableSettings(val errorFn: String => Unit)
if (args.isEmpty) errorAndValue("missing argument", None)
else parseArgument(args.head) match {
case Some(i) => value = i ; Some(args.tail)
- case None => errorMsg ; None
+ case None => errorMsg() ; None
}
def unparse: List[String] =
@@ -443,7 +435,7 @@ class MutableSettings(val errorFn: String => Unit)
def tryToSet(args: List[String]) = { value = true ; Some(args) }
def unparse: List[String] = if (value) List(name) else Nil
- override def tryToSetFromPropertyValue(s : String) {
+ override def tryToSetFromPropertyValue(s : String) { // used from ide
value = s.equalsIgnoreCase("true")
}
}
@@ -494,8 +486,6 @@ class MutableSettings(val errorFn: String => Unit)
descr: String,
default: ScalaVersion)
extends Setting(name, descr) {
- import ScalaVersion._
-
type T = ScalaVersion
protected var v: T = NoScalaVersion
@@ -503,14 +493,14 @@ class MutableSettings(val errorFn: String => Unit)
value = default
Some(args)
}
-
+
override def tryToSetColon(args: List[String]) = args match {
case Nil => value = default; Some(Nil)
case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
}
-
+
override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
-
+
def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
withHelpSyntax(s"${name}:<${arg}>")
@@ -553,7 +543,7 @@ class MutableSettings(val errorFn: String => Unit)
name: String,
val arg: String,
descr: String)
- extends Setting(name, descr) {
+ extends Setting(name, descr) with Clearable {
type T = List[String]
protected var v: T = Nil
def appendToValue(str: String) { value ++= List(str) }
@@ -565,7 +555,8 @@ class MutableSettings(val errorFn: String => Unit)
Some(rest)
}
override def tryToSetColon(args: List[String]) = tryToSet(args)
- override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList)
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide
+ def clear(): Unit = (v = Nil)
def unparse: List[String] = value map (name + ":" + _)
withHelpSyntax(name + ":<" + arg + ">")
@@ -599,7 +590,7 @@ class MutableSettings(val errorFn: String => Unit)
}
def unparse: List[String] =
if (value == default) Nil else List(name + ":" + value)
- override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil)
+ override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) // used from ide
withHelpSyntax(name + ":<" + helpArg + ">")
}
@@ -619,44 +610,49 @@ class MutableSettings(val errorFn: String => Unit)
name: String,
descr: String,
default: String
- ) extends Setting(name, mkPhasesHelp(descr, default)) {
+ ) extends Setting(name, mkPhasesHelp(descr, default)) with Clearable {
private[nsc] def this(name: String, descr: String) = this(name, descr, "")
type T = List[String]
- protected var v: T = Nil
- override def value = if (v contains "all") List("all") else super.value
- private lazy val (numericValues, stringValues) =
- value filterNot (_ == "" ) partition (_ forall (ch => ch.isDigit || ch == '-'))
-
- /** A little ad-hoc parsing. If a string is not the name of a phase, it can also be:
- * a phase id: 5
- * a phase id range: 5-10 (inclusive of both ends)
- * a range with no start: -5 means up to and including 5
- * a range with no end: 10- means 10 until completion.
- */
- private def stringToPhaseIdTest(s: String): Int => Boolean = (s indexOf '-') match {
- case -1 => (_ == s.toInt)
- case 0 => (_ <= s.tail.toInt)
- case idx =>
- if (s.last == '-') (_ >= s.init.toInt)
- else (s splitAt idx) match {
- case (s1, s2) => (id => id >= s1.toInt && id <= s2.tail.toInt)
- }
- }
- private lazy val phaseIdTest: Int => Boolean =
- (numericValues map stringToPhaseIdTest) match {
- case Nil => _ => false
- case fns => fns.reduceLeft((f1, f2) => id => f1(id) || f2(id))
+ private[this] var _v: T = Nil
+ private[this] var _numbs: List[(Int,Int)] = Nil
+ private[this] var _names: T = Nil
+ //protected var v: T = Nil
+ protected def v: T = _v
+ protected def v_=(t: T): Unit = {
+ // throws NumberFormat on bad range (like -5-6)
+ def asRange(s: String): (Int,Int) = (s indexOf '-') match {
+ case -1 => (s.toInt, s.toInt)
+ case 0 => (-1, s.tail.toInt)
+ case i if s.last == '-' => (s.init.toInt, Int.MaxValue)
+ case i => (s.take(i).toInt, s.drop(i+1).toInt)
}
+ val numsAndStrs = t filter (_.nonEmpty) partition (_ forall (ch => ch.isDigit || ch == '-'))
+ _numbs = numsAndStrs._1 map asRange
+ _names = numsAndStrs._2
+ _v = t
+ }
+ override def value = if (v contains "all") List("all") else super.value // i.e., v
+ private def numericValues = _numbs
+ private def stringValues = _names
+ private def phaseIdTest(i: Int): Boolean = numericValues exists (_ match {
+ case (min, max) => min <= i && i <= max
+ })
def tryToSet(args: List[String]) =
if (default == "") errorAndValue("missing phase", None)
- else { tryToSetColon(List(default)) ; Some(args) }
+ else tryToSetColon(List(default)) map (_ => args)
+
+ override def tryToSetColon(args: List[String]) = try {
+ args match {
+ case Nil => if (default == "") errorAndValue("missing phase", None)
+ else tryToSetColon(List(default))
+ case xs => value = (value ++ xs).distinct.sorted ; Some(Nil)
+ }
+ } catch { case _: NumberFormatException => None }
+
+ def clear(): Unit = (v = Nil)
- override def tryToSetColon(args: List[String]) = args match {
- case Nil => if (default == "") errorAndValue("missing phase", None) else tryToSetColon(List(default))
- case xs => value = (value ++ xs).distinct.sorted ; Some(Nil)
- }
// we slightly abuse the usual meaning of "contains" here by returning
// true if our phase list contains "all", regardless of the incoming argument
def contains(phName: String) = doAllPhases || containsName(phName)
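For illustration, a standalone sketch of the phase-id syntax handled by asRange above: "5" is a single id, "5-10" an inclusive range, "-5" everything up to 5, "10-" everything from 10 on; a malformed range such as "-5-6" throws NumberFormatException, which the setting turns into a rejected value.

object PhaseRangeSketch {
  def asRange(s: String): (Int, Int) = s.indexOf('-') match {
    case -1                 => (s.toInt, s.toInt)
    case 0                  => (-1, s.tail.toInt)
    case _ if s.last == '-' => (s.init.toInt, Int.MaxValue)
    case i                  => (s.take(i).toInt, s.drop(i + 1).toInt)
  }

  def main(args: Array[String]): Unit = {
    val ranges = List("3", "7-9", "11-") map asRange
    def selected(id: Int) = ranges exists { case (lo, hi) => lo <= id && id <= hi }
    println((1 to 12) filter selected)   // Vector(3, 7, 8, 9, 11, 12)
  }
}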
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index dbfaa2c531..307f42c0bc 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -4,7 +4,8 @@
*/
// $Id$
-package scala.tools
+package scala
+package tools
package nsc
package settings
@@ -38,11 +39,18 @@ trait ScalaSettings extends AbsScalaSettings
protected def futureSettings = List[BooleanSetting]()
/** Enabled under -optimise. */
- protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce)
+ def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization)
+
+ /** If any of these settings is enabled, the compiler should print a message and exit. */
+ def infoSettings = List[Setting](help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph)
+
+ /** Is an info setting set? */
+ def isInfo = infoSettings exists (_.isSetByUser)
/** Internal use - syntax enhancements. */
private class EnableSettings[T <: BooleanSetting](val s: T) {
def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = s.value))
+ def disabling(toDisable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toDisable foreach (_.value = !s.value))
def andThen(f: s.T => Unit): s.type = s withPostSetHook (setting => f(setting.value))
}
private implicit def installEnableSettings[T <: BooleanSetting](s: T) = new EnableSettings(s)
@@ -52,70 +60,72 @@ trait ScalaSettings extends AbsScalaSettings
val jvmargs = PrefixSetting("-J<flag>", "-J", "Pass <flag> directly to the runtime system.")
val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.")
- val toolcp = PathSetting("-toolcp", "Add to the runner classpath.", "")
+ /*val toolcp =*/ PathSetting("-toolcp", "Add to the runner classpath.", "")
val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.")
/**
* Standard settings
*/
// argfiles is only for the help message
- val argfiles = BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
+ /*val argfiles = */ BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
val d = OutputSetting (outputDirs, ".")
val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.")
val language = MultiStringSetting("-language", "feature", "Enable one or more language features.")
+ /*
+ * The previous "-source" option is intended to be used mainly
+     * through this helper.
+ */
+ lazy val isScala211: Boolean = (source.value >= ScalaVersion("2.11.0"))
+
/**
* -X "Advanced" settings
*/
- val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options.")
- val assemname = StringSetting ("-Xassem-name", "file", "(Requires -target:msil) Name of the output assembly.", "").dependsOn(target, "msil")
- val assemrefs = StringSetting ("-Xassem-path", "path", "(Requires -target:msil) List of assemblies referenced by the program.", ".").dependsOn(target, "msil")
- val assemextdirs = StringSetting ("-Xassem-extdirs", "dirs", "(Requires -target:msil) List of directories containing assemblies. default:lib", Defaults.scalaLibDir.path).dependsOn(target, "msil")
- val sourcedir = StringSetting ("-Xsourcedir", "directory", "(Requires -target:msil) Mirror source folder structure in output directory.", ".").dependsOn(target, "msil")
- val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.")
- val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.")
- val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
+ val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options.")
+ val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.")
+ val developer = BooleanSetting ("-Xdev", "Indicates user is a developer - issue warnings about anything which seems amiss")
+ val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.")
+ val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
elidable.MINIMUM, None, elidable.byName get _)
- val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
- val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
- val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
- val logImplicitConv = BooleanSetting ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
- val logReflectiveCalls = BooleanSetting("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
- val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
- val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
- val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration = ScalaVersionSetting("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
- val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
- val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
- val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
- val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
- val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
- val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.")
- val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless the given plugin(s) are available.")
- val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search compiler plugins.", Defaults.scalaPluginPath)
- val Xprint = PhasesSetting ("-Xprint", "Print out program after")
- val writeICode = PhasesSetting ("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
- val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions, as offsets.")
- val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option).")
- val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option).")
- val resident = BooleanSetting ("-Xresident", "Compiler stays resident: read source filenames from standard input.")
- val script = StringSetting ("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "")
- val mainClass = StringSetting ("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "")
- val Xshowcls = StringSetting ("-Xshow-class", "class", "Show internal representation of class.", "")
- val Xshowobj = StringSetting ("-Xshow-object", "object", "Show internal representation of object.", "")
- val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
- val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
-
- val XoldPatmat = BooleanSetting ("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.")
+ val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
+ val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
+ val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
+ val logImplicitConv = BooleanSetting ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
+ val logReflectiveCalls = BooleanSetting ("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
+ val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
+ val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
+ val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
+ val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
+ val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
+ val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
+ val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
+ val plugin = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.")
+ val disable = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.")
+ val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.")
+ val require = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.")
+ val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath)
+ val Xprint = PhasesSetting ("-Xprint", "Print out program after")
+ val writeICode = PhasesSetting ("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
+ val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions, as offsets.")
+ val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option).")
+ val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option).")
+ val resident = BooleanSetting ("-Xresident", "Compiler stays resident: read source filenames from standard input.")
+ val script = StringSetting ("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "")
+ val mainClass = StringSetting ("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "")
+ val Xshowcls = StringSetting ("-Xshow-class", "class", "Show internal representation of class.", "")
+ val Xshowobj = StringSetting ("-Xshow-object", "object", "Show internal representation of object.", "")
+ val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
+ val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
+ val strictInference = BooleanSetting ("-Xstrict-inference", "Don't infer known-unsound types")
+ val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", ScalaVersion("2.11")) withPostSetHook ( _ => isScala211)
+
val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
- val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
- val Xdivergence211 = BooleanSetting ("-Xdivergence211", "Turn on the 2.11 behavior of implicit divergence not terminating recursive implicit searches (SI-7291).")
+ val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
/** Compatibility stubs for options whose value name did
* not previously match the option name.
*/
- def XO = optimise
def debuginfo = g
def dependenciesFile = dependencyfile
def nowarnings = nowarn
@@ -128,10 +138,12 @@ trait ScalaSettings extends AbsScalaSettings
val overrideObjects = BooleanSetting ("-Yoverride-objects", "Allow member objects to be overridden.")
val overrideVars = BooleanSetting ("-Yoverride-vars", "Allow vars to be overridden.")
val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options.")
+ val breakCycles = BooleanSetting ("-Ybreak-cycles", "Attempt to break cycles encountered during typing")
val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after")
val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination.")
+ val YconstOptimization = BooleanSetting ("-Yconst-opt", "Perform optimization with constant values.")
val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees.")
val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL.")
val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination.")
@@ -159,52 +171,62 @@ trait ScalaSettings extends AbsScalaSettings
val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.")
val skip = PhasesSetting ("-Yskip", "Skip")
val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
+ val Ygenasmp = StringSetting ("-Ygen-asmp", "dir", "Generate a parallel output directory of .asmp files (ie ASM Textifier output).", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
- val Ynosqueeze = BooleanSetting ("-Yno-squeeze", "Disable creation of compact code in matching.")
val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _)
val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
- val refinementMethodDispatch
- = ChoiceSetting ("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
- val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
+ val Ymemberpos = StringSetting ("-Yshow-member-pos", "output style", "Show start and end positions of members", "") withPostSetHook (_ => Yrangepos.value = true)
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
- val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
+ val Ymacroexpand = ChoiceSetting ("-Ymacro-expand", "policy", "Control expansion of macros, useful for scaladoc and presentation compiler", List(MacroExpand.Normal, MacroExpand.None, MacroExpand.Discard), MacroExpand.Normal)
+ val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") withDeprecationMessage(s"Use ${Ymacroexpand.name}:${MacroExpand.None}") withPostSetHook(_ => Ymacroexpand.value = MacroExpand.None)
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
- val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
+ val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects")
+ val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "")
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
- val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
+ val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212)
+ val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. See SI-7899.").withDeprecationMessage(removalIn212)
val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
- val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
+ val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.")
+ val YnoLoadImplClass = BooleanSetting ("-Yno-load-impl-class", "Do not load $class.class files.")
val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
+ // the current standard is "inline" but we are moving towards "method"
+ val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "inline")
- def stop = stopAfter
+ private def removalIn212 = "This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug."
/** Area-specific debug output.
*/
- val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
- val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.")
val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.")
val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
- val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.")
val Yissuedebug = BooleanSetting("-Yissue-debug", "Print stack traces when a context issues an error.")
val YmacrodebugLite = BooleanSetting("-Ymacro-debug-lite", "Trace essential macro-related activities.")
val YmacrodebugVerbose = BooleanSetting("-Ymacro-debug-verbose", "Trace all macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions.")
- val Ypmatdebug = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.")
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
- val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.") andThen (interpreter.replProps.debug setValue _)
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
+ val Yquasiquotedebug = BooleanSetting("-Yquasiquote-debug", "Trace quasiquote-related activities.")
+
+ // TODO 2.12 Remove
+ val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.") withDeprecationMessage("Use -Ytyper-debug") enabling(List(Ytyperdebug))
/** Groups of Settings.
*/
val future = BooleanSetting("-Xfuture", "Turn on future language features.") enabling futureSettings
val optimise = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize" enabling optimiseSettings
+ val nooptimise = BooleanSetting("-Ynooptimise", "Clears all the flags set by -optimise. Useful for testing optimizations in isolation.") withAbbreviation "-Ynooptimize" disabling optimise::optimiseSettings
val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enabling experimentalSettings
+ /**
+ * Settings motivated by GenBCode
+ */
+ val Ybackend = ChoiceSetting ("-Ybackend", "choice of bytecode emitter", "Choice of bytecode emitter.",
+ List("GenASM", "GenBCode"),
+ "GenASM")
// Feature extensions
val XmacroSettings = MultiStringSetting("-Xmacro-settings", "option", "Custom settings for macros.")
@@ -227,4 +249,17 @@ trait ScalaSettings extends AbsScalaSettings
/** Test whether this is scaladoc we're looking at */
def isScaladoc = false
+
+ /**
+ * Helper utilities for use by checkConflictingSettings()
+ */
+ def isBCodeActive = !isICodeAskedFor
+ def isBCodeAskedFor = (Ybackend.value != "GenASM")
+ def isICodeAskedFor = ((Ybackend.value == "GenASM") || optimiseSettings.exists(_.value) || writeICode.isSetByUser)
+
+ object MacroExpand {
+ val None = "none"
+ val Normal = "normal"
+ val Discard = "discard"
+ }
}
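
The enabling/disabling combinators added above work through post-set hooks: setting one flag propagates its value (or the negation) to a list of other flags, which is how -Ynooptimise clears everything -optimise turned on. A self-contained toy model of that pattern follows; the Flag class is invented for illustration and is not the compiler's BooleanSetting API:

    object HookDemo {
      final class Flag(val name: String) {
        private var hooks: List[Flag => Unit] = Nil
        private var _value = false
        def value: Boolean = _value
        def value_=(b: Boolean): Unit = { _value = b; hooks foreach (_(this)) }
        def withPostSetHook(f: Flag => Unit): this.type = { hooks ::= f; this }
        // Same shape as EnableSettings.enabling/disabling above.
        def enabling(toEnable: List[Flag]): this.type   = withPostSetHook(s => toEnable foreach (_.value = s.value))
        def disabling(toDisable: List[Flag]): this.type = withPostSetHook(s => toDisable foreach (_.value = !s.value))
      }

      def main(args: Array[String]): Unit = {
        val inline   = new Flag("-Yinline")
        val dce      = new Flag("-Ydead-code")
        val optimise = new Flag("-optimise") enabling List(inline, dce)
        val noopt    = new Flag("-Ynooptimise") disabling (optimise :: List(inline, dce))

        optimise.value = true
        println(List(inline, dce) map (_.value))           // List(true, true)
        noopt.value = true
        println(List(optimise, inline, dce) map (_.value)) // List(false, false, false)
      }
    }
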
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index d6a0149411..4f45043c5e 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -4,13 +4,14 @@
*/
// $Id$
-package scala.tools.nsc.settings
+package scala
+package tools.nsc.settings
/**
* Represents a single Scala version in a manner that
* supports easy comparison and sorting.
*/
-abstract class ScalaVersion extends Ordered[ScalaVersion] {
+sealed abstract class ScalaVersion extends Ordered[ScalaVersion] {
def unparse: String
}
@@ -19,7 +20,7 @@ abstract class ScalaVersion extends Ordered[ScalaVersion] {
*/
case object NoScalaVersion extends ScalaVersion {
def unparse = "none"
-
+
def compare(that: ScalaVersion): Int = that match {
case NoScalaVersion => 0
case _ => 1
@@ -33,7 +34,7 @@ case object NoScalaVersion extends ScalaVersion {
* to segregate builds
*/
case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
- def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
def compare(that: ScalaVersion): Int = that match {
case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
@@ -48,7 +49,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
else build compare thatBuild
case AnyScalaVersion => 1
case NoScalaVersion => -1
- }
+ }
}
/**
@@ -56,7 +57,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
*/
case object AnyScalaVersion extends ScalaVersion {
def unparse = "any"
-
+
def compare(that: ScalaVersion): Int = that match {
case AnyScalaVersion => 0
case _ => -1
@@ -70,7 +71,7 @@ object ScalaVersion {
private val dot = "\\."
private val dash = "\\-"
private def not(s:String) = s"[^${s}]"
- private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
def errorAndValue() = {
@@ -82,41 +83,41 @@ object ScalaVersion {
)
AnyScalaVersion
}
-
+
def toInt(s: String) = s match {
case null | "" => 0
case _ => s.toInt
}
-
+
def isInt(s: String) = util.Try(toInt(s)).isSuccess
-
+
def toBuild(s: String) = s match {
case null | "FINAL" => Final
case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
case _ => Development(s)
}
-
+
try versionString match {
case "none" => NoScalaVersion
case "any" => AnyScalaVersion
- case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
- case _ =>
+ case _ =>
errorAndValue()
} catch {
case e: NumberFormatException => errorAndValue()
}
}
-
- def apply(versionString: String): ScalaVersion =
+
+ def apply(versionString: String): ScalaVersion =
apply(versionString, msg => throw new NumberFormatException(msg))
-
+
/**
* The version of the compiler running now
*/
val current = apply(util.Properties.versionNumberString)
-
+
/**
* The 2.8.0 version.
*/
@@ -126,7 +127,7 @@ object ScalaVersion {
/**
* Represents the data after the dash in major.minor.rev-build
*/
-abstract class ScalaBuild extends Ordered[ScalaBuild] {
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
/**
* Return a version of this build information that can be parsed back into the
* same ScalaBuild
@@ -138,7 +139,7 @@ abstract class ScalaBuild extends Ordered[ScalaBuild] {
*/
case class Development(id: String) extends ScalaBuild {
def unparse = s"-${id}"
-
+
def compare(that: ScalaBuild) = that match {
// sorting two development builds based on id is reasonably valid for two versions created with the same schema
// otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
@@ -154,7 +155,7 @@ case class Development(id: String) extends ScalaBuild {
*/
case object Final extends ScalaBuild {
def unparse = ""
-
+
def compare(that: ScalaBuild) = that match {
case Final => 0
// a final is newer than anything other than a development build or another final
@@ -168,14 +169,14 @@ case object Final extends ScalaBuild {
*/
case class RC(n: Int) extends ScalaBuild {
def unparse = s"-RC${n}"
-
+
def compare(that: ScalaBuild) = that match {
// compare two rcs based on their RC numbers
case RC(thatN) => n - thatN
// an rc is older than anything other than a milestone or another rc
case Milestone(_) => 1
- case _ => -1
- }
+ case _ => -1
+ }
}
/**
@@ -183,12 +184,12 @@ case class RC(n: Int) extends ScalaBuild {
*/
case class Milestone(n: Int) extends ScalaBuild {
def unparse = s"-M${n}"
-
+
def compare(that: ScalaBuild) = that match {
// compare two milestones based on their milestone numbers
case Milestone(thatN) => n - thatN
// a milestone is older than anything other than another milestone
case _ => -1
-
- }
+
+ }
}
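
Because the new -Xsource setting compares against these versions (see isScala211 above), a short usage sketch of the ordering may help. It assumes compilation against scala-compiler so that scala.tools.nsc.settings is on the classpath; the sample version strings are arbitrary:

    object ScalaVersionDemo {
      import scala.tools.nsc.settings.ScalaVersion

      def main(args: Array[String]): Unit = {
        val v2103  = ScalaVersion("2.10.3")
        val v211rc = ScalaVersion("2.11.0-RC1")
        val v211   = ScalaVersion("2.11.0")

        println(v2103 < v211)               // true: 2.10.3 sorts before 2.11.0
        println(v211rc < v211)              // true: an RC build sorts before the final build
        println(ScalaVersion("any") < v211) // true: AnyScalaVersion sorts before any specific version
      }
    }
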
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index e866ad6ae0..37dfafb01c 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -35,22 +35,15 @@ trait StandardScalaSettings {
val feature = BooleanSetting ("-feature", "Emit warning and location for usages of features that should be imported explicitly.")
val g = ChoiceSetting ("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars")
val help = BooleanSetting ("-help", "Print a synopsis of standard options")
- val make = ChoiceSetting ("-make", "policy", "Recompilation detection policy", List("all", "changed", "immediate", "transitive", "transitivenocp"), "all")
- . withDeprecationMessage ("this option is unmaintained. Use sbt or an IDE for selective recompilation.")
val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.")
val optimise: BooleanSetting // depends on post hook which mutates other settings
val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.")
val target = ChoiceSetting ("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
- List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"),
- "jvm-1.6")
+ List("jvm-1.5", "jvm-1.6", "jvm-1.7"), "jvm-1.6")
val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.")
val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.")
val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
+ val usemanifestcp = BooleanSetting ("-usemanifestcp", "Utilize the manifest in classpath resolution.")
val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing.")
val version = BooleanSetting ("-version", "Print product version and exit.")
-
- /** These are @<file> and -Dkey=val style settings, which don't
- * nicely map to identifiers.
- */
- val argfiles: BooleanSetting // exists only to echo help message, should be done differently
}
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index 9f9879210c..791d44153c 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -19,18 +19,17 @@ trait Warnings {
// present form, but have the potential to offer useful info.
protected def allWarnings = lintWarnings ++ List(
warnDeadCode,
- warnSelectNullable,
warnValueDiscard,
warnNumericWiden
)
// These warnings should be pretty quiet unless you're doing
// something inadvisable.
protected def lintWarnings = List(
- // warnDeadCode,
warnInaccessible,
warnNullaryOverride,
warnNullaryUnit,
- warnAdaptedArgs
+ warnAdaptedArgs,
+ warnInferAny
)
// Warning groups.
@@ -38,24 +37,28 @@ trait Warnings {
BooleanSetting("-Xlint", "Enable recommended additional warnings.")
withPostSetHook (_ => lintWarnings foreach (_.value = true))
)
- val warnEverything = (
+
+ /*val warnEverything = */ (
BooleanSetting("-Ywarn-all", "Enable all -Y warnings.")
- withPostSetHook (_ => lintWarnings foreach (_.value = true))
+ withPostSetHook { _ =>
+ lint.value = true
+ allWarnings foreach (_.value = true)
+ }
)
+ private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.")
// Individual warnings.
- val warnSelectNullable = BooleanSetting ("-Xcheck-null", "Warn upon selection of nullable reference.")
val warnAdaptedArgs = BooleanSetting ("-Ywarn-adapted-args", "Warn if an argument list is modified to match the receiver.")
val warnDeadCode = BooleanSetting ("-Ywarn-dead-code", "Warn when dead code is identified.")
val warnValueDiscard = BooleanSetting ("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.")
val warnNumericWiden = BooleanSetting ("-Ywarn-numeric-widen", "Warn when numerics are widened.")
val warnNullaryUnit = BooleanSetting ("-Ywarn-nullary-unit", "Warn when nullary methods return Unit.")
val warnInaccessible = BooleanSetting ("-Ywarn-inaccessible", "Warn about inaccessible types in method signatures.")
- val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override",
- "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.")
+ val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override", "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.")
+ val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.")
// Backward compatibility.
- def Xwarnfatal = fatalWarnings
- def Xchecknull = warnSelectNullable
- def Ywarndeadcode = warnDeadCode
+ @deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal = fatalWarnings // used by sbt
+ @deprecated("This option is being removed", "2.11.0") def Xchecknull = warnSelectNullable // used by ide
+ @deprecated("Use warnDeadCode", "2.11.0") def Ywarndeadcode = warnDeadCode // used by ide
}
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index c7bd678385..c2d0f5ccec 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -6,16 +6,16 @@
package scala.tools.nsc
package symtab
-import scala.reflect.internal.util.BatchSourceFile
import scala.tools.nsc.io.AbstractFile
/** A subclass of SymbolLoaders that implements browsing behavior.
* This class should be used whenever file dependencies and recompile sets
* are managed automatically.
*/
-abstract class BrowsingLoaders extends SymbolLoaders {
- import global._
+abstract class BrowsingLoaders extends GlobalSymbolLoaders {
+ val global: Global
+ import global._
import syntaxAnalyzer.{OutlineParser, MalformedInput}
/** In browse mode, it can happen that an encountered symbol is already
@@ -28,7 +28,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
override protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
completer.sourcefile match {
case Some(src) =>
- (if (member.isModule) member.moduleClass else member).sourceFile = src
+ (if (member.isModule) member.moduleClass else member).associatedFile = src
case _ =>
}
val decls = owner.info.decls
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 9e5186b731..8b739958ff 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -10,10 +10,9 @@ import java.io.IOException
import scala.compat.Platform.currentTime
import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
-import scala.reflect.internal.Flags._
import scala.reflect.internal.MissingRequirementError
import scala.reflect.internal.util.Statistics
-import scala.tools.nsc.io.{ AbstractFile, MsilFile }
+import scala.reflect.io.{ AbstractFile, NoAbstractFile }
/** This class ...
*
@@ -21,8 +20,23 @@ import scala.tools.nsc.io.{ AbstractFile, MsilFile }
* @version 1.0
*/
abstract class SymbolLoaders {
- val global: Global
- import global._
+ val symbolTable: symtab.SymbolTable {
+ def settings: Settings
+ }
+ val platform: backend.Platform {
+ val symbolTable: SymbolLoaders.this.symbolTable.type
+ }
+ import symbolTable._
+ /**
+ * Required by ClassfileParser. Check documentation in that class for details.
+ */
+ def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
+ /**
+ * Should forward to `Run.compileLate`. The more principled fix would be to
+ * determine why this functionality is needed and extract it into a separate
+ * interface.
+ */
+ protected def compileLate(srcfile: AbstractFile): Unit
import SymbolLoadersStats._
protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
@@ -31,6 +45,14 @@ abstract class SymbolLoaders {
member
}
+ protected def signalError(root: Symbol, ex: Throwable) {
+ if (settings.debug) ex.printStackTrace()
+ globalError(ex.getMessage() match {
+ case null => "i/o error while loading " + root.name
+ case msg => "error while loading " + root.name + ", " + msg
+ })
+ }
+
/** Enter class with given `name` into scope of `root`
* and give them `completer` as type.
*/
@@ -68,14 +90,14 @@ abstract class SymbolLoaders {
name+"\none of them needs to be removed from classpath"
)
else if (settings.termConflict.value == "package") {
- global.warning(
+ warning(
"Resolving package/object name conflict in favor of package " +
preExisting.fullName + ". The object will be inaccessible."
)
root.info.decls.unlink(preExisting)
}
else {
- global.warning(
+ warning(
"Resolving package/object name conflict in favor of object " +
preExisting.fullName + ". The package will be inaccessible."
)
@@ -132,20 +154,27 @@ abstract class SymbolLoaders {
/** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
*/
- def initializeFromClassPath(owner: Symbol, classRep: ClassPath[platform.BinaryRepr]#ClassRep) {
+ def initializeFromClassPath(owner: Symbol, classRep: ClassPath[AbstractFile]#ClassRep) {
((classRep.binary, classRep.source) : @unchecked) match {
case (Some(bin), Some(src))
if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
- if (settings.verbose.value) inform("[symloader] picked up newer source file for " + src.path)
- global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
+ if (settings.verbose) inform("[symloader] picked up newer source file for " + src.path)
+ enterToplevelsFromSource(owner, classRep.name, src)
case (None, Some(src)) =>
- if (settings.verbose.value) inform("[symloader] no class, picked up source file for " + src.path)
- global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
+ if (settings.verbose) inform("[symloader] no class, picked up source file for " + src.path)
+ enterToplevelsFromSource(owner, classRep.name, src)
case (Some(bin), _) =>
- global.loaders.enterClassAndModule(owner, classRep.name, platform.newClassLoader(bin))
+ enterClassAndModule(owner, classRep.name, newClassLoader(bin))
}
}
+ /** Create a new loader from a binary classfile.
+   *  This is intended as a hook that allows loading symbols from
+ * files other than .class files.
+ */
+ protected def newClassLoader(bin: AbstractFile): SymbolLoader =
+ new ClassfileLoader(bin)
+
/**
* A lazy type that completes itself by calling parameter doComplete.
* Any linked modules/classes or module classes are also initialized.
@@ -160,7 +189,7 @@ abstract class SymbolLoaders {
def sourcefile: Option[AbstractFile] = None
/**
- * Description of the resource (ClassPath, AbstractFile, MsilFile)
+ * Description of the resource (ClassPath, AbstractFile)
* being processed by this loader
*/
protected def description: String
@@ -169,25 +198,13 @@ abstract class SymbolLoaders {
private def setSource(sym: Symbol) {
sourcefile foreach (sf => sym match {
- case cls: ClassSymbol => cls.sourceFile = sf
- case mod: ModuleSymbol => mod.moduleClass.sourceFile = sf
+ case cls: ClassSymbol => cls.associatedFile = sf
+ case mod: ModuleSymbol => mod.moduleClass.associatedFile = sf
case _ => ()
})
}
override def complete(root: Symbol) {
- def signalError(ex: Exception) {
- ok = false
- if (settings.debug.value) ex.printStackTrace()
- val msg = ex.getMessage()
- // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
- // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
- // that are not in their correct place (see bug for details)
- if (!settings.isScaladoc)
- globalError(
- if (msg eq null) "i/o error while loading " + root.name
- else "error while loading " + root.name + ", " + msg);
- }
try {
val start = currentTime
val currentphase = phase
@@ -197,11 +214,11 @@ abstract class SymbolLoaders {
ok = true
setSource(root)
setSource(root.companionSymbol) // module -> class, class -> module
- } catch {
- case ex: IOException =>
- signalError(ex)
- case ex: MissingRequirementError =>
- signalError(ex)
+ }
+ catch {
+ case ex @ (_: IOException | _: MissingRequirementError) =>
+ ok = false
+ signalError(root, ex)
}
initRoot(root)
if (!root.isPackageClass) initRoot(root.companionSymbol)
@@ -226,14 +243,13 @@ abstract class SymbolLoaders {
/**
* Load contents of a package
*/
- class PackageLoader(classpath: ClassPath[platform.BinaryRepr]) extends SymbolLoader with FlagAgnosticCompleter {
+ class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter {
protected def description = "package loader "+ classpath.name
protected def doComplete(root: Symbol) {
assert(root.isPackageClass, root)
root.setInfo(new PackageClassInfoType(newScope, root))
- val sourcepaths = classpath.sourcepaths
if (!root.isRoot) {
for (classRep <- classpath.classes if platform.doLoad(classRep)) {
initializeFromClassPath(root, classRep)
@@ -250,8 +266,24 @@ abstract class SymbolLoaders {
}
class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
- private object classfileParser extends ClassfileParser {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
+ private object classfileParser extends {
+ val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable
+ } with ClassfileParser {
+ override protected type ThisConstantPool = ConstantPool
+ override protected def newConstantPool: ThisConstantPool = new ConstantPool
+ override protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
+ SymbolLoaders.this.lookupMemberAtTyperPhaseIfPossible(sym, name)
+ /*
+       * The type alias and the cast (where the alias is used) are needed due to the problem described
+       * in SI-7585. In this particular case, the problem is that we need to make sure that the symbol
+       * table used by the symbol loaders is exactly the same as the one used by classfileParser.
+       * If you look at the path-dependent types involved, everything should work out, but
+       * due to the issue described in SI-7585 the type-checker cannot tie the knot here.
+ *
+ */
+ private type SymbolLoadersRefined = SymbolLoaders { val symbolTable: classfileParser.symbolTable.type }
+ val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined]
+ val classPath = platform.classPath
}
protected def description = "class file "+ classfile.toString
@@ -259,7 +291,7 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) {
val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null
classfileParser.parse(classfile, root)
- if (root.associatedFile eq null) {
+ if (root.associatedFile eq NoAbstractFile) {
root match {
// In fact, the ModuleSymbol forwards its setter to the module class
case _: ClassSymbol | _: ModuleSymbol =>
@@ -274,21 +306,11 @@ abstract class SymbolLoaders {
override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
}
- class MsilFileLoader(msilFile: MsilFile) extends SymbolLoader with FlagAssigningCompleter {
- private def typ = msilFile.msilType
- private object typeParser extends clr.TypeParser {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- }
-
- protected def description = "MsilFile "+ typ.FullName + ", assembly "+ typ.Assembly.FullName
- protected def doComplete(root: Symbol) { typeParser.parse(typ, root) }
- }
-
class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
protected def description = "source file "+ srcfile.toString
override def fromSource = true
override def sourcefile = Some(srcfile)
- protected def doComplete(root: Symbol): Unit = global.currentRun.compileLate(srcfile)
+ protected def doComplete(root: Symbol): Unit = compileLate(srcfile)
}
object moduleClassLoader extends SymbolLoader with FlagAssigningCompleter {
@@ -296,11 +318,6 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) { root.sourceModule.initialize }
}
- object clrTypes extends clr.CLRTypes {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- if (global.forMSIL) init()
- }
-
  /** used from classfile parser to avoid cycles */
var parentsLevel = 0
var pendingLoadActions: List[() => Unit] = Nil
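
One idiom worth noting in the rewritten complete method above: the two identical catch cases for IOException and MissingRequirementError are folded into a single alternative pattern with a binder (case ex @ (_: IOException | _: MissingRequirementError)). The standalone sketch below illustrates that idiom; IllegalArgumentException stands in for MissingRequirementError so the snippet compiles without the compiler on the classpath:

    import java.io.IOException

    object CatchAlternativesDemo {
      def describe(body: => Unit): String =
        try { body; "ok" }
        catch {
          // One handler for several exception types, with the caught value bound to ex.
          case ex @ (_: IOException | _: IllegalArgumentException) =>
            s"recoverable: ${ex.getMessage}"
        }

      def main(args: Array[String]): Unit = {
        println(describe(throw new IOException("disk gone")))            // recoverable: disk gone
        println(describe(throw new IllegalArgumentException("bad arg"))) // recoverable: bad arg
        println(describe(()))                                            // ok
      }
    }
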
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
index 7a84441e09..daaa625164 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package symtab
-import scala.collection.{ mutable, immutable }
import scala.language.implicitConversions
import scala.language.postfixOps
@@ -17,9 +16,6 @@ trait SymbolTrackers {
val global: Global
import global._
- private implicit lazy val TreeOrdering: Ordering[Tree] =
- Ordering by (x => (x.shortClass, x.symbol))
-
private implicit lazy val SymbolOrdering: Ordering[Symbol] =
Ordering by (x => (x.kindString, x.name.toString))
@@ -76,7 +72,6 @@ trait SymbolTrackers {
private def isFlagsChange(sym: Symbol) = changed.flags contains sym
private implicit def NodeOrdering: Ordering[Node] = Ordering by (_.root)
- private def ownersString(sym: Symbol, num: Int) = sym.ownerChain drop 1 take num mkString " -> "
object Node {
def nodes(syms: Set[Symbol]): List[Node] = {
@@ -114,7 +109,6 @@ trait SymbolTrackers {
case Some(oldFlags) =>
val added = masked & ~oldFlags
val removed = oldFlags & ~masked
- val steady = masked & ~(added | removed)
val all = masked | oldFlags
val strs = 0 to 63 map { bit =>
val flag = 1L << bit
@@ -133,7 +127,7 @@ trait SymbolTrackers {
else " (" + Flags.flagsToString(masked) + ")"
}
def symString(sym: Symbol) = (
- if (settings.debug.value && sym.hasCompleteInfo) {
+ if (settings.debug && sym.hasCompleteInfo) {
val s = sym.defString take 240
if (s.length == 240) s + "..." else s
}
@@ -181,7 +175,7 @@ trait SymbolTrackers {
}
def show(label: String): String = {
val hierarchy = Node(current)
- val Change(added, removed, symMap, owners, flags) = history.head
+ val Change(_, removed, symMap, _, _) = history.head
def detailString(sym: Symbol) = {
val ownerString = sym.ownerChain splitAt 3 match {
case (front, back) =>
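
The flag-change report in the SymbolTrackers hunk above is plain bit arithmetic: bits set only in the new mask count as added, bits set only in the old mask count as removed. A tiny worked example with arbitrary flag values:

    object FlagDiffDemo {
      def main(args: Array[String]): Unit = {
        val oldFlags = 0xA0L              // 1010 0000
        val masked   = 0x60L              // 0110 0000
        val added    = masked & ~oldFlags // 0100 0000
        val removed  = oldFlags & ~masked // 1000 0000
        println(f"added=0x$added%02X removed=0x$removed%02X") // added=0x40 removed=0x80
      }
    }
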
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
index 427b5bf887..17e3b08ec2 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
@@ -29,11 +29,6 @@ class AbstractFileReader(val file: AbstractFile) {
*/
var bp: Int = 0
- /** return byte at offset 'pos'
- */
- @throws(classOf[IndexOutOfBoundsException])
- def byteAt(pos: Int): Byte = buf(pos)
-
/** read a byte
*/
@throws(classOf[IndexOutOfBoundsException])
@@ -45,7 +40,7 @@ class AbstractFileReader(val file: AbstractFile) {
/** read some bytes
*/
- def nextBytes(len: Int): Array[Byte] = {
+ def nextBytes(len: Int): Array[Byte] = { // used in ide
bp += len
buf.slice(bp - len, bp)
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 2955986a7e..664645e53e 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package symtab
package classfile
@@ -12,48 +13,94 @@ import java.lang.Integer.toHexString
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
import scala.annotation.switch
+import scala.reflect.internal.{ JavaAccFlags }
import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
import scala.tools.nsc.io.AbstractFile
+import util.ClassPath
+
/** This abstract class implements a class file parser.
*
* @author Martin Odersky
* @version 1.0
*/
abstract class ClassfileParser {
- val global: Global
- import global._
- import definitions.{ AnnotationClass, ClassfileAnnotationClass }
+ val symbolTable: SymbolTable {
+ def settings: Settings
+ }
+ val loaders: SymbolLoaders {
+ val symbolTable: ClassfileParser.this.symbolTable.type
+ }
+
+ import symbolTable._
+ /**
+ * If typer phase is defined then perform member lookup of a symbol
+ * `sym` at typer phase. This method results from refactoring. The
+ * original author of the logic that uses typer phase didn't explain
+   * why we need to force infos at that phase specifically, only that
+   * ClassfileParser can be called late (e.g. at the flatten phase) and that
+   * we have to make sure such situations are handled properly.
+ */
+ protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
+
+ /** The compiler classpath. */
+ def classPath: ClassPath[AbstractFile]
+
+ import definitions._
import scala.reflect.internal.ClassfileConstants._
import Flags._
+ protected type ThisConstantPool <: ConstantPool
+ protected def newConstantPool: ThisConstantPool
+
protected var in: AbstractFileReader = _ // the class file reader
protected var clazz: Symbol = _ // the class symbol containing dynamic members
protected var staticModule: Symbol = _ // the module symbol containing static members
- protected var instanceScope: Scope = _ // the scope of all instance definitions
- protected var staticScope: Scope = _ // the scope of all static definitions
- protected var pool: ConstantPool = _ // the classfile's constant pool
+ protected var instanceScope: Scope = _ // the scope of all instance definitions
+ protected var staticScope: Scope = _ // the scope of all static definitions
+ protected var pool: ThisConstantPool = _ // the classfile's constant pool
protected var isScala: Boolean = _ // does class file describe a scala class?
protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
- protected var busy: Option[Symbol] = None // lock to detect recursive reads
+ protected var busy: Symbol = _ // lock to detect recursive reads
protected var currentClass: Name = _ // JVM name of the current class
protected var classTParams = Map[Name,Symbol]()
protected var srcfile0 : Option[AbstractFile] = None
protected def moduleClass: Symbol = staticModule.moduleClass
+ private var sawPrivateConstructor = false
+
+ private def ownerForFlags(jflags: JavaAccFlags) = if (jflags.isStatic) moduleClass else clazz
def srcfile = srcfile0
+ private def optimized = settings.optimise.value
+
+ // u1, u2, and u4 are what these data types are called in the JVM spec.
+ // They are an unsigned byte, unsigned char, and unsigned int respectively.
+ // We bitmask u1 into an Int to make sure it's 0-255 (and u1 isn't used
+ // for much beyond tags) but leave u2 alone as it's already unsigned.
+ protected final def u1(): Int = in.nextByte & 0xFF
+ protected final def u2(): Int = in.nextChar.toInt
+ protected final def u4(): Int = in.nextInt
+
+ private def readInnerClassFlags() = readClassFlags()
+ private def readClassFlags() = JavaAccFlags classFlags u2
+ private def readMethodFlags() = JavaAccFlags methodFlags u2
+ private def readFieldFlags() = JavaAccFlags fieldFlags u2
+ private def readTypeName() = readName().toTypeName
+ private def readName() = pool getName u2
+ private def readType() = pool getType u2
+
private object unpickler extends scala.reflect.internal.pickling.UnPickler {
- val global: ClassfileParser.this.global.type = ClassfileParser.this.global
+ val symbolTable: ClassfileParser.this.symbolTable.type = ClassfileParser.this.symbolTable
}
private def handleMissing(e: MissingRequirementError) = {
- if (settings.debug.value) e.printStackTrace
+ if (settings.debug) e.printStackTrace
throw new IOException(s"Missing dependency '${e.req}', required by ${in.file}")
}
private def handleError(e: Exception) = {
- if (settings.debug.value) e.printStackTrace()
+ if (settings.debug) e.printStackTrace()
throw new IOException(s"class file '${in.file}' is broken\n(${e.getClass}/${e.getMessage})")
}
private def mismatchError(c: Symbol) = {
@@ -65,16 +112,15 @@ abstract class ClassfileParser {
case e: RuntimeException => handleError(e)
}
@inline private def pushBusy[T](sym: Symbol)(body: => T): T = {
- busy match {
- case Some(`sym`) => throw new IOException(s"unsatisfiable cyclic dependency in '$sym'")
- case Some(sym1) => throw new IOException(s"illegal class file dependency between '$sym' and '$sym1'")
- case _ => ()
- }
+ if (busy eq sym)
+ throw new IOException(s"unsatisfiable cyclic dependency in '$sym'")
+ else if ((busy ne null) && (busy ne NoSymbol))
+ throw new IOException(s"illegal class file dependency between '$sym' and '$busy'")
- busy = Some(sym)
+ busy = sym
try body
catch parseErrorHandler
- finally busy = None
+ finally busy = NoSymbol
}
@inline private def raiseLoaderLevel[T](body: => T): T = {
loaders.parentsLevel += 1
@@ -94,75 +140,70 @@ abstract class ClassfileParser {
this.staticModule = if (root.isModule) root else root.companionModule
this.isScala = false
- parseHeader
- this.pool = new ConstantPool
+ parseHeader()
+ this.pool = newConstantPool
parseClass()
}
}
private def parseHeader() {
- val magic = in.nextInt
+ val magic = u4
if (magic != JAVA_MAGIC)
- throw new IOException("class file '" + in.file + "' "
- + "has wrong magic number 0x" + toHexString(magic)
- + ", should be 0x" + toHexString(JAVA_MAGIC))
- val minorVersion = in.nextChar.toInt
- val majorVersion = in.nextChar.toInt
- if ((majorVersion < JAVA_MAJOR_VERSION) ||
- ((majorVersion == JAVA_MAJOR_VERSION) &&
- (minorVersion < JAVA_MINOR_VERSION)))
- throw new IOException("class file '" + in.file + "' "
- + "has unknown version "
- + majorVersion + "." + minorVersion
- + ", should be at least "
- + JAVA_MAJOR_VERSION + "." + JAVA_MINOR_VERSION)
+ abort(s"class file ${in.file} has wrong magic number 0x${toHexString(magic)}")
+
+ val minor, major = u2
+ if (major < JAVA_MAJOR_VERSION || major == JAVA_MAJOR_VERSION && minor < JAVA_MINOR_VERSION)
+ abort(s"class file ${in.file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
}
- class ConstantPool {
- private val len = in.nextChar
- private val starts = new Array[Int](len)
- private val values = new Array[AnyRef](len)
- private val internalized = new Array[Name](len)
+ /**
+ * Constructor of this class should not be called directly, use `newConstantPool` instead.
+ */
+ protected class ConstantPool {
+ protected val len = u2
+ protected val starts = new Array[Int](len)
+ protected val values = new Array[AnyRef](len)
+ protected val internalized = new Array[Name](len)
{ var i = 1
while (i < starts.length) {
starts(i) = in.bp
i += 1
- (in.nextByte.toInt: @switch) match {
- case CONSTANT_UTF8 | CONSTANT_UNICODE =>
- in.skip(in.nextChar)
- case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE=>
- in.skip(2)
- case CONSTANT_METHODHANDLE =>
- in.skip(3)
- case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF
- | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT
- | CONSTANT_INVOKEDYNAMIC =>
- in.skip(4)
- case CONSTANT_LONG | CONSTANT_DOUBLE =>
- in.skip(8)
- i += 1
- case _ =>
- errorBadTag(in.bp - 1)
+ (u1: @switch) match {
+ case CONSTANT_UTF8 | CONSTANT_UNICODE => in skip u2
+ case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE => in skip 2
+ case CONSTANT_METHODHANDLE => in skip 3
+ case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF => in skip 4
+ case CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT => in skip 4
+ case CONSTANT_INVOKEDYNAMIC => in skip 4
+ case CONSTANT_LONG | CONSTANT_DOUBLE => in skip 8 ; i += 1
+ case _ => errorBadTag(in.bp - 1)
}
}
}
- /** Return the name found at given index. */
- def getName(index: Int): Name = {
- if (index <= 0 || len <= index)
- errorBadIndex(index)
+ def recordAtIndex[T <: AnyRef](value: T, idx: Int): T = {
+ values(idx) = value
+ value
+ }
- values(index) match {
+ def firstExpecting(index: Int, expected: Int): Int = {
+ val start = starts(index)
+ val first = in.buf(start).toInt
+ if (first == expected) start + 1
+ else this errorBadTag start
+ }
+
+ /** Return the name found at given index. */
+ def getName(index: Int): Name = (
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ else values(index) match {
case name: Name => name
- case null =>
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val name = newTermName(in.buf, start + 3, in.getChar(start + 1))
- values(index) = name
- name
+ case _ =>
+ val start = firstExpecting(index, CONSTANT_UTF8)
+ recordAtIndex(newTermName(in.buf, start + 2, in.getChar(start).toInt), index)
}
- }
+ )
/** Return the name found at given index in the constant pool, with '/' replaced by '.'. */
def getExternalName(index: Int): Name = {
@@ -177,91 +218,23 @@ abstract class ClassfileParser {
def getClassSymbol(index: Int): Symbol = {
if (index <= 0 || len <= index) errorBadIndex(index)
- var c = values(index).asInstanceOf[Symbol]
- if (c eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
- val name = getExternalName(in.getChar(start + 1))
- if (nme.isModuleName(name))
- c = rootMirror.getModule(nme.stripModuleSuffix(name))
- else
- c = classNameToSymbol(name)
-
- values(index) = c
+ values(index) match {
+ case sym: Symbol => sym
+ case _ =>
+ val result = getClassName(index) match {
+ case name if nme.isModuleName(name) => rootMirror getModuleByName name.dropModule
+ case name => classNameToSymbol(name)
+ }
+ recordAtIndex(result, index)
}
- c
}
/** Return the external name of the class info structure found at 'index'.
* Use 'getClassSymbol' if the class is sure to be a top-level class.
*/
def getClassName(index: Int): Name = {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
- getExternalName(in.getChar(start + 1))
- }
-
- /** Return the symbol of the class member at `index`.
- * The following special cases exist:
- * - If the member refers to special `MODULE$` static field, return
- * the symbol of the corresponding module.
- * - If the member is a field, and is not found with the given name,
- * another try is made by appending `nme.LOCAL_SUFFIX_STRING`
- * - If no symbol is found in the right tpe, a new try is made in the
- * companion class, in case the owner is an implementation class.
- */
- def getMemberSymbol(index: Int, static: Boolean): Symbol = {
- if (index <= 0 || len <= index) errorBadIndex(index)
- var f = values(index).asInstanceOf[Symbol]
- if (f eq null) {
- val start = starts(index)
- val first = in.buf(start).toInt
- if (first != CONSTANT_FIELDREF &&
- first != CONSTANT_METHODREF &&
- first != CONSTANT_INTFMETHODREF) errorBadTag(start)
- val ownerTpe = getClassOrArrayType(in.getChar(start + 1))
- debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
- val (name0, tpe0) = getNameAndType(in.getChar(start + 3), ownerTpe)
- debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
-
- forceMangledName(tpe0.typeSymbol.name, false)
- val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
- if (name == nme.MODULE_INSTANCE_FIELD) {
- val index = in.getChar(start + 1)
- val name = getExternalName(in.getChar(starts(index) + 1))
- //assert(name.endsWith("$"), "Not a module class: " + name)
- f = forceMangledName(name dropRight 1, true)
- if (f == NoSymbol)
- f = rootMirror.getModule(name dropRight 1)
- } else {
- val origName = nme.originalName(name)
- val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
- f = owner.info.findMember(origName, 0, 0, false).suchThat(_.tpe.widen =:= tpe)
- if (f == NoSymbol)
- f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
- if (f == NoSymbol) {
- // if it's an impl class, try to find it's static member inside the class
- if (ownerTpe.typeSymbol.isImplClass) {
- f = ownerTpe.findMember(origName, 0, 0, false).suchThat(_.tpe =:= tpe)
- } else {
- log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
- f = tpe match {
- case MethodType(_, _) => owner.newMethod(name, owner.pos)
- case _ => owner.newVariable(name, owner.pos)
- }
- f setInfo tpe
- log("created fake member " + f.fullName)
- }
- }
- }
- assert(f != NoSymbol,
- s"could not find $name: $tpe in $ownerTpe" + (
- if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else ""
- )
- )
- values(index) = f
- }
- f
+ val start = firstExpecting(index, CONSTANT_CLASS)
+ getExternalName((in getChar start).toInt)
}
/** Return a name and a type at the given index. If the type is a method
@@ -270,96 +243,69 @@ abstract class ClassfileParser {
* if a symbol of the given name already exists, and has a different
* type.
*/
- private def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = {
+ protected def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = {
if (index <= 0 || len <= index) errorBadIndex(index)
- var p = values(index).asInstanceOf[(Name, Type)]
- if (p eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start)
- val name = getName(in.getChar(start + 1).toInt)
- // create a dummy symbol for method types
- val dummySym = ownerTpe.typeSymbol.newMethod(name, ownerTpe.typeSymbol.pos)
- var tpe = getType(dummySym, in.getChar(start + 3).toInt)
-
- // fix the return type, which is blindly set to the class currently parsed
- if (name == nme.CONSTRUCTOR)
- tpe match {
- case MethodType(formals, restpe) =>
- tpe = MethodType(formals, ownerTpe)
+ values(index) match {
+ case p: ((Name @unchecked, Type @unchecked)) => p
+ case _ =>
+ val start = firstExpecting(index, CONSTANT_NAMEANDTYPE)
+ val name = getName(in.getChar(start).toInt)
+ // create a dummy symbol for method types
+ val dummy = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos)
+ val tpe = getType(dummy, in.getChar(start + 2).toInt)
+ // fix the return type, which is blindly set to the class currently parsed
+ val restpe = tpe match {
+ case MethodType(formals, _) if name == nme.CONSTRUCTOR => MethodType(formals, ownerTpe)
+ case _ => tpe
}
-
- p = (name, tpe)
+ ((name, restpe))
}
- p
}
/** Return the type of a class constant entry. Since
* arrays are considered to be class types, they might
* appear as entries in 'newarray' or 'cast' opcodes.
*/
- def getClassOrArrayType(index: Int): Type = {
+ def getClassOrArrayType(index: Int): Type = (
if (index <= 0 || len <= index) errorBadIndex(index)
- val value = values(index)
- var c: Type = null
- if (value eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
- val name = getExternalName(in.getChar(start + 1))
- if (name.charAt(0) == ARRAY_TAG) {
- c = sigToType(null, name)
- values(index) = c
- } else {
- val sym = classNameToSymbol(name)
- /*if (name.endsWith("$")) definitions.getModule(name.subName(0, name.length - 1))
- else if (name.endsWith("$class")) definitions.getModule(name)
- else definitions.getClass(name)*/
- values(index) = sym
- c = sym.tpe
- }
- } else c = value match {
- case tp: Type => tp
- case cls: Symbol => cls.tpe
+ else values(index) match {
+ case tp: Type => tp
+ case cls: Symbol => cls.tpe_*
+ case _ =>
+ val name = getClassName(index)
+ name charAt 0 match {
+ case ARRAY_TAG => recordAtIndex(sigToType(null, name), index)
+ case _ => recordAtIndex(classNameToSymbol(name), index).tpe_*
+ }
}
- c
- }
-
- def getType(index: Int): Type = getType(null, index)
+ )
- def getType(sym: Symbol, index: Int): Type =
- sigToType(sym, getExternalName(index))
+ def getType(index: Int): Type = getType(null, index)
+ def getType(sym: Symbol, index: Int): Type = sigToType(sym, getExternalName(index))
+ def getSuperClass(index: Int): Symbol = if (index == 0) AnyClass else getClassSymbol(index)
- def getSuperClass(index: Int): Symbol =
- if (index == 0) definitions.AnyClass else getClassSymbol(index)
-
- def getConstant(index: Int): Constant = {
+ private def createConstant(index: Int): Constant = {
+ val start = starts(index)
+ Constant((in.buf(start).toInt: @switch) match {
+ case CONSTANT_STRING => getName(in.getChar(start + 1).toInt).toString
+ case CONSTANT_INTEGER => in.getInt(start + 1)
+ case CONSTANT_FLOAT => in.getFloat(start + 1)
+ case CONSTANT_LONG => in.getLong(start + 1)
+ case CONSTANT_DOUBLE => in.getDouble(start + 1)
+ case CONSTANT_CLASS => getClassOrArrayType(index).typeSymbol.tpe_* // !!! Is this necessary or desirable?
+ case _ => errorBadTag(start)
+ })
+ }
+ def getConstant(index: Char): Constant = getConstant(index.toInt)
+ def getConstant(index: Int): Constant = (
if (index <= 0 || len <= index) errorBadIndex(index)
- var value = values(index)
- if (value eq null) {
- val start = starts(index)
- value = (in.buf(start).toInt: @switch) match {
- case CONSTANT_STRING =>
- Constant(getName(in.getChar(start + 1).toInt).toString)
- case CONSTANT_INTEGER =>
- Constant(in.getInt(start + 1))
- case CONSTANT_FLOAT =>
- Constant(in.getFloat(start + 1))
- case CONSTANT_LONG =>
- Constant(in.getLong(start + 1))
- case CONSTANT_DOUBLE =>
- Constant(in.getDouble(start + 1))
- case CONSTANT_CLASS =>
- getClassOrArrayType(index).typeSymbol
- case _ =>
- errorBadTag(start)
- }
- values(index) = value
- }
- value match {
- case ct: Constant => ct
- case cls: Symbol => Constant(cls.tpe)
- case arr: Type => Constant(arr)
+ else values(index) match {
+ case const: Constant => const
+ case sym: Symbol => Constant(sym.tpe_*)
+ case tpe: Type => Constant(tpe)
+ case _ => recordAtIndex(createConstant(index), index)
}
- }
+ )
private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
val decodedLength = ByteCodecs.decode(bytes)
@@ -368,71 +314,45 @@ abstract class ClassfileParser {
arr
}
- def getBytes(index: Int): Array[Byte] = {
+ def getBytes(index: Int): Array[Byte] = (
if (index <= 0 || len <= index) errorBadIndex(index)
- var value = values(index).asInstanceOf[Array[Byte]]
- if (value eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val len = in.getChar(start + 1)
- val bytes = new Array[Byte](len)
- System.arraycopy(in.buf, start + 3, bytes, 0, len)
- value = getSubArray(bytes)
- values(index) = value
+ else values(index) match {
+ case xs: Array[Byte] => xs
+ case _ =>
+ val start = firstExpecting(index, CONSTANT_UTF8)
+ val len = (in getChar start).toInt
+ val bytes = new Array[Byte](len)
+ System.arraycopy(in.buf, start + 2, bytes, 0, len)
+ recordAtIndex(getSubArray(bytes), index)
}
- value
- }
+ )
def getBytes(indices: List[Int]): Array[Byte] = {
- assert(!indices.isEmpty, indices)
- var value = values(indices.head).asInstanceOf[Array[Byte]]
- if (value eq null) {
- val bytesBuffer = ArrayBuffer.empty[Byte]
- for (index <- indices) {
- if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val len = in.getChar(start + 1)
- bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
- }
- value = getSubArray(bytesBuffer.toArray)
- values(indices.head) = value
+ val head = indices.head
+ values(head) match {
+ case xs: Array[Byte] => xs
+ case _ =>
+ val arr: Array[Byte] = indices.toArray flatMap { index =>
+ if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
+ val start = firstExpecting(index, CONSTANT_UTF8)
+ val len = (in getChar start).toInt
+ in.buf drop start + 2 take len
+ }
+ recordAtIndex(getSubArray(arr), head)
}
- value
}
/** Throws an exception signaling a bad constant index. */
- private def errorBadIndex(index: Int) =
- throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp)
+ protected def errorBadIndex(index: Int) =
+ abort(s"bad constant pool index: $index at pos: ${in.bp}")
/** Throws an exception signaling a bad tag at given address. */
- private def errorBadTag(start: Int) =
- throw new RuntimeException("bad constant pool tag " + in.buf(start) + " at byte " + start)
- }
-
- /** Try to force the chain of enclosing classes for the given name. Otherwise
- * flatten would not lift classes that were not referenced in the source code.
- */
- def forceMangledName(name: Name, module: Boolean): Symbol = {
- val parts = name.decode.toString.split(Array('.', '$'))
- var sym: Symbol = rootMirror.RootClass
-
- // was "at flatten.prev"
- beforeFlatten {
- for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
- val sym1 = beforeIcode {
- sym.linkedClassOfClass.info
- sym.info.decl(part.encode)
- }//.suchThat(module == _.isModule)
-
- sym = sym1 orElse sym.info.decl(part.encode.toTypeName)
- }
- }
- sym
+ protected def errorBadTag(start: Int) =
+ abort(s"bad constant pool tag ${in.buf(start)} at byte $start")
}
private def loadClassSymbol(name: Name): Symbol = {
- val file = global.classPath findSourceFile ("" +name) getOrElse {
+ val file = classPath findSourceFile ("" +name) getOrElse {
// SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
// therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
// that are not in their correct place (see bug for details)
@@ -440,7 +360,7 @@ abstract class ClassfileParser {
warning(s"Class $name not found - continuing with a stub.")
return NoSymbol.newClass(name.toTypeName)
}
- val completer = new global.loaders.ClassfileLoader(file)
+ val completer = new loaders.ClassfileLoader(file)
var owner: Symbol = rootMirror.RootClass
var sym: Symbol = NoSymbol
var ss: Name = null
@@ -451,7 +371,7 @@ abstract class ClassfileParser {
ss = name.subName(start, end)
sym = owner.info.decls lookup ss
if (sym == NoSymbol) {
- sym = owner.newPackage(ss) setInfo completer
+ sym = owner.newPackage(ss.toTermName) setInfo completer
sym.moduleClass setInfo completer
owner.info.decls enter sym
}
@@ -466,6 +386,7 @@ abstract class ClassfileParser {
sym
}
}
+
/** FIXME - we shouldn't be doing ad hoc lookups in the empty package.
* The method called "getClassByName" should either return the class or not.
*/
@@ -485,30 +406,27 @@ abstract class ClassfileParser {
catch { case _: FatalError => loadClassSymbol(name) }
}
- var sawPrivateConstructor = false
-
def parseClass() {
- val jflags = in.nextChar
- val isAnnotation = hasAnnotation(jflags)
- var sflags = toScalaClassFlags(jflags)
- var nameIdx = in.nextChar
- currentClass = pool.getClassName(nameIdx)
-
- /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
- * Updates the read pointer of 'in'. */
+ val jflags = readClassFlags()
+ val sflags = jflags.toScalaFlags
+ val nameIdx = u2
+ currentClass = pool.getClassName(nameIdx)
+
+ /* Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
+ * Updates the read pointer of 'in'. */
def parseParents: List[Type] = {
if (isScala) {
- in.nextChar // skip superclass
- val ifaces = in.nextChar
- in.bp += ifaces * 2 // .. and iface count interfaces
- List(definitions.AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information
+ u2 // skip superclass
+ val ifaces = u2
+ in.bp += ifaces * 2 // .. and iface count interfaces
+ List(AnyRefTpe) // dummy superclass, will be replaced by pickled information
}
else raiseLoaderLevel {
- val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
- else pool.getSuperClass(in.nextChar).tpe
- val ifaceCount = in.nextChar
- var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
- if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
+ val superType = if (jflags.isAnnotation) { u2; AnnotationClass.tpe }
+ else pool.getSuperClass(u2).tpe_*
+ val ifaceCount = u2
+ var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(u2).tpe_*
+ if (jflags.isAnnotation) ifaces ::= ClassfileAnnotationClass.tpe
superType :: ifaces
}
}
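parseParents consumes the super_class index, the interfaces_count, and one u2 constant-pool index per interface; for Scala classfiles it only advances the cursor, since the pickled signature supplies the real parents. A rough sketch of that section of the format, using java.io.DataInputStream as a stand-in cursor rather than the parser's own buffer type:

    import java.io.{ByteArrayInputStream, DataInputStream}

    // Illustrative only: read super_class and the interface indices as u2 values.
    object ParentIndicesExample {
      def readParentIndices(bytes: Array[Byte]): (Int, List[Int]) = {
        val in = new DataInputStream(new ByteArrayInputStream(bytes))
        val superIndex = in.readUnsignedShort()                    // u2 super_class
        val ifaceCount = in.readUnsignedShort()                    // u2 interfaces_count
        val ifaces = List.fill(ifaceCount)(in.readUnsignedShort()) // one u2 index per interface
        (superIndex, ifaces)
      }
    }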
@@ -533,31 +451,30 @@ abstract class ClassfileParser {
val staticInfo = ClassInfoType(List(), staticScope, moduleClass)
if (!isScala && !isScalaRaw)
- enterOwnInnerClasses
+ enterOwnInnerClasses()
val curbp = in.bp
skipMembers() // fields
skipMembers() // methods
if (!isScala) {
clazz setFlag sflags
- importPrivateWithinFromJavaFlags(clazz, jflags)
- importPrivateWithinFromJavaFlags(staticModule, jflags)
- clazz.setInfo(classInfo)
+ propagatePackageBoundary(jflags, clazz, staticModule, staticModule.moduleClass)
+ clazz setInfo classInfo
moduleClass setInfo staticInfo
- staticModule.setInfo(moduleClass.tpe)
- staticModule.setFlag(JAVA)
- staticModule.moduleClass.setFlag(JAVA)
+ staticModule setInfo moduleClass.tpe
+ staticModule setFlag JAVA
+ staticModule.moduleClass setFlag JAVA
// attributes now depend on having infos set already
parseAttributes(clazz, classInfo)
def queueLoad() {
in.bp = curbp
- 0 until in.nextChar foreach (_ => parseField())
+ 0 until u2 foreach (_ => parseField())
sawPrivateConstructor = false
- 0 until in.nextChar foreach (_ => parseMethod())
+ 0 until u2 foreach (_ => parseMethod())
val needsConstructor = (
!sawPrivateConstructor
- && instanceScope.lookup(nme.CONSTRUCTOR) == NoSymbol
+ && !(instanceScope containsName nme.CONSTRUCTOR)
&& (sflags & INTERFACE) == 0
)
if (needsConstructor)
@@ -588,56 +505,62 @@ abstract class ClassfileParser {
}
def parseField() {
- val jflags = in.nextChar
- var sflags = toScalaFieldFlags(jflags)
- if ((sflags & PRIVATE) != 0L && !global.settings.optimise.value) {
+ val jflags = readFieldFlags()
+ val sflags = jflags.toScalaFlags
+
+ if ((sflags & PRIVATE) != 0L && !optimized) {
in.skip(4); skipAttributes()
} else {
- val name = pool.getName(in.nextChar)
- val info = pool.getType(in.nextChar)
- val sym = getOwner(jflags).newValue(name, NoPosition, sflags)
- val isEnum = (jflags & JAVA_ACC_ENUM) != 0
+ val name = readName()
+ val info = readType()
+ val sym = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags)
+ // Note: the info may be overwritten later with a generic signature
+ // parsed from SignatureATTR
sym setInfo {
- if (isEnum) ConstantType(Constant(sym))
+ if (jflags.isEnum) ConstantType(Constant(sym))
else info
}
- importPrivateWithinFromJavaFlags(sym, jflags)
+ propagatePackageBoundary(jflags, sym)
parseAttributes(sym, info)
- getScope(jflags).enter(sym)
+ getScope(jflags) enter sym
// sealed java enums
- if (isEnum) {
+ if (jflags.isEnum) {
val enumClass = sym.owner.linkedClassOfClass
- if (!enumClass.isSealed)
- enumClass setFlag (SEALED | ABSTRACT)
-
- enumClass addChild sym
+ enumClass match {
+ case NoSymbol =>
+ devWarning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.")
+ case linked =>
+ if (!linked.isSealed)
+ linked setFlag (SEALED | ABSTRACT)
+ linked addChild sym
+ }
}
}
}
def parseMethod() {
- val jflags = in.nextChar.toInt
- var sflags = toScalaMethodFlags(jflags)
- if (isPrivate(jflags) && !global.settings.optimise.value) {
- val name = pool.getName(in.nextChar)
+ val jflags = readMethodFlags()
+ val sflags = jflags.toScalaFlags
+ if (jflags.isPrivate && !optimized) {
+ val name = readName()
if (name == nme.CONSTRUCTOR)
sawPrivateConstructor = true
in.skip(2); skipAttributes()
} else {
- if ((sflags & PRIVATE) != 0L && global.settings.optimise.value) { // TODO this should be !optimize, no? See c4181f656d.
+ if ((sflags & PRIVATE) != 0L && optimized) { // TODO this should be !optimized, no? See c4181f656d.
in.skip(4); skipAttributes()
} else {
- val name = pool.getName(in.nextChar)
- val sym = getOwner(jflags).newMethod(name, NoPosition, sflags)
- var info = pool.getType(sym, (in.nextChar))
+ val name = readName()
+ val sym = ownerForFlags(jflags).newMethod(name.toTermName, NoPosition, sflags)
+ var info = pool.getType(sym, u2)
if (name == nme.CONSTRUCTOR)
info match {
case MethodType(params, restpe) =>
// if this is a non-static inner class, remove the explicit outer parameter
val paramsNoOuter = innerClasses getEntry currentClass match {
- case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) =>
+ case Some(entry) if !isScalaRaw && !entry.jflags.isStatic =>
/* About `clazz.owner.isPackage` below: SI-5957
* For every nested java class A$B, there are two symbols in the scala compiler.
* 1. created by SymbolLoader, because of the existence of the A$B.class file, owner: package
@@ -651,7 +574,7 @@ abstract class ClassfileParser {
params
}
val newParams = paramsNoOuter match {
- case (init :+ tail) if (jflags & JAVA_ACC_SYNTHETIC) != 0L =>
+ case (init :+ tail) if jflags.isSynthetic =>
// SI-7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which
// are added when an inner class needs to access a private constructor.
init
@@ -661,13 +584,15 @@ abstract class ClassfileParser {
info = MethodType(newParams, clazz.tpe)
}
- sym.setInfo(info)
- importPrivateWithinFromJavaFlags(sym, jflags)
+ // Note: the info may be overwritten later with a generic signature
+ // parsed from SignatureATTR
+ sym setInfo info
+ propagatePackageBoundary(jflags, sym)
parseAttributes(sym, info)
- if ((jflags & JAVA_ACC_VARARGS) != 0) {
- sym.setInfo(arrayToRepeated(sym.info))
- }
- getScope(jflags).enter(sym)
+ if (jflags.isVarargs)
+ sym modifyInfo arrayToRepeated
+
+ getScope(jflags) enter sym
}
}
}
@@ -687,15 +612,15 @@ abstract class ClassfileParser {
def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean): Type = {
val tag = sig.charAt(index); index += 1
tag match {
- case BYTE_TAG => definitions.ByteClass.tpe
- case CHAR_TAG => definitions.CharClass.tpe
- case DOUBLE_TAG => definitions.DoubleClass.tpe
- case FLOAT_TAG => definitions.FloatClass.tpe
- case INT_TAG => definitions.IntClass.tpe
- case LONG_TAG => definitions.LongClass.tpe
- case SHORT_TAG => definitions.ShortClass.tpe
- case VOID_TAG => definitions.UnitClass.tpe
- case BOOL_TAG => definitions.BooleanClass.tpe
+ case BYTE_TAG => ByteTpe
+ case CHAR_TAG => CharTpe
+ case DOUBLE_TAG => DoubleTpe
+ case FLOAT_TAG => FloatTpe
+ case INT_TAG => IntTpe
+ case LONG_TAG => LongTpe
+ case SHORT_TAG => ShortTpe
+ case VOID_TAG => UnitTpe
+ case BOOL_TAG => BooleanTpe
case 'L' =>
def processInner(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) if (!sym.isStatic) =>
@@ -720,7 +645,7 @@ abstract class ClassfileParser {
val tp = sig2type(tparams, skiptvs)
// sig2type seems to return AnyClass regardless of the situation:
// we don't want Any as a LOWER bound.
- if (tp.typeSymbol == definitions.AnyClass) TypeBounds.empty
+ if (tp.typeSymbol == AnyClass) TypeBounds.empty
else TypeBounds.lower(tp)
case '*' => TypeBounds.empty
}
@@ -734,15 +659,14 @@ abstract class ClassfileParser {
}
accept('>')
assert(xs.length > 0, tp)
- newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
- } else if (classSym.isMonomorphicType) {
- tp
- } else {
- // raw type - existentially quantify all type parameters
- val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
- val t = typeRef(pre, classSym, eparams.map(_.tpeHK))
- newExistentialType(eparams, t)
+ debuglogResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
}
+ // isMonomorphicType is false if the info is incomplete, as it usually is here
+ // so we have to check unsafeTypeParams.isEmpty before worrying about the raw type case below,
+ // or we'll create a boatload of needless existentials.
+ else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp
+ // raw type - existentially quantify all type parameters
+ else debuglogResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym))
case tp =>
assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
tp
@@ -750,12 +674,14 @@ abstract class ClassfileParser {
val classSym = classNameToSymbol(subName(c => c == ';' || c == '<'))
assert(!classSym.isOverloaded, classSym.alternatives)
- var tpe = processClassType(processInner(classSym.tpe))
+ var tpe = processClassType(processInner(classSym.tpe_*))
while (sig.charAt(index) == '.') {
accept('.')
val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName
val clazz = tpe.member(name)
- tpe = processClassType(processInner(clazz.tpe))
+ val dummyArgs = Nil // the actual arguments are added in processClassType
+ val inner = typeRef(pre = tpe, sym = clazz, args = dummyArgs)
+ tpe = processClassType(inner)
}
accept(';')
tpe
@@ -768,11 +694,11 @@ abstract class ClassfileParser {
// NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
// if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
// see also RestrictJavaArraysMap (when compiling java sources directly)
- if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe)) {
- elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
+ if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectTpe)) {
+ elemtp = intersectionType(List(elemtp, ObjectTpe))
}
- definitions.arrayType(elemtp)
+ arrayType(elemtp)
case '(' =>
// we need a method symbol. given in line 486 by calling getType(methodSym, ..)
assert(sym ne null, sig)
@@ -783,14 +709,14 @@ abstract class ClassfileParser {
index += 1
val restype = if (sym != null && sym.isClassConstructor) {
accept('V')
- clazz.tpe
+ clazz.tpe_*
} else
sig2type(tparams, skiptvs)
JavaMethodType(sym.newSyntheticValueParams(paramtypes.toList), restype)
case 'T' =>
val n = subName(';'.==).toTypeName
index += 1
- if (skiptvs) definitions.AnyClass.tpe
+ if (skiptvs) AnyTpe
else tparams(n).typeConstructor
}
} // sig2type(tparams, skiptvs)
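sig2type dispatches on the first character of a JVM type descriptor: single-letter tags for primitives, `L...;` for class types, `[` for arrays, `(` for method signatures. A small sketch, decoding only the primitive, class, and array cases into readable names; the object name and error handling are assumptions for illustration, not the compiler's own implementation.

    object DescriptorExample {
      /** Decode the primitive, class, and array cases of a JVM field descriptor. */
      def describe(sig: String): String = sig.charAt(0) match {
        case 'B' => "Byte"
        case 'C' => "Char"
        case 'D' => "Double"
        case 'F' => "Float"
        case 'I' => "Int"
        case 'J' => "Long"
        case 'S' => "Short"
        case 'Z' => "Boolean"
        case 'V' => "Unit"
        case 'L' => sig.substring(1, sig.indexOf(';')).replace('/', '.') // class type: L<binary name>;
        case '[' => "Array[" + describe(sig.substring(1)) + "]"          // array type: [<component>
        case c   => sys.error(s"unexpected descriptor tag: $c")          // method ('(') and type-variable cases omitted
      }
      // e.g. describe("[Ljava/lang/String;") == "Array[java.lang.String]"
    }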
@@ -815,14 +741,14 @@ abstract class ClassfileParser {
val tpname = subName(':'.==).toTypeName
val s = sym.newTypeParameter(tpname)
tparams = tparams + (tpname -> s)
- sig2typeBounds(tparams, true)
+ sig2typeBounds(tparams, skiptvs = true)
newTParams += s
}
index = start
while (sig.charAt(index) != '>') {
val tpname = subName(':'.==).toTypeName
val s = tparams(tpname)
- s.setInfo(sig2typeBounds(tparams, false))
+ s.setInfo(sig2typeBounds(tparams, skiptvs = false))
}
accept('>')
}
@@ -831,36 +757,32 @@ abstract class ClassfileParser {
sym.setInfo(new TypeParamsType(ownTypeParams))
val tpe =
if ((sym eq null) || !sym.isClass)
- sig2type(tparams, false)
+ sig2type(tparams, skiptvs = false)
else {
classTParams = tparams
val parents = new ListBuffer[Type]()
while (index < end) {
- parents += sig2type(tparams, false) // here the variance doesnt'matter
+ parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter
}
ClassInfoType(parents.toList, instanceScope, sym)
}
GenPolyType(ownTypeParams, tpe)
} // sigToType
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
- override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
- }
-
def parseAttributes(sym: Symbol, symtype: Type) {
def convertTo(c: Constant, pt: Type): Constant = {
- if (pt.typeSymbol == definitions.BooleanClass && c.tag == IntTag)
+ if (pt.typeSymbol == BooleanClass && c.tag == IntTag)
Constant(c.value != 0)
else
c convertTo pt
}
def parseAttribute() {
- val attrName = pool.getName(in.nextChar).toTypeName
- val attrLen = in.nextInt
+ val attrName = readTypeName()
+ val attrLen = u4
attrName match {
case tpnme.SignatureATTR =>
if (!isScala && !isScalaRaw) {
- val sig = pool.getExternalName(in.nextChar)
+ val sig = pool.getExternalName(u2)
val newType = sigToType(sym, sig)
sym.setInfo(newType)
}
@@ -869,14 +791,14 @@ abstract class ClassfileParser {
sym.setFlag(SYNTHETIC | ARTIFACT)
in.skip(attrLen)
case tpnme.BridgeATTR =>
- sym.setFlag(BRIDGE)
+ sym.setFlag(BRIDGE | ARTIFACT)
in.skip(attrLen)
case tpnme.DeprecatedATTR =>
val arg = Literal(Constant("see corresponding Javadoc for more information."))
- sym.addAnnotation(definitions.DeprecatedAttr, arg, Literal(Constant("")))
+ sym.addAnnotation(DeprecatedAttr, arg, Literal(Constant("")))
in.skip(attrLen)
case tpnme.ConstantValueATTR =>
- val c = pool.getConstant(in.nextChar)
+ val c = pool.getConstant(u2)
val c1 = convertTo(c, symtype)
if (c1 ne null) sym.setInfo(ConstantType(c1))
else debugwarn(s"failure to convert $c to $symtype")
@@ -890,7 +812,7 @@ abstract class ClassfileParser {
isScalaRaw = true
// Attribute on methods of java annotation classes when that method has a default
case tpnme.AnnotationDefaultATTR =>
- sym.addAnnotation(definitions.AnnotationDefaultAttr)
+ sym.addAnnotation(AnnotationDefaultAttr)
in.skip(attrLen)
// Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
case tpnme.RuntimeAnnotationATTR =>
@@ -920,7 +842,7 @@ abstract class ClassfileParser {
parseExceptions(attrLen)
case tpnme.SourceFileATTR =>
- val srcfileLeaf = pool.getName(in.nextChar).toString.trim
+ val srcfileLeaf = readName().toString.trim
val srcpath = sym.enclosingPackage match {
case NoSymbol => srcfileLeaf
case rootMirror.EmptyPackage => srcfileLeaf
@@ -939,8 +861,8 @@ abstract class ClassfileParser {
}
def parseAnnotArg: Option[ClassfileAnnotArg] = {
- val tag = in.nextByte.toChar
- val index = in.nextChar
+ val tag = u1
+ val index = u2
tag match {
case STRING_TAG =>
Some(LiteralAnnotArg(Constant(pool.getName(index).toString)))
@@ -951,7 +873,7 @@ abstract class ClassfileParser {
Some(LiteralAnnotArg(Constant(pool.getType(index))))
case ENUM_TAG =>
val t = pool.getType(index)
- val n = pool.getName(in.nextChar)
+ val n = readName()
val module = t.typeSymbol.companionModule
val s = module.info.decls.lookup(n)
if (s != NoSymbol) Some(LiteralAnnotArg(Constant(s)))
@@ -976,43 +898,43 @@ abstract class ClassfileParser {
}
def parseScalaSigBytes: Option[ScalaSigBytes] = {
- val tag = in.nextByte.toChar
+ val tag = u1
assert(tag == STRING_TAG, tag)
- Some(ScalaSigBytes(pool getBytes in.nextChar))
+ Some(ScalaSigBytes(pool getBytes u2))
}
def parseScalaLongSigBytes: Option[ScalaSigBytes] = {
- val tag = in.nextByte.toChar
+ val tag = u1
assert(tag == ARRAY_TAG, tag)
- val stringCount = in.nextChar
+ val stringCount = u2
val entries =
for (i <- 0 until stringCount) yield {
- val stag = in.nextByte.toChar
+ val stag = u1
assert(stag == STRING_TAG, stag)
- in.nextChar.toInt
+ u2
}
Some(ScalaSigBytes(pool.getBytes(entries.toList)))
}
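parseScalaLongSigBytes collects several STRING_TAG elements because a single CONSTANT_Utf8 payload is capped at 65535 bytes (its length field is a u2), so a long pickled signature is split across several pool entries and reassembled by pool.getBytes(entries.toList). A tiny sketch of that reassembly step, with `fetchChunk` standing in for the per-index pool lookup:

    // Illustrative only: concatenate the byte chunks referenced by `indices`, in order.
    object LongSigExample {
      def reassemble(indices: List[Int], fetchChunk: Int => Array[Byte]): Array[Byte] =
        indices.toArray.flatMap(i => fetchChunk(i))
    }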
- /** Parse and return a single annotation. If it is malformed,
- * return None.
+ /* Parse and return a single annotation. If it is malformed,
+ * return None.
*/
- def parseAnnotation(attrNameIndex: Char): Option[AnnotationInfo] = try {
+ def parseAnnotation(attrNameIndex: Int): Option[AnnotationInfo] = try {
val attrType = pool.getType(attrNameIndex)
- val nargs = in.nextChar
+ val nargs = u2
val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)]
var hasError = false
for (i <- 0 until nargs) {
- val name = pool.getName(in.nextChar)
+ val name = readName()
// The "bytes: String" argument of the ScalaSignature attribute is parsed specially so that it is
// available as an array of bytes (the pickled Scala signature) instead of as a string. The pickled signature
// is encoded as a string because of limitations in the Java class file format.
- if ((attrType == definitions.ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
+ if ((attrType == ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
parseScalaSigBytes match {
case Some(c) => nvpairs += ((name, c))
case None => hasError = true
}
- else if ((attrType == definitions.ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes))
+ else if ((attrType == ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes))
parseScalaLongSigBytes match {
case Some(c) => nvpairs += ((name, c))
case None => hasError = true
@@ -1037,20 +959,20 @@ abstract class ClassfileParser {
// with a `FatalError` exception, handled above. Here you'd end up after an NPE (for example),
// and that should never be swallowed silently.
warning(s"Caught: $ex while parsing annotations in ${in.file}")
- if (settings.debug.value) ex.printStackTrace()
+ if (settings.debug) ex.printStackTrace()
None // ignore malformed annotations
}
- /**
+ /*
* Parse the "Exceptions" attribute which denotes the exceptions
* thrown by a method.
*/
def parseExceptions(len: Int) {
- val nClasses = in.nextChar
+ val nClasses = u2
for (n <- 0 until nClasses) {
// FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
- val cls = pool.getClassSymbol(in.nextChar.toInt)
+ val cls = pool.getClassSymbol(u2)
// we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation
// and that method requires Symbol to be forced to give the right answers, see SI-7107 for details
cls.initialize
@@ -1058,16 +980,16 @@ abstract class ClassfileParser {
}
}
- /** Parse a sequence of annotations and attaches them to the
- * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */
+ /* Parse a sequence of annotations and attach them to the
+ * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */
def parseAnnotations(len: Int): Option[AnnotationInfo] = {
- val nAttr = in.nextChar
+ val nAttr = u2
var scalaSigAnnot: Option[AnnotationInfo] = None
for (n <- 0 until nAttr)
- parseAnnotation(in.nextChar) match {
- case Some(scalaSig) if (scalaSig.atp == definitions.ScalaSignatureAnnotation.tpe) =>
+ parseAnnotation(u2) match {
+ case Some(scalaSig) if (scalaSig.atp == ScalaSignatureAnnotation.tpe) =>
scalaSigAnnot = Some(scalaSig)
- case Some(scalaSig) if (scalaSig.atp == definitions.ScalaLongSignatureAnnotation.tpe) =>
+ case Some(scalaSig) if (scalaSig.atp == ScalaLongSignatureAnnotation.tpe) =>
scalaSigAnnot = Some(scalaSig)
case Some(annot) =>
sym.addAnnotation(annot)
@@ -1077,7 +999,7 @@ abstract class ClassfileParser {
}
// begin parseAttributes
- for (i <- 0 until in.nextChar) parseAttribute()
+ for (i <- 0 until u2) parseAttribute()
}
/** Enter own inner classes in the right scope. It needs the scopes to be set up,
@@ -1087,16 +1009,17 @@ abstract class ClassfileParser {
def className(name: Name): Name =
name.subName(name.lastPos('.') + 1, name.length)
- def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) {
- val completer = new global.loaders.ClassfileLoader(file)
+ def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) {
+ def jflags = entry.jflags
+ val completer = new loaders.ClassfileLoader(file)
val name = entry.originalName
- var sflags = toScalaClassFlags(jflags)
- val owner = getOwner(jflags)
+ val sflags = jflags.toScalaFlags
+ val owner = ownerForFlags(jflags)
val scope = getScope(jflags)
val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
- innerModule.moduleClass setInfo global.loaders.moduleClassLoader
+ innerModule.moduleClass setInfo loaders.moduleClassLoader
List(innerClass, innerModule.moduleClass) foreach (_.associatedFile = file)
scope enter innerClass
@@ -1117,10 +1040,10 @@ abstract class ClassfileParser {
for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
- val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
+ val file = classPath.findSourceFile(entry.externalName.toString) getOrElse {
throw new AssertionError(entry.externalName)
}
- enterClassAndModule(entry, file, entry.jflags)
+ enterClassAndModule(entry, file)
}
}
}
@@ -1133,26 +1056,27 @@ abstract class ClassfileParser {
skipSuperclasses()
skipMembers() // fields
skipMembers() // methods
- val attrs = in.nextChar
+ val attrs = u2
for (i <- 0 until attrs) {
- val attrName = pool.getName(in.nextChar).toTypeName
- val attrLen = in.nextInt
+ val attrName = readTypeName()
+ val attrLen = u4
attrName match {
case tpnme.SignatureATTR =>
in.skip(attrLen)
case tpnme.ScalaSignatureATTR =>
isScala = true
val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen)
- pbuf.readNat; pbuf.readNat;
+ pbuf.readNat(); pbuf.readNat()
if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature
isScalaAnnot = true // is in a ScalaSignature annotation.
in.skip(attrLen)
case tpnme.ScalaATTR =>
isScalaRaw = true
case tpnme.InnerClassesATTR if !isScala =>
- val entries = in.nextChar.toInt
+ val entries = u2
for (i <- 0 until entries) {
- val innerIndex, outerIndex, nameIndex, jflags = in.nextChar.toInt
+ val innerIndex, outerIndex, nameIndex = u2
+ val jflags = readInnerClassFlags()
if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0)
innerClasses add InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
}
@@ -1164,31 +1088,19 @@ abstract class ClassfileParser {
}
/** An entry in the InnerClasses attribute of this class file. */
- case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: Int) {
+ case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: JavaAccFlags) {
def externalName = pool getClassName external
def outerName = pool getClassName outer
def originalName = pool getName name
- def isStatic = ClassfileParser.this.isStatic(jflags)
def isModule = originalName.isTermName
- def scope = if (isStatic) staticScope else instanceScope
- def enclosing = if (isStatic) enclModule else enclClass
+ def scope = if (jflags.isStatic) staticScope else instanceScope
+ def enclosing = if (jflags.isStatic) enclModule else enclClass
// The name of the outer class, without its trailing $ if it has one.
private def strippedOuter = nme stripModuleSuffix outerName
private def isInner = innerClasses contains strippedOuter
private def enclClass = if (isInner) innerClasses innerSymbol strippedOuter else classNameToSymbol(strippedOuter)
private def enclModule = enclClass.companionModule
-
- private def staticWord = if (isStatic) "static " else ""
- override def toString = s"$staticWord$originalName in $outerName ($externalName)"
- }
-
- /** Return the Symbol of the top level class enclosing `name`,
- * or the symbol of `name` itself if no enclosing classes are found.
- */
- def topLevelClass(name: Name): Symbol = innerClasses getEntry name match {
- case Some(entry) => topLevelClass(entry.outerName)
- case _ => classNameToSymbol(name)
}
/** Return the class symbol for the given name. It looks it up in its outer class.
@@ -1213,20 +1125,16 @@ abstract class ClassfileParser {
case Some(entry) => innerSymbol(entry)
case _ => NoSymbol
}
- // if loading during initialization of `definitions` typerPhase is not yet set.
- // in that case we simply load the member at the current phase
- @inline private def enteringTyperIfPossible(body: => Symbol): Symbol =
- if (currentRun.typerPhase eq null) body else beforeTyper(body)
private def innerSymbol(entry: InnerClassEntry): Symbol = {
val name = entry.originalName.toTypeName
val enclosing = entry.enclosing
def getMember = (
if (enclosing == clazz) entry.scope lookup name
- else enclosing.info member name
+ else lookupMemberAtTyperPhaseIfPossible(enclosing, name)
)
- enteringTyperIfPossible(getMember)
- /** There used to be an assertion that this result is not NoSymbol; changing it to an error
+ getMember
+ /* There used to be an assertion that this result is not NoSymbol; changing it to an error
* revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
* in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
* thought it wasn't triggering, I removed it entirely.
@@ -1234,6 +1142,9 @@ abstract class ClassfileParser {
}
}
+ class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
+ override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
+ }
class LazyAliasType(alias: Symbol) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
sym setInfo createFromClonedSymbols(alias.initialize.typeParams, alias.tpe)(typeFun)
@@ -1241,32 +1152,29 @@ abstract class ClassfileParser {
}
def skipAttributes() {
- val attrCount = in.nextChar
- for (i <- 0 until attrCount) {
- in.skip(2); in.skip(in.nextInt)
+ var attrCount: Int = u2
+ while (attrCount > 0) {
+ in skip 2
+ in skip u4
+ attrCount -= 1
}
}
def skipMembers() {
- val memberCount = in.nextChar
- for (i <- 0 until memberCount) {
- in.skip(6); skipAttributes()
+ var memberCount: Int = u2
+ while (memberCount > 0) {
+ in skip 6
+ skipAttributes()
+ memberCount -= 1
}
}
def skipSuperclasses() {
in.skip(2) // superclass
- val ifaces = in.nextChar
+ val ifaces = u2
in.skip(2 * ifaces)
}
- protected def getOwner(flags: Int): Symbol =
- if (isStatic(flags)) moduleClass else clazz
-
- protected def getScope(flags: Int): Scope =
- if (isStatic(flags)) staticScope else instanceScope
-
- private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
- private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0
- private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
+ protected def getScope(flags: JavaAccFlags): Scope =
+ if (flags.isStatic) staticScope else instanceScope
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index d0c540a2c6..6ca2205881 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -3,15 +3,15 @@
* @author Iulian Dragos
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package symtab
package classfile
import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
-import backend.icode._
import ClassfileConstants._
-import scala.reflect.internal.Flags._
+import scala.reflect.internal.JavaAccFlags
/** ICode reader from Java bytecode.
*
@@ -20,6 +20,8 @@ import scala.reflect.internal.Flags._
*/
abstract class ICodeReader extends ClassfileParser {
val global: Global
+ val symbolTable: global.type
+ val loaders: global.loaders.type
import global._
import icodes._
@@ -28,12 +30,100 @@ abstract class ICodeReader extends ClassfileParser {
var method: IMethod = NoIMethod // the current IMethod
var isScalaModule = false
+ override protected type ThisConstantPool = ICodeConstantPool
+ override protected def newConstantPool = new ICodeConstantPool
+
+ /** Try to force the chain of enclosing classes for the given name. Otherwise
+ * flatten would not lift classes that were not referenced in the source code.
+ */
+ def forceMangledName(name: Name, module: Boolean): Symbol = {
+ val parts = name.decode.toString.split(Array('.', '$'))
+ var sym: Symbol = rootMirror.RootClass
+
+ // was "at flatten.prev"
+ enteringFlatten {
+ for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
+ val sym1 = enteringIcode {
+ sym.linkedClassOfClass.info
+ sym.info.decl(part.encode)
+ }//.suchThat(module == _.isModule)
+
+ sym = sym1 orElse sym.info.decl(part.encode.toTypeName)
+ }
+ }
+ sym
+ }
+
+ protected class ICodeConstantPool extends ConstantPool {
+ /** Return the symbol of the class member at `index`.
+ * The following special cases exist:
+ * - If the member refers to special `MODULE$` static field, return
+ * the symbol of the corresponding module.
+ * - If the member is a field, and is not found with the given name,
+ * another try is made by appending `nme.LOCAL_SUFFIX_STRING`
+ * - If no symbol is found in the right tpe, a new try is made in the
+ * companion class, in case the owner is an implementation class.
+ */
+ def getMemberSymbol(index: Int, static: Boolean): Symbol = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var f = values(index).asInstanceOf[Symbol]
+ if (f eq null) {
+ val start = starts(index)
+ val first = in.buf(start).toInt
+ if (first != CONSTANT_FIELDREF &&
+ first != CONSTANT_METHODREF &&
+ first != CONSTANT_INTFMETHODREF) errorBadTag(start)
+ val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt)
+ debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
+ val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
+ debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
+
+ forceMangledName(tpe0.typeSymbol.name, module = false)
+ val (name, tpe) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
+ if (name == nme.MODULE_INSTANCE_FIELD) {
+ val index = in.getChar(start + 1).toInt
+ val name = getExternalName(in.getChar(starts(index).toInt + 1).toInt)
+ //assert(name.endsWith("$"), "Not a module class: " + name)
+ f = forceMangledName(name dropRight 1, module = true)
+ if (f == NoSymbol)
+ f = rootMirror.getModuleByName(name dropRight 1)
+ } else {
+ val origName = nme.unexpandedName(name)
+ val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
+ f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe)
+ if (f == NoSymbol)
+ f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
+ if (f == NoSymbol) {
+ // if it's an impl class, try to find its static member inside the class
+ if (ownerTpe.typeSymbol.isImplClass) {
+ f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
+ } else {
+ log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
+ f = tpe match {
+ case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos)
+ case _ => owner.newVariable(name.toTermName, owner.pos)
+ }
+ f setInfo tpe
+ log("created fake member " + f.fullName)
+ }
+ }
+ }
+ assert(f != NoSymbol,
+ s"could not find $name: $tpe in $ownerTpe" + (
+ if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else ""
+ )
+ )
+ values(index) = f
+ }
+ f
+ }
+ }
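getMemberSymbol relies on the layout of a Fieldref/Methodref/InterfaceMethodref entry: a 1-byte tag followed by a u2 class_index and a u2 name_and_type_index, hence the reads at start + 1 and start + 3 above. A minimal sketch of that layout with a hand-rolled big-endian reader; the names here are illustrative, not the compiler's.

    object MemberRefLayout {
      final case class MemberRef(classIndex: Int, nameAndTypeIndex: Int)

      def read(buf: Array[Byte], start: Int): MemberRef = {
        def u2(at: Int): Int = ((buf(at) & 0xff) << 8) | (buf(at + 1) & 0xff) // big-endian u2
        MemberRef(u2(start + 1), u2(start + 3)) // skip the 1-byte tag at `start`
      }
    }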
+
/** Read back bytecode for the given class symbol. It returns
* two IClass objects, one for static members and one
* for non-static members.
*/
def readClass(cls: Symbol): (IClass, IClass) = {
- var classFile: io.AbstractFile = null;
cls.info // ensure accurate type information
isScalaModule = cls.isModule && !cls.isJavaDefined
@@ -48,58 +138,55 @@ abstract class ICodeReader extends ClassfileParser {
(staticCode, instanceCode)
}
- /** If we're parsing a scala module, the owner of members is always
- * the module symbol.
- */
- override def getOwner(jflags: Int): Symbol =
- if (isScalaModule) this.staticModule
- else super.getOwner(jflags)
-
override def parseClass() {
this.instanceCode = new IClass(clazz)
this.staticCode = new IClass(staticModule)
- val jflags = in.nextChar
- val isAttribute = (jflags & JAVA_ACC_ANNOTATION) != 0
- val sflags = toScalaClassFlags(jflags) // what, this is never used??
- val c = pool getClassSymbol in.nextChar
+ u2
+ pool getClassSymbol u2
parseInnerClasses()
in.skip(2) // super class
- in.skip(2 * in.nextChar) // interfaces
- val fieldCount = in.nextChar
+ in.skip(2 * u2) // interfaces
+ val fieldCount = u2
for (i <- 0 until fieldCount) parseField()
- val methodCount = in.nextChar
- for (i <- 0 until methodCount) parseMethod();
+ val methodCount = u2
+ for (i <- 0 until methodCount) parseMethod()
instanceCode.methods = instanceCode.methods.reverse
staticCode.methods = staticCode.methods.reverse
}
override def parseField() {
- val (jflags, sym) = parseMember(true)
+ val (jflags, sym) = parseMember(field = true)
getCode(jflags) addField new IField(sym)
skipAttributes()
}
- private def parseMember(field: Boolean): (Int, Symbol) = {
- val jflags = in.nextChar
- val name = pool getName in.nextChar
- val owner = getOwner(jflags)
- val dummySym = owner.newMethod(name, owner.pos, toScalaMethodFlags(jflags))
+ private def parseMember(field: Boolean): (JavaAccFlags, Symbol) = {
+ val jflags = JavaAccFlags(u2)
+ val name = pool getName u2
+ /* If we're parsing a scala module, the owner of members is always
+ * the module symbol.
+ */
+ val owner = (
+ if (isScalaModule) staticModule
+ else if (jflags.isStatic) moduleClass
+ else clazz
+ )
+ val dummySym = owner.newMethod(name.toTermName, owner.pos, jflags.toScalaFlags)
try {
- val ch = in.nextChar
+ val ch = u2
val tpe = pool.getType(dummySym, ch)
if ("<clinit>" == name.toString)
(jflags, NoSymbol)
else {
- val owner = getOwner(jflags)
- var sym = owner.info.findMember(name, 0, 0, false).suchThat(old => sameType(old.tpe, tpe))
+ var sym = owner.info.findMember(name, 0, 0, stableOnly = false).suchThat(old => sameType(old.tpe, tpe))
if (sym == NoSymbol)
- sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
+ sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
if (sym == NoSymbol) {
- sym = if (field) owner.newValue(name, owner.pos, toScalaFieldFlags(jflags)) else dummySym
+ sym = if (field) owner.newValue(name.toTermName, owner.pos, jflags.toScalaFlags) else dummySym
sym setInfoAndEnter tpe
log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.")
}
@@ -124,31 +211,31 @@ abstract class ICodeReader extends ClassfileParser {
}
override def parseMethod() {
- val (jflags, sym) = parseMember(false)
- var beginning = in.bp
+ val (jflags, sym) = parseMember(field = false)
+ val beginning = in.bp
try {
if (sym != NoSymbol) {
this.method = new IMethod(sym)
this.method.returnType = toTypeKind(sym.tpe.resultType)
getCode(jflags).addMethod(this.method)
- if ((jflags & JAVA_ACC_NATIVE) != 0)
+ if (jflags.isNative)
this.method.native = true
- val attributeCount = in.nextChar
+ val attributeCount = u2
for (i <- 0 until attributeCount) parseAttribute()
} else {
- debuglog("Skipping non-existent method.");
- skipAttributes();
+ debuglog("Skipping non-existent method.")
+ skipAttributes()
}
} catch {
case e: MissingRequirementError =>
- in.bp = beginning; skipAttributes
- debuglog("Skipping non-existent method. " + e.msg);
+ in.bp = beginning; skipAttributes()
+ debuglog("Skipping non-existent method. " + e.msg)
}
}
def parseAttribute() {
- val attrName = pool.getName(in.nextChar).toTypeName
- val attrLen = in.nextInt
+ val attrName = pool.getName(u2).toTypeName
+ val attrLen = u4
attrName match {
case tpnme.CodeATTR =>
parseByteCode()
@@ -169,12 +256,12 @@ abstract class ICodeReader extends ClassfileParser {
rootMirror.getClassByName(name)
}
else if (nme.isModuleName(name)) {
- val strippedName = nme.stripModuleSuffix(name)
- forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModule(strippedName)
+ val strippedName = name.dropModule
+ forceMangledName(newTermName(strippedName.decode), module = true) orElse rootMirror.getModuleByName(strippedName)
}
else {
- forceMangledName(name, false)
- afterFlatten(rootMirror.getClassByName(name.toTypeName))
+ forceMangledName(name, module = false)
+ exitingFlatten(rootMirror.getClassByName(name.toTypeName))
}
if (sym.isModule)
sym.moduleClass
@@ -192,9 +279,9 @@ abstract class ICodeReader extends ClassfileParser {
/** Parse java bytecode into ICode */
def parseByteCode() {
- maxStack = in.nextChar
- maxLocals = in.nextChar
- val codeLength = in.nextInt
+ maxStack = u2
+ maxLocals = u2
+ val codeLength = u4
val code = new LinearCode
def parseInstruction() {
@@ -202,27 +289,26 @@ abstract class ICodeReader extends ClassfileParser {
import code._
var size = 1 // instruction size
- /** Parse 16 bit jump target. */
+ /* Parse 16 bit jump target. */
def parseJumpTarget = {
size += 2
- val offset = in.nextChar.toShort
+ val offset = u2.toShort
val target = pc + offset
assert(target >= 0 && target < codeLength, "Illegal jump target: " + target)
target
}
- /** Parse 32 bit jump target. */
+ /* Parse 32 bit jump target. */
def parseJumpTargetW: Int = {
size += 4
- val offset = in.nextInt
+ val offset = u4
val target = pc + offset
assert(target >= 0 && target < codeLength, "Illegal jump target: " + target + " pc: " + pc + " offset: " + offset)
target
}
- val instr = toUnsignedByte(in.nextByte)
- instr match {
- case JVM.nop => parseInstruction
+ u1 match {
+ case JVM.nop => parseInstruction()
case JVM.aconst_null => code emit CONSTANT(Constant(null))
case JVM.iconst_m1 => code emit CONSTANT(Constant(-1))
case JVM.iconst_0 => code emit CONSTANT(Constant(0))
@@ -240,21 +326,21 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.dconst_0 => code emit CONSTANT(Constant(0.0))
case JVM.dconst_1 => code emit CONSTANT(Constant(1.0))
- case JVM.bipush => code.emit(CONSTANT(Constant(in.nextByte))); size += 1
- case JVM.sipush => code.emit(CONSTANT(Constant(in.nextChar))); size += 2
- case JVM.ldc => code.emit(CONSTANT(pool.getConstant(toUnsignedByte(in.nextByte)))); size += 1
- case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
- case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, INT))); size += 1
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
+ case JVM.bipush => code.emit(CONSTANT(Constant(u1))); size += 1
+ case JVM.sipush => code.emit(CONSTANT(Constant(u2))); size += 2
+ case JVM.ldc => code.emit(CONSTANT(pool.getConstant(u1))); size += 1
+ case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
+ case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
+ case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u1, INT))); size += 1
+ case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u1, LONG))); size += 1
+ case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u1, FLOAT))); size += 1
+ case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
case JVM.aload =>
- val local = in.nextByte.toInt; size += 1
+ val local = u1.toInt; size += 1
if (local == 0 && !method.isStatic)
- code.emit(THIS(method.symbol.owner));
+ code.emit(THIS(method.symbol.owner))
else
- code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)));
+ code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)))
case JVM.iload_0 => code.emit(LOAD_LOCAL(code.getLocal(0, INT)))
case JVM.iload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, INT)))
@@ -274,9 +360,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.dload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, DOUBLE)))
case JVM.aload_0 =>
if (!method.isStatic)
- code.emit(THIS(method.symbol.owner));
+ code.emit(THIS(method.symbol.owner))
else
- code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)));
+ code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)))
case JVM.aload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, ObjectReference)))
case JVM.aload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, ObjectReference)))
case JVM.aload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, ObjectReference)))
@@ -290,11 +376,11 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.caload => code.emit(LOAD_ARRAY_ITEM(CHAR))
case JVM.saload => code.emit(LOAD_ARRAY_ITEM(SHORT))
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, INT))); size += 1
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, ObjectReference))); size += 1
+ case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u1, INT))); size += 1
+ case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u1, LONG))); size += 1
+ case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u1, FLOAT))); size += 1
+ case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
+ case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u1, ObjectReference))); size += 1
case JVM.istore_0 => code.emit(STORE_LOCAL(code.getLocal(0, INT)))
case JVM.istore_1 => code.emit(STORE_LOCAL(code.getLocal(1, INT)))
case JVM.istore_2 => code.emit(STORE_LOCAL(code.getLocal(2, INT)))
@@ -378,9 +464,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.lxor => code.emit(CALL_PRIMITIVE(Logical(XOR, LONG)))
case JVM.iinc =>
size += 2
- val local = code.getLocal(in.nextByte, INT)
+ val local = code.getLocal(u1, INT)
code.emit(LOAD_LOCAL(local))
- code.emit(CONSTANT(Constant(in.nextByte)))
+ code.emit(CONSTANT(Constant(u1)))
code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
code.emit(STORE_LOCAL(local))
@@ -430,14 +516,14 @@ abstract class ICodeReader extends ClassfileParser {
size += padding
in.bp += padding
assert((pc + size % 4) != 0, pc)
-/* var byte1 = in.nextByte; size += 1;
- while (byte1 == 0) { byte1 = in.nextByte; size += 1; }
- val default = byte1 << 24 | in.nextByte << 16 | in.nextByte << 8 | in.nextByte;
+/* var byte1 = u1; size += 1;
+ while (byte1 == 0) { byte1 = u1; size += 1; }
+ val default = byte1 << 24 | u1 << 16 | u1 << 8 | u1;
size = size + 3
*/
- val default = pc + in.nextInt; size += 4
- val low = in.nextInt
- val high = in.nextInt
+ val default = pc + u4; size += 4
+ val low = u4
+ val high = u4
size += 8
assert(low <= high, "Value low not <= high for tableswitch.")
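The padding consumed before the tableswitch payload aligns its operands to a four-byte boundary measured from the start of the method's bytecode, so between zero and three padding bytes follow the opcode. A small sketch of that computation; the names are local to the example.

    object SwitchPaddingExample {
      /** Padding bytes between a tableswitch/lookupswitch opcode at `pc` and its operands. */
      def paddingAfterOpcode(pc: Int): Int = {
        val operandStart = pc + 1      // first byte after the opcode
        (4 - (operandStart % 4)) % 4   // 0..3 padding bytes
      }
    }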
@@ -450,13 +536,13 @@ abstract class ICodeReader extends ClassfileParser {
size += padding
in.bp += padding
assert((pc + size % 4) != 0, pc)
- val default = pc + in.nextInt; size += 4
- val npairs = in.nextInt; size += 4
+ val default = pc + u4; size += 4
+ val npairs = u4; size += 4
var tags: List[List[Int]] = Nil
var targets: List[Int] = Nil
var i = 0
while (i < npairs) {
- tags = List(in.nextInt) :: tags; size += 4
+ tags = List(u4) :: tags; size += 4
targets = parseJumpTargetW :: targets; // parseJumpTargetW updates 'size' itself
i += 1
}
@@ -471,59 +557,59 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.return_ => code.emit(RETURN(UNIT))
case JVM.getstatic =>
- val field = pool.getMemberSymbol(in.nextChar, true); size += 2
+ val field = pool.getMemberSymbol(u2, static = true); size += 2
if (field.hasModuleFlag)
code emit LOAD_MODULE(field)
else
- code emit LOAD_FIELD(field, true)
+ code emit LOAD_FIELD(field, isStatic = true)
case JVM.putstatic =>
- val field = pool.getMemberSymbol(in.nextChar, true); size += 2
- code.emit(STORE_FIELD(field, true))
+ val field = pool.getMemberSymbol(u2, static = true); size += 2
+ code.emit(STORE_FIELD(field, isStatic = true))
case JVM.getfield =>
- val field = pool.getMemberSymbol(in.nextChar, false); size += 2
- code.emit(LOAD_FIELD(field, false))
+ val field = pool.getMemberSymbol(u2, static = false); size += 2
+ code.emit(LOAD_FIELD(field, isStatic = false))
case JVM.putfield =>
- val field = pool.getMemberSymbol(in.nextChar, false); size += 2
- code.emit(STORE_FIELD(field, false))
+ val field = pool.getMemberSymbol(u2, static = false); size += 2
+ code.emit(STORE_FIELD(field, isStatic = false))
case JVM.invokevirtual =>
- val m = pool.getMemberSymbol(in.nextChar, false); size += 2
+ val m = pool.getMemberSymbol(u2, static = false); size += 2
code.emit(CALL_METHOD(m, Dynamic))
method.updateRecursive(m)
case JVM.invokeinterface =>
- val m = pool.getMemberSymbol(in.nextChar, false); size += 4
+ val m = pool.getMemberSymbol(u2, static = false); size += 4
in.skip(2)
code.emit(CALL_METHOD(m, Dynamic))
// invokeinterface can't be recursive
case JVM.invokespecial =>
- val m = pool.getMemberSymbol(in.nextChar, false); size += 2
- val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(true)
- else SuperCall(m.owner.name);
+ val m = pool.getMemberSymbol(u2, static = false); size += 2
+ val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(onInstance = true)
+ else SuperCall(m.owner.name)
code.emit(CALL_METHOD(m, style))
method.updateRecursive(m)
case JVM.invokestatic =>
- val m = pool.getMemberSymbol(in.nextChar, true); size += 2
+ val m = pool.getMemberSymbol(u2, static = true); size += 2
if (isBox(m))
code.emit(BOX(toTypeKind(m.info.paramTypes.head)))
else if (isUnbox(m))
code.emit(UNBOX(toTypeKind(m.info.resultType)))
else {
- code.emit(CALL_METHOD(m, Static(false)))
+ code.emit(CALL_METHOD(m, Static(onInstance = false)))
method.updateRecursive(m)
}
case JVM.invokedynamic =>
          // TODO: this is just a placeholder. A real implementation must parse the class constant entry
          debuglog("Found JVM invokedynamic instruction, inserting placeholder ICode INVOKE_DYNAMIC.")
containsInvokeDynamic = true
- val poolEntry = in.nextChar
+ val poolEntry = in.nextChar.toInt
in.skip(2)
code.emit(INVOKE_DYNAMIC(poolEntry))
case JVM.new_ =>
- code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar))))
+ code.emit(NEW(REFERENCE(pool.getClassSymbol(u2))))
size += 2
case JVM.newarray =>
- val kind = in.nextByte match {
+ val kind = u1 match {
case T_BOOLEAN => BOOL
case T_CHAR => CHAR
case T_FLOAT => FLOAT
@@ -537,35 +623,35 @@ abstract class ICodeReader extends ClassfileParser {
code.emit(CREATE_ARRAY(kind, 1))
case JVM.anewarray =>
- val tpe = pool.getClassOrArrayType(in.nextChar); size += 2
+ val tpe = pool.getClassOrArrayType(u2); size += 2
code.emit(CREATE_ARRAY(toTypeKind(tpe), 1))
case JVM.arraylength => code.emit(CALL_PRIMITIVE(ArrayLength(ObjectReference))); // the kind does not matter
case JVM.athrow => code.emit(THROW(definitions.ThrowableClass))
case JVM.checkcast =>
- code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+ code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
case JVM.instanceof =>
- code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+ code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
case JVM.monitorenter => code.emit(MONITOR_ENTER())
case JVM.monitorexit => code.emit(MONITOR_EXIT())
case JVM.wide =>
size += 1
- toUnsignedByte(in.nextByte) match {
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, INT))); size += 2
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
- case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 2
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, INT))); size += 2
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 2
+ u1 match {
+ case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u2, INT))); size += 2
+ case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u2, LONG))); size += 2
+ case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u2, FLOAT))); size += 2
+ case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
+ case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
+ case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u2, INT))); size += 2
+ case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u2, LONG))); size += 2
+ case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u2, FLOAT))); size += 2
+ case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
+ case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
case JVM.ret => sys.error("Cannot handle jsr/ret")
case JVM.iinc =>
size += 4
- val local = code.getLocal(in.nextChar, INT)
- code.emit(CONSTANT(Constant(in.nextChar)))
+ val local = code.getLocal(u2, INT)
+ code.emit(CONSTANT(Constant(u2)))
code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
code.emit(STORE_LOCAL(local))
case _ => sys.error("Invalid 'wide' operand")
@@ -573,8 +659,8 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.multianewarray =>
size += 3
- val tpe = toTypeKind(pool getClassOrArrayType in.nextChar)
- val dim = in.nextByte
+ val tpe = toTypeKind(pool getClassOrArrayType u2)
+ val dim = u1
// assert(dim == 1, "Cannot handle multidimensional arrays yet.")
code emit CREATE_ARRAY(tpe, dim)
@@ -598,16 +684,16 @@ abstract class ICodeReader extends ClassfileParser {
}
pc = 0
- while (pc < codeLength) parseInstruction
+ while (pc < codeLength) parseInstruction()
- val exceptionEntries = in.nextChar.toInt
+ val exceptionEntries = u2.toInt
code.containsEHs = (exceptionEntries != 0)
var i = 0
while (i < exceptionEntries) {
// skip start end PC
in.skip(4)
// read the handler PC
- code.jmpTargets += in.nextChar
+ code.jmpTargets += u2
// skip the exception type
in.skip(2)
i += 1
@@ -643,15 +729,13 @@ abstract class ICodeReader extends ClassfileParser {
/** Return the icode class that should include members with the given flags.
* There are two possible classes, the static part and the instance part.
*/
- def getCode(flags: Int): IClass =
- if (isScalaModule) staticCode
- else if ((flags & JAVA_ACC_STATIC) != 0) staticCode
- else instanceCode
+ def getCode(flags: JavaAccFlags): IClass =
+ if (isScalaModule || flags.isStatic) staticCode else instanceCode
class LinearCode {
- var instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
- var jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
- var locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
+ val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
+ val jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
+ val locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
var containsDUPX = false
var containsNEW = false
@@ -683,7 +767,6 @@ abstract class ICodeReader extends ClassfileParser {
val blocks = makeBasicBlocks
var otherBlock: BasicBlock = NoBasicBlock
- var disableJmpTarget = false
for ((pc, instr) <- instrs.iterator) {
// Console.println("> " + pc + ": " + instr);
@@ -691,7 +774,7 @@ abstract class ICodeReader extends ClassfileParser {
otherBlock = blocks(pc)
if (!bb.closed && otherBlock != bb) {
bb.emit(JUMP(otherBlock))
- bb.close
+ bb.close()
// Console.println("\t> closing bb: " + bb)
}
bb = otherBlock
@@ -734,46 +817,44 @@ abstract class ICodeReader extends ClassfileParser {
val tfa = new analysis.MethodTFA() {
import analysis._
- import analysis.typeFlowLattice.IState
/** Abstract interpretation for one instruction. */
override def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
- val bindings = out.vars
val stack = out.stack
import stack.push
i match {
case DUP_X1 =>
val (one, two) = stack.pop2
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
case DUP_X2 =>
val (one, two, three) = stack.pop3
- push(one); push(three); push(two); push(one);
+ push(one); push(three); push(two); push(one)
case DUP2_X1 =>
val (one, two) = stack.pop2
if (one.isWideType) {
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
} else {
val three = stack.pop
- push(two); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(three); push(two); push(one)
}
case DUP2_X2 =>
val (one, two) = stack.pop2
if (one.isWideType && two.isWideType) {
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
} else if (one.isWideType) {
val three = stack.pop
assert(!three.isWideType, "Impossible")
- push(one); push(three); push(two); push(one);
+ push(one); push(three); push(two); push(one)
} else {
val three = stack.pop
if (three.isWideType) {
- push(two); push(one); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(one); push(three); push(two); push(one)
} else {
val four = stack.pop
- push(two); push(one); push(four); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(four); push(one); push(three); push(two); push(one)
}
}
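The pop/push sequences in mutatingInterpret mirror the JVM stack effects of the dup variants. Restated as plain functions over a list whose head is the top of the stack (illustration only; single-word values, deliberately partial matches):

    // DUP_X1:  ..., v2, v1      ->  ..., v1, v2, v1
    def dupX1[A](s: List[A])  = s match { case v1 :: v2 :: rest       => v1 :: v2 :: v1 :: rest }
    // DUP_X2:  ..., v3, v2, v1  ->  ..., v1, v3, v2, v1
    def dupX2[A](s: List[A])  = s match { case v1 :: v2 :: v3 :: rest => v1 :: v2 :: v3 :: v1 :: rest }
    // DUP2_X1 (single-word case):  ..., v3, v2, v1  ->  ..., v2, v1, v3, v2, v1
    def dup2X1[A](s: List[A]) = s match { case v1 :: v2 :: v3 :: rest => v1 :: v2 :: v3 :: v1 :: v2 :: rest }
    // dupX1(List("v1", "v2")) == List("v1", "v2", "v1")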
@@ -786,7 +867,7 @@ abstract class ICodeReader extends ClassfileParser {
// method.dump
tfa.init(method)
- tfa.run
+ tfa.run()
for (bb <- linearizer.linearize(method)) {
var info = tfa.in(bb)
for (i <- bb.toList) {
@@ -801,7 +882,7 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
case DUP_X2 =>
val one = info.stack.types(0)
@@ -814,30 +895,30 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
else {
- val tmp3 = freshLocal(info.stack.types(2));
+ val tmp3 = freshLocal(info.stack.types(2))
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
STORE_LOCAL(tmp3),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
case DUP2_X1 =>
val one = info.stack.types(0)
val two = info.stack.types(1)
- val tmp1 = freshLocal(one);
- val tmp2 = freshLocal(two);
+ val tmp1 = freshLocal(one)
+ val tmp2 = freshLocal(two)
if (one.isWideType) {
assert(!two.isWideType, "Impossible")
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val tmp3 = freshLocal(info.stack.types(2))
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
@@ -846,7 +927,7 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
case DUP2_X2 =>
@@ -859,21 +940,21 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else if (one.isWideType) {
val three = info.stack.types(2)
assert(!two.isWideType && !three.isWideType, "Impossible")
- val tmp3 = freshLocal(three);
+ val tmp3 = freshLocal(three)
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
STORE_LOCAL(tmp3),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val three = info.stack.types(2)
- val tmp3 = freshLocal(three);
+ val tmp3 = freshLocal(three)
if (three.isWideType) {
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -882,10 +963,10 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val four = info.stack.types(3)
- val tmp4 = freshLocal(three);
+ val tmp4 = freshLocal(three)
assert(!four.isWideType, "Impossible")
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -896,7 +977,7 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp4),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
}
case _ =>
@@ -911,11 +992,11 @@ abstract class ICodeReader extends ClassfileParser {
import opcodes._
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
rdef.init(method)
- rdef.run
+ rdef.run()
for (bb <- method.code.blocks ; (i, idx) <- bb.toList.zipWithIndex) i match {
case cm @ CALL_METHOD(m, Static(true)) if m.isClassConstructor =>
- def loop(bb0: BasicBlock, idx0: Int, depth: Int = 0): Unit = {
+ def loop(bb0: BasicBlock, idx0: Int, depth: Int): Unit = {
rdef.findDefs(bb0, idx0, 1, depth) match {
case ((bb1, idx1)) :: _ =>
bb1(idx1) match {
@@ -934,6 +1015,7 @@ abstract class ICodeReader extends ClassfileParser {
}
/** Return the local at given index, with the given type. */
+ def getLocal(idx: Char, kind: TypeKind): Local = getLocal(idx.toInt, kind)
def getLocal(idx: Int, kind: TypeKind): Local = {
assert(idx < maxLocals, "Index too large for local variable.")
@@ -952,7 +1034,7 @@ abstract class ICodeReader extends ClassfileParser {
locals.get(idx) match {
case Some(ls) =>
- val l = ls find { loc => loc._2 <:< kind }
+ val l = ls find { loc => loc._2 isAssignabledTo kind }
l match {
case Some((loc, _)) => loc
case None =>
@@ -963,8 +1045,8 @@ abstract class ICodeReader extends ClassfileParser {
l
}
case None =>
- checkValidIndex
- val l = freshLocal(idx, kind, false)
+ checkValidIndex()
+ val l = freshLocal(idx, kind, isArg = false)
debuglog("Added new local for idx " + idx + ": " + kind)
locals += (idx -> List((l, kind)))
l
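For context on why locals maps an index to a list of (Local, TypeKind) pairs rather than a single local: a JVM local-variable slot may be reused for values of unrelated types once an earlier local goes out of scope, so one bytecode index can legitimately correspond to several ICode locals of different kinds. A hedged illustration (actual slot assignment is up to the code generator and is not guaranteed):

    // The two block-scoped values go out of scope before the next block starts,
    // so the generated bytecode may reuse one slot for both an Int and a String;
    // getLocal would then keep two (Local, TypeKind) entries for that index.
    def slotReuse(): Unit = {
      { val n = 1;   println(n) }
      { val s = "a"; println(s) }
    }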
@@ -976,7 +1058,7 @@ abstract class ICodeReader extends ClassfileParser {
/** Return a fresh Local variable for the given index.
*/
private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = {
- val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType);
+ val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType)
val l = new Local(sym, kind, isArg)
method.addLocal(l)
l
@@ -988,7 +1070,7 @@ abstract class ICodeReader extends ClassfileParser {
* the original method. */
def freshLocal(kind: TypeKind): Local = {
count += 1
- freshLocal(maxLocals + count, kind, false)
+ freshLocal(maxLocals + count, kind, isArg = false)
}
/** add a method param with the given index. */
@@ -1006,7 +1088,8 @@ abstract class ICodeReader extends ClassfileParser {
jmpTargets += pc
}
- case class LJUMP(pc: Int) extends LazyJump(pc);
+ case class LJUMP(pc: Int) extends LazyJump(pc)
+
case class LCJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind)
extends LazyJump(success) {
override def toString(): String = "LCJUMP (" + kind + ") " + success + " : " + failure
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index ed7eb6d307..ce3e7b1bb5 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -11,6 +11,7 @@ import java.lang.Float.floatToIntBits
import java.lang.Double.doubleToLongBits
import scala.io.Codec
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
+import scala.reflect.internal.util.shortClassOfInstance
import scala.collection.mutable.LinkedHashMap
import PickleFormat._
import Flags._
@@ -26,12 +27,8 @@ import Flags._
abstract class Pickler extends SubComponent {
import global._
- private final val showSig = false
-
val phaseName = "pickler"
- currentRun
-
def newPhase(prev: Phase): StdPhase = new PicklePhase(prev)
class PicklePhase(prev: Phase) extends StdPhase(prev) {
@@ -68,7 +65,7 @@ abstract class Pickler extends SubComponent {
return
}
- if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
+ if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro) {
unit.error(t.pos, "macro has not been expanded")
return
}
@@ -84,7 +81,7 @@ abstract class Pickler extends SubComponent {
private var entries = new Array[AnyRef](256)
private var ep = 0
private val index = new LinkedHashMap[AnyRef, Int]
- private lazy val nonClassRoot = findOrElse(root.ownersIterator)(!_.isClass)(NoSymbol)
+ private lazy val nonClassRoot = findSymbol(root.ownersIterator)(!_.isClass)
private def isRootSym(sym: Symbol) =
sym.name.toTermName == rootName && sym.owner == rootOwner
@@ -92,12 +89,17 @@ abstract class Pickler extends SubComponent {
/** Returns usually symbol's owner, but picks classfile root instead
* for existentially bound variables that have a non-local owner.
* Question: Should this be done for refinement class symbols as well?
+ *
+ * Note: tree pickling also finds its way here; e.g. in SI-7501 the pickling
+ * of trees in annotation arguments considers the parameter symbol of a method
+ * called in such a tree as "local". The condition `sym.isValueParameter` was
+ * added to fix that bug, but there may be a better way.
*/
private def localizedOwner(sym: Symbol) =
if (isLocal(sym) && !isRootSym(sym) && !isLocal(sym.owner))
// don't use a class as the localized owner for type parameters that are not owned by a class: those are not instantiated by asSeenFrom
// however, they would suddenly be considered by asSeenFrom if their localized owner became a class (causing the crashes of #4079, #2741)
- (if(sym.isTypeParameter && !sym.owner.isClass) nonClassRoot
+ (if ((sym.isTypeParameter || sym.isValueParameter) && !sym.owner.isClass) nonClassRoot
else root)
else sym.owner
@@ -105,13 +107,14 @@ abstract class Pickler extends SubComponent {
* anyway? This is the case if symbol is a refinement class,
* an existentially bound variable, or a higher-order type parameter.
*/
- private def isLocal(sym: Symbol): Boolean =
- !sym.isPackageClass && sym != NoSymbol &&
- (isRootSym(sym) ||
- sym.isRefinementClass ||
- sym.isAbstractType && sym.hasFlag(EXISTENTIAL) || // existential param
- sym.isParameter ||
- isLocal(sym.owner))
+ private def isLocal(sym: Symbol): Boolean = (sym != NoSymbol) && !sym.isPackageClass && (
+ isRootSym(sym)
+ || sym.isRefinementClass
+ || sym.isAbstractType && sym.hasFlag(EXISTENTIAL) // existential param
+ || sym.isParameter
+ || isLocal(sym.owner)
+ )
+ private def isExternalSymbol(sym: Symbol): Boolean = (sym != NoSymbol) && !isLocal(sym)
// Phase 1 methods: Populate entries/index ------------------------------------
@@ -134,19 +137,47 @@ abstract class Pickler extends SubComponent {
true
}
+ private def deskolemizeTypeSymbols(ref: AnyRef): AnyRef = ref match {
+ case sym: Symbol => deskolemize(sym)
+ case _ => ref
+ }
+
+ /** If the symbol is a type skolem, deskolemize and log it.
+ * If we fail to deskolemize, in a method like
+ * trait Trait[+A] { def f[CC[X]] : CC[A] }
+ * the applied type CC[A] will hold a different CC symbol
+ * than the type-constructor type-parameter CC.
+ */
+ private def deskolemize(sym: Symbol): Symbol = {
+ if (sym.isTypeSkolem) {
+ val sym1 = sym.deSkolemize
+ log({
+ val what0 = sym.defString
+ val what = sym1.defString match {
+ case `what0` => what0
+ case other => what0 + "->" + other
+ }
+ val where = sym.enclMethod.fullLocationString
+ s"deskolemizing $what in $where"
+ })
+ sym1
+ }
+ else sym
+ }
+
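The doc comment's example can be spelled out; nothing below is taken from the patch, it merely restates the shape the comment describes:

    // Inside f, occurrences of CC in the result type CC[A] refer to a type
    // skolem for CC; without deskolemization before pickling, the pickled CC[A]
    // would point at a different symbol than the type parameter CC declared on f.
    trait Trait[+A] {
      def f[CC[X]]: CC[A]
    }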
/** Store symbol in index. If symbol is local, also store everything it references.
- *
- * @param sym ...
*/
- def putSymbol(sym: Symbol) {
+ def putSymbol(sym0: Symbol) {
+ val sym = deskolemize(sym0)
+
if (putEntry(sym)) {
if (isLocal(sym)) {
putEntry(sym.name)
putSymbol(sym.owner)
putSymbol(sym.privateWithin)
putType(sym.info)
- if (sym.thisSym.tpeHK != sym.tpeHK)
- putType(sym.typeOfThis);
+ if (sym.hasSelfType)
+ putType(sym.typeOfThis)
putSymbol(sym.alias)
if (!sym.children.isEmpty) {
val (locals, globals) = sym.children partition (_.isLocalClass)
@@ -173,257 +204,70 @@ abstract class Pickler extends SubComponent {
*/
private def putType(tp: Type): Unit = if (putEntry(tp)) {
tp match {
- case NoType | NoPrefix /*| DeBruijnIndex(_, _) */ =>
+ case NoType | NoPrefix =>
;
case ThisType(sym) =>
putSymbol(sym)
case SingleType(pre, sym) =>
- putType(pre); putSymbol(sym)
+ putType(pre)
+ putSymbol(sym)
case SuperType(thistpe, supertpe) =>
putType(thistpe)
putType(supertpe)
case ConstantType(value) =>
putConstant(value)
case TypeRef(pre, sym, args) =>
-// if (sym.isAbstractType && (sym hasFlag EXISTENTIAL))
-// if (!(boundSyms contains sym))
-// println("unbound existential: "+sym+sym.locationString)
- putType(pre); putSymbol(sym); putTypes(args)
+ putType(pre)
+ putSymbol(sym)
+ putTypes(args)
case TypeBounds(lo, hi) =>
- putType(lo); putType(hi)
- case RefinedType(parents, decls) =>
- val rclazz = tp.typeSymbol
- for (m <- decls.iterator)
- if (m.owner != rclazz) abort("bad refinement member "+m+" of "+tp+", owner = "+m.owner)
- putSymbol(rclazz); putTypes(parents); putSymbols(decls.toList)
- case ClassInfoType(parents, decls, clazz) =>
- putSymbol(clazz); putTypes(parents); putSymbols(decls.toList)
+ putType(lo)
+ putType(hi)
+ case tp: CompoundType =>
+ putSymbol(tp.typeSymbol)
+ putTypes(tp.parents)
+ putSymbols(tp.decls.toList)
case MethodType(params, restpe) =>
- putType(restpe); putSymbols(params)
+ putType(restpe)
+ putSymbols(params)
case NullaryMethodType(restpe) =>
putType(restpe)
case PolyType(tparams, restpe) =>
- /** no longer needed since all params are now local
- tparams foreach { tparam =>
- if (!isLocal(tparam)) locals += tparam // similar to existential types, these tparams are local
- }
- */
- putType(restpe); putSymbols(tparams)
+ putType(restpe)
+ putSymbols(tparams)
case ExistentialType(tparams, restpe) =>
-// val savedBoundSyms = boundSyms // boundSyms are known to be local based on the EXISTENTIAL flag (see isLocal)
-// boundSyms = tparams ::: boundSyms
-// try {
- putType(restpe);
-// } finally {
-// boundSyms = savedBoundSyms
-// }
+ putType(restpe)
putSymbols(tparams)
- case AnnotatedType(annotations, underlying, selfsym) =>
+ case AnnotatedType(_, underlying, selfsym) =>
putType(underlying)
- if (settings.selfInAnnots.value) putSymbol(selfsym)
- putAnnotations(annotations filter (_.isStatic))
+ if (settings.selfInAnnots) putSymbol(selfsym)
+ tp.staticAnnotations foreach putAnnotation
case _ =>
throw new FatalError("bad type: " + tp + "(" + tp.getClass + ")")
}
}
private def putTypes(tps: List[Type]) { tps foreach putType }
- private def putTree(tree: Tree): Unit = if (putEntry(tree)) {
- if (tree != EmptyTree)
- putType(tree.tpe)
- if (tree.hasSymbol)
- putSymbol(tree.symbol)
-
- tree match {
- case EmptyTree =>
-
- case tree@PackageDef(pid, stats) =>
- putTree(pid)
- putTrees(stats)
-
- case ClassDef(mods, name, tparams, impl) =>
- putMods(mods)
- putEntry(name)
- putTree(impl)
- putTrees(tparams)
-
- case ModuleDef(mods, name, impl) =>
- putMods(mods)
- putEntry(name)
- putTree(impl)
-
- case ValDef(mods, name, tpt, rhs) =>
- putMods(mods)
- putEntry(name)
- putTree(tpt)
- putTree(rhs)
-
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- putMods(mods)
- putEntry(name)
- putTrees(tparams)
- putTreess(vparamss)
- putTree(tpt)
- putTree(rhs)
-
- case TypeDef(mods, name, tparams, rhs) =>
- putMods(mods)
- putEntry(name)
- putTree(rhs)
- putTrees(tparams)
-
- case LabelDef(name, params, rhs) =>
- putEntry(name)
- putTree(rhs)
- putTrees(params)
-
- case Import(expr, selectors) =>
- putTree(expr)
- for (ImportSelector(from, _, to, _) <- selectors) {
- putEntry(from)
- putEntry(to)
- }
-/*
- case DocDef(comment, definition) => should not be needed
- putConstant(Constant(comment))
- putTree(definition)
-*/
- case Template(parents, self, body) =>
- putTrees(parents)
- putTree(self)
- putTrees(body)
-
- case Block(stats, expr) =>
- putTree(expr)
- putTrees(stats)
-
- case CaseDef(pat, guard, body) =>
- putTree(pat)
- putTree(guard)
- putTree(body)
-
- case Alternative(trees) =>
- putTrees(trees)
-
- case Star(elem) =>
- putTree(elem)
-
- case Bind(name, body) =>
- putEntry(name)
- putTree(body)
-
- case UnApply(fun: Tree, args) =>
- putTree(fun)
- putTrees(args)
-
- case ArrayValue(elemtpt, trees) =>
- putTree(elemtpt)
- putTrees(trees)
-
-
- case Function(vparams, body) =>
- putTree(body)
- putTrees(vparams)
-
- case Assign(lhs, rhs) =>
- putTree(lhs)
- putTree(rhs)
-
- case If(cond, thenp, elsep) =>
- putTree(cond)
- putTree(thenp)
- putTree(elsep)
-
- case Match(selector, cases) =>
- putTree(selector)
- putTrees(cases)
-
- case Return(expr) =>
- putTree(expr)
-
- case Try(block, catches, finalizer) =>
- putTree(block)
- putTree(finalizer)
- putTrees(catches)
-
- case Throw(expr) =>
- putTree(expr)
-
- case New(tpt) =>
- putTree(tpt)
-
- case Typed(expr, tpt) =>
- putTree(expr)
- putTree(tpt)
-
- case TypeApply(fun, args) =>
- putTree(fun)
- putTrees(args)
-
- case Apply(fun, args) =>
- putTree(fun)
- putTrees(args)
-
- case ApplyDynamic(qual, args) =>
- putTree(qual)
- putTrees(args)
-
- case Super(qual, mix) =>
- putTree(qual)
- putEntry(mix:Name)
-
- case This(qual) =>
- putEntry(qual)
-
- case Select(qualifier, selector) =>
- putTree(qualifier)
- putEntry(selector)
-
- case Ident(name) =>
- putEntry(name)
-
- case Literal(value) =>
- putEntry(value)
-
- case TypeTree() =>
-
- case Annotated(annot, arg) =>
- putTree(annot)
- putTree(arg)
-
- case SingletonTypeTree(ref) =>
- putTree(ref)
-
- case SelectFromTypeTree(qualifier, selector) =>
- putTree(qualifier)
- putEntry(selector)
-
- case CompoundTypeTree(templ: Template) =>
- putTree(templ)
-
- case AppliedTypeTree(tpt, args) =>
- putTree(tpt)
- putTrees(args)
-
- case TypeBoundsTree(lo, hi) =>
- putTree(lo)
- putTree(hi)
-
- case ExistentialTypeTree(tpt, whereClauses) =>
- putTree(tpt)
- putTrees(whereClauses)
+ private object putTreeTraverser extends Traverser {
+ // Only used when pickling trees, i.e. in an argument of some Annotation
+ // annotations in Modifiers are removed by the typechecker
+ override def traverseModifiers(mods: Modifiers): Unit = if (putEntry(mods)) putEntry(mods.privateWithin)
+ override def traverseName(name: Name): Unit = putEntry(name)
+ override def traverseConstant(const: Constant): Unit = putEntry(const)
+ override def traverse(tree: Tree): Unit = putTree(tree)
+
+ def put(tree: Tree): Unit = {
+ if (tree.canHaveAttrs)
+ putType(tree.tpe)
+ if (tree.hasSymbolField)
+ putSymbol(tree.symbol)
+
+ super.traverse(tree)
}
}
-
- private def putTrees(trees: List[Tree]) = trees foreach putTree
- private def putTreess(treess: List[List[Tree]]) = treess foreach putTrees
-
- /** only used when pickling trees, i.e. in an
- * argument of some Annotation */
- private def putMods(mods: Modifiers) = if (putEntry(mods)) {
- // annotations in Modifiers are removed by the typechecker
- val Modifiers(flags, privateWithin, Nil) = mods
- putEntry(privateWithin)
+ private def putTree(tree: Tree) {
+ if (putEntry(tree))
+ putTreeTraverser put tree
}
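putTreeTraverser replaces the previous exhaustive match over every tree shape with the standard Traverser pattern: override the hooks you care about and let super.traverse walk the children. A small, self-contained illustration of that pattern against the public reflection API (names here are illustrative and not part of the patch):

    import scala.reflect.runtime.universe._

    // Collects every Ident name in a tree -- the same override-and-recurse shape
    // that putTreeTraverser uses to register pickle entries.
    object NameCollector extends Traverser {
      var names = List.empty[Name]
      override def traverse(tree: Tree): Unit = {
        tree match {
          case Ident(name) => names ::= name
          case _           =>
        }
        super.traverse(tree) // recurse into subtrees
      }
    }
    // NameCollector traverse q"f(x, y)"   // records f, x and y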
/** Store a constant in map index, along with anything it references.
@@ -437,7 +281,7 @@ abstract class Pickler extends SubComponent {
}
private def putChildren(sym: Symbol, children: List[Symbol]) {
- assert(putEntry((sym, children)))
+ putEntry(sym -> children)
children foreach putSymbol
}
@@ -445,14 +289,10 @@ abstract class Pickler extends SubComponent {
private def putAnnotation(sym: Symbol, annot: AnnotationInfo) {
// if an annotation with the same arguments is applied to the
// same symbol multiple times, it's only pickled once.
- if (putEntry((sym, annot)))
+ if (putEntry(sym -> annot))
putAnnotationBody(annot)
}
- /** used in AnnotatedType only, i.e. annotations on types */
- private def putAnnotations(annots: List[AnnotationInfo]) {
- annots foreach putAnnotation
- }
private def putAnnotation(annot: AnnotationInfo) {
if (putEntry(annot))
putAnnotationBody(annot)
@@ -467,14 +307,10 @@ abstract class Pickler extends SubComponent {
}
}
def putClassfileAnnotArg(carg: ClassfileAnnotArg) {
- carg match {
- case LiteralAnnotArg(const) =>
- putConstant(const)
- case ArrayAnnotArg(args) =>
- if (putEntry(carg))
- args foreach putClassfileAnnotArg
- case NestedAnnotArg(annInfo) =>
- putAnnotation(annInfo)
+ (carg: @unchecked) match {
+ case LiteralAnnotArg(const) => putConstant(const)
+ case ArrayAnnotArg(args) => if (putEntry(carg)) args foreach putClassfileAnnotArg
+ case NestedAnnotArg(annInfo) => putAnnotation(annInfo)
}
}
val AnnotationInfo(tpe, args, assocs) = annot
@@ -490,8 +326,11 @@ abstract class Pickler extends SubComponent {
/** Write a reference to object, i.e., the object's number in the map index.
*/
- private def writeRef(ref: AnyRef) { writeNat(index(ref)) }
- private def writeRefs(refs: List[AnyRef]) { refs foreach writeRef }
+ private def writeRef(ref: AnyRef) {
+ writeNat(index(deskolemizeTypeSymbols(ref)))
+ }
+ private def writeRefs(refs: List[AnyRef]): Unit = refs foreach writeRef
+
private def writeRefsWithLength(refs: List[AnyRef]) {
writeNat(refs.length)
writeRefs(refs)
@@ -502,7 +341,7 @@ abstract class Pickler extends SubComponent {
private def writeSymInfo(sym: Symbol) {
writeRef(sym.name)
writeRef(localizedOwner(sym))
- writeLongNat((rawToPickledFlags(sym.flags & PickledFlags)))
+ writeLongNat((rawToPickledFlags(sym.rawflags & PickledFlags)))
if (sym.hasAccessBoundary) writeRef(sym.privateWithin)
writeRef(sym.info)
}
@@ -534,567 +373,144 @@ abstract class Pickler extends SubComponent {
/** Write a ClassfileAnnotArg (argument to classfile annotation) */
def writeClassfileAnnotArg(carg: ClassfileAnnotArg) {
- carg match {
- case LiteralAnnotArg(const) =>
- writeRef(const)
- case ArrayAnnotArg(args) =>
- writeRef(carg)
- case NestedAnnotArg(annInfo) =>
- writeRef(annInfo)
+ (carg: @unchecked) match {
+ case LiteralAnnotArg(const) => writeRef(const)
+ case ArrayAnnotArg(args) => writeRef(carg)
+ case NestedAnnotArg(annInfo) => writeRef(annInfo)
}
}
- /** Write an entry */
- private def writeEntry(entry: AnyRef) {
- def writeBody(entry: AnyRef): Int = entry match {
- case name: Name =>
- writeName(name)
- if (name.isTermName) TERMname else TYPEname
- case NoSymbol =>
- NONEsym
- case sym: Symbol if !isLocal(sym) =>
- val tag =
- if (sym.isModuleClass) {
- writeRef(sym.name.toTermName); EXTMODCLASSref
- } else {
- writeRef(sym.name); EXTref
- }
- if (!sym.owner.isRoot) writeRef(sym.owner)
- tag
- case sym: ClassSymbol =>
- writeSymInfo(sym)
- if (sym.thisSym.tpe != sym.tpe) writeRef(sym.typeOfThis)
- CLASSsym
- case sym: TypeSymbol =>
- writeSymInfo(sym)
- if (sym.isAbstractType) TYPEsym else ALIASsym
- case sym: TermSymbol =>
- writeSymInfo(sym)
- if (sym.alias != NoSymbol) writeRef(sym.alias)
- if (sym.isModule) MODULEsym else VALsym
- case NoType =>
- NOtpe
- case NoPrefix =>
- NOPREFIXtpe
- case ThisType(sym) =>
- writeRef(sym); THIStpe
- case SingleType(pre, sym) =>
- writeRef(pre); writeRef(sym); SINGLEtpe
- case SuperType(thistpe, supertpe) =>
- writeRef(thistpe); writeRef(supertpe); SUPERtpe
- case ConstantType(value) =>
- writeRef(value); CONSTANTtpe
- case TypeRef(pre, sym, args) =>
- writeRef(pre); writeRef(sym); writeRefs(args); TYPEREFtpe
- case TypeBounds(lo, hi) =>
- writeRef(lo); writeRef(hi); TYPEBOUNDStpe
- case tp @ RefinedType(parents, decls) =>
- writeRef(tp.typeSymbol); writeRefs(parents); REFINEDtpe
- case ClassInfoType(parents, decls, clazz) =>
- writeRef(clazz); writeRefs(parents); CLASSINFOtpe
- case mt @ MethodType(formals, restpe) =>
- writeRef(restpe); writeRefs(formals) ; METHODtpe
- case mt @ NullaryMethodType(restpe) =>
- // reuse POLYtpe since those can never have an empty list of tparams.
- // TODO: is there any way this can come back and bite us in the bottom?
- // ugliness and thrift aside, this should make this somewhat more backward compatible
- // (I'm not sure how old scalac's would deal with nested PolyTypes, as these used to be folded into one)
- writeRef(restpe); writeRefs(Nil); POLYtpe
- case PolyType(tparams, restpe) => // invar: tparams nonEmpty
- writeRef(restpe); writeRefs(tparams); POLYtpe
- case ExistentialType(tparams, restpe) =>
- writeRef(restpe); writeRefs(tparams); EXISTENTIALtpe
- // case DeBruijnIndex(l, i) =>
- // writeNat(l); writeNat(i); DEBRUIJNINDEXtpe
- case c @ Constant(_) =>
- if (c.tag == BooleanTag) writeLong(if (c.booleanValue) 1 else 0)
- else if (ByteTag <= c.tag && c.tag <= LongTag) writeLong(c.longValue)
- else if (c.tag == FloatTag) writeLong(floatToIntBits(c.floatValue))
- else if (c.tag == DoubleTag) writeLong(doubleToLongBits(c.doubleValue))
- else if (c.tag == StringTag) writeRef(newTermName(c.stringValue))
- else if (c.tag == ClazzTag) writeRef(c.typeValue)
- else if (c.tag == EnumTag) writeRef(c.symbolValue)
- LITERAL + c.tag // also treats UnitTag, NullTag; no value required
- case AnnotatedType(annotations, tp, selfsym) =>
- annotations filter (_.isStatic) match {
- case Nil => writeBody(tp) // write the underlying type if there are no annotations
- case staticAnnots =>
- if (settings.selfInAnnots.value && selfsym != NoSymbol)
- writeRef(selfsym)
- writeRef(tp)
- writeRefs(staticAnnots)
- ANNOTATEDtpe
- }
- // annotations attached to a symbol (i.e. annots on terms)
- case (target: Symbol, annot@AnnotationInfo(_, _, _)) =>
- writeRef(target)
- writeAnnotation(annot)
- SYMANNOT
-
- case ArrayAnnotArg(args) =>
- args foreach writeClassfileAnnotArg
- ANNOTARGARRAY
-
- case (target: Symbol, children: List[_]) =>
- writeRef(target)
- writeRefs(children.asInstanceOf[List[Symbol]])
- CHILDREN
-
- case EmptyTree =>
- writeNat(EMPTYtree)
- TREE
-
- case tree@PackageDef(pid, stats) =>
- writeNat(PACKAGEtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(tree.mods)
- writeRef(pid)
- writeRefs(stats)
- TREE
-
- case tree@ClassDef(mods, name, tparams, impl) =>
- writeNat(CLASStree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRef(impl)
- writeRefs(tparams)
- TREE
-
- case tree@ModuleDef(mods, name, impl) =>
- writeNat(MODULEtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRef(impl)
- TREE
-
- case tree@ValDef(mods, name, tpt, rhs) =>
- writeNat(VALDEFtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRef(tpt)
- writeRef(rhs)
- TREE
-
- case tree@DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- writeNat(DEFDEFtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRefsWithLength(tparams)
- writeNat(vparamss.length)
- vparamss foreach writeRefsWithLength
- writeRef(tpt)
- writeRef(rhs)
- TREE
-
- case tree@TypeDef(mods, name, tparams, rhs) =>
- writeNat(TYPEDEFtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRef(rhs)
- writeRefs(tparams)
- TREE
-
- case tree@LabelDef(name, params, rhs) =>
- writeNat(LABELtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(name)
- writeRef(rhs)
- writeRefs(params)
- TREE
-
- case tree@Import(expr, selectors) =>
- writeNat(IMPORTtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(expr)
- for (ImportSelector(from, _, to, _) <- selectors) {
- writeRef(from)
- writeRef(to)
- }
- TREE
-
- case tree@DocDef(comment, definition) =>
- writeNat(DOCDEFtree)
- writeRef(tree.tpe)
- writeRef(Constant(comment))
- writeRef(definition)
- TREE
-
- case tree@Template(parents, self, body) =>
- writeNat(TEMPLATEtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRefsWithLength(parents)
- writeRef(self)
- writeRefs(body)
- TREE
-
- case tree@Block(stats, expr) =>
- writeNat(BLOCKtree)
- writeRef(tree.tpe)
- writeRef(expr)
- writeRefs(stats)
- TREE
-
- case tree@CaseDef(pat, guard, body) =>
- writeNat(CASEtree)
- writeRef(tree.tpe)
- writeRef(pat)
- writeRef(guard)
- writeRef(body)
- TREE
-
- case tree@Alternative(trees) =>
- writeNat(ALTERNATIVEtree)
- writeRef(tree.tpe)
- writeRefs(trees)
- TREE
-
- case tree@Star(elem) =>
- writeNat(STARtree)
- writeRef(tree.tpe)
- writeRef(elem)
- TREE
-
- case tree@Bind(name, body) =>
- writeNat(BINDtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(name)
- writeRef(body)
- TREE
-
- case tree@UnApply(fun: Tree, args) =>
- writeNat(UNAPPLYtree)
- writeRef(tree.tpe)
- writeRef(fun)
- writeRefs(args)
- TREE
-
- case tree@ArrayValue(elemtpt, trees) =>
- writeNat(ARRAYVALUEtree)
- writeRef(tree.tpe)
- writeRef(elemtpt)
- writeRefs(trees)
- TREE
-
- case tree@Function(vparams, body) =>
- writeNat(FUNCTIONtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(body)
- writeRefs(vparams)
- TREE
-
- case tree@Assign(lhs, rhs) =>
- writeNat(ASSIGNtree)
- writeRef(tree.tpe)
- writeRef(lhs)
- writeRef(rhs)
- TREE
-
- case tree@If(cond, thenp, elsep) =>
- writeNat(IFtree)
- writeRef(tree.tpe)
- writeRef(cond)
- writeRef(thenp)
- writeRef(elsep)
- TREE
-
- case tree@Match(selector, cases) =>
- writeNat(MATCHtree)
- writeRef(tree.tpe)
- writeRef(selector)
- writeRefs(cases)
- TREE
-
- case tree@Return(expr) =>
- writeNat(RETURNtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(expr)
- TREE
-
- case tree@Try(block, catches, finalizer) =>
- writeNat(TREtree)
- writeRef(tree.tpe)
- writeRef(block)
- writeRef(finalizer)
- writeRefs(catches)
- TREE
-
- case tree@Throw(expr) =>
- writeNat(THROWtree)
- writeRef(tree.tpe)
- writeRef(expr)
- TREE
-
- case tree@New(tpt) =>
- writeNat(NEWtree)
- writeRef(tree.tpe)
- writeRef(tpt)
- TREE
-
- case tree@Typed(expr, tpt) =>
- writeNat(TYPEDtree)
- writeRef(tree.tpe)
- writeRef(expr)
- writeRef(tpt)
- TREE
-
- case tree@TypeApply(fun, args) =>
- writeNat(TYPEAPPLYtree)
- writeRef(tree.tpe)
- writeRef(fun)
- writeRefs(args)
- TREE
-
- case tree@Apply(fun, args) =>
- writeNat(APPLYtree)
- writeRef(tree.tpe)
- writeRef(fun)
- writeRefs(args)
- TREE
-
- case tree@ApplyDynamic(qual, args) =>
- writeNat(APPLYDYNAMICtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(qual)
- writeRefs(args)
- TREE
-
- case tree@Super(qual, mix) =>
- writeNat(SUPERtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(qual)
- writeRef(mix)
- TREE
-
- case tree@This(qual) =>
- writeNat(THIStree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(qual)
- TREE
-
- case tree@Select(qualifier, selector) =>
- writeNat(SELECTtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(qualifier)
- writeRef(selector)
- TREE
-
- case tree@Ident(name) =>
- writeNat(IDENTtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(name)
- TREE
-
- case tree@Literal(value) =>
- writeNat(LITERALtree)
- writeRef(tree.tpe)
- writeRef(value)
- TREE
-
- case tree@TypeTree() =>
- writeNat(TYPEtree)
- writeRef(tree.tpe)
- TREE
-
- case tree@Annotated(annot, arg) =>
- writeNat(ANNOTATEDtree)
- writeRef(tree.tpe)
- writeRef(annot)
- writeRef(arg)
- TREE
-
- case tree@SingletonTypeTree(ref) =>
- writeNat(SINGLETONTYPEtree)
+ private object writeTreeBodyTraverser extends Traverser {
+ private var refs = false
+ @inline private def asRefs[T](body: => T): T = {
+ val saved = refs
+ refs = true
+ try body finally refs = saved
+ }
+ override def traverseModifiers(mods: Modifiers): Unit = if (refs) writeRef(mods) else super.traverseModifiers(mods)
+ override def traverseName(name: Name): Unit = writeRef(name)
+ override def traverseConstant(const: Constant): Unit = writeRef(const)
+ override def traverseParams(params: List[Tree]): Unit = writeRefsWithLength(params)
+ override def traverseParamss(vparamss: List[List[Tree]]): Unit = {
+ writeNat(vparamss.length)
+ super.traverseParamss(vparamss)
+ }
+ override def traverse(tree: Tree): Unit = {
+ if (refs)
+ writeRef(tree)
+ else {
writeRef(tree.tpe)
- writeRef(ref)
- TREE
+ if (tree.hasSymbolField)
+ writeRef(tree.symbol)
- case tree@SelectFromTypeTree(qualifier, selector) =>
- writeNat(SELECTFROMTYPEtree)
- writeRef(tree.tpe)
- writeRef(qualifier)
- writeRef(selector)
- TREE
+ asRefs(super.traverse(tree))
+ }
+ }
+ }
- case tree@CompoundTypeTree(templ: Template) =>
- writeNat(COMPOUNDTYPEtree)
- writeRef(tree.tpe)
- writeRef(templ)
- TREE
+ /** Write an entry */
+ private def writeEntry(entry: AnyRef) {
+ def writeLocalSymbolBody(sym: Symbol) {
+ writeSymInfo(sym)
+ sym match {
+ case _: ClassSymbol if sym.hasSelfType => writeRef(sym.typeOfThis)
+ case _: TermSymbol if sym.alias.exists => writeRef(sym.alias)
+ case _ =>
+ }
+ }
+ def writeExtSymbolBody(sym: Symbol) {
+ val name = if (sym.isModuleClass) sym.name.toTermName else sym.name
+ writeRef(name)
+ if (!sym.owner.isRoot)
+ writeRef(sym.owner)
+ }
+ def writeSymbolBody(sym: Symbol) {
+ if (sym ne NoSymbol) {
+ if (isLocal(sym))
+ writeLocalSymbolBody(sym)
+ else
+ writeExtSymbolBody(sym)
+ }
+ }
- case tree@AppliedTypeTree(tpt, args) =>
- writeNat(APPLIEDTYPEtree)
- writeRef(tree.tpe)
- writeRef(tpt)
- writeRefs(args)
- TREE
+ // NullaryMethodType reuses POLYtpe since those can never have an empty list of tparams.
+ // TODO: is there any way this can come back and bite us in the bottom?
+ // ugliness and thrift aside, this should make this somewhat more backward compatible
+      // (I'm not sure how older scalacs would deal with nested PolyTypes, as these used to be folded into one)
+ def writeTypeBody(tpe: Type): Unit = tpe match {
+ case NoType | NoPrefix =>
+ case ThisType(sym) => writeRef(sym)
+ case SingleType(pre, sym) => writeRef(pre) ; writeRef(sym)
+ case SuperType(thistpe, supertpe) => writeRef(thistpe) ; writeRef(supertpe)
+ case ConstantType(value) => writeRef(value)
+ case TypeBounds(lo, hi) => writeRef(lo) ; writeRef(hi)
+ case TypeRef(pre, sym, args) => writeRef(pre) ; writeRef(sym); writeRefs(args)
+ case MethodType(formals, restpe) => writeRef(restpe) ; writeRefs(formals)
+ case NullaryMethodType(restpe) => writeRef(restpe); writeRefs(Nil)
+ case PolyType(tparams, restpe) => writeRef(restpe); writeRefs(tparams)
+ case ExistentialType(tparams, restpe) => writeRef(restpe); writeRefs(tparams)
+ case StaticallyAnnotatedType(annots, tp) => writeRef(tp) ; writeRefs(annots)
+ case AnnotatedType(_, tp, _) => writeTypeBody(tp) // write the underlying type if there are no static annotations
+ case CompoundType(parents, _, clazz) => writeRef(clazz); writeRefs(parents)
+ }
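On the NullaryMethodType comment above: the two cases that end up sharing the tag correspond to plain parameterless methods and type-parameterized methods. A hedged illustration of which source shapes produce which internal type (illustration only):

    trait Shapes {
      def g: Int        // internally a NullaryMethodType; pickled as POLYtpe with an empty tparam list
      def h[A]: List[A] // internally a PolyType over A;   pickled as POLYtpe with its tparams
    }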
- case tree@TypeBoundsTree(lo, hi) =>
- writeNat(TYPEBOUNDStree)
- writeRef(tree.tpe)
- writeRef(lo)
- writeRef(hi)
- TREE
+ def writeTreeBody(tree: Tree) {
+ writeNat(picklerSubTag(tree))
+ if (!tree.isEmpty)
+ writeTreeBodyTraverser traverse tree
+ }
- case tree@ExistentialTypeTree(tpt, whereClauses) =>
- writeNat(EXISTENTIALTYPEtree)
- writeRef(tree.tpe)
- writeRef(tpt)
- writeRefs(whereClauses)
- TREE
+ def writeConstant(c: Constant): Unit = c.tag match {
+ case BooleanTag => writeLong(if (c.booleanValue) 1 else 0)
+ case FloatTag => writeLong(floatToIntBits(c.floatValue).toLong)
+ case DoubleTag => writeLong(doubleToLongBits(c.doubleValue))
+ case StringTag => writeRef(newTermName(c.stringValue))
+ case ClazzTag => writeRef(c.typeValue)
+ case EnumTag => writeRef(c.symbolValue)
+ case tag => if (ByteTag <= tag && tag <= LongTag) writeLong(c.longValue)
+ }
- case Modifiers(flags, privateWithin, _) =>
- val pflags = rawToPickledFlags(flags)
- writeNat((pflags >> 32).toInt)
- writeNat((pflags & 0xFFFFFFFF).toInt)
- writeRef(privateWithin)
- MODIFIERS
+ def writeModifiers(mods: Modifiers) {
+ val pflags = rawToPickledFlags(mods.flags)
+ writeNat((pflags >> 32).toInt)
+ writeNat((pflags & 0xFFFFFFFF).toInt)
+ writeRef(mods.privateWithin)
+ }
- // annotations on types (not linked to a symbol)
- case annot@AnnotationInfo(_, _, _) =>
- writeAnnotation(annot)
- ANNOTINFO
+ def writeSymbolTuple(target: Symbol, other: Any) {
+ writeRef(target)
+ other match {
+ case annot: AnnotationInfo => writeAnnotation(annot)
+ case children: List[Symbol @unchecked] => writeRefs(children)
+ case _ =>
+ }
+ }
- case _ =>
- throw new FatalError("bad entry: " + entry + " " + entry.getClass)
+ def writeBody(entry: AnyRef): Unit = entry match {
+ case tree: Tree => writeTreeBody(tree)
+ case sym: Symbol => writeSymbolBody(sym)
+ case tpe: Type => writeTypeBody(tpe)
+ case name: Name => writeName(name)
+ case const: Constant => writeConstant(const)
+ case mods: Modifiers => writeModifiers(mods)
+ case annot: AnnotationInfo => writeAnnotation(annot)
+ case (target: Symbol, other) => writeSymbolTuple(target, other)
+ case ArrayAnnotArg(args) => args foreach writeClassfileAnnotArg
+ case _ => devWarning(s"Unexpected entry to pickler ${shortClassOfInstance(entry)} $entry")
}
// begin writeEntry
- val startpos = writeIndex
- // reserve some space so that the patchNat's most likely won't need to shift
- writeByte(0); writeByte(0)
- patchNat(startpos, writeBody(entry))
- patchNat(startpos + 1, writeIndex - (startpos + 2))
- }
-
- /** Print entry for diagnostics */
- def printEntryAtIndex(idx: Int) = printEntry(entries(idx))
- def printEntry(entry: AnyRef) {
- def printRef(ref: AnyRef) {
- print(index(ref)+
- (if (ref.isInstanceOf[Name]) "("+ref+") " else " "))
- }
- def printRefs(refs: List[AnyRef]) { refs foreach printRef }
- def printSymInfo(sym: Symbol) {
- var posOffset = 0
- printRef(sym.name)
- printRef(localizedOwner(sym))
- print(flagsToString(sym.flags & PickledFlags)+" ")
- if (sym.hasAccessBoundary) printRef(sym.privateWithin)
- printRef(sym.info)
- }
- def printBody(entry: AnyRef) = entry match {
- case name: Name =>
- print((if (name.isTermName) "TERMname " else "TYPEname ")+name)
- case NoSymbol =>
- print("NONEsym")
- case sym: Symbol if !isLocal(sym) =>
- if (sym.isModuleClass) {
- print("EXTMODCLASSref "); printRef(sym.name.toTermName)
- } else {
- print("EXTref "); printRef(sym.name)
- }
- if (!sym.owner.isRoot) printRef(sym.owner)
- case sym: ClassSymbol =>
- print("CLASSsym ")
- printSymInfo(sym)
- if (sym.thisSym.tpe != sym.tpe) printRef(sym.typeOfThis)
- case sym: TypeSymbol =>
- print(if (sym.isAbstractType) "TYPEsym " else "ALIASsym ")
- printSymInfo(sym)
- case sym: TermSymbol =>
- print(if (sym.isModule) "MODULEsym " else "VALsym ")
- printSymInfo(sym)
- if (sym.alias != NoSymbol) printRef(sym.alias)
- case NoType =>
- print("NOtpe")
- case NoPrefix =>
- print("NOPREFIXtpe")
- case ThisType(sym) =>
- print("THIStpe "); printRef(sym)
- case SingleType(pre, sym) =>
- print("SINGLEtpe "); printRef(pre); printRef(sym);
- case ConstantType(value) =>
- print("CONSTANTtpe "); printRef(value);
- case TypeRef(pre, sym, args) =>
- print("TYPEREFtpe "); printRef(pre); printRef(sym); printRefs(args);
- case TypeBounds(lo, hi) =>
- print("TYPEBOUNDStpe "); printRef(lo); printRef(hi);
- case tp @ RefinedType(parents, decls) =>
- print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents);
- case ClassInfoType(parents, decls, clazz) =>
- print("CLASSINFOtpe "); printRef(clazz); printRefs(parents);
- case mt @ MethodType(formals, restpe) =>
- print("METHODtpe"); printRef(restpe); printRefs(formals)
- case PolyType(tparams, restpe) =>
- print("POLYtpe "); printRef(restpe); printRefs(tparams);
- case ExistentialType(tparams, restpe) =>
- print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams);
- print("||| "+entry)
- // case DeBruijnIndex(l, i) =>
- // print("DEBRUIJNINDEXtpe "); print(l+" "+i)
- case c @ Constant(_) =>
- print("LITERAL ")
- if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0))
- else if (c.tag == ByteTag) print("Byte "+c.longValue)
- else if (c.tag == ShortTag) print("Short "+c.longValue)
- else if (c.tag == CharTag) print("Char "+c.longValue)
- else if (c.tag == IntTag) print("Int "+c.longValue)
- else if (c.tag == LongTag) print("Long "+c.longValue)
- else if (c.tag == FloatTag) print("Float "+c.floatValue)
- else if (c.tag == DoubleTag) print("Double "+c.doubleValue)
- else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) }
- else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) }
- else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) }
- case AnnotatedType(annots, tp, selfsym) =>
- if (settings.selfInAnnots.value) {
- print("ANNOTATEDWSELFtpe ")
- printRef(tp)
- printRef(selfsym)
- printRefs(annots)
- } else {
- print("ANNOTATEDtpe ")
- printRef(tp)
- printRefs(annots)
- }
- case (target: Symbol, AnnotationInfo(atp, args, Nil)) =>
- print("SYMANNOT ")
- printRef(target)
- printRef(atp)
- for (c <- args) printRef(c)
- case (target: Symbol, children: List[_]) =>
- print("CHILDREN ")
- printRef(target)
- for (c <- children) printRef(c.asInstanceOf[Symbol])
- case AnnotationInfo(atp, args, Nil) =>
- print("ANNOTINFO")
- printRef(atp)
- for (c <- args) printRef(c)
- case _ =>
- throw new FatalError("bad entry: " + entry + " " + entry.getClass)
+ // The picklerTag method can't determine if it's an external symbol reference
+ val tag = entry match {
+ case sym: Symbol if isExternalSymbol(sym) => if (sym.isModuleClass) EXTMODCLASSref else EXTref
+ case _ => picklerTag(entry)
}
- printBody(entry); println()
+ writeNat(tag)
+ writeByte(0) // reserve a place to record the number of bytes written
+ val start = writeIndex
+ writeBody(entry)
+ val length = writeIndex - start
+ patchNat(start - 1, length) // patch bytes written over the placeholder
}
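The rewritten writeEntry keeps the pickle's entry layout: a tag, then a byte count, then the body, with the count patched in once the body has been written. A self-contained toy sketch of that write-then-patch pattern follows; ToyPickleBuffer and its one-byte writeNat are simplifications, since the real PickleBuffer writes variable-length naturals and may have to shift bytes when a patched length needs more room (which is what the removed "reserve some space" trick was about):

    import scala.collection.mutable.ArrayBuffer

    // Toy model of writeEntry's tag/length/body layout.
    final class ToyPickleBuffer {
      val bytes = ArrayBuffer.empty[Int]
      def writeNat(n: Int): Unit = bytes += n              // simplification: assumes n fits one byte
      def patchNat(pos: Int, n: Int): Unit = bytes(pos) = n

      def writeEntry(tag: Int)(writeBody: => Unit): Unit = {
        writeNat(tag)                                      // 1. the entry tag
        writeNat(0)                                        // 2. placeholder for the byte count
        val start = bytes.length
        writeBody                                          // 3. the entry body
        patchNat(start - 1, bytes.length - start)          // 4. overwrite the placeholder with the real length
      }
    }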
/** Write byte array */
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
deleted file mode 100644
index 40189b9444..0000000000
--- a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2004-2013 LAMP/EPFL
- */
-
-
-package scala.tools.nsc
-package symtab
-package clr
-
-import java.io.File
-import java.util.{Comparator, StringTokenizer}
-import scala.util.Sorting
-import ch.epfl.lamp.compiler.msil._
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.util.{Position, NoPosition}
-
-/**
- * Collects all types from all reference assemblies.
- */
-abstract class CLRTypes {
-
- val global: Global
- import global.Symbol
- import global.definitions
-
- //##########################################################################
-
- var BYTE: Type = _
- var UBYTE: Type = _
- var SHORT: Type = _
- var USHORT: Type = _
- var CHAR: Type = _
- var INT: Type = _
- var UINT: Type = _
- var LONG: Type = _
- var ULONG: Type = _
- var FLOAT: Type = _
- var DOUBLE: Type = _
- var BOOLEAN: Type = _
- var VOID: Type = _
- var ENUM: Type = _
- var DELEGATE: Type = _
-
- var OBJECT: Type = _
- var STRING: Type = _
- var STRING_ARRAY: Type = _
-
- var VALUE_TYPE: Type = _
-
- var SCALA_SYMTAB_ATTR: Type = _
- var SYMTAB_CONSTR: ConstructorInfo = _
- var SYMTAB_DEFAULT_CONSTR: ConstructorInfo = _
-
- var DELEGATE_COMBINE: MethodInfo = _
- var DELEGATE_REMOVE: MethodInfo = _
-
- val types: mutable.Map[Symbol,Type] = new mutable.HashMap
- val constructors: mutable.Map[Symbol,ConstructorInfo] = new mutable.HashMap
- val methods: mutable.Map[Symbol,MethodInfo] = new mutable.HashMap
- val fields: mutable.Map[Symbol, FieldInfo] = new mutable.HashMap
- val sym2type: mutable.Map[Type,Symbol] = new mutable.HashMap
- val addressOfViews = new mutable.HashSet[Symbol]
- val mdgptrcls4clssym: mutable.Map[ /*cls*/ Symbol, /*cls*/ Symbol] = new mutable.HashMap
-
- def isAddressOf(msym : Symbol) = addressOfViews.contains(msym)
-
- def isNonEnumValuetype(cls: Symbol) = {
- val msilTOpt = types.get(cls)
- val res = msilTOpt.isDefined && {
- val msilT = msilTOpt.get
- msilT.IsValueType && !msilT.IsEnum
- }
- res
- }
-
- def isValueType(cls: Symbol): Boolean = {
- val opt = types.get(cls)
- opt.isDefined && opt.get.IsValueType
- }
-
- def init() = try { // initialize
- // the MsilClasspath (nsc/util/Classpath.scala) initializes the msil-library by calling
- // Assembly.LoadFrom("mscorlib.dll"), so this type should be found
- Type.initMSCORLIB(getTypeSafe("System.String").Assembly)
-
- BYTE = getTypeSafe("System.SByte")
- UBYTE = getTypeSafe("System.Byte")
- CHAR = getTypeSafe("System.Char")
- SHORT = getTypeSafe("System.Int16")
- USHORT = getTypeSafe("System.UInt16")
- INT = getTypeSafe("System.Int32")
- UINT = getTypeSafe("System.UInt32")
- LONG = getTypeSafe("System.Int64")
- ULONG = getTypeSafe("System.UInt64")
- FLOAT = getTypeSafe("System.Single")
- DOUBLE = getTypeSafe("System.Double")
- BOOLEAN = getTypeSafe("System.Boolean")
- VOID = getTypeSafe("System.Void")
- ENUM = getTypeSafe("System.Enum")
- DELEGATE = getTypeSafe("System.MulticastDelegate")
-
- OBJECT = getTypeSafe("System.Object")
- STRING = getTypeSafe("System.String")
- STRING_ARRAY = getTypeSafe("System.String[]")
- VALUE_TYPE = getTypeSafe("System.ValueType")
-
- SCALA_SYMTAB_ATTR = getTypeSafe("scala.runtime.SymtabAttribute")
- val bytearray: Array[Type] = Array(Type.GetType("System.Byte[]"))
- SYMTAB_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(bytearray)
- SYMTAB_DEFAULT_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(Type.EmptyTypes)
-
- val delegate: Type = getTypeSafe("System.Delegate")
- val dargs: Array[Type] = Array(delegate, delegate)
- DELEGATE_COMBINE = delegate.GetMethod("Combine", dargs)
- DELEGATE_REMOVE = delegate.GetMethod("Remove", dargs)
- }
- catch {
- case e: RuntimeException =>
- Console.println(e.getMessage)
- throw e
- }
-
- //##########################################################################
- // type mapping and lookup
-
- def getType(name: String): Type = Type.GetType(name)
-
- def getTypeSafe(name: String): Type = {
- val t = Type.GetType(name)
- assert(t != null, name)
- t
- }
-
- def mkArrayType(elemType: Type): Type = getType(elemType.FullName + "[]")
-
- def isDelegateType(t: Type): Boolean = { t.BaseType() == DELEGATE }
-} // CLRTypes
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
deleted file mode 100644
index 5a0253c18b..0000000000
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ /dev/null
@@ -1,850 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2004-2013 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package symtab
-package clr
-
-import java.io.IOException
-import io.MsilFile
-import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute, _}
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.pickling.UnPickler
-import ch.epfl.lamp.compiler.msil.Type.TMVarUsage
-import scala.language.implicitConversions
-
-/**
- * @author Nikolay Mihaylov
- */
-abstract class TypeParser {
-
- val global: Global
-
- import global._
- import loaders.clrTypes
-
- //##########################################################################
-
- private var clazz: Symbol = _
- private var instanceDefs: Scope = _ // was members
- private var staticModule: Symbol = _ // was staticsClass
- private var staticDefs: Scope = _ // was statics
-
- protected def statics: Symbol = staticModule.moduleClass
-
- protected var busy: Boolean = false // lock to detect recursive reads
-
- private object unpickler extends UnPickler {
- val global: TypeParser.this.global.type = TypeParser.this.global
- }
-
- def parse(typ: MSILType, root: Symbol) {
-
- def handleError(e: Throwable) = {
- if (settings.debug.value) e.printStackTrace() //debug
- throw new IOException("type '" + typ.FullName + "' is broken\n(" + e.getMessage() + ")")
- }
- assert(!busy)
- busy = true
-
- if (root.isModule) {
- this.clazz = root.companionClass
- this.staticModule = root
- } else {
- this.clazz = root
- this.staticModule = root.companionModule
- }
- try {
- parseClass(typ)
- } catch {
- case e: FatalError => handleError(e)
- case e: RuntimeException => handleError(e)
- }
- busy = false
- }
-
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
- override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
- }
-
- /* the names `classTParams` and `newTParams` stem from the forJVM version (ClassfileParser.sigToType())
- * but there are differences that should be kept in mind.
- * forMSIL, a nested class knows nothing about any type-params in the nesting class,
- * therefore newTParams is redundant (other than for recording lexical order),
- * it always contains the same elements as classTParams.value */
- val classTParams = scala.collection.mutable.Map[Int,Symbol]() // TODO should this be a stack? (i.e., is it possible for >1 invocation to getCLRType on the same TypeParser instance be active )
- val newTParams = new scala.collection.mutable.ListBuffer[Symbol]()
- val methodTParams = scala.collection.mutable.Map[Int,Symbol]()
-
- private def sig2typeBounds(tvarCILDef: GenericParamAndConstraints): Type = {
- val ts = new scala.collection.mutable.ListBuffer[Type]
- for (cnstrnt <- tvarCILDef.Constraints) {
- ts += getCLRType(cnstrnt) // TODO we're definitely not at or after erasure, no need to call objToAny, right?
- }
- TypeBounds.upper(intersectionType(ts.toList, clazz))
- // TODO variance???
- }
-
- private def createViewFromTo(viewSuffix : String, fromTpe : Type, toTpe : Type,
- addToboxMethodMap : Boolean, isAddressOf : Boolean) : Symbol = {
- val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead?
- val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(List(fromTpe)), toTpe)
- val vmsym = createMethod(nme.view_ + viewSuffix, flags, viewMethodType, null, true);
- // !!! this used to mutate a mutable map in definitions, but that map became
- // immutable and this kept "working" with a no-op. So now it's commented out
- // since I retired the deprecated code which allowed for that bug.
- //
- // if (addToboxMethodMap) definitions.boxMethod(clazz) = vmsym
-
- if (isAddressOf) clrTypes.addressOfViews += vmsym
- vmsym
- }
-
- private def createDefaultConstructor(typ: MSILType) {
- val attrs = MethodAttributes.Public | MethodAttributes.RTSpecialName | MethodAttributes.SpecialName // TODO instance
- val declType= typ
- val method = new ConstructorInfo(declType, attrs, Array[MSILType]())
- val flags = Flags.JAVA
- val owner = clazz
- val methodSym = owner.newMethod(nme.CONSTRUCTOR, NoPosition, flags)
- val rettype = clazz.tpe
- val mtype = methodType(Array[MSILType](), rettype);
- val mInfo = mtype(methodSym)
- methodSym.setInfo(mInfo)
- instanceDefs.enter(methodSym);
- clrTypes.constructors(methodSym) = method
- }
-
- private def parseClass(typ: MSILType) {
-
- {
- val t4c = clrTypes.types.get(clazz)
- assert(t4c == None || t4c == Some(typ))
- }
- clrTypes.types(clazz) = typ
-
- {
- val c4t = clrTypes.sym2type.get(typ)
- assert(c4t == None || c4t == Some(clazz))
- }
- clrTypes.sym2type(typ) = clazz
-
- if (typ.IsDefined(clrTypes.SCALA_SYMTAB_ATTR, false)) {
- val attrs = typ.GetCustomAttributes(clrTypes.SCALA_SYMTAB_ATTR, false);
- assert (attrs.length == 1, attrs.length);
- val a = attrs(0).asInstanceOf[MSILAttribute];
- assert (a.getConstructor() == clrTypes.SYMTAB_CONSTR);
- val symtab = a.getConstructorArguments()(0).asInstanceOf[Array[Byte]]
- unpickler.unpickle(symtab, 0, clazz, staticModule, typ.FullName);
- val mClass = clrTypes.getType(typ.FullName + "$");
- if (mClass != null) {
- clrTypes.types(statics) = mClass;
- val moduleInstance = mClass.GetField("MODULE$");
- assert (moduleInstance != null, mClass);
- clrTypes.fields(statics) = moduleInstance;
- }
- return
- }
- val flags = translateAttributes(typ)
-
- var clazzBoxed : Symbol = NoSymbol
- var clazzMgdPtr : Symbol = NoSymbol
-
- val canBeTakenAddressOf = (typ.IsValueType || typ.IsEnum) && (typ.FullName != "System.Enum")
-
- if(canBeTakenAddressOf) {
- clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("Boxed"))
- clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("MgdPtr"))
- clrTypes.mdgptrcls4clssym(clazz) = clazzMgdPtr
-      /* adding typMgdPtr to clrTypes.sym2type should happen early (before metadata for supertypes is parsed,
-         before metadata for members is parsed) so that clazzMgdPtr can be found by getCLRType. */
- val typMgdPtr = MSILType.mkByRef(typ)
- clrTypes.types(clazzMgdPtr) = typMgdPtr
- clrTypes.sym2type(typMgdPtr) = clazzMgdPtr
- /* clazzMgdPtr but not clazzBoxed is mapped by clrTypes.types into an msil.Type instance,
- because there's no metadata-level representation for a "boxed valuetype" */
- val instanceDefsMgdPtr = newScope
- val classInfoMgdPtr = ClassInfoType(definitions.anyvalparam, instanceDefsMgdPtr, clazzMgdPtr)
- clazzMgdPtr.setFlag(flags)
- clazzMgdPtr.setInfo(classInfoMgdPtr)
- }
-
-/* START CLR generics (snippet 1) */
- // first pass
- for (tvarCILDef <- typ.getSortedTVars() ) {
- val tpname = newTypeName(tvarCILDef.Name.replaceAll("!", "")) // TODO are really all type-params named in all assemblies out there? (NO)
- val tpsym = clazz.newTypeParameter(tpname)
- classTParams.put(tvarCILDef.Number, tpsym)
- newTParams += tpsym
- // TODO wouldn't the following also be needed later, i.e. during getCLRType
- tpsym.setInfo(definitions.AnyClass.tpe)
- }
- // second pass
- for (tvarCILDef <- typ.getSortedTVars() ) {
- val tpsym = classTParams(tvarCILDef.Number)
- tpsym.setInfo(sig2typeBounds(tvarCILDef)) // we never skip bounds unlike in forJVM
- }
-/* END CLR generics (snippet 1) */
- val ownTypeParams = newTParams.toList
-/* START CLR generics (snippet 2) */
- if (!ownTypeParams.isEmpty) {
- clazz.setInfo(new TypeParamsType(ownTypeParams))
- if(typ.IsValueType && !typ.IsEnum) {
- clazzBoxed.setInfo(new TypeParamsType(ownTypeParams))
- }
- }
-/* END CLR generics (snippet 2) */
- instanceDefs = newScope
- staticDefs = newScope
-
- val classInfoAsInMetadata = {
- val ifaces: Array[MSILType] = typ.getInterfaces()
- val superType = if (typ.BaseType() != null) getCLRType(typ.BaseType())
- else if (typ.IsInterface()) definitions.ObjectClass.tpe
- else definitions.AnyClass.tpe; // this branch activates for System.Object only.
- // parents (i.e., base type and interfaces)
- val parents = new scala.collection.mutable.ListBuffer[Type]()
- parents += superType
- for (iface <- ifaces) {
- parents += getCLRType(iface) // here the variance doesn't matter
- }
- // methods, properties, events, fields are entered in a moment
- if (canBeTakenAddressOf) {
- val instanceDefsBoxed = newScope
- ClassInfoType(parents.toList, instanceDefsBoxed, clazzBoxed)
- } else
- ClassInfoType(parents.toList, instanceDefs, clazz)
- }
-
- val staticInfo = ClassInfoType(List(), staticDefs, statics)
-
- clazz.setFlag(flags)
-
- if (canBeTakenAddressOf) {
- clazzBoxed.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else genPolyType(ownTypeParams, classInfoAsInMetadata) )
- clazzBoxed.setFlag(flags)
- val rawValueInfoType = ClassInfoType(definitions.anyvalparam, instanceDefs, clazz)
- clazz.setInfo( if (ownTypeParams.isEmpty) rawValueInfoType
- else genPolyType(ownTypeParams, rawValueInfoType) )
- } else {
- clazz.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else genPolyType(ownTypeParams, classInfoAsInMetadata) )
- }
-
- // TODO I don't remember if statics.setInfo and staticModule.setInfo should also know about type params
- statics.setFlag(Flags.JAVA)
- statics.setInfo(staticInfo)
- staticModule.setFlag(Flags.JAVA)
- staticModule.setInfo(statics.tpe)
-
-
- if (canBeTakenAddressOf) {
- // implicit conversions are owned by staticModule.moduleClass
- createViewFromTo("2Boxed", clazz.tpe, clazzBoxed.tpe, addToboxMethodMap = true, isAddressOf = false)
- // createViewFromTo("2Object", clazz.tpe, definitions.ObjectClass.tpe, addToboxMethodMap = true, isAddressOf = false)
- createViewFromTo("2MgdPtr", clazz.tpe, clazzMgdPtr.tpe, addToboxMethodMap = false, isAddressOf = true)
- // a return can't have type managed-pointer, thus a dereference-conversion is not needed
- // similarly, a method can't declare as return type "boxed valuetype"
- if (!typ.IsEnum) {
- // a synthetic default constructor for raw-type allows `new X' syntax
- createDefaultConstructor(typ)
- }
- }
-
- // import nested types
- for (ntype <- typ.getNestedTypes() if !(ntype.IsNestedPrivate || ntype.IsNestedAssembly || ntype.IsNestedFamANDAssem)
- || ntype.IsInterface /* TODO why shouldn't nested ifaces be type-parsed too? */ )
- {
- val loader = new loaders.MsilFileLoader(new MsilFile(ntype))
- val nclazz = statics.newClass(ntype.Name)
- val nmodule = statics.newModule(ntype.Name)
- nclazz.setInfo(loader)
- nmodule.setInfo(loader)
- staticDefs.enter(nclazz)
- staticDefs.enter(nmodule)
-
- assert(nclazz.companionModule == nmodule, nmodule)
- assert(nmodule.companionClass == nclazz, nclazz)
- }
-
- val fields = typ.getFields()
- for (field <- fields
- if !(field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly)
- if (getCLRType(field.FieldType) != null)
- ) {
- assert (!field.FieldType.IsPointer && !field.FieldType.IsByRef, "CLR requirement")
- val flags = translateAttributes(field);
- val name = newTermName(field.Name);
- val fieldType =
- if (field.IsLiteral && !field.FieldType.IsEnum && isDefinedAtgetConstant(getCLRType(field.FieldType)))
- ConstantType(getConstant(getCLRType(field.FieldType), field.getValue))
- else
- getCLRType(field.FieldType)
- val owner = if (field.IsStatic()) statics else clazz;
- val sym = owner.newValue(name, NoPosition, flags).setInfo(fieldType);
- // TODO: set private within!!! -> look at typechecker/Namers.scala
- (if (field.IsStatic()) staticDefs else instanceDefs).enter(sym);
- clrTypes.fields(sym) = field;
- }
-
- for (constr <- typ.getConstructors() if !constr.IsStatic() && !constr.IsPrivate() &&
- !constr.IsAssembly() && !constr.IsFamilyAndAssembly() && !constr.HasPtrParamOrRetType())
- createMethod(constr);
-
- // initially also contains getters and setters of properties.
- val methodsSet = new mutable.HashSet[MethodInfo]();
- methodsSet ++= typ.getMethods();
-
- for (prop <- typ.getProperties) {
- val propType: Type = getCLSType(prop.PropertyType);
- if (propType != null) {
- val getter: MethodInfo = prop.GetGetMethod(true);
- val setter: MethodInfo = prop.GetSetMethod(true);
- var gparamsLength: Int = -1;
- if (!(getter == null || getter.IsPrivate || getter.IsAssembly
- || getter.IsFamilyAndAssembly || getter.HasPtrParamOrRetType))
- {
- assert(prop.PropertyType == getter.ReturnType);
- val gparams: Array[ParameterInfo] = getter.GetParameters();
- gparamsLength = gparams.length;
- val name: TermName = if (gparamsLength == 0) prop.Name else nme.apply;
- val flags = translateAttributes(getter);
- val owner: Symbol = if (getter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(name, NoPosition, flags)
- val mtype: Type = if (gparamsLength == 0) NullaryMethodType(propType) // .NET properties can't be polymorphic
- else methodType(getter, getter.ReturnType)(methodSym)
- methodSym.setInfo(mtype);
- methodSym.setFlag(Flags.ACCESSOR);
- (if (getter.IsStatic) staticDefs else instanceDefs).enter(methodSym)
- clrTypes.methods(methodSym) = getter;
- methodsSet -= getter;
- }
- if (!(setter == null || setter.IsPrivate || setter.IsAssembly
- || setter.IsFamilyAndAssembly || setter.HasPtrParamOrRetType))
- {
- val sparams: Array[ParameterInfo] = setter.GetParameters()
- if(getter != null)
- assert(getter.IsStatic == setter.IsStatic);
- assert(setter.ReturnType == clrTypes.VOID);
- if(getter != null)
- assert(sparams.length == gparamsLength + 1, "" + getter + "; " + setter);
-
- val name: TermName = if (gparamsLength == 0) nme.getterToSetter(prop.Name)
- else nme.update;
- val flags = translateAttributes(setter);
- val mtype = methodType(setter, definitions.UnitClass.tpe);
- val owner: Symbol = if (setter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(name, NoPosition, flags)
- methodSym.setInfo(mtype(methodSym))
- methodSym.setFlag(Flags.ACCESSOR);
- (if (setter.IsStatic) staticDefs else instanceDefs).enter(methodSym);
- clrTypes.methods(methodSym) = setter;
- methodsSet -= setter;
- }
- }
- }
-
-/* for (event <- typ.GetEvents) {
- // adding += and -= methods to add delegates to an event.
-    // raising the event is not possible from outside the class (this is
-    // generally so in the .NET world)
- val adder: MethodInfo = event.GetAddMethod();
- val remover: MethodInfo = event.GetRemoveMethod();
- if (!(adder == null || adder.IsPrivate || adder.IsAssembly
- || adder.IsFamilyAndAssembly))
- {
- assert(adder.ReturnType == clrTypes.VOID);
- assert(adder.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType));
- val name = encode("+=");
- val flags = translateAttributes(adder);
- val mtype: Type = methodType(adder, adder.ReturnType);
- createMethod(name, flags, mtype, adder, adder.IsStatic)
- methodsSet -= adder;
- }
- if (!(remover == null || remover.IsPrivate || remover.IsAssembly
- || remover.IsFamilyAndAssembly))
- {
- assert(remover.ReturnType == clrTypes.VOID);
- assert(remover.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType));
- val name = encode("-=");
- val flags = translateAttributes(remover);
- val mtype: Type = methodType(remover, remover.ReturnType);
- createMethod(name, flags, mtype, remover, remover.IsStatic)
- methodsSet -= remover;
- }
- } */
-
-/* Adds a view amounting to syntax sugar for a CLR implicit overload.
- The long-form syntax can also be supported if "methodsSet -= method" (last statement) is removed.
-
- /* remember, there's typ.getMethods and type.GetMethods */
- for (method <- typ.getMethods)
- if(!method.HasPtrParamOrRetType &&
- method.IsPublic && method.IsStatic && method.IsSpecialName &&
- method.Name == "op_Implicit") {
- // create a view: typ => method's return type
- val viewRetType: Type = getCLRType(method.ReturnType)
- val viewParamTypes: List[Type] = method.GetParameters().map(_.ParameterType).map(getCLSType).toList;
- /* The spec says "The operator method shall be defined as a static method on either the operand or return type."
- * We don't consider the declaring type for the purposes of definitions.functionType,
- * instead we regard op_Implicit's argument type and return type as defining the view's signature.
- */
- if (viewRetType != null && !viewParamTypes.contains(null)) {
- /* The check above applies e.g. to System.Decimal that has a conversion from UInt16, a non-CLS type, whose CLS-mapping returns null */
- val funType: Type = definitions.functionType(viewParamTypes, viewRetType);
- val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead?
- val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(viewParamTypes), funType)
- val vmsym = createMethod(nme.view_, flags, viewMethodType, method, true);
- methodsSet -= method;
- }
- }
-*/
-
- for (method <- methodsSet.iterator)
- if (!method.IsPrivate() && !method.IsAssembly() && !method.IsFamilyAndAssembly()
- && !method.HasPtrParamOrRetType)
- createMethod(method);
-
- // Create methods and views for delegate support
- if (clrTypes.isDelegateType(typ)) {
- createDelegateView(typ)
- createDelegateChainers(typ)
- }
-
- // for enumerations introduce comparison and bitwise logical operations;
- // the backend will recognize them and replace them with comparison or
- // bitwise logical operations on the primitive underlying type
-
- if (typ.IsEnum) {
- val ENUM_CMP_NAMES = List(nme.EQ, nme.NE, nme.LT, nme.LE, nme.GT, nme.GE);
- val ENUM_BIT_LOG_NAMES = List(nme.OR, nme.AND, nme.XOR);
-
- val flags = Flags.JAVA | Flags.FINAL
- for (cmpName <- ENUM_CMP_NAMES) {
- val enumCmp = clazz.newMethod(cmpName)
- val enumCmpType = JavaMethodType(enumCmp.newSyntheticValueParams(List(clazz.tpe)), definitions.BooleanClass.tpe)
- enumCmp.setFlag(flags).setInfo(enumCmpType)
- instanceDefs.enter(enumCmp)
- }
-
- for (bitLogName <- ENUM_BIT_LOG_NAMES) {
- val enumBitLog = clazz.newMethod(bitLogName)
- val enumBitLogType = JavaMethodType(enumBitLog.newSyntheticValueParams(List(clazz.tpe)), clazz.tpe /* was classInfo, infinite typer */)
- enumBitLog.setFlag(flags).setInfo(enumBitLogType)
- instanceDefs.enter(enumBitLog)
- }
- }
-
- } // parseClass
-
- private def populateMethodTParams(method: MethodBase, methodSym: MethodSymbol) : List[Symbol] = {
- if(!method.IsGeneric) Nil
- else {
- methodTParams.clear
- val newMethodTParams = new scala.collection.mutable.ListBuffer[Symbol]()
-
- // first pass
- for (mvarCILDef <- method.getSortedMVars() ) {
- val mtpname = newTypeName(mvarCILDef.Name.replaceAll("!", "")) // TODO are really all method-level-type-params named in all assemblies out there? (NO)
- val mtpsym = methodSym.newTypeParameter(mtpname)
- methodTParams.put(mvarCILDef.Number, mtpsym)
- newMethodTParams += mtpsym
- // TODO wouldn't the following also be needed later, i.e. during getCLRType
- mtpsym.setInfo(definitions.AnyClass.tpe)
- }
- // second pass
- for (mvarCILDef <- method.getSortedMVars() ) {
- val mtpsym = methodTParams(mvarCILDef.Number)
- mtpsym.setInfo(sig2typeBounds(mvarCILDef)) // we never skip bounds unlike in forJVM
- }
-
- newMethodTParams.toList
- }
- }
-
- private def createMethod(method: MethodBase) {
-
- val flags = translateAttributes(method);
- val owner = if (method.IsStatic()) statics else clazz;
- val methodSym = owner.newMethod(getName(method), NoPosition, flags)
- /* START CLR generics (snippet 3) */
- val newMethodTParams = populateMethodTParams(method, methodSym)
- /* END CLR generics (snippet 3) */
-
- val rettype = if (method.IsConstructor()) clazz.tpe
- else getCLSType(method.asInstanceOf[MethodInfo].ReturnType);
- if (rettype == null) return;
- val mtype = methodType(method, rettype);
- if (mtype == null) return;
-/* START CLR generics (snippet 4) */
- val mInfo = if (method.IsGeneric) genPolyType(newMethodTParams, mtype(methodSym))
- else mtype(methodSym)
-/* END CLR generics (snippet 4) */
-/* START CLR non-generics (snippet 4)
- val mInfo = mtype(methodSym)
- END CLR non-generics (snippet 4) */
- methodSym.setInfo(mInfo)
- (if (method.IsStatic()) staticDefs else instanceDefs).enter(methodSym);
- if (method.IsConstructor())
- clrTypes.constructors(methodSym) = method.asInstanceOf[ConstructorInfo]
- else clrTypes.methods(methodSym) = method.asInstanceOf[MethodInfo];
- }
-
- private def createMethod(name: TermName, flags: Long, args: Array[MSILType], retType: MSILType, method: MethodInfo, statik: Boolean): Symbol = {
- val mtype = methodType(args, getCLSType(retType))
- assert(mtype != null)
- createMethod(name, flags, mtype, method, statik)
- }
-
- private def createMethod(name: TermName, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = {
- val methodSym: Symbol = (if (statik) statics else clazz).newMethod(name)
- methodSym.setFlag(flags).setInfo(mtype(methodSym))
- (if (statik) staticDefs else instanceDefs).enter(methodSym)
- if (method != null)
- clrTypes.methods(methodSym) = method
- methodSym
- }
-
- private def createDelegateView(typ: MSILType) = {
- val invoke: MethodInfo = typ.GetMember("Invoke")(0).asInstanceOf[MethodInfo];
- val invokeRetType: Type = getCLRType(invoke.ReturnType);
- val invokeParamTypes: List[Type] =invoke.GetParameters().map(_.ParameterType).map(getCLSType).toList;
- val funType: Type = definitions.functionType(invokeParamTypes, invokeRetType);
-
- val typClrType: Type = getCLRType(typ);
- val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? think not needed
-
- // create the forward view: delegate => function
- val delegateParamTypes: List[Type] = List(typClrType);
- // not ImplicitMethodType, this is for methods with implicit parameters (not implicit methods)
- val forwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(delegateParamTypes), funType)
- val fmsym = createMethod(nme.view_, flags, forwardViewMethodType, null, true);
-
- // create the backward view: function => delegate
- val functionParamTypes: List[Type] = List(funType);
- val backwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(functionParamTypes), typClrType)
- val bmsym = createMethod(nme.view_, flags, backwardViewMethodType, null, true);
- }
-
- private def createDelegateChainers(typ: MSILType) = {
- val flags: Long = Flags.JAVA | Flags.FINAL
- val args: Array[MSILType] = Array(typ)
-
- var s = createMethod(encode("+="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_COMBINE, false);
- s = createMethod(encode("-="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_REMOVE, false);
-
- s = createMethod(nme.PLUS, flags, args, typ, clrTypes.DELEGATE_COMBINE, false);
- s = createMethod(nme.MINUS, flags, args, typ, clrTypes.DELEGATE_REMOVE, false);
- }
-
- private def getName(method: MethodBase): TermName = {
-
- def operatorOverload(name : String, paramsArity : Int) : Option[Name] = paramsArity match {
- case 1 => name match {
-      // Partition I.10.3.1
- case "op_Decrement" => Some(encode("--"))
- case "op_Increment" => Some(encode("++"))
- case "op_UnaryNegation" => Some(nme.UNARY_-)
- case "op_UnaryPlus" => Some(nme.UNARY_+)
- case "op_LogicalNot" => Some(nme.UNARY_!)
- case "op_OnesComplement" => Some(nme.UNARY_~)
- /* op_True and op_False have no operator symbol assigned,
- Other methods that will have to be written in full are:
- op_AddressOf & (unary)
- op_PointerDereference * (unary) */
- case _ => None
- }
- case 2 => name match {
-      // Partition I.10.3.2
- case "op_Addition" => Some(nme.ADD)
- case "op_Subtraction" => Some(nme.SUB)
- case "op_Multiply" => Some(nme.MUL)
- case "op_Division" => Some(nme.DIV)
- case "op_Modulus" => Some(nme.MOD)
- case "op_ExclusiveOr" => Some(nme.XOR)
- case "op_BitwiseAnd" => Some(nme.AND)
- case "op_BitwiseOr" => Some(nme.OR)
- case "op_LogicalAnd" => Some(nme.ZAND)
- case "op_LogicalOr" => Some(nme.ZOR)
- case "op_LeftShift" => Some(nme.LSL)
- case "op_RightShift" => Some(nme.ASR)
- case "op_Equality" => Some(nme.EQ)
- case "op_GreaterThan" => Some(nme.GT)
- case "op_LessThan" => Some(nme.LT)
- case "op_Inequality" => Some(nme.NE)
- case "op_GreaterThanOrEqual" => Some(nme.GE)
- case "op_LessThanOrEqual" => Some(nme.LE)
-
- /* op_MemberSelection is reserved in Scala */
-
- /* The standard does not assign operator symbols to op_Assign , op_SignedRightShift , op_UnsignedRightShift ,
- * and op_UnsignedRightShiftAssignment so those names will be used instead to invoke those methods. */
-
- /*
- The remaining binary operators are not overloaded in C# and are therefore not in widespread use. They have to be written in full.
-
- op_RightShiftAssignment >>=
- op_MultiplicationAssignment *=
- op_PointerToMemberSelection ->*
- op_SubtractionAssignment -=
- op_ExclusiveOrAssignment ^=
- op_LeftShiftAssignment <<=
- op_ModulusAssignment %=
- op_AdditionAssignment +=
- op_BitwiseAndAssignment &=
- op_BitwiseOrAssignment |=
- op_Comma ,
- op_DivisionAssignment /=
- */
- case _ => None
- }
- case _ => None
- }
-
- if (method.IsConstructor()) return nme.CONSTRUCTOR;
- val name = method.Name;
- if (method.IsStatic()) {
- if(method.IsSpecialName) {
- val paramsArity = method.GetParameters().size
- // handle operator overload, otherwise handle as any static method
- val operName = operatorOverload(name, paramsArity)
- if (operName.isDefined) { return operName.get; }
- }
- return newTermName(name);
- }
- val params = method.GetParameters();
- name match {
- case "GetHashCode" if (params.length == 0) => nme.hashCode_;
- case "ToString" if (params.length == 0) => nme.toString_;
- case "Finalize" if (params.length == 0) => nme.finalize_;
- case "Equals" if (params.length == 1 && params(0).ParameterType == clrTypes.OBJECT) =>
- nme.equals_;
- case "Invoke" if (clrTypes.isDelegateType(method.DeclaringType)) => nme.apply;
- case _ => newTermName(name);
- }
- }
-
- //##########################################################################
-
- private def methodType(method: MethodBase, rettype: MSILType): Symbol => Type = {
- val rtype = getCLSType(rettype);
- if (rtype == null) null else methodType(method, rtype);
- }
-
- /** Return a method type for the given method. */
- private def methodType(method: MethodBase, rettype: Type): Symbol => Type =
- methodType(method.GetParameters().map(_.ParameterType), rettype);
-
- /** Return a method type for the provided argument types and return type. */
- private def methodType(argtypes: Array[MSILType], rettype: Type): Symbol => Type = {
- def paramType(typ: MSILType): Type =
- if (typ eq clrTypes.OBJECT) definitions.AnyClass.tpe // TODO a hack to compile scalalib, should be definitions.AnyRefClass.tpe
- else getCLSType(typ);
- val ptypes = argtypes.map(paramType).toList;
- if (ptypes.contains(null)) null
- else method => JavaMethodType(method.newSyntheticValueParams(ptypes), rettype);
- }
-
- //##########################################################################
-
- private def getClassType(typ: MSILType): Type = {
- assert(typ != null);
- val res = rootMirror.getClassByName(typ.FullName.replace('+', '.') : TypeName).tpe;
- //if (res.isError())
- // global.reporter.error("unknown class reference " + type.FullName);
- res
- }
-
- private def getCLSType(typ: MSILType): Type = { // getCLS returns non-null for types GenMSIL can handle, be they CLS-compliant or not
- if (typ.IsTMVarUsage())
- /* START CLR generics (snippet 5) */
- getCLRType(typ)
- /* END CLR generics (snippet 5) */
- /* START CLR non-generics (snippet 5)
- null
- END CLR non-generics (snippet 5) */
-    else if ( /* TODO hack: if UBYTE is uncommented, "ambiguous reference to overloaded definition" ensues, for example for System.Math.Max(x, y) */
- typ == clrTypes.USHORT || typ == clrTypes.UINT || typ == clrTypes.ULONG
- /* || typ == clrTypes.UBYTE */
- || typ.IsNotPublic() || typ.IsNestedPrivate()
- || typ.IsNestedAssembly() || typ.IsNestedFamANDAssem()
- || typ.IsPointer()
- || (typ.IsArray() && getCLRType(typ.GetElementType()) == null) /* TODO hack: getCLR instead of getCLS */
- || (typ.IsByRef() && !typ.GetElementType().CanBeTakenAddressOf()))
- null
- else
- getCLRType(typ)
- }
-
- private def getCLRTypeIfPrimitiveNullOtherwise(typ: MSILType): Type =
- if (typ == clrTypes.OBJECT)
- definitions.ObjectClass.tpe;
- else if (typ == clrTypes.VALUE_TYPE)
- definitions.AnyValClass.tpe
- else if (typ == clrTypes.STRING)
- definitions.StringClass.tpe;
- else if (typ == clrTypes.VOID)
- definitions.UnitClass.tpe
- else if (typ == clrTypes.BOOLEAN)
- definitions.BooleanClass.tpe
- else if (typ == clrTypes.CHAR)
- definitions.CharClass.tpe
- else if ((typ == clrTypes.BYTE) || (typ == clrTypes.UBYTE)) // TODO U... is a hack to compile scalalib
- definitions.ByteClass.tpe
-    else if ((typ == clrTypes.SHORT) || (typ == clrTypes.USHORT)) // TODO U... is a hack to compile scalalib
- definitions.ShortClass.tpe
- else if ((typ == clrTypes.INT) || (typ == clrTypes.UINT)) // TODO U... is a hack to compile scalalib
- definitions.IntClass.tpe
-    else if ((typ == clrTypes.LONG) || (typ == clrTypes.ULONG)) // TODO U... is a hack to compile scalalib
- definitions.LongClass.tpe
- else if (typ == clrTypes.FLOAT)
- definitions.FloatClass.tpe
- else if (typ == clrTypes.DOUBLE)
- definitions.DoubleClass.tpe
- else null
-
-
- private def getCLRType(tMSIL: MSILType): Type = {
- var res = getCLRTypeIfPrimitiveNullOtherwise(tMSIL)
- if (res != null) res
- else if (tMSIL.isInstanceOf[ConstructedType]) {
- val ct = tMSIL.asInstanceOf[ConstructedType]
- /* START CLR generics (snippet 6) */
- val cttpArgs = ct.typeArgs.map(tmsil => getCLRType(tmsil)).toList
- appliedType(getCLRType(ct.instantiatedType), cttpArgs)
- /* END CLR generics (snippet 6) */
- /* START CLR non-generics (snippet 6)
- getCLRType(ct.instantiatedType)
- END CLR non-generics (snippet 6) */
- } else if (tMSIL.isInstanceOf[TMVarUsage]) {
- /* START CLR generics (snippet 7) */
- val tVarUsage = tMSIL.asInstanceOf[TMVarUsage]
- val tVarNumber = tVarUsage.Number
- if (tVarUsage.isTVar) classTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst
- else methodTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst
- /* END CLR generics (snippet 7) */
- /* START CLR non-generics (snippet 7)
- null // definitions.ObjectClass.tpe
- END CLR non-generics (snippet 7) */
- } else if (tMSIL.IsArray()) {
- var elemtp = getCLRType(tMSIL.GetElementType())
- // cut&pasted from ClassfileParser
- // make unbounded Array[T] where T is a type variable into Array[T with Object]
- // (this is necessary because such arrays have a representation which is incompatible
- // with arrays of primitive types).
- // TODO does that incompatibility also apply to .NET?
- if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe))
- elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
- appliedType(definitions.ArrayClass.tpe, List(elemtp))
- } else {
- res = clrTypes.sym2type.get(tMSIL) match {
- case Some(sym) => sym.tpe
- case None => if (tMSIL.IsByRef && tMSIL.GetElementType.IsValueType) {
- val addressed = getCLRType(tMSIL.GetElementType)
- val clasym = addressed.typeSymbolDirect // TODO should be .typeSymbol?
- clasym.info.load(clasym)
- val secondAttempt = clrTypes.sym2type.get(tMSIL)
- secondAttempt match { case Some(sym) => sym.tpe
- case None => null
- }
- } else getClassType(tMSIL)
- }
- if (res == null)
- null // TODO new RuntimeException()
- else res
- }
- }
-
-  // the values are Java box classes (e.g. Integer, Boolean, Character);
-  // use java.lang.Number to extract the value (for numbers; not for boolean or character)
- // see ch.epfl.lamp.compiler.msil.util.PEStream.java
- def getConstant(constType: Type, value: Object): Constant = {
- val typeClass = constType.typeSymbol
- if (typeClass == definitions.BooleanClass)
- Constant(value.asInstanceOf[java.lang.Boolean].booleanValue)
- else if (typeClass == definitions.ByteClass)
- Constant(value.asInstanceOf[java.lang.Number].byteValue)
- else if (typeClass == definitions.ShortClass)
- Constant(value.asInstanceOf[java.lang.Number].shortValue)
- else if (typeClass == definitions.CharClass)
- Constant(value.asInstanceOf[java.lang.Character].charValue)
- else if (typeClass == definitions.IntClass)
- Constant(value.asInstanceOf[java.lang.Number].intValue)
- else if (typeClass == definitions.LongClass)
- Constant(value.asInstanceOf[java.lang.Number].longValue)
- else if (typeClass == definitions.FloatClass)
- Constant(value.asInstanceOf[java.lang.Number].floatValue)
- else if (typeClass == definitions.DoubleClass)
- Constant(value.asInstanceOf[java.lang.Number].doubleValue)
- else if (typeClass == definitions.StringClass)
- Constant(value.asInstanceOf[java.lang.String])
- else
- abort("illegal value: " + value + ", class-symbol: " + typeClass)
- }
-
- def isDefinedAtgetConstant(constType: Type): Boolean = {
- val typeClass = constType.typeSymbol
- if ( (typeClass == definitions.BooleanClass)
- || (typeClass == definitions.ByteClass)
- || (typeClass == definitions.ShortClass)
- || (typeClass == definitions.CharClass)
- || (typeClass == definitions.IntClass)
- || (typeClass == definitions.LongClass)
- || (typeClass == definitions.FloatClass)
- || (typeClass == definitions.DoubleClass)
- || (typeClass == definitions.StringClass)
- )
- true
- else
- false
- }
-
- private def translateAttributes(typ: MSILType): Long = {
- var flags: Long = Flags.JAVA;
- if (typ.IsNotPublic() || typ.IsNestedPrivate()
- || typ.IsNestedAssembly() || typ.IsNestedFamANDAssem())
- flags = flags | Flags.PRIVATE;
- else if (typ.IsNestedFamily() || typ.IsNestedFamORAssem())
- flags = flags | Flags.PROTECTED;
- if (typ.IsAbstract())
- flags = flags | Flags.ABSTRACT;
- if (typ.IsSealed())
- flags = flags | Flags.FINAL;
- if (typ.IsInterface())
- flags = flags | Flags.INTERFACE | Flags.TRAIT | Flags.ABSTRACT;
-
- flags
- }
-
- private def translateAttributes(field: FieldInfo): Long = {
- var flags: Long = Flags.JAVA;
- if (field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly())
- flags = flags | Flags.PRIVATE;
- else if (field.IsFamily() || field.IsFamilyOrAssembly())
- flags = flags | Flags.PROTECTED;
- if (field.IsInitOnly() || field.IsLiteral())
- flags = flags | Flags.FINAL;
- else
- flags = flags | Flags.MUTABLE;
- if (field.IsStatic)
- flags = flags | Flags.STATIC
-
- flags
- }
-
- private def translateAttributes(method: MethodBase): Long = {
- var flags: Long = Flags.JAVA;
- if (method.IsPrivate() || method.IsAssembly() || method.IsFamilyAndAssembly())
- flags = flags | Flags.PRIVATE;
- else if (method.IsFamily() || method.IsFamilyOrAssembly())
- flags = flags | Flags.PROTECTED;
- if (method.IsAbstract())
- flags = flags | Flags.DEFERRED;
- if (method.IsStatic)
- flags = flags | Flags.STATIC
-
- flags
- }
-}
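
The deleted TypeParser translated CLR (MSIL) metadata into Scala compiler symbols; its primitive handling, getCLRTypeIfPrimitiveNullOtherwise, boils down to a fixed table from CLR primitive types to Scala value types. The following standalone sketch models that table as plain data for quick reference; the object and method names are invented for illustration and were not part of the deleted source.

// Standalone sketch (illustration only, not compiler code): the CLR-primitive
// to Scala mapping implemented by the deleted getCLRTypeIfPrimitiveNullOtherwise,
// keyed by the clrTypes constant names it matched on. Unsigned integral types
// collapse onto the signed Scala types, mirroring the "U... is a hack to
// compile scalalib" TODOs above.
object ClrPrimitiveMapping {
  val clrToScala: Map[String, String] = Map(
    "OBJECT"     -> "Object",
    "VALUE_TYPE" -> "AnyVal",
    "STRING"     -> "String",
    "VOID"       -> "Unit",
    "BOOLEAN"    -> "Boolean",
    "CHAR"       -> "Char",
    "BYTE"       -> "Byte",  "UBYTE"  -> "Byte",
    "SHORT"      -> "Short", "USHORT" -> "Short",
    "INT"        -> "Int",   "UINT"   -> "Int",
    "LONG"       -> "Long",  "ULONG"  -> "Long",
    "FLOAT"      -> "Float",
    "DOUBLE"     -> "Double"
  )

  // Anything not in the table fell through to class lookup, as getCLRType did.
  def lookup(clrConstant: String): String =
    clrToScala.getOrElse(clrConstant, "<non-primitive: resolve via class lookup>")

  def main(args: Array[String]): Unit =
    println(lookup("UINT")) // prints "Int"
}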
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index bacd8c39e1..2b7c6cca2c 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -8,8 +8,6 @@ package transform
import symtab._
import Flags._
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
abstract class AddInterfaces extends InfoTransform { self: Erasure =>
import global._ // the global environment
@@ -94,7 +92,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
impl.typeOfThis = iface.typeOfThis
impl.thisSym setName iface.thisSym.name
}
- impl.sourceFile = iface.sourceFile
+ impl.associatedFile = iface.sourceFile
if (inClass)
iface.owner.info.decls enter impl
@@ -111,7 +109,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
def implClass(iface: Symbol): Symbol = {
iface.info
- implClassMap.getOrElse(iface, atPhase(implClassPhase) {
+ implClassMap.getOrElse(iface, enteringPhase(implClassPhase) {
if (iface.implClass eq NoSymbol)
debuglog(s"${iface.fullLocationString} has no implClass yet, creating it now.")
else
@@ -145,7 +143,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
decls enter (
implClass.newMethod(nme.MIXIN_CONSTRUCTOR, implClass.pos)
- setInfo MethodType(Nil, UnitClass.tpe)
+ setInfo MethodType(Nil, UnitTpe)
)
}
@@ -176,8 +174,8 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
override def complete(implSym: Symbol) {
debuglog("LazyImplClassType completing " + implSym)
- /** If `tp` refers to a non-interface trait, return a
- * reference to its implementation class. Otherwise return `tp`.
+ /* If `tp` refers to a non-interface trait, return a
+ * reference to its implementation class. Otherwise return `tp`.
*/
def mixinToImplClass(tp: Type): Type = AddInterfaces.this.erasure(implSym) {
tp match { //@MATN: no normalize needed (comes after erasure)
@@ -191,12 +189,12 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
case ClassInfoType(parents, decls, _) =>
assert(phase == implClassPhase, tp)
// Impl class parents: Object first, matching interface last.
- val implParents = ObjectClass.tpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
+ val implParents = ObjectTpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
ClassInfoType(implParents, implDecls(implSym, decls), implSym)
case PolyType(_, restpe) =>
implType(restpe)
}
- implSym setInfo implType(beforeErasure(iface.info))
+ implSym setInfo implType(enteringErasure(iface.info))
}
override def load(clazz: Symbol) { complete(clazz) }
@@ -211,7 +209,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
case Nil => Nil
case hd :: tl =>
assert(!hd.typeSymbol.isTrait, clazz)
- if (clazz.isTrait) erasedTypeRef(ObjectClass) :: tl
+ if (clazz.isTrait) ObjectTpe :: tl
else parents
}
val decls1 = scopeTransform(clazz)(
@@ -251,7 +249,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
private def ifaceMemberDef(tree: Tree): Tree = createMemberDef(tree, true)(t => DefDef(t.symbol, EmptyTree))
private def ifaceTemplate(templ: Template): Template =
- treeCopy.Template(templ, templ.parents, emptyValDef, templ.body map ifaceMemberDef)
+ treeCopy.Template(templ, templ.parents, noSelfType, templ.body map ifaceMemberDef)
/** Transforms the member tree containing the implementation
* into a member of the impl class.
@@ -278,11 +276,11 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
*/
private def addMixinConstructorDef(clazz: Symbol, stats: List[Tree]): List[Tree] =
if (treeInfo.firstConstructor(stats) != EmptyTree) stats
- else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant()))) :: stats
+ else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant(())))) :: stats
private def implTemplate(clazz: Symbol, templ: Template): Template = atPos(templ.pos) {
val templ1 = (
- Template(templ.parents, emptyValDef, addMixinConstructorDef(clazz, templ.body map implMemberDef))
+ Template(templ.parents, noSelfType, addMixinConstructorDef(clazz, templ.body map implMemberDef))
setSymbol clazz.newLocalDummy(templ.pos)
)
templ1.changeOwner(templ.symbol.owner -> clazz, templ.symbol -> templ1.symbol)
@@ -317,10 +315,10 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
// body until now, because the typer knows that Any has no
// constructor and won't accept a call to super.init.
assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
- Block(List(Apply(gen.mkSuperSelect, Nil)), expr)
+ Block(List(Apply(gen.mkSuperInitCall, Nil)), expr)
case Block(stats, expr) =>
- // needs `hasSymbol` check because `supercall` could be a block (named / default args)
+ // needs `hasSymbolField` check because `supercall` could be a block (named / default args)
val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER))
treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr)
}
@@ -340,7 +338,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
deriveDefDef(tree)(addMixinConstructorCalls(_, sym.owner)) // (3)
case Template(parents, self, body) =>
val parents1 = sym.owner.info.parents map (t => TypeTree(t) setPos tree.pos)
- treeCopy.Template(tree, parents1, emptyValDef, body)
+ treeCopy.Template(tree, parents1, noSelfType, body)
case This(_) if sym.needsImplClass =>
val impl = implClass(sym)
var owner = currentOwner
@@ -352,7 +350,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
val mix1 = mix
if (mix == tpnme.EMPTY) mix
else {
- val ps = beforeErasure {
+ val ps = enteringErasure {
sym.info.parents dropWhile (p => p.symbol.name != mix)
}
assert(!ps.isEmpty, tree);
@@ -369,29 +367,3 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
}
}
}
-/*
- val ensureNoEscapes = new TypeTraverser {
- def ensureNoEscape(sym: Symbol) {
- if (sym.hasFlag(PRIVATE)) {
- var o = currentOwner;
- while (o != NoSymbol && o != sym.owner && !o.isLocal && !o.hasFlag(PRIVATE))
- o = o.owner
- if (o == sym.owner) sym.makeNotPrivate(base);
- }
- }
- def traverse(t: Type): TypeTraverser = {
- t match {
- case TypeRef(qual, sym, args) =>
- ensureNoEscape(sym)
- mapOver(t)
- case ClassInfoType(parents, decls, clazz) =>
- parents foreach { p => traverse; () }
- traverse(t.typeOfThis)
- case _ =>
- mapOver(t)
- }
- this
- }
- }
-
-*/
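
Aside from dropping MSIL-specific code paths, much of the churn in the AddInterfaces hunks above and the CleanUp hunks below is mechanical adoption of renamed compiler-internal APIs. The minimal sketch below summarizes the old-to-new spellings visible in these hunks as plain data; the object name is hypothetical and for illustration only.

// Illustrative sketch, not compiler source: internal-API renames applied in
// these hunks, old spelling -> new spelling.
object NscApiRenames {
  val renames: Map[String, String] = Map(
    "atPhase"                 -> "enteringPhase",
    "beforeErasure"           -> "enteringErasure",
    "UnitClass.tpe"           -> "UnitTpe",
    "ObjectClass.tpe"         -> "ObjectTpe",
    "IntClass.tpe"            -> "IntTpe",
    "emptyValDef"             -> "noSelfType",
    "sourceFile (assignment)" -> "associatedFile",
    "hasSymbol"               -> "hasSymbolField"
  )

  def main(args: Array[String]): Unit =
    renames.toSeq.sortBy(_._1).foreach { case (oldName, newName) =>
      println(s"$oldName -> $newName")
    }
}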
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 7a0b034fd0..f14fce5de9 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -11,7 +11,7 @@ import Flags._
import scala.collection._
import scala.language.postfixOps
-abstract class CleanUp extends Transform with ast.TreeDSL {
+abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
import global._
import definitions._
import CODE._
@@ -20,10 +20,22 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/** the following two members override abstract members in Transform */
val phaseName: String = "cleanup"
+ /* used in GenBCode: collects ClassDef symbols owning a main(Array[String]) method */
+ private var entryPoints: List[Symbol] = null
+ def getEntryPoints: List[Symbol] = {
+ assert(settings.isBCodeActive, "Candidate Java entry points are collected here only when GenBCode in use.")
+ entryPoints sortBy ("" + _.fullName) // For predictably ordered error messages.
+ }
+
+ override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = {
+ entryPoints = if (settings.isBCodeActive) Nil else null;
+ super.newPhase(prev)
+ }
+
protected def newTransformer(unit: CompilationUnit): Transformer =
new CleanUpTransformer(unit)
- class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
+ class CleanUpTransformer(unit: CompilationUnit) extends StaticsTransformer {
private val newStaticMembers = mutable.Buffer.empty[Tree]
private val newStaticInits = mutable.Buffer.empty[Tree]
private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
@@ -32,25 +44,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
newStaticInits.clear()
symbolsStoredAsStatic.clear()
}
- private def savingStatics[T](body: => T): T = {
- val savedNewStaticMembers : mutable.Buffer[Tree] = newStaticMembers.clone()
- val savedNewStaticInits : mutable.Buffer[Tree] = newStaticInits.clone()
- val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
- val result = body
-
- clearStatics()
- newStaticMembers ++= savedNewStaticMembers
- newStaticInits ++= savedNewStaticInits
- symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
-
- result
- }
private def transformTemplate(tree: Tree) = {
- val Template(parents, self, body) = tree
+ val Template(_, _, body) = tree
clearStatics()
val newBody = transformTrees(body)
val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
- try addStaticInits(templ) // postprocess to include static ctors
+ try addStaticInits(templ, newStaticInits, localTyper) // postprocess to include static ctors
finally clearStatics()
}
private def mkTerm(prefix: String): TermName = unit.freshTermName(prefix)
@@ -60,21 +59,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
private var localTyper: analyzer.Typer = null
- private object MethodDispatchType extends scala.Enumeration {
- val NO_CACHE, MONO_CACHE, POLY_CACHE = Value
- }
- import MethodDispatchType.{ NO_CACHE, MONO_CACHE, POLY_CACHE }
- private def dispatchType() = settings.refinementMethodDispatch.value match {
- case "no-cache" => NO_CACHE
- case "mono-cache" => MONO_CACHE
- case "poly-cache" => POLY_CACHE
- }
-
- def shouldRewriteTry(tree: Try) = {
- val sym = tree.tpe.typeSymbol
- forMSIL && (sym != UnitClass) && (sym != NothingClass)
- }
-
private def typedWithPos(pos: Position)(tree: Tree) =
localTyper.typedPos(pos)(tree)
@@ -91,7 +75,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def transformApplyDynamic(ad: ApplyDynamic) = {
val qual0 = ad.qual
val params = ad.args
- if (settings.logReflectiveCalls.value)
+ if (settings.logReflectiveCalls)
unit.echo(ad.pos, "method invocation uses reflection")
val typedPos = typedWithPos(ad.pos) _
@@ -106,11 +90,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
if (isFinal) FINAL else 0
)
- val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags) setInfoAndEnter forType
+ val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags.toLong) setInfoAndEnter forType
if (!isFinal)
varSym.addAnnotation(VolatileAttr)
- val varDef = typedPos( VAL(varSym) === forInit )
+ val varDef = typedPos(ValDef(varSym, forInit))
newStaticMembers append transform(varDef)
val varInit = typedPos( REF(varSym) === forInit )
@@ -120,7 +104,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
def addStaticMethodToClass(forBody: (Symbol, Symbol) => Tree): Symbol = {
- val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName), ad.pos, STATIC | SYNTHETIC)
+ val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName.toString), ad.pos, STATIC | SYNTHETIC)
val params = methSym.newSyntheticValueParams(List(ClassClass.tpe))
methSym setInfoAndEnter MethodType(params, MethodClass.tpe)
@@ -132,147 +116,79 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def fromTypesToClassArrayLiteral(paramTypes: List[Type]): Tree =
ArrayValue(TypeTree(ClassClass.tpe), paramTypes map LIT)
- /* ... */
- def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType match {
- case NO_CACHE =>
-
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)":
-
- var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
- def reflMethod$Method(forReceiver: JClass[_]): JMethod =
- forReceiver.getMethod("xyz", reflParams$Cache)
-
- */
-
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
-
- addStaticMethodToClass((_, forReceiverSym) =>
- gen.mkMethodCall(REF(forReceiverSym), Class_getMethod, Nil, List(LIT(method), REF(reflParamsCacheSym)))
- )
+ def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = {
+ /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
+ (SoftReference so that it does not interfere with classloader garbage collection,
+ see ticket #2365 for details):
- case MONO_CACHE =>
+ var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
- (but with a SoftReference wrapping reflClass$Cache, similarly in the poly Cache) :
+ var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
- var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
- var reflMethod$Cache: JMethod = null
-
- var reflClass$Cache: JClass[_] = null
-
- def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- if (reflClass$Cache != forReceiver) {
- reflMethod$Cache = forReceiver.getMethod("xyz", reflParams$Cache)
- reflClass$Cache = forReceiver
- }
- reflMethod$Cache
- }
-
- */
-
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
-
- val reflMethodCacheSym: Symbol =
- addStaticVariableToClass(nme.reflMethodCacheName, MethodClass.tpe, NULL, false)
-
- val reflClassCacheSym: Symbol =
- addStaticVariableToClass(nme.reflClassCacheName, SoftReferenceClass.tpe, NULL, false)
-
- def isCacheEmpty(receiver: Symbol): Tree =
- reflClassCacheSym.IS_NULL() OR (reflClassCacheSym.GET() OBJ_NE REF(receiver))
-
- addStaticMethodToClass((_, forReceiverSym) =>
- BLOCK(
- IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
- REF(reflMethodCacheSym) === ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym))) ,
- REF(reflClassCacheSym) === gen.mkSoftRef(REF(forReceiverSym)),
- UNIT
- ) ENDIF,
- REF(reflMethodCacheSym)
- )
- )
-
- case POLY_CACHE =>
-
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
- (SoftReference so that it does not interfere with classloader garbage collection, see ticket
- #2365 for details):
-
- var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
- var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
+ def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
+ var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
+ if (methodCache eq null) {
+ methodCache = new EmptyMethodCache
+ reflPoly$Cache = new SoftReference(methodCache)
+ }
+ var method: JMethod = methodCache.find(forReceiver)
+ if (method ne null)
+ return method
+ else {
+ method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
+ reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
+ return method
+ }
+ }
+ */
- def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
- if (methodCache eq null) {
- methodCache = new EmptyMethodCache
- reflPoly$Cache = new SoftReference(methodCache)
- }
- var method: JMethod = methodCache.find(forReceiver)
- if (method ne null)
- return method
- else {
- method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
- reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
- return method
- }
- }
+ val reflParamsCacheSym: Symbol =
+ addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
- */
+ def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
+ val reflPolyCacheSym: Symbol = addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
+ def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
- def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
- val reflPolyCacheSym: Symbol = (
- addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
- )
- def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
+ addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
+ val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
+ val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
- addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
- val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
- val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
+ BLOCK(
+ ValDef(methodCache, getPolyCache),
+ IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
+ REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
+ ) ENDIF,
+ ValDef(methodSym, (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym))),
+ IF (REF(methodSym) OBJ_NE NULL) .
+ THEN (Return(REF(methodSym)))
+ ELSE {
+ def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
+ def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
BLOCK(
- VAR(methodCache) === getPolyCache,
- IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
- REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
- REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
- ) ENDIF,
-
- VAR(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
- IF (REF(methodSym) OBJ_NE NULL) .
- THEN (Return(REF(methodSym)))
- ELSE {
- def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
- def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
- BLOCK(
- REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
- REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
- Return(REF(methodSym))
- )
- }
+ REF(methodSym) === (REF(currentRun.runDefinitions.ensureAccessibleMethod) APPLY (methodSymRHS)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
+ Return(REF(methodSym))
)
- })
-
+ }
+ )
+ })
}
/* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
def testForName(name: Name): Tree => Tree = t => (
if (nme.CommonOpNames(name))
- gen.mkMethodCall(definitions.Boxes_isNumberOrBool, t :: Nil)
+ gen.mkMethodCall(currentRun.runDefinitions.Boxes_isNumberOrBool, t :: Nil)
else if (nme.BooleanOpNames(name))
t IS_OBJ BoxedBooleanClass.tpe
else
- gen.mkMethodCall(definitions.Boxes_isNumber, t :: Nil)
+ gen.mkMethodCall(currentRun.runDefinitions.Boxes_isNumber, t :: Nil)
)
- /** The Tree => Tree function in the return is necessary to prevent the original qual
+ /* The Tree => Tree function in the return is necessary to prevent the original qual
* from being duplicated in the resulting code. It may be a side-effecting expression,
* so all the test logic is routed through gen.evalOnce, which creates a block like
* { val x$1 = qual; if (x$1.foo || x$1.bar) f1(x$1) else f2(x$1) }
@@ -284,7 +200,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else if (params.tail.isEmpty) nme.primitiveInfixMethodName(name)
else nme.NO_NAME
)
- definitions.getDeclIfDefined(BoxesRunTimeClass, methodName) match {
+ getDeclIfDefined(BoxesRunTimeClass, methodName) match {
case NoSymbol => None
case sym => assert(!sym.isOverloaded, sym) ; Some((sym, testForName(name)))
}
@@ -303,6 +219,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* ### CALLING THE APPLY ### */
def callAsReflective(paramTypes: List[Type], resType: Type): Tree = {
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+
gen.evalOnce(qual, currentOwner, unit) { qual1 =>
/* Some info about the type of the method being called. */
val methSym = ad.symbol
@@ -322,11 +241,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// If there's any chance this signature could be met by an Array.
val isArrayMethodSignature = {
def typesMatchApply = paramTypes match {
- case List(tp) => tp <:< IntClass.tpe
+ case List(tp) => tp <:< IntTpe
case _ => false
}
def typesMatchUpdate = paramTypes match {
- case List(tp1, tp2) => (tp1 <:< IntClass.tpe) && isMaybeUnit
+ case List(tp1, tp2) => (tp1 <:< IntTpe) && isMaybeUnit
case _ => false
}
@@ -357,13 +276,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else if (resultSym == ObjectClass) tree // no cast necessary
else gen.mkCast(tree, boxedResType) // cast to expected type
- /** Normal non-Array call */
+ /* Normal non-Array call */
def genDefaultCall = {
// reflective method call machinery
val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...)
def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
- def lookup = Apply(cache, List(qual1() GETCLASS)) // get Method object from cache
- def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
+ def lookup = Apply(cache, List(qual1() GETCLASS())) // get Method object from cache
+ def invokeArgs = ArrayValue(TypeTree(ObjectTpe), params) // args for invocation
def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...)
// exception catching machinery
@@ -375,7 +294,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY)
}
- /** A possible primitive method call, represented by methods in BoxesRunTime. */
+ /* A possible primitive method call, represented by methods in BoxesRunTime. */
def genValueCall(operator: Symbol) = fixResult(REF(operator) APPLY args)
def genValueCallWithTest = {
getPrimitiveReplacementForStructuralCall(methSym.name) match {
@@ -386,7 +305,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
}
- /** A native Array call. */
+ /* A native Array call. */
def genArrayCall = fixResult(
methSym.name match {
case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args)
@@ -397,9 +316,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
mustBeUnit = methSym.name == nme.update
)
- /** A conditional Array call, when we can't determine statically if the argument is
- * an Array, but the structural type method signature is consistent with an Array method
- * so we have to generate both kinds of code.
+ /* A conditional Array call, when we can't determine statically if the argument is
+ * an Array, but the structural type method signature is consistent with an Array method
+ * so we have to generate both kinds of code.
*/
def genArrayCallWithTest =
IF ((qual1() GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall
@@ -413,103 +332,88 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
}
- if (settings.refinementMethodDispatch.value == "invoke-dynamic") {
-/* val guardCallSite: Tree = {
- val cachedClass = addStaticVariableToClass("cachedClass", definitions.ClassClass.tpe, EmptyTree)
- val tmpVar = currentOwner.newVariable(ad.pos, unit.freshTermName(ad.pos, "x")).setInfo(definitions.AnyRefClass.tpe)
- atPos(ad.pos)(Block(List(
- ValDef(tmpVar, transform(qual))),
- If(Apply(Select(gen.mkAttributedRef(cachedClass), nme.EQ), List(getClass(Ident(tmpVar)))),
- Block(List(Assign(gen.mkAttributedRef(cachedClass), getClass(Ident(tmpVar)))),
- treeCopy.ApplyDynamic(ad, Ident(tmpVar), transformTrees(params))),
- EmptyTree)))
- }
- //println(guardCallSite)
-*/
- localTyper.typed(treeCopy.ApplyDynamic(ad, transform(qual), transformTrees(params)))
- }
- else {
-
- /* ### BODY OF THE TRANSFORMATION -> remember we're in case ad@ApplyDynamic(qual, params) ### */
-
- /* This creates the tree that does the reflective call (see general comment
- * on the apply-dynamic tree for its format). This tree is simply composed
- * of three successive calls, first to getClass on the callee, then to
- * getMethod on the class, then to invoke on the method.
- * - getMethod needs an array of classes for choosing one amongst many
- * overloaded versions of the method. This is provided by paramTypeClasses
- * and must be done on the static type as Scala's dispatching is static on
- * the parameters.
- * - invoke needs an array of AnyRefs that are the method's arguments. The
- * erasure phase guarantees that any parameter passed to a dynamic apply
- * is compatible (through boxing). Boxed ints et al. is what invoke expects
- * when the applied method expects ints, hence no change needed there.
- * - in the end, the result of invoke must be fixed, again to deal with arrays.
- * This is provided by fixResult. fixResult will cast the invocation's result
- * to the method's return type, which is generally ok, except when this type
- * is a value type (int et al.) in which case it must cast to the boxed version
- * because invoke only returns object and erasure made sure the result is
- * expected to be an AnyRef. */
- val t: Tree = {
- val (mparams, resType) = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- assert(params.length == mparams.length, ((params, mparams)))
- (mparams, resType)
- case tpe @ OverloadedType(pre, alts) =>
- unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
- alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
- case mt @ MethodType(mparams, resType) :: Nil =>
- unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
- (mparams, resType)
- case _ =>
- unit.error(ad.pos, "Cannot resolve overload.")
- (Nil, NoType)
- }
- }
- typedPos {
- val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
- qual = REF(sym)
-
- BLOCK(
- VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType)
- )
- }
+ {
+
+ /* ### BODY OF THE TRANSFORMATION -> remember we're in case ad@ApplyDynamic(qual, params) ### */
+
+ /* This creates the tree that does the reflective call (see general comment
+ * on the apply-dynamic tree for its format). This tree is simply composed
+ * of three successive calls, first to getClass on the callee, then to
+ * getMethod on the class, then to invoke on the method.
+ * - getMethod needs an array of classes for choosing one amongst many
+ * overloaded versions of the method. This is provided by paramTypeClasses
+ * and must be done on the static type as Scala's dispatching is static on
+ * the parameters.
+ * - invoke needs an array of AnyRefs that are the method's arguments. The
+ * erasure phase guarantees that any parameter passed to a dynamic apply
+ * is compatible (through boxing). Boxed ints et al. is what invoke expects
+ * when the applied method expects ints, hence no change needed there.
+ * - in the end, the result of invoke must be fixed, again to deal with arrays.
+ * This is provided by fixResult. fixResult will cast the invocation's result
+ * to the method's return type, which is generally ok, except when this type
+ * is a value type (int et al.) in which case it must cast to the boxed version
+ * because invoke only returns object and erasure made sure the result is
+ * expected to be an AnyRef. */
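For orientation, a hand-written Scala equivalent of what the tree built below does, assuming a hypothetical structural call `qual.foo(arg1: Int): Int`; all names in this sketch are illustrative placeholders, not part of this change:

    def reflectiveFoo(qual: AnyRef, arg1: Int): Int = {
      // getMethod: the overload is chosen via the *static* parameter types
      val method = qual.getClass.getMethod("foo", classOf[Int])
      // invoke: arguments are passed as boxed AnyRefs
      val raw = method.invoke(qual, Int.box(arg1))
      // fixResult: invoke returns Object, so cast to the boxed form and unbox
      raw.asInstanceOf[java.lang.Integer].intValue
    }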
+ val t: Tree = {
+ val (mparams, resType) = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ assert(params.length == mparams.length, ((params, mparams)))
+ (mparams, resType)
+ case tpe @ OverloadedType(pre, alts) =>
+ unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
+ alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
+ case mt @ MethodType(mparams, resType) :: Nil =>
+ unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+ (mparams, resType)
+ case _ =>
+ unit.error(ad.pos, "Cannot resolve overload.")
+ (Nil, NoType)
+ }
}
+ typedPos {
+ val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
+ qual = REF(sym)
- /* For testing purposes, the dynamic application's condition
- * can be printed-out in great detail. Remove? */
- if (settings.debug.value) {
- def paramsToString(xs: Any*) = xs map (_.toString) mkString ", "
- val mstr = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- """| with
- | - declared parameter types: '%s'
- | - passed argument types: '%s'
- | - result type: '%s'""" .
- stripMargin.format(
- paramsToString(mparams),
- paramsToString(params),
- resType.toString
- )
- case _ => ""
- }
- log(
- """Dynamically application '%s.%s(%s)' %s - resulting code: '%s'""".format(
- qual, ad.symbol.name, paramsToString(params), mstr, t
- )
+ BLOCK(
+ ValDef(sym, qual0),
+ callAsReflective(mparams map (_.tpe), resType)
)
}
+ }
- /* We return the dynamic call tree, after making sure no other
- * clean-up transformation are to be applied on it. */
- transform(t)
+ /* For testing purposes, the dynamic application's condition
+ * can be printed out in great detail. Remove? */
+ if (settings.debug) {
+ def paramsToString(xs: Any*) = xs map (_.toString) mkString ", "
+ val mstr = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ sm"""| with
+ | - declared parameter types: '${paramsToString(mparams)}'
+ | - passed argument types: '${paramsToString(params)}'
+ | - result type: '${resType.toString}'"""
+ case _ => ""
+ }
+ log(s"""Dynamic application '$qual.${ad.symbol.name}(${paramsToString(params)})' $mstr - resulting code: '$t'""")
}
- /* ### END OF DYNAMIC APPLY TRANSFORM ### */
+
+ /* We return the dynamic call tree, after making sure no other
+ * clean-up transformations are to be applied to it. */
+ transform(t)
+ /* ### END OF DYNAMIC APPLY TRANSFORM ### */
+ }
}
override def transform(tree: Tree): Tree = tree match {
+ case _: ClassDef
+ if (entryPoints != null) &&
+ genBCode.isJavaEntryPoint(tree.symbol, currentUnit)
+ =>
+ // collecting symbols for entry points here (as opposed to GenBCode where they are used)
+ // has the advantage of saving an additional pass over all ClassDefs.
+ entryPoints ::= tree.symbol
+ super.transform(tree)
+
/* Transforms dynamic calls (i.e. calls to methods that are undefined
* in the erased type space) to -- dynamically -- unsafe calls using
* reflection. This is used for structural sub-typing of refinement
@@ -555,10 +459,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- if (forMSIL) savingStatics( transformTemplate(tree) )
- else transformTemplate(tree)
+ transformTemplate(tree)
- case Literal(c) if (c.tag == ClazzTag) && !forMSIL=>
+ case Literal(c) if c.tag == ClazzTag =>
val tpe = c.typeValue
typedWithPos(tree.pos) {
if (isPrimitiveValueClass(tpe.typeSymbol)) {
@@ -571,24 +474,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else tree
}
- /* MSIL requires that the stack is empty at the end of a try-block.
- * Hence, we here rewrite all try blocks with a result != {Unit, All} such that they
- * store their result in a local variable. The catch blocks are adjusted as well.
- * The try tree is subsituted by a block whose result expression is read of that variable. */
- case theTry @ Try(block, catches, finalizer) if shouldRewriteTry(theTry) =>
- def transformTry = {
- val tpe = theTry.tpe.widen
- val tempVar = currentOwner.newVariable(mkTerm(nme.EXCEPTION_RESULT_PREFIX), theTry.pos).setInfo(tpe)
- def assignBlock(rhs: Tree) = super.transform(BLOCK(Ident(tempVar) === transform(rhs)))
-
- val newBlock = assignBlock(block)
- val newCatches = for (CaseDef(pattern, guard, body) <- catches) yield
- (CASE(super.transform(pattern)) IF (super.transform(guard))) ==> assignBlock(body)
- val newTry = Try(newBlock, newCatches, super.transform(finalizer))
-
- typedWithPos(theTry.pos)(BLOCK(VAL(tempVar) === EmptyTree, newTry, Ident(tempVar)))
- }
- transformTry
/*
* This transformation should identify Scala symbol invocations in the tree and replace them
* with references to a static member. Also, whenever a class has at least a single symbol invocation
@@ -596,18 +481,33 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* For instance, say we have a Scala class:
*
* class Cls {
- * // ...
- * def someSymbol = `symbolic
- * // ...
+ * def someSymbol1 = 'Symbolic1
+ * def someSymbol2 = 'Symbolic2
+ * def sameSymbol1 = 'Symbolic1
+ * val someSymbol3 = 'Symbolic3
* }
*
* After transformation, this class looks like this:
*
* class Cls {
- * private "static" val <some_name>$symbolic = Symbol("symbolic")
- * // ...
- * def someSymbol = <some_name>$symbolic
- * // ...
+ * private <static> var symbol$1: scala.Symbol
+ * private <static> var symbol$2: scala.Symbol
+ * private <static> var symbol$3: scala.Symbol
+ * private val someSymbol3: scala.Symbol
+ *
+ * private <static> def <clinit> = {
+ * symbol$1 = Symbol.apply("Symbolic1")
+ * symbol$2 = Symbol.apply("Symbolic2")
+ * }
+ *
+ * private def <init> = {
+ * someSymbol3 = symbol$3
+ * }
+ *
+ * def someSymbol1 = symbol$1
+ * def someSymbol2 = symbol$2
+ * def sameSymbol1 = symbol$1
+ * val someSymbol3 = someSymbol3
* }
*
* The reasoning behind this transformation is the following. Symbols get interned - they are stored
@@ -617,17 +517,17 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* is accessed only once during class loading, and after that, the unique symbol is in the static
* member. Hence, it is cheap to both reach the unique symbol and do equality checks on it.
*
- * And, finally, be advised - scala symbol literal and the Symbol class of the compiler
+ * And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler
* have little in common.
*/
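A minimal sketch of the effect of this caching, written as an instance-level equivalent (the compiler itself uses a static field initialized in <clinit>, as shown in the comment above); class and field names are illustrative:

    class ClsBefore {
      def someSymbol1 = 'Symbolic1                     // interns a Symbol on every evaluation
    }
    class ClsAfter {
      private[this] val symbol$1 = Symbol("Symbolic1") // cached once
      def someSymbol1 = symbol$1                       // each use is a plain field read
    }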
case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
def transformApply = {
- // add the symbol name to a map if it's not there already
- val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
- val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
- // create a reference to a static field
- val ntree = typedWithPos(tree.pos)(REF(staticFieldSym))
- super.transform(ntree)
+ // add the symbol name to a map if it's not there already
+ val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
+ val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
+ // create a reference to a static field
+ val ntree = typedWithPos(tree.pos)(REF(staticFieldSym))
+ super.transform(ntree)
}
transformApply
@@ -636,7 +536,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
//
// See SI-6611; we must *only* do this for literal vararg arrays.
case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _))
- if wrapRefArrayMeth.symbol == Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply =>
+ if wrapRefArrayMeth.symbol == currentRun.runDefinitions.Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply =>
super.transform(arg)
case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _)))))
if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) =>
@@ -657,12 +557,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// create a symbol for the static field
val stfieldSym = (
currentClass.newVariable(mkTerm("symbol$"), pos, PRIVATE | STATIC | SYNTHETIC | FINAL)
- setInfo SymbolClass.tpe
+ setInfoAndEnter SymbolClass.tpe
)
- currentClass.info.decls enter stfieldSym
// create field definition and initialization
- val stfieldDef = theTyper.typedPos(pos)(VAL(stfieldSym) === rhs)
+ val stfieldDef = theTyper.typedPos(pos)(ValDef(stfieldSym, rhs))
val stfieldInit = theTyper.typedPos(pos)(REF(stfieldSym) === rhs)
// add field definition to new defs
@@ -673,44 +572,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
})
}
- /* finds the static ctor DefDef tree within the template if it exists. */
- private def findStaticCtor(template: Template): Option[Tree] =
- template.body find {
- case defdef @ DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => defdef.symbol.hasStaticFlag
- case _ => false
- }
-
- /* changes the template for the class so that it contains a static constructor with symbol fields inits,
- * augments an existing static ctor if one already existed.
- */
- private def addStaticInits(template: Template): Template = {
- if (newStaticInits.isEmpty)
- template
- else {
- val newCtor = findStaticCtor(template) match {
- // in case there already were static ctors - augment existing ones
- // currently, however, static ctors aren't being generated anywhere else
- case Some(ctor @ DefDef(_,_,_,_,_,_)) =>
- // modify existing static ctor
- deriveDefDef(ctor) {
- case block @ Block(stats, expr) =>
- // need to add inits to existing block
- treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
- case term: TermTree =>
- // need to create a new block with inits and the old term
- treeCopy.Block(term, newStaticInits.toList, term)
- }
- case _ =>
- // create new static ctor
- val staticCtorSym = currentClass.newStaticConstructor(template.pos)
- val rhs = Block(newStaticInits.toList, Literal(Constant(())))
-
- localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
- }
- deriveTemplate(template)(newCtor :: _)
- }
- }
-
} // CleanUpTransformer
}
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 1a1137f402..391bce5abb 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -9,12 +9,11 @@ package transform
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
import symtab.Flags._
-import util.TreeSet
/** This phase converts classes with parameters into Java-like classes with
* fields, which are assigned to from constructors.
*/
-abstract class Constructors extends Transform with ast.TreeDSL {
+abstract class Constructors extends Statics with Transform with ast.TreeDSL {
import global._
import definitions._
@@ -24,557 +23,709 @@ abstract class Constructors extends Transform with ast.TreeDSL {
protected def newTransformer(unit: CompilationUnit): Transformer =
new ConstructorTransformer(unit)
- private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]
- private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]
+ private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]()
+ private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]()
class ConstructorTransformer(unit: CompilationUnit) extends Transformer {
- def transformClassTemplate(impl: Template): Template = {
- val clazz = impl.symbol.owner // the transformed class
- val stats = impl.body // the transformed template body
- val localTyper = typer.atOwner(impl, clazz)
-
- val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
- val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
-
- case class ConstrInfo(
- constr: DefDef, // The primary constructor
- constrParams: List[Symbol], // ... and its parameters
- constrBody: Block // ... and its body
+ /*
+ * Inspect for obvious out-of-order initialization; concrete, eager vals or vars, declared in this class,
+ * for which a reference to the member precedes its definition.
+ */
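A minimal example of the pattern this inspection is meant to flag (illustrative only):

    class ForwardRef {
      val a = b + 1   // flagged: reference to uninitialized value b
      val b = 2       // still holds its default value (0) while `a` is being initialized
    }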
+ private def checkUninitializedReads(cd: ClassDef) {
+ val stats = cd.impl.body
+ val clazz = cd.symbol
+
+ def checkableForInit(sym: Symbol) = (
+ (sym ne null)
+ && (sym.isVal || sym.isVar)
+ && !(sym hasFlag LAZY | DEFERRED | SYNTHETIC)
)
- // decompose primary constructor into the three entities above.
- val constrInfo: ConstrInfo = {
- stats find (_.symbol.isPrimaryConstructor) match {
- case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
- ConstrInfo(ddef, vparams map (_.symbol), rhs)
- case x =>
- // AnyVal constructor is OK
- assert(clazz eq AnyValClass, "no constructor in template: impl = " + impl)
- return impl
- }
- }
- import constrInfo._
-
- // The parameter accessor fields which are members of the class
- val paramAccessors = clazz.constrParamAccessors
-
- // The constructor parameter corresponding to an accessor
- def parameter(acc: Symbol): Symbol =
- parameterNamed(nme.getterName(acc.originalName))
-
- // The constructor parameter with given name. This means the parameter
- // has given name, or starts with given name, and continues with a `$` afterwards.
- def parameterNamed(name: Name): Symbol = {
- def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING)
-
- (constrParams filter matchesName) match {
- case Nil => abort(name + " not in " + constrParams)
- case p :: _ => p
- }
- }
-
- var usesSpecializedField: Boolean = false
-
- // A transformer for expressions that go into the constructor
- val intoConstructorTransformer = new Transformer {
- def isParamRef(sym: Symbol) =
- sym.isParamAccessor &&
- sym.owner == clazz &&
- !(clazz isSubClass DelayedInitClass) &&
- !(sym.isGetter && sym.accessed.isVariable) &&
- !sym.isSetter
- private def possiblySpecialized(s: Symbol) = specializeTypes.specializedTypeVars(s).nonEmpty
- override def transform(tree: Tree): Tree = tree match {
- case Apply(Select(This(_), _), List()) =>
- // references to parameter accessor methods of own class become references to parameters
- // outer accessors become references to $outer parameter
- if (isParamRef(tree.symbol) && !possiblySpecialized(tree.symbol))
- gen.mkAttributedIdent(parameter(tree.symbol.accessed)) setPos tree.pos
- else if (tree.symbol.outerSource == clazz && !clazz.isImplClass)
- gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos
- else
- super.transform(tree)
- case Select(This(_), _) if (isParamRef(tree.symbol) && !possiblySpecialized(tree.symbol)) =>
- // references to parameter accessor field of own class become references to parameters
- gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
- case Select(_, _) =>
- if (specializeTypes.specializedTypeVars(tree.symbol).nonEmpty)
- usesSpecializedField = true
- super.transform(tree)
- case _ =>
- super.transform(tree)
- }
- }
-
- // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
- def intoConstructor(oldowner: Symbol, tree: Tree) =
- intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
-
- // Should tree be moved in front of super constructor call?
- def canBeMoved(tree: Tree) = tree match {
- case ValDef(mods, _, _, _) => (mods hasFlag PRESUPER | PARAMACCESSOR)
- case _ => false
- }
-
- // Create an assignment to class field `to` with rhs `from`
- def mkAssign(to: Symbol, from: Tree): Tree =
- localTyper.typedPos(to.pos) { Assign(Select(This(clazz), to), from) }
-
- // Create code to copy parameter to parameter accessor field.
- // If parameter is $outer, check that it is not null so that we NPE
- // here instead of at some unknown future $outer access.
- def copyParam(to: Symbol, from: Symbol): Tree = {
- import CODE._
- val result = mkAssign(to, Ident(from))
-
- if (from.name != nme.OUTER ||
- from.tpe.typeSymbol.isPrimitiveValueClass) result
- else localTyper.typedPos(to.pos) {
- IF (from OBJ_EQ NULL) THEN Throw(NullPointerExceptionClass.tpe) ELSE result
+ val uninitializedVals = mutable.Set[Symbol](
+ stats collect { case vd: ValDef if checkableForInit(vd.symbol) => vd.symbol.accessedOrSelf }: _*
+ )
+ if (uninitializedVals.size > 1)
+ log("Checking constructor for init order issues among: " + uninitializedVals.toList.map(_.name.toString.trim).distinct.sorted.mkString(", "))
+
+ for (stat <- stats) {
+ // Checking the qualifier symbol is necessary to prevent a selection on
+ // another instance of the same class from potentially appearing to be a forward
+ // reference on the member in the current class.
+ def check(tree: Tree) = {
+ for (t <- tree) t match {
+ case t: RefTree if uninitializedVals(t.symbol.accessedOrSelf) && t.qualifier.symbol == clazz =>
+ unit.warning(t.pos, s"Reference to uninitialized ${t.symbol.accessedOrSelf}")
+ case _ =>
+ }
}
- }
-
- // The list of definitions that go into class
- val defBuf = new ListBuffer[Tree]
-
- // The auxiliary constructors, separate from the defBuf since they should
- // follow the primary constructor
- val auxConstructorBuf = new ListBuffer[Tree]
-
- // The list of statements that go into constructor after and including the superclass constructor call
- val constrStatBuf = new ListBuffer[Tree]
-
- // The list of early initializer statements that go into constructor before the superclass constructor call
- val constrPrefixBuf = new ListBuffer[Tree]
-
- // The early initialized field definitions of the class (these are the class members)
- val presupers = treeInfo.preSuperFields(stats)
-
- // generate code to copy pre-initialized fields
- for (stat <- constrBody.stats) {
- constrStatBuf += stat
stat match {
- case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) =>
- // stat is the constructor-local definition of the field value
- val fields = presupers filter (
- vdef => nme.localToGetter(vdef.name) == name)
- assert(fields.length == 1)
- val to = fields.head.symbol
- if (!to.tpe.isInstanceOf[ConstantType])
- constrStatBuf += mkAssign(to, Ident(stat.symbol))
- case _ =>
+ case vd: ValDef =>
+ // removing the symbol before checking the rhs allows self-referential vals; as a
+ // conservative warner we permit them, since they are possible even if rarely useful.
+ uninitializedVals -= vd.symbol.accessedOrSelf
+ if (!vd.symbol.isLazy)
+ check(vd.rhs)
+ case _: MemberDef => // skip other member defs
+ case t => check(t) // constructor body statement
}
}
- // Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
- for (stat <- stats) stat match {
- case DefDef(_,_,_,_,_,rhs) =>
- // methods with constant result type get literals as their body
- // all methods except the primary constructor go into template
- stat.symbol.tpe match {
- case MethodType(List(), tp @ ConstantType(c)) =>
- defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
- case _ =>
- if (stat.symbol.isPrimaryConstructor) ()
- else if (stat.symbol.isConstructor) auxConstructorBuf += stat
- else defBuf += stat
+ } // end of checkUninitializedReads()
+
+ override def transform(tree: Tree): Tree = {
+ tree match {
+ case cd @ ClassDef(mods0, name0, tparams0, impl0) if !cd.symbol.isInterface && !isPrimitiveValueClass(cd.symbol) =>
+ if(cd.symbol eq AnyValClass) {
+ cd
}
- case ValDef(_, _, _, rhs) =>
- // val defs with constant right-hand sides are eliminated.
- // for all other val defs, an empty valdef goes into the template and
- // the initializer goes as an assignment into the constructor
- // if the val def is an early initialized or a parameter accessor, it goes
- // before the superclass constructor call, otherwise it goes after.
- // Lazy vals don't get the assignment in the constructor.
- if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
- if (rhs != EmptyTree && !stat.symbol.isLazy) {
- val rhs1 = intoConstructor(stat.symbol, rhs);
- (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
- stat.symbol, rhs1)
- }
- defBuf += deriveValDef(stat)(_ => EmptyTree)
+ else {
+ checkUninitializedReads(cd)
+ val tplTransformer = new TemplateTransformer(unit, impl0)
+ treeCopy.ClassDef(cd, mods0, name0, tparams0, tplTransformer.transformed)
}
- case ClassDef(_, _, _, _) =>
- // classes are treated recursively, and left in the template
- defBuf += new ConstructorTransformer(unit).transform(stat)
case _ =>
- // all other statements go into the constructor
- constrStatBuf += intoConstructor(impl.symbol, stat)
+ super.transform(tree)
}
+ }
- // ----------- avoid making fields for symbols that are not accessed --------------
+ } // ConstructorTransformer
- // A sorted set of symbols that are known to be accessed outside the primary constructor.
- val accessedSyms = new TreeSet[Symbol]((x, y) => x isLess y)
+ /*
+ * Summary
+ * -------
+ *
+ * The following get elided unless they're actually needed:
+ * (a) parameter-accessor fields for non-val, non-var, constructor-param-symbols, as well as
+ * (b) outer accessors of a final class which don't override anything.
+ *
+ *
+ * Gory details
+ * ------------
+ *
+ * The constructors phase elides
+ *
+ * (a) parameter-accessor fields for non-val, non-var, constructor-param-symbols
+ * provided they're only accessed within the primary constructor;
+ *
+ * as well as
+ *
+ * (b) outer accessors directly owned by the class of interest,
+ * provided that class is final, they don't override anything, and moreover they aren't accessed anywhere.
+ * An outer accessor is backed by a param-accessor field.
+ * If an outer-accessor can be elided then its supporting field can be elided as well.
+ *
+ * Once the potential candidates for elision are known (as described above) it remains to visit
+ * those program locations where they might be accessed, and only those.
+ *
+ * What trees can be visited at this point?
+ * To recap, by the time the constructors phase runs, local definitions have been hoisted out of their original owner.
+ * Moreover, by the time elision is about to happen, the `intoConstructors` rewriting
+ * of template-level statements has taken place (the resulting trees can be found in `constrStatBuf`).
+ *
+ * That means:
+ *
+ * - nested classes are to be found in `defBuf`
+ *
+ * - value and method definitions are also in `defBuf` and none of them contains local methods or classes.
+ *
+ * - auxiliary constructors are to be found in `auxConstructorBuf`
+ *
+ * Coming back to the question which trees may contain accesses:
+ *
+ * (c) regarding parameter-accessor fields, all candidates in (a) are necessarily private-local,
+ * and thus may only be accessed from value or method definitions owned by the current class
+ * (ie there's no point drilling down into nested classes).
+ *
+ * (d) regarding candidates in (b), they are accessible from all places listed in (c) and in addition
+ * from nested classes (nested at any number of levels).
+ *
+ * In all cases, we're done with traversing as soon as all candidates have been ruled out.
+ *
+ * Finally, the whole affair of eliding is avoided for DelayedInit subclasses,
+ * given that for them usually nothing gets elided anyway.
+ * That's a consequence of re-locating the post-super-call statements from their original location
+ * (the primary constructor) into a dedicated synthetic method that an anon-closure may invoke, as required by DelayedInit.
+ *
+ */
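A small illustration of case (a), assuming nothing else in the program touches the members (class and member names are made up):

    final class ParamElided(x: Int) {
      val twice = x * 2   // `x` is read only inside the primary constructor,
    }                     // so no parameter-accessor field for `x` needs to be kept

    final class ParamKept(x: Int) {
      def double = x * 2  // `x` escapes into a method body, so its field is kept
    }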
+ private trait OmittablesHelper { self: TemplateTransformer =>
+
+ /*
+ * Initially populated with all elision candidates.
+ * Trees are traversed, and those candidates are removed which are actually needed.
+ * After that, `omittables` doesn't shrink anymore: each symbol it contains can be unlinked from clazz.info.decls.
+ */
+ val omittables = mutable.Set.empty[Symbol]
+
+ def populateOmittables() {
+
+ omittables.clear()
+
+ if(isDelayedInitSubclass) {
+ return
+ }
- // a list of outer accessor symbols and their bodies
- var outerAccessors: List[(Symbol, Tree)] = List()
+ def isParamCandidateForElision(sym: Symbol) = (sym.isParamAccessor && sym.isPrivateLocal)
+ def isOuterCandidateForElision(sym: Symbol) = (sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol)
- // Could symbol's definition be omitted, provided it is not accessed?
- // This is the case if the symbol is defined in the current class, and
- // ( the symbol is an object private parameter accessor field, or
- // the symbol is an outer accessor of a final class which does not override another outer accessor. )
- def maybeOmittable(sym: Symbol) = sym.owner == clazz && (
- sym.isParamAccessor && sym.isPrivateLocal ||
- sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol &&
- !(clazz isSubClass DelayedInitClass)
- )
+ val paramCandidatesForElision: Set[ /*Field*/ Symbol] = (clazz.info.decls.toSet filter isParamCandidateForElision)
+ val outerCandidatesForElision: Set[ /*Method*/ Symbol] = (clazz.info.decls.toSet filter isOuterCandidateForElision)
+
+ omittables ++= paramCandidatesForElision
+ omittables ++= outerCandidatesForElision
- // Is symbol known to be accessed outside of the primary constructor,
- // or is it a symbol whose definition cannot be omitted anyway?
- def mustbeKept(sym: Symbol) = !maybeOmittable(sym) || (accessedSyms contains sym)
+ val bodyOfOuterAccessor: Map[Symbol, DefDef] =
+ defBuf.collect { case dd: DefDef if outerCandidatesForElision(dd.symbol) => dd.symbol -> dd }.toMap
- // A traverser to set accessedSyms and outerAccessors
- val accessTraverser = new Traverser {
- override def traverse(tree: Tree) = {
+ // no point traversing further once omittables is empty: all candidates have already been ruled out.
+ object detectUsages extends Traverser {
+ private def markUsage(sym: Symbol) {
+ omittables -= debuglogResult("omittables -= ")(sym)
+ // recursive call to mark as needed the field supporting the outer-accessor-method.
+ bodyOfOuterAccessor get sym foreach (this traverse _.rhs)
+ }
+ override def traverse(tree: Tree): Unit = if (omittables.nonEmpty) {
+ def sym = tree.symbol
tree match {
- case DefDef(_, _, _, _, _, body)
- if (tree.symbol.isOuterAccessor && tree.symbol.owner == clazz && clazz.isEffectivelyFinal) =>
- debuglog("outerAccessors += " + tree.symbol.fullName)
- outerAccessors ::= ((tree.symbol, body))
- case Select(_, _) =>
- if (!mustbeKept(tree.symbol)) {
- debuglog("accessedSyms += " + tree.symbol.fullName)
- accessedSyms addEntry tree.symbol
- }
- super.traverse(tree)
- case _ =>
- super.traverse(tree)
+ // don't mark as "needed" the field supporting this outer-accessor, ie not just yet.
+ case _: DefDef if outerCandidatesForElision(sym) => ()
+ case _: Select if omittables(sym) => markUsage(sym) ; super.traverse(tree)
+ case _ => super.traverse(tree)
}
}
+ def walk(xs: Seq[Tree]) = xs.iterator foreach traverse
+ }
+ if (omittables.nonEmpty) {
+ detectUsages walk defBuf
+ detectUsages walk auxConstructorBuf
}
+ }
+ def mustBeKept(sym: Symbol) = !omittables(sym)
+
+ } // OmittablesHelper
+
+ /*
+ * TemplateTransformer rewrites DelayedInit subclasses.
+ * The list of statements that will end up in the primary constructor can be split into:
+ *
+ * (a) up to and including the super-constructor call.
+ * These statements can occur only in the (bytecode-level) primary constructor.
+ *
+ * (b) remaining statements
+ *
+ * The purpose of DelayedInit is leaving (b) out of the primary constructor and have their execution "delayed".
+ *
+ * The rewriting to achieve "delayed initialization" involves:
+ * (c) an additional, synthetic, public method encapsulating (b)
+ * (d) an additional, synthetic closure whose argless apply() just invokes (c)
+ * (e) after executing the statements in (a),
+ * the primary constructor instantiates (d) and passes it as argument
+ * to a `delayedInit()` invocation on the current instance.
+ * In turn, `delayedInit()` is a method defined as abstract in the `DelayedInit` trait
+ * so that it can be overridden (for an example see `scala.App`)
+ *
+ * The following helper methods prepare Trees as part of this rewriting:
+ *
+ * (f) `delayedEndpointDef()` prepares (c).
+ * A transformer, `constrStatTransformer`, is used to re-locate statements (b) from template-level
+ * to become statements in method (c). The main task here is re-formulating accesses to params
+ * of the primary constructors (to recap, (c) has zero-params) in terms of param-accessor fields.
+ * In a Delayed-Init subclass, each class-constructor gets a param-accessor field because `mustBeKept()` forces it.
+ *
+ * (g) `delayedInitClosure()` prepares (d)
+ *
+ * (h) `delayedInitCall()` prepares the `delayedInit()` invocation referred to in (e)
+ *
+ * Both (c) and (d) are added to the Template returned by `transformClassTemplate()`
+ *
+ * A note of historic interest: Previously the rewriting for DelayedInit would include in the closure body
+ * all of the delayed initialization sequence, which in turn required:
+ * - reformulating "accesses-on-this" into "accesses-on-outer", and
+ * - adding public getters and setters.
+ *
+ * @param stats the statements in (b) above
+ *
+ * @return the DefDef for (c) above
+ *
+ * */
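A simplified, user-level sketch of the protocol this rewriting targets; the real endpoint method and closure are synthetic and compiler-generated, so the names below are only illustrative:

    class LoggingDelayed extends DelayedInit {
      def delayedInit(body: => Unit): Unit = {
        println("before the delayed statements")
        body                         // runs the statements in (b), handed over as a thunk
      }
    }
    class Example extends LoggingDelayed {
      println("moved out of the constructor and passed to delayedInit")
    }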
+ private trait DelayedInitHelper { self: TemplateTransformer =>
+
+ private def delayedEndpointDef(stats: List[Tree]): DefDef = {
+
+ val methodName = currentUnit.freshTermName("delayedEndpoint$" + clazz.fullNameAsName('$').toString + "$")
+ val methodSym = clazz.newMethod(methodName, impl.pos, SYNTHETIC | FINAL)
+ methodSym setInfoAndEnter MethodType(Nil, UnitTpe)
+
+ // changeOwner needed because the `stats` contained in the DefDef were owned by the template, not long ago.
+ val blk = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol -> methodSym)
+ val delayedDD = localTyper typed { DefDef(methodSym, Nil, blk) }
+
+ delayedDD.asInstanceOf[DefDef]
+ }
+
+ private def delayedInitClosure(delayedEndPointSym: MethodSymbol): ClassDef = {
+ val satelliteClass = localTyper.typed {
+ atPos(impl.pos) {
+ val closureClass = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
+ val closureParents = List(AbstractFunctionClass(0).tpe)
+
+ closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
+
+ val outerField: TermSymbol = (
+ closureClass
+ newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
+ setInfoAndEnter clazz.tpe
+ )
+ val applyMethod: MethodSymbol = (
+ closureClass
+ newMethod(nme.apply, impl.pos, FINAL)
+ setInfoAndEnter MethodType(Nil, ObjectTpe)
+ )
+ val outerFieldDef = ValDef(outerField)
+ val closureClassTyper = localTyper.atOwner(closureClass)
+ val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
+
+ def applyMethodStat =
+ applyMethodTyper.typed {
+ atPos(impl.pos) {
+ val receiver = Select(This(closureClass), outerField)
+ Apply(Select(receiver, delayedEndPointSym), Nil)
+ }
+ }
- // first traverse all definitions except outeraccesors
- // (outeraccessors are avoided in accessTraverser)
- for (stat <- defBuf.iterator ++ auxConstructorBuf.iterator)
- accessTraverser.traverse(stat)
-
- // then traverse all bodies of outeraccessors which are accessed themselves
- // note: this relies on the fact that an outer accessor never calls another
- // outer accessor in the same class.
- for ((accSym, accBody) <- outerAccessors)
- if (mustbeKept(accSym)) accessTraverser.traverse(accBody)
-
- // Initialize all parameters fields that must be kept.
- val paramInits = paramAccessors filter mustbeKept map { acc =>
- // Check for conflicting symbol amongst parents: see bug #1960.
- // It would be better to mangle the constructor parameter name since
- // it can only be used internally, but I think we need more robust name
- // mangling before we introduce more of it.
- val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
- if (conflict ne NoSymbol)
- unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
-
- copyParam(acc, parameter(acc))
+ val applyMethodDef = DefDef(
+ sym = applyMethod,
+ vparamss = ListOfNil,
+ rhs = Block(applyMethodStat, gen.mkAttributedRef(BoxedUnit_UNIT)))
+
+ ClassDef(
+ sym = closureClass,
+ constrMods = Modifiers(0),
+ vparamss = List(List(outerFieldDef)),
+ body = applyMethodDef :: Nil,
+ superPos = impl.pos)
+ }
}
- /** Return a single list of statements, merging the generic class constructor with the
- * specialized stats. The original statements are retyped in the current class, and
- * assignments to generic fields that have a corresponding specialized assignment in
- * `specializedStats` are replaced by the specialized assignment.
- */
- def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = {
- val specBuf = new ListBuffer[Tree]
- specBuf ++= specializedStats
-
- def specializedAssignFor(sym: Symbol): Option[Tree] =
- specializedStats find {
- case Assign(sel @ Select(This(_), _), rhs) =>
- ( (sel.symbol hasFlag SPECIALIZED)
- && (nme.unspecializedName(nme.localToGetter(sel.symbol.name)) == nme.localToGetter(sym.name))
- )
- case _ => false
- }
+ satelliteClass.asInstanceOf[ClassDef]
+ }
- /** Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array.
- * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type
- * variable, but after specialization this is a concrete primitive type, so it would
- * be an error to pass it to array_update(.., .., Object).
- */
- def rewriteArrayUpdate(tree: Tree): Tree = {
- val adapter = new Transformer {
- override def transform(t: Tree): Tree = t match {
- case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod =>
- localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v)))
- case _ => super.transform(t)
- }
- }
- adapter.transform(tree)
- }
+ private def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
+ gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
+ }
- log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n"))
- val res = for (s <- originalStats; stat = s.duplicate) yield {
- log("merge: looking at " + stat)
- val stat1 = stat match {
- case Assign(sel @ Select(This(_), field), _) =>
- specializedAssignFor(sel.symbol).getOrElse(stat)
- case _ => stat
- }
- if (stat1 ne stat) {
- log("replaced " + stat + " with " + stat1)
- specBuf -= stat1
- }
+ def rewriteDelayedInit() {
+ /* XXX This is not correct: remainingConstrStats.nonEmpty excludes too much,
+ * but excluding it includes too much. The constructor sequence being mimicked
+ * needs to be reproduced with total fidelity.
+ *
+ * See test case files/run/bug4680.scala, the output of which is wrong in many
+ * particulars.
+ */
+ val needsDelayedInit = (isDelayedInitSubclass && remainingConstrStats.nonEmpty)
- if (stat1 eq stat) {
- assert(ctorParams(genericClazz).length == constrParams.length)
- // this is just to make private fields public
- (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrParams, null, true))(stat1)
-
- val stat2 = rewriteArrayUpdate(stat1)
- // statements coming from the original class need retyping in the current context
- debuglog("retyping " + stat2)
-
- val d = new specializeTypes.Duplicator(Map[Symbol, Type]())
- d.retyped(localTyper.context1.asInstanceOf[d.Context],
- stat2,
- genericClazz,
- clazz,
- Map.empty)
- } else
- stat1
+ if (needsDelayedInit) {
+ val delayedHook: DefDef = delayedEndpointDef(remainingConstrStats)
+ defBuf += delayedHook
+ val hookCallerClass = {
+ // transform to make the closure-class' default constructor assign the outer instance to its param-accessor field.
+ val drillDown = new ConstructorTransformer(unit)
+ drillDown transform delayedInitClosure(delayedHook.symbol.asInstanceOf[MethodSymbol])
}
- if (specBuf.nonEmpty)
- println("residual specialized constructor statements: " + specBuf)
- res
+ defBuf += hookCallerClass
+ remainingConstrStats = delayedInitCall(hookCallerClass) :: Nil
}
+ }
+
+ } // DelayedInitHelper
+
+ private trait GuardianOfCtorStmts { self: TemplateTransformer =>
+
+ /* Return a single list of statements, merging the generic class constructor with the
+ * specialized stats. The original statements are retyped in the current class, and
+ * assignments to generic fields that have a corresponding specialized assignment in
+ * `specializedStats` are replaced by the specialized assignment.
+ */
+ private def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = {
+ val specBuf = new ListBuffer[Tree]
+ specBuf ++= specializedStats
+
+ def specializedAssignFor(sym: Symbol): Option[Tree] =
+ specializedStats find {
+ case Assign(sel @ Select(This(_), _), _) =>
+ sel.symbol.isSpecialized && (nme.unspecializedName(sel.symbol.getterName) == sym.getterName)
+ case _ => false
+ }
- /** Add an 'if' around the statements coming after the super constructor. This
- * guard is necessary if the code uses specialized fields. A specialized field is
- * initialized in the subclass constructor, but the accessors are (already) overridden
- * and pointing to the (empty) fields. To fix this, a class with specialized fields
- * will not run its constructor statements if the instance is specialized. The specialized
- * subclass includes a copy of those constructor statements, and runs them. To flag that a class
- * has specialized fields, and their initialization should be deferred to the subclass, method
- * 'specInstance$' is added in phase specialize.
+ /* Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array.
+ * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type
+ * variable, but after specialization this is a concrete primitive type, so it would
+ * be an error to pass it to array_update(.., .., Object).
*/
- def guardSpecializedInitializer(stats: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats else {
- // split the statements in presuper and postsuper
- // var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
- // if (postfix.nonEmpty) {
- // prefix = prefix :+ postfix.head
- //postfix = postfix.tail
- //}
-
- if (usesSpecializedField && shouldGuard && stats.nonEmpty) {
- // save them for duplication in the specialized subclass
- guardedCtorStats(clazz) = stats
- ctorParams(clazz) = constrParams
-
- val tree =
- If(
- Apply(
- CODE.NOT (
- Apply(gen.mkAttributedRef(specializedFlag), List())),
- List()),
- Block(stats, Literal(Constant())),
- EmptyTree)
-
- List(localTyper.typed(tree))
- }
- else if (clazz.hasFlag(SPECIALIZED)) {
- // add initialization from its generic class constructor
- val genericName = nme.unspecializedName(clazz.name)
- val genericClazz = clazz.owner.info.decl(genericName.toTypeName)
- assert(genericClazz != NoSymbol, clazz)
-
- guardedCtorStats.get(genericClazz) match {
- case Some(stats1) => mergeConstructors(genericClazz, stats1, stats)
- case None => stats
+ def rewriteArrayUpdate(tree: Tree): Tree = {
+ val arrayUpdateMethod = currentRun.runDefinitions.arrayUpdateMethod
+ val adapter = new Transformer {
+ override def transform(t: Tree): Tree = t match {
+ case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod =>
+ localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v)))
+ case _ => super.transform(t)
}
- } else stats
- }
-/*
- def isInitDef(stat: Tree) = stat match {
- case dd: DefDef => dd.symbol == delayedInitMethod
- case _ => false
+ }
+ adapter.transform(tree)
}
-*/
- /** Create a getter or a setter and enter into `clazz` scope
- */
- def addAccessor(sym: Symbol, name: TermName, flags: Long) = {
- val m = clazz.newMethod(name, sym.pos, flags & ~(LOCAL | PRIVATE)) setPrivateWithin clazz
- clazz.info.decls enter m
- }
+ log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n"))
+ val res = for (s <- originalStats; stat = s.duplicate) yield {
+ log("merge: looking at " + stat)
+ val stat1 = stat match {
+ case Assign(sel @ Select(This(_), field), _) =>
+ specializedAssignFor(sel.symbol).getOrElse(stat)
+ case _ => stat
+ }
+ if (stat1 ne stat) {
+ log("replaced " + stat + " with " + stat1)
+ specBuf -= stat1
+ }
- def addGetter(sym: Symbol): Symbol = {
- val getr = addAccessor(
- sym, nme.getterName(sym.name), getterFlags(sym.flags))
- getr setInfo MethodType(List(), sym.tpe)
- defBuf += localTyper.typedPos(sym.pos)(DefDef(getr, Select(This(clazz), sym)))
- getr
+ if (stat1 eq stat) {
+ assert(ctorParams(genericClazz).length == constrInfo.constrParams.length)
+ // this is just to make private fields public
+ (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrInfo.constrParams, null, true))(stat1)
+
+ val stat2 = rewriteArrayUpdate(stat1)
+ // statements coming from the original class need retyping in the current context
+ debuglog("retyping " + stat2)
+
+ val d = new specializeTypes.Duplicator(Map[Symbol, Type]())
+ d.retyped(localTyper.context1.asInstanceOf[d.Context],
+ stat2,
+ genericClazz,
+ clazz,
+ Map.empty)
+ } else
+ stat1
}
-
- def addSetter(sym: Symbol): Symbol = {
- sym setFlag MUTABLE
- val setr = addAccessor(
- sym, nme.getterToSetter(nme.getterName(sym.name)), setterFlags(sym.flags))
- setr setInfo MethodType(setr.newSyntheticValueParams(List(sym.tpe)), UnitClass.tpe)
- defBuf += localTyper.typed {
- //util.trace("adding setter def for "+setr) {
- atPos(sym.pos) {
- DefDef(setr, paramss =>
- Assign(Select(This(clazz), sym), Ident(paramss.head.head)))
- }//}
+ if (specBuf.nonEmpty)
+ println("residual specialized constructor statements: " + specBuf)
+ res
+ }
+
+ /* Add an 'if' around the statements coming after the super constructor. This
+ * guard is necessary if the code uses specialized fields. A specialized field is
+ * initialized in the subclass constructor, but the accessors are (already) overridden
+ * and pointing to the (empty) fields. To fix this, a class with specialized fields
+ * will not run its constructor statements if the instance is specialized. The specialized
+ * subclass includes a copy of those constructor statements, and runs them. To flag that a class
+ * has specialized fields, and their initialization should be deferred to the subclass, method
+ * 'specInstance$' is added in phase specialize.
+ */
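A minimal sketch of the guarding idea; `specInstance$` is the flag method added in phase specialize (see above), and the emitted tree has the shape `if (!specInstance$()) { <stats> }`:

    // generic constructor statements run only when the instance is NOT specialized;
    // a specialized subclass executes its own merged copy of those statements instead
    def guardSpecialized(specInstance: () => Boolean)(genericCtorStats: => Unit): Unit =
      if (!specInstance()) genericCtorStats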
+ def guardSpecializedInitializer(stats: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats else {
+ // // split the statements in presuper and postsuper
+ // var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
+ // if (postfix.nonEmpty) {
+ // prefix = prefix :+ postfix.head
+ // postfix = postfix.tail
+ // }
+
+ if (shouldGuard && usesSpecializedField && stats.nonEmpty) {
+ // save them for duplication in the specialized subclass
+ guardedCtorStats(clazz) = stats
+ ctorParams(clazz) = constrInfo.constrParams
+
+ val tree =
+ If(
+ Apply(
+ CODE.NOT (
+ Apply(gen.mkAttributedRef(specializedFlag), List())),
+ List()),
+ Block(stats, Literal(Constant(()))),
+ EmptyTree)
+
+ List(localTyper.typed(tree))
+ }
+ else if (clazz.hasFlag(SPECIALIZED)) {
+ // add initialization from its generic class constructor
+ val genericName = nme.unspecializedName(clazz.name)
+ val genericClazz = clazz.owner.info.decl(genericName.toTypeName)
+ assert(genericClazz != NoSymbol, clazz)
+
+ guardedCtorStats.get(genericClazz) match {
+ case Some(stats1) => mergeConstructors(genericClazz, stats1, stats)
+ case None => stats
}
- setr
+ } else stats
+ }
+
+ } // GuardianOfCtorStmts
+
+ private class TemplateTransformer(val unit: CompilationUnit, val impl: Template)
+ extends StaticsTransformer
+ with DelayedInitHelper
+ with OmittablesHelper
+ with GuardianOfCtorStmts {
+
+ val clazz = impl.symbol.owner // the transformed class
+ val stats = impl.body // the transformed template body
+ val localTyper = typer.atOwner(impl, clazz)
+
+ val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
+ val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
+
+ val isDelayedInitSubclass = (clazz isSubClass DelayedInitClass)
+
+ case class ConstrInfo(
+ constr: DefDef, // The primary constructor
+ constrParams: List[Symbol], // ... and its parameters
+ constrBody: Block // ... and its body
+ )
+ // decompose primary constructor into the three entities above.
+ val constrInfo: ConstrInfo = {
+ val ddef = (stats find (_.symbol.isPrimaryConstructor))
+ ddef match {
+ case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
+ ConstrInfo(ddef, vparams map (_.symbol), rhs)
+ case x =>
+ abort("no constructor in template: impl = " + impl)
}
+ }
+ import constrInfo._
- def ensureAccessor(sym: Symbol)(acc: => Symbol) =
- if (sym.owner == clazz && !sym.isMethod && sym.isPrivate) { // there's an access to a naked field of the enclosing class
- var getr = acc
- getr makeNotPrivate clazz
- getr
- } else {
- if (sym.owner == clazz) sym makeNotPrivate clazz
- NoSymbol
- }
+ // The parameter accessor fields which are members of the class
+ val paramAccessors = clazz.constrParamAccessors
- def ensureGetter(sym: Symbol): Symbol = ensureAccessor(sym) {
- val getr = sym.getter(clazz)
- if (getr != NoSymbol) getr else addGetter(sym)
- }
+ // The constructor parameter corresponding to an accessor
+ def parameter(acc: Symbol): Symbol = parameterNamed(acc.unexpandedName.getterName)
- def ensureSetter(sym: Symbol): Symbol = ensureAccessor(sym) {
- var setr = sym.setter(clazz, hasExpandedName = false)
- if (setr == NoSymbol) setr = sym.setter(clazz, hasExpandedName = true)
- if (setr == NoSymbol) setr = addSetter(sym)
- setr
+ // The constructor parameter with given name. This means the parameter
+ // has given name, or starts with given name, and continues with a `$` afterwards.
+ def parameterNamed(name: Name): Symbol = {
+ def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING)
+
+ (constrParams filter matchesName) match {
+ case Nil => abort(name + " not in " + constrParams)
+ case p :: _ => p
}
+ }
+
+ /*
+ * `usesSpecializedField` makes a difference in deciding whether constructor-statements
+ * should be guarded in a `shouldGuard` class, ie in a class that's the generic super-class of
+ * one or more specialized sub-classes.
+ *
+ * Given that `usesSpecializedField` isn't read for any other purpose than the one described above,
+ * we skip setting `usesSpecializedField` in case the current class isn't `shouldGuard` to start with.
+ * That way, trips to a map in `specializeTypes` are saved.
+ */
+ var usesSpecializedField: Boolean = false
+
+ // A transformer for expressions that go into the constructor
+ private class IntoCtorTransformer extends Transformer {
+
+ private def isParamRef(sym: Symbol) = (sym.isParamAccessor && sym.owner == clazz)
+
+ // Terminology: a stationary location is never written after being read.
+ private def isStationaryParamRef(sym: Symbol) = (
+ isParamRef(sym) &&
+ !(sym.isGetter && sym.accessed.isVariable) &&
+ !sym.isSetter
+ )
- def delayedInitClosure(stats: List[Tree]) =
- localTyper.typed {
- atPos(impl.pos) {
- val closureClass = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
- val closureParents = List(AbstractFunctionClass(0).tpe)
-
- closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
-
- val outerField = (
- closureClass
- newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
- setInfoAndEnter clazz.tpe
- )
- val applyMethod = (
- closureClass
- newMethod(nme.apply, impl.pos, FINAL)
- setInfoAndEnter MethodType(Nil, ObjectClass.tpe)
- )
- val outerFieldDef = ValDef(outerField)
- val closureClassTyper = localTyper.atOwner(closureClass)
- val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
-
- val constrStatTransformer = new Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case This(_) if tree.symbol == clazz =>
- applyMethodTyper.typed {
- atPos(tree.pos) {
- Select(This(closureClass), outerField)
- }
- }
- case _ =>
- super.transform {
- tree match {
- case Select(qual, _) =>
- val getter = ensureGetter(tree.symbol)
- if (getter != NoSymbol)
- applyMethodTyper.typed {
- atPos(tree.pos) {
- Apply(Select(qual, getter), List())
- }
- }
- else tree
- case Assign(lhs @ Select(qual, _), rhs) =>
- val setter = ensureSetter(lhs.symbol)
- if (setter != NoSymbol)
- applyMethodTyper.typed {
- atPos(tree.pos) {
- Apply(Select(qual, setter), List(rhs))
- }
- }
- else tree
- case _ =>
- tree.changeOwner(impl.symbol -> applyMethod)
- }
- }
- }
- }
+ private def possiblySpecialized(s: Symbol) = specializeTypes.specializedTypeVars(s).nonEmpty
- def applyMethodStats = constrStatTransformer.transformTrees(stats)
+ /*
+ * whether `sym` denotes a param-accessor (ie a field) that fulfills all of:
+ * (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and
+ * (b) isn't subject to specialization. We might be processing statements for:
+ * (b.1) the constructor in the generic (super-)class; or
+ * (b.2) the constructor in the specialized (sub-)class.
+ * (c) isn't part of a DelayedInit subclass.
+ */
+ private def canBeSupplanted(sym: Symbol) = (!isDelayedInitSubclass && isStationaryParamRef(sym) && !possiblySpecialized(sym))
+
+ override def transform(tree: Tree): Tree = tree match {
+
+ case Apply(Select(This(_), _), List()) =>
+ // references to parameter accessor methods of own class become references to parameters
+ // outer accessors become references to $outer parameter
+ if (canBeSupplanted(tree.symbol))
+ gen.mkAttributedIdent(parameter(tree.symbol.accessed)) setPos tree.pos
+ else if (tree.symbol.outerSource == clazz && !clazz.isImplClass)
+ gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos
+ else
+ super.transform(tree)
- val applyMethodDef = DefDef(
- sym = applyMethod,
- vparamss = ListOfNil,
- rhs = Block(applyMethodStats, gen.mkAttributedRef(BoxedUnit_UNIT)))
+ case Select(This(_), _) if canBeSupplanted(tree.symbol) =>
+ // references to parameter accessor field of own class become references to parameters
+ gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
- ClassDef(
- sym = closureClass,
- constrMods = Modifiers(0),
- vparamss = List(List(outerFieldDef)),
- argss = ListOfNil,
- body = List(applyMethodDef),
- superPos = impl.pos)
+ case Select(_, _) if shouldGuard => // reasoning behind this guard in the docu of `usesSpecializedField`
+ if (possiblySpecialized(tree.symbol)) {
+ usesSpecializedField = true
}
- }
+ super.transform(tree)
- def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
- gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
+ case _ =>
+ super.transform(tree)
}
- /** Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
- def splitAtSuper(stats: List[Tree]) = {
- def isConstr(tree: Tree): Boolean = tree match {
- case Block(_, expr) => isConstr(expr) // SI-6481 account for named argument blocks
- case _ => (tree.symbol ne null) && tree.symbol.isConstructor
+ }
+
+ private val intoConstructorTransformer = new IntoCtorTransformer
+
+ // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
+ def intoConstructor(oldowner: Symbol, tree: Tree) =
+ intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
+
+ // Should tree be moved in front of super constructor call?
+ def canBeMoved(tree: Tree) = tree match {
+ case ValDef(mods, _, _, _) => (mods hasFlag PRESUPER | PARAMACCESSOR)
+ case _ => false
+ }
+
+ // Create an assignment to class field `to` with rhs `from`
+ def mkAssign(to: Symbol, from: Tree): Tree =
+ localTyper.typedPos(to.pos) { Assign(Select(This(clazz), to), from) }
+
+ // Create code to copy parameter to parameter accessor field.
+ // If parameter is $outer, check that it is not null so that we NPE
+ // here instead of at some unknown future $outer access.
+ def copyParam(to: Symbol, from: Symbol): Tree = {
+ import CODE._
+ val result = mkAssign(to, Ident(from))
+
+ if (from.name != nme.OUTER ||
+ from.tpe.typeSymbol.isPrimitiveValueClass) result
+ else localTyper.typedPos(to.pos) {
+ // `throw null` has the same effect as `throw new NullPointerException`, see JVM spec on instruction `athrow`
+ IF (from OBJ_EQ NULL) THEN Throw(gen.mkZero(ThrowableTpe)) ELSE result
+ }
+ }
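For the $outer check above, a hand-written equivalent; `throw (null: Throwable)` mirrors `gen.mkZero(ThrowableTpe)` and surfaces as a NullPointerException per the JVM `athrow` rule cited in the comment:

    def checkedOuter(outer: AnyRef): AnyRef =
      if (outer eq null) throw (null: Throwable)  // NPE here, at construction time,
      else outer                                  // rather than at some later $outer access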
+
+ // The list of definitions that go into class
+ val defBuf = new ListBuffer[Tree]
+
+ // The auxiliary constructors, separate from the defBuf since they should
+ // follow the primary constructor
+ val auxConstructorBuf = new ListBuffer[Tree]
+
+ // The list of statements that go into the constructor after and including the superclass constructor call
+ val constrStatBuf = new ListBuffer[Tree]
+
+ // The list of early initializer statements that go into constructor before the superclass constructor call
+ val constrPrefixBuf = new ListBuffer[Tree]
+
+ // The early initialized field definitions of the class (these are the class members)
+ val presupers = treeInfo.preSuperFields(stats)
+
+ // The list of statements that go into the class initializer
+ val classInitStatBuf = new ListBuffer[Tree]
+
+ // generate code to copy pre-initialized fields
+ for (stat <- constrBody.stats) {
+ constrStatBuf += stat
+ stat match {
+ case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) =>
+ // stat is the constructor-local definition of the field value
+ val fields = presupers filter (_.getterName == name)
+ assert(fields.length == 1)
+ val to = fields.head.symbol
+ if (!to.tpe.isInstanceOf[ConstantType])
+ constrStatBuf += mkAssign(to, Ident(stat.symbol))
+ case _ =>
+ }
+ }
+
+ // Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
+ for (stat <- stats) stat match {
+ case DefDef(_,_,_,_,_,rhs) =>
+ // methods with constant result type get literals as their body
+ // all methods except the primary constructor go into template
+ stat.symbol.tpe match {
+ case MethodType(List(), tp @ ConstantType(c)) =>
+ defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
+ case _ =>
+ if (stat.symbol.isPrimaryConstructor) ()
+ else if (stat.symbol.isConstructor) auxConstructorBuf += stat
+ else defBuf += stat
+ }
+ case ValDef(mods, _, _, rhs) if !mods.hasStaticFlag =>
+ // val defs with constant right-hand sides are eliminated.
+ // for all other val defs, an empty valdef goes into the template and
+ // the initializer goes as an assignment into the constructor
+ // if the val def is an early initialized or a parameter accessor, it goes
+ // before the superclass constructor call, otherwise it goes after.
+ // Lazy vals don't get the assignment in the constructor.
+ if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
+ if (rhs != EmptyTree && !stat.symbol.isLazy) {
+ val rhs1 = intoConstructor(stat.symbol, rhs)
+ (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
+ stat.symbol, rhs1)
+ }
+ defBuf += deriveValDef(stat)(_ => EmptyTree)
}
- val (pre, rest0) = stats span (!isConstr(_))
- val (supercalls, rest) = rest0 span (isConstr(_))
- (pre ::: supercalls, rest)
+ case ValDef(_, _, _, rhs) =>
+ // Add static initializer statements to classInitStatBuf and remove the rhs from the val def.
+ classInitStatBuf += mkAssign(stat.symbol, rhs)
+ defBuf += deriveValDef(stat)(_ => EmptyTree)
+
+ case ClassDef(_, _, _, _) =>
+ // classes are treated recursively, and left in the template
+ defBuf += new ConstructorTransformer(unit).transform(stat)
+ case _ =>
+ // all other statements go into the constructor
+ constrStatBuf += intoConstructor(impl.symbol, stat)
+ }
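+ // Illustrative sketch (assumed source, not taken from this patch): for
+ //   class C(val i: Int) { val j = i + 1; println("ctor"); def f = j }
+ // the triage above roughly leaves `def f` and an empty `val j` definition in defBuf, while the
+ // assignment `this.j = i + 1` and the `println("ctor")` call move into constrStatBuf.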
+
+ populateOmittables()
+
+ // Initialize all parameter fields that must be kept.
+ val paramInits = paramAccessors filter mustBeKept map { acc =>
+ // Check for conflicting symbol amongst parents: see bug #1960.
+ // It would be better to mangle the constructor parameter name since
+ // it can only be used internally, but I think we need more robust name
+ // mangling before we introduce more of it.
+ val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
+ if (conflict ne NoSymbol)
+ unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
+
+ copyParam(acc, parameter(acc))
+ }
+
+ /* Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
+ def splitAtSuper(stats: List[Tree]) = {
+ def isConstr(tree: Tree): Boolean = tree match {
+ case Block(_, expr) => isConstr(expr) // SI-6481 account for named argument blocks
+ case _ => (tree.symbol ne null) && tree.symbol.isConstructor
}
+ val (pre, rest0) = stats span (!isConstr(_))
+ val (supercalls, rest) = rest0 span (isConstr(_))
+ (pre ::: supercalls, rest)
+ }
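+ // For example (a sketch): given constructor statements
+ //   List(<early defs>, Super.<init>(), println("after"))
+ // splitAtSuper returns (List(<early defs>, Super.<init>()), List(println("after"))),
+ // keeping everything up to and including the super/trait constructor calls in the first half.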
- var (uptoSuperStats, remainingConstrStats) = splitAtSuper(constrStatBuf.toList)
+ val (uptoSuperStats, remainingConstrStats0) = splitAtSuper(constrStatBuf.toList)
+ var remainingConstrStats = remainingConstrStats0
- /** XXX This is not corect: remainingConstrStats.nonEmpty excludes too much,
- * but excluding it includes too much. The constructor sequence being mimicked
- * needs to be reproduced with total fidelity.
- *
- * See test case files/run/bug4680.scala, the output of which is wrong in many
- * particulars.
- */
- val needsDelayedInit =
- (clazz isSubClass DelayedInitClass) /*&& !(defBuf exists isInitDef)*/ && remainingConstrStats.nonEmpty
+ rewriteDelayedInit()
- if (needsDelayedInit) {
- val dicl = new ConstructorTransformer(unit) transform delayedInitClosure(remainingConstrStats)
- defBuf += dicl
- remainingConstrStats = List(delayedInitCall(dicl))
- }
+ // Assemble final constructor
+ defBuf += deriveDefDef(constr)(_ =>
+ treeCopy.Block(
+ constrBody,
+ paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
+ guardSpecializedInitializer(remainingConstrStats),
+ constrBody.expr))
- // Assemble final constructor
- defBuf += deriveDefDef(constr)(_ =>
- treeCopy.Block(
- constrBody,
- paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
- guardSpecializedInitializer(remainingConstrStats),
- constrBody.expr))
+ // Followed by any auxiliary constructors
+ defBuf ++= auxConstructorBuf
- // Followed by any auxiliary constructors
- defBuf ++= auxConstructorBuf
+ // Unlink all fields that can be dropped from class scope
+ for (sym <- clazz.info.decls ; if !mustBeKept(sym))
+ clazz.info.decls unlink sym
- // Unlink all fields that can be dropped from class scope
- for (sym <- clazz.info.decls ; if !mustbeKept(sym))
- clazz.info.decls unlink sym
+ // Eliminate all field definitions that can be dropped from template
+ val templateWithoutOmittables: Template = deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustBeKept(stat.symbol)))
+ // Add the static initializers
+ val transformed: Template = addStaticInits(templateWithoutOmittables, classInitStatBuf, localTyper)
- // Eliminate all field definitions that can be dropped from template
- deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustbeKept(stat.symbol)))
- } // transformClassTemplate
+ } // TemplateTransformer
- override def transform(tree: Tree): Tree =
- tree match {
- case ClassDef(_,_,_,_) if !tree.symbol.isInterface && !isPrimitiveValueClass(tree.symbol) =>
- deriveClassDef(tree)(transformClassTemplate)
- case _ =>
- super.transform(tree)
- }
- } // ConstructorTransformer
}
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
new file mode 100644
index 0000000000..933a2f70a1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -0,0 +1,467 @@
+package scala.tools.nsc
+package transform
+
+import symtab._
+import Flags._
+import scala.collection._
+import scala.language.postfixOps
+import scala.reflect.internal.Symbols
+import scala.collection.mutable.LinkedHashMap
+
+/**
+ * This transformer is responsible for turning lambdas into anonymous classes.
+ * The main assumption it makes is that a lambda {args => body} has been turned into
+ * {args => liftedBody()} where lifted body is a top level method that implements the body of the lambda.
+ * Currently Uncurry is responsible for that transformation.
+ *
+ * From a lambda, Delambdafy will create
+ * 1) a static forwarder at the top level of the class that contained the lambda
+ * 2) a new top level class that
+ * a) has fields and a constructor taking the captured environment (including possibly the "this"
+ * reference)
+ * b) an apply method that calls the static forwarder
+ * c) if needed a bridge method for the apply method
+ * 3) an instantiation of the newly created class which replaces the lambda
+ *
+ * TODO the main work left to be done is to plug into specialization. Primarily that means choosing a
+ * specialized FunctionN trait instead of the generic FunctionN trait as a parent and creating the
+ * appropriately named applysp method
+ */
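+// Illustrative sketch of the shapes involved (assumed names, not the exact trees produced):
+// after uncurry, a lambda in class Foo looks roughly like
+//   (x: Int) => Foo.this.anonfun$1(x)          // anonfun$1 is the lifted body method
+// and this phase would roughly rewrite it to
+//   new Foo$lambda$1(Foo.this)                 // instantiation replacing the lambda
+// while adding to Foo a static forwarder along the lines of
+//   def accessor$1(self: Foo, x: Int) = self.anonfun$1(x)
+// and emitting a new top-level class roughly of the form
+//   final class Foo$lambda$1(self: Foo) extends AbstractFunction1[Int, R] {
+//     def apply(x: Int): R = Foo.accessor$1(self, x)
+//     // plus an apply(x: Object): Object bridge when the erased signatures differ
+//   }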
+abstract class Delambdafy extends Transform with TypingTransformers with ast.TreeDSL with TypeAdaptingTransformer {
+ import global._
+ import definitions._
+ import CODE._
+
+ val analyzer: global.analyzer.type = global.analyzer
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "delambdafy"
+
+ protected def newTransformer(unit: CompilationUnit): Transformer =
+ new DelambdafyTransformer(unit)
+
+ class DelambdafyTransformer(unit: CompilationUnit) extends TypingTransformer(unit) with TypeAdapter {
+ private val lambdaClassDefs = new mutable.LinkedHashMap[Symbol, List[Tree]] withDefaultValue Nil
+
+
+ val typer = localTyper
+
+ // we need to know which methods refer to the 'this' reference so that we can determine
+ // which lambdas need access to it
+ val thisReferringMethods: Set[Symbol] = {
+ val thisReferringMethodsTraverser = new ThisReferringMethodsTraverser()
+ thisReferringMethodsTraverser traverse unit.body
+ val methodReferringMap = thisReferringMethodsTraverser.liftedMethodReferences
+ val referrers = thisReferringMethodsTraverser.thisReferringMethods
+ // recursively find methods that refer to 'this' directly or indirectly via references to other methods
+ // for each method found add it to the referrers set
+ def refersToThis(symbol: Symbol): Boolean = {
+ if (referrers contains symbol) true
+ else if (methodReferringMap(symbol) exists refersToThis) {
+ // add it early to memoize
+ debuglog(s"$symbol indirectly refers to 'this'")
+ referrers += symbol
+ true
+ } else false
+ }
+ methodReferringMap.keys foreach refersToThis
+ referrers
+ }
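+ // For example (a sketch): if lifted body anonfun$2 contains a nested lambda whose lifted
+ // body anonfun$1 uses 'this' directly, then refersToThis(anonfun$2) is also true, so a
+ // lambda targeting anonfun$2 still gets a 'this' proxy in its anonymous class.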
+
+ val accessorMethods = mutable.ArrayBuffer[Tree]()
+
+ // the result of the transformFunction method. A class definition for the lambda, an expression
+ // instantiating the lambda class, and an accessor method for the lambda class to be able to
+ // call the implementation
+ case class TransformedFunction(lambdaClassDef: ClassDef, newExpr: Tree, accessorMethod: Tree)
+
+ // here's the main entry point of the transform
+ override def transform(tree: Tree): Tree = tree match {
+ // the main thing we care about is lambdas
+ case fun @ Function(_, _) =>
+ // a lambda becomes a new class, an instantiation expression, and an
+ // accessor method
+ val TransformedFunction(lambdaClassDef, newExpr, accessorMethod) = transformFunction(fun)
+ // we'll add accessor methods to the current template later
+ accessorMethods += accessorMethod
+ val pkg = lambdaClassDef.symbol.owner
+
+ // we'll add the lambda class to the package later
+ lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
+
+ super.transform(newExpr)
+ // when we encounter a template (basically the thing that holds the body of a class/trait)
+ // we need to update it to include newly created accessor methods after transforming it
+ case Template(_, _, _) =>
+ try {
+ // during this call accessorMethods will be populated from the Function case
+ val Template(parents, self, body) = super.transform(tree)
+ Template(parents, self, body ++ accessorMethods)
+ } finally accessorMethods.clear()
+ case _ => super.transform(tree)
+ }
+
+ // this entry point is aimed at the statements in the compilation unit.
+ // after working on the entire compilation unit we'll have a set of
+ // new class definitions to add to the top level
+ override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
+ super.transformStats(stats, exprOwner) ++ lambdaClassDefs(exprOwner)
+ }
+
+ private def optionSymbol(sym: Symbol): Option[Symbol] = if (sym.exists) Some(sym) else None
+
+ // turns a lambda into a new class def, a New expression instantiating that class, and an
+ // accessor method for the body of the lambda
+ private def transformFunction(originalFunction: Function): TransformedFunction = {
+ val functionTpe = originalFunction.tpe
+ val targs = functionTpe.typeArgs
+ val formals :+ restpe = targs
+ val oldClass = originalFunction.symbol.enclClass
+
+ // find which variables are free in the lambda because those are captures that need to be
+ // passed into the constructor of the anonymous function class
+ val captures = FreeVarTraverser.freeVarsOf(originalFunction)
+
+ /**
+ * Creates the static accessor method that forwards to the lifted lambda body method, taking the 'this' proxy (if any) as its first parameter
+ */
+ def createAccessorMethod(thisProxy: Symbol, fun: Function): DefDef = {
+ val target = targetMethod(fun)
+ if (!thisProxy.exists) {
+ target setFlag STATIC
+ }
+ val params = ((optionSymbol(thisProxy) map {proxy:Symbol => ValDef(proxy)}) ++ (target.paramss.flatten map ValDef)).toList
+
+ val methSym = oldClass.newMethod(unit.freshTermName(nme.accessor.toString()), target.pos, FINAL | BRIDGE | SYNTHETIC | PROTECTED | STATIC)
+
+ val paramSyms = params map {param => methSym.newSyntheticValueParam(param.symbol.tpe, param.name) }
+
+ params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
+ params foreach (_.symbol.owner = methSym)
+
+ val methodType = MethodType(paramSyms, restpe)
+ methSym setInfo methodType
+
+ oldClass.info.decls enter methSym
+
+ val body = localTyper.typed {
+ val newTarget = Select(if (thisProxy.exists) gen.mkAttributedRef(paramSyms(0)) else gen.mkAttributedThis(oldClass), target)
+ val newParams = paramSyms drop (if (thisProxy.exists) 1 else 0) map Ident
+ Apply(newTarget, newParams)
+ } setPos fun.pos
+ val methDef = DefDef(methSym, List(params), body)
+
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ // TODO probably don't need packedType
+ methDef.tpt setType localTyper.packedType(body, methSym)
+ methDef
+ }
+
+ /**
+ * Creates the apply method for the anonymous subclass of FunctionN
+ */
+ def createApplyMethod(newClass: Symbol, fun: Function, accessor: DefDef, thisProxy: Symbol): DefDef = {
+ val methSym = newClass.newMethod(nme.apply, fun.pos, FINAL | SYNTHETIC)
+ val params = fun.vparams map (_.duplicate)
+
+ val paramSyms = map2(formals, params) {
+ (tp, vparam) => methSym.newSyntheticValueParam(tp, vparam.name)
+ }
+ params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
+ params foreach (_.symbol.owner = methSym)
+
+ val methodType = MethodType(paramSyms, restpe)
+ methSym setInfo methodType
+
+ newClass.info.decls enter methSym
+
+ val Apply(_, oldParams) = fun.body
+
+ val body = localTyper typed Apply(Select(gen.mkAttributedThis(oldClass), accessor.symbol), (optionSymbol(thisProxy) map {tp => Select(gen.mkAttributedThis(newClass), tp)}).toList ++ oldParams)
+ body.substituteSymbols(fun.vparams map (_.symbol), params map (_.symbol))
+ body changeOwner (fun.symbol -> methSym)
+
+ val methDef = DefDef(methSym, List(params), body)
+
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ // TODO probably don't need packedType
+ methDef.tpt setType localTyper.packedType(body, methSym)
+ methDef
+ }
+
+ /**
+ * Creates the constructor on the newly created class. It will handle
+ * initialization of members that represent the captured environment
+ */
+ def createConstructor(newClass: Symbol, members: List[ValDef]): DefDef = {
+ val constrSym = newClass.newConstructor(originalFunction.pos, SYNTHETIC)
+
+ val (paramSymbols, params, assigns) = (members map {member =>
+ val paramSymbol = newClass.newVariable(member.symbol.name.toTermName, newClass.pos, 0)
+ paramSymbol.setInfo(member.symbol.info)
+ val paramVal = ValDef(paramSymbol)
+ val paramIdent = Ident(paramSymbol)
+ val assign = Assign(Select(gen.mkAttributedThis(newClass), member.symbol), paramIdent)
+
+ (paramSymbol, paramVal, assign)
+ }).unzip3
+
+ val constrType = MethodType(paramSymbols, newClass.thisType)
+ constrSym setInfoAndEnter constrType
+
+ val body =
+ Block(
+ List(
+ Apply(Select(Super(gen.mkAttributedThis(newClass), tpnme.EMPTY) setPos newClass.pos, nme.CONSTRUCTOR) setPos newClass.pos, Nil) setPos newClass.pos
+ ) ++ assigns,
+ Literal(Constant(())): Tree
+ ) setPos newClass.pos
+
+ (localTyper typed DefDef(constrSym, List(params), body) setPos newClass.pos).asInstanceOf[DefDef]
+ }
+
+ val pkg = oldClass.owner
+
+ // Parent for anonymous class def
+ val abstractFunctionErasedType = AbstractFunctionClass(formals.length).tpe
+
+ // anonymous subclass of FunctionN with an apply method
+ def makeAnonymousClass = {
+ val parents = addSerializable(abstractFunctionErasedType)
+ val funOwner = originalFunction.symbol.owner
+
+ val suffix = "$lambda$" + (
+ if (funOwner.isPrimaryConstructor) ""
+ else "$" + funOwner.name
+ )
+ val name = unit.freshTypeName(s"${oldClass.name.decode}$suffix")
+
+ val anonClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation
+ anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+
+ val captureProxies2 = new LinkedHashMap[Symbol, TermSymbol]
+ captures foreach {capture =>
+ val sym = anonClass.newVariable(capture.name.toTermName, capture.pos, SYNTHETIC)
+ sym setInfo capture.info
+ captureProxies2 += ((capture, sym))
+ }
+
+ // the Optional proxy that will hold a reference to the 'this'
+ // object used by the lambda, if any. NoSymbol if there is no this proxy
+ val thisProxy = {
+ val target = targetMethod(originalFunction)
+ if (thisReferringMethods contains target) {
+ val sym = anonClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC)
+ sym.info = oldClass.tpe
+ sym
+ } else NoSymbol
+ }
+
+ val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, anonClass, originalFunction.symbol.pos, thisProxy)
+
+ val accessorMethod = createAccessorMethod(thisProxy, originalFunction)
+
+ val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function]
+
+ val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member =>
+ anonClass.info.decls enter member
+ ValDef(member, gen.mkZero(member.tpe)) setPos decapturedFunction.pos
+ }
+
+ // constructor
+ val constr = createConstructor(anonClass, members)
+
+ // apply method with same arguments and return type as original lambda.
+ val applyMethodDef = createApplyMethod(anonClass, decapturedFunction, accessorMethod, thisProxy)
+
+ val bridgeMethod = createBridgeMethod(anonClass, originalFunction, applyMethodDef)
+
+ def fulldef(sym: Symbol) =
+ if (sym == NoSymbol) sym.toString
+ else s"$sym: ${sym.tpe} in ${sym.owner}"
+
+ def clashError(bm: Symbol) = {
+ unit.error(
+ applyMethodDef.symbol.pos,
+ sm"""bridge generated for member ${fulldef(applyMethodDef.symbol)}
+ |which overrides ${fulldef(getMember(abstractFunctionErasedType.typeSymbol, nme.apply))}
+ |clashes with definition of the member itself;
+ |both have erased type ${exitingPostErasure(bm.tpe)}""")
+ }
+
+ bridgeMethod foreach (bm =>
+ if (bm.symbol.tpe =:= applyMethodDef.symbol.tpe)
+ clashError(bm.symbol)
+ )
+
+ val body = members ++ List(constr, applyMethodDef) ++ bridgeMethod
+
+ // TODO if member fields are private this complains that they're not accessible
+ (localTyper.typedPos(decapturedFunction.pos)(ClassDef(anonClass, body)).asInstanceOf[ClassDef], thisProxy, accessorMethod)
+ }
+
+ val (anonymousClassDef, thisProxy, accessorMethod) = makeAnonymousClass
+
+ pkg.info.decls enter anonymousClassDef.symbol
+
+ val thisArg = optionSymbol(thisProxy) map (_ => gen.mkAttributedThis(oldClass) setPos originalFunction.pos)
+ val captureArgs = captures map (capture => Ident(capture) setPos originalFunction.pos)
+
+ val newStat =
+ Typed(New(anonymousClassDef.symbol, (thisArg.toList ++ captureArgs): _*), TypeTree(abstractFunctionErasedType))
+
+ val typedNewStat = localTyper.typedPos(originalFunction.pos)(newStat)
+
+ TransformedFunction(anonymousClassDef, typedNewStat, accessorMethod)
+ }
+
+ /**
+ * Creates a bridge method if needed. The bridge method forwards from apply(x1: Object, x2: Object...xn: Object): Object to
+ * apply(x1: T1, x2: T2...xn: Tn): T0 using type adaptation on each input and output. The only time a bridge isn't needed
+ * is when the original lambda is already erased to type Object, Object, Object... => Object
+ */
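+ // Sketch of the cases (assumed types): for a lambda of type Int => Int the anonymous class gets
+ //   def apply(x: Int): Int = ...                          // the specific apply
+ //   def apply(x: Object): Object = box(apply(unbox(x)))   // bridge, built roughly via adaptToType
+ // whereas for String => String both signatures already erase to Object => Object, so
+ // needsBridge is false and no bridge DefDef is returned.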
+ def createBridgeMethod(newClass:Symbol, originalFunction: Function, applyMethod: DefDef): Option[DefDef] = {
+ val bridgeMethSym = newClass.newMethod(nme.apply, applyMethod.pos, FINAL | SYNTHETIC | BRIDGE)
+ val originalParams = applyMethod.vparamss(0)
+ val bridgeParams = originalParams map { originalParam =>
+ val bridgeSym = bridgeMethSym.newSyntheticValueParam(ObjectTpe, originalParam.name)
+ ValDef(bridgeSym)
+ }
+
+ val bridgeSyms = bridgeParams map (_.symbol)
+
+ val methodType = MethodType(bridgeSyms, ObjectTpe)
+ bridgeMethSym setInfo methodType
+
+ def adapt(tree: Tree, expectedTpe: Type): (Boolean, Tree) = {
+ if (tree.tpe =:= expectedTpe) (false, tree)
+ else (true, adaptToType(tree, expectedTpe))
+ }
+
+ def adaptAndPostErase(tree: Tree, pt: Type): (Boolean, Tree) = {
+ val (needsAdapt, adaptedTree) = adapt(tree, pt)
+ val trans = postErasure.newTransformer(unit)
+ val postErasedTree = trans.atOwner(currentOwner)(trans.transform(adaptedTree)) // SI-8017 eliminates ErasedValueTypes
+ (needsAdapt, postErasedTree)
+ }
+
+ enteringPhase(currentRun.posterasurePhase) {
+ // e.g, in:
+ // class C(val a: Int) extends AnyVal; (x: Int) => new C(x)
+ //
+ // This type is:
+ // (x: Int)ErasedValueType(class C, Int)
+ val liftedBodyDefTpe: MethodType = {
+ val liftedBodySymbol = {
+ val Apply(method, _) = originalFunction.body
+ method.symbol
+ }
+ liftedBodySymbol.info.asInstanceOf[MethodType]
+ }
+ val (paramNeedsAdaptation, adaptedParams) = (bridgeSyms zip liftedBodyDefTpe.params map {case (bridgeSym, param) => adapt(Ident(bridgeSym) setType bridgeSym.tpe, param.tpe)}).unzip
+ // SI-8017 Before, this code used `applyMethod.symbol.info.resultType`.
+ // But that symbol doesn't have a type history that goes back before `delambdafy`,
+ // so we just see a plain `Int`, rather than `ErasedValueType(C, Int)`.
+ // This triggered primitive boxing, rather than value class boxing.
+ val resTp = liftedBodyDefTpe.finalResultType
+ val body = Apply(gen.mkAttributedSelect(gen.mkAttributedThis(newClass), applyMethod.symbol), adaptedParams) setType resTp
+ val (needsReturnAdaptation, adaptedBody) = adaptAndPostErase(body, ObjectTpe)
+
+ val needsBridge = (paramNeedsAdaptation contains true) || needsReturnAdaptation
+ if (needsBridge) {
+ val methDef = DefDef(bridgeMethSym, List(bridgeParams), adaptedBody)
+ newClass.info.decls enter bridgeMethSym
+ Some((localTyper typed methDef).asInstanceOf[DefDef])
+ } else None
+ }
+ }
+ } // DelambdafyTransformer
+
+ // A traverser that finds symbols used but not defined in the given Tree
+ // TODO freeVarTraverser in LambdaLift does a very similar task. With some
+ // analysis this could probably be unified with it
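+ // For example (a sketch): traversing `(y: Int) => x + y` records y as declared and, if x is a
+ // local term defined outside the function, x ends up in freeVars and becomes a capture.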
+ class FreeVarTraverser extends Traverser {
+ val freeVars = mutable.LinkedHashSet[Symbol]()
+ val declared = mutable.LinkedHashSet[Symbol]()
+
+ override def traverse(tree: Tree) = {
+ tree match {
+ case Function(args, _) =>
+ args foreach {arg => declared += arg.symbol}
+ case ValDef(_, _, _, _) =>
+ declared += tree.symbol
+ case _: Bind =>
+ declared += tree.symbol
+ case Ident(_) =>
+ val sym = tree.symbol
+ if ((sym != NoSymbol) && sym.isLocal && sym.isTerm && !sym.isMethod && !declared.contains(sym)) freeVars += sym
+ case _ =>
+ }
+ super.traverse(tree)
+ }
+ }
+
+ object FreeVarTraverser {
+ def freeVarsOf(function: Function) = {
+ val freeVarsTraverser = new FreeVarTraverser
+ freeVarsTraverser.traverse(function)
+ freeVarsTraverser.freeVars
+ }
+ }
+
+ // A transformer that converts specified captured symbols into other symbols
+ // TODO this transform could look more like ThisSubstituter and TreeSymSubstituter. It's not clear that it needs that level of sophistication since the types
+ // at this point are always very simple flattened/erased types, but it would probably be more robust if it tried to take more complicated types into account
+ class DeCapturifyTransformer(captureProxies: Map[Symbol, TermSymbol], unit: CompilationUnit, oldClass: Symbol, newClass:Symbol, pos: Position, thisProxy: Symbol) extends TypingTransformer(unit) {
+ override def transform(tree: Tree) = tree match {
+ case tree@This(encl) if tree.symbol == oldClass && thisProxy.exists =>
+ gen mkAttributedSelect (gen mkAttributedThis newClass, thisProxy)
+ case Ident(name) if (captureProxies contains tree.symbol) =>
+ gen mkAttributedSelect (gen mkAttributedThis newClass, captureProxies(tree.symbol))
+ case _ => super.transform(tree)
+ }
+ }
+
+ /**
+ * Get the symbol of the target lifted lambda body method from a function. I.e. if
+ * the function is {args => anonfun(args)} then this method returns anonfun's symbol
+ */
+ private def targetMethod(fun: Function): Symbol = fun match {
+ case Function(_, Apply(target, _)) =>
+ target.symbol
+ case _ =>
+ // any other shape of Function is unexpected at this point
+ abort(s"could not understand function with tree $fun")
+ }
+
+ // finds all methods that reference 'this'
+ class ThisReferringMethodsTraverser() extends Traverser {
+ private var currentMethod: Symbol = NoSymbol
+ // the set of methods that refer to this
+ val thisReferringMethods = mutable.Set[Symbol]()
+ // the set of lifted lambda body methods that each method refers to
+ val liftedMethodReferences = mutable.Map[Symbol, Set[Symbol]]().withDefault(_ => mutable.Set())
+ override def traverse(tree: Tree) = tree match {
+ case DefDef(_, _, _, _, _, _) =>
+ // we don't expect defs within defs. At this phase trees should be very flat
+ if (currentMethod.exists) devWarning("Found a def within a def at a phase where defs are expected to be flattened out.")
+ currentMethod = tree.symbol
+ super.traverse(tree)
+ currentMethod = NoSymbol
+ case fun@Function(_, _) =>
+ // we don't drill into functions because at the beginning of this phase they will always refer to 'this'.
+ // They'll be of the form {(args...) => this.anonfun(args...)}
+ // but we do need to make note of the lifted body method in case it refers to 'this'
+ if (currentMethod.exists) liftedMethodReferences(currentMethod) += targetMethod(fun)
+ case This(_) =>
+ if (currentMethod.exists && tree.symbol == currentMethod.enclClass) {
+ debuglog(s"$currentMethod directly refers to 'this'")
+ thisReferringMethods add currentMethod
+ }
+ case _ =>
+ super.traverse(tree)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index df220b7381..6732900ef2 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -10,17 +10,22 @@ import scala.reflect.internal.ClassfileConstants._
import scala.collection.{ mutable, immutable }
import symtab._
import Flags._
+import scala.reflect.internal.Mode._
abstract class Erasure extends AddInterfaces
with scala.reflect.internal.transform.Erasure
with typechecker.Analyzer
with TypingTransformers
with ast.TreeDSL
+ with TypeAdaptingTransformer
{
import global._
import definitions._
import CODE._
+ val analyzer: typechecker.Analyzer { val global: Erasure.this.global.type } =
+ this.asInstanceOf[typechecker.Analyzer { val global: Erasure.this.global.type }]
+
val phaseName: String = "erasure"
def newTransformer(unit: CompilationUnit): Transformer =
@@ -49,7 +54,7 @@ abstract class Erasure extends AddInterfaces
if (sym == ArrayClass) args foreach traverse
else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound || !args.isEmpty) result = true
else if (sym.isClass) traverse(rebindInnerClass(pre, sym)) // #2585
- else if (!sym.owner.isPackageClass) traverse(pre)
+ else if (!sym.isTopLevel) traverse(pre)
case PolyType(_, _) | ExistentialType(_, _) =>
result = true
case RefinedType(parents, _) =>
@@ -65,8 +70,8 @@ abstract class Erasure extends AddInterfaces
}
}
- override protected def verifyJavaErasure = settings.Xverify.value || settings.debug.value
- def needsJavaSig(tp: Type) = !settings.Ynogenericsig.value && NeedsSigCollector.collect(tp)
+ override protected def verifyJavaErasure = settings.Xverify || settings.debug
+ def needsJavaSig(tp: Type) = !settings.Ynogenericsig && NeedsSigCollector.collect(tp)
// only refer to type params that will actually make it into the sig, this excludes:
// * higher-order type parameters
@@ -87,7 +92,7 @@ abstract class Erasure extends AddInterfaces
// more rigorous way up front rather than catching it after the fact,
// but that will be more involved.
private def dotCleanup(sig: String): String = {
- var last: Char = '\0'
+ var last: Char = '\u0000'
sig map {
case '.' if last != '>' => last = '.' ; '$'
case ch => last = ch ; ch
@@ -100,7 +105,7 @@ abstract class Erasure extends AddInterfaces
* unboxing some primitive types and further simplifications as they are done in jsig.
*/
val prepareSigMap = new TypeMap {
- def squashBoxed(tp: Type): Type = tp.normalize match {
+ def squashBoxed(tp: Type): Type = tp.dealiasWiden match {
case t @ RefinedType(parents, decls) =>
val parents1 = parents mapConserve squashBoxed
if (parents1 eq parents) tp
@@ -110,10 +115,10 @@ abstract class Erasure extends AddInterfaces
if (tpe1 eq tpe) t
else ExistentialType(tparams, tpe1)
case t =>
- if (boxedClass contains t.typeSymbol) ObjectClass.tpe
+ if (boxedClass contains t.typeSymbol) ObjectTpe
else tp
}
- def apply(tp: Type): Type = tp.normalize match {
+ def apply(tp: Type): Type = tp.dealiasWiden match {
case tp1 @ TypeBounds(lo, hi) =>
val lo1 = squashBoxed(apply(lo))
val hi1 = squashBoxed(apply(hi))
@@ -122,16 +127,16 @@ abstract class Erasure extends AddInterfaces
case tp1 @ TypeRef(pre, sym, args) =>
def argApply(tp: Type) = {
val tp1 = apply(tp)
- if (tp1.typeSymbol == UnitClass) ObjectClass.tpe
+ if (tp1.typeSymbol == UnitClass) ObjectTpe
else squashBoxed(tp1)
}
if (sym == ArrayClass && args.nonEmpty)
- if (unboundedGenericArrayLevel(tp1) == 1) ObjectClass.tpe
+ if (unboundedGenericArrayLevel(tp1) == 1) ObjectTpe
else mapOver(tp1)
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
- ObjectClass.tpe
+ ObjectTpe
else if (sym == UnitClass)
- BoxedUnitClass.tpe
+ BoxedUnitTpe
else if (sym == NothingClass)
RuntimeNothingClass.tpe
else if (sym == NullClass)
@@ -144,7 +149,7 @@ abstract class Erasure extends AddInterfaces
}
case tp1 @ MethodType(params, restpe) =>
val params1 = mapOver(params)
- val restpe1 = if (restpe.normalize.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe)
+ val restpe1 = if (restpe.typeSymbol == UnitClass) UnitTpe else apply(restpe)
if ((params1 eq params) && (restpe1 eq restpe)) tp1
else MethodType(params1, restpe1)
case tp1 @ RefinedType(parents, decls) =>
@@ -162,8 +167,8 @@ abstract class Erasure extends AddInterfaces
}
}
- private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
- case RefinedType(parents, _) => parents map (_.normalize)
+ private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.dealiasWiden match {
+ case RefinedType(parents, _) => parents map (_.dealiasWiden)
case tp => tp :: Nil
}
@@ -172,7 +177,7 @@ abstract class Erasure extends AddInterfaces
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
- def javaSig(sym0: Symbol, info: Type): Option[String] = beforeErasure {
+ def javaSig(sym0: Symbol, info: Type): Option[String] = enteringErasure {
val isTraitSignature = sym0.enclClass.isTrait
def superSig(parents: List[Type]) = {
@@ -181,7 +186,7 @@ abstract class Erasure extends AddInterfaces
// java is unthrilled about seeing interfaces inherit from classes
val ok = parents filter (p => p.typeSymbol.isTrait || p.typeSymbol.isInterface)
// traits should always list Object.
- if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectClass.tpe :: ok
+ if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectTpe :: ok
else ok
}
else parents
@@ -192,7 +197,7 @@ abstract class Erasure extends AddInterfaces
def boundsSig(bounds: List[Type]) = {
val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait)
val classPart = isClass match {
- case Nil => ":" // + boxedSig(ObjectClass.tpe)
+ case Nil => ":" // + boxedSig(ObjectTpe)
case x :: _ => ":" + boxedSig(x)
}
classPart :: (isTrait map boxedSig) mkString ":"
@@ -206,7 +211,7 @@ abstract class Erasure extends AddInterfaces
// Anything which could conceivably be a module (i.e. isn't known to be
// a type parameter or similar) must go through here or the signature is
// likely to end up with Foo<T>.Empty where it needs Foo<T>.Empty$.
- def fullNameInSig(sym: Symbol) = "L" + beforeIcode(sym.javaBinaryName)
+ def fullNameInSig(sym: Symbol) = "L" + enteringIcode(sym.javaBinaryName)
def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): String = {
val tp = tp0.dealias
@@ -219,8 +224,8 @@ abstract class Erasure extends AddInterfaces
def argSig(tp: Type) =
if (existentiallyBound contains tp.typeSymbol) {
val bounds = tp.typeSymbol.info.bounds
- if (!(AnyRefClass.tpe <:< bounds.hi)) "+" + boxedSig(bounds.hi)
- else if (!(bounds.lo <:< NullClass.tpe)) "-" + boxedSig(bounds.lo)
+ if (!(AnyRefTpe <:< bounds.hi)) "+" + boxedSig(bounds.hi)
+ else if (!(bounds.lo <:< NullTpe)) "-" + boxedSig(bounds.lo)
else "*"
} else {
boxedSig(tp)
@@ -246,7 +251,7 @@ abstract class Erasure extends AddInterfaces
// If args isEmpty, Array is being used as a type constructor
if (sym == ArrayClass && args.nonEmpty) {
- if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectClass.tpe)
+ if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectTpe)
else ARRAY_TAG.toString+(args map (jsig(_))).mkString
}
else if (isTypeParameterInSig(sym, sym0)) {
@@ -254,20 +259,20 @@ abstract class Erasure extends AddInterfaces
"" + TVAR_TAG + sym.name + ";"
}
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
- jsig(ObjectClass.tpe)
+ jsig(ObjectTpe)
else if (sym == UnitClass)
- jsig(BoxedUnitClass.tpe)
+ jsig(BoxedUnitTpe)
else if (sym == NothingClass)
jsig(RuntimeNothingClass.tpe)
else if (sym == NullClass)
jsig(RuntimeNullClass.tpe)
else if (isPrimitiveValueClass(sym)) {
- if (!primitiveOK) jsig(ObjectClass.tpe)
- else if (sym == UnitClass) jsig(BoxedUnitClass.tpe)
+ if (!primitiveOK) jsig(ObjectTpe)
+ else if (sym == UnitClass) jsig(BoxedUnitTpe)
else abbrvTag(sym).toString
}
else if (sym.isDerivedValueClass) {
- val unboxed = sym.derivedValueClassUnbox.info.finalResultType
+ val unboxed = sym.derivedValueClassUnbox.tpe_*.finalResultType
val unboxedSeen = (tp memberType sym.derivedValueClassUnbox).finalResultType
def unboxedMsg = if (unboxed == unboxedSeen) "" else s", seen within ${sym.simpleName} as $unboxedSeen"
logResult(s"Erasure of value class $sym (underlying type $unboxed$unboxedMsg) is") {
@@ -340,10 +345,9 @@ abstract class Erasure extends AddInterfaces
case _ => tp.deconst
}
}
-
+
// ## requires a little translation
private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
-
// Methods on Any/Object which we rewrite here while we still know what
// is a primitive and what arrived boxed.
private lazy val interceptedMethods = poundPoundMethods ++ primitiveGetClassMethods
@@ -352,44 +356,6 @@ abstract class Erasure extends AddInterfaces
override def newTyper(context: Context) = new Eraser(context)
- private def safeToRemoveUnbox(cls: Symbol): Boolean =
- (cls == definitions.NullClass) || isBoxedValueClass(cls)
-
- /** An extractor object for unboxed expressions (maybe subsumed by posterasure?) */
- object Unboxed {
- def unapply(tree: Tree): Option[Tree] = tree match {
- case Apply(fn, List(arg)) if isUnbox(fn.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
- Some(arg)
- case Apply(
- TypeApply(
- cast @ Select(
- Apply(
- sel @ Select(arg, acc),
- List()),
- asinstanceof),
- List(tpt)),
- List())
- if cast.symbol == Object_asInstanceOf &&
- tpt.tpe.typeSymbol.isDerivedValueClass &&
- sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox =>
- Some(arg)
- case _ =>
- None
- }
- }
-
- /** An extractor object for boxed expressions (maybe subsumed by posterasure?) */
- object Boxed {
- def unapply(tree: Tree): Option[Tree] = tree match {
- case Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)) if (tpt.tpe.typeSymbol.isDerivedValueClass) =>
- Some(arg)
- case LabelDef(name, params, Boxed(rhs)) =>
- Some(treeCopy.LabelDef(tree, name, params, rhs) setType rhs.tpe)
- case _ =>
- None
- }
- }
-
class ComputeBridges(unit: CompilationUnit, root: Symbol) {
assert(phase == currentRun.erasurePhase, phase)
@@ -399,22 +365,19 @@ abstract class Erasure extends AddInterfaces
val bridgeTarget = mutable.HashMap[Symbol, Symbol]()
var bridges = List[Tree]()
- val opc = beforeExplicitOuter {
+ val opc = enteringExplicitOuter {
new overridingPairs.Cursor(root) {
override def parents = List(root.info.firstParent)
- override def exclude(sym: Symbol) = !sym.isMethod || sym.isPrivate || super.exclude(sym)
+ override def exclude(sym: Symbol) = !sym.isMethod || super.exclude(sym)
}
}
def compute(): (List[Tree], immutable.Set[Symbol]) = {
while (opc.hasNext) {
- val member = opc.overriding
- val other = opc.overridden
- //println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG
- if (beforeExplicitOuter(!member.isDeferred))
- checkPair(member, other)
+ if (enteringExplicitOuter(!opc.low.isDeferred))
+ checkPair(opc.currentPair)
- opc.next
+ opc.next()
}
(bridges, toBeRemoved)
}
@@ -441,11 +404,11 @@ abstract class Erasure extends AddInterfaces
sm"""bridge generated for member ${fulldef(member)}
|which overrides ${fulldef(other)}
|clashes with definition of $what;
- |both have erased type ${afterPostErasure(bridge.tpe)}""")
+ |both have erased type ${exitingPostErasure(bridge.tpe)}""")
}
for (bc <- root.baseClasses) {
- if (settings.debug.value)
- afterPostErasure(println(
+ if (settings.debug)
+ exitingPostErasure(println(
sm"""check bridge overrides in $bc
|${bc.info.nonPrivateDecl(bridge.name)}
|${site.memberType(bridge)}
@@ -454,13 +417,13 @@ abstract class Erasure extends AddInterfaces
def overriddenBy(sym: Symbol) =
sym.matchingSymbol(bc, site).alternatives filter (sym => !sym.isBridge)
- for (overBridge <- afterPostErasure(overriddenBy(bridge))) {
+ for (overBridge <- exitingPostErasure(overriddenBy(bridge))) {
if (overBridge == member) {
clashError("the member itself")
} else {
val overMembers = overriddenBy(member)
if (!overMembers.exists(overMember =>
- afterPostErasure(overMember.tpe =:= overBridge.tpe))) {
+ exitingPostErasure(overMember.tpe =:= overBridge.tpe))) {
clashError(fulldef(overBridge))
}
}
@@ -469,9 +432,16 @@ abstract class Erasure extends AddInterfaces
noclash
}
- def checkPair(member: Symbol, other: Symbol) {
- val otpe = specialErasure(root)(other.tpe)
- val bridgeNeeded = afterErasure (
+ /** TODO - work through this logic with a fine-toothed comb, incorporating
+ * into SymbolPairs where appropriate.
+ */
+ def checkPair(pair: SymbolPair) {
+ import pair._
+ val member = low
+ val other = high
+ val otpe = highErased
+
+ val bridgeNeeded = exitingErasure (
!member.isMacro &&
!(other.tpe =:= member.tpe) &&
!(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
@@ -484,7 +454,7 @@ abstract class Erasure extends AddInterfaces
if (!bridgeNeeded)
return
- val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+ val newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos
debuglog("generating bridge from %s (%s): %s to %s: %s".format(
@@ -499,9 +469,9 @@ abstract class Erasure extends AddInterfaces
if (!(member.tpe exists (_.typeSymbol.isDerivedValueClass)) ||
checkBridgeOverrides(member, other, bridge)) {
- afterErasure(root.info.decls enter bridge)
+ exitingErasure(root.info.decls enter bridge)
if (other.owner == root) {
- afterErasure(root.info.decls.unlink(other))
+ exitingErasure(root.info.decls.unlink(other))
toBeRemoved += other
}
@@ -510,7 +480,7 @@ abstract class Erasure extends AddInterfaces
}
}
- def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = afterErasure {
+ def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = exitingErasure {
// type checking ensures we can safely call `other`, but unless `member.tpe <:< other.tpe`,
// calling `member` is not guaranteed to succeed in general, there's
// nothing we can do about this, except for an unapply: when this subtype test fails,
@@ -521,10 +491,10 @@ abstract class Erasure extends AddInterfaces
def maybeWrap(bridgingCall: Tree): Tree = {
val guardExtractor = ( // can't statically know which member is going to be selected, so don't let this depend on member.isSynthetic
(member.name == nme.unapply || member.name == nme.unapplySeq)
- && !afterErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
+ && !exitingErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
import CODE._
- val _false = FALSE_typed
+ val _false = FALSE
val pt = member.tpe.resultType
lazy val zero =
if (_false.tpe <:< pt) _false
@@ -544,168 +514,13 @@ abstract class Erasure extends AddInterfaces
maybeWrap(bridgingCall)
}
- atPos(bridge.pos)(DefDef(bridge, rhs))
+ DefDef(bridge, rhs)
}
}
/** The modifier typer which retypes with erased types. */
- class Eraser(_context: Context) extends Typer(_context) {
-
- private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
-
- private def isDifferentErasedValueType(tpe: Type, other: Type) =
- isErasedValueType(tpe) && (tpe ne other)
-
- private def isPrimitiveValueMember(sym: Symbol) =
- sym != NoSymbol && isPrimitiveValueClass(sym.owner)
-
- @inline private def box(tree: Tree, target: => String): Tree = {
- val result = box1(tree)
- log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
- result
- }
-
- /** Box `tree` of unboxed type */
- private def box1(tree: Tree): Tree = tree match {
- case LabelDef(_, _, _) =>
- val ldef = deriveLabelDef(tree)(box1)
- ldef setType ldef.rhs.tpe
- case _ =>
- val tree1 = tree.tpe match {
- case ErasedValueType(tref) =>
- val clazz = tref.sym
- tree match {
- case Unboxed(arg) if arg.tpe.typeSymbol == clazz =>
- log("shortcircuiting unbox -> box "+arg); arg
- case _ =>
- New(clazz, cast(tree, underlyingOfValueClass(clazz)))
- }
- case _ =>
- tree.tpe.typeSymbol match {
- case UnitClass =>
- if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT)
- else BLOCK(tree, REF(BoxedUnit_UNIT))
- case NothingClass => tree // a non-terminating expression doesn't need boxing
- case x =>
- assert(x != ArrayClass)
- tree match {
- /** Can't always remove a Box(Unbox(x)) combination because the process of boxing x
- * may lead to throwing an exception.
- *
- * This is important for specialization: calls to the super constructor should not box/unbox specialized
- * fields (see TupleX). (ID)
- */
- case Apply(boxFun, List(arg)) if isUnbox(tree.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
- log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
- arg
- case _ =>
- (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
- }
- }
- }
- typedPos(tree.pos)(tree1)
- }
-
- private def unbox(tree: Tree, pt: Type): Tree = {
- val result = unbox1(tree, pt)
- log(s"unboxing ${tree.summaryString}: ${tree.tpe} with pt=$pt as type ${result.tpe}")
- result
- }
-
- /** Unbox `tree` of boxed type to expected type `pt`.
- *
- * @param tree the given tree
- * @param pt the expected type.
- * @return the unboxed tree
- */
- private def unbox1(tree: Tree, pt: Type): Tree = tree match {
-/*
- case Boxed(unboxed) =>
- println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
- adaptToType(unboxed, pt)
- */
- case LabelDef(_, _, _) =>
- val ldef = deriveLabelDef(tree)(unbox(_, pt))
- ldef setType ldef.rhs.tpe
- case _ =>
- val tree1 = pt match {
- case ErasedValueType(tref) =>
- tree match {
- case Boxed(arg) if arg.tpe.isInstanceOf[ErasedValueType] =>
- log("shortcircuiting box -> unbox "+arg)
- arg
- case _ =>
- val clazz = tref.sym
- log("not boxed: "+tree)
- lazy val underlying = underlyingOfValueClass(clazz)
- val tree0 =
- if (tree.tpe.typeSymbol == NullClass &&
- isPrimitiveValueClass(underlying.typeSymbol)) {
- // convert `null` directly to underlying type, as going
- // via the unboxed type would yield a NPE (see SI-5866)
- unbox1(tree, underlying)
- } else
- Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
- cast(tree0, pt)
- }
- case _ =>
- pt.typeSymbol match {
- case UnitClass =>
- if (treeInfo isExprSafeToInline tree) UNIT
- else BLOCK(tree, UNIT)
- case x =>
- assert(x != ArrayClass)
- // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type
- Apply(unboxMethod(pt.typeSymbol), tree)
- }
- }
- typedPos(tree.pos)(tree1)
- }
-
- /** Generate a synthetic cast operation from tree.tpe to pt.
- * @pre pt eq pt.normalize
- */
- private def cast(tree: Tree, pt: Type): Tree = logResult(s"cast($tree, $pt)") {
- if (pt.typeSymbol == UnitClass) {
- // See SI-4731 for one example of how this occurs.
- log("Attempted to cast to Unit: " + tree)
- tree.duplicate setType pt
- } else if (tree.tpe != null && tree.tpe.typeSymbol == ArrayClass && pt.typeSymbol == ArrayClass) {
- // See SI-2386 for one example of when this might be necessary.
- val needsExtraCast = isPrimitiveValueType(tree.tpe.typeArgs.head) && !isPrimitiveValueType(pt.typeArgs.head)
- val tree1 = if (needsExtraCast) gen.mkRuntimeCall(nme.toObjectArray, List(tree)) else tree
- gen.mkAttributedCast(tree1, pt)
- } else gen.mkAttributedCast(tree, pt)
- }
-
- /** Adapt `tree` to expected type `pt`.
- *
- * @param tree the given tree
- * @param pt the expected type
- * @return the adapted tree
- */
- private def adaptToType(tree: Tree, pt: Type): Tree = {
- if (settings.debug.value && pt != WildcardType)
- log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
- if (tree.tpe <:< pt)
- tree
- else if (isDifferentErasedValueType(tree.tpe, pt))
- adaptToType(box(tree, pt.toString), pt)
- else if (isDifferentErasedValueType(pt, tree.tpe))
- adaptToType(unbox(tree, pt), pt)
- else if (isPrimitiveValueType(tree.tpe) && !isPrimitiveValueType(pt)) {
- adaptToType(box(tree, pt.toString), pt)
- } else if (isMethodTypeWithEmptyParams(tree.tpe)) {
- // [H] this assert fails when trying to typecheck tree !(SomeClass.this.bitmap) for single lazy val
- //assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
- adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
-// } else if (pt <:< tree.tpe)
-// cast(tree, pt)
- } else if (isPrimitiveValueType(pt) && !isPrimitiveValueType(tree.tpe))
- adaptToType(unbox(tree, pt), pt)
- else
- cast(tree, pt)
- }
+ class Eraser(_context: Context) extends Typer(_context) with TypeAdapter {
+ val typer = this.asInstanceOf[analyzer.Typer]
/** Replace member references as follows:
*
@@ -723,45 +538,35 @@ abstract class Erasure extends AddInterfaces
private def adaptMember(tree: Tree): Tree = {
//Console.println("adaptMember: " + tree);
tree match {
- case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
+ case Apply(ta @ TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_asInstanceOf =>
- val qual1 = typedQualifier(qual, NOmode, ObjectClass.tpe) // need to have an expected type, see #3037
- val qualClass = qual1.tpe.typeSymbol
-/*
- val targClass = targ.tpe.typeSymbol
-
- if (isNumericValueClass(qualClass) && isNumericValueClass(targClass))
- // convert numeric type casts
- atPos(tree.pos)(Apply(Select(qual1, "to" + targClass.name), List()))
- else
-*/
+ val qual1 = typedQualifier(qual, NOmode, ObjectTpe) // need to have an expected type, see #3037
+ // !!! Make pending/run/t5866b.scala work. The fix might be here and/or in unbox1.
if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
val noNullCheckNeeded = targ.tpe match {
- case ErasedValueType(tref) =>
- atPhase(currentRun.erasurePhase) {
- isPrimitiveValueClass(erasedValueClassArg(tref).typeSymbol)
- }
+ case ErasedValueType(_, underlying) =>
+ isPrimitiveValueClass(underlying.typeSymbol)
case _ =>
true
}
if (noNullCheckNeeded) unbox(qual1, targ.tpe)
else {
- def nullConst = Literal(Constant(null)) setType NullClass.tpe
val untyped =
// util.trace("new asinstanceof test") {
gen.evalOnce(qual1, context.owner, context.unit) { qual =>
- If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullClass.tpe)),
+ If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullTpe)),
Literal(Constant(null)) setType targ.tpe,
unbox(qual(), targ.tpe))
}
// }
typed(untyped)
}
- } else tree
+ } else treeCopy.Apply(tree, treeCopy.TypeApply(ta, treeCopy.Select(sel, qual1, name), List(targ)), List())
+
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_isInstanceOf =>
targ.tpe match {
- case ErasedValueType(tref) => targ.setType(tref.sym.tpe)
+ case ErasedValueType(clazz, _) => targ.setType(clazz.tpe)
case _ =>
}
tree
@@ -791,7 +596,7 @@ abstract class Erasure extends AddInterfaces
tree.symbol = NoSymbol
selectFrom(qual1)
} else if (isMethodTypeWithEmptyParams(qual1.tpe)) {
- assert(qual1.symbol.isStable, qual1.symbol);
+ assert(qual1.symbol.isStable, qual1.symbol)
val applied = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
adaptMember(selectFrom(applied))
} else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) {
@@ -812,23 +617,23 @@ abstract class Erasure extends AddInterfaces
/** A replacement for the standard typer's adapt method.
*/
- override protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree =
+ override protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree =
adaptToType(tree, pt)
/** A replacement for the standard typer's `typed1` method.
*/
- override def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
val tree1 = try {
tree match {
case InjectDerivedValue(arg) =>
(tree.attachments.get[TypeRefAttachment]: @unchecked) match {
case Some(itype) =>
val tref = itype.tpe
- val argPt = atPhase(currentRun.erasurePhase)(erasedValueClassArg(tref))
+ val argPt = enteringErasure(erasedValueClassArg(tref))
log(s"transforming inject $arg -> $tref/$argPt")
val result = typed(arg, mode, argPt)
log(s"transformed inject $arg -> $tref/$argPt = $result:${result.tpe}")
- return result setType ErasedValueType(tref)
+ return result setType ErasedValueType(tref.sym, result.tpe)
}
case _ =>
@@ -852,7 +657,7 @@ abstract class Erasure extends AddInterfaces
newCdef setType newCdef.body.tpe
}
def adaptBranch(branch: Tree): Tree =
- if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe);
+ if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe)
tree1 match {
case If(cond, thenp, elsep) =>
@@ -868,8 +673,7 @@ abstract class Erasure extends AddInterfaces
alt => alt == first || !(first.tpe looselyMatches alt.tpe)
}
if (tree.symbol ne sym1) {
- tree1.symbol = sym1
- tree1.tpe = sym1.tpe
+ tree1 setSymbol sym1 setType sym1.tpe
}
}
tree1
@@ -877,48 +681,40 @@ abstract class Erasure extends AddInterfaces
tree1
}
}
-
- private def isMethodTypeWithEmptyParams(tpe: Type) = tpe match {
- case MethodType(Nil, _) => true
- case _ => false
- }
}
/** The erasure transformer */
class ErasureTransformer(unit: CompilationUnit) extends Transformer {
- /** Emit an error if there is a double definition. This can happen if:
- *
- * - A template defines two members with the same name and erased type.
- * - A template defines and inherits two members `m` with different types,
- * but their erased types are the same.
- * - A template inherits two members `m` with different types,
- * but their erased types are the same.
- */
- private def checkNoDoubleDefs(root: Symbol) {
- def sameTypeAfterErasure(sym1: Symbol, sym2: Symbol) =
- afterPostErasure(sym1.info =:= sym2.info) && !sym1.isMacro && !sym2.isMacro
-
- def doubleDefError(sym1: Symbol, sym2: Symbol) {
- // the .toString must also be computed at the earlier phase
- val tpe1 = afterRefchecks(root.thisType.memberType(sym1))
- val tpe2 = afterRefchecks(root.thisType.memberType(sym2))
- if (!tpe1.isErroneous && !tpe2.isErroneous)
- unit.error(
- if (sym1.owner == root) sym1.pos else root.pos,
- (if (sym1.owner == sym2.owner) "double definition:\n"
- else if (sym1.owner == root) "name clash between defined and inherited member:\n"
- else "name clash between inherited members:\n") +
- sym1 + ":" + afterRefchecks(tpe1.toString) +
- (if (sym1.owner == root) "" else sym1.locationString) + " and\n" +
- sym2 + ":" + afterRefchecks(tpe2.toString) +
- (if (sym2.owner == root) " at line " + (sym2.pos).line else sym2.locationString) +
- "\nhave same type" +
- (if (afterRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + afterPostErasure(sym1.tpe)))
- sym1.setInfo(ErrorType)
+ import overridingPairs.Cursor
+
+ private def doubleDefError(pair: SymbolPair) {
+ import pair._
+
+ if (!pair.isErroneous) {
+ val what = (
+ if (low.owner == high.owner) "double definition"
+ else if (low.owner == base) "name clash between defined and inherited member"
+ else "name clash between inherited members"
+ )
+ val when = if (exitingRefchecks(lowType matches highType)) "" else " after erasure: " + exitingPostErasure(highType)
+
+ unit.error(pos,
+ s"""|$what:
+ |${exitingRefchecks(highString)} and
+ |${exitingRefchecks(lowString)}
+ |have same type$when""".trim.stripMargin
+ )
}
+ low setInfo ErrorType
+ }
- val decls = root.info.decls
+ private def sameTypeAfterErasure(sym1: Symbol, sym2: Symbol) =
+ exitingPostErasure(sym1.info =:= sym2.info) && !sym1.isMacro && !sym2.isMacro
+ /** TODO - adapt SymbolPairs so it can be used here. */
+ private def checkNoDeclaredDoubleDefs(base: Symbol) {
+ val decls = base.info.decls
+
// SI-8010 force infos, otherwise makeNotPrivate in ExplicitOuter info transformer can trigger
// a scope rehash while we're iterating and we can see the same entry twice!
// Inspection of SymbolPairs (the basis of OverridingPairs), suggests that it is immune
@@ -928,62 +724,52 @@ abstract class Erasure extends AddInterfaces
// Why not just create a temporary scope here? We need to force the name changes in any case before
// we do these checks, so that we're comparing same-named methods based on the expanded names that actually
// end up in the bytecode.
- afterPostErasure(decls.foreach(_.info))
-
+ exitingPostErasure(decls.foreach(_.info))
+
var e = decls.elems
while (e ne null) {
if (e.sym.isTerm) {
- var e1 = decls.lookupNextEntry(e)
+ var e1 = decls lookupNextEntry e
while (e1 ne null) {
- assert(e.sym ne e1.sym, s"Internal error: encountered ${e.sym.debugLocationString} twice during scope traversal. This might be related to SI-8010.")
- if (sameTypeAfterErasure(e1.sym, e.sym)) doubleDefError(e.sym, e1.sym)
- e1 = decls.lookupNextEntry(e1)
+ assert(e.sym ne e1.sym, s"Internal error: encountered ${e.sym.debugLocationString} twice during scope traversal. This might be related to SI-8010.")
+ if (sameTypeAfterErasure(e.sym, e1.sym))
+ doubleDefError(new SymbolPair(base, e.sym, e1.sym))
+
+ e1 = decls lookupNextEntry e1
}
}
e = e.next
}
+ }
- val opc = new overridingPairs.Cursor(root) {
- override def exclude(sym: Symbol): Boolean =
- (!sym.isTerm || sym.isPrivate || super.exclude(sym)
- // specialized members have no type history before 'specialize', causing double def errors for curried defs
- || !sym.hasTypeAt(currentRun.refchecksPhase.id))
-
- override def matches(sym1: Symbol, sym2: Symbol): Boolean =
- afterPostErasure(sym1.tpe =:= sym2.tpe)
+ /** Emit an error if there is a double definition. This can happen if:
+ *
+ * - A template defines two members with the same name and erased type.
+ * - A template defines and inherits two members `m` with different types,
+ * but their erased types are the same.
+ * - A template inherits two members `m` with different types,
+ * but their erased types are the same.
+ */
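+ // For example (a sketch): `def f(xs: List[Int]) = 0` and `def f(xs: List[String]) = 1` in the
+ // same template have different types before erasure but both erase to (xs: List)Int, so the
+ // pair is reported as a double definition here.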
+ private def checkNoDoubleDefs(root: Symbol) {
+ checkNoDeclaredDoubleDefs(root)
+ object opc extends Cursor(root) {
+ // specialized members have no type history before 'specialize', causing double def errors for curried defs
+ override def exclude(sym: Symbol): Boolean = (
+ sym.isType
+ || sym.isPrivate
+ || super.exclude(sym)
+ || !sym.hasTypeAt(currentRun.refchecksPhase.id)
+ )
+ override def matches(sym1: Symbol, sym2: Symbol) = true
}
- while (opc.hasNext) {
- if (!afterRefchecks(
- root.thisType.memberType(opc.overriding) matches
- root.thisType.memberType(opc.overridden)) &&
- sameTypeAfterErasure(opc.overriding, opc.overridden)) {
- debuglog("" + opc.overriding.locationString + " " +
- opc.overriding.infosString +
- opc.overridden.locationString + " " +
- opc.overridden.infosString)
- doubleDefError(opc.overriding, opc.overridden)
- }
- opc.next
+ def isErasureDoubleDef(pair: SymbolPair) = {
+ import pair._
+ log(s"Considering for erasure clash:\n$pair")
+ !exitingRefchecks(lowType matches highType) && sameTypeAfterErasure(low, high)
}
+ opc.iterator filter isErasureDoubleDef foreach doubleDefError
}
-/*
- for (bc <- root.info.baseClasses.tail; other <- bc.info.decls.toList) {
- if (other.isTerm && !other.isConstructor && !(other hasFlag (PRIVATE | BRIDGE))) {
- for (member <- root.info.nonPrivateMember(other.name).alternatives) {
- if (member != other &&
- !(member hasFlag BRIDGE) &&
- afterErasure(member.tpe =:= other.tpe) &&
- !afterRefchecks(
- root.thisType.memberType(member) matches root.thisType.memberType(other))) {
- debuglog("" + member.locationString + " " + member.infosString + other.locationString + " " + other.infosString);
- doubleDefError(member, other)
- }
- }
- }
- }
-*/
-
/** Add bridge definitions to a template. This means:
*
* If there is a concrete member `m` which overrides a member in a base
@@ -998,7 +784,6 @@ abstract class Erasure extends AddInterfaces
*/
private def bridgeDefs(owner: Symbol): (List[Tree], immutable.Set[Symbol]) = {
assert(phase == currentRun.erasurePhase, phase)
- debuglog("computing bridges for " + owner)
new ComputeBridges(unit, owner) compute()
}
@@ -1037,7 +822,6 @@ abstract class Erasure extends AddInterfaces
case Select(qual, _) => qual
case TypeApply(Select(qual, _), _) => qual
}
-
def preEraseAsInstanceOf = {
(fn: @unchecked) match {
case TypeApply(Select(qual, _), List(targ)) =>
@@ -1054,7 +838,7 @@ abstract class Erasure extends AddInterfaces
def preEraseIsInstanceOf = {
fn match {
case TypeApply(sel @ Select(qual, name), List(targ)) =>
- if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe)
+ if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefTpe)
unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
@@ -1065,7 +849,7 @@ abstract class Erasure extends AddInterfaces
List()) setPos tree.pos
targ.tpe match {
case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
- val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+ val cmpOp = if (targ.tpe <:< AnyValTpe) Any_equals else Object_eq
atPos(tree.pos) {
Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
}
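// Illustration (not part of the patch): the shape rejected near the top of preEraseIsInstanceOf,
// where a primitive qualifier is tested against a reference type. Hypothetical user code:
object IsInstanceOfExample {
  def bad = 5.isInstanceOf[AnyRef]   // rejected: "isInstanceOf cannot test if value types are references."
}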
@@ -1095,7 +879,9 @@ abstract class Erasure extends AddInterfaces
preEraseAsInstanceOf
} else if (fn.symbol == Any_isInstanceOf) {
preEraseIsInstanceOf
- } else if (fn.symbol.owner.isRefinementClass && !fn.symbol.isOverridingSymbol) {
+ } else if (fn.symbol.isOnlyRefinementMember) {
+ // !!! Another spot where we produce overloaded types (see test pos/t6301)
+ log(s"${fn.symbol.fullLocationString} originates in refinement class - call will be implemented via reflection.")
ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos
} else if (fn.symbol.isMethodWithExtension && !fn.symbol.tpe.isErroneous) {
Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args)
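// Illustration (not part of the patch): a call whose symbol is only a refinement member, so the
// isOnlyRefinementMember branch above turns it into an ApplyDynamic and it runs via reflection.
object RefinementCallExample {
  import scala.language.reflectiveCalls
  def describe(d: { def describe(): String }): String = d.describe()   // structural call => reflection
}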
@@ -1154,7 +940,8 @@ abstract class Erasure extends AddInterfaces
SelectFromArray(qual, name, erasure(tree.symbol)(qual.tpe)).copyAttrs(fn),
args)
}
- } else if (args.isEmpty && interceptedMethods(fn.symbol)) {
+ }
+ else if (args.isEmpty && interceptedMethods(fn.symbol)) {
if (poundPoundMethods.contains(fn.symbol)) {
// This is unattractive, but without it we crash here on ().## because after
// erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
@@ -1166,13 +953,24 @@ abstract class Erasure extends AddInterfaces
case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s)
case BooleanClass => If(qual, LIT(true.##), LIT(false.##))
case _ =>
- global.typer.typed(gen.mkRuntimeCall(nme.hash_, List(qual)))
+ // Since we are past typer, we need to avoid creating trees carrying
+ // overloaded types. This logic is custom (and technically incomplete,
+ // although serviceable) for def hash. What is really needed is for
+ // the overloading logic presently hidden away in a few different
+ // places to be properly exposed so we can just call "resolveOverload"
+ // after typer. Until then:
+ val alts = ScalaRunTimeModule.info.member(nme.hash_).alternatives
+ def alt1 = alts find (_.info.paramTypes.head =:= qual.tpe)
+ def alt2 = ScalaRunTimeModule.info.member(nme.hash_) suchThat (_.info.paramTypes.head.typeSymbol == AnyClass)
+ val newTree = gen.mkRuntimeCall(nme.hash_, qual :: Nil) setSymbol (alt1 getOrElse alt2)
+
+ global.typer.typed(newTree)
}
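// Illustration (not part of the patch): the effect of the overload pinning above. gen.mkRuntimeCall
// would otherwise leave an overloaded ScalaRunTime.hash symbol on the tree, which can no longer be
// re-typechecked after typer (the crash on ().## mentioned above). alt1 selects the alternative whose
// parameter type matches the qualifier exactly; alt2 is the Any-taking fallback used e.g. for ().##.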
} else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
// Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
} else if (primitiveGetClassMethods.contains(fn.symbol)) {
- // if we got here then we're trying to send a primitive getClass method to either
+ // if we got here then we're trying to send a primitive getClass method to either
// a) an Any, in which case Object_getClass works because Any erases to Object. Or
//
// b) a non-primitive, e.g. because the qualifier's type is a refinement type where one parent
@@ -1211,12 +1009,19 @@ abstract class Erasure extends AddInterfaces
preErase(fun)
case Select(qual, name) =>
- val owner = tree.symbol.owner
- // println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
+ val sym = tree.symbol
+ val owner = sym.owner
if (owner.isRefinementClass) {
- val overridden = tree.symbol.nextOverriddenSymbol
- assert(overridden != NoSymbol, tree.symbol)
- tree.symbol = overridden
+ sym.allOverriddenSymbols filterNot (_.owner.isRefinementClass) match {
+ case overridden :: _ =>
+ log(s"${sym.fullLocationString} originates in refinement class - replacing with ${overridden.fullLocationString}.")
+ tree.symbol = overridden
+ case Nil =>
+ // Ideally this should not be reached or reachable; anything which would
+ // get here should have been caught in the surrounding Apply.
+ devWarning(s"Failed to rewrite reflective apply - now don't know what to do with " + tree)
+ return treeCopy.Select(tree, gen.mkAttributedCast(qual, qual.tpe.widen), name)
+ }
}
def isAccessible(sym: Symbol) = localTyper.context.isAccessible(sym, sym.owner.thisType)
@@ -1243,7 +1048,7 @@ abstract class Erasure extends AddInterfaces
assert(!currentOwner.isImplClass)
//Console.println("checking no dble defs " + tree)//DEBUG
checkNoDoubleDefs(tree.symbol.owner)
- treeCopy.Template(tree, parents, emptyValDef, addBridges(body, currentOwner))
+ treeCopy.Template(tree, parents, noSelfType, addBridges(body, currentOwner))
case Match(selector, cases) =>
Match(Typed(selector, TypeTree(selector.tpe)), cases)
@@ -1251,7 +1056,7 @@ abstract class Erasure extends AddInterfaces
case Literal(ct) if ct.tag == ClazzTag
&& ct.typeValue.typeSymbol != definitions.UnitClass =>
val erased = ct.typeValue match {
- case TypeRef(pre, clazz, args) if clazz.isDerivedValueClass => scalaErasure.eraseNormalClassRef(pre, clazz)
+ case tr @ TypeRef(_, clazz, _) if clazz.isDerivedValueClass => scalaErasure.eraseNormalClassRef(tr)
case tpe => specialScalaErasure(tpe)
}
treeCopy.Literal(tree, Constant(erased))
@@ -1283,13 +1088,12 @@ abstract class Erasure extends AddInterfaces
tree1 setType specialScalaErasure(tree1.tpe)
case ArrayValue(elemtpt, trees) =>
treeCopy.ArrayValue(
- tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform) setType null
+ tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform).clearType()
case DefDef(_, _, _, _, tpt, _) =>
- val result = super.transform(tree1) setType null
- tpt.tpe = specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
- result
+ try super.transform(tree1).clearType()
+ finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
case _ =>
- super.transform(tree1) setType null
+ super.transform(tree1).clearType()
}
}
}
@@ -1301,11 +1105,11 @@ abstract class Erasure extends AddInterfaces
override def transform(tree: Tree): Tree = {
val tree1 = preTransformer.transform(tree)
// log("tree after pretransform: "+tree1)
- afterErasure {
+ exitingErasure {
val tree2 = mixinTransformer.transform(tree1)
// debuglog("tree after addinterfaces: \n" + tree2)
- newTyper(rootContext(unit, tree, true)).typed(tree2)
+ newTyper(rootContext(unit, tree, erasedTypes = true)).typed(tree2)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 970519ab7c..b2e071579e 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -3,14 +3,14 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import symtab._
import Flags.{ CASE => _, _ }
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
-import matching.{ Patterns, ParallelMatching }
import scala.tools.nsc.settings.ScalaVersion
/** This class ...
@@ -19,15 +19,12 @@ import scala.tools.nsc.settings.ScalaVersion
* @version 1.0
*/
abstract class ExplicitOuter extends InfoTransform
- with Patterns
- with ParallelMatching
with TypingTransformers
with ast.TreeDSL
{
import global._
import definitions._
import CODE._
- import Debug.TRACE
/** The following flags may be set by this phase: */
override def phaseNewFlags: Long = notPROTECTED
@@ -76,28 +73,20 @@ abstract class ExplicitOuter extends InfoTransform
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
override def transform(tree: Tree) = tree match {
- case Bind(_, body) if toRemove(tree.symbol) =>
- TRACE("Dropping unused binding: " + tree.symbol)
- super.transform(body)
+ case Bind(_, body) if toRemove(tree.symbol) => super.transform(body)
case _ => super.transform(tree)
}
}
- /** Issue a migration warning for instance checks which might be on an Array and
- * for which the type parameter conforms to Seq, because these answers changed in 2.8.
- */
- def isArraySeqTest(lhs: Type, rhs: Type) =
- (ArrayClass.tpe <:< lhs.widen) && (rhs.widen matchesPattern SeqClass.tpe)
-
def outerAccessor(clazz: Symbol): Symbol = {
val firstTry = clazz.info.decl(nme.expandedName(nme.OUTER, clazz))
if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry
else findOrElse(clazz.info.decls)(_.outerSource == clazz)(NoSymbol)
}
def newOuterAccessor(clazz: Symbol) = {
- val accFlags = SYNTHETIC | ARTIFACT | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
+ val accFlags = SYNTHETIC | ARTIFACT | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
val sym = clazz.newMethod(nme.OUTER, clazz.pos, accFlags)
- val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
+ val restpe = if (clazz.isTrait) clazz.outerClass.tpe_* else clazz.outerClass.thisType
sym expandName clazz
sym.referenced = clazz
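// Illustration (not part of the patch): in source terms, newOuterAccessor above gives an inner class
// a stable path back to its enclosing instance, conceptually a `def $outer: <OuterClass>` whose name
// is then expanded with the owner's name. A user-level equivalent of what the accessor provides:
class OuterSketch {
  class Inner {
    def enclosing: OuterSketch = OuterSketch.this   // what the synthetic $outer accessor answers
  }
}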
@@ -139,7 +128,7 @@ abstract class ExplicitOuter extends InfoTransform
* <ol>
* <li>
* Add an outer parameter to the formal parameters of a constructor
- * in a inner non-trait class;
+ * in an inner non-trait class;
* </li>
* <li>
* Add a protected $outer field to an inner class which is
@@ -187,16 +176,13 @@ abstract class ExplicitOuter extends InfoTransform
var decls1 = decls
if (isInner(clazz) && !clazz.isInterface) {
decls1 = decls.cloneScope
- val outerAcc = clazz.newMethod(nme.OUTER, clazz.pos) // 3
- outerAcc expandName clazz
-
- decls1 enter newOuterAccessor(clazz)
+ decls1 enter newOuterAccessor(clazz) // 3
if (hasOuterField(clazz)) //2
decls1 enter newOuterField(clazz)
}
if (!clazz.isTrait && !parents.isEmpty) {
for (mc <- clazz.mixinClasses) {
- val mixinOuterAcc: Symbol = afterExplicitOuter(outerAccessor(mc))
+ val mixinOuterAcc: Symbol = exitingExplicitOuter(outerAccessor(mc))
if (mixinOuterAcc != NoSymbol) {
if (skipMixinOuterAccessor(clazz, mc))
debuglog(s"Reusing outer accessor symbol of $clazz for the mixin outer accessor of $mc")
@@ -230,7 +216,7 @@ abstract class ExplicitOuter extends InfoTransform
* values for outer parameters of constructors.
* The class provides methods for referencing via outer.
*/
- abstract class OuterPathTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ abstract class OuterPathTransformer(unit: CompilationUnit) extends TypingTransformer(unit) with UnderConstructionTransformer {
/** The directly enclosing outer parameter, if we are in a constructor */
protected var outerParam: Symbol = NoSymbol
@@ -239,9 +225,10 @@ abstract class ExplicitOuter extends InfoTransform
*
* Will return `EmptyTree` if there is no outer accessor because of a premature self reference.
*/
- protected def outerValue: Tree =
- if (outerParam != NoSymbol) ID(outerParam)
- else outerSelect(THIS(currentClass))
+ protected def outerValue: Tree = outerParam match {
+ case NoSymbol => outerSelect(gen.mkAttributedThis(currentClass))
+ case outerParam => gen.mkAttributedIdent(outerParam)
+ }
/** Select and apply outer accessor from 'base'
* The result is typed but not positioned.
@@ -281,11 +268,6 @@ abstract class ExplicitOuter extends InfoTransform
* <blockquote><pre>`base'.$outer$$C1 ... .$outer$$Cn</pre></blockquote>
* which refers to the outer instance of class `to` of
* value `base`. The result is typed but not positioned.
- *
- * @param base ...
- * @param from ...
- * @param to ...
- * @return ...
*/
protected def outerPath(base: Tree, from: Symbol, to: Symbol): Tree = {
//Console.println("outerPath from "+from+" to "+to+" at "+base+":"+base.tpe)
@@ -294,34 +276,19 @@ abstract class ExplicitOuter extends InfoTransform
else outerPath(outerSelect(base), from.outerClass, to)
}
-
- /** The stack of class symbols in which a call to this() or to the super
- * constructor, or early definition is active
- */
- protected def isUnderConstruction(clazz: Symbol) = selfOrSuperCalls contains clazz
- protected val selfOrSuperCalls = mutable.Stack[Symbol]()
- @inline protected def inSelfOrSuperCall[A](sym: Symbol)(a: => A) = {
- selfOrSuperCalls push sym
- try a finally selfOrSuperCalls.pop()
- }
-
override def transform(tree: Tree): Tree = {
+ def sym = tree.symbol
val savedOuterParam = outerParam
try {
tree match {
case Template(_, _, _) =>
outerParam = NoSymbol
- case DefDef(_, _, _, vparamss, _, _) =>
- if (tree.symbol.isClassConstructor && isInner(tree.symbol.owner)) {
- outerParam = vparamss.head.head.symbol
- assert(outerParam.name startsWith nme.OUTER, outerParam.name)
- }
+ case DefDef(_, _, _, (param :: _) :: _, _, _) if sym.isClassConstructor && isInner(sym.owner) =>
+ outerParam = param.symbol
+ assert(outerParam.name startsWith nme.OUTER, outerParam.name)
case _ =>
}
- if ((treeInfo isSelfOrSuperConstrCall tree) || (treeInfo isEarlyDef tree))
- inSelfOrSuperCall(currentOwner.owner)(super.transform(tree))
- else
- super.transform(tree)
+ super.transform(tree)
}
finally outerParam = savedOuterParam
}
@@ -387,22 +354,14 @@ abstract class ExplicitOuter extends InfoTransform
/** The definition tree of the outer field of current class
*/
- def outerFieldDef: Tree =
- VAL(outerField(currentClass)) === EmptyTree
+ def outerFieldDef: Tree = ValDef(outerField(currentClass))
/** The definition tree of the outer accessor of current class
*/
- def outerAccessorDef: Tree = {
- val outerAcc = outerAccessor(currentClass)
- var rhs: Tree =
- if (outerAcc.isDeferred) EmptyTree
- else This(currentClass) DOT outerField(currentClass)
-
- /** If we don't re-type the tree, we see self-type related crashes like #266.
- */
- localTyper typed {
- (DEF(outerAcc) withPos currentClass.pos withType null) === rhs
- }
+ def outerAccessorDef: Tree = localTyper typed {
+ val acc = outerAccessor(currentClass)
+ val rhs = if (acc.isDeferred) EmptyTree else Select(This(currentClass), outerField(currentClass))
+ DefDef(acc, rhs)
}
/** The definition tree of the outer accessor for class mixinClass.
@@ -423,80 +382,8 @@ abstract class ExplicitOuter extends InfoTransform
else if (mixinPrefix.typeArgs.nonEmpty) gen.mkAttributedThis(mixinPrefix.typeSymbol)
else gen.mkAttributedQualifier(mixinPrefix)
)
- localTyper typed {
- (DEF(outerAcc) withPos currentClass.pos) === {
- // Need to cast for nested outer refs in presence of self-types. See ticket #3274.
- gen.mkCast(transformer.transform(path), outerAcc.info.resultType)
- }
- }
- }
-
- // requires settings.XoldPatmat.value
- def matchTranslation(tree: Match) = {
- val Match(selector, cases) = tree
- var nselector = transform(selector)
-
- def makeGuardDef(vs: List[Symbol], guard: Tree) = {
- val gdname = unit.freshTermName("gd")
- val method = currentOwner.newMethod(gdname, tree.pos, SYNTHETIC)
- val params = method newSyntheticValueParams vs.map(_.tpe)
- method setInfo new MethodType(params, BooleanClass.tpe)
-
- localTyper typed {
- DEF(method) === guard.changeOwner(currentOwner -> method).substituteSymbols(vs, params)
- }
- }
-
- val nguard = new ListBuffer[Tree]
- val ncases =
- for (CaseDef(pat, guard, body) <- cases) yield {
- // Strip out any unused pattern bindings up front
- val patternIdents = for (b @ Bind(_, _) <- pat) yield b.symbol
- val references: Set[Symbol] = Set(guard, body) flatMap { t => for (id @ Ident(name) <- t) yield id.symbol }
- val (used, unused) = patternIdents partition references
- val strippedPat = if (unused.isEmpty) pat else new RemoveBindingsTransformer(unused.toSet) transform pat
-
- val gdcall =
- if (guard == EmptyTree) EmptyTree
- else {
- val guardDef = makeGuardDef(used, guard)
- nguard += transform(guardDef) // building up list of guards
-
- localTyper typed (Ident(guardDef.symbol) APPLY (used map Ident))
- }
-
- (CASE(transform(strippedPat)) IF gdcall) ==> transform(body)
- }
-
- val (checkExhaustive, requireSwitch) = nselector match {
- case Typed(nselector1, tpt) =>
- val unchecked = tpt.tpe hasAnnotation UncheckedClass
- if (unchecked)
- nselector = nselector1
-
- // Don't require a tableswitch if there are 1-2 casedefs
- // since the matcher intentionally emits an if-then-else.
- (!unchecked, treeInfo.isSwitchAnnotation(tpt.tpe) && ncases.size > 2)
- case _ =>
- (true, false)
- }
-
- val t = atPos(tree.pos) {
- val context = MatrixContext(currentUnit, transform, localTyper, currentOwner, tree.tpe)
- val t_untyped = handlePattern(nselector, ncases, checkExhaustive, context)
-
- /* if @switch annotation is present, verify the resulting tree is a Match */
- if (requireSwitch) t_untyped match {
- case Block(_, Match(_, _)) => // ok
- case _ =>
- unit.error(tree.pos, "could not emit switch for @switch annotated match")
- }
-
- localTyper.typed(t_untyped, context.matchResultType)
- }
-
- if (nguard.isEmpty) t
- else Block(nguard.toList, t) setType t.tpe
+ // Need to cast for nested outer refs in presence of self-types. See ticket #3274.
+ localTyper typed DefDef(outerAcc, gen.mkCast(transformer.transform(path), outerAcc.info.resultType))
}
/** The main transformation method */
@@ -583,14 +470,10 @@ abstract class ExplicitOuter extends InfoTransform
})
super.transform(treeCopy.Apply(tree, sel, outerVal :: args))
- // entry point for pattern matcher translation
- case m: Match if settings.XoldPatmat.value => // the new pattern matcher runs in its own phase right after typer
- matchTranslation(m)
-
// for the new pattern matcher
// base.<outer>.eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE
// TODO remove the synthetic `<outer>` method from outerFor??
- case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) if !settings.XoldPatmat.value =>
+ case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) =>
val outerFor = sel.symbol.owner.toInterface // TODO: toInterface necessary?
val acc = outerAccessor(outerFor)
@@ -599,24 +482,17 @@ abstract class ExplicitOuter extends InfoTransform
// at least don't crash... this duplicates maybeOmittable from constructors
(acc.owner.isEffectivelyFinal && !acc.isOverridingSymbol)) {
unit.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.")
- return transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
+ transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
} else {
// println("(base, acc)= "+(base, acc))
val outerSelect = localTyper typed Apply(Select(base, acc), Nil)
// achieves the same as: localTyper typed atPos(tree.pos)(outerPath(base, base.tpe.typeSymbol, outerFor.outerClass))
// println("(b, tpsym, outerForI, outerFor, outerClass)= "+ (base, base.tpe.typeSymbol, outerFor, sel.symbol.owner, outerFor.outerClass))
// println("outerSelect = "+ outerSelect)
- return transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args))
+ transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args))
}
case _ =>
- if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
- case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
- if (isArraySeqTest(qual.tpe, args.head.tpe))
- unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
- case _ => ()
- }
-
val x = super.transform(tree)
if (x.tpe eq null) x
else x setType transformInfo(currentOwner, x.tpe)
@@ -625,7 +501,7 @@ abstract class ExplicitOuter extends InfoTransform
/** The transformation method for whole compilation units */
override def transformUnit(unit: CompilationUnit) {
- afterExplicitOuter(super.transformUnit(unit))
+ exitingExplicitOuter(super.transformUnit(unit))
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index e0c0cd0fdb..2235a93ca4 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -8,9 +8,6 @@ package transform
import symtab._
import Flags._
import scala.collection.{ mutable, immutable }
-import scala.collection.mutable
-import scala.tools.nsc.util.FreshNameCreator
-import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple }
/**
* Perform Step 1 in the inline classes SIP: Creates extension methods for all
@@ -23,7 +20,6 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
import global._ // the global environment
import definitions._ // standard classes and methods
- import typer.{ typed, atOwner } // methods to type trees
/** the following two members override abstract members in Transform */
val phaseName: String = "extmethods"
@@ -70,7 +66,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
}
/** Return the extension method that corresponds to given instance method `meth`. */
- def extensionMethod(imeth: Symbol): Symbol = atPhase(currentRun.refchecksPhase) {
+ def extensionMethod(imeth: Symbol): Symbol = enteringPhase(currentRun.refchecksPhase) {
val companionInfo = companionModuleForce(imeth.owner).info
val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
@@ -87,7 +83,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
|
| ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")}
|
- | Eligible Names: ${extensionNames(imeth).mkString(",")}"""")
+ | Eligible Names: ${extensionNames(imeth).mkString(",")}" """)
matching.head
}
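// Illustration (not part of the patch): the encoding this lookup resolves against. A value class
// method gets a twin on the companion, named by extensionNames (roughly <name>$extension), that takes
// the receiver explicitly. Hypothetical example:
class Meter(val underlying: Double) extends AnyVal {
  def plus(other: Meter): Meter = new Meter(underlying + other.underlying)
}
// is compiled roughly as
//   object Meter { def plus$extension($this: Meter)(other: Meter): Meter = ... }
// and extensionMethod(imeth) maps the symbol of `plus` to that of `plus$extension`.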
@@ -133,7 +129,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
if (seen contains clazz)
unit.error(pos, "value class may not unbox to itself")
else {
- val unboxed = erasure.underlyingOfValueClass(clazz).typeSymbol
+ val unboxed = definitions.underlyingOfValueClass(clazz).typeSymbol
if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
}
@@ -185,6 +181,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
// bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
// good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
}
+
override def transform(tree: Tree): Tree = {
tree match {
case Template(_, _, _) =>
@@ -194,6 +191,9 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
checkNonCyclic(currentOwner.pos, Set(), currentOwner) */
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
+ // SI-7859 make param accessors accessible so the erasure can generate unbox operations.
+ val paramAccessors = currentOwner.info.decls.filter(sym => sym.isParamAccessor && sym.isMethod)
+ paramAccessors.foreach(_.makeNotPrivate(currentOwner))
super.transform(tree)
} else if (currentOwner.isStaticOwner) {
super.transform(tree)
@@ -206,11 +206,12 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val companion = origThis.companionModule
def makeExtensionMethodSymbol = {
- val extensionName = extensionNames(origMeth).head
+ val extensionName = extensionNames(origMeth).head.toTermName
val extensionMeth = (
- companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
setAnnotations origMeth.annotations
)
+ origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now.
companion.info.decls.enter(extensionMeth)
}
@@ -224,21 +225,22 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val extensionParams = allParameters(extensionMono)
val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos)
- val extensionBody = (
- rhs
+ val extensionBody: Tree = {
+ val tree = rhs
.substituteSymbols(origTpeParams, extensionTpeParams)
.substituteSymbols(origParams, extensionParams)
.substituteThis(origThis, extensionThis)
.changeOwner(origMeth -> extensionMeth)
- )
+ new SubstututeRecursion(origMeth, extensionMeth, unit).transform(tree)
+ }
val castBody =
if (extensionBody.tpe <:< extensionMono.finalResultType)
extensionBody
else
gen.mkCastPreservingAnnotations(extensionBody, extensionMono.finalResultType) // SI-7818 e.g. mismatched existential skolems
- // Record the extension method ( FIXME: because... ? )
- extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, castBody))
+ // Record the extension method. Later, in `Extender#transformStats`, these will be added to the companion object.
+ extensionDefs(companion) += DefDef(extensionMeth, castBody)
// These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
// which leaves the actual argument application for extensionCall.
@@ -261,14 +263,43 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] =
super.transformStats(stats, exprOwner) map {
- case md @ ModuleDef(_, _, _) if extensionDefs contains md.symbol =>
- val defns = extensionDefs(md.symbol).toList map (member =>
- atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(member))
- )
- extensionDefs -= md.symbol
- deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ defns))
+ case md @ ModuleDef(_, _, _) =>
+ val extraStats = extensionDefs remove md.symbol match {
+ case Some(defns) => defns.toList map (defn => atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(defn.duplicate)))
+ case _ => Nil
+ }
+ if (extraStats.isEmpty) md
+ else deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ extraStats))
case stat =>
stat
}
}
+
+ final class SubstututeRecursion(origMeth: Symbol, extensionMeth: Symbol,
+ unit: CompilationUnit) extends TypingTransformer(unit) {
+ override def transform(tree: Tree): Tree = tree match {
+ // SI-6574 Rewrite recursive calls against the extension method so they can
+ // be tail call optimized later. The tailcalls phase comes before
+ // erasure, which performs this translation more generally at all call
+ // sites.
+ //
+ // // Source
+ // class C[C] { def meth[M](a: A) = { { <expr>: C[C'] }.meth[M'] } }
+ //
+ // // Translation
+ // class C[C] { def meth[M](a: A) = { { <expr>: C[C'] }.meth[M'](a1) } }
+ // object C { def meth$extension[M, C](this$: C[C], a: A)
+ // = { meth$extension[M', C']({ <expr>: C[C'] })(a1) } }
+ case treeInfo.Applied(sel @ Select(qual, _), targs, argss) if sel.symbol == origMeth =>
+ localTyper.typedPos(tree.pos) {
+ val allArgss = List(qual) :: argss
+ val origThis = extensionMeth.owner.companionClass
+ val baseType = qual.tpe.baseType(origThis)
+ val allTargs = targs.map(_.tpe) ::: baseType.typeArgs
+ val fun = gen.mkAttributedTypeApply(gen.mkAttributedThis(extensionMeth.owner), extensionMeth, allTargs)
+ allArgss.foldLeft(fun)(Apply(_, _))
+ }
+ case _ => super.transform(tree)
+ }
+ }
}
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index cd26f95958..b4329965fc 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -8,29 +8,24 @@ package transform
import symtab._
import Flags._
-import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
abstract class Flatten extends InfoTransform {
import global._
- import definitions._
+ import treeInfo.isQualifierSafeToElide
/** the following two members override abstract members in Transform */
val phaseName: String = "flatten"
- /** Updates the owning scope with the given symbol; returns the old symbol.
+ /** Updates the owning scope with the given symbol, unlinking any others.
*/
- private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = afterFlatten {
+ private def replaceSymbolInCurrentScope(sym: Symbol): Unit = exitingFlatten {
val scope = sym.owner.info.decls
- val old = scope lookup sym.name andAlso scope.unlink
+ val old = (scope lookupUnshadowedEntries sym.name).toList
+ old foreach (scope unlink _)
scope enter sym
-
- if (old eq NoSymbol)
- log(s"lifted ${sym.fullLocationString}")
- else
- log(s"lifted ${sym.fullLocationString} after unlinking existing $old from scope.")
-
- old
+ def old_s = old map (_.sym) mkString ", "
+ debuglog(s"In scope of ${sym.owner}, unlinked $old_s and entered $sym")
}
private def liftClass(sym: Symbol) {
@@ -53,7 +48,7 @@ abstract class Flatten extends InfoTransform {
clazz.isClass && !clazz.isPackageClass && {
// Cannot flatten here: class A[T] { object B }
// was "at erasurePhase.prev"
- beforeErasure(clazz.typeParams.isEmpty)
+ enteringErasure(clazz.typeParams.isEmpty)
}
}
@@ -67,11 +62,11 @@ abstract class Flatten extends InfoTransform {
val decls1 = scopeTransform(clazz) {
val decls1 = newScope
if (clazz.isPackageClass) {
- afterFlatten { decls foreach (decls1 enter _) }
+ exitingFlatten { decls foreach (decls1 enter _) }
}
else {
val oldowner = clazz.owner
- afterFlatten { oldowner.info }
+ exitingFlatten { oldowner.info }
parents1 = parents mapConserve (this)
for (sym <- decls) {
@@ -90,7 +85,7 @@ abstract class Flatten extends InfoTransform {
val restp1 = apply(restp)
if (restp1 eq restp) tp else copyMethodType(tp, params, restp1)
case PolyType(tparams, restp) =>
- val restp1 = apply(restp);
+ val restp1 = apply(restp)
if (restp1 eq restp) tp else PolyType(tparams, restp1)
case _ =>
mapOver(tp)
@@ -105,25 +100,44 @@ abstract class Flatten extends InfoTransform {
/** Buffers for lifted out classes */
private val liftedDefs = perRunCaches.newMap[Symbol, ListBuffer[Tree]]()
- override def transform(tree: Tree): Tree = {
+ override def transform(tree: Tree): Tree = postTransform {
tree match {
case PackageDef(_, _) =>
liftedDefs(tree.symbol.moduleClass) = new ListBuffer
+ super.transform(tree)
case Template(_, _, _) if tree.symbol.isDefinedInPackage =>
liftedDefs(tree.symbol.owner) = new ListBuffer
+ super.transform(tree)
+ case ClassDef(_, _, _, _) if tree.symbol.isNestedClass =>
+ // SI-5508 Ordering is important. In `object O { trait A { trait B } }`, we want `B` to appear after `A` in
+ // the sequence of lifted trees in the enclosing package. Why does this matter? Currently, mixin
+ // needs to transform `A` first to have a chance to create accessors for private[this] trait fields
+ // *before* it transforms inner classes that refer to them. This also fixes SI-6231.
+ //
+ // Alternative solutions
+ // - create the private[this] accessors eagerly in Namer (but would this cover private[this] fields
+ // added in later phases of compilation?)
+ // - move the accessor creation to the Mixin info transformer
+ val liftedBuffer = liftedDefs(tree.symbol.enclosingTopLevelClass.owner)
+ val index = liftedBuffer.length
+ liftedBuffer.insert(index, super.transform(tree))
+ EmptyTree
case _ =>
+ super.transform(tree)
}
- postTransform(super.transform(tree))
}
private def postTransform(tree: Tree): Tree = {
val sym = tree.symbol
val tree1 = tree match {
- case ClassDef(_, _, _, _) if sym.isNestedClass =>
- liftedDefs(sym.enclosingTopLevelClass.owner) += tree
- EmptyTree
- case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) =>
- afterFlatten(atPos(tree.pos)(gen.mkAttributedRef(sym)))
+ case Select(qual, name) if sym.isStaticModule && !sym.isTopLevel =>
+ exitingFlatten {
+ atPos(tree.pos) {
+ val ref = gen.mkAttributedRef(sym)
+ if (isQualifierSafeToElide(qual)) ref
+ else Block(List(qual), ref).setType(tree.tpe) // need to execute the qualifier but refer directly to the lifted module.
+ }
+ }
case _ =>
tree
}
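// Illustration (not part of the patch): the shape handled by the Select case above. A module that is
// static but not top-level is lifted by flatten, and selections of it are re-pointed at the lifted
// symbol; the qualifier is only kept (inside a Block) when isQualifierSafeToElide says it may matter.
object ContainerExample {
  object Nested { val answer = 42 }   // static, non-top-level module: lifted by flatten
}
// ContainerExample.Nested.answer ends up referring to the lifted module directly.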
@@ -133,7 +147,10 @@ abstract class Flatten extends InfoTransform {
/** Transform statements and add lifted definitions to them. */
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
val stats1 = super.transformStats(stats, exprOwner)
- if (currentOwner.isPackageClass) stats1 ::: liftedDefs(currentOwner).toList
+ if (currentOwner.isPackageClass) {
+ val lifted = liftedDefs(currentOwner).toList
+ stats1 ::: lifted
+ }
else stats1
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
index b6dbacaa29..dc321e26ca 100644
--- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
@@ -10,11 +10,11 @@ package transform
* An InfoTransform contains a compiler phase that transforms trees and symbol infos -- making sure they stay consistent.
* The symbol info is transformed assuming it is consistent right before this phase.
* The info transformation is triggered by Symbol::rawInfo, which caches the results in the symbol's type history.
- * This way sym.info (during an atPhase(p)) can look up what the symbol's info should look like at the beginning of phase p.
+ * This way sym.info (during an enteringPhase(p)) can look up what the symbol's info should look like at the beginning of phase p.
* (If the transformed info had not been stored yet, rawInfo will compute the info by composing the info-transformers
* of the most recent phase before p, up to the transformer of the phase right before p.)
*
- * Concretely, atPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense.
+ * Concretely, enteringPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense.
*/
trait InfoTransform extends Transform {
import global.{Symbol, Type, InfoTransformer, infoTransformers}
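// Usage sketch (not part of the patch) of the contract documented above; `global` (a
// scala.tools.nsc.Global instance) and a symbol `sym` are assumed to be in scope.
//   global.enteringErasure(sym.info)   // the info as it stood before erasure's info transform
//   global.exitingErasure(sym.info)    // the info after erasure's info transform has been applied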
diff --git a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
index 0af3cf732f..1bbe1b8410 100644
--- a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
@@ -1,9 +1,11 @@
package scala.tools.nsc
package transform
-trait InlineErasure { self: Erasure =>
-
+trait InlineErasure {
+ self: Erasure =>
+
+/*
import global._
import definitions._
-
-} \ No newline at end of file
+ */
+}
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 631468dd0c..acef2a50d8 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -8,9 +8,8 @@ package transform
import symtab._
import Flags._
-import util.TreeSet
import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet }
+import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet, TreeSet }
abstract class LambdaLift extends InfoTransform {
import global._
@@ -32,6 +31,21 @@ abstract class LambdaLift extends InfoTransform {
}
}
+ /** scala.runtime.*Ref classes */
+ private lazy val allRefClasses: Set[Symbol] = {
+ refClass.values.toSet ++ volatileRefClass.values.toSet ++ Set(VolatileObjectRefClass, ObjectRefClass)
+ }
+
+ /** Each scala.runtime.*Ref class has a static method `create(value)` that simply instantiates the Ref to carry that value. */
+ private lazy val refCreateMethod: Map[Symbol, Symbol] = {
+ mapFrom(allRefClasses.toList)(x => getMemberMethod(x.companionModule, nme.create))
+ }
+
+ /** Quite frequently a *Ref is initialized with its zero (e.g., null, 0.toByte, etc.). The `zero()` method of each *Ref class encapsulates that pattern. */
+ private lazy val refZeroMethod: Map[Symbol, Symbol] = {
+ mapFrom(allRefClasses.toList)(x => getMemberMethod(x.companionModule, nme.zero))
+ }
+
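// Illustration (not part of the patch): the runtime Ref cells the helpers above resolve. A local
// variable captured by a nested function is boxed in one of these cells; the ValDef rewrite further
// below emits the corresponding create/zero factory call. Hand-written equivalent:
object CapturedVarExample {
  def counter(): () => Int = {
    val i = scala.runtime.IntRef.create(0)   // lambdalift's view of a captured `var i = 0`
    () => { i.elem += 1; i.elem }
  }
}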
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isCapturedVariable) capturedVariableType(sym, tpe = lifted(tp), erasedTypes = true)
else lifted(tp)
@@ -41,6 +55,8 @@ abstract class LambdaLift extends InfoTransform {
class LambdaLifter(unit: CompilationUnit) extends explicitOuter.OuterPathTransformer(unit) {
+ private type SymSet = TreeSet[Symbol]
+
/** A map storing free variables of functions and classes */
private val free = new LinkedHashMap[Symbol, SymSet]
@@ -53,6 +69,12 @@ abstract class LambdaLift extends InfoTransform {
/** Symbols that are called from an inner class. */
private val calledFromInner = new LinkedHashSet[Symbol]
+ private val ord = Ordering.fromLessThan[Symbol](_ isLess _)
+ private def newSymSet = TreeSet.empty[Symbol](ord)
+
+ private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
+ f.getOrElseUpdate(sym, newSymSet)
+
/** The set of symbols that need to be renamed. */
private val renamable = newSymSet
@@ -92,13 +114,6 @@ abstract class LambdaLift extends InfoTransform {
/** Buffers for lifted out classes and methods */
private val liftedDefs = new LinkedHashMap[Symbol, List[Tree]]
- private type SymSet = TreeSet[Symbol]
-
- private def newSymSet = new TreeSet[Symbol](_ isLess _)
-
- private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
- f.getOrElseUpdate(sym, newSymSet)
-
private def isSameOwnerEnclosure(sym: Symbol) =
sym.owner.logicallyEnclosingMember == currentOwner.logicallyEnclosingMember
@@ -140,10 +155,10 @@ abstract class LambdaLift extends InfoTransform {
else {
val ss = symSet(free, enclosure)
if (!ss(sym)) {
- ss addEntry sym
- renamable addEntry sym
+ ss += sym
+ renamable += sym
changedFreeVars = true
- debuglog("" + sym + " is free in " + enclosure);
+ debuglog("" + sym + " is free in " + enclosure)
if (sym.isVariable) sym setFlag CAPTURED
}
!enclosure.isClass
@@ -153,7 +168,7 @@ abstract class LambdaLift extends InfoTransform {
private def markCalled(sym: Symbol, owner: Symbol) {
debuglog("mark called: " + sym + " of " + sym.owner + " is called by " + owner)
- symSet(called, owner) addEntry sym
+ symSet(called, owner) += sym
if (sym.enclClass != owner.enclClass) calledFromInner += sym
}
@@ -161,7 +176,7 @@ abstract class LambdaLift extends InfoTransform {
private val freeVarTraverser = new Traverser {
override def traverse(tree: Tree) {
try { //debug
- val sym = tree.symbol;
+ val sym = tree.symbol
tree match {
case ClassDef(_, _, _, _) =>
liftedDefs(tree.symbol) = Nil
@@ -180,17 +195,17 @@ abstract class LambdaLift extends InfoTransform {
if (sym.isImplClass)
localImplClasses((sym.owner, tpnme.interfaceName(sym.name))) = sym
else {
- renamable addEntry sym
+ renamable += sym
if (sym.isTrait)
localTraits((sym, sym.name)) = sym.owner
}
}
case DefDef(_, _, _, _, _, _) =>
if (sym.isLocal) {
- renamable addEntry sym
+ renamable += sym
sym setFlag (PrivateLocal | FINAL)
} else if (sym.isPrimaryConstructor) {
- symSet(called, sym) addEntry sym.owner
+ symSet(called, sym) += sym.owner
}
case Ident(name) =>
if (sym == NoSymbol) {
@@ -199,7 +214,7 @@ abstract class LambdaLift extends InfoTransform {
val owner = currentOwner.logicallyEnclosingMember
if (sym.isTerm && !sym.isMethod) markFree(sym, owner)
else if (sym.isMethod) markCalled(sym, owner)
- //symSet(called, owner) addEntry sym
+ //symSet(called, owner) += sym
}
case Select(_, _) =>
if (sym.isConstructor && sym.owner.isLocal)
@@ -209,7 +224,7 @@ abstract class LambdaLift extends InfoTransform {
super.traverse(tree)
} catch {//debug
case ex: Throwable =>
- Console.println("exception when traversing " + tree)
+ Console.println(s"$ex while traversing $tree")
throw ex
}
}
@@ -245,16 +260,15 @@ abstract class LambdaLift extends InfoTransform {
freshen(sym.name + nme.NAME_JOIN_STRING + sym.owner.name + nme.NAME_JOIN_STRING)
} else {
// SI-5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?)
- // Generating a a unique name, mangled with the enclosing class name, avoids a VerifyError
+ // Generating a unique name, mangled with the enclosing class name, avoids a VerifyError
// in the case that a sub-class happens to lift out a method with the *same* name.
- val name = freshen(sym.name + nme.NAME_JOIN_STRING)
- if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name, sym.enclClass)
+ val name = freshen("" + sym.name + nme.NAME_JOIN_STRING)
+ if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name.toTermName, sym.enclClass)
else name
}
}
- /** Rename a trait's interface and implementation class in coordinated fashion.
- */
+ /* Rename a trait's interface and implementation class in coordinated fashion. */
def renameTrait(traitSym: Symbol, implSym: Symbol) {
val originalImplName = implSym.name
renameSym(traitSym)
@@ -290,7 +304,7 @@ abstract class LambdaLift extends InfoTransform {
proxies(owner) =
for (fv <- freeValues.toList) yield {
val proxyName = proxyNames.getOrElse(fv, fv.name)
- val proxy = owner.newValue(proxyName, owner.pos, newFlags) setInfo fv.info
+ val proxy = owner.newValue(proxyName.toTermName, owner.pos, newFlags.toLong) setInfo fv.info
if (owner.isClass) owner.info.decls enter proxy
proxy
}
@@ -422,8 +436,15 @@ abstract class LambdaLift extends InfoTransform {
private def liftDef(tree: Tree): Tree = {
val sym = tree.symbol
val oldOwner = sym.owner
- if (sym.owner.isAuxiliaryConstructor && sym.isMethod) // # bug 1909
- sym setFlag STATIC
+ if (sym.isMethod && isUnderConstruction(sym.owner.owner)) { // # bug 1909
+ if (sym.isModule) { // Yes, it can be a module and a method, see comments on `isModuleNotMethod`!
+ // TODO promote to an implementation restriction if we can reason that this *always* leads to VerifyError.
+ // See neg/t1909-object.scala
+ def msg = s"SI-1909 Unable to STATICally lift $sym, which is defined in the self- or super-constructor call of ${sym.owner.owner}. A VerifyError is likely."
+ devWarning(tree.pos, msg)
+ } else sym setFlag STATIC
+ }
+
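// Illustration (not part of the patch) of the SI-1909 shape guarded against above (cf.
// neg/t1909-object.scala): a module defined inside a self-constructor call is lifted while the
// enclosing instance is still under construction, so it cannot safely be made STATIC.
class ModuleInCtorExample(x: Int) {
  def this() = this({
    object Local { val v = 1 }   // defined in the self-constructor call of ModuleInCtorExample
    Local.v
  })
}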
sym.owner = sym.owner.enclClass
if (sym.isClass) sym.owner = sym.owner.toInterface
if (sym.isMethod) sym setFlag LIFTED
@@ -445,56 +466,21 @@ abstract class LambdaLift extends InfoTransform {
case ValDef(mods, name, tpt, rhs) =>
if (sym.isCapturedVariable) {
val tpt1 = TypeTree(sym.tpe) setPos tpt.pos
- /* Creating a constructor argument if one isn't present. */
- val constructorArg = rhs match {
- case EmptyTree =>
- sym.tpe.typeSymbol.primaryConstructor.info.paramTypes match {
- case List(tp) => gen.mkZero(tp)
- case _ =>
- debugwarn("Couldn't determine how to properly construct " + sym)
- rhs
- }
- case arg => arg
+
+ val refTypeSym = sym.tpe.typeSymbol
+
+ val factoryCall = typer.typedPos(rhs.pos) {
+ rhs match {
+ case EmptyTree =>
+ val zeroMSym = refZeroMethod(refTypeSym)
+ gen.mkMethodCall(zeroMSym, Nil)
+ case arg =>
+ val createMSym = refCreateMethod(refTypeSym)
+ gen.mkMethodCall(createMSym, arg :: Nil)
+ }
}
-
- /** Wrap expr argument in new *Ref(..) constructor. But try/catch
- * is a problem because a throw will clear the stack and post catch
- * we would expect the partially-constructed object to be on the stack
- * for the call to init. So we recursively
- * search for "leaf" result expressions where we know its safe
- * to put the new *Ref(..) constructor or, if all else fails, transform
- * an expr to { val temp=expr; new *Ref(temp) }.
- * The reason we narrowly look for try/catch in captured var definitions
- * is because other try/catch expression have already been lifted
- * see SI-6863
- */
- def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
- // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
- // a try/catch in final expression position.
- case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
- New(sym.tpe, expr)
- case Try(block, catches, finalizer) =>
- Try(refConstr(block), catches map refConstrCase, finalizer)
- case Block(stats, expr) =>
- Block(stats, refConstr(expr))
- case If(cond, trueBranch, falseBranch) =>
- If(cond, refConstr(trueBranch), refConstr(falseBranch))
- case Match(selector, cases) =>
- Match(selector, cases map refConstrCase)
- // if we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
- // any possibility of try/catch in the *Ref constructor. This should be a safe tranformation as a default
- // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
- case _ =>
- debuglog("assigning expr to temp: " + (expr.pos))
- val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
- val tempDef = ValDef(tempSym, expr) setPos expr.pos
- val tempRef = Ident(tempSym) setPos expr.pos
- Block(tempDef, New(sym.tpe, tempRef))
- }}
- def refConstrCase(cdef: CaseDef): CaseDef =
- CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
-
- treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
+
+ treeCopy.ValDef(tree, mods, name, tpt1, factoryCall)
} else tree
case Return(Block(stats, value)) =>
Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
@@ -510,7 +496,7 @@ abstract class LambdaLift extends InfoTransform {
treeCopy.Assign(tree, qual, rhs)
case Ident(name) =>
val tree1 =
- if (sym != NoSymbol && sym.isTerm && !sym.isLabel)
+ if (sym.isTerm && !sym.isLabel)
if (sym.isMethod)
atPos(tree.pos)(memberRef(sym))
else if (sym.isLocal && !isSameOwnerEnclosure(sym))
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 21213cf9d9..b71d14a04f 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -68,7 +68,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
curTree = tree
tree match {
-
+
case Block(_, _) =>
val block1 = super.transform(tree)
val Block(stats, expr) = block1
@@ -79,7 +79,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
List(stat)
})
treeCopy.Block(block1, stats1, expr)
-
+
case DefDef(_, _, _, _, _, rhs) => atOwner(tree.symbol) {
val (res, slowPathDef) = if (!sym.owner.isClass && sym.isLazy) {
val enclosingClassOrDummyOrMethod = {
@@ -100,9 +100,9 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
val (rhs1, sDef) = mkLazyDef(enclosingClassOrDummyOrMethod, transform(rhs), idx, sym)
sym.resetFlag((if (lazyUnit(sym)) 0 else LAZY) | ACCESSOR)
(rhs1, sDef)
- } else
+ } else
(transform(rhs), EmptyTree)
-
+
val ddef1 = deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
if (slowPathDef != EmptyTree) Block(slowPathDef, ddef1) else ddef1
}
@@ -183,30 +183,31 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
if (bmps.isEmpty) rhs else rhs match {
case Block(assign, l @ LabelDef(name, params, _))
- if name.toString == ("_" + methSym.name) && isMatch(params) =>
+ if (name string_== "_" + methSym.name) && isMatch(params) =>
Block(assign, deriveLabelDef(l)(rhs => typed(prependStats(bmps, rhs))))
case _ => prependStats(bmps, rhs)
}
}
-
+
def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree): Tree = {
- val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, STABLE | PRIVATE)
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, STABLE | PRIVATE)
defSym setInfo MethodType(List(), lzyVal.tpe.resultType)
defSym.owner = lzyVal.owner
debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal")
if (bitmaps.contains(lzyVal))
bitmaps(lzyVal).map(_.owner = defSym)
val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
- DEF(defSym).mkTree(addBitmapDefs(lzyVal, BLOCK(rhs, retVal))) setSymbol defSym
+
+ DefDef(defSym, addBitmapDefs(lzyVal, BLOCK(rhs, retVal)))
}
-
-
+
+
def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree): (Tree, Tree) = {
val slowPathDef: Tree = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal)
- (If(cond, Apply(ID(slowPathDef.symbol), List()), retVal), slowPathDef)
+ (If(cond, Apply(Ident(slowPathDef.symbol), Nil), retVal), slowPathDef)
}
/** return a 'lazified' version of rhs. Rhs should conform to the
@@ -221,7 +222,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* Similarly as for normal lazy val members (see Mixin), the result will be a tree of the form
* { if ((bitmap&n & MASK) == 0) this.l$compute()
* else l$
- *
+ *
* def l$compute() = { synchronized(enclosing_class_or_dummy) {
* if ((bitmap$n & MASK) == 0) {
* l$ = <rhs>
@@ -277,8 +278,8 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
if (bmps.length > n)
bmps(n)
else {
- val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteClass.tpe)
- beforeTyper {
+ val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteTpe)
+ enteringTyper {
sym addAnnotation VolatileAttr
}
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index e92450c9c0..89f9cb4b06 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -27,14 +27,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
/** Map a lazy, mixedin field accessor to it's trait member accessor */
- private val initializer = perRunCaches.newMap[Symbol, Symbol]
+ private val initializer = perRunCaches.newMap[Symbol, Symbol]()
// --------- helper functions -----------------------------------------------
/** A member of a trait is implemented statically if its implementation after the
* mixin transform is in the static implementation module. To be statically
* implemented, a member must be a method that belonged to the trait's implementation class
- * before (e.g. it is not abstract). Not statically implemented are
+ * before (i.e. it is not abstract). Not statically implemented are
* - non-private modules: these are implemented directly in the mixin composition class
* (private modules, on the other hand, are implemented statically, but their
* module variable is not. all such private modules are lifted, because
@@ -68,7 +68,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* maps all other types to themselves.
*/
private def toInterface(tp: Type): Type =
- beforeMixin(tp.typeSymbol.toInterface).tpe
+ enteringMixin(tp.typeSymbol.toInterface).tpe
private def isFieldWithBitmap(field: Symbol) = {
field.info // ensure that nested objects are transformed
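// Sketch (not part of the patch) of the interface / implementation-class trait encoding that
// toInterface above and the mixin machinery below operate on (the pre-2.12 scheme):
trait GreeterExample {
  def greet(name: String): String = "hello, " + name
}
// is split into an interface `GreeterExample` with an abstract `greet` plus a static implementation
// class, roughly `abstract class GreeterExample$class { def greet($this: GreeterExample, name: String): String = ... }`;
// Mixin then copies or forwards such members into every class that mixes the trait in.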
@@ -86,9 +86,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* Note: The `checkinit` option does not check if transient fields are initialized.
*/
private def needsInitFlag(sym: Symbol) = (
- settings.checkInit.value
+ settings.checkInit
&& sym.isGetter
&& !sym.isInitializedToDefault
+ && !isConstantType(sym.info.finalResultType) // SI-4742
&& !sym.hasFlag(PARAMACCESSOR | SPECIALIZED | LAZY)
&& !sym.accessed.hasFlag(PRESUPER)
&& !sym.isOuterAccessor
@@ -102,7 +103,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private val toInterfaceMap = new TypeMap {
def apply(tp: Type): Type = mapOver( tp match {
case TypeRef(pre, sym, args) if sym.isImplClass =>
- typeRef(pre, beforeMixin(sym.toInterface), args)
+ typeRef(pre, enteringMixin(sym.toInterface), args)
case _ => tp
})
}
@@ -119,14 +120,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param mixinClass The mixin class that produced the superaccessor
*/
private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol =
- afterSpecialize {
+ exitingSpecialize {
var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail
var sym: Symbol = NoSymbol
debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe +
" " + mixinClass + " " + base.info.baseClasses + "/" + bcs)
while (!bcs.isEmpty && sym == NoSymbol) {
- if (settings.debug.value) {
- val other = bcs.head.info.nonPrivateDecl(member.name);
+ if (settings.debug) {
+ val other = bcs.head.info.nonPrivateDecl(member.name)
debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe +
" " + other.isDeferred)
}
@@ -148,7 +149,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
sym =>
isConcreteAccessor(sym) &&
!sym.hasFlag(MIXEDIN) &&
- matchesType(sym.tpe, member.tpe, true))
+ matchesType(sym.tpe, member.tpe, alwaysMatchSimple = true))
}
( bcs.head != member.owner
&& (hasOverridingAccessor(bcs.head) || isOverriddenAccessor(member, bcs.tail))
@@ -165,7 +166,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
addMember(clazz, cloneBeforeErasure(mixinClass, mixinMember, clazz))
def cloneBeforeErasure(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol = {
- val newSym = beforeErasure {
+ val newSym = enteringErasure {
// since we used `mixinMember` from the interface that represents the trait that's
// being mixed in, have to instantiate the interface type params (that may occur in mixinMember's
// info) as they are seen from the class. We can't use the member that we get from the
@@ -185,11 +186,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
newSym updateInfo (mixinMember.info cloneInfo newSym)
}
- def needsExpandedSetterName(field: Symbol) = !field.isLazy && (
- if (field.isMethod) field.hasStableFlag
- else !field.isMutable
- )
-
/** Add getters and setters for all non-module fields of an implementation
* class to its interface unless they are already present. This is done
* only once per class. The mixedin flag is used to remember whether late
@@ -197,32 +193,29 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - lazy fields don't get a setter.
*/
def addLateInterfaceMembers(clazz: Symbol) {
- def makeConcrete(member: Symbol) =
- member setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED)
-
if (treatedClassInfos(clazz) != clazz.info) {
treatedClassInfos(clazz) = clazz.info
assert(phase == currentRun.mixinPhase, phase)
- /** Create a new getter. Getters are never private or local. They are
+ /* Create a new getter. Getters are never private or local. They are
* always accessors and deferred. */
def newGetter(field: Symbol): Symbol = {
// println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE))
val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | ( if (field.isMutable) 0 else STABLE )
// TODO preserve pre-erasure info?
- clazz.newMethod(nme.getterName(field.name), field.pos, newFlags) setInfo MethodType(Nil, field.info)
+ clazz.newMethod(field.getterName, field.pos, newFlags) setInfo MethodType(Nil, field.info)
}
- /** Create a new setter. Setters are never private or local. They are
- * always accessors and deferred. */
+ /* Create a new setter. Setters are never private or local. They are
+ * always accessors and deferred. */
def newSetter(field: Symbol): Symbol = {
//println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE))
- val setterName = nme.getterToSetter(nme.getterName(field.name))
+ val setterName = field.setterName
val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED
val setter = clazz.newMethod(setterName, field.pos, newFlags)
// TODO preserve pre-erasure info?
- setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)
- if (needsExpandedSetterName(field))
+ setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitTpe)
+ if (field.needsExpandedSetterName)
setter.name = nme.expandedSetterName(setter.name, clazz)
setter
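
For orientation only (this sketch is not part of the patch): the getters and setters created above correspond, at the source level, to the abstract accessor pair a trait field is represented by in its interface, with the actual storage supplied later when the trait is mixed into a class. Roughly, and with invented names:

    // Illustrative sketch, not generated output: a trait var seen as its interface accessors.
    trait Counter {
      def count: Int                 // late-deferred getter added to the interface
      def count_=(n: Int): Unit      // late-deferred setter (lazy and constant fields get none)
    }

    class CounterImpl extends Counter {
      private[this] var count0 = 0   // the concrete field, mixed in by a later phase
      def count: Int = count0
      def count_=(n: Int): Unit = count0 = n
    }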
@@ -240,12 +233,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val getter = member.getter(clazz)
if (getter == NoSymbol) addMember(clazz, newGetter(member))
if (!member.tpe.isInstanceOf[ConstantType] && !member.isLazy) {
- val setter = member.setter(clazz, needsExpandedSetterName(member))
+ val setter = member.setter(clazz)
if (setter == NoSymbol) addMember(clazz, newSetter(member))
}
}
}
- debuglog("new defs of " + clazz + " = " + clazz.info.decls);
+ debuglog("new defs of " + clazz + " = " + clazz.info.decls)
}
}
@@ -267,7 +260,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
resetFlag DEFERRED | lateDEFERRED
)
- /** Mix in members of implementation class mixinClass into class clazz */
+ /* Mix in members of implementation class mixinClass into class clazz */
def mixinImplClassMembers(mixinClass: Symbol, mixinInterface: Symbol) {
if (!mixinClass.isImplClass) debugwarn ("Impl class flag is not set " +
((mixinClass.debugLocationString, mixinInterface.debugLocationString)))
@@ -276,23 +269,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val imember = member overriddenSymbol mixinInterface
imember overridingSymbol clazz match {
case NoSymbol =>
- if (clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives contains imember)
+ if (clazz.info.findMember(member.name, 0, lateDEFERRED, stableOnly = false).alternatives contains imember)
cloneAndAddMixinMember(mixinInterface, imember).asInstanceOf[TermSymbol] setAlias member
case _ =>
}
}
}
- /** Mix in members of trait mixinClass into class clazz. Also,
- * for each lazy field in mixinClass, add a link from its mixed in member to its
- * initializer method inside the implclass.
+ /* Mix in members of trait mixinClass into class clazz. Also,
+ * for each lazy field in mixinClass, add a link from its mixed in member to its
+ * initializer method inside the implclass.
*/
def mixinTraitMembers(mixinClass: Symbol) {
// For all members of a trait's interface do:
for (mixinMember <- mixinClass.info.decls) {
if (isConcreteAccessor(mixinMember)) {
if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses))
- debugwarn("!!! is overridden val: "+mixinMember.fullLocationString)
+ devWarning(s"Overridden concrete accessor: ${mixinMember.fullLocationString}")
else {
// mixin field accessors
val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember)
@@ -311,14 +304,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// mixinMember is a value of type unit. No field needed
;
case _ => // otherwise mixin a field as well
- // atPhase: the private field is moved to the implementation class by erasure,
+ // enteringPhase: the private field is moved to the implementation class by erasure,
// so it can no longer be found in the mixinMember's owner (the trait)
- val accessed = beforePickler(mixinMember.accessed)
+ val accessed = enteringPickler(mixinMember.accessed)
// #3857, need to retain info before erasure when cloning (since cloning only
// carries over the current entry in the type history)
- val sym = beforeErasure {
+ val sym = enteringErasure {
// so we have a type history entry before erasure
- clazz.newValue(nme.getterToLocal(mixinMember.name), mixinMember.pos).setInfo(mixinMember.tpe.resultType)
+ clazz.newValue(mixinMember.localName, mixinMember.pos).setInfo(mixinMember.tpe.resultType)
}
sym updateInfo mixinMember.tpe.resultType // info at current phase
@@ -379,35 +372,34 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
var parents1 = parents
var decls1 = decls
if (!clazz.isPackageClass) {
- afterMixin(clazz.owner.info)
+ exitingMixin(clazz.owner.info)
if (clazz.isImplClass) {
clazz setFlag lateMODULE
var sourceModule = clazz.owner.info.decls.lookup(sym.name.toTermName)
- if (sourceModule != NoSymbol) {
- sourceModule setPos sym.pos
- if (sourceModule.flags != MODULE) {
- log("!!! Directly setting sourceModule flags from %s to MODULE".format(flagsToString(sourceModule.flags)))
- sourceModule.flags = MODULE
- }
- }
- else {
+ if (sourceModule == NoSymbol) {
sourceModule = (
clazz.owner.newModuleSymbol(sym.name.toTermName, sym.pos, MODULE)
setModuleClass sym.asInstanceOf[ClassSymbol]
)
clazz.owner.info.decls enter sourceModule
}
+ else {
+ sourceModule setPos sym.pos
+ if (sourceModule.flags != MODULE) {
+ log("!!! Directly setting sourceModule flags from %s to MODULE".format(sourceModule.flagString))
+ sourceModule.flags = MODULE
+ }
+ }
sourceModule setInfo sym.tpe
// Companion module isn't visible for anonymous class at this point anyway
- assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass,
- clazz + " has no sourceModule: sym = " + sym + " sym.tpe = " + sym.tpe)
+ assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass, s"$clazz has no sourceModule: $sym ${sym.tpe}")
parents1 = List()
decls1 = newScopeWith(decls.toList filter isImplementedStatically: _*)
} else if (!parents.isEmpty) {
parents1 = parents.head :: (parents.tail map toInterface)
}
}
- //decls1 = atPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
+ //decls1 = enteringPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
if ((parents1 eq parents) && (decls1 eq decls)) tp
else ClassInfoType(parents1, decls1, clazz)
@@ -437,7 +429,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree match {
case Assign(lhs, rhs) => traverse(rhs) // assignments don't count
case _ =>
- if (tree.hasSymbol && tree.symbol != NoSymbol) {
+ if (tree.hasSymbolField && tree.symbol != NoSymbol) {
val sym = tree.symbol
if ((sym.hasAccessorFlag || (sym.isTerm && !sym.isMethod))
&& sym.isPrivate
@@ -481,7 +473,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** The typer */
private var localTyper: erasure.Typer = _
private def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree)
- private def localTyped(pos: Position, tree: Tree, pt: Type) = localTyper.typed(atPos(pos)(tree), pt)
/** Map lazy values to the fields they should null after initialization. */
private var lazyValNullables: Map[Symbol, Set[Symbol]] = _
@@ -515,7 +506,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - create a new method definition that also has a `self` parameter
* (which comes first) Iuli: this position is assumed by tail call elimination
* on a different receiver. Storing a new 'this' assumes it is located at
- * index 0 in the local variable table. See 'STORE_THIS' and GenJVM/GenMSIL.
+ * index 0 in the local variable table. See 'STORE_THIS' and GenASM.
* - Map implementation class types in type-apply's to their interfaces
* - Remove all fields in implementation classes
*/
@@ -524,7 +515,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree match {
case Template(parents, self, body) =>
localTyper = erasure.newTyper(rootContext.make(tree, currentOwner))
- afterMixin(currentOwner.owner.info)//todo: needed?
+ exitingMixin(currentOwner.owner.info)//todo: needed?
if (!currentOwner.isTrait && !isPrimitiveValueClass(currentOwner))
addMixedinMembers(currentOwner, unit)
@@ -543,17 +534,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else EmptyTree
}
else {
- if (currentOwner.isTrait && sym.isSetter && !beforePickler(sym.isDeferred)) {
+ if (currentOwner.isTrait && sym.isSetter && !enteringPickler(sym.isDeferred)) {
sym.addAnnotation(TraitSetterAnnotationClass)
}
tree
}
+ // !!! What is this doing, and why is it only looking for exactly
+ // one type parameter? It would seem to be
+ // "Map implementation class types in type-apply's to their interfaces"
+ // from the comment on preTransform, but is there some way we should know
+ // that impl class types in type applies can only appear in single
+ // type parameter type constructors?
case Apply(tapp @ TypeApply(fn, List(arg)), List()) =>
if (arg.tpe.typeSymbol.isImplClass) {
val ifacetpe = toInterface(arg.tpe)
- arg.tpe = ifacetpe
- tapp.tpe = MethodType(List(), ifacetpe)
- tree.tpe = ifacetpe
+ arg setType ifacetpe
+ tapp setType MethodType(Nil, ifacetpe)
+ tree setType ifacetpe
}
tree
case ValDef(_, _, _, _) if currentOwner.isImplClass =>
@@ -590,18 +587,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree
}
- /** Create a static reference to given symbol <code>sym</code> of the
- * form <code>M.sym</code> where M is the symbol's implementation module.
+ /** Create a static reference to given symbol `sym` of the
+ * form `M.sym` where M is the symbol's implementation module.
*/
private def staticRef(sym: Symbol): Tree = {
sym.owner.info //todo: needed?
sym.owner.owner.info //todo: needed?
- assert(
- sym.owner.sourceModule ne NoSymbol,
- "" + sym.fullLocationString + " in " + sym.owner.owner + " " + sym.owner.owner.info.decls
- )
- REF(sym.owner.sourceModule) DOT sym
+ if (sym.owner.sourceModule eq NoSymbol)
+ abort(s"Cannot create static reference to $sym because ${sym.safeOwner} has no source module")
+ else
+ REF(sym.owner.sourceModule) DOT sym
}
def needsInitAndHasOffset(sym: Symbol) =
@@ -647,34 +643,34 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private def addNewDefs(clazz: Symbol, stats: List[Tree]): List[Tree] = {
val newDefs = mutable.ListBuffer[Tree]()
- /** Attribute given tree and anchor at given position */
+ /* Attribute given tree and anchor at given position */
def attributedDef(pos: Position, tree: Tree): Tree = {
debuglog("add new def to " + clazz + ": " + tree)
typedPos(pos)(tree)
}
- /** The position of given symbol, or, if this is undefined,
- * the position of the current class.
+ /* The position of given symbol, or, if this is undefined,
+ * the position of the current class.
*/
def position(sym: Symbol) =
if (sym.pos == NoPosition) clazz.pos else sym.pos
- /** Add tree at given position as new definition */
+ /* Add tree at given position as new definition */
def addDef(pos: Position, tree: Tree) {
newDefs += attributedDef(pos, tree)
}
- /** Add new method definition.
+ /* Add new method definition.
*
- * @param sym The method symbol.
- * @param rhs The method body.
+ * @param sym The method symbol.
+ * @param rhs The method body.
*/
def addDefDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), DefDef(sym, rhs))
def addValDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), ValDef(sym, rhs))
- /** Add `newdefs` to `stats`, removing any abstract method definitions
- * in <code>stats</code> that are matched by some symbol defined in
- * <code>newDefs</code>.
+    /* Add `newDefs` to `stats`, removing any abstract method definitions
+ * in `stats` that are matched by some symbol defined in
+ * `newDefs`.
*/
def add(stats: List[Tree], newDefs: List[Tree]) = {
val newSyms = newDefs map (_.symbol)
@@ -690,30 +686,30 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else newDefs ::: (stats filter isNotDuplicate)
}
- /** If `stat` is a superaccessor, complete it by adding a right-hand side.
- * Note: superaccessors are always abstract until this point.
- * The method to call in a superaccessor is stored in the accessor symbol's alias field.
- * The rhs is:
- * super.A(xs) where A is the super accessor's alias and xs are its formal parameters.
- * This rhs is typed and then mixin transformed.
+ /* If `stat` is a superaccessor, complete it by adding a right-hand side.
+ * Note: superaccessors are always abstract until this point.
+ * The method to call in a superaccessor is stored in the accessor symbol's alias field.
+ * The rhs is:
+ * super.A(xs) where A is the super accessor's alias and xs are its formal parameters.
+ * This rhs is typed and then mixin transformed.
*/
def completeSuperAccessor(stat: Tree) = stat match {
case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor =>
- val rhs0 = (Super(clazz, tpnme.EMPTY) DOT stat.symbol.alias)(vparams map (v => Ident(v.symbol)): _*)
- val rhs1 = localTyped(stat.pos, rhs0, stat.symbol.tpe.resultType)
+ val body = atPos(stat.pos)(Apply(Select(Super(clazz, tpnme.EMPTY), stat.symbol.alias), vparams map (v => Ident(v.symbol))))
+ val pt = stat.symbol.tpe.resultType
- deriveDefDef(stat)(_ => beforeMixin(transform(rhs1)))
+ copyDefDef(stat)(rhs = enteringMixin(transform(localTyper.typed(body, pt))))
case _ =>
stat
}
- /**
+ /*
* Return the bitmap field for 'offset'. Depending on the hierarchy it is possible to reuse
* the bitmap of its parents. If that does not exist yet we create one.
*/
def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol): Symbol = {
val category = bitmapCategory(field)
- val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field))
+ val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field)).toTermName
val sym = clazz0.info.decl(bitmapName)
assert(!sym.isOverloaded, sym)
@@ -721,15 +717,15 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def createBitmap: Symbol = {
val bitmapKind = bitmapKindForCategory(category)
val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe
- beforeTyper(sym addAnnotation VolatileAttr)
+ enteringTyper(sym addAnnotation VolatileAttr)
category match {
case nme.BITMAP_TRANSIENT | nme.BITMAP_CHECKINIT_TRANSIENT => sym addAnnotation TransientAttr
case _ =>
}
val init = bitmapKind match {
- case BooleanClass => VAL(sym) === FALSE
- case _ => VAL(sym) === ZERO
+ case BooleanClass => ValDef(sym, FALSE)
+ case _ => ValDef(sym, ZERO)
}
sym setFlag PrivateLocal
@@ -738,10 +734,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
sym
}
- if (sym ne NoSymbol)
- sym
- else
- createBitmap
+ sym orElse createBitmap
}
def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = {
@@ -749,7 +742,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if (kind == LongClass ) LIT(1L << realOffset) else LIT(1 << realOffset)
}
- /** Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
+ /* Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
def mkSetFlag(clazz: Symbol, offset: Int, valSym: Symbol, kind: ClassSymbol): Tree = {
val bmp = bitmapFor(clazz, offset, valSym)
def mask = maskForOffset(offset, valSym, kind)
@@ -759,8 +752,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
x === newValue
}
- /** Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
- * precise comparison operator depending on the value of 'equalToZero'.
+ /* Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
+ * precise comparison operator depending on the value of 'equalToZero'.
*/
def mkTest(clazz: Symbol, mask: Tree, bitmapSym: Symbol, equalToZero: Boolean, kind: ClassSymbol): Tree = {
val bitmapTree = (This(clazz) DOT bitmapSym)
@@ -777,12 +770,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = {
- val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, PRIVATE)
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, PRIVATE)
val params = defSym newSyntheticValueParams args.map(_.symbol.tpe)
defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType)
val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
val strictSubst = new TreeSymSubstituterWithCopying(args.map(_.symbol), params)
- addDef(position(defSym), DEF(defSym).mkTree(strictSubst(BLOCK(rhs, retVal))) setSymbol defSym)
+ addDef(position(defSym), DefDef(defSym, strictSubst(BLOCK(rhs, retVal))))
defSym
}
@@ -798,19 +791,19 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
- /** Always copy the tree if we are going to perform sym substitution,
- * otherwise we will side-effect on the tree that is used in the fast path
- */
- class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
- override def transform(tree: Tree): Tree =
- if (tree.hasSymbol && from.contains(tree.symbol))
- super.transform(tree.duplicate)
- else super.transform(tree.duplicate)
+ /* Always copy the tree if we are going to perform sym substitution,
+ * otherwise we will side-effect on the tree that is used in the fast path
+ */
+ class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
+ override def transform(tree: Tree): Tree =
+ if (tree.hasSymbolField && from.contains(tree.symbol))
+ super.transform(tree.duplicate)
+ else super.transform(tree.duplicate)
- override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
- }
+ override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
+ }
- /** return a 'lazified' version of rhs. It uses double-checked locking to ensure
+    /* Return a 'lazified' version of rhs. It uses double-checked locking to ensure
* initialization is performed at most once. For performance reasons the double-checked
* locking is split into two parts, the first (fast) path checks the bitmap without
* synchronizing, and if that fails it initializes the lazy val within the
@@ -819,8 +812,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* Private fields used only in this initializer are subsequently set to null.
*
* @param clazz The class symbol
+ * @param lzyVal The symbol of this lazy field
* @param init The tree which initializes the field ( f = <rhs> )
- * @param fieldSym The symbol of this lazy field
* @param offset The offset of this field in the flags bitmap
*
* The result will be a tree of the form
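
To make the shape described above concrete, here is a minimal hand-written sketch of the double-checked locking this transform produces for a lazy val. It is an illustration only: the single Boolean flag and the plain method names are assumptions (the compiler-generated names look like `bitmap$0` and `x$lzycompute`), not content of this patch.

    // Illustrative sketch only: roughly what `lazy val x = computeX()` lowers to.
    class LazyHolder {
      @volatile private[this] var bitmap0 = false      // one flag of the bitmap described above
      private[this] var x0: Int = _

      private def xLzycompute(): Int = {               // the slow, synchronized path
        this.synchronized {
          if (!bitmap0) { x0 = computeX(); bitmap0 = true }
        }
        x0
      }

      def x: Int = if (bitmap0) x0 else xLzycompute()  // fast path: unsynchronized flag check

      private def computeX(): Int = 42
    }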
@@ -853,7 +846,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bitmapSym = bitmapFor(clazz, offset, lzyVal)
val kind = bitmapKind(lzyVal)
val mask = maskForOffset(offset, lzyVal, kind)
- def cond = mkTest(clazz, mask, bitmapSym, true, kind)
+ def cond = mkTest(clazz, mask, bitmapSym, equalToZero = true, kind)
val nulls = lazyValNullables(lzyVal).toList sortBy (_.id) map nullify
def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal, kind), UNIT)
@@ -870,7 +863,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL))
mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
case _ =>
- abort("Invalid getter " + rhs + " for module in class " + clazz)
+ abort(s"Invalid getter $rhs for module in $clazz")
}
def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = {
@@ -878,20 +871,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bitmapSym = bitmapFor(clazz, offset, sym)
val kind = bitmapKind(sym)
val mask = maskForOffset(offset, sym, kind)
- val msg = "Uninitialized field: " + unit.source + ": " + pos.line
+ val msg = s"Uninitialized field: ${unit.source}: ${pos.line}"
val result =
- IF (mkTest(clazz, mask, bitmapSym, false, kind)) .
+ IF (mkTest(clazz, mask, bitmapSym, equalToZero = false, kind)) .
THEN (retVal) .
- ELSE (THROW(UninitializedErrorClass, LIT(msg)))
+ ELSE (Throw(NewFromConstructor(UninitializedFieldConstructor, LIT(msg))))
typedPos(pos)(BLOCK(result, retVal))
}
- /** Complete lazy field accessors. Applies only to classes,
- * for it's own (non inherited) lazy fields. If 'checkinit'
- * is enabled, getters that check for the initialized bit are
- * generated, and the class constructor is changed to set the
- * initialized bits.
+ /* Complete lazy field accessors. Applies only to classes,
+     * for their own (non-inherited) lazy fields. If 'checkinit'
+ * is enabled, getters that check for the initialized bit are
+ * generated, and the class constructor is changed to set the
+ * initialized bits.
*/
def addCheckedGetters(clazz: Symbol, stats: List[Tree]): List[Tree] = {
def dd(stat: DefDef) = {
@@ -922,7 +915,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else if (sym.isConstructor) {
deriveDefDef(stat)(addInitBits(clazz, _))
}
- else if (settings.checkInit.value && !clazz.isTrait && sym.isSetter) {
+ else if (settings.checkInit && !clazz.isTrait && sym.isSetter) {
val getter = sym.getter(clazz)
if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))), UNIT))
@@ -972,23 +965,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
}
- /** Adds statements to set the 'init' bit for each field initialized
- * in the body of a constructor.
+ /* Adds statements to set the 'init' bit for each field initialized
+ * in the body of a constructor.
*/
def addInitBits(clazz: Symbol, rhs: Tree): Tree =
new AddInitBitsTransformer(clazz) transform rhs
- def isCheckInitField(field: Symbol) =
- needsInitFlag(field) && !field.isDeferred
-
- def superClassesToCheck(clazz: Symbol) =
- clazz.ancestors filterNot (_ hasFlag TRAIT | JAVA)
-
// begin addNewDefs
- /** Fill the map from fields to offset numbers.
- * Instead of field symbols, the map keeps their getter symbols. This makes
- * code generation easier later.
+ /* Fill the map from fields to offset numbers.
+ * Instead of field symbols, the map keeps their getter symbols. This makes
+ * code generation easier later.
*/
def buildBitmapOffsets() {
def fold(fields: List[Symbol], category: Name) = {
@@ -1044,16 +1031,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
// if class is not a trait add accessor definitions
else if (!clazz.isTrait) {
- if (sym.hasAccessorFlag && (!sym.isDeferred || sym.hasFlag(lateDEFERRED))) {
+ // This needs to be a def to avoid sharing trees
+ def accessedRef = accessedReference(sym)
+ if (isConcreteAccessor(sym)) {
// add accessor definitions
addDefDef(sym, {
- val accessedRef = accessedReference(sym)
if (sym.isSetter) {
if (isOverriddenSetter(sym)) UNIT
else accessedRef match {
- case Literal(_) => accessedRef
- case _ =>
- val init = Assign(accessedRef, Ident(sym.firstParam))
+ case ref @ Literal(_) => ref
+ case ref =>
+ val init = Assign(ref, Ident(sym.firstParam))
val getter = sym.getter(clazz)
if (!needsInitFlag(getter)) init
@@ -1063,16 +1051,18 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else if (needsInitFlag(sym))
mkCheckedAccessor(clazz, accessedRef, fieldOffset(sym), sym.pos, sym)
else
- gen.mkCheckInit(accessedRef)
+ accessedRef
})
}
else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) {
// add modules
- val vdef = gen.mkModuleVarDef(sym)
- addDef(position(sym), vdef)
+ val vsym = sym.owner.newModuleVarSymbol(sym)
+ addDef(position(sym), ValDef(vsym))
- val rhs = gen.newModule(sym, vdef.symbol.tpe)
- val assignAndRet = gen.mkAssignAndReturn(vdef.symbol, rhs)
+ // !!! TODO - unravel the enormous duplication between this code and
+ // eliminateModuleDefs in RefChecks.
+ val rhs = gen.newModule(sym, vsym.tpe)
+ val assignAndRet = gen.mkAssignAndReturn(vsym, rhs)
val attrThis = gen.mkAttributedThis(clazz)
val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet, sym, List())
@@ -1090,7 +1080,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// add forwarders
assert(sym.alias != NoSymbol, sym)
// debuglog("New forwarder: " + sym.defString + " => " + sym.alias.defString)
- if (!sym.isTermMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
+ if (!sym.isMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
}
}
}
@@ -1135,7 +1125,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// change every node type that refers to an implementation class to its
// corresponding interface, unless the node's symbol is an implementation class.
if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass))
- tree.tpe = toInterface(tree.tpe)
+ tree modifyType toInterface
tree match {
case templ @ Template(parents, self, body) =>
@@ -1151,9 +1141,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
qual
case Apply(Select(qual, _), args) =>
- /** Changes <code>qual.m(args)</code> where m refers to an implementation
+ /* Changes `qual.m(args)` where m refers to an implementation
* class method to Q.m(S, args) where Q is the implementation module of
- * <code>m</code> and S is the self parameter for the call, which
+ * `m` and S is the self parameter for the call, which
* is determined as follows:
* - if qual != super, qual itself
* - if qual == super, and we are in an implementation class,
@@ -1164,7 +1154,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def implSym = implClass(sym.owner).info.member(sym.name)
assert(target ne NoSymbol,
List(sym + ":", sym.tpe, sym.owner, implClass(sym.owner), implSym,
- beforePrevPhase(implSym.tpe), phase) mkString " "
+ enteringPrevPhase(implSym.tpe), phase) mkString " "
)
typedPos(tree.pos)(Apply(staticRef(target), transformSuper(qual) :: args))
}
@@ -1193,7 +1183,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
typedPos(tree.pos)((transformSuper(qual) DOT sym1)())
}
else {
- staticCall(beforePrevPhase(sym.overridingSymbol(implClass(sym.owner))))
+ staticCall(enteringPrevPhase(sym.overridingSymbol(implClass(sym.owner))))
}
}
else {
@@ -1211,36 +1201,19 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree
case Select(qual, name) if sym.owner.isImplClass && !isStaticOnly(sym) =>
- assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, flagsToString(sym.flags)))
+ assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, sym.flagString))
// refer to fields in some implementation class via an abstract
// getter in the interface.
val iface = toInterface(sym.owner.tpe).typeSymbol
val ifaceGetter = sym getter iface
- def si6231Restriction() {
- // See SI-6231 comments in LamdaLift for ideas on how to lift the restriction.
- val msg = sm"""Implementation restriction: local ${iface.fullLocationString} is unable to automatically capture the
- |free variable ${sym} on behalf of ${currentClass}. You can manually assign it to a val inside the trait,
- |and refer that that val in ${currentClass}. For more details, see SI-6231."""
- reporter.error(tree.pos, msg)
- }
-
- if (ifaceGetter == NoSymbol) {
- if (sym.isParamAccessor) {
- si6231Restriction()
- EmptyTree
- }
- else abort("No getter for " + sym + " in " + iface)
- }
+ if (ifaceGetter == NoSymbol) abort("No getter for " + sym + " in " + iface)
else typedPos(tree.pos)((qual DOT ifaceGetter)())
case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
// assign to fields in some implementation class via an abstract
// setter in the interface.
- def setter = lhs.symbol.setter(
- toInterface(lhs.symbol.owner.tpe).typeSymbol,
- needsExpandedSetterName(lhs.symbol)
- ) setPos lhs.pos
+ def setter = lhs.symbol.setter(toInterface(lhs.symbol.owner.tpe).typeSymbol) setPos lhs.pos
typedPos(tree.pos)((qual DOT setter)(rhs))
@@ -1258,7 +1231,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val tree1 = super.transform(preTransform(tree))
// localTyper needed when not flattening inner classes. parts after an
// inner class will otherwise be typechecked with a wrong scope
- try afterMixin(postTransform(tree1))
+ try exitingMixin(postTransform(tree1))
finally localTyper = saved
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 67be81bd3c..4222c4d8c8 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -6,221 +6,33 @@
package scala.tools.nsc
package transform
-import scala.collection.mutable
import symtab.Flags._
-import util.HashSet
-import scala.annotation.tailrec
+import scala.reflect.internal.SymbolPairs
/** A class that yields a kind of iterator (`Cursor`),
- * which yields all pairs of overriding/overridden symbols
- * that are visible in some baseclass, unless there's a parent class
- * that already contains the same pairs.
- * @author Martin Odersky
- * @version 1.0
+ * which yields pairs of corresponding symbols visible in some base class,
+ * unless there's a parent class that already contains the same pairs.
+ * Most of the logic is in SymbolPairs, which contains generic
+ * pair-oriented traversal logic.
*/
-abstract class OverridingPairs {
-
- val global: Global
+abstract class OverridingPairs extends SymbolPairs {
import global._
- /** The cursor class
- * @param base the base class that contains the overriding pairs
- */
- class Cursor(base: Symbol) {
-
- private val self = base.thisType
-
- /** Symbols to exclude: Here these are constructors, private locals,
- * and bridges. But it may be refined in subclasses.
- *
- */
- protected def exclude(sym: Symbol): Boolean =
- sym.isConstructor || sym.isPrivateLocal || sym.hasFlag(BRIDGE)
-
- /** The parents of base (may also be refined).
- */
- protected def parents: List[Type] = base.info.parents
-
- /** Does `sym1` match `sym2` so that it qualifies as overriding.
- * Types always match. Term symbols match if their membertypes
- * relative to <base>.this do
- */
- protected def matches(sym1: Symbol, sym2: Symbol): Boolean = {
- def tp_s(s: Symbol) = self.memberType(s) + "/" + self.memberType(s).getClass
- val result = sym1.isType || (self.memberType(sym1) matches self.memberType(sym2))
- debuglog("overriding-pairs? %s matches %s (%s vs. %s) == %s".format(
- sym1.fullLocationString, sym2.fullLocationString, tp_s(sym1), tp_s(sym2), result))
-
- result
- }
+ class Cursor(base: Symbol) extends super.Cursor(base) {
+ lazy val relatively = new RelativeTo(base.thisType)
- /** An implementation of BitSets as arrays (maybe consider collection.BitSet
- * for that?) The main purpose of this is to implement
- * intersectionContainsElement efficiently.
+ /** Symbols to exclude: Here these are constructors and private/artifact symbols,
+ * including bridges. But it may be refined in subclasses.
*/
- private type BitSet = Array[Int]
-
- private def include(bs: BitSet, n: Int) {
- val nshifted = n >> 5
- val nmask = 1 << (n & 31)
- bs(nshifted) = bs(nshifted) | nmask
- }
-
- /** Implements `bs1 * bs2 * {0..n} != 0.
- * Used in hasCommonParentAsSubclass */
- private def intersectionContainsElementLeq(bs1: BitSet, bs2: BitSet, n: Int): Boolean = {
- val nshifted = n >> 5
- val nmask = 1 << (n & 31)
- var i = 0
- while (i < nshifted) {
- if ((bs1(i) & bs2(i)) != 0) return true
- i += 1
- }
- (bs1(nshifted) & bs2(nshifted) & (nmask | nmask - 1)) != 0
- }
-
- /** The symbols that can take part in an overriding pair */
- private val decls = newScope
+ override protected def exclude(sym: Symbol) = (sym hasFlag PRIVATE | ARTIFACT) || sym.isConstructor
- // fill `decls` with overriding shadowing overridden */
- { def fillDecls(bcs: List[Symbol], deferredflag: Int) {
- if (!bcs.isEmpty) {
- fillDecls(bcs.tail, deferredflag)
- var e = bcs.head.info.decls.elems;
- while (e ne null) {
- if (e.sym.getFlag(DEFERRED) == deferredflag.toLong && !exclude(e.sym))
- decls enter e.sym;
- e = e.next
- }
- }
- }
- // first, deferred (this wil need to change if we change lookup rules!
- fillDecls(base.info.baseClasses, DEFERRED)
- // then, concrete.
- fillDecls(base.info.baseClasses, 0)
- }
-
- private val size = base.info.baseClasses.length
-
- /** A map from baseclasses of <base> to ints, with smaller ints meaning lower in
- * linearization order.
- * symbols that are not baseclasses map to -1.
+ /** Types always match. Term symbols match if their member types
+ * relative to `self` match.
*/
- private val index = new mutable.HashMap[Symbol, Int] {
- override def default(key: Symbol) = -1
- }
-
- // Note: overridingPairs can be called at odd instances by the Eclipse plugin
- // Soemtimes symbols are not yet defined and we get missing keys.
- // The implementation here is hardened so that it does not crash on a missing key.
-
- { var i = 0
- for (bc <- base.info.baseClasses) {
- index(bc) = i
- i += 1
- }
- }
-
- /** A mapping from all base class indices to a bitset
- * which indicates whether parents are subclasses.
- *
- * i \in subParents(j) iff
- * exists p \in parents, b \in baseClasses:
- * i = index(p)
- * j = index(b)
- * p isSubClass b
- * p.baseType(b) == self.baseType(b)
- */
- private val subParents = new Array[BitSet](size)
-
- { for (i <- List.range(0, size))
- subParents(i) = new BitSet(size);
- for (p <- parents) {
- val pIndex = index(p.typeSymbol)
- if (pIndex >= 0)
- for (bc <- p.baseClasses)
- if (p.baseType(bc) =:= self.baseType(bc)) {
- val bcIndex = index(bc)
- if (bcIndex >= 0)
- include(subParents(bcIndex), pIndex)
- }
- }
- }
-
- /** Do `sym1` and `sym2` have a common subclass in `parents`?
- * In that case we do not follow their overriding pairs
- */
- private def hasCommonParentAsSubclass(sym1: Symbol, sym2: Symbol) = {
- val index1 = index(sym1.owner)
- (index1 >= 0) && {
- val index2 = index(sym2.owner)
- (index2 >= 0) && {
- intersectionContainsElementLeq(
- subParents(index1), subParents(index2), index1 min index2)
- }
- }
- }
-
- /** The scope entries that have already been visited as overridden
- * (maybe excluded because of hasCommonParentAsSubclass).
- * These will not appear as overriding
- */
- private val visited = HashSet[ScopeEntry]("visited", 64)
-
- /** The current entry candidate for overriding
- */
- private var curEntry = decls.elems
-
- /** The current entry candidate for overridden */
- private var nextEntry = curEntry
-
- /** The current candidate symbol for overriding */
- var overriding: Symbol = _
-
- /** If not null: The symbol overridden by overriding */
- var overridden: Symbol = _
-
- //@M: note that next is called once during object initialization
- def hasNext: Boolean = curEntry ne null
-
- @tailrec
- final def next() {
- if (curEntry ne null) {
- overriding = curEntry.sym
- if (nextEntry ne null) {
- do {
- do {
- nextEntry = decls.lookupNextEntry(nextEntry);
- /* DEBUG
- if ((nextEntry ne null) &&
- !(nextEntry.sym hasFlag PRIVATE) &&
- !(overriding.owner == nextEntry.sym.owner) &&
- !matches(overriding, nextEntry.sym))
- println("skipping "+overriding+":"+self.memberType(overriding)+overriding.locationString+" to "+nextEntry.sym+":"+self.memberType(nextEntry.sym)+nextEntry.sym.locationString)
- */
- } while ((nextEntry ne null) &&
- ((nextEntry.sym hasFlag PRIVATE) ||
- (overriding.owner == nextEntry.sym.owner) ||
- (!matches(overriding, nextEntry.sym)) ||
- (exclude(overriding))))
- if (nextEntry ne null) visited addEntry nextEntry
- // skip nextEntry if a class in `parents` is a subclass of the owners of both
- // overriding and nextEntry.sym
- } while ((nextEntry ne null) && (hasCommonParentAsSubclass(overriding, nextEntry.sym)))
- if (nextEntry ne null) {
- overridden = nextEntry.sym;
- //Console.println("yield: " + overriding + overriding.locationString + " / " + overridden + overridden.locationString);//DEBUG
- } else {
- do {
- curEntry = curEntry.next
- } while ((curEntry ne null) && (visited contains curEntry));
- nextEntry = curEntry
- next
- }
- }
- }
- }
-
- next
+ override protected def matches(sym1: Symbol, sym2: Symbol) = sym1.isType || (
+ (sym1.owner != sym2.owner)
+ && !exclude(sym2)
+ && relatively.matches(sym1, sym2)
+ )
}
}
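
A side note on the array-backed BitSet helpers removed above (illustrative and self-contained, not part of the patch): a bit index n is split into a word index n >> 5 and a mask 1 << (n & 31), so for example bit 37 lives in word 1 under mask 1 << 5.

    // Self-contained sketch of the removed BitSet-as-Array[Int] encoding.
    object BitSetSketch {
      type BitSet = Array[Int]

      def include(bs: BitSet, n: Int): Unit =
        bs(n >> 5) |= 1 << (n & 31)              // word index, then bit mask within the word

      def contains(bs: BitSet, n: Int): Boolean =
        (bs(n >> 5) & (1 << (n & 31))) != 0

      def demo(): Unit = {
        val bs = new BitSet(2)                   // 2 words = 64 bits
        include(bs, 37)
        assert(contains(bs, 37) && !contains(bs, 36))
      }
    }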
diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
index 3ef32caa2c..cc78e27282 100644
--- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
@@ -9,10 +9,10 @@ package transform
* performs peephole optimizations.
*/
trait PostErasure extends InfoTransform with TypingTransformers {
-
val global: Global
+
import global._
- import definitions._
+ import treeInfo._
val phaseName: String = "posterasure"
@@ -21,51 +21,33 @@ trait PostErasure extends InfoTransform with TypingTransformers {
object elimErasedValueType extends TypeMap {
def apply(tp: Type) = tp match {
- case ConstantType(Constant(tp: Type)) =>
- ConstantType(Constant(apply(tp)))
- case ErasedValueType(tref) =>
- atPhase(currentRun.erasurePhase)(erasure.erasedValueClassArg(tref))
- case _ => mapOver(tp)
+ case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp)))
+ case ErasedValueType(_, underlying) => underlying
+ case _ => mapOver(tp)
}
}
def transformInfo(sym: Symbol, tp: Type) = elimErasedValueType(tp)
class PostErasureTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ override def transform(tree: Tree) = {
+ def finish(res: Tree) = logResult(s"Posterasure reduction\n Old: $tree\n New")(res)
+
+ /* We use the name of the operation being performed and not the symbol
+ * itself because the symbol hails from the boxed class, and this transformation
+ * exists to operate directly on the values. So we are for instance looking
+ * up == on an lhs of type Int, whereas the symbol which has been passed in
+ * is from java.lang.Integer.
+ */
+ def binop(lhs: Tree, op: Symbol, rhs: Tree) =
+ finish(localTyper typed (Apply(Select(lhs, op.name) setPos tree.pos, rhs :: Nil) setPos tree.pos))
- override def transform(tree: Tree) =
super.transform(tree) setType elimErasedValueType(tree.tpe) match {
- case // new C(arg).underlying ==> arg
- Apply(sel @ Select(
- Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)),
- acc), List())
- if atPhase(currentRun.erasurePhase) {
- tpt.tpe.typeSymbol.isDerivedValueClass &&
- sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox
- } =>
- if (settings.debug.value) log("Removing "+tree+" -> "+arg)
- arg
- case // new C(arg1) == new C(arg2) ==> arg1 == arg2
- Apply(sel @ Select(
- Apply(Select(New(tpt1), nme.CONSTRUCTOR), List(arg1)),
- cmp),
- List(Apply(Select(New(tpt2), nme.CONSTRUCTOR), List(arg2))))
- if atPhase(currentRun.erasurePhase) {
- tpt1.tpe.typeSymbol.isDerivedValueClass &&
- (sel.symbol == Object_== || sel.symbol == Object_!=) &&
- tpt2.tpe.typeSymbol == tpt1.tpe.typeSymbol
- } =>
- val result = Apply(Select(arg1, cmp) setPos sel.pos, List(arg2)) setPos tree.pos
- log("shortcircuiting equality "+tree+" -> "+result)
- localTyper.typed(result)
-
- case // arg.asInstanceOf[T] ==> arg if arg.tpe == T
- Apply(TypeApply(cast @ Select(arg, asinstanceof), List(tpt)), List())
- if cast.symbol == Object_asInstanceOf && arg.tpe =:= tpt.tpe => // !!! <:< ?
- if (settings.debug.value) log("Shortening "+tree+" -> "+arg)
- arg
- case tree1 =>
- tree1
+ case AsInstanceOf(v, tpe) if v.tpe <:< tpe => finish(v) // x.asInstanceOf[X] ==> x
+ case ValueClass.BoxAndUnbox(v) => finish(v) // (new B(v)).unbox ==> v
+ case ValueClass.BoxAndCompare(v1, op, v2) => binop(v1, op, v2) // new B(v1) == new B(v2) ==> v1 == v2
+ case tree => tree
}
+ }
}
}
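
For readers unfamiliar with the three reductions matched above, a hedged source-level illustration follows; the value class `Meter` is invented for this example. After posterasure, boxing introduced only to call the unbox accessor, `==`, or an `asInstanceOf` to the value's own type is peeled away again.

    // Illustrative only: what the posterasure peepholes mean at the source level.
    final class Meter(val underlying: Int) extends AnyVal

    object PeepholeDemo {
      val a: Int     = new Meter(3).underlying          // BoxAndUnbox:   reduces to 3
      val b: Boolean = new Meter(1) == new Meter(2)     // BoxAndCompare: compares the Ints directly
      val c: Int     = 5.asInstanceOf[Int]              // AsInstanceOf:  cast to the same type drops out
    }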
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index 44d8860916..cffb483072 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -11,9 +11,8 @@ package transform
abstract class SampleTransform extends Transform {
// inherits abstract value `global` and class `Phase` from Transform
- import global._ // the global environment
- import definitions._ // standard classes and methods
- import typer.{typed, atOwner} // methods to type trees
+ import global._ // the global environment
+ import typer.typed // method to type trees
/** the following two members override abstract members in Transform */
val phaseName: String = "sample-phase"
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 7e85647592..c505d9dc5f 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -3,13 +3,15 @@
* @author Iulian Dragos
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
import scala.language.postfixOps
import scala.language.existentials
+import scala.annotation.tailrec
/** Specialize code on types.
*
@@ -50,7 +52,11 @@ import scala.language.existentials
*/
abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
import global._
+ import definitions._
import Flags._
+
+ private val inlineFunctionExpansion = settings.Ydelambdafy.value == "inline"
+
/** the name of the phase: */
val phaseName: String = "specialize"
@@ -66,13 +72,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private implicit val typeOrdering: Ordering[Type] = Ordering[String] on ("" + _.typeSymbol.name)
- import definitions.{
- BooleanClass, UnitClass, ArrayClass,
- ScalaValueClasses, isPrimitiveValueClass, isPrimitiveValueType,
- SpecializedClass, UnspecializedClass, AnyRefClass, ObjectClass,
- GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass
- }
- import rootMirror.RootClass
/** TODO - this is a lot of maps.
*/
@@ -101,8 +100,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Concrete methods that use a specialized type, or override such methods. */
private val concreteSpecMethods = perRunCaches.newWeakSet[Symbol]()
- private def specializedTypes(tps: List[Symbol]) = tps filter (_.isSpecialized)
private def specializedOn(sym: Symbol): List[Symbol] = {
+ val GroupOfSpecializable = currentRun.runDefinitions.GroupOfSpecializable
sym getAnnotation SpecializedClass match {
case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol)
case Some(ann @ AnnotationInfo(_, args, _)) => {
@@ -119,14 +118,30 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- // If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
+ @annotation.tailrec private def findSymbol[T](candidates: List[T], f: T => Symbol): Symbol = {
+ if (candidates.isEmpty) NoSymbol
+ else f(candidates.head) match {
+ case NoSymbol => findSymbol(candidates.tail, f)
+ case sym => sym
+ }
+ }
+ private def hasNewParents(tree: Tree) = {
+ val parents = tree.symbol.info.parents
+ val prev = enteringPrevPhase(tree.symbol.info.parents)
+ (parents != prev) && {
+ debuglog(s"$tree parents changed from: $prev to: $parents")
+ true
+ }
+ }
+
+ // If we replace `isBoundedGeneric` with (tp <:< AnyRefTpe),
// then pos/spec-List.scala fails - why? Does this kind of check fail
// for similar reasons? Does `sym.isAbstractType` make a difference?
private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = {
specializedOn(sym).exists(s => !isPrimitiveValueClass(s)) &&
!isPrimitiveValueClass(tp.typeSymbol) &&
isBoundedGeneric(tp)
- //(tp <:< AnyRefClass.tpe)
+ //(tp <:< AnyRefTpe)
}
object TypeEnv {
@@ -147,7 +162,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def includes(t1: TypeEnv, t2: TypeEnv) = t1 forall {
case (sym, tpe) =>
t2 get sym exists { t2tp =>
- (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
+ (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefTpe)
}
}
@@ -163,22 +178,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
env forall { case (tvar, tpe) =>
tvar.isSpecialized && (concreteTypes(tvar) contains tpe) && {
(sym.typeParams contains tvar) ||
- (sym.owner != RootClass && (sym.owner.typeParams contains tvar))
+ (sym.owner != rootMirror.RootClass && (sym.owner.typeParams contains tvar))
}
}
}
}
- /** Returns the generic class that was specialized to 'sClass', or
- * 'sClass' itself if sClass is not a specialized subclass.
- */
- def genericClass(sClass: Symbol): Symbol =
- if (sClass.isSpecialized) sClass.superClass
- else sClass
-
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString = "specialized overload " + sym + " in " + env
- def matchesSym(other: Symbol) = sym.tpe =:= other.tpe
+ def matchesSym(sym1: Symbol) = sym.info =:= sym1.info
def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1)
}
private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = {
@@ -207,8 +215,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* type bounds of other @specialized type parameters (and not in its result type).
*/
def degenerate = false
-
- def isAccessor = false
}
/** Symbol is a special overloaded method of 'original', in the environment env. */
@@ -226,11 +232,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def target = t
}
- /** Symbol is a specialized accessor for the `target` field. */
- case class SpecializedAccessor(target: Symbol) extends SpecializedInfo {
- override def isAccessor = true
+ /** Symbol is a special overload of the super accessor. */
+ case class SpecialSuperAccessor(t: Symbol) extends SpecializedInfo {
+ def target = t
}
+ /** Symbol is a specialized accessor for the `target` field. */
+ case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { }
+
/** Symbol is a specialized method whose body should be the target's method body. */
case class Implementation(target: Symbol) extends SpecializedInfo
@@ -268,9 +277,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def specializedParams(sym: Symbol): List[Symbol] =
sym.info.typeParams filter (_.isSpecialized)
- def splitParams(tps: List[Symbol]) =
- tps partition (_.isSpecialized)
-
/** Given an original class symbol and a list of types its type parameters are instantiated at
* returns a list of type parameters that should remain in the TypeRef when instantiating a
* specialized type.
@@ -286,7 +292,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// when searching for a specialized class, take care to map all
// type parameters that are subtypes of AnyRef to AnyRef
val args1 = map2(args, sym.info.typeParams)((tp, orig) =>
- if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefClass.tpe
+ if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefTpe
else tp
)
specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
@@ -315,20 +321,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Specialize name for the two list of types. The first one denotes
* specialization on method type parameters, the second on outer environment.
*/
- private def specializedName(name: Name, types1: List[Type], types2: List[Type]): TermName = {
- if (nme.INITIALIZER == name || (types1.isEmpty && types2.isEmpty))
- name
+ private def specializedName(name: Name, types1: List[Type], types2: List[Type]): TermName = (
+ if (name == nme.CONSTRUCTOR || (types1.isEmpty && types2.isEmpty))
+ name.toTermName
else if (nme.isSetterName(name))
- nme.getterToSetter(specializedName(nme.setterToGetter(name), types1, types2))
+ specializedName(name.getterName, types1, types2).setterName
else if (nme.isLocalName(name))
- nme.getterToLocal(specializedName(nme.localToGetter(name), types1, types2))
+ specializedName(name.getterName, types1, types2).localName
else {
val (base, cs, ms) = nme.splitSpecializedName(name)
newTermName(base.toString + "$"
- + "m" + ms + types1.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "")
- + "c" + cs + types2.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
+ + "m" + ms + types1.map(t => abbrvTag(t.typeSymbol)).mkString("", "", "")
+ + "c" + cs + types2.map(t => abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
}
- }
+ )
lazy val specializableTypes = ScalaValueClasses map (_.tpe) sorted
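
As a concrete instance of the mangling above (illustrative; the only assumed fact is the standard library's @specialized Function1): specializing `apply` for the class-level environment {T1 -> Int, R -> Int}, with no specialized method type parameters, concatenates `apply` + "$m" + "c" + "II" + "$sp".

    // Illustrative only: the specialized name as it surfaces from ordinary user code.
    object SpecializedNameDemo {
      val inc: Int => Int = _ + 1
      // Function1 is @specialized on Int, so this call dispatches to the synthetic
      //   def apply$mcII$sp(x: Int): Int
      // and no Integer boxing occurs.
      def run(x: Int): Int = inc(x)
    }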
@@ -352,7 +358,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
specializedOn(sym) map (s => specializesClass(s).tpe) sorted
if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass))
- reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
+ reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefTpe + ".")
types
}
@@ -372,7 +378,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// zip the keys with each permutation to create a TypeEnv.
// If we don't exclude the "all AnyRef" specialization, we will
// incur duplicate members and crash during mixin.
- loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefClass.tpe)) map (xss => Map(keys zip xss: _*))
+ loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefTpe)) map (xss => Map(keys zip xss: _*))
}
/** Does the given 'sym' need to be specialized in the environment 'env'?
@@ -397,11 +403,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case _ => false
})
def specializedTypeVars(tpes: List[Type]): immutable.Set[Symbol] = {
- val buf = Set.newBuilder[Symbol]
- tpes foreach (tp => buf ++= specializedTypeVars(tp))
- buf.result
+ @tailrec def loop(result: immutable.Set[Symbol], xs: List[Type]): immutable.Set[Symbol] = {
+ if (xs.isEmpty) result
+ else loop(result ++ specializedTypeVars(xs.head), xs.tail)
+ }
+ loop(immutable.Set.empty, tpes)
}
- def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = beforeTyper(specializedTypeVars(sym.info))
+ def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = (
+ if (neverHasTypeParameters(sym)) immutable.Set.empty
+ else enteringTyper(specializedTypeVars(sym.info))
+ )
/** Return the set of @specialized type variables mentioned by the given type.
* It only counts type variables that appear:
@@ -412,7 +423,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def specializedTypeVars(tpe: Type): immutable.Set[Symbol] = tpe match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType)
- specializedTypeVars(tpe.normalize)
+ specializedTypeVars(tpe.dealiasWiden)
else if (sym.isTypeParameter && sym.isSpecialized || (sym.isTypeSkolem && sym.deSkolemize.isSpecialized))
Set(sym)
else if (sym == ArrayClass)
@@ -430,7 +441,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case AnnotatedType(_, tp, _) => specializedTypeVars(tp)
case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil)
case RefinedType(parents, _) => parents flatMap specializedTypeVars toSet
- case _ => Set()
+ case _ => immutable.Set.empty
}
/** Returns the type parameter in the specialized class `sClass` that corresponds to type parameter
@@ -441,7 +452,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
sClassMap.getOrElseUpdate(tparam,
tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX)
- modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
+ modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe))
).tpe
}
@@ -475,7 +486,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
foreach2(syms, cloned) { (orig, cln) =>
cln.removeAnnotation(SpecializedClass)
if (env.contains(orig))
- cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
+ cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe))
}
cloned map (_ substInfo (syms, cloned))
}
@@ -484,7 +495,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* the specialized symbol (class (specialization) or member (normalization)), leaves everything else as-is.
*/
private def mapAnyRefsInSpecSym(env: TypeEnv, origsym: Symbol, specsym: Symbol): TypeEnv = env map {
- case (sym, tp) if tp == AnyRefClass.tpe && sym.owner == origsym => (sym, typeParamSubAnyRef(sym, specsym))
+ case (sym, AnyRefTpe) if sym.owner == origsym => (sym, typeParamSubAnyRef(sym, specsym))
case x => x
}
@@ -492,8 +503,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* the original class, leaves everything else as-is.
*/
private def mapAnyRefsInOrigCls(env: TypeEnv, origcls: Symbol): TypeEnv = env map {
- case (sym, tp) if (tp == AnyRefClass.tpe) && sym.owner == origcls => (sym, sym.tpe)
- case x => x
+ case (sym, AnyRefTpe) if sym.owner == origcls => (sym, sym.tpe)
+ case x => x
}
/** Specialize 'clazz', in the environment `outerEnv`. The outer
@@ -506,9 +517,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def specializeClass(clazz: Symbol, outerEnv: TypeEnv): List[Symbol] = {
def specializedClass(env0: TypeEnv, normMembers: List[Symbol]): Symbol = {
- /** It gets hard to follow all the clazz and cls, and specializedClass
- * was both already used for a map and mucho long. So "sClass" is the
- * specialized subclass of "clazz" throughout this file.
+ /* It gets hard to follow all the clazz and cls, and specializedClass
+ * was both already used for a map and mucho long. So "sClass" is the
+ * specialized subclass of "clazz" throughout this file.
*/
// SI-5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is
@@ -525,7 +536,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) =
member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED), newName)
- sClass.sourceFile = clazz.sourceFile
+ sClass.associatedFile = clazz.sourceFile
currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin
val env = mapAnyRefsInSpecSym(env0, clazz, sClass)
@@ -537,7 +548,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
var newClassTParams: List[Symbol] = Nil // unspecialized type parameters of 'specializedClass' (cloned)
// has to be a val in order to be computed early. It is later called
- // within 'atPhase(next)', which would lead to an infinite cycle otherwise
+ // within 'enteringPhase(next)', which would lead to an infinite cycle otherwise
val specializedInfoType: Type = {
oldClassTParams = survivingParams(clazz.info.typeParams, env)
newClassTParams = produceTypeParameters(oldClassTParams, sClass, env) map subst(env)
@@ -546,18 +557,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def applyContext(tpe: Type) =
subst(env, tpe).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe))
- /** Return a list of specialized parents to be re-mixed in a specialized subclass.
- * Assuming env = [T -> Int] and
- * class Integral[@specialized T] extends Numeric[T]
- * and Numeric[U] is specialized on U, this produces List(Numeric$mcI).
+ /* Return a list of specialized parents to be re-mixed in a specialized subclass.
+ * Assuming env = [T -> Int] and
+ * class Integral[@specialized T] extends Numeric[T]
+ * and Numeric[U] is specialized on U, this produces List(Numeric$mcI).
*
- * so that class Integral$mci extends Integral[Int] with Numeric$mcI.
+ * so that class Integral$mci extends Integral[Int] with Numeric$mcI.
*/
def specializedParents(parents: List[Type]): List[Type] = {
var res: List[Type] = Nil
// log(specializedClass + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol))
for (p <- parents) {
- val stp = afterSpecialize(specializedType(p))
+ val stp = exitingSpecialize(specializedType(p))
if (stp != p)
if (p.typeSymbol.isTrait) res ::= stp
else if (currentRun.compiles(clazz))
@@ -567,7 +578,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
res
}
- var parents = List(applyContext(beforeTyper(clazz.tpe)))
+ var parents = List(applyContext(enteringTyper(clazz.tpe_*)))
// log("!!! Parents: " + parents + ", sym: " + parents.map(_.typeSymbol))
if (parents.head.typeSymbol.isTrait)
parents = parents.head.parents.head :: parents
@@ -589,13 +600,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
GenPolyType(newClassTParams, ClassInfoType(parents ::: extraSpecializedMixins, decls1, sClass))
}
- afterSpecialize(sClass setInfo specializedInfoType)
+ exitingSpecialize(sClass setInfo specializedInfoType)
val fullEnv = outerEnv ++ env
- /** Enter 'sym' in the scope of the current specialized class. It's type is
- * mapped through the active environment, binding type variables to concrete
- * types. The existing typeEnv for `sym` is composed with the current active
- * environment
+ /* Enter 'sym' in the scope of the current specialized class. Its type is
+ * mapped through the active environment, binding type variables to concrete
+ * types. The existing typeEnv for `sym` is composed with the current active
+ * environment
*/
def enterMember(sym: Symbol): Symbol = {
typeEnv(sym) = fullEnv ++ typeEnv(sym) // append the full environment
@@ -608,18 +619,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
decls1 enter subst(fullEnv)(sym)
}
- /** Create and enter in scope an overridden symbol m1 for `m` that forwards
- * to `om`. `om` is a fresh, special overload of m1 that is an implementation
- * of `m`. For example, for a
+ /* Create and enter in scope an overridden symbol m1 for `m` that forwards
+ * to `om`. `om` is a fresh, special overload of m1 that is an implementation
+ * of `m`. For example, for a
*
- * class Foo[@specialized A] {
- * def m(x: A) = <body> // m
- * }
- * , for class Foo$I extends Foo[Int], this method enters two new symbols in
- * the scope of Foo$I:
+ * class Foo[@specialized A] {
+ * def m(x: A) = <body> // m
+ * }
+ * , for class Foo$I extends Foo[Int], this method enters two new symbols in
+ * the scope of Foo$I:
*
- * def m(x: Int) = m$I(x) // m1
- * def m$I(x: Int) = <body>/adapted to env {A -> Int} // om
+ * def m(x: Int) = m$I(x) // m1
+ * def m$I(x: Int) = <body>/adapted to env {A -> Int} // om
*/
def forwardToOverload(m: Symbol): Symbol = {
val specMember = enterMember(cloneInSpecializedClass(m, f => (f | OVERRIDE) & ~(DEFERRED | CASEACCESSOR)))
@@ -683,7 +694,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def mkAccessor(field: Symbol, name: Name) = {
val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR)
// we rely on the super class to initialize param accessors
- val sym = sClass.newMethod(name, field.pos, newFlags)
+ val sym = sClass.newMethod(name.toTermName, field.pos, newFlags)
info(sym) = SpecializedAccessor(field)
sym
}
@@ -702,7 +713,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// debuglog("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
if (nme.isLocalName(m.name)) {
- val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)) setInfo MethodType(Nil, specVal.info)
+ val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info)
val origGetter = overrideIn(sClass, m.getter(clazz))
info(origGetter) = Forward(specGetter)
enterMember(specGetter)
@@ -717,10 +728,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
if (specVal.isVariable && m.setter(clazz) != NoSymbol) {
- val specSetter = mkAccessor(specVal, nme.getterToSetter(specGetter.name))
+ val specSetter = mkAccessor(specVal, specGetter.setterName)
.resetFlag(STABLE)
specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)),
- UnitClass.tpe))
+ UnitTpe))
val origSetter = overrideIn(sClass, m.setter(clazz))
info(origSetter) = Forward(specSetter)
enterMember(specSetter)
@@ -777,7 +788,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (existing != NoSymbol)
clazz.owner.info.decls.unlink(existing)
- afterSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
+ exitingSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
}
if (subclasses.nonEmpty) clazz.resetFlag(FINAL)
cleanAnyRefSpecCache(clazz, decls1)
@@ -795,7 +806,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
sym :: (
- if (!sym.isMethod || beforeTyper(sym.typeParams.isEmpty)) Nil
+ if (!sym.isMethod || enteringTyper(sym.typeParams.isEmpty)) Nil
else if (sym.hasDefault) {
/* Specializing default getters is useless; see also SI-7329. */
sym.resetFlag(SPECIALIZED)
@@ -882,6 +893,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
+ owner.info.decls.enter(specMember)
typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
@@ -910,10 +922,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
/** Return the specialized overload of `m`, in the given environment. */
- private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
- val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR)
+ private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv, nameSymbol: Symbol = NoSymbol): Symbol = {
+ val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | LAZY)
// this method properly duplicates the symbol's info
- ( sym.cloneSymbol(owner, newFlags, newName = specializedName(sym, env))
+ val specname = specializedName(nameSymbol orElse sym, env)
+ ( sym.cloneSymbol(owner, newFlags, newName = specname)
modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
)
}
@@ -929,13 +942,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* this method will return List('apply$mcII$sp')
*/
private def specialOverrides(clazz: Symbol) = logResultIf[List[Symbol]]("specialized overrides in " + clazz, _.nonEmpty) {
- /** Return the overridden symbol in syms that needs a specialized overriding symbol,
- * together with its specialization environment. The overridden symbol may not be
- * the closest to 'overriding', in a given hierarchy.
+ /* Return the overridden symbol in syms that needs a specialized overriding symbol,
+ * together with its specialization environment. The overridden symbol may not be
+ * the closest to 'overriding', in a given hierarchy.
*
- * An method m needs a special override if
- * * m overrides a method whose type contains specialized type variables
- * * there is a valid specialization environment that maps the overridden method type to m's type.
+ * A method m needs a special override if
+ * * m overrides a method whose type contains specialized type variables
+ * * there is a valid specialization environment that maps the overridden method type to m's type.
*/
def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = {
def checkOverriddenTParams(overridden: Symbol) {
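
A rough user-level sketch of the "special override" case documented above (hypothetical code, not part of this patch): a method that overrides a member whose type mentions a specialized type variable, under a valid environment, gets a specialized overriding symbol entered next to it.

  trait Bippy[@specialized(Int) A] { def apply(a: A): A }
  class IntBippy extends Bippy[Int] {
    def apply(a: Int): Int = a   // overrides a method whose type contains the specialized A
  }
  // With env = {A -> Int}, specialOverrides enters a specialized override,
  // roughly apply$mcI$sp, in IntBippy so specialized call sites can dispatch
  // to it without boxing.
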
@@ -960,7 +973,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
checkOverriddenTParams(overridden)
val env = unify(overridden.info, overriding.info, emptyEnv, false, true)
- def atNext = afterSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
+ def atNext = exitingSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol) {
debuglog(" " + pp(env) + " found " + atNext)
@@ -973,18 +986,36 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
(clazz.info.decls flatMap { overriding =>
needsSpecialOverride(overriding) match {
- case (NoSymbol, _) => None
+ case (NoSymbol, _) =>
+ if (overriding.isSuperAccessor) {
+ val alias = overriding.alias
+ debuglog("checking special overload for super accessor: %s, alias for %s".format(overriding.fullName, alias.fullName))
+ needsSpecialOverride(alias) match {
+ case nope @ (NoSymbol, _) => None
+ case (overridden, env) =>
+ val om = specializedOverload(clazz, overriding, env, overridden)
+ om.setName(nme.superName(om.name))
+ om.asInstanceOf[TermSymbol].setAlias(info(alias).target)
+ om.owner.info.decls.enter(om)
+ info(om) = SpecialSuperAccessor(om)
+ om.makeNotPrivate(om.owner)
+ newOverload(overriding, om, env)
+ Some(om)
+ }
+ } else None
case (overridden, env) =>
val om = specializedOverload(clazz, overridden, env)
+ clazz.info.decls.enter(om)
foreachWithIndex(om.paramss) { (params, i) =>
foreachWithIndex(params) { (param, j) =>
param.name = overriding.paramss(i)(j).name // SI-6555 Retain the parameter names from the subclass.
}
}
debuglog("specialized overload %s for %s in %s: %s".format(om, overriding.name.decode, pp(env), om.info))
+ if (overriding.isAbstractOverride) om.setFlag(ABSOVERRIDE)
typeEnv(om) = env
addConcreteSpecMethod(overriding)
- if (overriding.isDeferred) { // abstract override
+ if (overriding.isDeferred) { // abstract override
debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
info(om) = Forward(overriding)
}
@@ -1002,8 +1033,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
info(overriding) = Forward(om setPos overriding.pos)
}
+
newOverload(overriding, om, env)
- ifDebug(afterSpecialize(assert(
+ ifDebug(exitingSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
"Could not find " + om.name + " in " + overridden.owner.info.decls))
)
@@ -1032,7 +1064,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (isPrimitiveValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
env + ((sym1, tp2))
else if (isSpecializedAnyRefSubtype(tp2, sym1))
- env + ((sym1, tp2)) // env + ((sym1, AnyRefClass.tpe))
+ env + ((sym1, tp2))
else if (strict)
unifyError(tp1, tp2)
else
@@ -1089,10 +1121,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Apply type bindings in the given environment `env` to all declarations. */
- private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
- decls map subst(env)
-
/** Apply the type environment 'env' to the given type. All type
* bindings are supposed to be to primitive types. A type variable
* that is annotated with 'uncheckedVariance' is mapped to the corresponding
@@ -1119,35 +1147,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def subst(env: TypeEnv)(decl: Symbol): Symbol =
decl modifyInfo (info =>
- if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe)
+ if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe_*)
else subst(env, info)
)
- /** Checks if the type parameter symbol is not specialized
- * and is used as type parameters when extending a class with a specialized
- * type parameter.
- * At some point we may remove this restriction.
- *
- * Example:
- *
- * class Base[@specialized T]
- * class Derived[T] extends Base[T] // a non-specialized T is
- * // used as a type param for Base
- * // -> returning true
- */
- private def notSpecializedIn(tsym: Symbol, supertpe: Type) = supertpe match {
- case TypeRef(_, supersym, supertargs) =>
- val tspec = specializedOn(tsym).toSet
- for (supt <- supersym.typeParams) {
- val supspec = specializedOn(supt).toSet
- if (tspec != supspec && tspec.subsetOf(supspec))
- reporter.error(tsym.pos, "Type parameter has to be specialized at least for the same types as in the superclass. Missing types: " + (supspec.diff(tspec)).mkString(", "))
- }
- case _ => //log("nope")
- }
-
private def unspecializableClass(tp: Type) = (
- definitions.isRepeatedParamType(tp) // ???
+ isRepeatedParamType(tp) // ???
|| tp.typeSymbol.isJavaDefined
|| tp.typeSymbol.isPackageClass
)
@@ -1156,12 +1161,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* If it is a 'no-specialization' run, it is applied only to loaded symbols.
*/
override def transformInfo(sym: Symbol, tpe: Type): Type = {
- if (settings.nospecialization.value && currentRun.compiles(sym)) tpe
+ if (settings.nospecialization && currentRun.compiles(sym)) tpe
else tpe.resultType match {
case cinfo @ ClassInfoType(parents, decls, clazz) if !unspecializableClass(cinfo) =>
val tparams = tpe.typeParams
if (tparams.isEmpty)
- afterSpecialize(parents map (_.typeSymbol.info))
+ exitingSpecialize(parents map (_.typeSymbol.info))
val parents1 = parents mapConserve specializedType
if (parents ne parents1) {
@@ -1182,7 +1187,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*
* A conflicting type environment could still be satisfiable.
*/
- def conflicting(env: TypeEnv) = !nonConflicting(env)
def nonConflicting(env: TypeEnv) = env forall { case (tvar, tpe) =>
(subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi))
}
@@ -1252,9 +1256,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) {
override def castType(tree: Tree, pt: Type): Tree = {
- // log(" expected type: " + pt)
- // log(" tree type: " + tree.tpe)
- tree.tpe = if (tree.tpe != null) fixType(tree.tpe) else null
+ tree modifyType fixType
// log(" tree type: " + tree.tpe)
val ntree = if (tree.tpe != null && !(tree.tpe <:< pt)) {
val casttpe = CastMap(tree.tpe)
@@ -1262,8 +1264,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else if (casttpe <:< CastMap(pt)) gen.mkCast(tree, pt)
else tree
} else tree
- ntree.tpe = null
- ntree
+
+ ntree.clearType()
}
}
@@ -1296,7 +1298,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
class SpecializationDuplicator(casts: Map[Symbol, Type]) extends Duplicator(casts) {
override def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree =
- beforeSpecialize(super.retyped(context, tree, oldThis, newThis, env))
+ enteringSpecialize(super.retyped(context, tree, oldThis, newThis, env))
}
/** A tree symbol substituter that substitutes on type skolems.
@@ -1319,7 +1321,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
private def isAccessible(sym: Symbol): Boolean =
- (currentClass == sym.owner.enclClass) && (currentClass != targetClass)
+ if (currentOwner.isAnonymousFunction) {
+ if (inlineFunctionExpansion) devWarning("anonymous function made it to specialization even though inline expansion is set.")
+ false
+ }
+ else (currentClass == sym.owner.enclClass) && (currentClass != targetClass)
private def shouldMakePublic(sym: Symbol): Boolean =
sym.hasFlag(PRIVATE | PROTECTED) && (addressFields || !nme.isLocalName(sym.name))
@@ -1333,7 +1339,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (sym.isPrivate) debuglog(
"seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b".format(
sym, currentClass, sym.owner.enclClass, isAccessible(sym), nme.isLocalName(sym.name))
- )
+ )
if (shouldMakePublic(sym) && !isAccessible(sym)) {
debuglog("changing private flag of " + sym)
sym.makeNotPrivate(sym.owner)
@@ -1418,28 +1424,64 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def transform1(tree: Tree) = {
val symbol = tree.symbol
-
- /** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
- def specSym(qual: Tree): Option[Symbol] = {
+ /* The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
+ def specSym(qual: Tree): Symbol = {
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- debuglog("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
- .format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env)))
- if (env.nonEmpty) { // a method?
- val specCandidates = qual.tpe.member(specializedName(symbol, env))
- val specMember = specCandidates suchThat { s =>
- doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env)
+ def isMatch(member: Symbol) = {
+ val memberType = qual.tpe memberType member
+
+ val residualTreeType = tree match {
+ case TypeApply(fun, targs) if fun.symbol == symbol =>
+ // SI-6308 Handle methods with only some type parameters specialized.
+ // drop the specialized type parameters from the PolyType, and
+ // substitute in the type environment.
+ val GenPolyType(tparams, tpe) = fun.tpe
+ val (from, to) = env.toList.unzip
+ val residualTParams = tparams.filterNot(env.contains)
+ GenPolyType(residualTParams, tpe).substituteTypes(from, to)
+ case _ => tree.tpe
}
- debuglog("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
- debuglog("[specSym] found specMember: " + specMember)
- if (specMember ne NoSymbol)
- if (TypeEnv.includes(typeEnv(specMember), env)) Some(specMember)
- else {
- debuglog("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env))
- None
- }
- else None
- } else None
+ (
+ doesConform(symbol, residualTreeType, memberType, env)
+ && TypeEnv.includes(typeEnv(member), env)
+ )
+ }
+ if (env.isEmpty) NoSymbol
+ else qual.tpe member specializedName(symbol, env) suchThat isMatch
+ }
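
As a hedged illustration of the SI-6308 handling above (hypothetical user code): when only some type parameters are specialized, the unspecialized ones survive as residual type arguments on the rewired call.

  object PartialSpec {
    def specMe[@specialized(Int) A, B](a: A, b: B): B = b
    specMe[Int, String](1, "2")  // rewired to the Int variant, roughly specMe$mIc$sp[String](1, "2"),
                                 // with B = String kept as a residual type argument
  }
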
+
+ def matchingSymbolInPrefix(pre: Type, member: Symbol, env: TypeEnv): Symbol = {
+ pre member specializedName(member, env) suchThat (_.tpe matches subst(env, member.tpe))
+ }
+
+ def transformSelect(sel: Select) = {
+ val Select(qual, name) = sel
+ debuglog(s"specializing Select(sym=${symbol.defString}, tree.tpe=${tree.tpe})")
+
+ val qual1 = transform(qual)
+ def copySelect = treeCopy.Select(tree, qual1, name)
+ def newSelect(member: Symbol) = atPos(tree.pos)(Select(qual1, member))
+ def typedOp(member: Symbol) = localTyper typedOperator newSelect(member)
+ def typedTree(member: Symbol) = localTyper typed newSelect(member)
+
+ val ignoreEnv = specializedTypeVars(symbol.info).isEmpty || name == nme.CONSTRUCTOR
+ if (ignoreEnv) overloads(symbol) find (_ matchesSym symbol) match {
+ case Some(Overload(member, _)) => typedOp(member)
+ case _ => copySelect
+ }
+ else {
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(member, _)) => typedOp(member)
+ case _ =>
+ matchingSymbolInPrefix(qual1.tpe, symbol, env) match {
+ case NoSymbol => copySelect
+ case member if member.isMethod => typedOp(member)
+ case member => typedTree(member)
+ }
+ }
+ }
}
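
A minimal sketch of what transformSelect rewires (hypothetical code, names assumed): a selection whose symbol's type mentions specialized type variables is retyped to the matching specialized overload whenever the unification environment is non-empty.

  object SelectDemo {
    class Box[@specialized(Int) A](val a: A) { def get: A = a }
    (new Box(1)).get   // unify gives {A -> Int}; the Select is retyped to the
                       // registered overload, roughly get$mcI$sp, avoiding boxing
  }
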
/** Computes residual type parameters after rewiring, like "String" in the following example:
@@ -1448,25 +1490,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* specMe[Int, String](1, "2") => specMe$mIc$sp[String](1, "2")
* ```
*/
- def computeResidualTypeVars(baseTree: Tree, specTree: Tree, baseTargs: List[Tree], env: TypeEnv) = {
- val baseSym: Symbol = baseTree.symbol
- val specSym: Symbol = specTree.symbol
- val residualTargs = baseSym.info.typeParams zip baseTargs collect {
+ def computeResidualTypeVars(baseTree: Tree, specMember: Symbol, specTree: Tree, baseTargs: List[Tree], env: TypeEnv): Tree = {
+ val residualTargs = symbol.info.typeParams zip baseTargs collect {
case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
}
-
- if (specSym.info.typeParams.isEmpty && residualTargs.nonEmpty) {
- log("!!! Type args to be applied, but symbol says no parameters: " + ((specSym.defString, residualTargs)))
+ // See SI-5583. Don't know why it happens now if it didn't before.
+ if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
+ devWarning("Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
baseTree
}
else {
- ifDebug(assert(residualTargs.length == specSym.info.typeParams.length,
- "residual: %s, tparams: %s, env: %s".format(residualTargs, specSym.info.typeParams, env))
+ ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
+ "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
)
val tree1 = gen.mkTypeApply(specTree, residualTargs)
debuglog("rewrote " + tree + " to " + tree1)
- localTyper.typedOperator(atPos(tree.pos)(tree1))
+ localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
}
}
@@ -1474,31 +1514,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
def transformNew = {
- debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
- val found = findSpec(tpt.tpe)
- if (found.typeSymbol ne tpt.tpe.typeSymbol) {
- // the ctor can be specialized
- debuglog("** instantiated specialized type: " + found)
- reportError {
- localTyper.typedPos(tree.pos)(New(found, transformTrees(args): _*))
- } {
- _ => super.transform(tree)
+ debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
+ val found = specializedType(tpt.tpe)
+ if (found.typeSymbol ne tpt.tpe.typeSymbol) { // the ctor can be specialized
+ val inst = New(found, transformTrees(args): _*)
+ reportError(localTyper.typedPos(tree.pos)(inst))(_ => super.transform(tree))
}
- } else super.transform(tree)
+ else
+ super.transform(tree)
}
transformNew
- case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
- if (sup.symbol.info.parents != beforePrevPhase(sup.symbol.info.parents)) =>
+ case Apply(sel @ Select(sup @ Super(qual, name), name1), args) if hasNewParents(sup) =>
def transformSuperApply = {
-
- def parents = sup.symbol.info.parents
- debuglog(tree + " parents changed from: " + beforePrevPhase(parents) + " to: " + parents)
-
- val res = localTyper.typed(
- Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
- debuglog("retyping call to super, from: " + symbol + " to " + res.symbol)
- res
+ val sup1 = Super(qual, name) setPos sup.pos
+ val tree1 = Apply(Select(sup1, name1) setPos sel.pos, transformTrees(args))
+ val res = localTyper.typedPos(tree.pos)(tree1)
+ debuglog(s"retyping call to super, from: $symbol to ${res.symbol}")
+ res
}
transformSuperApply
@@ -1513,17 +1546,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val qual1 = transform(qual)
log(">>> TypeApply: " + tree + ", qual1: " + qual1)
specSym(qual1) match {
- case Some(specMember) =>
+ case NoSymbol =>
+ // See pos/exponential-spec.scala - can't call transform on the whole tree again.
+ treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), transformTrees(targs))
+ case specMember =>
debuglog("found " + specMember.fullName)
ifDebug(assert(symbol.info.typeParams.length == targs.length, symbol.info.typeParams + " / " + targs))
val env = typeEnv(specMember)
- computeResidualTypeVars(tree, gen.mkAttributedSelect(qual1, specMember), targs, env)
-
- case None =>
- treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), super.transformTrees(targs))
- // See pos/exponential-spec.scala - can't call transform on the whole tree again.
- // super.transform(tree)
+ computeResidualTypeVars(tree, specMember, gen.mkAttributedSelect(qual1, specMember), targs, env)
}
// This rewires calls to specialized methods defined in the local scope. For example:
@@ -1536,7 +1567,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (env.isEmpty) super.transform(tree)
else {
overloads(symbol) find (_ matchesEnv env) match {
- case Some(Overload(specMember, _)) => computeResidualTypeVars(tree, Ident(specMember), targs, env)
+ case Some(Overload(specMember, _)) => computeResidualTypeVars(tree, specMember, Ident(specMember), targs, env)
case _ => super.transform(tree)
}
}
@@ -1546,36 +1577,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent)
tree
- case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty =>
- debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
- val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- if (env.isEmpty) super.transform(tree)
- else {
- val qual1 = transform(qual)
- def reselect(member: Symbol) = {
- val newSelect = atPos(tree.pos)(Select(qual1, member))
- if (member.isMethod) localTyper typedOperator newSelect
- else localTyper typed newSelect
- }
- overloads(symbol) find (_ matchesEnv env) match {
- case Some(Overload(member, _)) => reselect(member)
- case _ =>
- val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
- if (specMember ne NoSymbol)
- reselect(specMember)
- else
- treeCopy.Select(tree, qual1, name)
- }
- }
- case Select(qual, _) =>
- overloads(symbol) find (_ matchesSym symbol) match {
- case Some(Overload(member, _)) =>
- val newTree = Select(transform(qual), member)
- debuglog(s"** routing $tree to ${member.fullName} tree: $newTree")
- localTyper.typedOperator(atPos(tree.pos)(newTree))
- case None =>
- super.transform(tree)
- }
+ case sel @ Select(_, _) =>
+ transformSelect(sel)
case PackageDef(pid, stats) =>
tree.symbol.info // make sure specializations have been performed
@@ -1600,47 +1603,37 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
transformTemplate
case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
- def transformDefDef = {
- // log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
- def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef)
-
+ def transformDefDef = {
if (symbol.isConstructor) {
-
- val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
-
+ val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperInitCall, vparamss, symbol.owner))
if (symbol.isPrimaryConstructor)
- localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
+ localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant(())))))
else // duplicate the original constructor
- reportTypeError(duplicateBody(ddef, info(symbol).target))
+ reportError(duplicateBody(ddef, info(symbol).target))(_ => ddef)
}
else info(symbol) match {
case Implementation(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
- val tree1 = reportTypeError {
- duplicateBody(ddef, target)
- }
+ val tree1 = reportError(duplicateBody(ddef, target))(_ => ddef)
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
case NormalizedMember(target) =>
- val constraints = satisfiabilityConstraints(typeEnv(symbol))
- log("constraints: " + constraints)
- if (target.isDeferred || constraints == None) {
- deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
- } else {
- // we have an rhs, specialize it
- val tree1 = reportTypeError {
- duplicateBody(ddef, target, constraints.get)
- }
- debuglog("implementation: " + tree1)
- deriveDefDef(tree1)(transform)
+ logResult("constraints")(satisfiabilityConstraints(typeEnv(symbol))) match {
+ case Some(constraint) if !target.isDeferred =>
+ // we have an rhs, specialize it
+ val tree1 = reportError(duplicateBody(ddef, target, constraint))(_ => ddef)
+ debuglog("implementation: " + tree1)
+ deriveDefDef(tree1)(transform)
+ case _ =>
+ deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
}
case SpecialOverride(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
//debuglog("moving implementation, body of target " + target + ": " + body(target))
- debuglog("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
+ log("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
// we have an rhs, specialize it
val tree1 = addBody(ddef, target)
(new ChangeOwnerTraverser(target, tree1.symbol))(tree1.rhs)
@@ -1688,6 +1681,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case Abstract(targ) =>
debuglog("abstract: " + targ)
localTyper.typed(deriveDefDef(tree)(rhs => rhs))
+
+ case SpecialSuperAccessor(targ) =>
+ debuglog("special super accessor: " + targ + " for " + tree)
+ localTyper.typed(deriveDefDef(tree)(rhs => rhs))
}
}
expandInnerNormalizedMembers(transformDefDef)
@@ -1713,7 +1710,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
deriveValDef(newValDef)(transform)
}
transformValDef
-
case _ =>
super.transform(tree)
}
@@ -1783,7 +1779,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val symbol = tree.symbol
debuglog("specializing body of" + symbol.defString)
val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree
-// val (_, origtparams) = splitParams(source.typeParams)
val env = typeEnv(symbol)
val boundTvars = env.keySet
val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isPrimitiveValueType(env(tparam)))
@@ -1810,8 +1805,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
false) // don't make private fields public
val newBody = symSubstituter(body(source).duplicate)
- tpt.tpe = tpt.tpe.substSym(oldtparams, newtparams)
-
+ tpt modifyType (_.substSym(oldtparams, newtparams))
copyDefDef(tree)(vparamss = List(newSyms map ValDef), rhs = newBody)
}
@@ -1849,26 +1843,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
// ctor
- mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef), EmptyTree))
+ mbrs += DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef), EmptyTree)
} else {
- mbrs += atPos(m.pos)(DefDef(m, { paramss => EmptyTree }))
+ mbrs += DefDef(m, { paramss => EmptyTree })
}
} else if (m.isValue) {
- mbrs += ValDef(m, EmptyTree).setType(NoType).setPos(m.pos)
+ mbrs += ValDef(m).setType(NoType)
} else if (m.isClass) {
// mbrs +=
-// ClassDef(m, Template(m.info.parents map TypeTree, emptyValDef, List())
+// ClassDef(m, Template(m.info.parents map TypeTree, noSelfType, List())
// .setSymbol(m.newLocalDummy(m.pos)))
// log("created synthetic class: " + m.fullName)
}
}
if (hasSpecializedFields) {
val isSpecializedInstance = sClass :: sClass.parentSymbols exists (_ hasFlag SPECIALIZED)
- val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanClass.tpe)
+ val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanTpe)
- mbrs += atPos(sym.pos) {
- DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanClass.tpe)).setType(NoType)
- }
+ mbrs += DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanTpe)).setType(NoType)
}
mbrs.toList
}
@@ -1881,7 +1873,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) yield {
debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
val parents = specCls.info.parents.map(TypeTree)
- ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
+ ClassDef(specCls, atPos(impl.pos)(Template(parents, noSelfType, List()))
.setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
}
case _ => Nil
@@ -1914,16 +1906,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* }}
*/
private def forwardCtorCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = {
+ log(s"forwardCtorCall($pos, $receiver, $paramss, $clazz)")
- /** A constructor parameter `f` initializes a specialized field
- * iff:
- * - it is specialized itself
- * - there is a getter for the original (non-specialized) field in the same class
- * - there is a getter for the specialized field in the same class
+ /* A constructor parameter `f` initializes a specialized field
+ * iff:
+ * - it is specialized itself
+ * - there is a getter for the original (non-specialized) field in the same class
+ * - there is a getter for the specialized field in the same class
*/
def initializesSpecializedField(f: Symbol) = (
(f.name endsWith nme.SPECIALIZED_SUFFIX)
- && clazz.info.member(nme.originalName(f.name)).isPublic
+ && clazz.info.member(f.unexpandedName).isPublic
&& clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol
)
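
A hedged concrete reading of initializesSpecializedField above (hypothetical example): for

  class Vec[@specialized(Int) A](val x: A)

a constructor parameter of the Int-specialized subclass whose name ends in the $sp suffix (roughly x$mcI$sp) counts as initializing a specialized field, provided the class also declares a public getter for the original x and a getter for the specialized field.
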
@@ -1950,16 +1943,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
//! TODO: make sure the param types are seen from the right prefix
map2(fun.info.paramTypes, vparams)((tp, arg) => gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe))
)
- private def findSpec(tp: Type): Type = tp match {
- case TypeRef(pre, sym, _ :: _) => specializedType(tp)
- case _ => tp
- }
class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
informProgress("specializing " + unit)
override def transform(tree: Tree) = {
- val resultTree = if (settings.nospecialization.value) tree
- else afterSpecialize(specializeCalls(unit).transform(tree))
+ val resultTree = if (settings.nospecialization) tree
+ else exitingSpecialize(specializeCalls(unit).transform(tree))
// Remove the final modifier and @inline annotation from anything in the
// original class (since it's being overridden in at least one subclass).
@@ -1977,13 +1966,5 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
resultTree
- }
- }
-
- def printSpecStats() {
- println(" concreteSpecMembers: %7d".format(concreteSpecMethods.size))
- println(" overloads: %7d".format(overloads.size))
- println(" typeEnv: %7d".format(typeEnv.size))
- println(" info: %7d".format(info.size))
- }
+ } }
}
diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala
new file mode 100644
index 0000000000..e2508b8d08
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/Statics.scala
@@ -0,0 +1,52 @@
+package scala.tools.nsc
+package transform
+
+import symtab._
+import Flags._
+
+import collection.mutable.Buffer
+
+abstract class Statics extends Transform with ast.TreeDSL {
+ import global._
+
+ class StaticsTransformer extends Transformer {
+
+ /** finds the static ctor DefDef tree within the template if it exists. */
+ def findStaticCtor(template: Template): Option[Tree] =
+ template.body find {
+ case defdef @ DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => defdef.symbol.hasStaticFlag
+ case _ => false
+ }
+
+ /** changes the template for the class so that it contains a static constructor with symbol fields inits,
+ * augments an existing static ctor if one already existed.
+ */
+ def addStaticInits(template: Template, newStaticInits: Buffer[Tree], localTyper: analyzer.Typer): Template = {
+ if (newStaticInits.isEmpty)
+ template
+ else {
+ val newCtor = findStaticCtor(template) match {
+ // in case there already were static ctors - augment existing ones
+ // currently, however, static ctors aren't being generated anywhere else
+ case Some(ctor @ DefDef(_,_,_,_,_,_)) =>
+ // modify existing static ctor
+ deriveDefDef(ctor) {
+ case block @ Block(stats, expr) =>
+ // need to add inits to existing block
+ treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
+ case term: TermTree =>
+ // need to create a new block with inits and the old term
+ treeCopy.Block(term, newStaticInits.toList, term)
+ }
+ case _ =>
+ // create new static ctor
+ val staticCtorSym = currentClass.newStaticConstructor(template.pos)
+ val rhs = Block(newStaticInits.toList, Literal(Constant(())))
+
+ localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
+ }
+ deriveTemplate(template)(newCtor :: _)
+ }
+ }
+ }
+}
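
A rough sketch of the effect of addStaticInits (conceptual trees only, not part of this patch):

  // Given newStaticInits = List(<init1>, <init2>):
  //
  //   no static ctor present:
  //     class C { ... }
  //       ==>  class C { <static> def <clinit>() { <init1>; <init2>; () }; ... }
  //
  //   static ctor already present:
  //     class C { <static> def <clinit>() { <old stats>; () }; ... }
  //       ==>  class C { <static> def <clinit>() { <init1>; <init2>; <old stats>; () }; ... }
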
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 938499261e..b471d16ddd 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -3,7 +3,8 @@
* @author Iulian Dragos
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import symtab.Flags
@@ -17,7 +18,7 @@ import Flags.SYNTHETIC
abstract class TailCalls extends Transform {
import global._ // the global environment
import definitions._ // standard classes and methods
- import typer.{ typed, typedPos } // methods to type trees
+ import typer.typedPos // methods to type trees
val phaseName: String = "tailcalls"
@@ -31,7 +32,7 @@ abstract class TailCalls extends Transform {
class Phase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
def apply(unit: global.CompilationUnit) {
if (!(settings.debuginfo.value == "notailcalls")) {
- newTransformer(unit).transformUnit(unit);
+ newTransformer(unit).transformUnit(unit)
}
}
}
@@ -82,104 +83,117 @@ abstract class TailCalls extends Transform {
* that label.
* </p>
* <p>
- * Assumes: <code>Uncurry</code> has been run already, and no multiple
+ * Assumes: `Uncurry` has been run already, and no multiple
* parameter lists exist.
* </p>
*/
class TailCallElimination(unit: CompilationUnit) extends Transformer {
- private val defaultReason = "it contains a recursive call not in tail position"
+ private def defaultReason = "it contains a recursive call not in tail position"
+ private val failPositions = perRunCaches.newMap[TailContext, Position]() withDefault (_.methodPos)
+ private val failReasons = perRunCaches.newMap[TailContext, String]() withDefaultValue defaultReason
+ private def tailrecFailure(ctx: TailContext) {
+ val method = ctx.method
+ val failReason = failReasons(ctx)
+ val failPos = failPositions(ctx)
+
+ unit.error(failPos, s"could not optimize @tailrec annotated $method: $failReason")
+ }
/** Has the label been accessed? Then its symbol is in this set. */
- private val accessed = new scala.collection.mutable.HashSet[Symbol]()
+ private val accessed = perRunCaches.newSet[Symbol]()
// `accessed` was stored as a boolean in the current context -- this is no longer tenable:
// with jumps to labels in tail position now themselves considered to be in tail position,
// a downstream context may access the label, and the upstream one will be none the wiser.
// This is necessary because tail calls may occur in places where syntactically they seem impossible
// (since we now consider jumps to labels that are in tail position, such as matchEnd(x) {x})
+ sealed trait TailContext {
+ def method: Symbol // current method
+ def tparams: List[Symbol] // type parameters
+ def methodPos: Position // default position for failure reporting
+ def tailPos: Boolean // context is in tail position
+ def label: Symbol // new label, tail call target
+ def tailLabels: Set[Symbol]
+
+ def enclosingType = method.enclClass.typeOfThis
+ def isEligible = method.isEffectivelyFinal
+ def isMandatory = method.hasAnnotation(TailrecClass)
+ def isTransformed = isEligible && accessed(label)
+
+ def newThis(pos: Position) = {
+ def msg = "Creating new `this` during tailcalls\n method: %s\n current class: %s".format(
+ method.ownerChain.mkString(" -> "),
+ currentClass.ownerChain.mkString(" -> ")
+ )
+ logResult(msg)(method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis)
+ }
+ override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
+ }
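
For reference, a hedged example of the diagnostic this bookkeeping produces (hypothetical user code):

  import scala.annotation.tailrec
  class NotTail {
    @tailrec final def sum(n: Int): Int =
      if (n == 0) 0 else n + sum(n - 1)  // the recursive call is an operand of +, not in tail position
  }
  // rejected at compile time, reported roughly as:
  //   could not optimize @tailrec annotated method sum: it contains a recursive call not in tail position
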
- class Context() {
- /** The current method */
- var method: Symbol = NoSymbol
-
- // symbols of label defs in this method that are in tail position
- var tailLabels: Set[Symbol] = Set()
-
- /** The current tail-call label */
- var label: Symbol = NoSymbol
-
- /** The expected type arguments of self-recursive calls */
- var tparams: List[Symbol] = Nil
-
- /** Tells whether we are in a (possible) tail position */
- var tailPos = false
-
- /** The reason this method could not be optimized. */
- var failReason = defaultReason
- var failPos = method.pos
+ object EmptyTailContext extends TailContext {
+ def method = NoSymbol
+ def tparams = Nil
+ def methodPos = NoPosition
+ def tailPos = false
+ def label = NoSymbol
+ def tailLabels = Set.empty[Symbol]
+ }
- def this(that: Context) = {
- this()
- this.method = that.method
- this.tparams = that.tparams
- this.tailPos = that.tailPos
- this.failPos = that.failPos
- this.label = that.label
- this.tailLabels = that.tailLabels
+ class DefDefTailContext(dd: DefDef) extends TailContext {
+ def method = dd.symbol
+ def tparams = dd.tparams map (_.symbol)
+ def methodPos = dd.pos
+ def tailPos = true
+
+ lazy val label = mkLabel()
+ lazy val tailLabels = {
+ // labels are local to a method, so only traverse the rhs of a defdef
+ val collector = new TailPosLabelsTraverser
+ collector traverse dd.rhs
+ collector.tailLabels.toSet
}
- def this(dd: DefDef) {
- this()
- this.method = dd.symbol
- this.tparams = dd.tparams map (_.symbol)
- this.tailPos = true
- this.failPos = dd.pos
-
- /** Create a new method symbol for the current method and store it in
- * the label field.
- */
- this.label = {
- val label = method.newLabel(newTermName("_" + method.name), method.pos)
- val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
- label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
- }
+
+ private def mkLabel() = {
+ val label = method.newLabel(newTermName("_" + method.name), method.pos)
+ val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
+ label setInfo MethodType(thisParam :: method.tpe.params, method.tpe_*.finalResultType)
if (isEligible)
label substInfo (method.tpe.typeParams, tparams)
- }
- def enclosingType = method.enclClass.typeOfThis
- def methodTypeParams = method.tpe.typeParams
- def isEligible = method.isEffectivelyFinal
- // @tailrec annotation indicates mandatory transformation
- def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL
- def isTransformed = isEligible && accessed(label)
- def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
-
- def newThis(pos: Position) = logResult("Creating new `this` during tailcalls\n method: %s\n current class: %s".format(
- method.ownerChain.mkString(" -> "), currentClass.ownerChain.mkString(" -> "))) {
- method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
+ label
}
-
- override def toString(): String = (
- "" + method.name + " tparams: " + tparams + " tailPos: " + tailPos +
- " Label: " + label + " Label type: " + label.info
- )
+ private def isRecursiveCall(t: Tree) = {
+ val receiver = t.symbol
+
+ ( (receiver != null)
+ && receiver.isMethod
+ && (method.name == receiver.name)
+ && (method.enclClass isSubClass receiver.enclClass)
+ )
+ }
+ def containsRecursiveCall(t: Tree) = t exists isRecursiveCall
}
-
- private var ctx: Context = new Context()
- private def noTailContext() = {
- val t = new Context(ctx)
- t.tailPos = false
- t
+ class ClonedTailContext(that: TailContext, override val tailPos: Boolean) extends TailContext {
+ def method = that.method
+ def tparams = that.tparams
+ def methodPos = that.methodPos
+ def tailLabels = that.tailLabels
+ def label = that.label
}
+ private var ctx: TailContext = EmptyTailContext
+ private def noTailContext() = new ClonedTailContext(ctx, tailPos = false)
+ private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
+
/** Rewrite this tree to contain no tail recursive calls */
- def transform(tree: Tree, nctx: Context): Tree = {
+ def transform(tree: Tree, nctx: TailContext): Tree = {
val saved = ctx
ctx = nctx
try transform(tree)
finally this.ctx = saved
}
+ def yesTailTransform(tree: Tree): Tree = transform(tree, yesTailContext())
def noTailTransform(tree: Tree): Tree = transform(tree, noTailContext())
def noTailTransforms(trees: List[Tree]) = {
val nctx = noTailContext()
@@ -187,38 +201,33 @@ abstract class TailCalls extends Transform {
}
override def transform(tree: Tree): Tree = {
- /** A possibly polymorphic apply to be considered for tail call transformation.
- */
+ /* A possibly polymorphic apply to be considered for tail call transformation. */
def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = {
val receiver: Tree = fun match {
case Select(qual, _) => qual
case _ => EmptyTree
}
-
def receiverIsSame = ctx.enclosingType.widen =:= receiver.tpe.widen
def receiverIsSuper = ctx.enclosingType.widen <:< receiver.tpe.widen
def isRecursiveCall = (ctx.method eq fun.symbol) && ctx.tailPos
def transformArgs = noTailTransforms(args)
def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol))
- /** Records failure reason in Context for reporting.
- * Position is unchanged (by default, the method definition.)
+ /* Records failure reason in Context for reporting.
+ * Position is unchanged (by default, the method definition.)
*/
def fail(reason: String) = {
debuglog("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason)
-
- ctx.failReason = reason
+ failReasons(ctx) = reason
treeCopy.Apply(tree, noTailTransform(target), transformArgs)
}
- /** Position of failure is that of the tree being considered.
- */
+ /* Position of failure is that of the tree being considered. */
def failHere(reason: String) = {
- ctx.failPos = fun.pos
+ failPositions(ctx) = fun.pos
fail(reason)
}
def rewriteTailCall(recv: Tree): Tree = {
debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
-
accessed += ctx.label
typedPos(fun.pos) {
val args = mapWithIndex(transformArgs)((arg, i) => mkAttributedCastHack(arg, ctx.label.info.params(i + 1).tpe))
@@ -228,12 +237,11 @@ abstract class TailCalls extends Transform {
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
else if (!isRecursiveCall) {
- if (receiverIsSuper) failHere("it contains a recursive call targeting supertype " + receiver.tpe)
+ if (receiverIsSuper) failHere("it contains a recursive call targeting a supertype")
else failHere(defaultReason)
}
else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments")
else if (receiver == EmptyTree) rewriteTailCall(This(currentClass))
- else if (forMSIL) fail("it cannot be optimized on MSIL")
else if (!receiverIsSame) failHere("it changes type of 'this' on a polymorphic recursive call")
else rewriteTailCall(receiver)
}
@@ -245,37 +253,23 @@ abstract class TailCalls extends Transform {
super.transform(tree)
- case dd @ DefDef(_, _, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
- val newCtx = new Context(dd)
- def isRecursiveCall(t: Tree) = {
- val sym = t.symbol
- (sym != null) && {
- sym.isMethod && (dd.symbol.name == sym.name) && (dd.symbol.enclClass isSubClass sym.enclClass)
- }
- }
- if (newCtx.isMandatory) {
- if (!rhs0.exists(isRecursiveCall)) {
- unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
- }
- }
-
- // labels are local to a method, so only traverse the rhs of a defdef
- val collectTailPosLabels = new TailPosLabelsTraverser
- collectTailPosLabels traverse rhs0
- newCtx.tailLabels = collectTailPosLabels.tailLabels.toSet
+ case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
+ val newCtx = new DefDefTailContext(dd)
+ if (newCtx.isMandatory && !(newCtx containsRecursiveCall rhs0))
+ unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
- debuglog("Considering " + dd.name + " for tailcalls, with labels in tailpos: "+ newCtx.tailLabels)
+ debuglog(s"Considering $name for tailcalls, with labels in tailpos: ${newCtx.tailLabels}")
val newRHS = transform(rhs0, newCtx)
- deriveDefDef(tree){rhs =>
+ deriveDefDef(tree) { rhs =>
if (newCtx.isTransformed) {
- /** We have rewritten the tree, but there may be nested recursive calls remaining.
- * If @tailrec is given we need to fail those now.
+ /* We have rewritten the tree, but there may be nested recursive calls remaining.
+ * If @tailrec is given we need to fail those now.
*/
if (newCtx.isMandatory) {
for (t @ Apply(fn, _) <- newRHS ; if fn.symbol == newCtx.method) {
- newCtx.failPos = t.pos
- newCtx.tailrecFailure()
+ failPositions(newCtx) = t.pos
+ tailrecFailure(newCtx)
}
}
val newThis = newCtx.newThis(tree.pos)
@@ -287,8 +281,8 @@ abstract class TailCalls extends Transform {
))
}
else {
- if (newCtx.isMandatory && newRHS.exists(isRecursiveCall))
- newCtx.tailrecFailure()
+ if (newCtx.isMandatory && (newCtx containsRecursiveCall newRHS))
+ tailrecFailure(newCtx)
newRHS
}
@@ -349,27 +343,25 @@ abstract class TailCalls extends Transform {
case Apply(tapply @ TypeApply(fun, targs), vargs) =>
rewriteApply(tapply, fun, targs, vargs)
- case Apply(fun, args) =>
- if (fun.symbol == Boolean_or || fun.symbol == Boolean_and)
- treeCopy.Apply(tree, fun, transformTrees(args))
- else if (fun.symbol.isLabel && args.nonEmpty && args.tail.isEmpty && ctx.tailLabels(fun.symbol)) {
- // this is to detect tailcalls in translated matches
- // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
- // thus, the argument to the call is in tailposition
- val saved = ctx.tailPos
- ctx.tailPos = true
- debuglog("in tailpos label: "+ args.head)
- val res = transform(args.head)
- ctx.tailPos = saved
- if (res ne args.head) {
- // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
- // must leave the jump to the original tailpos-label (fun)!
- // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
- treeCopy.Apply(tree, fun, List(res))
- }
- else rewriteApply(fun, fun, Nil, args)
- } else rewriteApply(fun, fun, Nil, args)
+ case Apply(fun, args) if fun.symbol == Boolean_or || fun.symbol == Boolean_and =>
+ treeCopy.Apply(tree, fun, transformTrees(args))
+
+ // this is to detect tailcalls in translated matches
+ // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
+ // thus, the argument to the call is in tailposition
+ case Apply(fun, args @ (arg :: Nil)) if fun.symbol.isLabel && ctx.tailLabels(fun.symbol) =>
+ debuglog(s"in tailpos label: $arg")
+ val res = yesTailTransform(arg)
+ // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
+ // must leave the jump to the original tailpos-label (fun)!
+ // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
+ if (res ne arg)
+ treeCopy.Apply(tree, fun, res :: Nil)
+ else
+ rewriteApply(fun, fun, Nil, args)
+ case Apply(fun, args) =>
+ rewriteApply(fun, fun, Nil, args)
case Alternative(_) | Star(_) | Bind(_, _) =>
sys.error("We should've never gotten inside a pattern")
case Select(qual, name) =>
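
A hedged example of the translated-match case handled in the hunk above (hypothetical user code): the recursive call sits under the synthetic match labels, is still recognized as a tail call, and gets rewritten.

  import scala.annotation.tailrec
  object MatchTail {
    @tailrec def find(xs: List[Int], x: Int): Boolean = xs match {
      case Nil     => false
      case y :: ys => if (y == x) true else find(ys, x)  // reached via a one-argument label application
    }
  }
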
@@ -404,7 +396,7 @@ abstract class TailCalls extends Transform {
finally maybeTail = saved
}
- def traverseNoTail(tree: Tree) = traverse(tree, false)
+ def traverseNoTail(tree: Tree) = traverse(tree, maybeTailNew = false)
def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
override def traverse(tree: Tree) = tree match {
diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
new file mode 100644
index 0000000000..f83b6f857e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
@@ -0,0 +1,187 @@
+package scala.tools.nsc
+package transform
+
+import scala.reflect.internal._
+import scala.tools.nsc.ast.TreeDSL
+import scala.tools.nsc.Global
+
+/**
+ * A trait usable by transforms that need to adapt trees of one type to another type
+ */
+trait TypeAdaptingTransformer {
+ self: TreeDSL =>
+
+ val analyzer: typechecker.Analyzer { val global: self.global.type }
+
+ trait TypeAdapter {
+ val typer: analyzer.Typer
+ import global._
+ import definitions._
+ import CODE._
+
+ def isMethodTypeWithEmptyParams(tpe: Type) = tpe match {
+ case MethodType(Nil, _) => true
+ case _ => false
+ }
+
+ private def isSafelyRemovableUnbox(fn: Tree, arg: Tree): Boolean = {
+ currentRun.runDefinitions.isUnbox(fn.symbol) && {
+ val cls = arg.tpe.typeSymbol
+ (cls == definitions.NullClass) || isBoxedValueClass(cls)
+ }
+ }
+
+ private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
+
+ private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
+
+ private def isDifferentErasedValueType(tpe: Type, other: Type) =
+ isErasedValueType(tpe) && (tpe ne other)
+
+ def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner)
+
+ @inline def box(tree: Tree, target: => String): Tree = {
+ val result = box1(tree)
+ if (tree.tpe =:= UnitTpe) ()
+ else log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
+ result
+ }
+
+ /** Box `tree` of unboxed type */
+ private def box1(tree: Tree): Tree = tree match {
+ case LabelDef(_, _, _) =>
+ val ldef = deriveLabelDef(tree)(box1)
+ ldef setType ldef.rhs.tpe
+ case _ =>
+ val tree1 = tree.tpe match {
+ case ErasedValueType(clazz, _) =>
+ New(clazz, cast(tree, underlyingOfValueClass(clazz)))
+ case _ =>
+ tree.tpe.typeSymbol match {
+ case UnitClass =>
+ if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT)
+ else BLOCK(tree, REF(BoxedUnit_UNIT))
+ case NothingClass => tree // a non-terminating expression doesn't need boxing
+ case x =>
+ assert(x != ArrayClass)
+ tree match {
+ /* Can't always remove a Box(Unbox(x)) combination because the process of boxing x
+ * may lead to throwing an exception.
+ *
+ * This is important for specialization: calls to the super constructor should not box/unbox specialized
+ * fields (see TupleX). (ID)
+ */
+ case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) =>
+ log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
+ arg
+ case _ =>
+ (REF(currentRun.runDefinitions.boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe
+ }
+ }
+ }
+ typer.typedPos(tree.pos)(tree1)
+ }
+
+ def unbox(tree: Tree, pt: Type): Tree = {
+ val result = unbox1(tree, pt)
+ log(s"unboxing ${tree.shortClass}: ${tree.tpe} as a ${result.tpe}")
+ result
+ }
+
+ /** Unbox `tree` of boxed type to expected type `pt`.
+ *
+ * @param tree the given tree
+ * @param pt the expected type.
+ * @return the unboxed tree
+ */
+ private def unbox1(tree: Tree, pt: Type): Tree = tree match {
+/*
+ case Boxed(unboxed) =>
+ println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
+ adaptToType(unboxed, pt)
+ */
+ case LabelDef(_, _, _) =>
+ val ldef = deriveLabelDef(tree)(unbox(_, pt))
+ ldef setType ldef.rhs.tpe
+ case _ =>
+ val tree1 = pt match {
+ case ErasedValueType(clazz, underlying) =>
+ val tree0 =
+ if (tree.tpe.typeSymbol == NullClass &&
+ isPrimitiveValueClass(underlying.typeSymbol)) {
+ // convert `null` directly to underlying type, as going
+ // via the unboxed type would yield a NPE (see SI-5866)
+ unbox1(tree, underlying)
+ } else
+ Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
+ cast(tree0, pt)
+ case _ =>
+ pt.typeSymbol match {
+ case UnitClass =>
+ if (treeInfo isExprSafeToInline tree) UNIT
+ else BLOCK(tree, UNIT)
+ case x =>
+ assert(x != ArrayClass)
+ // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type
+ Apply(currentRun.runDefinitions.unboxMethod(pt.typeSymbol), tree)
+ }
+ }
+ typer.typedPos(tree.pos)(tree1)
+ }
+
+ /** Generate a synthetic cast operation from tree.tpe to pt.
+ * @pre pt eq pt.normalize
+ */
+ def cast(tree: Tree, pt: Type): Tree = {
+ if ((tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) {
+ def word = (
+ if (tree.tpe <:< pt) "upcast"
+ else if (pt <:< tree.tpe) "downcast"
+ else if (pt weak_<:< tree.tpe) "coerce"
+ else if (tree.tpe weak_<:< pt) "widen"
+ else "cast"
+ )
+ log(s"erasure ${word}s from ${tree.tpe} to $pt")
+ }
+ if (pt =:= UnitTpe) {
+ // See SI-4731 for one example of how this occurs.
+ log("Attempted to cast to Unit: " + tree)
+ tree.duplicate setType pt
+ } else if (tree.tpe != null && tree.tpe.typeSymbol == ArrayClass && pt.typeSymbol == ArrayClass) {
+ // See SI-2386 for one example of when this might be necessary.
+ val needsExtraCast = isPrimitiveValueType(tree.tpe.typeArgs.head) && !isPrimitiveValueType(pt.typeArgs.head)
+ val tree1 = if (needsExtraCast) gen.mkRuntimeCall(nme.toObjectArray, List(tree)) else tree
+ gen.mkAttributedCast(tree1, pt)
+ } else gen.mkAttributedCast(tree, pt)
+ }
+
+ /** Adapt `tree` to expected type `pt`.
+ *
+ * @param tree the given tree
+ * @param pt the expected type
+ * @return the adapted tree
+ */
+ def adaptToType(tree: Tree, pt: Type): Tree = {
+ if (settings.debug && pt != WildcardType)
+ log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
+ if (tree.tpe <:< pt)
+ tree
+ else if (isDifferentErasedValueType(tree.tpe, pt))
+ adaptToType(box(tree, pt.toString), pt)
+ else if (isDifferentErasedValueType(pt, tree.tpe))
+ adaptToType(unbox(tree, pt), pt)
+ else if (isPrimitiveValueType(tree.tpe) && !isPrimitiveValueType(pt)) {
+ adaptToType(box(tree, pt.toString), pt)
+ } else if (isMethodTypeWithEmptyParams(tree.tpe)) {
+ // [H] this assert fails when trying to typecheck tree !(SomeClass.this.bitmap) for single lazy val
+ //assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
+ adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
+// } else if (pt <:< tree.tpe)
+// cast(tree, pt)
+ } else if (isPrimitiveValueType(pt) && !isPrimitiveValueType(tree.tpe))
+ adaptToType(unbox(tree, pt), pt)
+ else
+ cast(tree, pt)
+ }
+ }
+}
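For `Int`, the `boxMethod`/`unboxMethod` calls generated above resolve to the `scala.runtime.BoxesRunTime` helpers. A minimal standalone sketch of that runtime behaviour (the object name `BoxingDemo` is illustrative, not part of the patch):

    import scala.runtime.BoxesRunTime

    object BoxingDemo {
      def main(args: Array[String]): Unit = {
        val boxed: AnyRef  = BoxesRunTime.boxToInteger(42)  // what `box` emits for an Int
        val unboxed: Int   = BoxesRunTime.unboxToInt(boxed) // what `unbox` emits for pt = Int
        val fromNull: Int  = BoxesRunTime.unboxToInt(null)  // yields 0 rather than throwing
        println((boxed, unboxed, fromNull))
      }
    }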
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index c7bc16f249..3feadcd9b2 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package transform
-import scala.collection.{ mutable, immutable }
-
/** A base class for transforms.
* A transform contains a compiler phase which applies a tree transformer.
*/
@@ -19,17 +17,15 @@ trait TypingTransformers {
abstract class TypingTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer =
if (phase.erasedTypes)
- erasure.newTyper(erasure.rootContext(unit, EmptyTree, true)).asInstanceOf[analyzer.Typer]
+ erasure.newTyper(erasure.rootContext(unit, EmptyTree, erasedTypes = true)).asInstanceOf[analyzer.Typer]
else
analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true))
protected var curTree: Tree = _
- protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) }
override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
val savedLocalTyper = localTyper
-// println("transformer atOwner: " + owner + " isPackage? " + owner.isPackage)
localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
val result = super.atOwner(owner)(trans)
localTyper = savedLocalTyper
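The `erasedTypes = true` change above is part of a broader cleanup in this commit: boolean arguments are named at the call site (`needLift = true`, `any = true`, and so on). A tiny sketch of the motivation, using a made-up `rootContext`-like signature:

    object NamedArgSketch {
      // hypothetical stand-in for a method with a trailing boolean flag
      def rootContext(unitName: String, erasedTypes: Boolean = false): String =
        s"Context($unitName, erasedTypes = $erasedTypes)"

      def main(args: Array[String]): Unit = {
        println(rootContext("Foo.scala", true))               // unclear what `true` means here
        println(rootContext("Foo.scala", erasedTypes = true)) // self-documenting call site
      }
    }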
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 430129aaff..ef50ae276f 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -3,7 +3,8 @@
* @author
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import symtab.Flags._
@@ -61,27 +62,9 @@ abstract class UnCurry extends InfoTransform
// uncurry and uncurryType expand type aliases
- /** Traverse tree omitting local method definitions.
- * If a `return` is encountered, set `returnFound` to true.
- * Used for MSIL only.
- */
- private object lookForReturns extends Traverser {
- var returnFound = false
- override def traverse(tree: Tree): Unit = tree match {
- case Return(_) => returnFound = true
- case DefDef(_, _, _, _, _, _) => ;
- case _ => super.traverse(tree)
- }
- def found(tree: Tree) = {
- returnFound = false
- traverse(tree)
- returnFound
- }
- }
-
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ private val inlineFunctionExpansion = settings.Ydelambdafy.value == "inline"
private var needTryLift = false
- private var inPattern = false
private var inConstructorFlag = 0L
private val byNameArgs = mutable.HashSet[Tree]()
private val noApply = mutable.HashSet[Tree]()
@@ -96,12 +79,6 @@ abstract class UnCurry extends InfoTransform
@inline private def useNewMembers[T](owner: Symbol)(f: List[Tree] => T): T =
f(newMembers.remove(owner).getOrElse(Nil).toList)
- @inline private def withInPattern[T](value: Boolean)(body: => T): T = {
- inPattern = value
- try body
- finally inPattern = !value
- }
-
private def newFunction0(body: Tree): Tree = {
val result = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function]
log("Change owner from %s to %s in %s".format(currentOwner, result.symbol, result.body))
@@ -109,11 +86,6 @@ abstract class UnCurry extends InfoTransform
transformFunction(result)
}
- private lazy val serialVersionUIDAnnotation =
- AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
-
- private var nprinted = 0
-
// I don't have a clue why I'm catching TypeErrors here, but it's better
// than spewing stack traces at end users for internal errors. Examples
// which hit at this point should not be hard to come by, but the immediate
@@ -134,19 +106,10 @@ abstract class UnCurry extends InfoTransform
def isByNameRef(tree: Tree) = (
tree.isTerm
&& !byNameArgs(tree)
- && tree.hasSymbolWhich(s => isByNameParamType(s.tpe))
+ && (tree.symbol ne null)
+ && (isByName(tree.symbol))
)
- /** Uncurry a type of a tree node.
- * This function is sensitive to whether or not we are in a pattern -- when in a pattern
- * additional parameter sections of a case class are skipped.
- */
- def uncurryTreeType(tp: Type): Type = tp match {
- case MethodType(params, MethodType(params1, restpe)) if inPattern =>
- uncurryTreeType(MethodType(params, restpe))
- case _ =>
- uncurry(tp)
- }
// ------- Handling non-local returns -------------------------------------------------
@@ -160,7 +123,7 @@ abstract class UnCurry extends InfoTransform
/** Return non-local return key for given method */
private def nonLocalReturnKey(meth: Symbol) =
nonLocalReturnKeys.getOrElseUpdate(meth,
- meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectClass.tpe
+ meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectTpe
)
/** Generate a non-local return throw with given return expression from given method.
@@ -193,18 +156,28 @@ abstract class UnCurry extends InfoTransform
*/
private def nonLocalReturnTry(body: Tree, key: Symbol, meth: Symbol) = {
localTyper typed {
- val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType)
+ val restpe = meth.tpe_*.finalResultType
+ val extpe = nonLocalReturnExceptionType(restpe)
val ex = meth.newValue(nme.ex, body.pos) setInfo extpe
- val argType = meth.tpe.finalResultType withAnnotation (AnnotationInfo marker UncheckedClass.tpe)
+ val argType = restpe withAnnotation (AnnotationInfo marker UncheckedClass.tpe)
val pat = gen.mkBindForCase(ex, NonLocalReturnControlClass, List(argType))
- val rhs = (
+ val rhs = (
IF ((ex DOT nme.key)() OBJ_EQ Ident(key))
THEN ((ex DOT nme.value)())
ELSE (Throw(Ident(ex)))
)
- val keyDef = ValDef(key, New(ObjectClass.tpe))
+ val keyDef = ValDef(key, New(ObjectTpe))
val tryCatch = Try(body, pat -> rhs)
+ import treeInfo.{catchesThrowable, isSyntheticCase}
+ for {
+ Try(t, catches, _) <- body
+ cdef <- catches
+ if catchesThrowable(cdef) && !isSyntheticCase(cdef)
+ } {
+ unit.warning(body.pos, "catch block may intercept non-local return from " + meth)
+ }
+
Block(List(keyDef), tryCatch)
}
}
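The new warning exists because a non-local `return` from inside a closure is implemented by throwing a control exception, which a user-written catch-all can intercept. A standalone sketch of the failure mode the warning points at (method name and data are illustrative):

    object NonLocalReturnDemo {
      def findNegative(xs: List[Int]): Option[Int] = {
        try {
          xs.foreach { x =>
            if (x < 0) return Some(x) // non-local return: thrown as a control exception
          }
        } catch {
          // intercepts the control throwable carrying the return value
          case t: Throwable => println("swallowed: " + t.getClass.getName)
        }
        None
      }

      def main(args: Array[String]): Unit =
        println(findNegative(List(1, -2, 3))) // prints the swallowed throwable, then None
    }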
@@ -228,8 +201,6 @@ abstract class UnCurry extends InfoTransform
* }
* new $anon()
*
- * If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
- *
*/
def transformFunction(fun: Function): Tree = {
fun.tpe match {
@@ -245,169 +216,39 @@ abstract class UnCurry extends InfoTransform
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
- case _ if fun.tpe.typeSymbol == PartialFunctionClass =>
- // only get here when running under -Xoldpatmat
- synthPartialFunction(fun)
case _ =>
- val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
- val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
- anonClass setInfo ClassInfoType(parents, newScope, anonClass)
-
- val targs = fun.tpe.typeArgs
- val (formals, restpe) = (targs.init, targs.last)
-
- val applyMethodDef = {
- val methSym = anonClass.newMethod(nme.apply, fun.pos, FINAL)
- val paramSyms = map2(formals, fun.vparams) {
- (tp, param) => methSym.newSyntheticValueParam(tp, param.name)
+ def typedFunPos(t: Tree) = localTyper.typedPos(fun.pos)(t)
+ val funParams = fun.vparams map (_.symbol)
+ def mkMethod(owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags): DefDef =
+ gen.mkMethodFromFunction(localTyper)(fun, owner, name, additionalFlags)
+
+ if (inlineFunctionExpansion) {
+ val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
+ val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation SerialVersionUIDAnnotation
+ anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+
+ val applyMethodDef = mkMethod(anonClass, nme.apply)
+ anonClass.info.decls enter applyMethodDef.symbol
+
+ typedFunPos {
+ Block(
+ ClassDef(anonClass, NoMods, ListOfNil, List(applyMethodDef), fun.pos),
+ Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
- methSym setInfoAndEnter MethodType(paramSyms, restpe)
-
- fun.vparams foreach (_.symbol.owner = methSym)
- fun.body changeOwner (fun.symbol -> methSym)
-
- val body = localTyper.typedPos(fun.pos)(fun.body)
- val methDef = DefDef(methSym, List(fun.vparams), body)
-
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
-
- localTyper.typedPos(fun.pos) {
- Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyMethodDef), fun.pos)),
- Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
- }
-
- }
- }
-
- /** Transform a function node (x => body) of type PartialFunction[T, R] where
- * body = expr match { case P_i if G_i => E_i }_i=1..n
- * to (assuming none of the cases is a default case):
- *
- * class $anon() extends AbstractPartialFunction[T, R] with Serializable {
- * def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = (expr: @unchecked) match {
- * case P_1 if G_1 => E_1
- * ...
- * case P_n if G_n => E_n
- * case _ => default(expr)
- * }
- * def isDefinedAt(x: T): boolean = (x: @unchecked) match {
- * case P_1 if G_1 => true
- * ...
- * case P_n if G_n => true
- * case _ => false
- * }
- * }
- * new $anon()
- *
- * If there's a default case, the original match is used for applyOrElse, and isDefinedAt returns `true`
- */
- def synthPartialFunction(fun: Function) = {
- if (!settings.XoldPatmat.value) debugwarn("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.")
-
- val targs = fun.tpe.typeArgs
- val (formals, restpe) = (targs.init, targs.last)
-
- val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
- val parents = addSerializable(appliedType(AbstractPartialFunctionClass, targs: _*))
- anonClass setInfo ClassInfoType(parents, newScope, anonClass)
-
- // duplicate before applyOrElseMethodDef is run so that it does not mess up our trees and label symbols (we have a fresh set)
- // otherwise `TreeSymSubstituter(fun.vparams map (_.symbol), params)` won't work as the subst has been run already
- val bodyForIDA = {
- val duped = fun.body.duplicate
- val oldParams = new mutable.ListBuffer[Symbol]()
- val newParams = new mutable.ListBuffer[Symbol]()
-
- val oldSyms0 =
- duped filter {
- case l@LabelDef(_, params, _) =>
- params foreach {p =>
- val oldSym = p.symbol
- p.symbol = oldSym.cloneSymbol
- oldParams += oldSym
- newParams += p.symbol
- }
- true
- case _ => false
- } map (_.symbol)
- val oldSyms = oldParams.toList ++ oldSyms0
- val newSyms = newParams.toList ++ (oldSyms0 map (_.cloneSymbol))
- // println("duping "+ oldSyms +" --> "+ (newSyms map (_.ownerChain)))
-
- val substLabels = new TreeSymSubstituter(oldSyms, newSyms)
-
- substLabels(duped)
- }
-
- // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
- val applyOrElseMethodDef = {
- val methSym = anonClass.newMethod(fun.pos, nme.applyOrElse) setFlag (FINAL | OVERRIDE)
-
- val List(argtpe) = formals
- val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe)
- val B1 = methSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.lower(restpe)
- val methFormals = List(A1.tpe, functionType(List(A1.tpe), B1.tpe))
- val params@List(x, default) = methSym newSyntheticValueParams methFormals
- methSym setInfoAndEnter polyType(List(A1, B1), MethodType(params, B1.tpe))
-
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), List(x))
- val body = localTyper.typedPos(fun.pos) { import CODE._
- def defaultAction(scrut: Tree) = REF(default) APPLY (REF(x))
-
- substParam(fun.body) match {
- case orig@Match(selector, cases) =>
- if (cases exists treeInfo.isDefaultCase) orig
- else {
- val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
- Match(/*gen.mkUnchecked*/(selector), cases :+ defaultCase)
- }
-
+ } else {
+ // method definition with the same arguments, return type, and body as the original lambda
+ val liftedMethod = mkMethod(fun.symbol.owner, nme.ANON_FUN_NAME, additionalFlags = ARTIFACT)
+
+ // new function whose body is just a call to the lifted method
+ val newFun = deriveFunction(fun)(_ => typedFunPos(
+ gen.mkForwarder(gen.mkAttributedRef(liftedMethod.symbol), funParams :: Nil)
+ ))
+ typedFunPos(Block(liftedMethod, super.transform(newFun)))
}
}
- body.changeOwner(fun.symbol -> methSym)
-
- val methDef = DefDef(methSym, body)
-
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
-
- val isDefinedAtMethodDef = {
- val methSym = anonClass.newMethod(nme.isDefinedAt, fun.pos, FINAL | SYNTHETIC)
- val params = methSym newSyntheticValueParams formals
- methSym setInfoAndEnter MethodType(params, BooleanClass.tpe)
-
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
- def doSubst(x: Tree) = substParam(resetLocalAttrsKeepLabels(x)) // see pos/t1761 for why `resetLocalAttrs`, but must keep label symbols around
-
- val body = bodyForIDA match {
- case Match(selector, cases) =>
- if (cases exists treeInfo.isDefaultCase) TRUE_typed
- else
- doSubst(Match(/*gen.mkUnchecked*/(selector),
- (cases map (c => deriveCaseDef(c)(x => TRUE_typed))) :+ (
- DEFAULT ==> FALSE_typed)))
-
- }
- body.changeOwner(fun.symbol -> methSym)
-
- DefDef(methSym, body)
- }
-
- localTyper.typedPos(fun.pos) {
- Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
- Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
- }
}
+
def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = {
val isJava = fun.isJavaDefined
def transformVarargs(varargsElemType: Type) = {
@@ -416,7 +257,7 @@ abstract class UnCurry extends InfoTransform
// when calling into scala varargs, make sure it's a sequence.
def arrayToSequence(tree: Tree, elemtp: Type) = {
- afterUncurry {
+ exitingUncurry {
localTyper.typedPos(pos) {
val pt = arrayType(elemtp)
val adaptedTree = // might need to cast to Array[elemtp], as arrays are not covariant
@@ -435,7 +276,7 @@ abstract class UnCurry extends InfoTransform
def getClassTag(tp: Type): Tree = {
val tag = localTyper.resolveClassTag(tree.pos, tp)
// Don't want bottom types getting any further than this (SI-4024)
- if (tp.typeSymbol.isBottomClass) getClassTag(AnyClass.tpe)
+ if (tp.typeSymbol.isBottomClass) getClassTag(AnyTpe)
else if (!tag.isEmpty) tag
else if (tp.bounds.hi ne tp) getClassTag(tp.bounds.hi)
else localTyper.TyperErrorGen.MissingClassTagError(tree, tp)
@@ -446,7 +287,7 @@ abstract class UnCurry extends InfoTransform
case _ => EmptyTree
}
}
- afterUncurry {
+ exitingUncurry {
localTyper.typedPos(pos) {
gen.mkMethodCall(tree, toArraySym, Nil, List(traversableClassTag(tree.tpe)))
}
@@ -465,12 +306,12 @@ abstract class UnCurry extends InfoTransform
}
else {
def mkArray = mkArrayValue(args drop (formals.length - 1), varargsElemType)
- if (isJava || inPattern) mkArray
+ if (isJava) mkArray
else if (args.isEmpty) gen.mkNil // avoid needlessly double-wrapping an empty argument list
else arrayToSequence(mkArray, varargsElemType)
}
- afterUncurry {
+ exitingUncurry {
if (isJava && !isReferenceArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
// The array isn't statically known to be a reference array, so call ScalaRuntime.toObjectArray.
suffix = localTyper.typedPos(pos) {
@@ -491,7 +332,7 @@ abstract class UnCurry extends InfoTransform
arg setType functionType(Nil, arg.tpe)
}
else {
- log(s"Argument '$arg' at line ${arg.pos.safeLine} is $formal from ${fun.fullName}")
+ log(s"Argument '$arg' at line ${arg.pos.line} is $formal from ${fun.fullName}")
def canUseDirectly(recv: Tree) = (
recv.tpe.typeSymbol.isSubClass(FunctionClass(0))
&& treeInfo.isExprSafeToInline(recv)
@@ -538,7 +379,7 @@ abstract class UnCurry extends InfoTransform
deriveDefDef(dd)(_ => body)
case _ => tree
}
- def isNonLocalReturn(ret: Return) = ret.symbol != currentOwner.enclMethod || currentOwner.isLazy
+ def isNonLocalReturn(ret: Return) = ret.symbol != currentOwner.enclMethod || currentOwner.isLazy || currentOwner.isAnonymousFunction
// ------ The tree transformers --------------------------------------------------------
@@ -550,15 +391,7 @@ abstract class UnCurry extends InfoTransform
finally needTryLift = saved
}
- /** A try or synchronized needs to be lifted anyway for MSIL if it contains
- * return statements. These are disallowed in the CLR. By lifting
- * such returns will be converted to throws.
- */
- def shouldBeLiftedAnyway(tree: Tree) = false && // buggy, see #1981
- forMSIL && lookForReturns.found(tree)
-
- /** Transform tree `t` to { def f = t; f } where `f` is a fresh name
- */
+ /* Transform tree `t` to { def f = t; f } where `f` is a fresh name */
def liftTree(tree: Tree) = {
debuglog("lifting tree at: " + (tree.pos))
val sym = currentOwner.newMethod(unit.freshTermName("liftedTree"), tree.pos)
@@ -578,10 +411,14 @@ abstract class UnCurry extends InfoTransform
}
val sym = tree.symbol
+
+ // true if the target is a lambda body that's been lifted into a method
+ def isLiftedLambdaBody(target: Tree) = target.symbol.isLocal && target.symbol.isArtifact && target.symbol.name.containsName(nme.ANON_FUN_NAME)
+
val result = (
// TODO - settings.noassertions.value temporarily retained to avoid
// breakage until a reasonable interface is settled upon.
- if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions.value)))
+ if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions)))
replaceElidableTree(tree)
else translateSynchronized(tree) match {
case dd @ DefDef(mods, name, tparams, _, tpt, rhs) =>
@@ -592,7 +429,7 @@ abstract class UnCurry extends InfoTransform
if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
- withNeedLift(false) {
+ withNeedLift(needLift = false) {
if (dd.symbol.isClassConstructor) {
atOwner(sym) {
val rhs1 = (rhs: @unchecked) match {
@@ -616,37 +453,32 @@ abstract class UnCurry extends InfoTransform
case ValDef(_, _, _, rhs) =>
if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
if (!sym.owner.isSourceMethod)
- withNeedLift(true) { super.transform(tree) }
+ withNeedLift(needLift = true) { super.transform(tree) }
else
super.transform(tree)
case UnApply(fn, args) =>
- val fn1 = withInPattern(false)(transform(fn))
- val args1 = transformTrees(fn.symbol.name match {
- case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
- case _ => sys.error("internal error: UnApply node has wrong symbol")
- })
+ val fn1 = transform(fn)
+ val args1 = fn.symbol.name match {
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, patmat.alignPatterns(tree).expectedTypes)
+ case _ => args
+ }
treeCopy.UnApply(tree, fn1, args1)
case Apply(fn, args) =>
- if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
- transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
- else {
- val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
- withNeedLift(needLift) {
- val formals = fn.tpe.paramTypes
- treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
- }
+ val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
+ withNeedLift(needLift) {
+ val formals = fn.tpe.paramTypes
+ treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
}
case Assign(_: RefTree, _) =>
- withNeedLift(true) { super.transform(tree) }
+ withNeedLift(needLift = true) { super.transform(tree) }
case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
- withNeedLift(true) { super.transform(tree) }
+ withNeedLift(needLift = true) { super.transform(tree) }
case ret @ Return(_) if (isNonLocalReturn(ret)) =>
- withNeedLift(true) { super.transform(ret) }
+ withNeedLift(needLift = true) { super.transform(ret) }
case Try(_, Nil, _) =>
// try-finally does not need lifting: lifting is needed only for try-catch
@@ -656,13 +488,17 @@ abstract class UnCurry extends InfoTransform
super.transform(tree)
case Try(block, catches, finalizer) =>
- if (needTryLift || shouldBeLiftedAnyway(tree)) transform(liftTree(tree))
+ if (needTryLift) transform(liftTree(tree))
else super.transform(tree)
case CaseDef(pat, guard, body) =>
- val pat1 = withInPattern(true)(transform(pat))
+ val pat1 = transform(pat)
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
+ // if a lambda is already the right shape we don't need to transform it again
+ case fun @ Function(_, Apply(target, _)) if (!inlineFunctionExpansion) && isLiftedLambdaBody(target) =>
+ super.transform(fun)
+
case fun @ Function(_, _) =>
mainTransform(transformFunction(fun))
@@ -681,11 +517,11 @@ abstract class UnCurry extends InfoTransform
tree1
}
)
- assert(result.tpe != null, result + " tpe is null")
- result setType uncurryTreeType(result.tpe)
+ assert(result.tpe != null, result.shortClass + " tpe is null:\n" + result)
+ result modifyType uncurry
}
- def postTransform(tree: Tree): Tree = afterUncurry {
+ def postTransform(tree: Tree): Tree = exitingUncurry {
def applyUnary(): Tree = {
// TODO_NMT: verify that the inner tree of a type-apply also gets parens if the
// whole tree is a polymorphic nullary method application
@@ -703,44 +539,13 @@ abstract class UnCurry extends InfoTransform
def isThrowable(pat: Tree): Boolean = pat match {
case Typed(Ident(nme.WILDCARD), tpt) =>
- tpt.tpe =:= ThrowableClass.tpe
+ tpt.tpe =:= ThrowableTpe
case Bind(_, pat) =>
isThrowable(pat)
case _ =>
false
}
- def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty
-
- def postTransformTry(tree: Try) = {
- val body = tree.block
- val catches = tree.catches
- val finalizer = tree.finalizer
- if (opt.virtPatmat) {
- if (catches exists (cd => !treeInfo.isCatchCase(cd)))
- debugwarn("VPM BUG! illegal try/catch " + catches)
- tree
- } else if (catches forall treeInfo.isCatchCase) {
- tree
- } else {
- val exname = unit.freshTermName("ex$")
- val cases =
- if ((catches exists treeInfo.isDefaultCase) || isDefaultCatch(catches.last)) catches
- else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname)))
- val catchall =
- atPos(tree.pos) {
- CaseDef(
- Bind(exname, Ident(nme.WILDCARD)),
- EmptyTree,
- Match(Ident(exname), cases))
- }
- debuglog("rewrote try: " + catches + " ==> " + catchall);
- val catches1 = localTyper.typedCases(
- List(catchall), ThrowableClass.tpe, WildcardType)
- treeCopy.Try(tree, body, catches1, finalizer)
- }
- }
-
tree match {
/* Some uncurry post transformations add members to templates.
*
@@ -779,7 +584,9 @@ abstract class UnCurry extends InfoTransform
addJavaVarargsForwarders(dd, flatdd)
case tree: Try =>
- postTransformTry(tree)
+ if (tree.catches exists (cd => !treeInfo.isCatchCase(cd)))
+ devWarning("VPM BUG - illegal try/catch " + tree.catches)
+ tree
case Apply(Apply(fn, args), args1) =>
treeCopy.Apply(tree, fn, args ::: args1)
@@ -837,7 +644,7 @@ abstract class UnCurry extends InfoTransform
final case class Packed(param: ValDef, tempVal: ValDef) extends ParamTransform
def isDependent(dd: DefDef): Boolean =
- beforeUncurry {
+ enteringUncurry {
val methType = dd.symbol.info
methType.isDependentMethodType && mexists(methType.paramss)(_.info exists (_.isImmediatelyDependent))
}
@@ -912,10 +719,6 @@ abstract class UnCurry extends InfoTransform
if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol))
return flatdd
- def toSeqType(tp: Type): Type = {
- val arg = elementType(ArrayClass, tp)
- seqType(arg)
- }
def toArrayType(tp: Type): Type = {
val arg = elementType(SeqClass, tp)
// to prevent generation of an `Object` parameter from `Array[T]` parameter later
@@ -924,7 +727,7 @@ abstract class UnCurry extends InfoTransform
// becomes def foo[T](a: Int, b: Array[Object])
// instead of def foo[T](a: Int, b: Array[T]) ===> def foo[T](a: Int, b: Object)
arrayType(
- if (arg.typeSymbol.isTypeParameterOrSkolem) ObjectClass.tpe
+ if (arg.typeSymbol.isTypeParameterOrSkolem) ObjectTpe
else arg
)
}
@@ -939,7 +742,7 @@ abstract class UnCurry extends InfoTransform
case p if rpsymbols(p.symbol) => toArrayType(p.symbol.tpe)
case p => p.symbol.tpe
}
- val forwresult = dd.symbol.tpe.finalResultType
+ val forwresult = dd.symbol.tpe_*.finalResultType
val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) =>
currentClass.newValueParameter(oldparam.name, oldparam.symbol.pos).setInfo(tp)
)
@@ -950,11 +753,12 @@ abstract class UnCurry extends InfoTransform
}
// create the symbol
- val forwsym = currentClass.newMethod(dd.name, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
+ val forwsym = currentClass.newMethod(dd.name.toTermName, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
+ def forwParams = forwsym.info.paramss.flatten
// create the tree
val forwtree = theTyper.typedPos(dd.pos) {
- val locals = map2(forwsym ARGS, flatparams) {
+ val locals = map2(forwParams, flatparams) {
case (_, fp) if !rpsymbols(fp.symbol) => null
case (argsym, fp) =>
Block(Nil,
@@ -964,15 +768,13 @@ abstract class UnCurry extends InfoTransform
)
)
}
- val seqargs = map2(locals, forwsym ARGS) {
+ val seqargs = map2(locals, forwParams) {
case (null, argsym) => Ident(argsym)
case (l, _) => l
}
val end = if (forwsym.isConstructor) List(UNIT) else Nil
- DEF(forwsym) === BLOCK(
- Apply(gen.mkAttributedRef(flatdd.symbol), seqargs) :: end : _*
- )
+ DefDef(forwsym, BLOCK(Apply(gen.mkAttributedRef(flatdd.symbol), seqargs) :: end : _*))
}
// check if the method with that name and those arguments already exists in the template
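The `inlineFunctionExpansion` branch above chooses between the classic anonymous-class expansion and lifting the lambda body into a method that the function merely forwards to. A source-level sketch of the two shapes for `(x: Int) => x + 1` (the name `anonfun$1` only approximates the fresh synthetic name and ARTIFACT flag used by the real transform):

    object DelambdafySketch {
      // -Ydelambdafy:inline — body inlined into an anonymous function class
      val inlined: Int => Int = new scala.runtime.AbstractFunction1[Int, Int] {
        def apply(x: Int): Int = x + 1
      }

      // method-based expansion — body lifted into a method, the function is a forwarder
      private def anonfun$1(x: Int): Int = x + 1
      val lifted: Int => Int = (x: Int) => anonfun$1(x)

      def main(args: Array[String]): Unit =
        println((inlined(41), lifted(41)))
    }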
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index dbe08315f4..e0bc478fad 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -4,16 +4,15 @@
* @author Adriaan Moors
*/
-package scala.tools.nsc.transform.patmat
+package scala
+package tools.nsc.transform.patmat
-import scala.tools.nsc.symtab._
import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
import scala.reflect.internal.util.HashSet
-
trait Logic extends Debugging {
import PatternMatchingStats._
@@ -113,8 +112,8 @@ trait Logic extends Debugging {
case object False extends Prop
// symbols are propositions
- abstract case class Sym(val variable: Var, val const: Const) extends Prop {
- private[this] val id = Sym.nextSymId
+ abstract case class Sym(variable: Var, const: Const) extends Prop {
+ private val id: Int = Sym.nextSymId
override def toString = variable +"="+ const +"#"+ id
}
@@ -126,6 +125,7 @@ trait Logic extends Debugging {
(uniques findEntryOrUpdate newSym)
}
private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ implicit val SymOrdering: Ordering[Sym] = Ordering.by(_.id)
}
def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
@@ -162,13 +162,17 @@ trait Logic extends Debugging {
// to govern how much time we spend analyzing matches for unreachability/exhaustivity
object AnalysisBudget {
- import scala.tools.cmd.FromString.IntFromString
- val max = sys.props.get("scalac.patmat.analysisBudget").collect(IntFromString.orElse{case "off" => Integer.MAX_VALUE}).getOrElse(256)
+ private val budgetProp = scala.sys.Prop[Int]("scalac.patmat.analysisBudget")
+ private val budgetOff = "off"
+ val max: Int = {
+ val DefaultBudget = 256
+ budgetProp.option.getOrElse(if (budgetProp.get.equalsIgnoreCase("off")) Integer.MAX_VALUE else DefaultBudget)
+ }
abstract class Exception(val advice: String) extends RuntimeException("CNF budget exceeded")
object exceeded extends Exception(
- s"(The analysis required more space than allowed. Please try with scalac -Dscalac.patmat.analysisBudget=${AnalysisBudget.max*2} or -Dscalac.patmat.analysisBudget=off.)")
+ s"(The analysis required more space than allowed. Please try with scalac -D${budgetProp.key}=${AnalysisBudget.max*2} or -D${budgetProp.key}=${budgetOff}.)")
}
@@ -212,7 +216,7 @@ trait Logic extends Debugging {
}
props foreach gatherEqualities.apply
- if (modelNull) vars foreach (_.registerNull)
+ if (modelNull) vars foreach (_.registerNull())
val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
@@ -280,7 +284,7 @@ trait Logic extends Debugging {
def eqFreePropToSolvable(p: Prop): Formula
def cnfString(f: Formula): String
- type Model = Map[Sym, Boolean]
+ type Model = collection.immutable.SortedMap[Sym, Boolean]
val EmptyModel: Model
val NoModel: Model
@@ -293,6 +297,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
trait TreesAndTypesDomain extends PropositionalLogic with CheckableTreeAndTypeAnalysis {
type Type = global.Type
type Tree = global.Tree
+ import global.definitions.ConstantNull
// resets hash consing -- only supposed to be called by TreeMakersToProps
def prepareNewAnalysis(): Unit = { Var.resetUniques(); Const.resetUniques() }
@@ -321,7 +326,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
val staticTpCheckable: Type = checkableType(staticTp)
private[this] var _mayBeNull = false
- def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def registerNull(): Unit = { ensureCanModify(); if (ConstantNull <:< staticTpCheckable) _mayBeNull = true }
def mayBeNull: Boolean = _mayBeNull
// case None => domain is unknown,
@@ -345,16 +350,16 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
} else
subConsts
- observed; allConsts
+ observed(); allConsts
}
// populate equalitySyms
// don't care about the result, but want only one fresh symbol per distinct constant c
- def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
+ def registerEquality(c: Const): Unit = {ensureCanModify(); symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
// return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
// (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
- def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
+ def propForEqualsTo(c: Const): Prop = {observed(); symForEqualsTo.getOrElse(c, False)}
// [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]
/** the information needed to construct the boolean proposition that encodes the equality proposition (V = C)
@@ -366,7 +371,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
* and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1"
*/
lazy val implications = {
- /** when we know V = C, which other equalities must hold
+ /* when we know V = C, which other equalities must hold
*
* in general, equality to some type implies equality to its supertypes
* (this multi-valued kind of equality is necessary for unreachability)
@@ -479,7 +484,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
// don't access until all potential equalities have been registered using registerEquality
- private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
+ private lazy val equalitySyms = {observed(); symForEqualsTo.values.toList}
// don't call until all equalities have been registered and registerNull has been called (if needed)
def describe = {
@@ -494,7 +499,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType}
- import global.definitions.{AnyClass, UnitClass}
+ import global.definitions._
// all our variables range over types
@@ -514,11 +519,11 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
uniques.get(tp).getOrElse(
uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
case Some((_, c)) =>
- debug.patmat("unique const: "+ (tp, c))
+ debug.patmat("unique const: "+ ((tp, c)))
c
case _ =>
val fresh = mkFresh
- debug.patmat("uniqued const: "+ (tp, fresh))
+ debug.patmat("uniqued const: "+ ((tp, fresh)))
uniques(tp) = fresh
fresh
})
@@ -534,12 +539,12 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
if (!t.symbol.isStable) t.tpe.narrow
else trees find (a => a.correspondsStructure(t)(sameValue)) match {
case Some(orig) =>
- debug.patmat("unique tp for tree: "+ (orig, orig.tpe))
+ debug.patmat("unique tp for tree: "+ ((orig, orig.tpe)))
orig.tpe
case _ =>
// duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
- debug.patmat("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
+ debug.patmat("uniqued: "+ ((t, t.tpe, treeWithNarrowedType.tpe)))
trees += treeWithNarrowedType
treeWithNarrowedType.tpe
}
@@ -549,7 +554,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
def tp: Type
def wideTp: Type
- def isAny = wideTp.typeSymbol == AnyClass
+ def isAny = wideTp =:= AnyTpe
def isValue: Boolean //= tp.isStable
// note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
@@ -564,11 +569,12 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// (At least conceptually: `true` is an instance of class `Boolean`)
private def widenToClass(tp: Type): Type =
if (tp.typeSymbol.isClass) tp
+ else if (tp.baseClasses.isEmpty) sys.error("Bad type: " + tp)
else tp.baseType(tp.baseClasses.head)
object TypeConst extends TypeConstExtractor {
def apply(tp: Type) = {
- if (tp =:= NullTp) NullConst
+ if (tp =:= ConstantNull) NullConst
else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
else Const.unique(tp, new TypeConst(tp))
}
@@ -577,7 +583,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet)
sealed class TypeConst(val tp: Type) extends Const {
- assert(!(tp =:= NullTp))
+ assert(!(tp =:= ConstantNull))
/*private[this] val id: Int = */ Const.nextTypeId
val wideTp = widenToClass(tp)
@@ -598,7 +604,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
}
def apply(p: Tree) = {
val tp = p.tpe.normalize
- if (tp =:= NullTp) NullConst
+ if (tp =:= ConstantNull) NullConst
else {
val wideTp = widenToClass(tp)
@@ -606,7 +612,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
if (tp.isInstanceOf[SingletonType]) tp
else p match {
case Literal(c) =>
- if (c.tpe.typeSymbol == UnitClass) c.tpe
+ if (c.tpe =:= UnitTpe) c.tpe
else ConstantType(c)
case Ident(_) if p.symbol.isStable =>
// for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
@@ -626,16 +632,14 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
}
sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
// debug.patmat("VC"+(tp, wideTp, toString))
- assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable)
+ assert(!(tp =:= ConstantNull)) // TODO: assert(!tp.isStable)
/*private[this] val id: Int = */Const.nextValueId
def isValue = true
}
-
- lazy val NullTp = ConstantType(Constant(null))
case object NullConst extends Const {
- def tp = NullTp
- def wideTp = NullTp
+ def tp = ConstantNull
+ def wideTp = ConstantNull
def isValue = true
override def toString = "null"
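The switch of `Model` to a `SortedMap`, together with the new `SymOrdering`, makes model iteration deterministic, keyed by each `Sym`'s hash-consed id. A small self-contained illustration with a stand-in `Sym`:

    import scala.collection.immutable.SortedMap

    object SortedModelSketch {
      // stand-in for the hash-consed Sym above; only the id drives the ordering
      final case class Sym(variable: String, const: String, id: Int)
      implicit val SymOrdering: Ordering[Sym] = Ordering.by(_.id)

      type Model = SortedMap[Sym, Boolean]

      def main(args: Array[String]): Unit = {
        val model: Model = SortedMap(
          Sym("x", "Some", 2) -> true,
          Sym("x", "None", 1) -> false)
        println(model.toList) // always in id order, regardless of insertion order
      }
    }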
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index 9558542533..2a3c631a66 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -12,27 +12,23 @@ import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
trait TreeAndTypeAnalysis extends Debugging {
- import global.{Tree, Type, Symbol, definitions, analyzer,
- ConstantType, Literal, Constant, appliedType, WildcardType, TypeRef, ModuleClassSymbol,
- nestedMemberType, TypeMap, Ident}
-
+ import global._
import definitions._
import analyzer.Typer
-
// we use subtyping as a model for implication between instanceof tests
// i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
// unfortunately this is not true in general:
- // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
+ // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefTpe)
def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
- val tpValue = tp.typeSymbol.isPrimitiveValueClass
+ val tpValue = isPrimitiveValueType(tp)
// pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
// (and the subtype is respectively a value type or not a value type)
// this allows us to reuse subtyping as a model for implication between instanceOf tests
// the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
val tpImpliedNormalizedToAny =
- if (tpImplied =:= (if (tpValue) AnyValClass.tpe else AnyRefClass.tpe)) AnyClass.tpe
+ if (tpImplied =:= (if (tpValue) AnyValTpe else AnyRefTpe)) AnyTpe
else tpImplied
tp <:< tpImpliedNormalizedToAny
@@ -52,28 +48,31 @@ trait TreeAndTypeAnalysis extends Debugging {
tp.typeSymbol match {
// TODO case _ if tp.isTupleType => // recurse into component types?
case UnitClass =>
- Some(List(UnitClass.tpe))
+ Some(List(UnitTpe))
case BooleanClass =>
- Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
+ Some(ConstantTrue :: ConstantFalse :: Nil)
// TODO case _ if tp.isTupleType => // recurse into component types
case modSym: ModuleClassSymbol =>
Some(List(tp))
// make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
- debug.patmat("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+ debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
None
case sym =>
- val subclasses = (
- sym.sealedDescendants.toList sortBy (_.sealedSortName)
+ val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")(
// symbols which are both sealed and abstract need not be covered themselves, because
// all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
- debug.patmat("enum sealed -- subclasses: "+ (sym, subclasses))
+ sym.sealedDescendants.toList
+ sortBy (_.sealedSortName)
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+ )
val tpApprox = typer.infer.approximateAbstracts(tp)
val pre = tpApprox.prefix
+
+ Some(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") {
// valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
- val validSubTypes = (subclasses flatMap {sym =>
+ subclasses flatMap { sym =>
// have to filter out children which cannot match: see ticket #3683 for an example
// compare to the fully known type `tp` (modulo abstract types),
// so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
@@ -85,9 +84,8 @@ trait TreeAndTypeAnalysis extends Debugging {
// debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
else None
- })
- debug.patmat("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
- Some(validSubTypes)
+ }
+ })
}
// approximate a type to the static type that is fully checkable at run time,
@@ -108,10 +106,7 @@ trait TreeAndTypeAnalysis extends Debugging {
mapOver(tp)
}
}
-
- val res = typeArgsToWildcardsExceptArray(tp)
- debug.patmat("checkable "+(tp, res))
- res
+ debug.patmatResult(s"checkableType($tp)")(typeArgsToWildcardsExceptArray(tp))
}
// a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
@@ -128,8 +123,8 @@ trait TreeAndTypeAnalysis extends Debugging {
}
trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchTreeMaking {
- import global.{Tree, Type, NoType, Symbol, NoSymbol, ConstantType, Literal, Constant, Ident, UniqueType, RefinedType, EmptyScope}
- import global.definitions.{ListClass, NilModule}
+ import global._
+ import global.definitions._
/**
* Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types)
@@ -140,20 +135,17 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
var currId = 0
}
case class Test(prop: Prop, treeMaker: TreeMaker) {
- // private val reusedBy = new scala.collection.mutable.HashSet[Test]
+ // private val reusedBy = new mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
assert(later.reuses.isEmpty, later.reuses)
// reusedBy += later
later.reuses = Some(this)
}
-
val id = { Test.currId += 1; Test.currId}
- override def toString =
- "T"+ id + "C("+ prop +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
+ override def toString = s"T${id}C($prop)"
}
-
class TreeMakersToPropsIgnoreNullChecks(root: Symbol) extends TreeMakersToProps(root) {
override def uniqueNonNullProp(p: Tree): Prop = True
}
@@ -162,9 +154,9 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
class TreeMakersToProps(val root: Symbol) {
prepareNewAnalysis() // reset hash consing for Var and Const
- private[this] val uniqueEqualityProps = new scala.collection.mutable.HashMap[(Tree, Tree), Eq]
- private[this] val uniqueNonNullProps = new scala.collection.mutable.HashMap[Tree, Not]
- private[this] val uniqueTypeProps = new scala.collection.mutable.HashMap[(Tree, Type), Eq]
+ private[this] val uniqueEqualityProps = new mutable.HashMap[(Tree, Tree), Eq]
+ private[this] val uniqueNonNullProps = new mutable.HashMap[Tree, Not]
+ private[this] val uniqueTypeProps = new mutable.HashMap[(Tree, Type), Eq]
def uniqueEqualityProp(testedPath: Tree, rhs: Tree): Prop =
uniqueEqualityProps getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs)))
@@ -226,7 +218,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
// so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
case (f, t) =>
- t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+ t.isInstanceOf[Ident] && t.symbol.exists && pointsToBound(f)
}
val (boundFrom, boundTo) = boundSubst.unzip
val (unboundFrom, unboundTo) = unboundSubst.unzip
@@ -279,9 +271,9 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
case SubstOnlyTreeMaker(_, _) => True
case GuardTreeMaker(guard) =>
guard.tpe match {
- case ConstantType(Constant(true)) => True
- case ConstantType(Constant(false)) => False
- case _ => handleUnknown(tm)
+ case ConstantTrue => True
+ case ConstantFalse => False
+ case _ => handleUnknown(tm)
}
case ExtractorTreeMaker(_, _, _) |
ProductExtractorTreeMaker(_, _) |
@@ -335,19 +327,13 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
debug.patmat("treeMakers:")
debug.patmat(alignAcrossRows(cases, ">>"))
}
-
- def showTests(testss: List[List[Test]]) = {
- debug.patmat("tests: ")
- debug.patmat(alignAcrossRows(testss, "&"))
- }
}
-
}
trait MatchAnalysis extends MatchApproximation {
import PatternMatchingStats._
- import global.{Tree, Type, Symbol, NoSymbol, Ident, Select}
- import global.definitions.{isPrimitiveValueClass, ConsClass, isTupleSymbol}
+ import global._
+ import global.definitions._
trait MatchAnalyzer extends MatchApproximator {
def uncheckedWarning(pos: Position, msg: String) = global.currentUnit.uncheckedWarning(pos, msg)
@@ -492,8 +478,13 @@ trait MatchAnalysis extends MatchApproximation {
object CounterExample {
def prune(examples: List[CounterExample]): List[CounterExample] = {
- val distinct = examples.filterNot(_ == NoExample).toSet
- distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList
+ // SI-7669 Warning: we don't use examples.distinct here any more, as
+ // we can have A != B && A.coveredBy(B) && B.coveredBy(A)
+ // with Nil and List().
+ val result = mutable.Buffer[CounterExample]()
+ for (example <- examples if (!result.exists(example coveredBy _)))
+ result += example
+ result.toList
}
}
@@ -595,7 +586,7 @@ trait MatchAnalysis extends MatchApproximation {
private def unique(variable: Var): VariableAssignment =
uniques.getOrElseUpdate(variable, {
val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
- VariableAssignment(variable, eqTo.toList, neqTo.toList, mutable.HashMap.empty)
+ VariableAssignment(variable, eqTo.toList, neqTo.toList)
})
def apply(variable: Var): VariableAssignment = {
@@ -609,7 +600,7 @@ trait MatchAnalysis extends MatchApproximation {
else {
findVar(pre) foreach { preVar =>
val outerCtor = this(preVar)
- outerCtor.fields(field) = newCtor
+ outerCtor.addField(field, newCtor)
}
newCtor
}
@@ -617,15 +608,21 @@ trait MatchAnalysis extends MatchApproximation {
}
// node in the tree that describes how to construct a counter-example
- case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) {
+ case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const]) {
+ private val fields: mutable.Map[Symbol, VariableAssignment] = mutable.HashMap.empty
// need to prune since the model now incorporates all super types of a constant (needed for reachability)
private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
- private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
- private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
- private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
-
+ private lazy val ctorParams = if (ctor.paramss.isEmpty) Nil else ctor.paramss.head
+ private lazy val cls = ctor.safeOwner
+ private lazy val caseFieldAccs = cls.caseFieldAccessors
+
+ def addField(symbol: Symbol, assign: VariableAssignment) {
+ // SI-7669 Only register this field if this class contains it.
+ val shouldConstrainField = !symbol.isCaseAccessor || caseFieldAccs.contains(symbol)
+ if (shouldConstrainField) fields(symbol) = assign
+ }
def allFieldAssignmentsLegal: Boolean =
(fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal)
@@ -638,7 +635,7 @@ trait MatchAnalysis extends MatchApproximation {
def toCounterExample(beBrief: Boolean = false): CounterExample =
if (!allFieldAssignmentsLegal) NoExample
else {
- debug.patmat("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+ debug.patmat("describing "+ ((variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal)))
val res = prunedEqualTo match {
// a definite assignment to a value
case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
@@ -659,7 +656,7 @@ trait MatchAnalysis extends MatchApproximation {
cls match {
case ConsClass => ListExample(args())
- case _ if isTupleSymbol(cls) => TupleExample(args(true))
+ case _ if isTupleSymbol(cls) => TupleExample(args(brevity = true))
case _ => ConstructorExample(cls, args())
}
@@ -679,8 +676,7 @@ trait MatchAnalysis extends MatchApproximation {
// TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
case _ => NoExample
}
- debug.patmat("described as: "+ res)
- res
+ debug.patmatResult("described as")(res)
}
override def toString = toCounterExample().toString
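The reworked `prune` keeps the first of any group of mutually covering examples instead of relying on `distinct`, since two distinct examples (such as `Nil` and `List()`) can each cover the other. A self-contained sketch of that loop with a stand-in coverage relation:

    import scala.collection.mutable

    object PruneSketch {
      // stand-in for CounterExample; `covers` lists the labels this example subsumes
      final case class Example(label: String, covers: Set[String])
      // corresponds to `a coveredBy b` in the patch: b subsumes a
      def coveredBy(a: Example, b: Example): Boolean = b.covers.contains(a.label)

      def prune(examples: List[Example]): List[Example] = {
        val result = mutable.Buffer[Example]()
        for (example <- examples if !result.exists(coveredBy(example, _)))
          result += example
        result.toList
      }

      def main(args: Array[String]): Unit = {
        val nil  = Example("Nil", covers = Set("List()"))
        val list = Example("List()", covers = Set("Nil"))
        println(prune(List(nil, list))) // mutual coverage: only the first survives
      }
    }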
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index 57fab4eafa..06b39b035a 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -10,7 +10,6 @@ import scala.tools.nsc.symtab.Flags.SYNTHETIC
import scala.language.postfixOps
import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.NoPosition
/** Factory methods used by TreeMakers to make the actual trees.
*
@@ -18,10 +17,7 @@ import scala.reflect.internal.util.NoPosition
* and pure (aka "virtualized": match is parametric in its monad).
*/
trait MatchCodeGen extends Interface {
- import PatternMatchingStats._
- import global.{nme, treeInfo, definitions, gen, Tree, Type, Symbol, NoSymbol,
- appliedType, NoType, MethodType, newTermName, Name,
- Block, Literal, Constant, EmptyTree, Function, Typed, ValDef, LabelDef}
+ import global._
import definitions._
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -66,45 +62,44 @@ trait MatchCodeGen extends Interface {
def codegen: AbsCodegen
abstract class CommonCodegen extends AbsCodegen { import CODE._
- def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
- def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
- def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
- def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
- def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+
+ // Right now this blindly calls drop on the result of the unapplySeq
+ // unless it verifiably has no drop method (this is the case in particular
+ // with Array.) You should not actually have to write a method called drop
+ // for name-based matching, but this was an expedient route for the basics.
+ def drop(tgt: Tree)(n: Int): Tree = {
+ def callDirect = fn(tgt, nme.drop, LIT(n))
+ def callRuntime = Apply(REF(currentRun.runDefinitions.traversableDropMethod), tgt :: LIT(n) :: Nil)
+ def needsRuntime = (tgt.tpe ne null) && (typeOfMemberNamedDrop(tgt.tpe) == NoType)
+
+ if (needsRuntime) callRuntime else callDirect
+ }
+
+ // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder)
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
- def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
-
- // duplicated out of frustration with cast generation
- def mkZero(tp: Type): Tree = {
- tp.typeSymbol match {
- case UnitClass => Literal(Constant())
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
- }
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, any = true, wrapInApply = false)
+
+ def mkZero(tp: Type): Tree = gen.mkConstantZero(tp) match {
+ case Constant(null) => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ case const => Literal(const)
}
}
}
trait PureMatchMonadInterface extends MatchMonadInterface {
val matchStrategy: Tree
-
- def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
- def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
- protected def matchMonadSym = oneSig.finalResultType.typeSymbol
-
import CODE._
def _match(n: Name): SelectStart = matchStrategy DOT n
- private lazy val oneSig: Type = typer.typedOperator(_match(vpmName.one)).tpe // TODO: error message
+ // TODO: error message
+ private lazy val oneType = typer.typedOperator(_match(vpmName.one)).tpe
+ override def pureType(tp: Type): Type = firstParamType(appliedType(oneType, tp :: Nil))
}
trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
@@ -132,17 +127,11 @@ trait MatchCodeGen extends Interface {
// __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
// __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
- def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitTpe)), next)
}
}
- trait OptimizedMatchMonadInterface extends MatchMonadInterface {
- override def inMatchMonad(tp: Type): Type = optionType(tp)
- override def pureType(tp: Type): Type = tp
- override protected def matchMonadSym = OptionClass
- }
-
- trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+ trait OptimizedCodegen extends CodegenCore with TypedSubstitution with MatchMonadInterface {
override def codegen: AbsCodegen = optimizedCodegen
// when we know we're targeting Option, do some inlining the optimizer won't do
@@ -158,9 +147,8 @@ trait MatchCodeGen extends Interface {
* if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
*/
def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
- val matchEnd = newSynthCaseLabel("matchEnd")
val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
- matchEnd setInfo MethodType(List(matchRes), restpe)
+ val matchEnd = newSynthCaseLabel("matchEnd") setInfo MethodType(List(matchRes), restpe)
def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
var _currCase = newCaseSym
@@ -172,23 +160,22 @@ trait MatchCodeGen extends Interface {
LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
}
-
// must compute catchAll after caseLabels (side-effects nextCase)
// catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
// if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
val catchAllDef = matchFailGen map { matchFailGen =>
- val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+ val scrutRef = scrutSym.fold(EmptyTree: Tree)(REF) // for alternatives
LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
} toList // at most 1 element
// scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+ val scrutDef = scrutSym.fold(List[Tree]())(ValDef(_, scrut) :: Nil) // for alternatives
// the generated block is taken apart in TailCalls under the following assumptions
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
Block(
scrutDef ++ caseDefs ++ catchAllDef,
LabelDef(matchEnd, List(matchRes), REF(matchRes))
@@ -210,15 +197,14 @@ trait MatchCodeGen extends Interface {
// next: MatchMonad[U]
// returns MatchMonad[U]
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
- val tp = inMatchMonad(b.tpe)
- val prevSym = freshSym(prev.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
-
+ val prevSym = freshSym(prev.pos, prev.tpe, "o")
BLOCK(
- VAL(prevSym) === prev,
+ ValDef(prevSym, prev),
// must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
- ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ ifThenElseZero(
+ NOT(prevSym DOT vpmName.isEmpty),
+ Substitution(b, prevSym DOT vpmName.get)(next)
+ )
)
}
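
Spelled out over Option, the inlining this optimized flatMap performs looks like the hand-written sketch below (a hypothetical standalone rendering, not the tree the compiler actually builds): rather than calling Option.flatMap, it tests isEmpty and reads get directly, falling through to the zero (None) branch.

object InlinedFlatMapSketch {
  def flatMapInlined[A, B](prev: Option[A])(next: A => Option[B]): Option[B] = {
    val o = prev                      // evaluate the previous extractor result once
    if (!o.isEmpty) next(o.get)       // isEmpty/get rather than flatMap, as in the generated code
    else None                         // the "zero" of the match monad
  }

  def main(args: Array[String]): Unit =
    println(flatMapInlined(Some(2))(x => Some(x * 3))) // Some(6)
}
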
@@ -228,14 +214,12 @@ trait MatchCodeGen extends Interface {
// next == MatchMonad[U]
// returns MatchMonad[U]
def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
- val rest =
+ val rest = (
// only emit a local val for `nextBinder` if it's actually referenced in `next`
if (next.exists(_.symbol eq nextBinder))
- BLOCK(
- VAL(nextBinder) === res,
- next
- )
+ BLOCK(ValDef(nextBinder, res), next)
else next
+ )
ifThenElseZero(cond, rest)
}
@@ -255,4 +239,4 @@ trait MatchCodeGen extends Interface {
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
new file mode 100644
index 0000000000..0d08120e43
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
@@ -0,0 +1,37 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+/** Segregating this super hacky CPS code. */
+trait MatchCps {
+ self: PatternMatching =>
+
+ import global._
+
+ // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
+ private object CpsSymbols {
+ private def cpsSymbol(name: String) = rootMirror.getClassIfDefined(s"scala.util.continuations.$name")
+
+ val MarkerCPSAdaptPlus = cpsSymbol("cpsPlus")
+ val MarkerCPSAdaptMinus = cpsSymbol("cpsMinus")
+ val MarkerCPSSynth = cpsSymbol("cpsSynth")
+ val MarkerCPSTypes = cpsSymbol("cpsParam")
+ val stripTriggerCPSAnns = Set[Symbol](MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
+ val strippedCPSAnns = stripTriggerCPSAnns + MarkerCPSTypes
+
+ // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+ // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
+ // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
+ def removeCPSFromPt(pt: Type): Type = (
+ if (MarkerCPSAdaptPlus.exists && (stripTriggerCPSAnns exists pt.hasAnnotation))
+ pt filterAnnotations (ann => !(strippedCPSAnns exists ann.matches))
+ else
+ pt
+ )
+ }
+ def removeCPSFromPt(pt: Type): Type = CpsSymbols removeCPSFromPt pt
+}
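
The stripping rule in removeCPSFromPt above (if any trigger annotation is present, drop the whole CPS family) can be illustrated with a small standalone sketch. The string names below are stand-ins for the annotation class symbols and are purely illustrative.

object CpsStripSketch {
  val triggers: Set[String] = Set("cpsSynth", "cpsMinus", "cpsPlus")
  val stripped: Set[String] = triggers + "cpsParam"

  // strip the whole CPS family, but only when at least one trigger annotation is present
  def removeCps(annotations: List[String]): List[String] =
    if (annotations.exists(triggers)) annotations.filterNot(stripped)
    else annotations

  def main(args: Array[String]): Unit = {
    println(removeCps(List("cpsPlus", "cpsParam", "deprecated"))) // List(deprecated)
    println(removeCps(List("cpsParam")))                          // unchanged: no trigger present
  }
}
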
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index c570dd8572..8ff7824159 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -11,7 +11,6 @@ import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.NoPosition
/** Optimize and analyze matches based on their TreeMaker-representation.
*
@@ -20,15 +19,9 @@ import scala.reflect.internal.util.NoPosition
*/
// TODO: split out match analysis
trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
- import PatternMatchingStats._
- import global.{Tree, Type, Symbol, NoSymbol, CaseDef, atPos,
- ConstantType, Literal, Constant, gen, EmptyTree, distinctBy,
- Typed, treeInfo, nme, Ident,
- Apply, If, Bind, lub, Alternative, deriveCaseDef, Match, MethodType, LabelDef, TypeTree, Throw}
-
+ import global._
import global.definitions._
-
////
trait CommonSubconditionElimination extends OptimizedCodegen with MatchApproximator {
/** a flow-sensitive, generalised, common sub-expression elimination
@@ -148,19 +141,19 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
object ReusedCondTreeMaker {
def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
}
- class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
+ class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker {
lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
+ lazy val storedCond = freshSym(pos, BooleanTpe, "rc") setFlag MUTABLE
lazy val treesToHoist: List[Tree] = {
nextBinder setFlag MUTABLE
- List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
+ List(storedCond, nextBinder) map (b => ValDef(b, codegen.mkZero(b.info)))
}
// TODO: finer-grained duplication
def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
- override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
+ override def toString = "Memo"+((nextBinder.name, storedCond.name, cond, res, substitution))
}
case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
@@ -199,7 +192,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
// and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
}
- override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
+ override def toString = "R"+((lastReusedTreeMaker.storedCond.name, substitution))
}
}
@@ -217,7 +210,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
// }
//// SWITCHES -- TODO: operate on Tests rather than TreeMakers
- trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface {
+ trait SwitchEmission extends TreeMakers with MatchMonadInterface {
import treeInfo.isGuardedCase
abstract class SwitchMaker {
@@ -240,9 +233,6 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
def defaultBody: Tree
def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef
- private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
- if (xs exists (_.isEmpty)) None else Some(xs.flatten)
-
object GuardAndBodyTreeMakers {
def unapply(tms: List[TreeMaker]): Option[(Tree, Tree)] = {
tms match {
@@ -409,23 +399,15 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
private def noGuards(cs: List[CaseDef]): Boolean = !cs.exists(isGuardedCase)
// must do this before removing guards from cases and collapsing (SI-6011, SI-6048)
- private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
- var cases = cs
- var unreachable: Option[CaseDef] = None
-
- while (cases.nonEmpty && unreachable.isEmpty) {
- val currCase = cases.head
- if (isDefault(currCase) && cases.tail.nonEmpty) // subsumed by the `else if` that follows, but faster
- unreachable = Some(cases.tail.head)
- else if (!isGuardedCase(currCase) || currCase.guard.tpe =:= ConstantType(Constant(true)))
- unreachable = cases.tail.find(caseImplies(currCase))
- else if (currCase.guard.tpe =:= ConstantType(Constant(false)))
- unreachable = Some(currCase)
-
- cases = cases.tail
+ private def unreachableCase(cases: List[CaseDef]): Option[CaseDef] = {
+ def loop(cases: List[CaseDef]): Option[CaseDef] = cases match {
+ case head :: next :: _ if isDefault(head) => Some(next) // subsumed by the next case, but faster
+ case head :: rest if !isGuardedCase(head) || head.guard.tpe =:= ConstantTrue => rest find caseImplies(head) orElse loop(rest)
+ case head :: _ if head.guard.tpe =:= ConstantFalse => Some(head)
+ case _ :: rest => loop(rest)
+ case _ => None
}
-
- unreachable
+ loop(cases)
}
// empty list ==> failure
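
The recursive scan above reads naturally against a simplified case model. In the sketch below everything is an illustrative stand-in: implies plays the role of caseImplies, and a precomputed Option[Boolean] stands in for a guard whose truth value is statically known.

object UnreachableCaseSketch {
  final case class Case(pat: String, guard: Option[Boolean] = None, isDefault: Boolean = false)

  // stand-in for caseImplies: an earlier case implies a later one when the patterns coincide
  def implies(earlier: Case)(later: Case): Boolean = earlier.pat == later.pat

  def unreachableCase(cases: List[Case]): Option[Case] = cases match {
    case head :: next :: _ if head.isDefault         => Some(next)  // anything after a default is dead
    case head :: rest if head.guard.forall(identity) => rest.find(implies(head)).orElse(unreachableCase(rest))
    case head :: _ if head.guard.contains(false)     => Some(head)  // a statically false guard never matches
    case _ :: rest                                   => unreachableCase(rest)
    case Nil                                         => None
  }

  def main(args: Array[String]): Unit =
    println(unreachableCase(List(Case("Some(x)"), Case("_", isDefault = true), Case("None"))))
    // Some(Case(None,None,false))
}
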
@@ -510,7 +492,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
- val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ val switchableTpe = Set(ByteTpe, ShortTpe, IntTpe, CharTpe)
val alternativesSupported = true
val canJump = true
@@ -535,7 +517,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
def defaultSym: Symbol = scrutSym
- def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
+ def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse Throw(MatchErrorClass.tpe, REF(scrutSym)) }
def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
(DEFAULT IF guard) ==> body
}}
@@ -550,10 +532,10 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
else {
// match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
val scrutToInt: Tree =
- if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ if (scrutSym.tpe =:= IntTpe) REF(scrutSym)
else (REF(scrutSym) DOT (nme.toInt))
Some(BLOCK(
- VAL(scrutSym) === scrut,
+ ValDef(scrutSym, scrut),
Match(scrutToInt, caseDefsWithDefault) // a switch
))
}
@@ -578,16 +560,16 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableTpe) => true
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableTpe) => true
case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
case _ => false
}
- lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+ lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableTpe)
def defaultBody: Tree = Throw(CODE.REF(defaultSym))
def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
+ (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableTpe)))) IF guard) ==> body
}}
}
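
At the user level, the synthetic default case this Throwable switch maker appends amounts to binding the throwable and rethrowing it when no user-written case applies. The sketch below is a hand-written approximation of that shape, not the generated tree; the names are illustrative.

object CatchDefaultSketch {
  def safely[A](body: => A): Option[A] =
    try Some(body)
    catch {
      case _: IllegalArgumentException => None      // a user-written catch case
      case ex: Throwable               => throw ex  // the synthetic default: rethrow
    }

  def main(args: Array[String]): Unit =
    println(safely(require(false, "boom")))         // None
}
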
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index 90c52e3eb6..699e98f963 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -12,92 +12,183 @@ import scala.reflect.internal.util.Statistics
/** Translate typed Trees that represent pattern matches into the pattern matching IR, defined by TreeMakers.
*/
-trait MatchTranslation { self: PatternMatching =>
+trait MatchTranslation {
+ self: PatternMatching =>
+
import PatternMatchingStats._
- import global.{phase, currentRun, Symbol,
- Apply, Bind, CaseDef, ClassInfoType, Ident, Literal, Match,
- Alternative, Constant, EmptyTree, Select, Star, This, Throw, Typed, UnApply,
- Type, MethodType, WildcardType, PolyType, ErrorType, NoType, TypeRef, typeRef,
- Name, NoSymbol, Position, Tree, atPos, glb, rootMirror, treeInfo, nme, Transformer,
- elimAnonymousClass, asCompactDebugString, hasLength}
- import global.definitions.{ThrowableClass, SeqClass, ScalaPackageClass, BooleanClass, UnitClass, RepeatedParamClass,
- repeatedToSeq, isRepeatedParamType, getProductArgs}
+ import global._
+ import definitions._
import global.analyzer.{ErrorUtils, formalTypes}
+ import treeInfo.{ WildcardStarArg, Unapplied, isStar, unbind }
+ import CODE._
+
+ // Always map repeated params to sequences
+ private def setVarInfo(sym: Symbol, info: Type) =
+ sym setInfo debug.patmatResult(s"changing ${sym.defString} to")(repeatedToSeq(info))
+
+ private def hasSym(t: Tree) = t.symbol != null && t.symbol != NoSymbol
- trait MatchTranslator extends TreeMakers {
+ trait MatchTranslator extends TreeMakers with TreeMakerWarnings {
import typer.context
- // Why is it so difficult to say "here's a name and a context, give me any
- // matching symbol in scope" ? I am sure this code is wrong, but attempts to
- // use the scopes of the contexts in the enclosing context chain discover
-    // nothing. How to associate a name with a symbol would be a wonderful
- // linkage for which to establish a canonical acquisition mechanism.
- def matchingSymbolInScope(pat: Tree): Symbol = {
- def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
- case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
- case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
- case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
- case _ => NoSymbol
+ /** A conservative approximation of which patterns do not discern anything.
+ * They are discarded during the translation.
+ */
+ object WildcardPattern {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
+ case Star(WildcardPattern()) => true
+ case x: Ident => treeInfo.isVarPattern(x)
+ case Alternative(ps) => ps forall unapply
+ case EmptyTree => true
+ case _ => false
}
- pat match {
- case Bind(name, _) =>
- context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
- res orElse declarationOfName(ctx.owner.rawInfo, name))
- case _ => NoSymbol
+ }
+
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall unapply
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
}
}
- // Issue better warnings than "unreachable code" when people mis-use
- // variable patterns thinking they bind to existing identifiers.
- //
- // Possible TODO: more deeply nested variable patterns, like
- // case (a, b) => 1 ; case (c, d) => 2
- // However this is a pain (at least the way I'm going about it)
- // and I have to think these detailed errors are primarily useful
- // for beginners, not people writing nested pattern matches.
- def checkMatchVariablePatterns(cases: List[CaseDef]) {
- // A string describing the first variable pattern
- var vpat: String = null
- // Using an iterator so we can recognize the last case
- val it = cases.iterator
-
- def addendum(pat: Tree) = {
- matchingSymbolInScope(pat) match {
- case NoSymbol => ""
- case sym =>
- val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
- s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ object SymbolBound {
+ def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
+ case Bind(_, expr) if hasSym(tree) => Some(tree.symbol -> expr)
+ case _ => None
+ }
+ }
+
+ def newBoundTree(tree: Tree, pt: Type): BoundTree = tree match {
+ case SymbolBound(sym, expr) => BoundTree(setVarInfo(sym, pt), expr)
+ case _ => BoundTree(setVarInfo(freshSym(tree.pos, prefix = "p"), pt), tree)
+ }
+
+ final case class BoundTree(binder: Symbol, tree: Tree) {
+ private lazy val extractor = ExtractorCall(tree)
+
+ def pos = tree.pos
+ def tpe = binder.info.dealiasWiden // the type of the variable bound to the pattern
+ def pt = unbound match {
+ case Star(tpt) => this glbWith seqType(tpt.tpe)
+ case TypeBound(tpe) => tpe
+ case tree => tree.tpe
+ }
+ def glbWith(other: Type) = glb(tpe :: other :: Nil).normalize
+
+ object SymbolAndTypeBound {
+ def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+ case SymbolBound(sym, SymbolAndTypeBound(_, tpe)) => Some(sym -> tpe)
+ case TypeBound(tpe) => Some(binder -> tpe)
+ case _ => None
}
}
- while (it.hasNext) {
- val cdef = it.next
- // If a default case has been seen, then every succeeding case is unreachable.
- if (vpat != null)
- context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
- // If this is a default case and more cases follow, warn about this one so
- // we have a reason to mention its pattern variable name and any corresponding
- // symbol in scope. Errors will follow from the remaining cases, at least
- // once we make the above warning an error.
- else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
- val vpatName = cdef.pat match {
- case Bind(name, _) => s" '$name'"
- case _ => ""
- }
- vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
- context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ object TypeBound {
+ def unapply(tree: Tree): Option[Type] = unbind(tree) match {
+ case Typed(Ident(_), _) if tree.tpe != null => Some(tree.tpe)
+ case _ => None
}
}
+
+ private def rebindTo(pattern: Tree) = BoundTree(binder, pattern)
+ private def step(treeMakers: TreeMaker*)(subpatterns: BoundTree*): TranslationStep = TranslationStep(treeMakers.toList, subpatterns.toList)
+
+ private def bindingStep(sub: Symbol, subpattern: Tree) = step(SubstOnlyTreeMaker(sub, binder))(rebindTo(subpattern))
+ private def equalityTestStep() = step(EqualityTestTreeMaker(binder, tree, pos))()
+ private def typeTestStep(sub: Symbol, subPt: Type) = step(TypeTestTreeMaker(sub, binder, subPt, glbWith(subPt))(pos))()
+ private def alternativesStep(alts: List[Tree]) = step(AlternativesTreeMaker(binder, translatedAlts(alts), alts.head.pos))()
+ private def translatedAlts(alts: List[Tree]) = alts map (alt => rebindTo(alt).translate())
+ private def noStep() = step()()
+
+ private def unsupportedPatternMsg = sm"""
+ |unsupported pattern: ${tree.shortClass} / $this (this is a scalac bug.)
+ |""".trim
+
+ // example check: List[Int] <:< ::[Int]
+ private def extractorStep(): TranslationStep = {
+ def paramType = extractor.aligner.wholeType
+ import extractor.treeMaker
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ lazy val typeTest = TypeTestTreeMaker(binder, binder, paramType, paramType)(pos, extractorArgTypeTest = true)
+ // check whether typetest implies binder is not null,
+ // even though the eventual null check will be on typeTest.nextBinder
+ // it'll be equal to binder casted to paramType anyway (and the type test is on binder)
+ def extraction: TreeMaker = treeMaker(typeTest.nextBinder, typeTest impliesBinderNonNull binder, pos)
+
+ // paramType = the type expected by the unapply
+ // TODO: paramType may contain unbound type params (run/t2800, run/t3530)
+ val makers = (
+ // Statically conforms to paramType
+ if (this ensureConformsTo paramType) treeMaker(binder, false, pos) :: Nil
+ else typeTest :: extraction :: Nil
+ )
+ step(makers: _*)(extractor.subBoundTrees: _*)
+ }
+
+ // Summary of translation cases. I moved the excerpts from the specification further below so all
+ // the logic can be seen at once.
+ //
+ // [1] skip wildcard trees -- no point in checking them
+ // [2] extractor and constructor patterns
+ // [3] replace subpatBinder by patBinder, as if the Bind was not there.
+ // It must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type,
+ // this is not guaranteed until we cast
+ // [4] typed patterns - a typed pattern never has any subtrees
+ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+ // [5] literal and stable id patterns
+ // [6] pattern alternatives
+ // [7] symbol-less bind patterns - this happens in certain ill-formed programs, there'll be an error later
+ // don't fail here though (or should we?)
+ def nextStep(): TranslationStep = tree match {
+ case WildcardPattern() => noStep()
+ case _: UnApply | _: Apply => extractorStep()
+ case SymbolAndTypeBound(sym, tpe) => typeTestStep(sym, tpe)
+ case TypeBound(tpe) => typeTestStep(binder, tpe)
+ case SymbolBound(sym, expr) => bindingStep(sym, expr)
+ case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) => equalityTestStep()
+ case Alternative(alts) => alternativesStep(alts)
+ case _ => context.unit.error(pos, unsupportedPatternMsg) ; noStep()
+ }
+ def translate(): List[TreeMaker] = nextStep() merge (_.translate())
+
+ private def setInfo(paramType: Type): Boolean = {
+ devWarning(s"resetting info of $this to $paramType")
+ setVarInfo(binder, paramType)
+ true
+ }
+ // If <:< but not =:=, no type test needed, but the tree maker relies on the binder having
+ // exactly paramType (and not just some type compatible with it.) SI-6624 shows this is necessary
+ // because apparently patBinder may have an unfortunate type (.decls don't have the case field
+ // accessors) TODO: get to the bottom of this -- I assume it happens when type checking
+ // infers a weird type for an unapply call. By going back to the parameterType for the
+ // extractor call we get a saner type, so let's just do that for now.
+ def ensureConformsTo(paramType: Type): Boolean = (
+ (tpe =:= paramType)
+ || (tpe <:< paramType) && setInfo(paramType)
+ )
+
+ private def concreteType = tpe.bounds.hi
+ private def unbound = unbind(tree)
+ private def tpe_s = if (pt <:< concreteType) "" + pt else s"$pt (binder: $tpe)"
+ private def at_s = unbound match {
+ case WildcardPattern() => ""
+ case pat => s" @ $pat"
+ }
+ override def toString = s"${binder.name}: $tpe_s$at_s"
}
- // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
- private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
- private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
- private lazy val MarkerCPSSynth = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
- private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
- private lazy val MarkerCPSTypes = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
- private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
- private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
+ // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+ final case class TranslationStep(makers: List[TreeMaker], subpatterns: List[BoundTree]) {
+ def merge(f: BoundTree => List[TreeMaker]): List[TreeMaker] = makers ::: (subpatterns flatMap f)
+ override def toString = if (subpatterns.isEmpty) "" else subpatterns.mkString("(", ", ", ")")
+ }
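
The nextStep/translate recursion above (each step contributes some tree makers plus subpatterns, and translate merges them depth-first via TranslationStep.merge) can be modelled on a toy pattern AST. Everything in this sketch is an illustrative stand-in for the compiler's Trees and TreeMakers, not the real types.

object TranslationStepSketch {
  sealed trait Pat
  case object Wild                         extends Pat
  final case class Const(value: Any)       extends Pat
  final case class Extract(sub: List[Pat]) extends Pat

  // a step yields some "tree makers" (plain strings here) plus subpatterns to translate recursively
  final case class Step(makers: List[String], subpatterns: List[Pat]) {
    def merge(f: Pat => List[String]): List[String] = makers ::: subpatterns.flatMap(f)
  }

  def nextStep(p: Pat): Step = p match {
    case Wild         => Step(Nil, Nil)                       // wildcards discern nothing
    case Const(v)     => Step(List(s"equalityTest($v)"), Nil)
    case Extract(sub) => Step(List("extractorCall"), sub)     // recurse on the subpatterns
  }

  def translate(p: Pat): List[String] = nextStep(p).merge(translate)

  def main(args: Array[String]): Unit =
    println(translate(Extract(List(Const(1), Wild, Extract(List(Const(2)))))))
    // List(extractorCall, equalityTest(1), extractorCall, equalityTest(2))
}
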
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -113,10 +204,8 @@ trait MatchTranslation { self: PatternMatching =>
val Match(selector, cases) = match_
val (nonSyntheticCases, defaultOverride) = cases match {
- case init :+ last if treeInfo isSyntheticDefaultCase last =>
- (init, Some(((scrut: Tree) => last.body)))
- case _ =>
- (cases, None)
+ case init :+ last if treeInfo isSyntheticDefaultCase last => (init, Some(((scrut: Tree) => last.body)))
+ case _ => (cases, None)
}
checkMatchVariablePatterns(nonSyntheticCases)
@@ -133,18 +222,11 @@ trait MatchTranslation { self: PatternMatching =>
val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
- val origPt = match_.tpe
// when one of the internal cps-type-state annotations is present, strip all CPS annotations
- // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
- // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
- val ptUnCPS =
- if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
- removeCPSAdaptAnnotations(origPt)
- else origPt
-
+ val origPt = removeCPSFromPt(match_.tpe)
// relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
// pt is the skolemized version
- val pt = repeatedToSeq(ptUnCPS)
+ val pt = repeatedToSeq(origPt)
// val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
@@ -169,36 +251,34 @@ trait MatchTranslation { self: PatternMatching =>
val bindersAndCases = caseDefs map { caseDef =>
// generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
// if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
- val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val caseScrutSym = freshSym(pos, pureType(ThrowableTpe))
(caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
}
- for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
+ for(cases <- emitTypeSwitch(bindersAndCases, pt).toList
if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
}
val catches = if (swatches.nonEmpty) swatches else {
- val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val scrutSym = freshSym(pos, pureType(ThrowableTpe))
val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
- val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+ val exSym = freshSym(pos, pureType(ThrowableTpe), "ex")
List(
atPos(pos) {
CaseDef(
Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
EmptyTree,
- combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
+ combineCasesNoSubstOnly(REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(REF(exSym))))
)
})
}
- typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+ typer.typedCases(catches, ThrowableTpe, WildcardType)
}
-
-
/** The translation of `pat if guard => body` has two aspects:
* 1) the substitution due to the variables bound by patterns
* 2) the combination of the extractor calls using `flatMap`.
@@ -227,166 +307,12 @@ trait MatchTranslation { self: PatternMatching =>
* a function that will take care of binding and substitution of the next ast (to the right).
*
*/
- def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
- translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = {
+ val CaseDef(pattern, guard, body) = caseDef
+ translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt)
}
- def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
- // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
- type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
- def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
- def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
-
- val pos = patTree.pos
-
- def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
- if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
- // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
-
- debug.patmat("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
-
- // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
- // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
- // (it will later result in a type test when `tp` is not a subtype of `b.info`)
- // TODO: can we simplify this, together with the Bound case?
- (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
- debug.patmat("changing "+ b +" : "+ b.info +" -> "+ tp)
- b setInfo tp
- }
-
- // example check: List[Int] <:< ::[Int]
- // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
- val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
- if (patBinder.info.widen <:< extractor.paramType) {
- // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
- // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
- // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
- // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
- /* TODO: uncomment when `settings.developer` and `devWarning` become available
- if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
- devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
- */
- (Nil, patBinder setInfo extractor.paramType, false)
- } else {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
-
- // check whether typetest implies patBinder is not null,
- // even though the eventual null check will be on patBinderOrCasted
- // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
- (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
- }
-
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
- }
-
-
- object MaybeBoundTyped {
- /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
- * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
- * The returned type is the one inferred by inferTypedPattern (`owntype`)
- *
- * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
- */
- def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
- // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
- case Bound(subpatBinder, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
- case Bind(_, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
- case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
- case _ => None
- }
- }
-
- val (treeMakers, subpats) = patTree match {
- // skip wildcard trees -- no point in checking them
- case WildcardPattern() => noFurtherSubPats()
- case UnApply(unfun, args) =>
- // TODO: check unargs == args
- // debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
- translateExtractorPattern(ExtractorCall(unfun, args))
-
-      /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
- It consists of a stable identifier c, followed by element patterns p1, ..., pn.
-        The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
-
- If the case class is monomorphic, then it must conform to the expected type of the pattern,
-        and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
-
- If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
-        The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
-
- The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
-        A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
- **/
- case Apply(fun, args) =>
- ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
- ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
- noFurtherSubPats()
- }
-
- /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
- The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
-        This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
- **/
- // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
- case MaybeBoundTyped(subPatBinder, pt) =>
- val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize
- // a typed pattern never has any subtrees
- noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
-
- /** A pattern binder x@p consists of a pattern variable x and a pattern p.
- The type of the variable x is the static type T of the pattern p.
- This pattern matches any value v matched by the pattern p,
- provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
- and it binds the variable name to that value.
- **/
- case Bound(subpatBinder, p) =>
- // replace subpatBinder by patBinder (as if the Bind was not there)
- withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
- // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
- (patBinder, p)
- )
-
- /** 8.1.4 Literal Patterns
- A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
- The type of L must conform to the expected type of the pattern.
-
-        8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
-        The pattern matches any value v such that r == v (§12.1).
- The type of r must conform to the expected type of the pattern.
- **/
- case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
- noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
-
- case Alternative(alts) =>
- noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
-
- /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
- case class Foo(x: Int, y: String)
- case class Bar(z: Int)
-
- def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
- */
-
- case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
- debug.patmat("WARNING: Bind tree with unbound symbol "+ patTree)
- noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
-
- // case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
-
- case _ =>
- typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
- noFurtherSubPats()
- }
-
- treeMakers ++ subpats.flatMap { case (binder, pat) =>
- translatePattern(binder, pat) // recurse on subpatterns
- }
- }
+ def translatePattern(bound: BoundTree): List[TreeMaker] = bound.translate()
def translateGuard(guard: Tree): List[TreeMaker] =
if (guard == EmptyTree) Nil
@@ -401,27 +327,70 @@ trait MatchTranslation { self: PatternMatching =>
def translateBody(body: Tree, matchPt: Type): TreeMaker =
BodyTreeMaker(body, matchPt)
+ // Some notes from the specification
+
+    /*A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+      The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+ If the case class is monomorphic, then it must conform to the expected type of the pattern,
+      and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected
+ types of the element patterns p1, ..., pn.
+
+ If the case class is polymorphic, then its type parameters are instantiated so that the
+ instantiation of c conforms to the expected type of the pattern.
+      The instantiated formal parameter types of c’s primary constructor are then taken as the
+ expected types of the component patterns p1, ..., pn.
+
+ The pattern matches all objects created from constructor invocations c(v1, ..., vn)
+ where each element pattern pi matches the corresponding value vi .
+      A special case arises when c’s formal parameter types end in a repeated parameter.
+      This is further discussed in (§8.1.9).
+ **/
+
+ /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+      This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ */
+
+ /* A pattern binder x@p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ */
+
+ /* 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
+
+      8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+      The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ */
+
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
object ExtractorCall {
- def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
- def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
+ // TODO: check unargs == args
+ def apply(tree: Tree): ExtractorCall = tree match {
+ case UnApply(unfun, args) => new ExtractorCallRegular(alignPatterns(tree), unfun, args) // extractor
+ case Apply(fun, args) => new ExtractorCallProd(alignPatterns(tree), fun, args) // case class
+ }
}
- abstract class ExtractorCall(val args: List[Tree]) {
- val nbSubPats = args.length
+ abstract class ExtractorCall(val aligner: PatternAligned) {
+ import aligner._
+ def fun: Tree
+ def args: List[Tree]
- // everything okay, captain?
- def isTyped : Boolean
-
- def isSeq: Boolean
- lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
-
- // to which type should the previous binder be casted?
- def paramType : Type
+ // don't go looking for selectors if we only expect one pattern
+ def rawSubPatTypes = aligner.extractedTypes
+ def resultInMonad = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType)
+ def resultType = fun.tpe.finalResultType
/** Create the TreeMaker that embodies this extractor call
*
@@ -433,79 +402,82 @@ trait MatchTranslation { self: PatternMatching =>
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
- lazy val subPatBinders = args map {
- case Bound(b, p) => b
- case p => freshSym(p.pos, prefix = "p")
- }
-
- lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
- case (b, Bound(_, p)) => (b, p)
- case bp => bp
- }
+ // must set infos to `subPatTypes`, which are provided by extractor's result,
+ // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+ // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
+ def subPatBinders = subBoundTrees map (_.binder)
+ lazy val subBoundTrees = (args, subPatTypes).zipped map newBoundTree
// never store these in local variables (for PreserveSubPatBinders)
- lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
- case (b, PatternBoundToUnderscore()) => b
- }.toSet
-
- def subPatTypes: List[Type] =
- if(isSeq) {
- val TypeRef(pre, SeqClass, args) = seqTp
- // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
- val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
-
- if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
- else formalTypes(formalsWithRepeated, nbSubPats)
- } else rawSubPatTypes
-
- protected def rawSubPatTypes: List[Type]
-
- protected def seqTp = rawSubPatTypes.last baseType SeqClass
- protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
- protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
- protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
- protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
- protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
- protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
+ lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet
+
+ // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
+ private def nonStarSubPatTypes = aligner.typedNonStarPatterns map (_.tpe)
+
+ def subPatTypes: List[Type] = typedPatterns map (_.tpe)
+
+ // there are `productArity` non-seq elements in the tuple.
+ protected def firstIndexingBinder = productArity
+ protected def expectedLength = elementArity
+ protected def lastIndexingBinder = totalArity - starArity - 1
+
+ private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList
+ private def genTake(binder: Symbol, n: Int): List[Tree] = (0 until n).toList map (codegen index seqTree(binder))
+ private def genDrop(binder: Symbol, n: Int): List[Tree] = codegen.drop(seqTree(binder))(expectedLength) :: Nil
+
+ // codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder + 1)
protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require isSeq
+ // the trees that select the subpatterns on the extractor's result,
+ // referenced by `binder`
protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
- val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
- val nbIndexingIndices = indexingIndices.length
-
+ def lastTrees: List[Tree] = (
+ if (!aligner.isStar) Nil
+ else if (expectedLength == 0) seqTree(binder) :: Nil
+ else genDrop(binder, expectedLength)
+ )
// this error-condition has already been checked by checkStarPatOK:
- // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
- // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
- (((1 to firstIndexingBinder) map tupleSel(binder)) ++
- // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
- (indexingIndices map codegen.index(seqTree(binder))) ++
- // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
- (if(!lastIsStar) Nil else List(
- if(nbIndexingIndices == 0) seqTree(binder)
- else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+
+ // [1] there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ // [2] then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ // [3] the last one -- if the last subpattern is a sequence wildcard:
+ // drop the prefix (indexed by the refs on the preceding line), return the remainder
+ ( productElemsToN(binder, firstIndexingBinder)
+ ++ genTake(binder, expectedLength)
+ ++ lastTrees
+ ).toList
}
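
The three-part decomposition in subPatRefsSeq ([1] product elements, [2] the indexed prefix of the sequence, [3] the star remainder) is easiest to see on concrete values. The sketch below is a simplified standalone model of that selection, not the generated selection trees.

object SeqSubPatternsSketch {
  def subPatRefs(product: List[Any], seq: Seq[Any], elementArity: Int, isStar: Boolean): List[Any] = {
    val indexed   = (0 until elementArity).toList.map(seq)             // one element per non-star seq subpattern
    val remainder = if (isStar) List(seq.drop(elementArity)) else Nil  // the `rest @ _*` binder sees the rest
    product ::: indexed ::: remainder
  }

  def main(args: Array[String]): Unit =
    // e.g. two product components, two fixed seq elements and a trailing `rest @ _*`
    println(subPatRefs(List("p1", "p2"), Seq(1, 2, 3, 4), elementArity = 2, isStar = true))
    // List(p1, p2, 1, 2, List(3, 4))
}
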
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
- protected def subPatRefs(binder: Symbol): List[Tree] =
- if (nbSubPats == 0) Nil
- else if (isSeq) subPatRefsSeq(binder)
- else ((1 to nbSubPats) map tupleSel(binder)).toList
+ protected def subPatRefs(binder: Symbol): List[Tree] = (
+ if (totalArity > 0 && isSeq) subPatRefsSeq(binder)
+ else productElemsToN(binder, totalArity)
+ )
+
+ private def compareInts(t1: Tree, t2: Tree) =
+ gen.mkMethodCall(termMember(ScalaPackage, "math"), TermName("signum"), Nil, (t1 INT_- t2) :: Nil)
protected def lengthGuard(binder: Symbol): Option[Tree] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- checkedLength map { expectedLength => import CODE._
+ checkedLength map { expectedLength =>
// `binder.lengthCompare(expectedLength)`
- def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
+ // ...if binder has a lengthCompare method, otherwise
+ // `scala.math.signum(binder.length - expectedLength)`
+ def checkExpectedLength = sequenceType member nme.lengthCompare match {
+ case NoSymbol => compareInts(Select(seqTree(binder), nme.length), LIT(expectedLength))
+ case lencmp => (seqTree(binder) DOT lencmp)(LIT(expectedLength))
+ }
// the comparison to perform
// when the last subpattern is a wildcard-star the expectedLength is but a lower bound
// (otherwise equality is required)
def compareOp: (Tree, Tree) => Tree =
- if (lastIsStar) _ INT_>= _
- else _ INT_== _
+ if (aligner.isStar) _ INT_>= _
+ else _ INT_== _
// `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
(seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
@@ -513,35 +485,14 @@ trait MatchTranslation { self: PatternMatching =>
def checkedLength: Option[Int] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- if (!isSeq || (expectedLength < minLenToCheck)) None
+ if (!isSeq || expectedLength < starArity) None
else Some(expectedLength)
-
}
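
The length guard and checkedLength logic above boil down to the runtime check sketched below: for a star pattern the expected length is only a lower bound, otherwise it must match exactly. This is a hand-written approximation over Seq; the real code emits the equivalent comparison as a tree and falls back to scala.math.signum over .length when the sequence type has no lengthCompare member.

object LengthGuardSketch {
  def lengthOk(xs: Seq[Any], expected: Int, isStar: Boolean): Boolean =
    (xs ne null) && {                        // the null check guards the whole comparison
      val cmp = xs.lengthCompare(expected)   // avoids forcing more of the sequence than needed
      if (isStar) cmp >= 0 else cmp == 0
    }

  def main(args: Array[String]): Unit = {
    println(lengthOk(Seq(1, 2, 3), expected = 2, isStar = true))  // true: 3 >= 2
    println(lengthOk(Seq(1, 2, 3), expected = 2, isStar = false)) // false: exact arity required
  }
}
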
// TODO: to be called when there's a def unapplyProd(x: T): U
// U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
- //
// for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
- class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
- // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
- /*override def equals(x$1: Any): Boolean = ...
- val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
- */
- // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
- // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
- // debug.patmat("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
- // debug.patmat("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
- private def constructorTp = fun.tpe
-
- def isTyped = fun.isTyped
-
- // to which type should the previous binder be casted?
- def paramType = constructorTp.finalResultType
-
- def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
- protected def rawSubPatTypes = constructorTp.paramTypes
-
+ class ExtractorCallProd(aligner: PatternAligned, val fun: Tree, val args: List[Tree]) extends ExtractorCall(aligner) {
/** Create the TreeMaker that embodies this extractor call
*
* `binder` has been casted to `paramType` if necessary
@@ -553,34 +504,27 @@ trait MatchTranslation { self: PatternMatching =>
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
// make an exception for classes under the scala package as they should be well-behaved,
// to optimize matching on List
- val mutableBinders =
+ val mutableBinders = (
if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
(paramAccessors exists (_.isMutable)))
subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
else Nil
+ )
// checks binder ne null before chaining to the next extractor
ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
- override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+ override protected def tupleSel(binder: Symbol)(i: Int): Tree = {
val accessors = binder.caseFieldAccessors
if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
-
- override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
}
- class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
- private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
-
- def tpe = extractorCall.tpe
- def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
- def paramType = tpe.paramTypes.head
- def resultType = tpe.finalResultType
- def isSeq = extractorCall.symbol.name == nme.unapplySeq
+ class ExtractorCallRegular(aligner: PatternAligned, extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall(aligner) {
+ val Unapplied(fun) = extractorCallIncludingDummy
/** Create the TreeMaker that embodies this extractor call
*
@@ -593,82 +537,53 @@ trait MatchTranslation { self: PatternMatching =>
* Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
*/
def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
- // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+ // the extractor call (applied to the binder bound by the flatMap corresponding
+ // to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
+ // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely
+ // wrong when isSeq, and resultInMonad should always be correct since it comes
+ // directly from the extractor's result type
+ val binder = freshSym(pos, pureType(resultInMonad))
+
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
+ subPatBinders,
+ subPatRefs(binder),
+ aligner.isBool,
+ checkedLength,
+ patBinderOrCasted,
+ ignoredSubPatBinders
+ )
}
override protected def seqTree(binder: Symbol): Tree =
- if (firstIndexingBinder == 0) CODE.REF(binder)
+ if (firstIndexingBinder == 0) REF(binder)
else super.seqTree(binder)
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ // require (totalArity > 0 && (!lastIsStar || isSeq))
override protected def subPatRefs(binder: Symbol): List[Tree] =
- if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+ if (aligner.isSingle) REF(binder) :: Nil // special case for extractors
else super.subPatRefs(binder)
protected def spliceApply(binder: Symbol): Tree = {
object splice extends Transformer {
+ def binderRef(pos: Position): Tree =
+ REF(binder) setPos pos
override def transform(t: Tree) = t match {
+ // duplicated with the extractor Unapplied
case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
- treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
- case _ => super.transform(t)
+ treeCopy.Apply(t, x, binderRef(i.pos) :: Nil)
+            // SI-7868 Account for numeric widening, e.g. <unapply-selector>.toInt
+ case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) =>
+ treeCopy.Apply(t, x, treeCopy.Select(sel, binderRef(i.pos), name) :: Nil)
+ case _ =>
+ super.transform(t)
}
}
- splice.transform(extractorCallIncludingDummy)
- }
-
- // what's the extractor's result type in the monad?
- // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
- protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
- else matchMonadResult(resultType)
- }
-
- protected lazy val rawSubPatTypes =
- if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(!isSeq && nbSubPats == 1) List(resultInMonad)
- else getProductArgs(resultInMonad) match {
- case Nil => List(resultInMonad)
- case x => x
- }
-
- override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
- }
-
- /** A conservative approximation of which patterns do not discern anything.
- * They are discarded during the translation.
- */
- object WildcardPattern {
- def unapply(pat: Tree): Boolean = pat match {
- case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
- case Star(WildcardPattern()) => true
- case x: Ident => treeInfo.isVarPattern(x)
- case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
- case EmptyTree => true
- case _ => false
- }
- }
-
- object PatternBoundToUnderscore {
- def unapply(pat: Tree): Boolean = pat match {
- case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
- case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
- case Typed(PatternBoundToUnderscore(), _) => true
- case _ => false
+ splice transform extractorCallIncludingDummy
}
- }
- object Bound {
- def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
- case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
- Some((t.symbol, p))
- case _ => None
- }
+ override def rawSubPatTypes = aligner.extractor.varargsTypes
}
}
-} \ No newline at end of file
+}
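
[Editor's note] The hunk above rewires ExtractorCallRegular so that the <unapply-selector> dummy inside the typed extractor call is spliced with a reference to the actual binder. As a standalone illustration of what that call amounts to at runtime (ordinary library-level Scala, not compiler internals; the Name extractor is hypothetical), the translation of a `case Name(f, l)` pattern boils down to applying unapply to the scrutinee and reading the sub-patterns off the Option result:

    object Name {
      def unapply(s: String): Option[(String, String)] =
        s.split(" ", 2) match {
          case Array(first, last) => Some((first, last))
          case _                  => None
        }
    }

    object ExtractorDemo extends App {
      val scrutinee = "Ada Lovelace"

      // Roughly what `scrutinee match { case Name(f, l) => ... }` is translated to:
      Name.unapply(scrutinee) match {
        case Some((f, l)) => println(s"first=$f, last=$l")
        case None         => println("no match")
      }

      // The surface-level pattern the translator starts from:
      scrutinee match {
        case Name(f, l) => println(s"first=$f, last=$l")
        case _          => println("no match")
      }
    }
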
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 202f3444f8..a80f158949 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -11,7 +11,6 @@ import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.NoPosition
/** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen.
*
@@ -19,13 +18,8 @@ import scala.reflect.internal.util.NoPosition
* mostly agnostic to whether we're in optimized/pure (virtualized) mode.
*/
trait MatchTreeMaking extends MatchCodeGen with Debugging {
- import PatternMatchingStats._
- import global.{Tree, Type, Symbol, CaseDef, atPos, settings,
- Select, Block, ThisType, SingleType, NoPrefix, NoType, needsOuterTest,
- ConstantType, Literal, Constant, gen, This, EmptyTree, map2, NoSymbol, Traverser,
- Function, Typed, treeInfo, TypeRef, DefTree, Ident, nme}
-
- import global.definitions.{SomeClass, AnyRefClass, UncheckedClass, BooleanClass}
+ import global._
+ import definitions._
final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
object Suppression {
@@ -60,7 +54,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
if (currSub ne null) {
- debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+ debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ ((this, currSub, outerSubst)))
Thread.dumpStack()
}
else currSub = outerSubst >> substitution
@@ -85,7 +79,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def chainBefore(next: Tree)(casegen: Casegen): Tree
}
- trait NoNewBinders extends TreeMaker {
+ sealed trait NoNewBinders extends TreeMaker {
protected val localSubstitution: Substitution = EmptySubstitution
}
@@ -100,7 +94,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
- override def toString = "B"+(body, matchPt)
+ override def toString = "B"+((body, matchPt))
}
case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
@@ -111,12 +105,12 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
override def toString = "S"+ localSubstitution
}
- abstract class FunTreeMaker extends TreeMaker {
+ sealed abstract class FunTreeMaker extends TreeMaker {
val nextBinder: Symbol
def pos = nextBinder.pos
}
- abstract class CondTreeMaker extends FunTreeMaker {
+ sealed abstract class CondTreeMaker extends FunTreeMaker {
val prevBinder: Symbol
val nextBinderTp: Type
val cond: Tree
@@ -132,7 +126,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
protected val debugInfoEmitVars = !settings.optimise.value
- trait PreserveSubPatBinders extends TreeMaker {
+ sealed trait PreserveSubPatBinders extends TreeMaker {
val subPatBinders: List[Symbol]
val subPatRefs: List[Tree]
val ignoredSubPatBinders: Set[Symbol]
@@ -165,7 +159,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
override def subPatternsAsSubstitution =
Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
- import CODE._
def bindSubPats(in: Tree): Tree =
if (!emitVars) in
else {
@@ -180,7 +173,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
else {
// only store binders actually used
val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
- Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(ValDef(_, _)), in)
}
}
}
@@ -207,6 +200,16 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def extraStoredBinders: Set[Symbol] = Set()
+ debug.patmat(s"""
+ |ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
+ | $subPatBinders
+ | $subPatRefs
+ | $extractorReturnsBoolean
+ | $checkedLength
+ | $prevBinder
+ | $ignoredSubPatBinders
+ |}""".stripMargin)
+
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond match {
case Some(cond) =>
@@ -220,7 +223,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
)
}
- override def toString = "X"+(extractor, nextBinder.name)
+ override def toString = "X"+((extractor, nextBinder.name))
}
/**
@@ -274,7 +277,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
}
}
- override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
+ override def toString = "P"+((prevBinder.name, extraCond getOrElse "", localSubstitution))
}
object IrrefutableExtractorTreeMaker {
@@ -284,8 +287,8 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
case TypeRef(_, SomeClass, _) => true
// probably not useful since this type won't be inferred nor can it be written down (yet)
- case ConstantType(Constant(true)) => true
- case _ => false
+ case ConstantTrue => true
+ case _ => false
}
def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
@@ -324,9 +327,9 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
val expectedOuter = expectedTp.prefix match {
- case ThisType(clazz) => THIS(clazz)
- case pre if pre != NoType => REF(pre.prefix, pre.termSymbol)
- case _ => mkTRUE // fallback for SI-6183
+ case ThisType(clazz) => This(clazz)
+ case NoType => mkTRUE // fallback for SI-6183
+ case pre => REF(pre.prefix, pre.termSymbol)
}
// ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
@@ -389,11 +392,13 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
**/
case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
import TypeTestTreeMaker._
- debug.patmat("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+ debug.patmat("TTTM"+((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp)))
lazy val outerTestNeeded = (
- !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
- && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
+ (expectedTp.prefix ne NoPrefix)
+ && !expectedTp.prefix.typeSymbol.isPackageClass
+ && needsOuterTest(expectedTp, testedBinder.info, matchOwner)
+ )
// the logic to generate the run-time test that follows from the fact that
// a `prevBinder` is expected to have type `expectedTp`
@@ -403,44 +408,52 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
import cs._
- def default =
- // do type test first to ensure we won't select outer on null
- if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
- else typeTest(testedBinder, expectedTp)
-
// propagate expected type
def expTp(t: Tree): t.type = t setType expectedTp
+ def testedWide = testedBinder.info.widen
+ def expectedWide = expectedTp.widen
+ def isAnyRef = testedWide <:< AnyRefTpe
+ def isAsExpected = testedWide <:< expectedTp
+ def isExpectedPrimitiveType = isAsExpected && isPrimitiveValueType(expectedTp)
+ def isExpectedReferenceType = isAsExpected && (expectedTp <:< AnyRefTpe)
+ def mkNullTest = nonNullTest(testedBinder)
+ def mkOuterTest = outerTest(testedBinder, expectedTp)
+ def mkTypeTest = typeTest(testedBinder, expectedWide)
+
+ def mkEqualsTest(lhs: Tree): cs.Result = equalsTest(lhs, testedBinder)
+ def mkEqTest(lhs: Tree): cs.Result = eqTest(lhs, testedBinder)
+ def addOuterTest(res: cs.Result): cs.Result = if (outerTestNeeded) and(res, mkOuterTest) else res
+
+ // If we conform to expected primitive type:
+ // it cannot be null and cannot have an outer pointer. No further checking.
+ // If we conform to expected reference type:
+ // have to test outer and non-null
+ // If we do not conform to expected type:
+ // have to test type and outer (non-null is implied by successful type test)
+ def mkDefault = (
+ if (isExpectedPrimitiveType) tru
+ else addOuterTest(
+ if (isExpectedReferenceType) mkNullTest
+ else mkTypeTest
+ )
+ )
+
// true when called to type-test the argument to an extractor
// don't do any fancy equality checking, just test the type
- if (extractorArgTypeTest) default
+ // TODO: verify that we don't need to special-case Array
+ // I think it's okay:
+ // - the isInstanceOf test includes a test for the element type
+ // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
+ if (extractorArgTypeTest) mkDefault
else expectedTp match {
- // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
- // this implies sym.isStable
- case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
- // must use == to support e.g. List() == Nil
- case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
- case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
- => eqTest(expTp(CODE.NULL), testedBinder)
- case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
- case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
-
- // TODO: verify that we don't need to special-case Array
- // I think it's okay:
- // - the isInstanceOf test includes a test for the element type
- // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
- case _ if testedBinder.info.widen <:< expectedTp =>
- // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
- // since the types conform, no further checking is required
- if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
- // have to test outer and non-null only when it's a reference type
- else if (expectedTp <:< AnyRefClass.tpe) {
- // do non-null check first to ensure we won't select outer on null
- if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
- else nonNullTest(testedBinder)
- } else default
-
- case _ => default
+ // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types - this implies sym.isStable
+ case SingleType(_, sym) => and(mkEqualsTest(gen.mkAttributedQualifier(expectedTp)), mkTypeTest)
+ case ThisType(sym) if sym.isModule => and(mkEqualsTest(CODE.REF(sym)), mkTypeTest) // must use == to support e.g. List() == Nil
+ case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(CODE.NULL))
+ case ConstantType(const) => mkEqualsTest(expTp(Literal(const)))
+ case ThisType(sym) => mkEqTest(expTp(This(sym)))
+ case _ => mkDefault
}
}
@@ -452,7 +465,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
- override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
+ override def toString = "TT"+((expectedTp, testedBinder.name, nextBinderTp))
}
// need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
@@ -463,7 +476,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
val cond = codegen._equals(patTree, prevBinder)
val res = CODE.REF(prevBinder)
- override def toString = "ET"+(prevBinder.name, patTree)
+ override def toString = "ET"+((prevBinder.name, patTree))
}
case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
@@ -474,7 +487,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
altss = altss map (alts => propagateSubstitution(alts, substitution))
}
- def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
+ def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = {
atPos(pos){
// one alternative may still generate multiple trees (e.g., an extractor call + equality test)
// (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
@@ -482,7 +495,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(mkTRUE)))(casegen))
)
- val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => mkFALSE))
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanTpe)(combinedAlts, Some(x => mkFALSE))
codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
}
}
@@ -523,12 +536,13 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
- fixerUpper(owner, scrut.pos){
- def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
+ fixerUpper(owner, scrut.pos) {
+ def matchFailGen = matchFailGenOverride orElse Some(Throw(MatchErrorClass.tpe, _: Tree))
+
debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
val (suppression, requireSwitch): (Suppression, Boolean) =
- if (settings.XnoPatmatAnalysis.value) (Suppression.NoSuppression, false)
+ if (settings.XnoPatmatAnalysis) (Suppression.NoSuppression, false)
else scrut match {
case Typed(tree, tpt) =>
val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
@@ -587,18 +601,17 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
t match {
case Function(_, _) if t.symbol == NoSymbol =>
t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
- debug.patmat("new symbol for "+ (t, t.symbol.ownerChain))
+ debug.patmat("new symbol for "+ ((t, t.symbol.ownerChain)))
case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
- debug.patmat("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ debug.patmat("fundef: "+ ((t, t.symbol.ownerChain, currentOwner.ownerChain)))
t.symbol.owner = currentOwner
case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
- debug.patmat("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
- if(d.symbol.moduleClass ne NoSymbol)
- d.symbol.moduleClass.owner = currentOwner
+ debug.patmat("def: "+ ((d, d.symbol.ownerChain, currentOwner.ownerChain)))
+ d.symbol.moduleClass andAlso (_.owner = currentOwner)
d.symbol.owner = currentOwner
// case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
- debug.patmat("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ debug.patmat("untouched "+ ((t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)))
case _ =>
}
super.traverse(t)
@@ -611,4 +624,4 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// currentRun.trackerFactory.snapshot()
}
}
-} \ No newline at end of file
+}
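
[Editor's note] The renderCondition rewrite above folds the old if/else chains into mkDefault plus small helpers. A minimal standalone sketch of the same decision order, using plain case objects in place of the cs.Result tree fragments (names are illustrative, not the compiler's):

    object TypeTestSketch extends App {
      sealed trait Check
      case object Pass       extends Check                  // tru: nothing to test
      case object NullCheck  extends Check                  // nonNullTest
      case object TypeCheck  extends Check                  // isInstanceOf test
      case object OuterCheck extends Check                  // outer-pointer test
      final case class And(a: Check, b: Check) extends Check

      def mkDefault(isExpectedPrimitiveType: Boolean,
                    isExpectedReferenceType: Boolean,
                    outerTestNeeded: Boolean): Check = {
        def addOuterTest(res: Check): Check =
          if (outerTestNeeded) And(res, OuterCheck) else res

        if (isExpectedPrimitiveType) Pass          // conforming primitive: cannot be null, no outer pointer
        else addOuterTest(
          if (isExpectedReferenceType) NullCheck   // conforming reference: null test (+ outer) only
          else TypeCheck                           // not conforming: type test (+ outer)
        )
      }

      println(mkDefault(isExpectedPrimitiveType = true,  isExpectedReferenceType = false, outerTestNeeded = false)) // Pass
      println(mkDefault(isExpectedPrimitiveType = false, isExpectedReferenceType = true,  outerTestNeeded = true))  // And(NullCheck,OuterCheck)
      println(mkDefault(isExpectedPrimitiveType = false, isExpectedReferenceType = false, outerTestNeeded = false)) // TypeCheck
    }
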
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
new file mode 100644
index 0000000000..a7d7680db1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
@@ -0,0 +1,86 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+trait MatchWarnings {
+ self: PatternMatching =>
+
+ import global._
+
+ trait TreeMakerWarnings {
+ self: MatchTranslator =>
+
+ import typer.context
+
+ // Why is it so difficult to say "here's a name and a context, give me any
+ // matching symbol in scope" ? I am sure this code is wrong, but attempts to
+ // use the scopes of the contexts in the enclosing context chain discover
+    // nothing. How to associate a name with a symbol would be a wonderful
+ // linkage for which to establish a canonical acquisition mechanism.
+ private def matchingSymbolInScope(pat: Tree): Symbol = {
+ def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
+ case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
+ case _ => NoSymbol
+ }
+ pat match {
+ case Bind(name, _) =>
+ context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
+ res orElse declarationOfName(ctx.owner.rawInfo, name))
+ case _ => NoSymbol
+ }
+ }
+
+ // Issue better warnings than "unreachable code" when people mis-use
+ // variable patterns thinking they bind to existing identifiers.
+ //
+ // Possible TODO: more deeply nested variable patterns, like
+ // case (a, b) => 1 ; case (c, d) => 2
+ // However this is a pain (at least the way I'm going about it)
+ // and I have to think these detailed errors are primarily useful
+ // for beginners, not people writing nested pattern matches.
+ def checkMatchVariablePatterns(cases: List[CaseDef]) {
+ // A string describing the first variable pattern
+ var vpat: String = null
+ // Using an iterator so we can recognize the last case
+ val it = cases.iterator
+
+ def addendum(pat: Tree) = {
+ matchingSymbolInScope(pat) match {
+ case NoSymbol => ""
+ case sym =>
+ val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
+ s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ }
+ }
+
+ while (it.hasNext) {
+ val cdef = it.next()
+ // If a default case has been seen, then every succeeding case is unreachable.
+ if (vpat != null)
+ context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+ // If this is a default case and more cases follow, warn about this one so
+ // we have a reason to mention its pattern variable name and any corresponding
+ // symbol in scope. Errors will follow from the remaining cases, at least
+ // once we make the above warning an error.
+ else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
+ val vpatName = cdef.pat match {
+ case Bind(name, _) => s" '$name'"
+ case _ => ""
+ }
+ vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
+ context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ }
+ }
+ }
+ }
+} \ No newline at end of file
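
[Editor's note] A standalone example of the misuse checkMatchVariablePatterns is written to diagnose: a lowercase pattern silently binds a fresh variable instead of comparing against the identifier in scope, making later cases unreachable; backticks give the intended behavior. The names here are illustrative only.

    object VariablePatternDemo extends App {
      val two = 2

      def describe(n: Int): String = n match {
        case two => "matches anything: binds a new `two`"   // variable pattern, warned about above
        case _   => "unreachable"
      }

      def describeFixed(n: Int): String = n match {
        case `two` => "really compares against the val two"
        case other => s"something else: $other"
      }

      println(describe(1))       // matches anything: binds a new `two`
      println(describeFixed(1))  // something else: 1
    }
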
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala
new file mode 100644
index 0000000000..e84ccbf754
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala
@@ -0,0 +1,155 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package transform
+package patmat
+
+/** An extractor returns: F1, F2, ..., Fi, opt[Seq[E] or E*]
+ * A case matches: P1, P2, ..., Pj, opt[Seq[E]]
+ * Put together: P1/F1, P2/F2, ... Pi/Fi, Pi+1/E, Pi+2/E, ... Pj/E, opt[Seq[E]]
+ *
+ * Here Pi/Fi is the last pattern to match the fixed arity section.
+ *
+ * productArity: the value of i, i.e. the number of non-sequence types in the extractor
+ * nonStarArity: the value of j, i.e. the number of non-star patterns in the case definition
+ * elementArity: j - i, i.e. the number of non-star patterns which must match sequence elements
+ * starArity: 1 or 0 based on whether there is a star (sequence-absorbing) pattern
+ * totalArity: nonStarArity + starArity, i.e. the number of patterns in the case definition
+ *
+ * Note that productArity is a function only of the extractor, and
+ * nonStar/star/totalArity are all functions of the patterns. The key
+ * value for aligning and typing the patterns is elementArity, as it
+ * is derived from both sets of information.
+ */
+trait PatternExpander[Pattern, Type] {
+ /** You'll note we're not inside the cake. "Pattern" and "Type" are
+ * arbitrary types here, and NoPattern and NoType arbitrary values.
+ */
+ def NoPattern: Pattern
+ def NoType: Type
+
+ /** It's not optimal that we're carrying both sequence and repeated
+ * type here, but the implementation requires more unraveling before
+ * it can be avoided.
+ *
+ * sequenceType is Seq[T], elementType is T, repeatedType is T*.
+ */
+ sealed case class Repeated(sequenceType: Type, elementType: Type, repeatedType: Type) {
+ def exists = elementType != NoType
+
+ def elementList = if (exists) elementType :: Nil else Nil
+ def sequenceList = if (exists) sequenceType :: Nil else Nil
+ def repeatedList = if (exists) repeatedType :: Nil else Nil
+
+ override def toString = s"${elementType}*"
+ }
+ object NoRepeated extends Repeated(NoType, NoType, NoType) {
+ override def toString = "<none>"
+ }
+
+ final case class Patterns(fixed: List[Pattern], star: Pattern) {
+ def hasStar = star != NoPattern
+ def starArity = if (hasStar) 1 else 0
+ def nonStarArity = fixed.length
+ def totalArity = nonStarArity + starArity
+ def starPatterns = if (hasStar) star :: Nil else Nil
+ def all = fixed ::: starPatterns
+
+ override def toString = all mkString ", "
+ }
+
+ /** An 'extractor' can be a case class or an unapply or unapplySeq method.
+ * Decoding what it is that they extract takes place before we arrive here,
+ * so that this class can concentrate only on the relationship between
+ * patterns and types.
+ *
+ * In a case class, the class is the unextracted type and the fixed and
+ * repeated types are derived from its constructor parameters.
+ *
+ * In an unapply, this is reversed: the parameter to the unapply is the
+ * unextracted type, and the other types are derived based on the return
+ * type of the unapply method.
+ *
+ * In other words, this case class and unapply are encoded the same:
+ *
+ * case class Foo(x: Int, y: Int, zs: Char*)
+ * def unapplySeq(x: Foo): Option[(Int, Int, Seq[Char])]
+ *
+ * Both are Extractor(Foo, Int :: Int :: Nil, Repeated(Seq[Char], Char, Char*))
+ *
+ * @param whole The type in its unextracted form
+ * @param fixed The non-sequence types which are extracted
+ * @param repeated The sequence type which is extracted
+ */
+ final case class Extractor(whole: Type, fixed: List[Type], repeated: Repeated) {
+ require(whole != NoType, s"expandTypes($whole, $fixed, $repeated)")
+
+ def productArity = fixed.length
+ def hasSeq = repeated.exists
+ def elementType = repeated.elementType
+ def sequenceType = repeated.sequenceType
+ def allTypes = fixed ::: repeated.sequenceList
+ def varargsTypes = fixed ::: repeated.repeatedList
+ def isErroneous = allTypes contains NoType
+
+ private def typeStrings = fixed.map("" + _) ::: ( if (hasSeq) List("" + repeated) else Nil )
+
+ def offeringString = if (isErroneous) "<error>" else typeStrings match {
+ case Nil => "Boolean"
+ case tp :: Nil => tp
+ case tps => tps.mkString("(", ", ", ")")
+ }
+ override def toString = "%s => %s".format(whole, offeringString)
+ }
+
+ final case class TypedPat(pat: Pattern, tpe: Type) {
+ override def toString = s"$pat: $tpe"
+ }
+
+ /** If elementArity is...
+ * 0: A perfect match between extractor and the fixed patterns.
+ * If there is a star pattern it will match any sequence.
+ * > 0: There are more patterns than products. There will have to be a
+ * sequence which can populate at least <elementArity> patterns.
+ * < 0: There are more products than patterns: compile time error.
+ */
+ final case class Aligned(patterns: Patterns, extractor: Extractor) {
+ def elementArity = patterns.nonStarArity - productArity
+ def productArity = extractor.productArity
+ def starArity = patterns.starArity
+ def totalArity = patterns.totalArity
+
+ def wholeType = extractor.whole
+ def sequenceType = extractor.sequenceType
+ def productTypes = extractor.fixed
+ def extractedTypes = extractor.allTypes
+ def typedNonStarPatterns = products ::: elements
+ def typedPatterns = typedNonStarPatterns ::: stars
+
+ def isBool = !isSeq && productArity == 0
+ def isSingle = !isSeq && totalArity == 1
+ def isStar = patterns.hasStar
+ def isSeq = extractor.hasSeq
+
+ private def typedAsElement(pat: Pattern) = TypedPat(pat, extractor.elementType)
+ private def typedAsSequence(pat: Pattern) = TypedPat(pat, extractor.sequenceType)
+ private def productPats = patterns.fixed take productArity
+ private def elementPats = patterns.fixed drop productArity
+ private def products = (productPats, productTypes).zipped map TypedPat
+ private def elements = elementPats map typedAsElement
+ private def stars = patterns.starPatterns map typedAsSequence
+
+ override def toString = s"""
+ |Aligned {
+ | patterns $patterns
+ | extractor $extractor
+ | arities $productArity/$elementArity/$starArity // product/element/star
+ | typed ${typedPatterns mkString ", "}
+ |}""".stripMargin.trim
+ }
+}
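
[Editor's note] A small worked instance of the arity bookkeeping documented above, computed by hand for `case Foo(a, b, c, rest @ _*)` matched against `case class Foo(x: Int, ys: String*)` (hypothetical names); this is plain arithmetic following the definitions in Patterns/Extractor/Aligned, not the compiler's own code:

    object ArityDemo extends App {
      // case class Foo(x: Int, ys: String*)  matched by  case Foo(a, b, c, rest @ _*)
      val productArity = 1                              // one non-sequence field: x
      val nonStarArity = 3                              // fixed patterns: a, b, c
      val starArity    = 1                              // rest @ _*
      val totalArity   = nonStarArity + starArity       // 4
      val elementArity = nonStarArity - productArity    // 2: b and c must come from ys

      println(s"product=$productArity nonStar=$nonStarArity element=$elementArity star=$starArity total=$totalArity")
    }
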
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index df4e699620..f6c960d089 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -34,23 +34,25 @@ import scala.reflect.internal.util.Position
* - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
* - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure
*/
-trait PatternMatching extends Transform with TypingTransformers
+trait PatternMatching extends Transform
+ with TypingTransformers
with Debugging
with Interface
with MatchTranslation
with MatchTreeMaking
with MatchCodeGen
+ with MatchCps
with ScalaLogic
with Solving
with MatchAnalysis
- with MatchOptimization {
+ with MatchOptimization
+ with MatchWarnings
+ with ScalacPatternExpanders {
import global._
val phaseName: String = "patmat"
- def newTransformer(unit: CompilationUnit): Transformer =
- if (opt.virtPatmat) new MatchTransformer(unit)
- else noopTransformer
+ def newTransformer(unit: CompilationUnit): Transformer = new MatchTransformer(unit)
class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
override def transform(tree: Tree): Tree = tree match {
@@ -96,24 +98,26 @@ trait Debugging {
// TODO: the inliner fails to inline the closures to debug.patmat unless the method is nested in an object
object debug {
val printPatmat = global.settings.Ypatmatdebug.value
- @inline final def patmat(s: => String) = if (printPatmat) println(s)
+ @inline final def patmat(s: => String) = if (printPatmat) Console.err.println(s)
+ @inline final def patmatResult[T](s: => String)(result: T): T = {
+ if (printPatmat) Console.err.println(s + ": " + result)
+ result
+ }
}
}
trait Interface extends ast.TreeDSL {
- import global.{newTermName, analyzer, Type, ErrorType, Symbol, Tree}
+ import global._
import analyzer.Typer
// 2.10/2.11 compatibility
- protected final def dealiasWiden(tp: Type) = tp.dealias // 2.11: dealiasWiden
- protected final def mkTRUE = CODE.TRUE_typed // 2.11: CODE.TRUE
- protected final def mkFALSE = CODE.FALSE_typed // 2.11: CODE.FALSE
- protected final def hasStableSymbol(p: Tree) = p.hasSymbol && p.symbol.isStable // 2.11: p.hasSymbolField && p.symbol.isStable
- protected final def devWarning(str: String) = global.debugwarn(str) // 2.11: omit
+ protected final def dealiasWiden(tp: Type) = tp.dealiasWiden
+ protected final def mkTRUE = CODE.TRUE
+ protected final def mkFALSE = CODE.FALSE
+ protected final def hasStableSymbol(p: Tree) = p.hasSymbolField && p.symbol.isStable
object vpmName {
val one = newTermName("one")
- val drop = newTermName("drop")
val flatMap = newTermName("flatMap")
val get = newTermName("get")
val guard = newTermName("guard")
@@ -132,8 +136,9 @@ trait Interface extends ast.TreeDSL {
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/** Interface with user-defined match monad?
- * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+ * if there's a <code>__match</code> in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+ {{{
type Matcher[P[_], M[+_], A] = {
def flatMap[B](f: P[A] => M[B]): M[B]
def orElse[B >: A](alternative: => M[B]): M[B]
@@ -147,12 +152,14 @@ trait Interface extends ast.TreeDSL {
def one[T](x: P[T]): M[T]
def guard[T](cond: P[Boolean], then: => P[T]): M[T]
}
+ }}}
* P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
- * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
+ * if no <code>__match</code> is found, we assume the following implementation (and generate optimized code accordingly)
+ {{{
object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
def zero = None
def one[T](x: T) = Some(x)
@@ -160,11 +167,13 @@ trait Interface extends ast.TreeDSL {
def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
}
+ }}}
*/
trait MatchMonadInterface {
val typer: Typer
val matchOwner = typer.context.owner
+ def pureType(tp: Type): Type = tp
def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
def reportMissingCases(pos: Position, counterExamples: List[String]) = {
@@ -174,16 +183,6 @@ trait Interface extends ast.TreeDSL {
typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
}
-
- def inMatchMonad(tp: Type): Type
- def pureType(tp: Type): Type
- final def matchMonadResult(tp: Type): Type =
- tp.baseType(matchMonadSym).typeArgs match {
- case arg :: Nil => arg
- case _ => ErrorType
- }
-
- protected def matchMonadSym: Symbol
}
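
[Editor's note] A standalone sketch of the Option-based strategy the interface comment above describes, written directly against Option rather than via a `__match` object in scope (so it runs without the virtualized-matching machinery); it shows how `case Some(y) if y > 0 => y; case _ => 0` maps onto one/guard/orElse/runOrElse:

    object MatchMonadDemo extends App {
      def one[T](x: T): Option[T] = Some(x)
      def guard[T](cond: Boolean, thenp: => T): Option[T] = if (cond) Some(thenp) else None
      def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))

      def positiveOrZero(x: Option[Int]): Int =
        runOrElse(x) { scrut =>
          val case1 = scrut match {             // case Some(y) if y > 0 => y
            case Some(y) => guard(y > 0, y)
            case _       => None
          }
          case1 orElse one(0)                   // case _ => 0
        }

      println(positiveOrZero(Some(3)))   // 3
      println(positiveOrZero(Some(-1)))  // 0
      println(positiveOrZero(None))      // 0
    }
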
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
new file mode 100644
index 0000000000..7858cb5586
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
@@ -0,0 +1,154 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package transform
+package patmat
+
+/** This is scalac-specific logic layered on top of the scalac-agnostic
+ * "matching products to patterns" logic defined in PatternExpander.
+ */
+trait ScalacPatternExpanders {
+ val global: Global
+
+ import global._
+ import definitions._
+ import treeInfo._
+
+ type PatternAligned = ScalacPatternExpander#Aligned
+
+ implicit class AlignedOps(val aligned: PatternAligned) {
+ import aligned._
+ def expectedTypes = typedPatterns map (_.tpe)
+ def unexpandedFormals = extractor.varargsTypes
+ }
+ trait ScalacPatternExpander extends PatternExpander[Tree, Type] {
+ def NoPattern = EmptyTree
+ def NoType = global.NoType
+
+ def newPatterns(patterns: List[Tree]): Patterns = patterns match {
+ case init :+ last if isStar(last) => Patterns(init, last)
+ case _ => Patterns(patterns, NoPattern)
+ }
+ def elementTypeOf(tpe: Type) = {
+ val seq = repeatedToSeq(tpe)
+
+ ( typeOfMemberNamedHead(seq)
+ orElse typeOfMemberNamedApply(seq)
+ orElse definitions.elementType(ArrayClass, seq)
+ )
+ }
+ def newExtractor(whole: Type, fixed: List[Type], repeated: Repeated): Extractor =
+      logResult(s"newExtractor($whole, $fixed, $repeated)")(Extractor(whole, fixed, repeated))
+
+ // Turn Seq[A] into Repeated(Seq[A], A, A*)
+ def repeatedFromSeq(seqType: Type): Repeated = {
+ val elem = elementTypeOf(seqType)
+ val repeated = scalaRepeatedType(elem)
+
+ Repeated(seqType, elem, repeated)
+ }
+ // Turn A* into Repeated(Seq[A], A, A*)
+ def repeatedFromVarargs(repeated: Type): Repeated =
+ Repeated(repeatedToSeq(repeated), repeatedToSingle(repeated), repeated)
+
+ /** In this case we are basing the pattern expansion on a case class constructor.
+ * The argument is the MethodType carried by the primary constructor.
+ */
+ def applyMethodTypes(method: Type): Extractor = {
+ val whole = method.finalResultType
+
+ method.paramTypes match {
+ case init :+ last if isScalaRepeatedParamType(last) => newExtractor(whole, init, repeatedFromVarargs(last))
+ case tps => newExtractor(whole, tps, NoRepeated)
+ }
+ }
+
+ /** In this case, expansion is based on an unapply or unapplySeq method.
+ * Unfortunately the MethodType does not carry the information of whether
+ * it was unapplySeq, so we have to funnel that information in separately.
+ */
+ def unapplyMethodTypes(method: Type, isSeq: Boolean): Extractor = {
+ val whole = firstParamType(method)
+ val result = method.finalResultType
+ val expanded = (
+ if (result =:= BooleanTpe) Nil
+ else typeOfMemberNamedGet(result) match {
+ case rawGet if !hasSelectors(rawGet) => rawGet :: Nil
+ case rawGet => typesOfSelectors(rawGet)
+ }
+ )
+ expanded match {
+ case init :+ last if isSeq => newExtractor(whole, init, repeatedFromSeq(last))
+ case tps => newExtractor(whole, tps, NoRepeated)
+ }
+ }
+ }
+ object alignPatterns extends ScalacPatternExpander {
+    /** Converts a T => (A, B, C) extractor to a T => ((A, B, C)) extractor.
+ */
+ def tupleExtractor(extractor: Extractor): Extractor =
+ extractor.copy(fixed = tupleType(extractor.fixed) :: Nil)
+
+ private def validateAligned(tree: Tree, aligned: Aligned): Aligned = {
+ import aligned._
+
+ def owner = tree.symbol.owner
+ def offering = extractor.offeringString
+ def symString = tree.symbol.fullLocationString
+ def offerString = if (extractor.isErroneous) "" else s" offering $offering"
+ def arityExpected = ( if (extractor.hasSeq) "at least " else "" ) + productArity
+
+ def err(msg: String) = currentUnit.error(tree.pos, msg)
+ def warn(msg: String) = currentUnit.warning(tree.pos, msg)
+ def arityError(what: String) = err(s"$what patterns for $owner$offerString: expected $arityExpected, found $totalArity")
+
+ if (isStar && !isSeq)
+ err("Star pattern must correspond with varargs or unapplySeq")
+ else if (elementArity < 0)
+ arityError("not enough")
+ else if (elementArity > 0 && !extractor.hasSeq)
+ arityError("too many")
+
+ aligned
+ }
+
+ def apply(sel: Tree, args: List[Tree]): Aligned = {
+ val fn = sel match {
+ case Unapplied(fn) => fn
+ case _ => sel
+ }
+ val patterns = newPatterns(args)
+ val isSeq = sel.symbol.name == nme.unapplySeq
+ val isUnapply = sel.symbol.name == nme.unapply
+ val extractor = sel.symbol.name match {
+ case nme.unapply => unapplyMethodTypes(fn.tpe, isSeq = false)
+ case nme.unapplySeq => unapplyMethodTypes(fn.tpe, isSeq = true)
+ case _ => applyMethodTypes(fn.tpe)
+ }
+
+ /** Rather than let the error that is SI-6675 pollute the entire matching
+       * process, we will tuple the extractor before creating the Aligned so that
+ * it contains known good values.
+ */
+ def productArity = extractor.productArity
+ def acceptMessage = if (extractor.isErroneous) "" else s" to hold ${extractor.offeringString}"
+ val requiresTupling = isUnapply && patterns.totalArity == 1 && productArity > 1
+
+ if (settings.lint && requiresTupling && effectivePatternArity(args) == 1)
+ currentUnit.warning(sel.pos, s"${sel.symbol.owner} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)")
+
+ val normalizedExtractor = if (requiresTupling) tupleExtractor(extractor) else extractor
+ validateAligned(fn, Aligned(patterns, normalizedExtractor))
+ }
+
+ def apply(tree: Tree): Aligned = tree match {
+ case Apply(fn, args) => apply(fn, args)
+ case UnApply(fn, args) => apply(fn, args)
+ }
+ }
+}
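
[Editor's note] A standalone example of the SI-6675 shape the tupling warning above reports: Pair (a hypothetical extractor) yields two values, yet a single sub-pattern still compiles because the result is crushed into a 2-tuple, which is exactly what -Xlint flags.

    object Pair {
      def unapply(s: String): Option[(String, String)] =
        s.split("=", 2) match {
          case Array(k, v) => Some((k, v))
          case _           => None
        }
    }

    object TuplingDemo extends App {
      "a=b" match {
        case Pair(kv) => println(kv)             // single pattern: kv is bound to the whole tuple (a,b)
        case _        => println("no match")
      }
      "a=b" match {
        case Pair(k, v) => println(s"$k -> $v")  // the intended two-pattern form
        case _          => println("no match")
      }
    }
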
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index ec66bf6f20..1902606d86 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -8,18 +8,13 @@ package scala.tools.nsc.transform.patmat
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
+import scala.language.postfixOps
+import scala.reflect.internal.util.Collections._
// naive CNF translation and simple DPLL solver
trait Solving extends Logic {
import PatternMatchingStats._
trait CNF extends PropositionalLogic {
-
- /** Override Array creation for efficiency (to not go through reflection). */
- private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] {
- def runtimeClass: java.lang.Class[Clause] = classOf[Clause]
- final override def newArray(len: Int): Array[Clause] = new Array[Clause](len)
- }
-
import scala.collection.mutable.ArrayBuffer
type FormulaBuilder = ArrayBuffer[Clause]
def formulaBuilder = ArrayBuffer[Clause]()
@@ -31,9 +26,12 @@ trait Solving extends Logic {
type Formula = FormulaBuilder
def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
- type Clause = Set[Lit]
+ type Clause = collection.Set[Lit]
// a clause is a disjunction of distinct literals
- def clause(l: Lit*): Clause = l.toSet
+ def clause(l: Lit*): Clause = (
+ // neg/t7020.scala changes output 1% of the time, the non-determinism is quelled with this linked set
+ mutable.LinkedHashSet(l: _*)
+ )
type Lit
def Lit(sym: Sym, pos: Boolean = true): Lit
@@ -71,7 +69,7 @@ trait Solving extends Logic {
val TrueF = formula()
val FalseF = formula(clause())
def lit(s: Sym) = formula(clause(Lit(s)))
- def negLit(s: Sym) = formula(clause(Lit(s, false)))
+ def negLit(s: Sym) = formula(clause(Lit(s, pos = false)))
def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
def distribute(a: Formula, b: Formula, budget: Int): Formula =
@@ -139,7 +137,7 @@ trait Solving extends Logic {
def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
// adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
- val EmptyModel = Map.empty[Sym, Boolean]
+ val EmptyModel = collection.immutable.SortedMap.empty[Sym, Boolean]
val NoModel: Model = null
// returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
@@ -164,7 +162,7 @@ trait Solving extends Logic {
else Nil
}
val forced = unassigned flatMap { s =>
- force(Lit(s, true)) ++ force(Lit(s, false))
+ force(Lit(s, pos = true)) ++ force(Lit(s, pos = false))
}
debug.patmat("forced "+ forced)
val negated = negateModel(model)
@@ -211,9 +209,8 @@ trait Solving extends Logic {
// SI-7020 Linked- for deterministic counter examples.
val pos = new mutable.LinkedHashSet[Sym]()
val neg = new mutable.LinkedHashSet[Sym]()
- f.foreach{_.foreach{ lit =>
- if (lit.pos) pos += lit.sym else neg += lit.sym
- }}
+ mforeach(f)(lit => if (lit.pos) pos += lit.sym else neg += lit.sym)
+
// appearing in both positive and negative
val impures: mutable.LinkedHashSet[Sym] = pos intersect neg
// appearing only in either positive/negative positions
@@ -235,9 +232,8 @@ trait Solving extends Logic {
}
}
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
-
- satisfiableWithModel
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
+ satisfiableWithModel
}
}
}
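
[Editor's note] A standalone illustration (not the solver itself) of why the clause representation above moves from a plain Set to mutable.LinkedHashSet: iteration over an immutable hash set of more than four elements follows hash codes, while a LinkedHashSet iterates in insertion order, which is what keeps counter-example output deterministic (the neg/t7020 flakiness mentioned in the comment).

    object ClauseOrderDemo extends App {
      import scala.collection.mutable

      val lits = List("Sym#8", "Sym#1", "Sym#5", "Sym#3", "Sym#9")

      val unordered = Set(lits: _*)                     // a HashSet: order depends on hashing
      val ordered   = mutable.LinkedHashSet(lits: _*)   // order follows insertion

      println(unordered.mkString("unordered: ", " \\/ ", ""))
      println(ordered.mkString("ordered:   ", " \\/ ", ""))
    }
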
diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
index 62c584e97b..1e544e54f6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
@@ -24,6 +24,8 @@ trait Adaptations {
trait Adaptation {
self: Typer =>
+ import runDefinitions._
+
def checkValidAdaptation(t: Tree, args: List[Tree]): Boolean = {
def applyArg = t match {
case Apply(_, arg :: Nil) => arg
@@ -41,11 +43,11 @@ trait Adaptations {
def givenString = if (args.isEmpty) "<none>" else args.mkString(", ")
def adaptedArgs = if (args.isEmpty) "(): Unit" else args.mkString("(", ", ", "): " + applyArg.tpe)
- def adaptWarning(msg: String) = context.warning(t.pos, msg +
+ def adaptWarningMessage(msg: String, showAdaptation: Boolean = true) = msg +
"\n signature: " + sigString +
"\n given arguments: " + givenString +
- "\n after adaptation: " + callString + "(" + adaptedArgs + ")"
- )
+ (if (showAdaptation) "\n after adaptation: " + callString + "(" + adaptedArgs + ")" else "")
+
// A one-argument method accepting Object (which may look like "Any"
// at this point if the class is java defined) is a "leaky target" for
// which we should be especially reluctant to insert () or auto-tuple.
@@ -66,18 +68,21 @@ trait Adaptations {
)
}
- if (settings.noAdaptedArgs.value)
- adaptWarning("No automatic adaptation here: use explicit parentheses.")
- else if (settings.warnAdaptedArgs.value)
- adaptWarning(
- if (args.isEmpty) "Adapting argument list by inserting (): " + (
- if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous."
- else "this is unlikely to be what you want."
- )
- else "Adapting argument list by creating a " + args.size + "-tuple: this may not be what you want."
- )
+ if (settings.noAdaptedArgs)
+ context.warning(t.pos, adaptWarningMessage("No automatic adaptation here: use explicit parentheses."))
+ else if (args.isEmpty) {
+ if (settings.future)
+ context.error(t.pos, adaptWarningMessage("Adaptation of argument list by inserting () has been removed.", showAdaptation = false))
+ else {
+ val msg = "Adaptation of argument list by inserting () has been deprecated: " + (
+ if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous."
+ else "this is unlikely to be what you want.")
+ context.unit.deprecationWarning(t.pos, adaptWarningMessage(msg))
+ }
+ } else if (settings.warnAdaptedArgs)
+ context.warning(t.pos, adaptWarningMessage(s"Adapting argument list by creating a ${args.size}-tuple: this may not be what you want."))
- !settings.noAdaptedArgs.value
+ !settings.noAdaptedArgs || !(args.isEmpty && settings.future)
}
}
}
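
[Editor's note] A standalone reminder of the two adaptations whose diagnostics this hunk reworks (names are illustrative). Auto-tupling still compiles and is reported under -Ywarn-adapted-args; the empty-argument-list adaptation is the one downgraded here to a deprecation warning, and an error under -Xfuture.

    object AdaptationDemo extends App {
      def takesPair(p: (Int, Int)): Int = p._1 + p._2

      // Adapting the argument list by creating a 2-tuple: compiles, warned under -Ywarn-adapted-args.
      println(takesPair(1, 2))

      // The other case, inserting () -- e.g. calling `def takesUnit(u: Unit)` as `takesUnit()` --
      // is the adaptation that this change deprecates (and makes an error with -Xfuture).
    }
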
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index b50486306d..5c02516c47 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -16,7 +16,6 @@ trait Analyzer extends AnyRef
with Typers
with Infer
with Implicits
- with Variances
with EtaExpansion
with SyntheticMethods
with Unapplies
@@ -30,8 +29,9 @@ trait Analyzer extends AnyRef
val global : Global
import global._
- object namerFactory extends SubComponent {
+ object namerFactory extends {
val global: Analyzer.this.global.type = Analyzer.this.global
+ } with SubComponent {
val phaseName = "namer"
val runsAfter = List[String]("parser")
val runsRightAfter = None
@@ -45,8 +45,9 @@ trait Analyzer extends AnyRef
}
}
- object packageObjects extends SubComponent {
+ object packageObjects extends {
val global: Analyzer.this.global.type = Analyzer.this.global
+ } with SubComponent {
val phaseName = "packageobjects"
val runsAfter = List[String]()
val runsRightAfter= Some("namer")
@@ -72,9 +73,10 @@ trait Analyzer extends AnyRef
}
}
- object typerFactory extends SubComponent {
- import scala.reflect.internal.TypesStats.typerNanos
+ object typerFactory extends {
val global: Analyzer.this.global.type = Analyzer.this.global
+ } with SubComponent {
+ import scala.reflect.internal.TypesStats.typerNanos
val phaseName = "typer"
val runsAfter = List[String]()
val runsRightAfter = Some("packageobjects")
@@ -88,22 +90,25 @@ trait Analyzer extends AnyRef
override def run() {
val start = if (Statistics.canEnable) Statistics.startTimer(typerNanos) else null
global.echoPhaseSummary(this)
- currentRun.units foreach applyPhase
- undoLog.clear()
- // need to clear it after as well or 10K+ accumulated entries are
- // uncollectable the rest of the way.
+ for (unit <- currentRun.units) {
+ applyPhase(unit)
+ undoLog.clear()
+ }
if (Statistics.canEnable) Statistics.stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
- unit.body = newTyper(rootContext(unit)).typed(unit.body)
- if (global.settings.Yrangepos.value && !global.reporter.hasErrors) global.validatePositions(unit.body)
+ val typer = newTyper(rootContext(unit))
+ unit.body = typer.typed(unit.body)
+ if (global.settings.Yrangepos && !global.reporter.hasErrors) global.validatePositions(unit.body)
for (workItem <- unit.toCheck) workItem()
- } finally {
+ if (settings.lint)
+ typer checkUnused unit
+ }
+ finally {
unit.toCheck.clear()
}
}
}
}
}
-
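
[Editor's note] The factories above switch to early-definition syntax, presumably so that `global` is initialized before the SubComponent body can observe it. A minimal standalone sketch of the difference, with hypothetical Base/Late/Early names (early initializers are legal in 2.11-era Scala, though deprecated later):

    object EarlyInitDemo extends App {
      abstract class Base {
        val name: String
        val greeting = "hello, " + name       // reads `name` while Base is being constructed
      }

      object Late extends Base {
        val name = "namer"                    // assigned only after Base ran: greeting sees null
      }

      object Early extends {
        val name = "namer"                    // assigned before Base's constructor runs
      } with Base

      println(Late.greeting)   // hello, null
      println(Early.greeting)  // hello, namer
    }
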
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
index 28f620dbb5..fa6e5399eb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -13,7 +13,6 @@ package typechecker
trait AnalyzerPlugins { self: Analyzer =>
import global._
-
trait AnalyzerPlugin {
/**
* Selectively activate this analyzer plugin, e.g. according to the compiler phase.
@@ -33,7 +32,7 @@ trait AnalyzerPlugins { self: Analyzer =>
/**
* Let analyzer plugins change the expected type before type checking a tree.
*/
- def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type = pt
/**
* Let analyzer plugins modify the type that has been computed for a tree.
@@ -44,7 +43,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* @param mode Mode that was used for typing `tree`
* @param pt Expected type that was used for typing `tree`
*/
- def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = tpe
/**
* Let analyzer plugins change the types assigned to definitions. For definitions that have
@@ -133,7 +132,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
* given type tp, taking into account the given mode (see method adapt in trait Typers).
*/
- def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = false
/**
* Adapt a tree that has an annotated type to the given type tp, taking into account the given
@@ -142,11 +141,11 @@ trait AnalyzerPlugins { self: Analyzer =>
* An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
* class cannot do the adapting, it should return the tree unchanged.
*/
- def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = tree
/**
* Modify the type of a return expression. By default, return expressions have type
- * NothingClass.tpe.
+ * NothingTpe.
*
* @param tpe The type of the return expression
* @param typer The typer that was used for typing the return tree
@@ -156,6 +155,117 @@ trait AnalyzerPlugins { self: Analyzer =>
def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe
}
+ /**
+ * @define nonCumulativeReturnValueDoc Returns `None` if the plugin doesn't want to customize the default behavior
+   * or something else if the plugin knows better than the implementation provided in scala-compiler.jar.
+ * If multiple plugins return a non-empty result, it's going to be a compilation error.
+ */
+ trait MacroPlugin {
+ /**
+ * Selectively activate this analyzer plugin, e.g. according to the compiler phase.
+ *
+ * Note that the current phase can differ from the global compiler phase (look for `enteringPhase`
+ * invocations in the compiler). For instance, lazy types created by the UnPickler are completed
+ * at the phase in which their symbol is created. Observations show that this can even be the
+ * parser phase. Since symbol completion can trigger subtyping, typing etc, your plugin might
+ * need to be active also in phases other than namer and typer.
+ *
+ * Typically, this method can be implemented as
+ *
+ * global.phase.id < global.currentRun.picklerPhase.id
+ */
+ def isActive(): Boolean = true
+
+ /**
+ * Typechecks the right-hand side of a macro definition (which typically features
+ * a mere reference to a macro implementation).
+ *
+ * Default implementation provided in `self.standardTypedMacroBody` makes sure that the rhs
+ * resolves to a reference to a method in either a static object or a macro bundle,
+ * verifies that the referred method is compatible with the macro def and upon success
+ * attaches a macro impl binding to the macro def's symbol.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = None
+
+ /**
+ * Expands an application of a def macro (i.e. of a symbol that has the MACRO flag set),
+ * possibly using the current typer mode and the provided prototype.
+ *
+ * Default implementation provided in `self.standardMacroExpand` figures out whether the `expandee`
+ * needs to be expanded right away or its expansion has to be delayed until all undetermined
+ * parameters are inferred, then loads the macro implementation using `self.pluginsMacroRuntime`,
+ * prepares the invocation arguments for the macro implementation using `self.pluginsMacroArgs`,
+ * and finally calls into the macro implementation. After the call returns, it typechecks
+ * the expansion and performs some bookkeeping.
+ *
+ * This method is typically implemented if your plugin requires significant changes to the macro engine.
+ * If you only need to customize the macro context, consider implementing `pluginsMacroArgs`.
+ * If you only need to customize how macro implementation are invoked, consider going for `pluginsMacroRuntime`.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Option[Tree] = None
+
+ /**
+ * Computes the arguments that need to be passed to the macro impl corresponding to a particular expandee.
+ *
+ * Default implementation provided in `self.standardMacroArgs` instantiates a `scala.reflect.macros.contexts.Context`,
+ * gathers type and value arguments of the macro application and throws them together into `MacroArgs`.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsMacroArgs(typer: Typer, expandee: Tree): Option[MacroArgs] = None
+
+ /**
+ * Summons a function that encapsulates macro implementation invocations for a particular expandee.
+ *
+ * Default implementation provided in `self.standardMacroRuntime` returns a function that
+ * loads the macro implementation binding from the macro definition symbol,
+ * then uses either Java or Scala reflection to acquire the method that corresponds to the impl,
+ * and then reflectively calls into that method.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsMacroRuntime(expandee: Tree): Option[MacroRuntime] = None
+
+ /**
+ * Creates a symbol for the given tree in lexical context encapsulated by the given namer.
+ *
+ * Default implementation provided in `namer.standardEnterSym` handles MemberDef's and Imports,
+ * doing nothing for other trees (DocDef's are seen through and rewrapped). Typical implementation
+ * of `enterSym` for a particular tree flavor creates a corresponding symbol, assigns it to the tree,
+ * enters the symbol into scope and then might even perform some code generation.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsEnterSym(namer: Namer, tree: Tree): Boolean = false
+
+ /**
+ * Makes sure that for the given class definition, there exists a companion object definition.
+ *
+ * Default implementation provided in `namer.standardEnsureCompanionObject` looks up a companion symbol for the class definition
+ * and then checks whether the resulting symbol exists or not. If it exists, then nothing else is done.
+ * If not, a synthetic object definition is created using the provided factory, which is then entered into namer's scope.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Option[Symbol] = None
+
+ /**
+ * Prepares a list of statements for being typechecked by performing domain-specific type-agnostic code synthesis.
+ *
+ * Trees passed into this method are going to be named, but not typed.
+ * In particular, you can rely on the compiler having called `enterSym` on every stat prior to calling this method.
+ *
+ * Default implementation does nothing. Current approaches to code synthesis (generation of underlying fields
+ * for getters/setters, creation of companion objects for case classes, etc.) are too disparate and ad hoc
+ * to be treated uniformly, so I'm leaving this for future work.
+ */
+ def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = stats
+ }
+
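For illustration, a minimal macro plugin might only touch the cumulative `pluginsEnterStats` hook and leave every non-cumulative hook at its default, so the standard implementations (`standardTypedMacroBody`, `standardMacroExpand`, ...) keep running. A sketch, assuming a `scala.tools.nsc.Global` instance is available; the object and method names below are made up for the example:

    import scala.tools.nsc.Global

    // Hypothetical wrapper, for illustration only.
    object MacroPluginSketch {
      def register(global: Global): Unit = {
        import global.analyzer
        object noOp extends analyzer.MacroPlugin {
          // Cumulative hook: observe the named-but-not-yet-typed stats and return them unchanged.
          // All non-cumulative hooks keep their default None/false, so the standard
          // implementations still do the actual work.
          override def pluginsEnterStats(typer: analyzer.Typer, stats: List[global.Tree]): List[global.Tree] = stats
        }
        analyzer.addMacroPlugin(noOp)
      }
    }
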
/** A list of registered analyzer plugins */
@@ -167,59 +277,158 @@ trait AnalyzerPlugins { self: Analyzer =>
analyzerPlugins = plugin :: analyzerPlugins
}
+ private abstract class CumulativeOp[T] {
+ def default: T
+ def accumulate: (T, AnalyzerPlugin) => T
+ }
+
+ private def invoke[T](op: CumulativeOp[T]): T = {
+ if (analyzerPlugins.isEmpty) op.default
+ else analyzerPlugins.foldLeft(op.default)((current, plugin) =>
+ if (!plugin.isActive()) current else op.accumulate(current, plugin))
+ }
/** @see AnalyzerPlugin.pluginsPt */
- def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type =
+ // performance opt
if (analyzerPlugins.isEmpty) pt
- else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
- if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
+ else invoke(new CumulativeOp[Type] {
+ def default = pt
+ def accumulate = (pt, p) => p.pluginsPt(pt, typer, tree, mode)
+ })
/** @see AnalyzerPlugin.pluginsTyped */
- def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
- // support deprecated methods in annotation checkers
- val annotCheckersTpe = addAnnotations(tree, tpe)
- if (analyzerPlugins.isEmpty) annotCheckersTpe
- else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) =>
- if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt))
- }
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type =
+ // performance opt
+ if (analyzerPlugins.isEmpty) addAnnotations(tree, tpe)
+ else invoke(new CumulativeOp[Type] {
+ // support deprecated methods in annotation checkers
+ def default = addAnnotations(tree, tpe)
+ def accumulate = (tpe, p) => p.pluginsTyped(tpe, typer, tree, mode, pt)
+ })
/** @see AnalyzerPlugin.pluginsTypeSig */
- def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type =
- if (analyzerPlugins.isEmpty) tpe
- else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
- if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt))
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = invoke(new CumulativeOp[Type] {
+ def default = tpe
+ def accumulate = (tpe, p) => p.pluginsTypeSig(tpe, typer, defTree, pt)
+ })
/** @see AnalyzerPlugin.pluginsTypeSigAccessor */
- def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type =
- if (analyzerPlugins.isEmpty) tpe
- else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
- if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = invoke(new CumulativeOp[Type] {
+ def default = tpe
+ def accumulate = (tpe, p) => p.pluginsTypeSigAccessor(tpe, typer, tree, sym)
+ })
/** @see AnalyzerPlugin.canAdaptAnnotations */
- def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = invoke(new CumulativeOp[Boolean] {
// support deprecated methods in annotation checkers
- val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
- annotCheckersExists || {
- if (analyzerPlugins.isEmpty) false
- else analyzerPlugins.exists(plugin =>
- plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt))
- }
- }
+ def default = global.canAdaptAnnotations(tree, mode, pt)
+ def accumulate = (curr, p) => curr || p.canAdaptAnnotations(tree, typer, mode, pt)
+ })
/** @see AnalyzerPlugin.adaptAnnotations */
- def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = invoke(new CumulativeOp[Tree] {
// support deprecated methods in annotation checkers
- val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
- if (analyzerPlugins.isEmpty) annotCheckersTree
- else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) =>
- if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt))
- }
+ def default = global.adaptAnnotations(tree, mode, pt)
+ def accumulate = (tree, p) => p.adaptAnnotations(tree, typer, mode, pt)
+ })
/** @see AnalyzerPlugin.pluginsTypedReturn */
- def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
- val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe)
- if (analyzerPlugins.isEmpty) annotCheckersType
- else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) =>
- if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt))
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = invoke(new CumulativeOp[Type] {
+ def default = adaptTypeOfReturn(tree.expr, pt, tpe)
+ def accumulate = (tpe, p) => p.pluginsTypedReturn(tpe, typer, tree, pt)
+ })
+
+ /** A list of registered macro plugins */
+ private var macroPlugins: List[MacroPlugin] = Nil
+
+ /** Registers a new macro plugin */
+ def addMacroPlugin(plugin: MacroPlugin) {
+ if (!macroPlugins.contains(plugin))
+ macroPlugins = plugin :: macroPlugins
+ }
+
+ private abstract class NonCumulativeOp[T] {
+ def position: Position
+ def description: String
+ def default: T
+ def custom(plugin: MacroPlugin): Option[T]
+ }
+
+ private def invoke[T](op: NonCumulativeOp[T]): T = {
+ if (macroPlugins.isEmpty) op.default
+ else {
+ val results = macroPlugins.filter(_.isActive()).map(plugin => (plugin, op.custom(plugin)))
+ results.flatMap { case (p, Some(result)) => Some((p, result)); case _ => None } match {
+ case (p1, _) :: (p2, _) :: _ => typer.context.error(op.position, s"both $p1 and $p2 want to ${op.description}"); op.default
+ case (_, custom) :: Nil => custom
+ case Nil => op.default
+ }
+ }
+ }
+
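In words: among the active macro plugins, zero `Some` results fall back to the standard implementation, exactly one wins, and two or more is a conflict reported at the operation's position before falling back to the default. A standalone sketch of that rule (not compiler code; the conflict is simplified to a `Left` value instead of a reported error):

    // Standalone sketch of the NonCumulativeOp dispatch rule, illustration only.
    object NonCumulativeDispatchSketch extends App {
      def dispatch[T](customs: List[Option[T]], default: => T): Either[String, T] =
        customs.flatten match {
          case _ :: _ :: _ => Left("both plugins want to perform this operation") // conflict
          case one :: Nil  => Right(one)                                          // exactly one plugin takes over
          case Nil         => Right(default)                                      // nobody volunteered: standard impl
        }

      assert(dispatch(List(None, Some(42)), default = 0) == Right(42))
      assert(dispatch(List(None, None), default = 0) == Right(0))
      assert(dispatch(List(Some(1), Some(2)), default = 0).isLeft)
    }
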
+ /** @see MacroPlugin.pluginsTypedMacroBody */
+ def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Tree = invoke(new NonCumulativeOp[Tree] {
+ def position = ddef.pos
+ def description = "typecheck this macro definition"
+ def default = standardTypedMacroBody(typer, ddef)
+ def custom(plugin: MacroPlugin) = plugin.pluginsTypedMacroBody(typer, ddef)
+ })
+
+ /** @see MacroPlugin.pluginsMacroExpand */
+ def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = invoke(new NonCumulativeOp[Tree] {
+ def position = expandee.pos
+ def description = "expand this macro application"
+ def default = standardMacroExpand(typer, expandee, mode, pt)
+ def custom(plugin: MacroPlugin) = plugin.pluginsMacroExpand(typer, expandee, mode, pt)
+ })
+
+ /** @see MacroPlugin.pluginsMacroArgs */
+ def pluginsMacroArgs(typer: Typer, expandee: Tree): MacroArgs = invoke(new NonCumulativeOp[MacroArgs] {
+ def position = expandee.pos
+ def description = "compute macro arguments for this macro application"
+ def default = standardMacroArgs(typer, expandee)
+ def custom(plugin: MacroPlugin) = plugin.pluginsMacroArgs(typer, expandee)
+ })
+
+ /** @see MacroPlugin.pluginsMacroRuntime */
+ def pluginsMacroRuntime(expandee: Tree): MacroRuntime = invoke(new NonCumulativeOp[MacroRuntime] {
+ def position = expandee.pos
+ def description = "compute macro runtime for this macro application"
+ def default = standardMacroRuntime(expandee)
+ def custom(plugin: MacroPlugin) = plugin.pluginsMacroRuntime(expandee)
+ })
+
+ /** @see MacroPlugin.pluginsEnterSym */
+ def pluginsEnterSym(namer: Namer, tree: Tree): Context =
+ if (macroPlugins.isEmpty) namer.standardEnterSym(tree)
+ else invoke(new NonCumulativeOp[Context] {
+ def position = tree.pos
+ def description = "enter a symbol for this tree"
+ def default = namer.standardEnterSym(tree)
+ def custom(plugin: MacroPlugin) = {
+ val hasExistingSym = tree.symbol != NoSymbol
+ val result = plugin.pluginsEnterSym(namer, tree)
+ if (result && hasExistingSym) Some(namer.context)
+ else if (result && tree.isInstanceOf[Import]) Some(namer.context.make(tree))
+ else if (result) Some(namer.context)
+ else None
+ }
+ })
+
+ /** @see MacroPlugin.pluginsEnsureCompanionObject */
+ def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = invoke(new NonCumulativeOp[Symbol] {
+ def position = cdef.pos
+ def description = "enter a companion symbol for this tree"
+ def default = namer.standardEnsureCompanionObject(cdef, creator)
+ def custom(plugin: MacroPlugin) = plugin.pluginsEnsureCompanionObject(namer, cdef, creator)
+ })
+
+ /** @see MacroPlugin.pluginsEnterStats */
+ def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = {
+ // performance opt
+ if (macroPlugins.isEmpty) stats
+ else macroPlugins.foldLeft(stats)((current, plugin) =>
+ if (!plugin.isActive()) current else plugin.pluginsEnterStats(typer, stats))
}
}
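
By contrast with the non-cumulative macro hooks, the analyzer plugin hooks above are cumulative: each active plugin receives the value produced by the previous one and must return a value itself, so there is never a conflict to resolve. A sketch of a purely observing analyzer plugin, again assuming a `Global` instance and made-up wrapper names; `addAnalyzerPlugin` is the registration counterpart of `addMacroPlugin`:

    import scala.tools.nsc.Global

    // Hypothetical wrapper, for illustration only.
    object AnalyzerPluginSketch {
      def register(global: Global): Unit = {
        import global.analyzer
        object tracer extends analyzer.AnalyzerPlugin {
          // Cumulative hook: receive the signature computed so far (possibly already
          // adjusted by earlier plugins), observe it, and pass it along unchanged.
          override def pluginsTypeSig(tpe: global.Type, typer: analyzer.Typer,
                                      defTree: global.Tree, pt: global.Type): global.Type = {
            global.inform(s"computed signature $tpe for $defTree")
            tpe
          }
        }
        analyzer.addAnalyzerPlugin(tracer)
      }
    }
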
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index d30b5c2601..94f8f509fc 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -6,12 +6,8 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlThrowable
-import symtab.Flags._
-import scala.annotation.tailrec
import Checkability._
+import scala.language.postfixOps
/** On pattern matcher checkability:
*
@@ -66,6 +62,9 @@ trait Checkable {
bases foreach { bc =>
val tps1 = (from baseType bc).typeArgs
val tps2 = (tvarType baseType bc).typeArgs
+ if (tps1.size != tps2.size)
+ devWarning(s"Unequally sized type arg lists in propagateKnownTypes($from, $to): ($tps1, $tps2)")
+
(tps1, tps2).zipped foreach (_ =:= _)
// Alternate, variance respecting formulation causes
// neg/unchecked3.scala to fail (abstract types). TODO -
@@ -82,7 +81,7 @@ trait Checkable {
val resArgs = tparams zip tvars map {
case (_, tvar) if tvar.instValid => tvar.constr.inst
- case (tparam, _) => tparam.tpe
+ case (tparam, _) => tparam.tpeHK
}
appliedType(to, resArgs: _*)
}
@@ -112,7 +111,7 @@ trait Checkable {
private class CheckabilityChecker(val X: Type, val P: Type) {
def Xsym = X.typeSymbol
def Psym = P.typeSymbol
- def XR = propagateKnownTypes(X, Psym)
+ def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym)
// sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
def P1 = X matchesPattern P
def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
@@ -134,7 +133,7 @@ trait Checkable {
else if (P3) RuntimeCheckable
else if (uncheckableType == NoType) {
// Avoid warning (except ourselves) if we can't pinpoint the uncheckable type
- debugwarn("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString)
+ debuglog("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString)
CheckabilityError
}
else Uncheckable
@@ -154,6 +153,7 @@ trait Checkable {
def neverSubClass = isNeverSubClass(Xsym, Psym)
def neverMatches = result == StaticallyFalse
def isUncheckable = result == Uncheckable
+ def isCheckable = !isUncheckable
def uncheckableMessage = uncheckableType match {
case NoType => "something"
case tp @ RefinedType(_, _) => "refinement " + tp
@@ -195,19 +195,27 @@ trait Checkable {
* so I will consult with moors about the optimal time to be doing this.
*/
def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = areUnrelatedClasses(sym1, sym2) && (
- sym1.initialize.isEffectivelyFinal // initialization important
- || sym2.initialize.isEffectivelyFinal
+ isEffectivelyFinal(sym1) // initialization important
+ || isEffectivelyFinal(sym2)
|| !sym1.isTrait && !sym2.isTrait
|| sym1.isSealed && sym2.isSealed && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
)
+ private def isEffectivelyFinal(sym: Symbol): Boolean = (
+ // initialization important
+ sym.initialize.isEffectivelyFinal || (
+ settings.future && isTupleSymbol(sym) // SI-7294 step into the future and treat TupleN as final.
+ )
+ )
+
def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2)
private def isNeverSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean = /*logResult(s"isNeverSubArgs($tps1, $tps2, $tparams)")*/ {
- def isNeverSubArg(t1: Type, t2: Type, variance: Int) = {
- if (variance > 0) isNeverSubType(t2, t1)
- else if (variance < 0) isNeverSubType(t1, t2)
- else isNeverSameType(t1, t2)
- }
+ def isNeverSubArg(t1: Type, t2: Type, variance: Variance) = (
+ if (variance.isInvariant) isNeverSameType(t1, t2)
+ else if (variance.isCovariant) isNeverSubType(t2, t1)
+ else if (variance.isContravariant) isNeverSubType(t1, t2)
+ else false
+ )
exists3(tps1, tps2, tparams map (_.variance))(isNeverSubArg)
}
private def isNeverSameType(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
@@ -232,6 +240,17 @@ trait Checkable {
trait InferCheckable {
self: Inferencer =>
+ def isUncheckable(P0: Type) = !isCheckable(P0)
+
+ def isCheckable(P0: Type): Boolean = (
+ uncheckedOk(P0) || (P0.widen match {
+ case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case RefinedType(parents, _) => parents forall isCheckable
+ case p => new CheckabilityChecker(AnyTpe, p) isCheckable
+ })
+ )
+
/** TODO: much better error positions.
* Kind of stuck right now because they just pass us the one tree.
* TODO: Eliminate inPattern, canRemedy, which have no place here.
@@ -240,10 +259,12 @@ trait Checkable {
if (uncheckedOk(P0)) return
def where = if (inPattern) "pattern " else ""
- // singleton types not considered here
- val P = P0.widen
+ // singleton types not considered here, dealias the pattern for SI-XXXX
+ val P = P0.dealiasWiden
val X = X0.widen
+ def PString = if (P eq P0) P.toString else s"$P (the underlying of $P0)"
+
P match {
// Prohibit top-level type tests for these, but they are ok nested (e.g. case Foldable[Nothing] => ... )
case TypeRef(_, NothingClass | NullClass | AnyValClass, _) =>
@@ -254,17 +275,21 @@ trait Checkable {
// Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet.
case RefinedType(_, decls) if !decls.isEmpty =>
getContext.unit.warning(tree.pos, s"a pattern match on a refinement type is unchecked")
+ case RefinedType(parents, _) =>
+ parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy))
case _ =>
val checker = new CheckabilityChecker(X, P)
- log(checker.summaryString)
+ if (checker.result == RuntimeCheckable)
+ log(checker.summaryString)
+
if (checker.neverMatches) {
val addendum = if (checker.neverSubClass) "" else " (but still might match its erasure)"
- getContext.unit.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $P$addendum")
+ getContext.unit.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum")
}
else if (checker.isUncheckable) {
val msg = (
- if (checker.uncheckableType =:= P) s"abstract type $where$P"
- else s"${checker.uncheckableMessage} in type $where$P"
+ if (checker.uncheckableType =:= P) s"abstract type $where$PString"
+ else s"${checker.uncheckableMessage} in type $where$PString"
)
getContext.unit.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure")
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 89e2ee44be..56ed0ee16c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -3,10 +3,10 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
-
import java.lang.ArithmeticException
/** This class ...
@@ -18,7 +18,6 @@ abstract class ConstantFolder {
val global: Global
import global._
- import definitions._
/** If tree is a constant operation, replace with result. */
def apply(tree: Tree): Tree = fold(tree, tree match {
@@ -29,9 +28,6 @@ abstract class ConstantFolder {
/** If tree is a constant value that can be converted to type `pt`, perform
* the conversion.
- *
- * @param tree ...
- * @param pt ...
*/
def apply(tree: Tree, pt: Type): Tree = fold(apply(tree), tree.tpe match {
case ConstantType(x) => x convertTo pt
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index a7b0e47214..4d0eda2377 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -6,55 +6,54 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ countElementsAsString, countAsString }
-import symtab.Flags.{ PRIVATE, PROTECTED, IS_ERROR }
+import symtab.Flags.IS_ERROR
import scala.compat.Platform.EOL
import scala.reflect.runtime.ReflectionUtils
import scala.reflect.macros.runtime.AbortMacroException
import scala.util.control.NonFatal
import scala.tools.nsc.util.stackTraceString
+import scala.reflect.io.NoAbstractFile
trait ContextErrors {
self: Analyzer =>
import global._
import definitions._
- import treeInfo._
- object ErrorKinds extends Enumeration {
- type ErrorKind = Value
- val Normal, Access, Ambiguous, Divergent = Value
- }
-
- import ErrorKinds.ErrorKind
-
- trait AbsTypeError extends Throwable {
+ sealed abstract class AbsTypeError extends Throwable {
def errPos: Position
def errMsg: String
- def kind: ErrorKind
+ override def toString() = "[Type error at:" + errPos + "] " + errMsg
}
- case class NormalTypeError(underlyingTree: Tree, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
- extends AbsTypeError {
-
- def errPos:Position = underlyingTree.pos
- override def toString() = "[Type error at:" + underlyingTree.pos + "] " + errMsg
+ sealed abstract class TreeTypeError extends AbsTypeError {
+ def underlyingTree: Tree
+ def errPos = underlyingTree.pos
}
- case class SymbolTypeError(underlyingSym: Symbol, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
+ case class NormalTypeError(underlyingTree: Tree, errMsg: String)
+ extends TreeTypeError
+
+ case class AccessTypeError(underlyingTree: Tree, errMsg: String)
+ extends TreeTypeError
+
+ case class AmbiguousTypeError(errPos: Position, errMsg: String)
+ extends AbsTypeError
+
+ case class SymbolTypeError(underlyingSym: Symbol, errMsg: String)
extends AbsTypeError {
def errPos = underlyingSym.pos
}
- case class TypeErrorWrapper(ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ case class TypeErrorWrapper(ex: TypeError)
extends AbsTypeError {
def errMsg = ex.msg
def errPos = ex.pos
}
- case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError)
extends AbsTypeError {
def errMsg = ex.msg
def errPos = tree.pos
@@ -68,19 +67,19 @@ trait ContextErrors {
// (pt at the point of divergence gives less information to the user)
// Note: it is safe to delay error message generation in this case
// because we don't modify implicits' infos.
- // only issued when -Xdivergence211 is turned on
- case class DivergentImplicitTypeError(tree: Tree, pt0: Type, sym: Symbol) extends AbsTypeError {
- def errPos: Position = tree.pos
+ case class DivergentImplicitTypeError(underlyingTree: Tree, pt0: Type, sym: Symbol)
+ extends TreeTypeError {
def errMsg: String = errMsgForPt(pt0)
- def kind = ErrorKinds.Divergent
- def withPt(pt: Type): AbsTypeError = NormalTypeError(tree, errMsgForPt(pt), kind)
+ def withPt(pt: Type): AbsTypeError = this.copy(pt0 = pt)
private def errMsgForPt(pt: Type) =
s"diverging implicit expansion for type ${pt}\nstarting with ${sym.fullLocationString}"
}
- case class AmbiguousTypeError(underlyingTree: Tree, errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Ambiguous) extends AbsTypeError
+ case class AmbiguousImplicitTypeError(underlyingTree: Tree, errMsg: String)
+ extends TreeTypeError
- case class PosAndMsgTypeError(errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Normal) extends AbsTypeError
+ case class PosAndMsgTypeError(errPos: Position, errMsg: String)
+ extends AbsTypeError
object ErrorUtils {
def issueNormalTypeError(tree: Tree, msg: String)(implicit context: Context) {
@@ -91,22 +90,13 @@ trait ContextErrors {
issueTypeError(SymbolTypeError(sym, msg))
}
- // only called when -Xdivergence211 is turned off
- def issueDivergentImplicitsError(tree: Tree, msg: String)(implicit context: Context) {
- issueTypeError(NormalTypeError(tree, msg, ErrorKinds.Divergent))
- }
-
def issueAmbiguousTypeError(pre: Type, sym1: Symbol, sym2: Symbol, err: AmbiguousTypeError)(implicit context: Context) {
context.issueAmbiguousError(pre, sym1, sym2, err)
}
def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) }
- def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
- def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
-
- "type mismatch" + foundReqMsg(found, req) + missingArgsMsg
- }
+ def typeErrorMsg(found: Type, req: Type) = "type mismatch" + foundReqMsg(found, req)
}
def notAnyRefMessage(found: Type): String = {
@@ -147,7 +137,7 @@ trait ContextErrors {
}
issueNormalTypeError(tree,
"stable identifier required, but "+tree+" found." + (
- if (isStableExceptVolatile(tree)) addendum else ""))
+ if (treeInfo.hasVolatileType(tree)) addendum else ""))
setError(tree)
}
@@ -155,28 +145,40 @@ trait ContextErrors {
def errMsg = {
val paramName = param.name
val paramTp = param.tpe
+ def evOrParam = (
+ if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX)
+ "evidence parameter of type"
+ else
+ s"parameter $paramName:"
+ )
paramTp.typeSymbolDirect match {
- case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
- case _ =>
- "could not find implicit value for "+
- (if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type "
- else "parameter "+paramName+": ")+paramTp
+ case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
+ case _ => s"could not find implicit value for $evOrParam $paramTp"
}
}
issueNormalTypeError(tree, errMsg)
}
def AdaptTypeError(tree: Tree, found: Type, req: Type) = {
+ // SI-3971 unwrapping to the outermost Apply helps prevent confusion with the
+ // error message point.
+ def callee = {
+ def unwrap(t: Tree): Tree = t match {
+ case Apply(app: Apply, _) => unwrap(app)
+ case _ => t
+ }
+ unwrap(tree)
+ }
+
// If the expected type is a refinement type, and the found type is a refinement or an anon
// class, we can greatly improve the error message by retyping the tree to recover the actual
// members present, then display along with the expected members. This is done here because
// this is the last point where we still have access to the original tree, rather than just
// the found/req types.
- val foundType: Type = req.normalize match {
+ val foundType: Type = req.dealiasWiden match {
case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass =>
- val retyped = typed (tree.duplicate setType null)
+ val retyped = typed (tree.duplicate.clearType())
val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
-
if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found
else {
// The members arrive marked private, presumably because there was no
@@ -190,11 +192,10 @@ trait ContextErrors {
case _ =>
found
}
- assert(!found.isErroneous && !req.isErroneous, (found, req))
+ assert(!foundType.isErroneous && !req.isErroneous, (foundType, req))
- issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) )
- if (settings.explaintypes.value)
- explainTypes(found, req)
+ issueNormalTypeError(callee, withAddendum(callee.pos)(typeErrorMsg(foundType, req)))
+ infer.explainTypes(foundType, req)
}
def WithFilterError(tree: Tree, ex: AbsTypeError) = {
@@ -203,14 +204,18 @@ trait ContextErrors {
}
def ParentTypesError(templ: Template, ex: TypeError) = {
- templ.tpe = null
- issueNormalTypeError(templ, ex.getMessage())
+ templ.clearType()
+ issueNormalTypeError(templ, ex.getMessage())
+ setError(templ)
}
// additional parentTypes errors
- def ConstrArgsInTraitParentTpeError(arg: Tree, parent: Symbol) =
+ def ConstrArgsInParentWhichIsTraitError(arg: Tree, parent: Symbol) =
issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments")
+ def ConstrArgsInParentOfTraitError(arg: Tree, parent: Symbol) =
+ issueNormalTypeError(arg, "parents of traits may not have parameters")
+
def MissingTypeArgumentsParentTpeError(supertpt: Tree) =
issueNormalTypeError(supertpt, "missing type arguments")
@@ -318,7 +323,7 @@ trait ContextErrors {
val target = qual.tpe.widen
def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else ""
def nameString = decodeWithKind(name, owner)
- /** Illuminating some common situations and errors a bit further. */
+ /* Illuminating some common situations and errors a bit further. */
def addendum = {
val companion = {
if (name.isTermName && owner.isPackageClass) {
@@ -403,11 +408,28 @@ trait ContextErrors {
setError(tree)
}
- def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type) =
+ def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type, withTupleAddendum: Boolean) = {
+ def issue(what: String) = {
+ val addendum: String = fun match {
+ case Function(params, _) if withTupleAddendum =>
+ val funArity = params.length
+ val example = analyzer.exampleTuplePattern(params map (_.name))
+ (pt baseType FunctionClass(1)) match {
+ case TypeRef(_, _, arg :: _) if arg.typeSymbol == TupleClass(funArity) && funArity > 1 =>
+ sm"""|
+ |Note: The expected type requires a one-argument function accepting a $funArity-Tuple.
+ | Consider a pattern matching anonymous function, `{ case $example => ... }`"""
+ case _ => ""
+ }
+ case _ => ""
+ }
+ issueNormalTypeError(vparam, what + addendum)
+ }
if (vparam.mods.isSynthetic) fun match {
case Function(_, Match(_, _)) => MissingParameterTypeAnonMatchError(vparam, pt)
- case _ => issueNormalTypeError(vparam, "missing parameter type for expanded function " + fun)
- } else issueNormalTypeError(vparam, "missing parameter type")
+ case _ => issue("missing parameter type for expanded function " + fun)
+ } else issue("missing parameter type")
+ }
def MissingParameterTypeAnonMatchError(vparam: Tree, pt: Type) =
issueNormalTypeError(vparam, "missing parameter type for expanded function\n"+
@@ -437,9 +459,6 @@ trait ContextErrors {
def ArrayConstantsTypeMismatchError(tree: Tree, pt: Type) =
NormalTypeError(tree, "found array constant, expected argument of type " + pt)
- def UnexpectedTreeAnnotation(tree: Tree) =
- NormalTypeError(tree, "unexpected tree in annotation: "+ tree)
-
def AnnotationTypeMismatchError(tree: Tree, expected: Type, found: Type) =
NormalTypeError(tree, "expected annotation of type " + expected + ", found " + found)
@@ -468,7 +487,7 @@ trait ContextErrors {
def AbstractionFromVolatileTypeError(vd: ValDef) =
issueNormalTypeError(vd, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
- private[ContextErrors] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
+ private[scala] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
"wrong number of type parameters for "+treeSymTypeMsg(fun)
def TypedApplyWrongNumberOfTpeParametersError(tree: Tree, fun: Tree) = {
@@ -484,7 +503,7 @@ trait ContextErrors {
// doTypeApply
//tryNamesDefaults
def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) =
- NormalTypeError(tree, "macros application do not support named and/or default arguments")
+ NormalTypeError(tree, "macro applications do not support named and/or default arguments")
def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
@@ -525,6 +544,12 @@ trait ContextErrors {
def TooManyArgsPatternError(fun: Tree) =
NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
+ def BlackboxExtractorExpansion(fun: Tree) =
+ NormalTypeError(fun, "extractor macros can only be whitebox")
+
+ def WrongShapeExtractorExpansion(fun: Tree) =
+ NormalTypeError(fun, "extractor macros can only expand into extractor calls")
+
def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
@@ -532,7 +557,7 @@ trait ContextErrors {
NormalTypeError(tree, fun.tpe+" does not take parameters")
// Dynamic
- def DynamicVarArgUnsupported(tree: Tree, name: String) =
+ def DynamicVarArgUnsupported(tree: Tree, name: Name) =
issueNormalTypeError(tree, name+ " does not support passing a vararg parameter")
def DynamicRewriteError(tree: Tree, err: AbsTypeError) = {
@@ -578,11 +603,13 @@ trait ContextErrors {
//adapt
def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = {
- issueNormalTypeError(tree,
- "missing arguments for " + meth.fullLocationString + (
+ val message =
+ if (meth.isMacro) MacroTooFewArgumentListsMessage
+ else "missing arguments for " + meth.fullLocationString + (
if (meth.isConstructor) ""
else ";\nfollow this method with `_' if you want to treat it as a partially applied function"
- ))
+ )
+ issueNormalTypeError(tree, message)
setError(tree)
}
@@ -598,8 +625,12 @@ trait ContextErrors {
setError(tree)
}
- def CaseClassConstructorError(tree: Tree) = {
- issueNormalTypeError(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method")
+ def CaseClassConstructorError(tree: Tree, baseMessage: String) = {
+ val addendum = directUnapplyMember(tree.symbol.info) match {
+ case sym if hasMultipleNonImplicitParamLists(sym) => s"\nNote: ${sym.defString} exists in ${tree.symbol}, but it cannot be used as an extractor due to its second non-implicit parameter list"
+ case _ => ""
+ }
+ issueNormalTypeError(tree, baseMessage + addendum)
setError(tree)
}
@@ -663,7 +694,7 @@ trait ContextErrors {
val addendums = List(
if (sym0.associatedFile eq sym1.associatedFile)
Some("conflicting symbols both originated in file '%s'".format(sym0.associatedFile.canonicalPath))
- else if ((sym0.associatedFile ne null) && (sym1.associatedFile ne null))
+ else if ((sym0.associatedFile ne NoAbstractFile) && (sym1.associatedFile ne NoAbstractFile))
Some("conflicting symbols originated in files '%s' and '%s'".format(sym0.associatedFile.canonicalPath, sym1.associatedFile.canonicalPath))
else None ,
if (isBug) Some("Note: this may be due to a bug in the compiler involving wildcards in package objects") else None
@@ -680,8 +711,8 @@ trait ContextErrors {
def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) =
issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0))
- def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
- issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
+ def CyclicReferenceError(errPos: Position, tp: Type, lockedSym: Symbol) =
+ issueTypeError(PosAndMsgTypeError(errPos, s"illegal cyclic reference involving $tp and $lockedSym"))
// macro-related errors (also see MacroErrors below)
@@ -690,26 +721,43 @@ trait ContextErrors {
setError(tree)
}
- // same reason as for MacroBodyTypecheckException
+ def MacroTooManyArgumentListsError(expandee: Tree, fun: Symbol) = {
+ NormalTypeError(expandee, "too many argument lists for " + fun)
+ }
+
+ def MacroIncompatibleEngineError(macroEngine: String) = {
+ val message = s"macro cannot be expanded, because it was compiled by an incompatible macro engine $macroEngine"
+ issueNormalTypeError(lastTreeToTyper, message)
+ }
+
case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
- private def macroExpansionError(expandee: Tree, msg: String = null, pos: Position = NoPosition) = {
+ protected def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = {
def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
macroLogLite("macro expansion has failed: %s".format(msgForLog))
- val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition)
- if (msg != null) context.error(errorPos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
+ if (msg != null) context.error(if (pos.isDefined) pos else expandee.pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
setError(expandee)
throw MacroExpansionException
}
- def MacroPartialApplicationError(expandee: Tree) = {
+ private def macroExpansionError2(expandee: Tree, msg: String) = {
// macroExpansionError won't work => swallows positions, hence needed to do issueTypeError
// kinda contradictory to the comment in `macroExpansionError`, but this is how it works
- issueNormalTypeError(expandee, "macros cannot be partially applied")
+ issueNormalTypeError(expandee, msg)
setError(expandee)
throw MacroExpansionException
}
+ private def MacroTooFewArgumentListsMessage = "too few argument lists for macro invocation"
+ def MacroTooFewArgumentListsError(expandee: Tree) = macroExpansionError2(expandee, MacroTooFewArgumentListsMessage)
+
+ private def MacroTooManyArgumentListsMessage = "too many argument lists for macro invocation"
+ def MacroTooManyArgumentListsError(expandee: Tree) = macroExpansionError2(expandee, MacroTooManyArgumentListsMessage)
+
+ def MacroTooFewArgumentsError(expandee: Tree) = macroExpansionError2(expandee, "too few arguments for macro invocation")
+
+ def MacroTooManyArgumentsError(expandee: Tree) = macroExpansionError2(expandee, "too many arguments for macro invocation")
+
def MacroGeneratedAbort(expandee: Tree, ex: AbortMacroException) = {
// errors have been reported by the macro itself, so we do nothing here
macroLogVerbose("macro expansion has been aborted")
@@ -731,7 +779,7 @@ trait ContextErrors {
try {
// [Eugene] is there a better way?
// [Paul] See Exceptional.scala and Origins.scala.
- val relevancyThreshold = realex.getStackTrace().indexWhere(_.getMethodName endsWith "macroExpand1")
+ val relevancyThreshold = realex.getStackTrace().indexWhere(_.getMethodName endsWith "macroExpandWithRuntime")
if (relevancyThreshold == -1) None
else {
var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
@@ -771,23 +819,29 @@ trait ContextErrors {
macroExpansionError(expandee, template(sym.name.nameKind).format(sym.name + " " + sym.origin, forgotten))
}
- def MacroExpansionIsNotExprError(expandee: Tree, expanded: Any) =
+ def MacroExpansionHasInvalidTypeError(expandee: Tree, expanded: Any) = {
+ def isUnaffiliatedExpr = expanded.isInstanceOf[scala.reflect.api.Exprs#Expr[_]]
+ def isUnaffiliatedTree = expanded.isInstanceOf[scala.reflect.api.Trees#TreeApi]
+ val expected = "expr or tree"
+ val actual = if (isUnaffiliatedExpr) "an expr" else if (isUnaffiliatedTree) "a tree" else "unexpected"
+ val isPathMismatch = expanded != null && (isUnaffiliatedExpr || isUnaffiliatedTree)
macroExpansionError(expandee,
- "macro must return a compiler-specific expr; returned value is " + (
+ s"macro must return a compiler-specific $expected; returned value is " + (
if (expanded == null) "null"
- else if (expanded.isInstanceOf[Expr[_]]) " Expr, but it doesn't belong to this compiler's universe"
- else " of " + expanded.getClass
+ else if (isPathMismatch) s"$actual, but it doesn't belong to this compiler's universe"
+ else "of " + expanded.getClass
))
-
- def MacroImplementationNotFoundError(expandee: Tree) = {
- val message =
- "macro implementation not found: " + expandee.symbol.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
- (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
- else "")
- macroExpansionError(expandee, message)
}
+
+ def MacroImplementationNotFoundError(expandee: Tree) =
+ macroExpansionError(expandee, macroImplementationNotFoundMessage(expandee.symbol.name))
}
+
+ /** This file will be the death of me. */
+ protected def macroImplementationNotFoundMessage(name: Name): String = (
+ s"""|macro implementation not found: $name
+ |(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)""".stripMargin
+ )
}
trait InferencerContextErrors {
@@ -829,14 +883,17 @@ trait ContextErrors {
)
}
- def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String) = {
+ def AccessError(tree: Tree, sym: Symbol, ctx: Context, explanation: String): AbsTypeError =
+ AccessError(tree, sym, ctx.enclClass.owner.thisType, ctx.enclClass.owner, explanation)
+
+ def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String): AbsTypeError = {
def errMsg = {
val location = if (sym.isClassConstructor) owner0 else pre.widen.directObjectString
underlyingSymbol(sym).fullLocationString + " cannot be accessed in " +
location + explanation
}
- NormalTypeError(tree, errMsg, ErrorKinds.Access)
+ AccessTypeError(tree, errMsg)
}
def NoMethodInstanceError(fn: Tree, args: List[Tree], msg: String) =
@@ -881,7 +938,7 @@ trait ContextErrors {
"argument types " + argtpes.mkString("(", ",", ")") +
(if (pt == WildcardType) "" else " and expected result type " + pt)
val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
setErrorOnLastTry(lastTry, tree)
} else setError(tree) // do not even try further attempts because they should all fail
// even if this is not the last attempt (because of the SO's possibility on the horizon)
@@ -889,13 +946,13 @@ trait ContextErrors {
}
def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = {
- issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt, isPossiblyMissingArgs(tree.symbol.tpe, pt))))
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt)))
setErrorOnLastTry(lastTry, tree)
}
def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type, lastTry: Boolean) = {
val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
setErrorOnLastTry(lastTry, tree)
}
@@ -909,7 +966,7 @@ trait ContextErrors {
kindErrors.toList.mkString("\n", ", ", ""))
}
- private[ContextErrors] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
+ private[scala] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
if (explaintypes) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
(targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
@@ -925,7 +982,7 @@ trait ContextErrors {
def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type],
tparams: List[Symbol], kindErrors: List[String]) =
issueNormalTypeError(tree,
- NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes.value))
+ NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes))
//substExpr
def PolymorphicExpressionInstantiationError(tree: Tree, undetparams: List[Symbol], pt: Type) =
@@ -1033,20 +1090,14 @@ trait ContextErrors {
val s1 = if (prevSym.isModule) "case class companion " else ""
val s2 = if (prevSym.isSynthetic) "(compiler-generated) " + s1 else ""
val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym
- val where = if (currentSym.owner.isPackageClass != prevSym.owner.isPackageClass) {
- val inOrOut = if (prevSym.owner.isPackageClass) "outside of" else "in"
+ val where = if (currentSym.isTopLevel != prevSym.isTopLevel) {
+ val inOrOut = if (prevSym.isTopLevel) "outside of" else "in"
" %s package object %s".format(inOrOut, ""+prevSym.effectiveOwner.name)
} else ""
issueSymbolTypeError(currentSym, prevSym.name + " is already defined as " + s2 + s3 + where)
}
- def MaxParametersCaseClassError(tree: Tree) =
- issueNormalTypeError(tree, "Implementation restriction: case classes cannot have more than " + definitions.MaxFunctionArity + " parameters.")
-
- def InheritsItselfError(tree: Tree) =
- issueNormalTypeError(tree, tree.tpe.typeSymbol+" inherits itself")
-
def MissingParameterOrValTypeError(vparam: Tree) =
issueNormalTypeError(vparam, "missing parameter type")
@@ -1097,11 +1148,11 @@ trait ContextErrors {
def AbstractMemberWithModiferError(sym: Symbol, flag: Int) =
- issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag) + " modifier")
+ issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag.toLong) + " modifier")
def IllegalModifierCombination(sym: Symbol, flag1: Int, flag2: Int) =
issueSymbolTypeError(sym, "illegal combination of modifiers: %s and %s for: %s".format(
- Flags.flagsToString(flag1), Flags.flagsToString(flag2), sym))
+ Flags.flagsToString(flag1.toLong), Flags.flagsToString(flag2.toLong), sym))
def IllegalDependentMethTpeError(sym: Symbol)(context: Context) = {
val errorAddendum =
@@ -1143,7 +1194,7 @@ trait ContextErrors {
// failures which have nothing to do with implicit conversions
// per se, but which manifest as implicit conversion conflicts
// involving Any, are further explained from foundReqMsg.
- if (AnyRefClass.tpe <:< req) (
+ if (AnyRefTpe <:< req) (
if (sym == AnyClass || sym == UnitClass) (
sm"""|Note: ${sym.name} is not implicitly converted to AnyRef. You can safely
|pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."""
@@ -1159,11 +1210,11 @@ trait ContextErrors {
sm"""|Note that implicit conversions are not applicable because they are ambiguous:
|${coreMsg}are possible conversion functions from $found to $req"""
}
- typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + (
+ typeErrorMsg(found, req) + (
if (explanation == "") "" else "\n" + explanation
)
}
- context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos,
+ context.issueAmbiguousError(AmbiguousImplicitTypeError(tree,
if (isView) viewMsg
else s"ambiguous implicit values:\n${coreMsg}match expected type $pt")
)
@@ -1171,13 +1222,7 @@ trait ContextErrors {
}
def DivergingImplicitExpansionError(tree: Tree, pt: Type, sym: Symbol)(implicit context0: Context) =
- if (settings.Xdivergence211.value) {
- issueTypeError(DivergentImplicitTypeError(tree, pt, sym))
- } else {
- issueDivergentImplicitsError(tree,
- "diverging implicit expansion for type "+pt+"\nstarting with "+
- sym.fullLocationString)
- }
+ issueTypeError(DivergentImplicitTypeError(tree, pt, sym))
}
object NamesDefaultsErrorsGen {
@@ -1216,7 +1261,7 @@ trait ContextErrors {
def DoubleParamNamesDefaultError(arg: Tree, name: Name, pos: Int, otherName: Option[Name])(implicit context: Context) = {
val annex = otherName match {
- case Some(oName) => "\nNote that that '"+ oName +"' is not a parameter name of the invoked method."
+ case Some(oName) => "\nNote that '"+ oName +"' is not a parameter name of the invoked method."
case None => ""
}
issueNormalTypeError(arg, "parameter '"+ name +"' is already specified at parameter position "+ pos + annex)
@@ -1228,145 +1273,4 @@ trait ContextErrors {
setError(arg)
}
}
-
- // using an exception here is actually a good idea
- // because the lifespan of this exception is extremely small and controlled
- // moreover exceptions let us avoid an avalanche of "if (!hasError) do stuff" checks
- case object MacroBodyTypecheckException extends Exception with scala.util.control.ControlThrowable
-
- trait MacroErrors {
- self: MacroTyper =>
-
- private implicit val context0 = typer.context
- val context = typer.context
-
- // helpers
-
- private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
- val noun = if (flavor == "value") "parameter" else "type parameter"
- val message = noun + " lists have different length, " + violation + " extra " + noun
- val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
- message + suffix
- }
-
- private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
-
- private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
- var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
- if (abbreviate) argsPart = abbreviateCoreAliases(argsPart)
- var retPart = restpe.toString
- if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
- argsPart + ": " + retPart
- }
-
- // not exactly an error generator, but very related
- // and I dearly wanted to push it away from Macros.scala
- private def checkSubType(slot: String, rtpe: Type, atpe: Type) = {
- val ok = if (macroDebugVerbose || settings.explaintypes.value) {
- if (rtpe eq atpe) println(rtpe + " <: " + atpe + "?" + EOL + "true")
- withTypesExplained(rtpe <:< atpe)
- } else rtpe <:< atpe
- if (!ok) {
- compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString)))
- }
- }
-
- // errors
-
- private def fail() = {
- // need to set the IS_ERROR flag to prohibit spurious expansions
- if (macroDef != null) macroDef setFlag IS_ERROR
- // not setting ErrorSymbol as in `infer.setError`, because we still need to know that it's a macro
- // otherwise assignTypeToTree in Namers might fail if macroDdef.tpt == EmptyTree
- macroDdef setType ErrorType
- throw MacroBodyTypecheckException
- }
-
- private def genericError(tree: Tree, message: String) = {
- issueNormalTypeError(tree, message)
- fail()
- }
-
- private def implRefError(message: String) = {
- val treeInfo.Applied(implRef, _, _) = macroDdef.rhs
- genericError(implRef, message)
- }
-
- private def compatibilityError(message: String) =
- implRefError(
- "macro implementation has incompatible shape:"+
- "\n required: " + showMeth(rparamss, rret, abbreviate = true) +
- "\n found : " + showMeth(aparamss, aret, abbreviate = false) +
- "\n" + message)
-
- // Phase I: sanity checks
-
- def MacroDefIsFastTrack() = {
- macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
- assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
- throw MacroBodyTypecheckException // don't call fail, because we don't need IS_ERROR
- }
-
- def MacroDefIsQmarkQmarkQmark() = {
- macroLogVerbose("typecheck terminated unexpectedly: macro is ???")
- throw MacroBodyTypecheckException
- }
-
- def MacroFeatureNotEnabled() = {
- macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
- fail()
- }
-
- // Phase II: typecheck the right-hand side of the macro def
-
- // do nothing, just fail. relevant typecheck errors have already been reported
- def MacroDefUntypeableBodyError() = fail()
-
- def MacroDefInvalidBodyError() = genericError(macroDdef, "macro body has wrong shape:\n required: macro [<implementation object>].<method name>[[<type args>]]")
-
- def MacroImplNotPublicError() = implRefError("macro implementation must be public")
-
- def MacroImplOverloadedError() = implRefError("macro implementation cannot be overloaded")
-
- def MacroImplWrongNumberOfTypeArgumentsError(macroImplRef: Tree) = {
- val MacroImplReference(owner, meth, targs) = macroImplRef
- val diagnostic = if (meth.typeParams.length > targs.length) "has too few type arguments" else "has too many arguments"
- implRefError(s"macro implementation reference $diagnostic for " + treeSymTypeMsg(macroImplRef))
- }
-
- def MacroImplNotStaticError() = implRefError("macro implementation must be in statically accessible object")
-
- // Phase III: check compatibility between the macro def and its macro impl
- // aXXX (e.g. aparams) => characteristics of the macro impl ("a" stands for "actual")
- // rXXX (e.g. rparams) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
-
- def MacroImplNonTagImplicitParameters(params: List[Symbol]) = implRefError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
-
- def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
-
- def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
-
- def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
-
- def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkSubType("parameter " + rparam.name, rparam.tpe, atpe)
-
- def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkSubType("return type", atpe, rret)
-
- def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
-
- def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
- if (isRepeated(rparam) && !isRepeated(aparam))
- compatibilityError("types incompatible for parameter " + rparam.name + ": corresponding is not a vararg parameter")
- if (!isRepeated(rparam) && isRepeated(aparam))
- compatibilityError("types incompatible for parameter " + aparam.name + ": corresponding is not a vararg parameter")
- }
-
- def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
- compatibilityError(typer.infer.InferErrorGen.NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value))
-
- def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) =
- compatibilityError(
- "type parameters "+(atparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+
- ex.getMessage)
- }
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 3fe98ed127..53bc9a2772 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package typechecker
-import symtab.Flags._
-import scala.collection.mutable.{LinkedHashSet, Set}
+import scala.collection.{ immutable, mutable }
import scala.annotation.tailrec
+import scala.reflect.internal.util.shortClassOfInstance
/**
* @author Martin Odersky
@@ -16,32 +16,63 @@ import scala.annotation.tailrec
*/
trait Contexts { self: Analyzer =>
import global._
+ import definitions.{ JavaLangPackage, ScalaPackage, PredefModule, ScalaXmlTopScope, ScalaXmlPackage }
+ import ContextMode._
- object NoContext extends Context {
- outer = this
+ protected def onTreeCheckerError(pos: Position, msg: String): Unit = ()
+
+ object NoContext
+ extends Context(EmptyTree, NoSymbol, EmptyScope, NoCompilationUnit,
+ null) { // We can't pass the uninitialized `this`. Instead, we treat null specially in `Context#outer`
enclClass = this
enclMethod = this
+ override val depth = 0
override def nextEnclosing(p: Context => Boolean): Context = this
override def enclosingContextChain: List[Context] = Nil
override def implicitss: List[List[ImplicitInfo]] = Nil
+ override def imports: List[ImportInfo] = Nil
+ override def firstImport: Option[ImportInfo] = None
override def toString = "NoContext"
}
private object RootImports {
- import definitions._
// Possible lists of root imports
val javaList = JavaLangPackage :: Nil
val javaAndScalaList = JavaLangPackage :: ScalaPackage :: Nil
val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
}
+ def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) =
+ LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2")
+ def ambiguousDefnAndImport(owner: Symbol, imp: ImportInfo) =
+ LookupAmbiguous(s"it is both defined in $owner and imported subsequently by \n$imp")
+
private lazy val startContext = {
NoContext.make(
- Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
+ Template(List(), noSelfType, List()) setSymbol global.NoSymbol setType global.NoType,
rootMirror.RootClass,
rootMirror.RootClass.info.decls)
}
+ private lazy val allUsedSelectors =
+ mutable.Map[ImportInfo, Set[ImportSelector]]() withDefaultValue Set()
+ private lazy val allImportInfos =
+ mutable.Map[CompilationUnit, List[ImportInfo]]() withDefaultValue Nil
+
+ def warnUnusedImports(unit: CompilationUnit) = {
+ for (imps <- allImportInfos.remove(unit)) {
+ for (imp <- imps.reverse.distinct) {
+ val used = allUsedSelectors(imp)
+ def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD
+
+ imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel =>
+ unit.warning(imp posOf sel, "Unused import")
+ }
+ }
+ allUsedSelectors --= imps
+ }
+ }
+
var lastAccessCheckDetails: String = ""
/** List of symbols to import from in a root context. Typically that
@@ -55,292 +86,407 @@ trait Contexts { self: Analyzer =>
protected def rootImports(unit: CompilationUnit): List[Symbol] = {
assert(definitions.isDefinitionsInitialized, "definitions uninitialized")
- if (settings.noimports.value) Nil
+ if (settings.noimports) Nil
else if (unit.isJava) RootImports.javaList
- else if (settings.nopredef.value || treeInfo.noPredefImportForUnit(unit.body)) RootImports.javaAndScalaList
+ else if (settings.nopredef || treeInfo.noPredefImportForUnit(unit.body)) {
+ debuglog("Omitted import of Predef._ for " + unit)
+ RootImports.javaAndScalaList
+ }
else RootImports.completeList
}
- def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, false)
- def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, false)
- def rootContext(unit: CompilationUnit, tree: Tree, erasedTypes: Boolean): Context = {
- import definitions._
- var sc = startContext
- for (sym <- rootImports(unit)) {
- sc = sc.makeNewImport(sym)
- sc.depth += 1
- }
- val c = sc.make(unit, tree, sc.owner, sc.scope, sc.imports)
+
+ def rootContext(unit: CompilationUnit, tree: Tree = EmptyTree, erasedTypes: Boolean = false): Context = {
+ val rootImportsContext = (startContext /: rootImports(unit))((c, sym) => c.make(gen.mkWildcardImport(sym)))
+
+ // there must be a scala.xml package when xml literals were parsed in this unit
+ if (unit.hasXml && ScalaXmlPackage == NoSymbol)
+ unit.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see https://github.com/scala/scala/wiki/Scala-2.11#xml.")
+
+ // scala-xml needs `scala.xml.TopScope` to be in scope globally as `$scope`
+ // We detect `scala-xml` by looking for `scala.xml.TopScope` and
+ // inject the equivalent of `import scala.xml.{TopScope => $scope}`
+ val contextWithXML =
+ if (!unit.hasXml || ScalaXmlTopScope == NoSymbol) rootImportsContext
+ else rootImportsContext.make(gen.mkImport(ScalaXmlPackage, nme.TopScope, nme.dollarScope))
+
+ val c = contextWithXML.make(tree, unit = unit)
if (erasedTypes) c.setThrowErrors() else c.setReportErrors()
- c.implicitsEnabled = !erasedTypes
- c.enrichmentEnabled = c.implicitsEnabled
+ c(EnrichmentEnabled | ImplicitsEnabled) = !erasedTypes
c
}
def resetContexts() {
- var sc = startContext
- while (sc != NoContext) {
- sc.tree match {
- case Import(qual, _) => qual.tpe = singleType(qual.symbol.owner.thisType, qual.symbol)
- case _ =>
+ startContext.enclosingContextChain foreach { context =>
+ context.tree match {
+ case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol)
+ case _ =>
}
- sc.flushAndReturnBuffer()
- sc.flushAndReturnWarningsBuffer()
- sc = sc.outer
+ context.reportBuffer.clearAll()
}
}
- private object Errors {
- final val ReportErrors = 1 << 0
- final val BufferErrors = 1 << 1
- final val AmbiguousErrors = 1 << 2
- final val notThrowMask = ReportErrors | BufferErrors
- final val AllMask = ReportErrors | BufferErrors | AmbiguousErrors
- }
+ /**
+ * A motley collection of the state and loosely associated behaviour of the type checker.
+ * Each `Typer` has an associated context, and as it descends into the tree new `(Typer, Context)`
+ * pairs are spawned.
+ *
+ * Meet the crew; first the state:
+ *
+ * - A tree, symbol, and scope representing the focus of the typechecker
+ * - An enclosing context, `outer`.
+ * - The current compilation unit.
+ * - A variety of bits that track the current error reporting policy (more on this later);
+ * whether or not implicits/macros are enabled, whether we are in a self or super call or
+ * in a constructor suffix. These are represented as bits in the mask `contextMode`.
+ * - Some odds and ends: undetermined type parameters of the current line of type inference;
+ * contextual augmentation for error messages, tracking of the nesting depth.
+ *
+ * And behaviour:
+ *
+ * - The central point for issuing errors and warnings from the typechecker, with a means
+ * to buffer these for use in 'silent' type checking, when some recovery might be possible.
+ * - `Context` is something of a Zipper for the tree we are typechecking: its `enclosingContextChain`
+ * is the path back to the root. This is exactly what we need to resolve names (`lookupSymbol`)
+ * and to collect in-scope implicit definitions (`implicitss`).
+ * Supporting these are `imports`, which represents all `Import` trees in the enclosing context chain.
+ * - In a similar vein, we can assess accessibility (`isAccessible`).
+ *
+ * More on error buffering:
+ * When are type errors recoverable? In quite a few places, it turns out. Some examples:
+ * trying to type an application with/without the expected type, or with/without implicit views
+ * enabled. This is usually mediated by `Typer.silent`, `Inferencer#tryTwice`.
+ *
+ * Initially, starting from the `typer` phase, the contexts either buffer or report errors;
+ * afterwards errors are thrown. This is configured in `rootContext`. Additionally, more
+ * fine-grained control is needed based on the kind of error; ambiguity errors are often
+ * suppressed during exploratory typing, such as determining whether `a == b` in an argument
+ * position is an assignment or a named argument, when `Inferencer#isApplicableSafe` type checks
+ * applications with and without an expected type, or when `Typer#tryTypedApply` tries to fit arguments to
+ * a function type with/without implicit views.
+ *
+ * When the error policy entails error/warning buffering, the mutable [[ReportBuffer]] records
+ * everything that is issued. It is important to note that child Contexts created with `make`
+ * "inherit" the very same `ReportBuffer` instance, whereas children spawned through `makeSilent`
+ * receive a separate, fresh buffer.
+ *
+ * @param tree Tree associated with this context
+ * @param owner The current owner
+ * @param scope The current scope
+ * @param _outer The next outer context.
+ */
+ class Context private[typechecker](val tree: Tree, val owner: Symbol, val scope: Scope,
+ val unit: CompilationUnit, _outer: Context) {
+ private def outerIsNoContext = _outer eq null
+ final def outer: Context = if (outerIsNoContext) NoContext else _outer
- class Context private[typechecker] {
- import Errors._
-
- var unit: CompilationUnit = NoCompilationUnit
- var tree: Tree = _ // Tree associated with this context
- var owner: Symbol = NoSymbol // The current owner
- var scope: Scope = _ // The current scope
- var outer: Context = _ // The next outer context
- var enclClass: Context = _ // The next outer context whose tree is a
- // template or package definition
- @inline final def savingEnclClass[A](c: Context)(a: => A): A = {
+ /** The next outer context whose tree is a template or package definition */
+ var enclClass: Context = _
+
+ @inline private def savingEnclClass[A](c: Context)(a: => A): A = {
val saved = enclClass
enclClass = c
try a finally enclClass = saved
}
- var enclMethod: Context = _ // The next outer context whose tree is a method
- var variance: Int = _ // Variance relative to enclosing class
- private var _undetparams: List[Symbol] = List() // Undetermined type parameters,
- // not inherited to child contexts
- var depth: Int = 0
- var imports: List[ImportInfo] = List() // currently visible imports
- var openImplicits: List[OpenImplicit] = List() // types for which implicit arguments
- // are currently searched
- // for a named application block (Tree) the corresponding NamedApplyInfo
- var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None
- var prefix: Type = NoPrefix
- var inConstructorSuffix = false // are we in a secondary constructor
- // after the this constructor call?
- var returnsSeen = false // for method context: were returns encountered?
- var inSelfSuperCall = false // is this context (enclosed in) a constructor call?
- // (the call to the super or self constructor in the first line of a constructor)
- // in this context the object's fields should not be in scope
+ /** A bitmask containing all the boolean flags in a context, e.g. are implicit views enabled */
+ var contextMode: ContextMode = ContextMode.DefaultMode
- var diagnostic: List[String] = Nil // these messages are printed when issuing an error
- var implicitsEnabled = false
- var macrosEnabled = true
- var enrichmentEnabled = false // to selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed
- var checking = false
- var retyping = false
+ /** Update all modes in `mask` to `value` */
+ def update(mask: ContextMode, value: Boolean) {
+ contextMode = contextMode.set(value, mask)
+ }
+
+ /** Set all modes in the mask `enable` to true, and all in `disable` to false. */
+ def set(enable: ContextMode = NOmode, disable: ContextMode = NOmode): this.type = {
+ contextMode = contextMode.set(true, enable).set(false, disable)
+ this
+ }
+
+ /** Is this context in all modes in the given `mask`? */
+ def apply(mask: ContextMode): Boolean = contextMode.inAll(mask)
+
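  // Illustrative sketch (not part of this patch): for some context `ctx`, the three members
  // above are used together like this (ImplicitsEnabled etc. are ContextMode bit masks):
  //
  //   ctx(ImplicitsEnabled) = false                         // update: assign one or more bits
  //   ctx.set(enable = MacrosEnabled, disable = Checking)   // set: enable and disable in one go
  //   if (ctx(MacrosEnabled | ImplicitsEnabled)) ...        // apply: true only if *all* bits are set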
+ /** The next outer context whose tree is a method */
+ var enclMethod: Context = _
+
+ /** Variance relative to enclosing class */
+ var variance: Variance = Variance.Invariant
+
+ private var _undetparams: List[Symbol] = List()
- var savedTypeBounds: List[(Symbol, Type)] = List() // saved type bounds
- // for type parameters which are narrowed in a GADT
+ protected def outerDepth = if (outerIsNoContext) 0 else outer.depth
- var typingIndentLevel: Int = 0
- def typingIndent = " " * typingIndentLevel
+ val depth: Int = {
+ val increasesDepth = isRootImport || outerIsNoContext || (outer.scope != scope)
+ ( if (increasesDepth) 1 else 0 ) + outerDepth
+ }
+
+ /** The currently visible imports */
+ def imports: List[ImportInfo] = outer.imports
+ /** Equivalent to `imports.headOption`, but more efficient */
+ def firstImport: Option[ImportInfo] = outer.firstImport
+ def isRootImport: Boolean = false
- var buffer: Set[AbsTypeError] = _
- var warningsBuffer: Set[(Position, String)] = _
+ /** Types for which implicit arguments are currently searched */
+ var openImplicits: List[OpenImplicit] = List()
+ /* For a named application block (`Tree`) the corresponding `NamedApplyInfo`. */
+ var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None
+ var prefix: Type = NoPrefix
+
+ def inSuperInit_=(value: Boolean) = this(SuperInit) = value
+ def inSuperInit = this(SuperInit)
+ def inConstructorSuffix_=(value: Boolean) = this(ConstructorSuffix) = value
+ def inConstructorSuffix = this(ConstructorSuffix)
+ def inPatAlternative_=(value: Boolean) = this(PatternAlternative) = value
+ def inPatAlternative = this(PatternAlternative)
+ def starPatterns_=(value: Boolean) = this(StarPatterns) = value
+ def starPatterns = this(StarPatterns)
+ def returnsSeen_=(value: Boolean) = this(ReturnsSeen) = value
+ def returnsSeen = this(ReturnsSeen)
+ def inSelfSuperCall_=(value: Boolean) = this(SelfSuperCall) = value
+ def inSelfSuperCall = this(SelfSuperCall)
+ def implicitsEnabled_=(value: Boolean) = this(ImplicitsEnabled) = value
+ def implicitsEnabled = this(ImplicitsEnabled)
+ def macrosEnabled_=(value: Boolean) = this(MacrosEnabled) = value
+ def macrosEnabled = this(MacrosEnabled)
+ def enrichmentEnabled_=(value: Boolean) = this(EnrichmentEnabled) = value
+ def enrichmentEnabled = this(EnrichmentEnabled)
+ def checking_=(value: Boolean) = this(Checking) = value
+ def checking = this(Checking)
+ def retyping_=(value: Boolean) = this(ReTyping) = value
+ def retyping = this(ReTyping)
+ def inSecondTry = this(SecondTry)
+ def inSecondTry_=(value: Boolean) = this(SecondTry) = value
+ def inReturnExpr = this(ReturnExpr)
+ def inTypeConstructorAllowed = this(TypeConstructorAllowed)
+
+ def defaultModeForTyped: Mode = if (inTypeConstructorAllowed) Mode.NOmode else Mode.EXPRmode
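  // Illustrative sketch (not part of this patch): each accessor pair above is sugar over the
  // ContextMode mask, so for a context `ctx` the following two lines are equivalent:
  //
  //   ctx.implicitsEnabled = false
  //   ctx(ImplicitsEnabled) = false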
+
+ /** These messages are printed when issuing an error */
+ var diagnostic: List[String] = Nil
+
+ /** Saved type bounds for type parameters which are narrowed in a GADT. */
+ var savedTypeBounds: List[(Symbol, Type)] = List()
+
+ /** The next enclosing context (potentially `this`) that is owned by a class or method */
def enclClassOrMethod: Context =
- if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
+ if (!owner.exists || owner.isClass || owner.isMethod) this
else outer.enclClassOrMethod
+ /** The next enclosing context (potentially `this`) that has a `CaseDef` as a tree */
+ def enclosingCaseDef = nextEnclosing(_.tree.isInstanceOf[CaseDef])
+
+ /** ...or an Apply. */
+ def enclosingApply = nextEnclosing(_.tree.isInstanceOf[Apply])
+
+ def siteString = {
+ def what_s = if (owner.isConstructor) "" else owner.kindString
+ def where_s = if (owner.isClass) "" else "in " + enclClass.owner.decodedName
+ List(what_s, owner.decodedName, where_s) filterNot (_ == "") mkString " "
+ }
+ //
+ // Tracking undetermined type parameters for type argument inference.
+ //
def undetparamsString =
if (undetparams.isEmpty) ""
else undetparams.mkString("undetparams=", ", ", "")
- def undetparams = _undetparams
+ /** Undetermined type parameters. See `Infer#{inferExprInstance, adjustTypeArgs}`. Not inherited to child contexts */
+ def undetparams: List[Symbol] = _undetparams
def undetparams_=(ps: List[Symbol]) = { _undetparams = ps }
- def extractUndetparams() = {
+ /** Return and clear the undetermined type parameters */
+ def extractUndetparams(): List[Symbol] = {
val tparams = undetparams
undetparams = List()
tparams
}
- private[this] var mode = 0
-
- def errBuffer = buffer
- def hasErrors = buffer.nonEmpty
- def hasWarnings = warningsBuffer.nonEmpty
-
- def state: Int = mode
- def restoreState(state0: Int) = mode = state0
-
- def reportErrors = (state & ReportErrors) != 0
- def bufferErrors = (state & BufferErrors) != 0
- def ambiguousErrors = (state & AmbiguousErrors) != 0
- def throwErrors = (state & notThrowMask) == 0
-
- def setReportErrors() = mode = (ReportErrors | AmbiguousErrors)
- def setBufferErrors() = {
- //assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
- mode = BufferErrors
+ /** Run `body` with this context with no undetermined type parameters, restoring the
+ * original list afterwards.
+ * @param reportAmbiguous Should ambiguous errors be reported during evaluation of `body`?
+ */
+ def savingUndeterminedTypeParams[A](reportAmbiguous: Boolean = ambiguousErrors)(body: => A): A = {
+ withMode() {
+ this(AmbiguousErrors) = reportAmbiguous
+ val saved = extractUndetparams()
+ try body
+ finally undetparams = saved
+ }
}
- def setThrowErrors() = mode &= (~AllMask)
- def setAmbiguousErrors(report: Boolean) = if (report) mode |= AmbiguousErrors else mode &= notThrowMask
- def updateBuffer(errors: Set[AbsTypeError]) = buffer ++= errors
- def condBufferFlush(removeP: AbsTypeError => Boolean) {
- val elems = buffer.filter(removeP)
- buffer --= elems
- }
- def flushBuffer() { buffer.clear() }
- def flushAndReturnBuffer(): Set[AbsTypeError] = {
- val current = buffer.clone()
- buffer.clear()
+ //
+ // Error reporting policies and buffer.
+ //
+
+ private var _reportBuffer: ReportBuffer = new ReportBuffer
+ /** A buffer for errors and warnings, used with `this.bufferErrors == true` */
+ def reportBuffer = _reportBuffer
+ /** Discard the current report buffer, and replace with an empty one */
+ def useFreshReportBuffer() = _reportBuffer = new ReportBuffer
+ /** Discard the current report buffer, and replace with `other` */
+ def restoreReportBuffer(other: ReportBuffer) = _reportBuffer = other
+
+ /** The first error, if any, in the report buffer */
+ def firstError: Option[AbsTypeError] = reportBuffer.firstError
+ def errors: Seq[AbsTypeError] = reportBuffer.errors
+ /** Does the report buffer contain any errors? */
+ def hasErrors = reportBuffer.hasErrors
+
+ def reportErrors = this(ReportErrors)
+ def bufferErrors = this(BufferErrors)
+ def ambiguousErrors = this(AmbiguousErrors)
+ def throwErrors = contextMode.inNone(ReportErrors | BufferErrors)
+
+ def setReportErrors(): Unit = set(enable = ReportErrors | AmbiguousErrors, disable = BufferErrors)
+ def setBufferErrors(): Unit = set(enable = BufferErrors, disable = ReportErrors | AmbiguousErrors)
+ def setThrowErrors(): Unit = this(ReportErrors | AmbiguousErrors | BufferErrors) = false
+ def setAmbiguousErrors(report: Boolean): Unit = this(AmbiguousErrors) = report
+
+ /** Append the given errors to the report buffer */
+ def updateBuffer(errors: Traversable[AbsTypeError]) = reportBuffer ++= errors
+ /** Clear all errors from the report buffer */
+ def flushBuffer() { reportBuffer.clearAllErrors() }
+ /** Return and clear all errors from the report buffer */
+ def flushAndReturnBuffer(): immutable.Seq[AbsTypeError] = {
+ val current = reportBuffer.errors
+ reportBuffer.clearAllErrors()
current
}
- def flushAndReturnWarningsBuffer(): Set[(Position, String)] = {
- val current = warningsBuffer.clone()
- warningsBuffer.clear()
- current
+
+ /** Issue and clear all warnings from the report buffer */
+ def flushAndIssueWarnings() {
+ reportBuffer.warnings foreach {
+ case (pos, msg) => unit.warning(pos, msg)
+ }
+ reportBuffer.clearAllWarnings()
}
- def logError(err: AbsTypeError) = buffer += err
+ //
+ // Temporary mode adjustment
+ //
- def withImplicitsEnabled[T](op: => T): T = {
- val saved = implicitsEnabled
- implicitsEnabled = true
+ @inline def withMode[T](enabled: ContextMode = NOmode, disabled: ContextMode = NOmode)(op: => T): T = {
+ val saved = contextMode
+ set(enabled, disabled)
try op
- finally implicitsEnabled = saved
+ finally contextMode = saved
}
- def withImplicitsDisabled[T](op: => T): T = {
- val saved = implicitsEnabled
- implicitsEnabled = false
- val savedP = enrichmentEnabled
- enrichmentEnabled = false
- try op
- finally {
- implicitsEnabled = saved
- enrichmentEnabled = savedP
- }
+ @inline final def withImplicitsEnabled[T](op: => T): T = withMode(enabled = ImplicitsEnabled)(op)
+ @inline final def withImplicitsDisabled[T](op: => T): T = withMode(disabled = ImplicitsEnabled | EnrichmentEnabled)(op)
+ @inline final def withImplicitsDisabledAllowEnrichment[T](op: => T): T = withMode(enabled = EnrichmentEnabled, disabled = ImplicitsEnabled)(op)
+ @inline final def withMacrosEnabled[T](op: => T): T = withMode(enabled = MacrosEnabled)(op)
+ @inline final def withMacrosDisabled[T](op: => T): T = withMode(disabled = MacrosEnabled)(op)
+ @inline final def withinStarPatterns[T](op: => T): T = withMode(enabled = StarPatterns)(op)
+ @inline final def withinSuperInit[T](op: => T): T = withMode(enabled = SuperInit)(op)
+ @inline final def withinSecondTry[T](op: => T): T = withMode(enabled = SecondTry)(op)
+ @inline final def withinPatAlternative[T](op: => T): T = withMode(enabled = PatternAlternative)(op)
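  // Illustrative sketch (not part of this patch): the wrappers above scope a mode change to a
  // single computation and restore contextMode afterwards; e.g., with a hypothetical `typeCheck`:
  //
  //   val typedArg = withMacrosDisabled(typeCheck(arg))   // MacrosEnabled is off only for this call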
+
+ /** TypeConstructorAllowed is enabled when we are typing a higher-kinded type.
+ * adapt should then check kind-arity based on the prototypical type's kind
+ * arity. Type arguments should not be inferred.
+ */
+ @inline final def withinTypeConstructorAllowed[T](op: => T): T = withMode(enabled = TypeConstructorAllowed)(op)
+
+ /* TODO - consolidate returnsSeen (which seems only to be used by checkDead)
+ * and ReturnExpr.
+ */
+ @inline final def withinReturnExpr[T](op: => T): T = {
+ enclMethod.returnsSeen = true
+ withMode(enabled = ReturnExpr)(op)
}
- def withImplicitsDisabledAllowEnrichment[T](op: => T): T = {
- val saved = implicitsEnabled
- implicitsEnabled = false
- val savedP = enrichmentEnabled
- enrichmentEnabled = true
- try op
- finally {
- implicitsEnabled = saved
- enrichmentEnabled = savedP
+ // See comment on FormerNonStickyModes.
+ @inline final def withOnlyStickyModes[T](op: => T): T = withMode(disabled = FormerNonStickyModes)(op)
+
+ /** @return true if the `expr` evaluates to true within a silent Context that incurs no errors */
+ @inline final def inSilentMode(expr: => Boolean): Boolean = {
+ withMode() { // withMode with no arguments to restore the mode mutated by `setBufferErrors`.
+ setBufferErrors()
+ try expr && !hasErrors
+ finally reportBuffer.clearAll()
}
}
- def withMacrosEnabled[T](op: => T): T = {
- val saved = macrosEnabled
- macrosEnabled = true
- try op
- finally macrosEnabled = saved
- }
+ //
+ // Child Context Creation
+ //
- def withMacrosDisabled[T](op: => T): T = {
- val saved = macrosEnabled
- macrosEnabled = false
- try op
- finally macrosEnabled = saved
- }
-
- def make(unit: CompilationUnit, tree: Tree, owner: Symbol,
- scope: Scope, imports: List[ImportInfo]): Context = {
- val c = new Context
- c.unit = unit
- c.tree = tree
- c.owner = owner
- c.scope = scope
- c.outer = this
-
- tree match {
- case Template(_, _, _) | PackageDef(_, _) =>
- c.enclClass = c
- c.prefix = c.owner.thisType
- c.inConstructorSuffix = false
- case _ =>
- c.enclClass = this.enclClass
- c.prefix =
- if (c.owner != this.owner && c.owner.isTerm) NoPrefix
- else this.prefix
- c.inConstructorSuffix = this.inConstructorSuffix
+ /**
+ * Construct a child context. The parent and child will share the report buffer.
+ * Compare with `makeSilent`, in which the child has a fresh report buffer.
+ *
+ * If `tree` is an `Import`, that import will be available at the head of
+ * `Context#imports`.
+ */
+ def make(tree: Tree = tree, owner: Symbol = owner,
+ scope: Scope = scope, unit: CompilationUnit = unit): Context = {
+ val isTemplateOrPackage = tree match {
+ case _: Template | _: PackageDef => true
+ case _ => false
+ }
+ val isDefDef = tree match {
+ case _: DefDef => true
+ case _ => false
}
- tree match {
- case DefDef(_, _, _, _, _, _) =>
- c.enclMethod = c
- case _ =>
- c.enclMethod = this.enclMethod
+ val isImport = tree match {
+ case _: Import => true
+ case _ => false
}
- c.variance = this.variance
- c.depth = if (scope == this.scope) this.depth else this.depth + 1
- c.imports = imports
- c.inSelfSuperCall = inSelfSuperCall
- c.restoreState(this.state)
- c.diagnostic = this.diagnostic
- c.typingIndentLevel = typingIndentLevel
- c.implicitsEnabled = this.implicitsEnabled
- c.macrosEnabled = this.macrosEnabled
- c.enrichmentEnabled = this.enrichmentEnabled
- c.checking = this.checking
- c.retyping = this.retyping
- c.openImplicits = this.openImplicits
- c.buffer = if (this.buffer == null) LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize
- c.warningsBuffer = if (this.warningsBuffer == null) LinkedHashSet[(Position, String)]() else this.warningsBuffer
+ val sameOwner = owner == this.owner
+ val prefixInChild =
+ if (isTemplateOrPackage) owner.thisType
+ else if (!sameOwner && owner.isTerm) NoPrefix
+ else prefix
+
+ // The blank canvas
+ val c = if (isImport)
+ new Context(tree, owner, scope, unit, this) with ImportContext
+ else
+ new Context(tree, owner, scope, unit, this)
+
+ // Fields that are directly propagated
+ c.variance = variance
+ c.diagnostic = diagnostic
+ c.openImplicits = openImplicits
+ c.contextMode = contextMode // note: ConstructorSuffix, a bit within `mode`, is conditionally overwritten below.
+ c._reportBuffer = reportBuffer
+
+ // Fields that may take on a different value in the child
+ c.prefix = prefixInChild
+ c.enclClass = if (isTemplateOrPackage) c else enclClass
+ c(ConstructorSuffix) = !isTemplateOrPackage && c(ConstructorSuffix)
+ c.enclMethod = if (isDefDef) c else enclMethod
+
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
c
}
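  // Illustrative sketch (not part of this patch): with the default arguments above, child
  // contexts are usually spawned by overriding only the field that changes, e.g.
  // (`importTree`, `defDefTree` and `defSym` are hypothetical):
  //
  //   val importCtx = make(tree = importTree)    // yields an ImportContext, shares the report buffer
  //   val methodCtx = make(tree = defDefTree, owner = defSym, scope = newNestedScope(scope))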
- // TODO: remove? Doesn't seem to be used
- def make(unit: CompilationUnit): Context = {
- val c = make(unit, EmptyTree, owner, scope, imports)
- c.setReportErrors()
- c.implicitsEnabled = true
- c.macrosEnabled = true
- c
- }
-
- def makeNewImport(sym: Symbol): Context =
- makeNewImport(gen.mkWildcardImport(sym))
-
- def makeNewImport(imp: Import): Context =
- make(unit, imp, owner, scope, new ImportInfo(imp, depth) :: imports)
-
def make(tree: Tree, owner: Symbol, scope: Scope): Context =
+ // TODO SI-7345 Moving this optimization into the main overload of `make` causes all tests to fail,
+ // even if it is extended to check that `unit == this.unit`. Why is this?
if (tree == this.tree && owner == this.owner && scope == this.scope) this
- else make0(tree, owner, scope)
-
- private def make0(tree: Tree, owner: Symbol, scope: Scope): Context =
- make(unit, tree, owner, scope, imports)
+ else make(tree, owner, scope, unit)
+ /** Make a child context that represents a new nested scope */
def makeNewScope(tree: Tree, owner: Symbol): Context =
make(tree, owner, newNestedScope(scope))
- // IDE stuff: distinguish between scopes created for typing and scopes created for naming.
- def make(tree: Tree, owner: Symbol): Context =
- make0(tree, owner, scope)
-
- def make(tree: Tree): Context =
- make(tree, owner)
-
- def makeSilent(reportAmbiguousErrors: Boolean, newtree: Tree = tree): Context = {
+ /** Make a child context that buffers errors and warnings into a fresh report buffer. */
+ def makeSilent(reportAmbiguousErrors: Boolean = ambiguousErrors, newtree: Tree = tree): Context = {
val c = make(newtree)
c.setBufferErrors()
c.setAmbiguousErrors(reportAmbiguousErrors)
- c.buffer = new LinkedHashSet[AbsTypeError]()
+ c._reportBuffer = new ReportBuffer // A fresh buffer so as not to leak errors/warnings into `this`.
c
}
+ /** Make a silent child context that does not allow implicits. Used to prevent chaining of implicit views. */
def makeImplicit(reportAmbiguousErrors: Boolean) = {
val c = makeSilent(reportAmbiguousErrors)
- c.implicitsEnabled = false
- c.enrichmentEnabled = false
+ c(ImplicitsEnabled | EnrichmentEnabled) = false
c
}
@@ -355,12 +501,10 @@ trait Contexts { self: Analyzer =>
* accessible.
*/
def makeConstructorContext = {
- var baseContext = enclClass.outer
- while (baseContext.tree.isInstanceOf[Template])
- baseContext = baseContext.outer
+ val baseContext = enclClass.outer.nextEnclosing(!_.tree.isInstanceOf[Template])
val argContext = baseContext.makeNewScope(tree, owner)
+ argContext.contextMode = contextMode
argContext.inSelfSuperCall = true
- argContext.restoreState(this.state)
def enterElems(c: Context) {
def enterLocalElems(e: ScopeEntry) {
if (e != null && e.owner == c.scope) {
@@ -368,7 +512,7 @@ trait Contexts { self: Analyzer =>
argContext.scope enter e.sym
}
}
- if (c.owner.isTerm && !c.owner.isLocalDummy) {
+ if (c.isLocal && !c.owner.isLocalDummy) {
enterElems(c.outer)
enterLocalElems(c.scope.elems)
}
@@ -379,6 +523,10 @@ trait Contexts { self: Analyzer =>
argContext
}
+ //
+ // Error and warning issuance
+ //
+
private def addDiagString(msg: String) = {
val ds =
if (diagnostic.isEmpty) ""
@@ -386,23 +534,27 @@ trait Contexts { self: Analyzer =>
if (msg endsWith ds) msg else msg + ds
}
- private def unitError(pos: Position, msg: String) =
- unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
+ private def unitError(pos: Position, msg: String): Unit =
+ if (checking) onTreeCheckerError(pos, msg) else unit.error(pos, msg)
@inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) {
- debugwarn("issue error: " + err.errMsg)
- if (settings.Yissuedebug.value) (new Exception).printStackTrace()
+ if (settings.Yissuedebug) {
+ log("issue error: " + err.errMsg)
+ (new Exception).printStackTrace()
+ }
if (pf isDefinedAt err) pf(err)
- else if (bufferErrors) { buffer += err }
+ else if (bufferErrors) { reportBuffer += err }
else throw new TypeError(err.errPos, err.errMsg)
}
+ /** Issue/buffer/throw the given type error according to the current mode for error reporting. */
def issue(err: AbsTypeError) {
issueCommon(err) { case _ if reportErrors =>
unitError(err.errPos, addDiagString(err.errMsg))
}
}
+ /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) {
issueCommon(err) { case _ if ambiguousErrors =>
if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
@@ -410,44 +562,31 @@ trait Contexts { self: Analyzer =>
}
}
+ /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
def issueAmbiguousError(err: AbsTypeError) {
issueCommon(err) { case _ if ambiguousErrors => unitError(err.errPos, addDiagString(err.errMsg)) }
}
- // TODO remove
+ /** Issue/throw the given `err` according to the current mode for error reporting. */
def error(pos: Position, err: Throwable) =
if (reportErrors) unitError(pos, addDiagString(err.getMessage()))
else throw err
+ /** Issue/throw the given error message according to the current mode for error reporting. */
def error(pos: Position, msg: String) = {
val msg1 = addDiagString(msg)
if (reportErrors) unitError(pos, msg1)
else throw new TypeError(pos, msg1)
}
- def warning(pos: Position, msg: String): Unit = warning(pos, msg, false)
- def warning(pos: Position, msg: String, force: Boolean) {
+ /** Issue/buffer the given warning message according to the current mode for error reporting. */
+ def warning(pos: Position, msg: String, force: Boolean = false) {
if (reportErrors || force) unit.warning(pos, msg)
- else if (bufferErrors) warningsBuffer += ((pos, msg))
+ else if (bufferErrors) reportBuffer += (pos -> msg)
}
- def isLocal(): Boolean = tree match {
- case Block(_,_) => true
- case PackageDef(_, _) => false
- case EmptyTree => false
- case _ => outer.isLocal()
- }
-
- /** Fast path for some slow checks (ambiguous assignment in Refchecks, and
- * existence of __match for MatchTranslation in virtpatmat.) This logic probably
- * needs improvement.
- */
- def isNameInScope(name: Name) = (
- enclosingContextChain exists (ctx =>
- (ctx.scope.lookupEntry(name) != null)
- || (ctx.owner.rawInfo.member(name) != NoSymbol)
- )
- )
+ /** Is the owning symbol of this context a term? */
+ final def isLocal: Boolean = owner.isTerm
// nextOuter determines which context is searched next for implicits
// (after `this`, which contributes `newImplicits` below.) In
@@ -473,26 +612,35 @@ trait Contexts { self: Analyzer =>
def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain
- override def toString = "Context(%s@%s unit=%s scope=%s errors=%b, reportErrors=%b, throwErrors=%b)".format(
- owner.fullName, tree.shortClass, unit, scope.##, hasErrors, reportErrors, throwErrors
- )
- /** Is `sub` a subclass of `base` or a companion object of such a subclass?
- */
- def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
- sub.isNonBottomSubClass(base) ||
- sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
-
- /** Return closest enclosing context that defines a superclass of `clazz`, or a
- * companion module of a superclass of `clazz`, or NoContext if none exists */
- def enclosingSuperClassContext(clazz: Symbol): Context = {
- var c = this.enclClass
- while (c != NoContext &&
- !clazz.isNonBottomSubClass(c.owner) &&
- !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass)))
- c = c.outer.enclClass
- c
+ private def treeTruncated = tree.toString.replaceAll("\\s+", " ").lines.mkString("\\n").take(70)
+ private def treeIdString = if (settings.uniqid.value) "#" + System.identityHashCode(tree).toString.takeRight(3) else ""
+ private def treeString = tree match {
+ case x: Import => "" + x
+ case Template(parents, `noSelfType`, body) =>
+ val pstr = if ((parents eq null) || parents.isEmpty) "Nil" else parents mkString " "
+ val bstr = if (body eq null) "" else body.length + " stats"
+ s"""Template($pstr, _, $bstr)"""
+ case x => s"${tree.shortClass}${treeIdString}:${treeTruncated}"
}
+ override def toString =
+ sm"""|Context($unit) {
+ | owner = $owner
+ | tree = $treeString
+ | scope = ${scope.size} decls
+ | contextMode = $contextMode
+ | outer.owner = ${outer.owner}
+ |}"""
+
+ //
+ // Accessibility checking
+ //
+
+ /** Is `sub` a subclass of `base` or a companion object of such a subclass? */
+ private def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
+ sub.isNonBottomSubClass(base) ||
+ sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
+
/** Return the closest enclosing context that defines a subclass of `clazz`
* or a companion object thereof, or `NoContext` if no such context exists.
*/
@@ -503,22 +651,18 @@ trait Contexts { self: Analyzer =>
c
}
- /** Is `sym` accessible as a member of tree `site` with type
- * `pre` in current context?
- */
+ /** Is `sym` accessible as a member of `pre` in current context? */
def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean = false): Boolean = {
lastAccessCheckDetails = ""
// Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
+ // don't have access if there is no linked class (so exclude linkedClass=NoSymbol)
def accessWithinLinked(ab: Symbol) = {
- val linked = ab.linkedClassOfClass
- // don't have access if there is no linked class
- // (before adding the `ne NoSymbol` check, this was a no-op when linked eq NoSymbol,
- // since `accessWithin(NoSymbol) == true` whatever the symbol)
- (linked ne NoSymbol) && accessWithin(linked)
+ val linked = linkedClassOfClassOf(ab, this)
+ linked.fold(false)(accessWithin)
}
- /** Are we inside definition of `ab`? */
+ /* Are we inside definition of `ab`? */
def accessWithin(ab: Symbol) = {
// #3663: we must disregard package nesting if sym isJavaDefined
if (sym.isJavaDefined) {
@@ -530,26 +674,12 @@ trait Contexts { self: Analyzer =>
} else (owner hasTransOwner ab)
}
-/*
- var c = this
- while (c != NoContext && c.owner != owner) {
- if (c.outer eq null) abort("accessWithin(" + owner + ") " + c);//debug
- if (c.outer.enclClass eq null) abort("accessWithin(" + owner + ") " + c);//debug
- c = c.outer.enclClass
- }
- c != NoContext
- }
-*/
- /** Is `clazz` a subclass of an enclosing class? */
- def isSubClassOfEnclosing(clazz: Symbol): Boolean =
- enclosingSuperClassContext(clazz) != NoContext
-
def isSubThisType(pre: Type, clazz: Symbol): Boolean = pre match {
case ThisType(pclazz) => pclazz isNonBottomSubClass clazz
case _ => false
}
- /** Is protected access to target symbol permitted */
+ /* Is protected access to target symbol permitted */
def isProtectedAccessOK(target: Symbol) = {
val c = enclosingSubClassContext(sym.owner)
if (c == NoContext)
@@ -589,8 +719,7 @@ trait Contexts { self: Analyzer =>
( superAccess
|| pre.isInstanceOf[ThisType]
|| phase.erasedTypes
- || isProtectedAccessOK(sym)
- || (sym.allOverriddenSymbols exists isProtectedAccessOK)
+ || (sym.overrideChain exists isProtectedAccessOK)
// that last condition makes protected access via self types work.
)
)
@@ -600,27 +729,51 @@ trait Contexts { self: Analyzer =>
}
}
+ //
+ // Type bound management
+ //
+
def pushTypeBounds(sym: Symbol) {
+ sym.info match {
+ case tb: TypeBounds => if (!tb.isEmptyBounds) log(s"Saving $sym info=$tb")
+ case info => devWarning(s"Something other than a TypeBounds seen in pushTypeBounds: $info is a ${shortClassOfInstance(info)}")
+ }
savedTypeBounds ::= ((sym, sym.info))
}
def restoreTypeBounds(tp: Type): Type = {
- var current = tp
- for ((sym, info) <- savedTypeBounds) {
- debuglog("resetting " + sym + " to " + info);
- sym.info match {
- case TypeBounds(lo, hi) if (hi <:< lo && lo <:< hi) =>
- current = current.instantiateTypeParams(List(sym), List(lo))
-//@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... =>
- case _ =>
- }
- sym.setInfo(info)
+ def restore(): Type = savedTypeBounds.foldLeft(tp) { case (current, (sym, savedInfo)) =>
+ def bounds_s(tb: TypeBounds) = if (tb.isEmptyBounds) "<empty bounds>" else s"TypeBounds(lo=${tb.lo}, hi=${tb.hi})"
+ //@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... =>
+ val TypeBounds(lo, hi) = sym.info.bounds
+ val isUnique = lo <:< hi && hi <:< lo
+ val isPresent = current contains sym
+ def saved_s = bounds_s(savedInfo.bounds)
+ def current_s = bounds_s(sym.info.bounds)
+
+ if (isUnique && isPresent)
+ devWarningResult(s"Preserving inference: ${sym.nameString}=$hi in $current (based on $current_s) before restoring $sym to saved $saved_s")(
+ current.instantiateTypeParams(List(sym), List(hi))
+ )
+ else if (isPresent)
+ devWarningResult(s"Discarding inferred $current_s because it does not uniquely determine $sym in")(current)
+ else
+ logResult(s"Discarding inferred $current_s because $sym does not appear in")(current)
+ }
+ try restore()
+ finally {
+ for ((sym, savedInfo) <- savedTypeBounds)
+ sym setInfo debuglogResult(s"Discarding inferred $sym=${sym.info}, restoring saved info")(savedInfo)
+
+ savedTypeBounds = Nil
}
- savedTypeBounds = List()
- current
}
- private var implicitsCache: List[List[ImplicitInfo]] = null
+ //
+ // Implicit collection
+ //
+
+ private var implicitsCache: List[ImplicitInfo] = null
private var implicitsRunId = NoRunId
def resetCache() {
@@ -662,7 +815,7 @@ trait Contexts { self: Analyzer =>
case ImportSelector(from, _, to, _) :: sels1 =>
var impls = collect(sels1) filter (info => info.name != from)
if (to != nme.WILDCARD) {
- for (sym <- imp.importedSymbol(to).alternatives)
+ for (sym <- importedAccessibleSymbol(imp, to).alternatives)
if (isQualifyingImplicit(to, sym, pre, imported = true))
impls = new ImplicitInfo(to, pre, sym) :: impls
}
@@ -679,33 +832,351 @@ trait Contexts { self: Analyzer =>
* filtered out later by `eligibleInfos` (SI-4270 / 9129cfe9), as they don't type-check.
*/
def implicitss: List[List[ImplicitInfo]] = {
- if (implicitsRunId != currentRunId) {
- implicitsRunId = currentRunId
- implicitsCache = List()
- val newImplicits: List[ImplicitInfo] =
- if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
- if (!owner.isInitialized) return nextOuter.implicitss
- // debuglog("collect member implicits " + owner + ", implicit members = " + owner.thisType.implicitMembers)//DEBUG
- savingEnclClass(this) {
- // !!! In the body of `class C(implicit a: A) { }`, `implicitss` returns `List(List(a), List(a), List(<predef..)))`
- // it handled correctly by implicit search, which considers the second `a` to be shadowed, but should be
- // remedied nonetheless.
- collectImplicits(owner.thisType.implicitMembers, owner.thisType)
- }
- } else if (scope != nextOuter.scope && !owner.isPackageClass) {
- debuglog("collect local implicits " + scope.toList)//DEBUG
- collectImplicits(scope, NoPrefix)
- } else if (imports != nextOuter.imports) {
- assert(imports.tail == nextOuter.imports, (imports, nextOuter.imports))
- collectImplicitImports(imports.head)
- } else if (owner.isPackageClass) {
- // the corresponding package object may contain implicit members.
- collectImplicits(owner.tpe.implicitMembers, owner.tpe)
- } else List()
- implicitsCache = if (newImplicits.isEmpty) nextOuter.implicitss
- else newImplicits :: nextOuter.implicitss
+ val nextOuter = this.nextOuter
+ def withOuter(is: List[ImplicitInfo]): List[List[ImplicitInfo]] =
+ is match {
+ case Nil => nextOuter.implicitss
+ case _ => is :: nextOuter.implicitss
+ }
+
+ val CycleMarker = NoRunId - 1
+ if (implicitsRunId == CycleMarker) {
+ debuglog(s"cycle while collecting implicits at owner ${owner}, probably due to an implicit without an explicit return type. Continuing with implicits from enclosing contexts.")
+ withOuter(Nil)
+ } else if (implicitsRunId != currentRunId) {
+ implicitsRunId = CycleMarker
+ implicits(nextOuter) match {
+ case None =>
+ implicitsRunId = NoRunId
+ withOuter(Nil)
+ case Some(is) =>
+ implicitsRunId = currentRunId
+ implicitsCache = is
+ withOuter(is)
+ }
}
- implicitsCache
+ else withOuter(implicitsCache)
+ }
+
+ /** @return None if a cycle is detected, or Some(infos) containing the in-scope implicits at this context */
+ private def implicits(nextOuter: Context): Option[List[ImplicitInfo]] = {
+ val imports = this.imports
+ if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
+ if (!owner.isInitialized) None
+ else savingEnclClass(this) {
+ // !!! In the body of `class C(implicit a: A) { }`, `implicitss` returns `List(List(a), List(a), List(<predef..)))`
+ // it is handled correctly by implicit search, which considers the second `a` to be shadowed, but should be
+ // remedied nonetheless.
+ Some(collectImplicits(owner.thisType.implicitMembers, owner.thisType))
+ }
+ } else if (scope != nextOuter.scope && !owner.isPackageClass) {
+ debuglog("collect local implicits " + scope.toList)//DEBUG
+ Some(collectImplicits(scope, NoPrefix))
+ } else if (firstImport != nextOuter.firstImport) {
+ assert(imports.tail.headOption == nextOuter.firstImport, (imports, nextOuter.imports))
+ Some(collectImplicitImports(imports.head))
+ } else if (owner.isPackageClass) {
+ // the corresponding package object may contain implicit members.
+ Some(collectImplicits(owner.tpe.implicitMembers, owner.tpe))
+ } else Some(Nil)
+ }
+
+ //
+ // Imports and symbol lookup
+ //
+
+ /** It's possible that seemingly conflicting identifiers are
+ * identifiably the same after type normalization. In such cases,
+ * allow compilation to proceed. A typical example is:
+ * package object foo { type InputStream = java.io.InputStream }
+ * import foo._, java.io._
+ */
+ private def resolveAmbiguousImport(name: Name, imp1: ImportInfo, imp2: ImportInfo): Option[ImportInfo] = {
+ val imp1Explicit = imp1 isExplicitImport name
+ val imp2Explicit = imp2 isExplicitImport name
+ val ambiguous = if (imp1.depth == imp2.depth) imp1Explicit == imp2Explicit else !imp1Explicit && imp2Explicit
+ val imp1Symbol = (imp1 importedSymbol name).initialize filter (s => isAccessible(s, imp1.qual.tpe, superAccess = false))
+ val imp2Symbol = (imp2 importedSymbol name).initialize filter (s => isAccessible(s, imp2.qual.tpe, superAccess = false))
+
+ // The types of the qualifiers from which the ambiguous imports come.
+ // If the ambiguous name is a value, these must be the same.
+ def t1 = imp1.qual.tpe
+ def t2 = imp2.qual.tpe
+ // The types of the ambiguous symbols, seen as members of their qualifiers.
+ // If the ambiguous name is a monomorphic type, we can relax this far.
+ def mt1 = t1 memberType imp1Symbol
+ def mt2 = t2 memberType imp2Symbol
+
+ def characterize = List(
+ s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}",
+ s"member type 1: $mt1",
+ s"member type 2: $mt2"
+ ).mkString("\n ")
+
+ if (!ambiguous || !imp2Symbol.exists) Some(imp1)
+ else if (!imp1Symbol.exists) Some(imp2)
+ else (
+ // The symbol names are checked rather than the symbols themselves because
+ // each time an overloaded member is looked up it receives a new symbol.
+ // So foo.member("x") != foo.member("x") if x is overloaded. This seems
+ // likely to be the cause of other bugs too...
+ if (t1 =:= t2 && imp1Symbol.name == imp2Symbol.name) {
+ log(s"Suppressing ambiguous import: $t1 =:= $t2 && $imp1Symbol == $imp2Symbol")
+ Some(imp1)
+ }
+ // Monomorphism restriction on types is in part because type aliases could have the
+ // same target type but attach different variance to the parameters. Maybe it can be
+ // relaxed, but doesn't seem worth it at present.
+ else if (mt1 =:= mt2 && name.isTypeName && imp1Symbol.isMonomorphicType && imp2Symbol.isMonomorphicType) {
+ log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $imp1Symbol and $imp2Symbol are equivalent")
+ Some(imp1)
+ }
+ else {
+ log(s"Import is genuinely ambiguous:\n " + characterize)
+ None
+ }
+ )
+ }
+
+ /** The symbol with name `name` imported via the import in `imp`,
+ * if any such symbol is accessible from this context.
+ */
+ def importedAccessibleSymbol(imp: ImportInfo, name: Name): Symbol =
+ importedAccessibleSymbol(imp, name, requireExplicit = false)
+
+ private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean): Symbol =
+ imp.importedSymbol(name, requireExplicit) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false))
+
+ /** Is `sym` defined in package object of package `pkg`?
+ * Since sym may be defined in some parent of the package object,
+ * we cannot inspect its owner only; we have to go through the
+ * info of the package object. However, to avoid cycles we try the
+ * cheaper checks first before resorting to that.
+ */
+ def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = {
+ def uninitialized(what: String) = {
+ log(s"Cannot look for $sym in package object of $pkg; $what is not initialized.")
+ false
+ }
+ def pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg
+ def matchesInfo = (
+ // need to be careful here to not get a cyclic reference during bootstrap
+ if (pkg.isInitialized) {
+ val module = pkg.info member nme.PACKAGEkw
+ if (module.isInitialized)
+ module.info.member(sym.name).alternatives contains sym
+ else
+ uninitialized("" + module)
+ }
+ else uninitialized("" + pkg)
+ )
+ def inPackageObject(sym: Symbol) = (
+ // To be in the package object, one of these must be true:
+ // 1) sym.owner is a package object class, and sym.owner.owner is the package class for `pkg`
+ // 2) sym.owner is inherited by the correct package object class
+ // We try to establish 1) by inspecting the owners directly, and then we try
+ // to rule out 2), and only if both those fail do we resort to looking in the info.
+ !sym.isPackage && sym.owner.exists && (
+ if (sym.owner.isPackageObjectClass)
+ sym.owner.owner == pkgClass
+ else
+ !sym.owner.isPackageClass && matchesInfo
+ )
+ )
+
+ // An overloaded symbol might not have the expected owner!
+ // The alternatives must be inspected directly.
+ pkgClass.isPackageClass && (
+ if (sym.isOverloaded)
+ sym.alternatives forall (isInPackageObject(_, pkg))
+ else
+ inPackageObject(sym)
+ )
+ }
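  // Illustrative sketch (not part of this patch): given
  //
  //   package object p { def helper = 42 }
  //
  // `isInPackageObject(helperSym, pSym)` holds via case 1) above: the owner of `helper` is the
  // package object class, whose owner is the package class of `p` (`helperSym`/`pSym` denote
  // the corresponding symbols).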
+
+ def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess
+
+ /** Find the symbol of a simple name starting from this context.
+ * All names are filtered through the "qualifies" predicate,
+ * the search continuing as long as no qualifying name is found.
+ */
+ def lookupSymbol(name: Name, qualifies: Symbol => Boolean): NameLookup = {
+ var lookupError: NameLookup = null // set to non-null if a definite error is encountered
+ var inaccessible: NameLookup = null // records inaccessible symbol for error reporting in case none is found
+ var defSym: Symbol = NoSymbol // the directly found symbol
+ var pre: Type = NoPrefix // the prefix type of defSym, if a class member
+ var cx: Context = this // the context under consideration
+ var symbolDepth: Int = -1 // the depth of the directly found symbol
+
+ def finish(qual: Tree, sym: Symbol): NameLookup = (
+ if (lookupError ne null) lookupError
+ else sym match {
+ case NoSymbol if inaccessible ne null => inaccessible
+ case NoSymbol => LookupNotFound
+ case _ => LookupSucceeded(qual, sym)
+ }
+ )
+ def finishDefSym(sym: Symbol, pre0: Type): NameLookup =
+ if (requiresQualifier(sym))
+ finish(gen.mkAttributedQualifier(pre0), sym)
+ else
+ finish(EmptyTree, sym)
+
+ def isPackageOwnedInDifferentUnit(s: Symbol) = (
+ s.isDefinedInPackage && (
+ !currentRun.compiles(s)
+ || unit.exists && s.sourceFile != unit.source.file
+ )
+ )
+ def requiresQualifier(s: Symbol) = (
+ s.owner.isClass
+ && !s.owner.isPackageClass
+ && !s.isTypeParameterOrSkolem
+ )
+ def lookupInPrefix(name: Name) = pre member name filter qualifies
+ def accessibleInPrefix(s: Symbol) = isAccessible(s, pre, superAccess = false)
+
+ def searchPrefix = {
+ cx = cx.enclClass
+ val found0 = lookupInPrefix(name)
+ val found1 = found0 filter accessibleInPrefix
+ if (found0.exists && !found1.exists && inaccessible == null)
+ inaccessible = LookupInaccessible(found0, analyzer.lastAccessCheckDetails)
+
+ found1
+ }
+
+ def lookupInScope(scope: Scope) =
+ (scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList
+
+ def newOverloaded(owner: Symbol, pre: Type, entries: List[ScopeEntry]) =
+ logResult(s"overloaded symbol in $pre")(owner.newOverloaded(pre, entries map (_.sym)))
+
+ // Constructor lookup should only look in the decls of the enclosing class
+ // not in the self-type, nor in the enclosing context, nor in imports (SI-4460, SI-6745)
+ if (name == nme.CONSTRUCTOR) return {
+ val enclClassSym = cx.enclClass.owner
+ val scope = cx.enclClass.prefix.baseType(enclClassSym).decls
+ val constructorSym = lookupInScope(scope) match {
+ case Nil => NoSymbol
+ case hd :: Nil => hd.sym
+ case entries => newOverloaded(enclClassSym, cx.enclClass.prefix, entries)
+ }
+ finishDefSym(constructorSym, cx.enclClass.prefix)
+ }
+
+ // cx.scope eq null arises during FixInvalidSyms in Duplicators
+ while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) {
+ pre = cx.enclClass.prefix
+ defSym = lookupInScope(cx.scope) match {
+ case Nil => searchPrefix
+ case entries @ (hd :: tl) =>
+ // we have a winner: record the symbol depth
+ symbolDepth = (cx.depth - cx.scope.nestingLevel) + hd.depth
+ if (tl.isEmpty) hd.sym
+ else newOverloaded(cx.owner, pre, entries)
+ }
+ if (!defSym.exists)
+ cx = cx.outer // push further outward
+ }
+ if (symbolDepth < 0)
+ symbolDepth = cx.depth
+
+ var impSym: Symbol = NoSymbol
+ var imports = Context.this.imports
+ def imp1 = imports.head
+ def imp2 = imports.tail.head
+ def sameDepth = imp1.depth == imp2.depth
+ def imp1Explicit = imp1 isExplicitImport name
+ def imp2Explicit = imp2 isExplicitImport name
+
+ def lookupImport(imp: ImportInfo, requireExplicit: Boolean) =
+ importedAccessibleSymbol(imp, name, requireExplicit) filter qualifies
+
+ // Java: A single-type-import declaration d in a compilation unit c of package p
+ // that imports a type named n shadows, throughout c, the declarations of:
+ //
+ // 1) any top level type named n declared in another compilation unit of p
+ //
+ // A type-import-on-demand declaration never causes any other declaration to be shadowed.
+ //
+ // Scala: Bindings of different kinds have a precedence defined on them:
+ //
+ // 1) Definitions and declarations that are local, inherited, or made available by a
+ // package clause in the same compilation unit where the definition occurs have
+ // highest precedence.
+ // 2) Explicit imports have next highest precedence.
+ def depthOk(imp: ImportInfo) = (
+ imp.depth > symbolDepth
+ || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symbolDepth)
+ )
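  // Illustrative sketch (not part of this patch): for a Java unit
  //
  //   package p; import q.X; class C { X field; }
  //
  // the explicit import of q.X is admitted by depthOk even at the same depth as a top-level
  // class p.X declared in another file of p, per rule 1) above; in a Scala unit, a definition
  // of X made available by the package clause of the same file would instead win over the import.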
+
+ while (!impSym.exists && imports.nonEmpty && depthOk(imports.head)) {
+ impSym = lookupImport(imp1, requireExplicit = false)
+ if (!impSym.exists)
+ imports = imports.tail
+ }
+
+ if (defSym.exists && impSym.exists) {
+ // imported symbols take precedence over package-owned symbols in different compilation units.
+ if (isPackageOwnedInDifferentUnit(defSym))
+ defSym = NoSymbol
+ // Defined symbols take precedence over erroneous imports.
+ else if (impSym.isError || impSym.name == nme.CONSTRUCTOR)
+ impSym = NoSymbol
+ // Otherwise they are irreconcilably ambiguous
+ else
+ return ambiguousDefnAndImport(defSym.alternatives.head.owner, imp1)
+ }
+
+ // At this point only one or the other of defSym and impSym might be set.
+ if (defSym.exists)
+ finishDefSym(defSym, pre)
+ else if (impSym.exists) {
+ // We continue walking down the imports as long as the tail is non-empty, which gives us:
+ // imports == imp1 :: imp2 :: _
+ // And at least one of the following is true:
+ // - imp1 and imp2 are at the same depth
+ // - imp1 is a wildcard import, so all explicit imports from outer scopes must be checked
+ def keepLooking = (
+ lookupError == null
+ && imports.tail.nonEmpty
+ && (sameDepth || !imp1Explicit)
+ )
+ // If we find a competitor imp2 which imports the same name, possible outcomes are:
+ //
+ // - same depth, imp1 wild, imp2 explicit: imp2 wins, drop imp1
+ // - same depth, imp1 wild, imp2 wild: ambiguity check
+ // - same depth, imp1 explicit, imp2 explicit: ambiguity check
+ // - differing depth, imp1 wild, imp2 explicit: ambiguity check
+ // - all others: imp1 wins, drop imp2
+ //
+ // The ambiguity check is: if we can verify that both imports refer to the same
+ // symbol (e.g. import foo.X followed by import foo._) then we discard imp2
+ // and proceed. If we cannot, issue an ambiguity error.
+ while (keepLooking) {
+ // If not at the same depth, limit the lookup to explicit imports.
+ // This is desirable from a performance standpoint (compare to
+ // filtering after the fact) but also necessary to keep the unused
+ // import check from being misled by symbol lookups which are not
+ // actually used.
+ val other = lookupImport(imp2, requireExplicit = !sameDepth)
+ def imp1wins() = { imports = imp1 :: imports.tail.tail }
+ def imp2wins() = { impSym = other ; imports = imports.tail }
+
+ if (!other.exists) // imp1 wins; drop imp2 and continue.
+ imp1wins()
+ else if (sameDepth && !imp1Explicit && imp2Explicit) // imp2 wins; drop imp1 and continue.
+ imp2wins()
+ else resolveAmbiguousImport(name, imp1, imp2) match {
+ case Some(imp) => if (imp eq imp1) imp1wins() else imp2wins()
+ case _ => lookupError = ambiguousImports(imp1, imp2)
+ }
+ }
+ // optimization: don't write out package prefixes
+ finish(resetPos(imp1.qual.duplicate), impSym)
+ }
+ else finish(EmptyTree, NoSymbol)
}
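  // Illustrative sketch (not part of this patch): a caller resolving a plain identifier might
  // drive the lookup above like so (the predicate filters candidates, e.g. to term symbols):
  //
  //   lookupSymbol(name, sym => sym.exists && sym.isTerm) match {
  //     case LookupSucceeded(qual, sym) => ...  // qual is EmptyTree unless a prefix is required
  //     case LookupNotFound             => ...  // nothing in scope or imports
  //     case other                      => ...  // ambiguous or inaccessible
  //   }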
/**
@@ -731,12 +1202,84 @@ trait Contexts { self: Analyzer =>
}
} //class Context
+ /** A `Context` focussed on an `Import` tree */
+ trait ImportContext extends Context {
+ private val impInfo: ImportInfo = {
+ val info = new ImportInfo(tree.asInstanceOf[Import], outerDepth)
+ if (settings.lint && !isRootImport) // excludes java.lang/scala/Predef imports
+ allImportInfos(unit) ::= info
+ info
+ }
+ override final def imports = impInfo :: super.imports
+ override final def firstImport = Some(impInfo)
+ override final def isRootImport = !tree.pos.isDefined
+ override final def toString = super.toString + " with " + s"ImportContext { $impInfo; outer.owner = ${outer.owner} }"
+ }
+
+ /** A buffer for warnings and errors that are accumulated during speculative type checking. */
+ final class ReportBuffer {
+ type Error = AbsTypeError
+ type Warning = (Position, String)
+
+ private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results.
+
+ // [JZ] Contexts, before the SI-7345 refactor, avoided allocating the buffers until needed. This
+ // is replicated here out of conservatism.
+ private var _errorBuffer: mutable.LinkedHashSet[Error] = _
+ private def errorBuffer = {if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer}
+ def errors: immutable.Seq[Error] = errorBuffer.toVector
+
+ private var _warningBuffer: mutable.LinkedHashSet[Warning] = _
+ private def warningBuffer = {if (_warningBuffer == null) _warningBuffer = newBuffer; _warningBuffer}
+ def warnings: immutable.Seq[Warning] = warningBuffer.toVector
+
+ def +=(error: AbsTypeError): this.type = {
+ errorBuffer += error
+ this
+ }
+ def ++=(errors: Traversable[AbsTypeError]): this.type = {
+ errorBuffer ++= errors
+ this
+ }
+ def +=(warning: Warning): this.type = {
+ warningBuffer += warning
+ this
+ }
+
+ def clearAll(): this.type = {
+ clearAllErrors(); clearAllWarnings();
+ }
+
+ def clearAllErrors(): this.type = {
+ errorBuffer.clear()
+ this
+ }
+ def clearErrors(removeF: PartialFunction[AbsTypeError, Boolean]): this.type = {
+ errorBuffer.retain(!PartialFunction.cond(_)(removeF))
+ this
+ }
+ def retainErrors(leaveF: PartialFunction[AbsTypeError, Boolean]): this.type = {
+ errorBuffer.retain(PartialFunction.cond(_)(leaveF))
+ this
+ }
+ def clearAllWarnings(): this.type = {
+ warningBuffer.clear()
+ this
+ }
+
+ def hasErrors = errorBuffer.nonEmpty
+ def firstError = errorBuffer.headOption
+ }
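  // Illustrative sketch (not part of this patch): ReportBuffer preserves insertion order and
  // de-duplicates (LinkedHashSet), so a silent-mode round trip looks like
  // (`someTypeError` is a hypothetical AbsTypeError):
  //
  //   val rb = new ReportBuffer
  //   rb += someTypeError                                           // buffered, not reported
  //   if (rb.hasErrors) rb.firstError foreach (e => log(e.errMsg))  // inspect without reporting
  //   rb.clearAll()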
+
class ImportInfo(val tree: Import, val depth: Int) {
+ def pos = tree.pos
+ def posOf(sel: ImportSelector) = tree.pos withPoint sel.namePos
+
/** The prefix expression */
def qual: Tree = tree.symbol.info match {
case ImportType(expr) => expr
- case ErrorType => tree setType NoType // fix for #2870
- case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug
+ case ErrorType => tree setType NoType // fix for #2870
+ case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug
}
/** Is name imported explicitly, not via wildcard? */
@@ -745,25 +1288,53 @@ trait Contexts { self: Analyzer =>
/** The symbol with name `name` imported from import clause `tree`.
*/
- def importedSymbol(name: Name): Symbol = {
+ def importedSymbol(name: Name): Symbol = importedSymbol(name, requireExplicit = false)
+
+ private def recordUsage(sel: ImportSelector, result: Symbol) {
+ def posstr = pos.source.file.name + ":" + posOf(sel).line
+ def resstr = if (tree.symbol.hasCompleteInfo) s"(qual=$qual, $result)" else s"(expr=${tree.expr}, ${result.fullLocationString})"
+ debuglog(s"In $this at $posstr, selector '${selectorString(sel)}' resolved to $resstr")
+ allUsedSelectors(this) += sel
+ }
+
+ /** If requireExplicit is true, wildcard imports are not considered. */
+ def importedSymbol(name: Name, requireExplicit: Boolean): Symbol = {
var result: Symbol = NoSymbol
var renamed = false
var selectors = tree.selectors
- while (selectors != Nil && result == NoSymbol) {
- if (selectors.head.rename == name.toTermName)
+ def current = selectors.head
+ while (selectors.nonEmpty && result == NoSymbol) {
+ if (current.rename == name.toTermName)
result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports
- if (name.isTypeName) selectors.head.name.toTypeName else selectors.head.name)
- else if (selectors.head.name == name.toTermName)
+ if (name.isTypeName) current.name.toTypeName else current.name)
+ else if (current.name == name.toTermName)
renamed = true
- else if (selectors.head.name == nme.WILDCARD && !renamed)
+ else if (current.name == nme.WILDCARD && !renamed && !requireExplicit)
result = qual.tpe.nonLocalMember(name)
- selectors = selectors.tail
+
+ if (result == NoSymbol)
+ selectors = selectors.tail
}
- result
+ if (settings.lint && selectors.nonEmpty && result != NoSymbol && pos != NoPosition)
+ recordUsage(current, result)
+
+ // Harden against the fallout from bugs like SI-6745
+ //
+ // [JZ] I considered issuing a devWarning and moving the
+ // check inside the above loop, as I believe that
+ // this always represents a mistake on the part of
+ // the caller.
+ if (definitions isImportable result) result
+ else NoSymbol
+ }
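  // Illustrative sketch (not part of this patch): for `import q.{A => B, _}`, the loop above
  // resolves importedSymbol(TypeName("B")) to q.A via the renaming selector, records A as
  // renamed, and therefore refuses to resolve importedSymbol(TypeName("A")) through the
  // trailing wildcard.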
+ private def selectorString(s: ImportSelector): String = {
+ if (s.name == nme.WILDCARD && s.rename == null) "_"
+ else if (s.name == s.rename) "" + s.name
+ else s.name + " => " + s.rename
}
def allImportedSymbols: Iterable[Symbol] =
- qual.tpe.members flatMap (transformImport(tree.selectors, _))
+ importableMembers(qual.tpe) flatMap (transformImport(tree.selectors, _))
private def transformImport(selectors: List[ImportSelector], sym: Symbol): List[Symbol] = selectors match {
case List() => List()
@@ -774,10 +1345,124 @@ trait Contexts { self: Analyzer =>
case _ :: rest => transformImport(rest, sym)
}
- override def toString() = tree.toString()
+ override def hashCode = tree.##
+ override def equals(other: Any) = other match {
+ case that: ImportInfo => (tree == that.tree)
+ case _ => false
+ }
+ override def toString = tree.toString
}
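A toy model of the selector walk in importedSymbol above may make the rename/shadowing/wildcard interplay easier to follow; Selector and lookup are hypothetical simplifications, with "_" standing in for nme.WILDCARD and a returned member name standing in for the resolved Symbol.

case class Selector(name: String, rename: String)

def lookup(name: String, sels: List[Selector], requireExplicit: Boolean): Option[String] = {
  var result: Option[String] = None
  var renamed = false
  var selectors = sels
  while (selectors.nonEmpty && result.isEmpty) {
    val cur = selectors.head
    if (cur.rename == name) result = Some(cur.name)                                // explicit (possibly renamed) import wins
    else if (cur.name == name) renamed = true                                      // the original name is shadowed by a rename
    else if (cur.name == "_" && !renamed && !requireExplicit) result = Some(name)  // wildcard only if allowed and not shadowed
    if (result.isEmpty) selectors = selectors.tail
  }
  result
}

// for `import p.{A => B, _}`:
// lookup("B", List(Selector("A", "B"), Selector("_", "_")), requireExplicit = false)  == Some("A")
// lookup("A", List(Selector("A", "B"), Selector("_", "_")), requireExplicit = false)  == None
// lookup("C", List(Selector("_", "_")),                     requireExplicit = true)   == None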
case class ImportType(expr: Tree) extends Type {
override def safeToString = "ImportType("+expr+")"
}
}
+
+object ContextMode {
+ import scala.language.implicitConversions
+ private implicit def liftIntBitsToContextState(bits: Int): ContextMode = apply(bits)
+ def apply(bits: Int): ContextMode = new ContextMode(bits)
+ final val NOmode: ContextMode = 0
+
+ final val ReportErrors: ContextMode = 1 << 0
+ final val BufferErrors: ContextMode = 1 << 1
+ final val AmbiguousErrors: ContextMode = 1 << 2
+
+ /** Are we in a secondary constructor after the this constructor call? */
+ final val ConstructorSuffix: ContextMode = 1 << 3
+
+ /** For method context: were returns encountered? */
+ final val ReturnsSeen: ContextMode = 1 << 4
+
+ /** Is this context (enclosed in) a constructor call?
+ * (the call to the super or self constructor in the first line of a constructor.)
+ * In such a context, the object's fields should not be in scope
+ */
+ final val SelfSuperCall: ContextMode = 1 << 5
+
+ // TODO harvest documentation for this
+ final val ImplicitsEnabled: ContextMode = 1 << 6
+
+ final val MacrosEnabled: ContextMode = 1 << 7
+
+ /** To selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed */
+ final val EnrichmentEnabled: ContextMode = 1 << 8
+
+ /** Are we in a run of [[scala.tools.nsc.typechecker.TreeCheckers]]? */
+ final val Checking: ContextMode = 1 << 9
+
+ /** Are we retypechecking arguments independently from the function applied to them? See `Typer.tryTypedApply`
+ * TODO - iron out distinction/overlap with SecondTry.
+ */
+ final val ReTyping: ContextMode = 1 << 10
+
+ /** Are we typechecking pattern alternatives? Formerly ALTmode. */
+ final val PatternAlternative: ContextMode = 1 << 11
+
+ /** Are star patterns allowed? Formerly STARmode. */
+ final val StarPatterns: ContextMode = 1 << 12
+
+ /** Are we typing the "super" in a superclass constructor call super.<init>? Formerly SUPERCONSTRmode. */
+ final val SuperInit: ContextMode = 1 << 13
+
+ /** Is this the second attempt to type this tree? In that case functions
+ * may no longer be coerced with implicit views. Formerly SNDTRYmode.
+ */
+ final val SecondTry: ContextMode = 1 << 14
+
+ /** Are we in return position? Formerly RETmode. */
+ final val ReturnExpr: ContextMode = 1 << 15
+
+ /** Are unapplied type constructors allowed here? Formerly HKmode. */
+ final val TypeConstructorAllowed: ContextMode = 1 << 16
+
+ /** TODO: The "sticky modes" are EXPRmode, PATTERNmode, TYPEmode.
+ * To mimic the sticky mode behavior, when captain stickyfingers
+ * comes around we need to propagate those modes but forget the other
+ * context modes which were once mode bits; those being so far the
+ * ones listed here.
+ */
+ final val FormerNonStickyModes: ContextMode = (
+ PatternAlternative | StarPatterns | SuperInit | SecondTry | ReturnExpr | TypeConstructorAllowed
+ )
+
+ final val DefaultMode: ContextMode = MacrosEnabled
+
+ private val contextModeNameMap = Map(
+ ReportErrors -> "ReportErrors",
+ BufferErrors -> "BufferErrors",
+ AmbiguousErrors -> "AmbiguousErrors",
+ ConstructorSuffix -> "ConstructorSuffix",
+ SelfSuperCall -> "SelfSuperCall",
+ ImplicitsEnabled -> "ImplicitsEnabled",
+ MacrosEnabled -> "MacrosEnabled",
+ Checking -> "Checking",
+ ReTyping -> "ReTyping",
+ PatternAlternative -> "PatternAlternative",
+ StarPatterns -> "StarPatterns",
+ SuperInit -> "SuperInit",
+ SecondTry -> "SecondTry",
+ TypeConstructorAllowed -> "TypeConstructorAllowed"
+ )
+}
+
+/**
+ * A value class to carry the boolean flags of a context, such as whether errors should
+ * be buffered or reported.
+ */
+final class ContextMode private (val bits: Int) extends AnyVal {
+ import ContextMode._
+
+ def &(other: ContextMode): ContextMode = new ContextMode(bits & other.bits)
+ def |(other: ContextMode): ContextMode = new ContextMode(bits | other.bits)
+ def &~(other: ContextMode): ContextMode = new ContextMode(bits & ~(other.bits))
+ def set(value: Boolean, mask: ContextMode) = if (value) |(mask) else &~(mask)
+
+ def inAll(required: ContextMode) = (this & required) == required
+ def inAny(required: ContextMode) = (this & required) != NOmode
+ def inNone(prohibited: ContextMode) = (this & prohibited) == NOmode
+
+ override def toString =
+ if (bits == 0) "NOmode"
+ else (contextModeNameMap filterKeys inAll).values.toList.sorted mkString " "
+}
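A brief usage sketch for the ContextMode operations defined above; the caller code is hypothetical, but every name it uses comes from this diff.

import ContextMode._

var mode: ContextMode = DefaultMode                     // just MacrosEnabled
mode = mode | ReportErrors | ImplicitsEnabled           // switch bits on
mode = mode.set(false, ImplicitsEnabled)                // switch one back off via set(value, mask)

val reporting   = mode.inAll(ReportErrors | MacrosEnabled)    // true: all required bits are set
val buffering   = mode.inAny(BufferErrors | AmbiguousErrors)  // false: none of these bits is set
val notChecking = mode.inNone(Checking)                       // true: the prohibited bit is clear
val sticky      = mode &~ FormerNonStickyModes                // clear the former mode bits in one step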
diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
index 3e249e57bb..73572bcae9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package typechecker
-import scala.language.implicitConversions
-
/** A generic means of breaking down types into their subcomponents.
* Types are decomposed top down, and recognizable substructure is
* dispatched via self-apparently named methods. Those methods can
@@ -37,8 +35,6 @@ trait DestructureTypes {
def wrapSequence(nodes: List[Node]): Node
def wrapAtom[U](value: U): Node
- private implicit def liftToTerm(name: String): TermName = newTermName(name)
-
private val openSymbols = scala.collection.mutable.Set[Symbol]()
private def nodeList[T](elems: List[T], mkNode: T => Node): Node =
@@ -68,15 +64,6 @@ trait DestructureTypes {
},
tree.productPrefix
)
- def wrapSymbol(label: String, sym: Symbol): Node = {
- if (sym eq NoSymbol) wrapEmpty
- else atom(label, sym)
- }
- def wrapInfo(sym: Symbol) = sym.info match {
- case TypeBounds(lo, hi) => typeBounds(lo, hi)
- case PolyType(tparams, restpe) => polyFunction(tparams, restpe)
- case _ => wrapEmpty
- }
def wrapSymbolInfo(sym: Symbol): Node = {
if ((sym eq NoSymbol) || openSymbols(sym)) wrapEmpty
else {
@@ -99,7 +86,6 @@ trait DestructureTypes {
def constant(label: String, const: Constant): Node = atom(label, const)
def scope(decls: Scope): Node = node("decls", scopeMemberList(decls.toList))
- def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2))
def resultType(restpe: Type): Node = this("resultType", restpe)
def typeParams(tps: List[Symbol]): Node = node("typeParams", symbolList(tps))
@@ -188,7 +174,6 @@ trait DestructureTypes {
case AntiPolyType(pre, targs) => product(tp, prefix(pre), typeArgs(targs))
case ClassInfoType(parents, decls, clazz) => product(tp, parentList(parents), scope(decls), wrapAtom(clazz))
case ConstantType(const) => product(tp, constant("value", const))
- case DeBruijnIndex(level, index, args) => product(tp, const("level" -> level), const("index" -> index), typeArgs(args))
case OverloadedType(pre, alts) => product(tp, prefix(pre), node("alts", typeList(alts map pre.memberType)))
case RefinedType(parents, decls) => product(tp, parentList(parents), scope(decls))
case SingleType(pre, sym) => product(tp, prefix(pre), wrapAtom(sym))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 25a1228bf6..69ae6ec0c8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -17,12 +17,7 @@ import scala.collection.{ mutable, immutable }
*/
abstract class Duplicators extends Analyzer {
import global._
- import definitions.{ AnyRefClass, AnyValClass }
-
- def retyped(context: Context, tree: Tree): Tree = {
- resetClassOwners
- (newBodyDuplicator(context)).typed(tree)
- }
+ import definitions._
/** Retype the given tree in the given context. Use this method when retyping
* a method in a different class. The typer will replace references to the this of
@@ -33,7 +28,7 @@ abstract class Duplicators extends Analyzer {
if (oldThis ne newThis) {
oldClassOwner = oldThis
newClassOwner = newThis
- } else resetClassOwners
+ } else resetClassOwners()
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
debuglog("retyped with env: " + env)
@@ -79,22 +74,19 @@ abstract class Duplicators extends Analyzer {
override def mapOver(tpe: Type): Type = tpe match {
case TypeRef(NoPrefix, sym, args) if sym.isTypeParameterOrSkolem =>
- var sym1 = context.scope.lookup(sym.name)
- if (sym1 eq NoSymbol) {
- // try harder (look in outer scopes)
- // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
- BodyDuplicator.super.silent(_.typedType(Ident(sym.name))) match {
- case SilentResultValue(t) =>
- sym1 = t.symbol
- debuglog("fixed by trying harder: "+(sym, sym1, context))
- case _ =>
- }
- }
-// assert(sym1 ne NoSymbol, tpe)
- if ((sym1 ne NoSymbol) && (sym1 ne sym)) {
- debuglog("fixing " + sym + " -> " + sym1)
+ val sym1 = (
+ context.scope lookup sym.name orElse {
+ // try harder (look in outer scopes)
+ // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but
+ // is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
+ BodyDuplicator.super.silent(_ typedType Ident(sym.name)).fold(NoSymbol: Symbol)(_.symbol)
+ } filter (_ ne sym)
+ )
+ if (sym1.exists) {
+ debuglog(s"fixing $sym -> $sym1")
typeRef(NoPrefix, sym1, mapOverArgs(args, sym1.typeParams))
- } else super.mapOver(tpe)
+ }
+ else super.mapOver(tpe)
case TypeRef(pre, sym, args) =>
val newsym = updateSym(sym)
@@ -144,8 +136,8 @@ abstract class Duplicators extends Analyzer {
sym
private def invalidate(tree: Tree, owner: Symbol = NoSymbol) {
- debuglog("attempting to invalidate " + tree.symbol)
- if (tree.isDef && tree.symbol != NoSymbol) {
+ debuglog(s"attempting to invalidate symbol = ${tree.symbol}")
+ if ((tree.isDef || tree.isInstanceOf[Function]) && tree.symbol != NoSymbol) {
debuglog("invalid " + tree.symbol)
invalidSyms(tree.symbol) = tree
@@ -162,7 +154,7 @@ abstract class Duplicators extends Analyzer {
case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) =>
debuglog("ValDef " + name + " sym.info: " + vdef.symbol.info)
invalidSyms(vdef.symbol) = vdef
- val newowner = if (owner != NoSymbol) owner else context.owner
+ val newowner = owner orElse context.owner
val newsym = vdef.symbol.cloneSymbol(newowner)
newsym.setInfo(fixType(vdef.symbol.info))
vdef.symbol = newsym
@@ -174,6 +166,11 @@ abstract class Duplicators extends Analyzer {
invalidateAll(tparams ::: vparamss.flatten)
tree.symbol = NoSymbol
+ case Function(vparams, _) =>
+ // invalidate parameters
+ invalidateAll(vparams)
+ tree.symbol = NoSymbol
+
case _ =>
tree.symbol = NoSymbol
}
@@ -184,17 +181,6 @@ abstract class Duplicators extends Analyzer {
stats.foreach(invalidate(_, owner))
}
- private def inspectTpe(tpe: Type) = {
- tpe match {
- case MethodType(_, res) =>
- res + ", " + res.bounds.hi + ", " + (res.bounds.hi match {
- case TypeRef(_, _, args) if (args.length > 0) => args(0) + ", " + args(0).bounds.hi
- case _ => "non-tref: " + res.bounds.hi.getClass
- })
- case _ =>
- }
- }
-
/** Optionally cast this tree into some other type, if required.
* Unless overridden, just returns the tree.
*/
@@ -214,10 +200,10 @@ abstract class Duplicators extends Analyzer {
* their symbols are recreated ad-hoc and their types are fixed inline, instead of letting the
* namer/typer handle them, or Idents that refer to them.
*/
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
debuglog("typing " + tree + ": " + tree.tpe + ", " + tree.getClass)
val origtreesym = tree.symbol
- if (tree.hasSymbol && tree.symbol != NoSymbol
+ if (tree.hasSymbolField && tree.symbol != NoSymbol
&& !tree.symbol.isLabel // labels cannot be retyped by the type checker as LabelDef has no ValDef/return type trees
&& invalidSyms.isDefinedAt(tree.symbol)) {
debuglog("removed symbol " + tree.symbol)
@@ -227,40 +213,39 @@ abstract class Duplicators extends Analyzer {
tree match {
case ttree @ TypeTree() =>
// log("fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol)
- ttree.tpe = fixType(ttree.tpe)
- ttree
+ ttree modifyType fixType
case Block(stats, res) =>
debuglog("invalidating block")
invalidateAll(stats)
invalidate(res)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case ClassDef(_, _, _, tmpl @ Template(parents, _, stats)) =>
// log("invalidating classdef " + tree)
tmpl.symbol = tree.symbol.newLocalDummy(tree.pos)
invalidateAll(stats, tree.symbol)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case ddef @ DefDef(_, _, _, _, tpt, rhs) =>
- ddef.tpt.tpe = fixType(ddef.tpt.tpe)
- ddef.tpe = null
- super.typed(ddef, mode, pt)
+ ddef.tpt modifyType fixType
+ super.typed(ddef.clearType(), mode, pt)
+
+ case fun: Function =>
+ debuglog("Clearing the type and retyping Function: " + fun)
+ super.typed(fun.clearType, mode, pt)
case vdef @ ValDef(mods, name, tpt, rhs) =>
// log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
//if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE) // Martin to Iulian: lazy vars can now appear because they are no longer boxed; Please check that deleting this statement is OK.
- vdef.tpt.tpe = fixType(vdef.tpt.tpe)
- vdef.tpe = null
- super.typed(vdef, mode, pt)
+ vdef.tpt modifyType fixType
+ super.typed(vdef.clearType(), mode, pt)
case ldef @ LabelDef(name, params, rhs) =>
// log("label def: " + ldef)
// in case the rhs contains any definitions -- TODO: is this necessary?
invalidate(rhs)
- ldef.tpe = null
+ ldef.clearType()
// is this LabelDef generated by tailcalls?
val isTailLabel = (ldef.params.length >= 1) && (ldef.params.head.name == nme.THIS)
@@ -278,27 +263,23 @@ abstract class Duplicators extends Analyzer {
val params1 = params map newParam
val rhs1 = (new TreeSubstituter(params map (_.symbol), params1) transform rhs) // TODO: duplicate?
- rhs1.tpe = null
- super.typed(treeCopy.LabelDef(tree, name, params1, rhs1), mode, pt)
+ super.typed(treeCopy.LabelDef(tree, name, params1, rhs1.clearType()), mode, pt)
case Bind(name, _) =>
// log("bind: " + tree)
invalidate(tree)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case Ident(_) if tree.symbol.isLabel =>
debuglog("Ident to labeldef " + tree + " switched to ")
tree.symbol = updateSym(tree.symbol)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case Ident(_) if (origtreesym ne null) && origtreesym.isLazy =>
debuglog("Ident to a lazy val " + tree + ", " + tree.symbol + " updated to " + origtreesym)
tree.symbol = updateSym(origtreesym)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
// We use the symbol name instead of the tree name because the symbol
@@ -320,9 +301,15 @@ abstract class Duplicators extends Analyzer {
case ((alt, tpe)) :: Nil =>
log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString")
Select(This(newClassOwner), alt)
- case _ =>
- log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...")
- nameSelection
+ case xs =>
+ alts filter (alt => (alt.paramss corresponds tree.symbol.paramss)(_.size == _.size)) match {
+ case alt :: Nil =>
+ log(s"Resorted to parameter list arity to disambiguate to $alt\n Overload was: $memberString")
+ Select(This(newClassOwner), alt)
+ case _ =>
+ log(s"Could not disambiguate $memberTypes. Attempting name-based selection, but we may crash later.")
+ nameSelection
+ }
}
}
else nameSelection
@@ -351,7 +338,7 @@ abstract class Duplicators extends Analyzer {
super.typed(atPos(tree.pos)(tree1))
*/
case Match(scrut, cases) =>
- val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
+ val scrut1 = typedByValueExpr(scrut)
val scrutTpe = scrut1.tpe.widen
val cases1 = {
if (scrutTpe.isFinalType) cases filter {
@@ -366,8 +353,8 @@ abstract class Duplicators extends Analyzer {
// Without this, AnyRef specializations crash on patterns like
// case _: Boolean => ...
// Not at all sure this is safe.
- else if (scrutTpe <:< AnyRefClass.tpe)
- cases filterNot (_.pat.tpe <:< AnyValClass.tpe)
+ else if (scrutTpe <:< AnyRefTpe)
+ cases filterNot (_.pat.tpe <:< AnyValTpe)
else
cases
}
@@ -381,12 +368,11 @@ abstract class Duplicators extends Analyzer {
case _ =>
debuglog("Duplicators default case: " + tree.summaryString)
debuglog(" ---> " + tree)
- if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
+ if (tree.hasSymbolField && tree.symbol.safeOwner == AnyClass)
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
- }
+
val ntree = castType(tree, pt)
- val res = super.typed(ntree, mode, pt)
- res
+ super.typed(ntree, mode, pt)
}
}
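The arity-based overload disambiguation added above hinges on corresponds, which is true only when both lists have the same length and the predicate holds pairwise. A minimal stdlib-only illustration, with parameter lists modelled as lists of names:

val overloadParamss = List(List("x", "y"), List("z"))   // shape (2)(1)
val callSiteParamss = List(List("a", "b"), List("c"))   // shape (2)(1)
val otherParamss    = List(List("a"))                   // shape (1)

val sameShape = (overloadParamss corresponds callSiteParamss)(_.size == _.size)  // true: same arity, list by list
val mismatch  = (overloadParamss corresponds otherParamss)(_.size == _.size)     // false: different number of lists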
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 57b9dfe3e4..7092f00bff 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -33,7 +33,7 @@ trait EtaExpansion { self: Analyzer =>
}
/** <p>
- * Expand partial function applications of type <code>type</code>.
+ * Expand partial function applications of type `type`.
* </p><pre>
* p.f(es_1)...(es_n)
* ==> {
@@ -56,11 +56,8 @@ trait EtaExpansion { self: Analyzer =>
}
val defs = new ListBuffer[Tree]
- /** Append to <code>defs</code> value definitions for all non-stable
- * subexpressions of the function application <code>tree</code>.
- *
- * @param tree ...
- * @return ...
+ /* Append to `defs` value definitions for all non-stable
+ * subexpressions of the function application `tree`.
*/
def liftoutPrefix(tree: Tree): Tree = {
def liftout(tree: Tree, byName: Boolean): Tree =
@@ -97,12 +94,12 @@ trait EtaExpansion { self: Analyzer =>
// with repeated params, there might be more or fewer args than params
liftout(arg, byName(i).getOrElse(false))
}
- treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
+ treeCopy.Apply(tree, liftoutPrefix(fn), newArgs).clearType()
case TypeApply(fn, args) =>
- treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
+ treeCopy.TypeApply(tree, liftoutPrefix(fn), args).clearType()
case Select(qual, name) =>
val name = tree.symbol.name // account for renamed imports, SI-7233
- treeCopy.Select(tree, liftout(qual, false), name) setSymbol NoSymbol setType null
+ treeCopy.Select(tree, liftout(qual, byName = false), name).clearType() setSymbol NoSymbol
case Ident(name) =>
tree
}
@@ -110,8 +107,7 @@ trait EtaExpansion { self: Analyzer =>
tree1
}
- /** Eta-expand lifted tree.
- */
+ /* Eta-expand lifted tree. */
def expand(tree: Tree, tpe: Type): Tree = tpe match {
case mt @ MethodType(paramSyms, restpe) if !mt.isImplicit =>
val params: List[(ValDef, Boolean)] = paramSyms.map {
@@ -119,7 +115,7 @@ trait EtaExpansion { self: Analyzer =>
val origTpe = sym.tpe
val isRepeated = definitions.isRepeatedParamType(origTpe)
// SI-4176 Don't leak A* in eta-expanded function types. See t4176b.scala
- val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropRepeatedParamType(origTpe)
+ val droppedStarTpe = if (settings.etaExpandKeepsStar) origTpe else dropIllegalStarTypes(origTpe)
val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree)
(valDef, isRepeated)
}
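The rewrite described in the doc comment above can be written out by hand; the following is an illustrative source-level equivalent (EtaDemo, f and sideEffect are made up), not the trees the compiler actually builds.

object EtaDemo {
  def f(x: Int)(y: Int): Int = x + y
  def sideEffect(): Int = { println("evaluated once"); 1 }

  // eta-expanding `f(sideEffect()) _`: lift the non-stable argument into a val once,
  // then close over it with a function literal for the remaining parameter list
  val g: Int => Int = {
    val lifted = sideEffect()     // what liftoutPrefix achieves for unstable subexpressions
    (y: Int) => f(lifted)(y)      // what expand produces for the remaining MethodType
  }
}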
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 35a4461ccc..06a1e21e8b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -8,7 +8,8 @@
//todo: disallow C#D in superclass
//todo: treat :::= correctly
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
import scala.annotation.tailrec
@@ -16,7 +17,7 @@ import scala.collection.{ mutable, immutable }
import mutable.{ LinkedHashMap, ListBuffer }
import scala.util.matching.Regex
import symtab.Flags._
-import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.{TriState, Statistics}
import scala.language.implicitConversions
/** This trait provides methods to find various kinds of implicits.
@@ -30,11 +31,11 @@ trait Implicits {
import global._
import definitions._
import ImplicitsStats._
- import typeDebug.{ ptTree, ptBlock, ptLine }
- import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
+ import typingStack.{ printTyping }
+ import typeDebug._
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult =
- inferImplicit(tree, pt, reportAmbiguous, isView, context, true, tree.pos)
+ inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent = true, tree.pos)
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult =
inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, tree.pos)
@@ -59,40 +60,35 @@ trait Implicits {
* @return A search result
*/
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = {
- printInference("[infer %s] %s with pt=%s in %s".format(
- if (isView) "view" else "implicit",
- tree, pt, context.owner.enclClass)
- )
- printTyping(
- ptBlock("infer implicit" + (if (isView) " view" else ""),
- "tree" -> tree,
- "pt" -> pt,
- "undetparams" -> context.outer.undetparams
- )
- )
- indentTyping()
-
+ // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the
+ // work is performed, than at the point where it presently exists.
+ val shouldPrint = printTypings && !context.undetparams.isEmpty
val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeImpl) else null
val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberImpl) else null
val subtypeStart = if (Statistics.canEnable) Statistics.startCounter(subtypeImpl) else null
val start = if (Statistics.canEnable) Statistics.startTimer(implicitNanos) else null
- if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty)
- printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
+ if (shouldPrint)
+ typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString))
val implicitSearchContext = context.makeImplicit(reportAmbiguous)
val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit
- if ((result.isFailure || !settings.Xdivergence211.value) && saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
- context.updateBuffer(implicitSearchContext.errBuffer.filter(err => err.kind == ErrorKinds.Ambiguous || err.kind == ErrorKinds.Divergent))
- debugwarn("update buffer: " + implicitSearchContext.errBuffer)
+ if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
+ context.updateBuffer(implicitSearchContext.reportBuffer.errors.collect {
+ case dte: DivergentImplicitTypeError => dte
+ case ate: AmbiguousImplicitTypeError => ate
+ })
+ debuglog("update buffer: " + implicitSearchContext.reportBuffer.errors)
}
- printInference("[infer implicit] inferred " + result)
- context.undetparams = context.undetparams filterNot result.subst.from.contains
+ // SI-7944 undetermined type parameters that result from inference within typedImplicit land in
+ // `implicitSearchContext.undetparams`, *not* in `context.undetparams`
+ // Here, we copy them up to parent context (analogously to the way the errors are copied above),
+ // and then filter out any which *were* inferred and are part of the substitutor in the implicit search result.
+ context.undetparams = ((context.undetparams ++ implicitSearchContext.undetparams) filterNot result.subst.from.contains).distinct
if (Statistics.canEnable) Statistics.stopTimer(implicitNanos, start)
if (Statistics.canEnable) Statistics.stopCounter(rawTypeImpl, rawTypeStart)
if (Statistics.canEnable) Statistics.stopCounter(findMemberImpl, findMemberStart)
if (Statistics.canEnable) Statistics.stopCounter(subtypeImpl, subtypeStart)
- deindentTyping()
- printTyping("Implicit search yielded: "+ result)
+
result
}
@@ -101,24 +97,14 @@ trait Implicits {
def inferImplicit(tree: Tree, pt: Type, isView: Boolean, context: Context, silent: Boolean, withMacrosDisabled: Boolean, pos: Position, onError: (Position, String) => Unit): Tree = {
val wrapper1 = if (!withMacrosDisabled) (context.withMacrosEnabled[SearchResult] _) else (context.withMacrosDisabled[SearchResult] _)
def wrapper(inference: => SearchResult) = wrapper1(inference)
- def fail(reason: Option[String]) = {
- if (!silent) {
- if (context.hasErrors) onError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
- else onError(pos, reason getOrElse "implicit search has failed. to find out the reason, turn on -Xlog-implicits")
- }
- EmptyTree
- }
- try {
- wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos)) match {
- case failure if failure.tree.isEmpty => fail(None)
- case success => success.tree
- }
- } catch {
- case ex: DivergentImplicit =>
- if (settings.Xdivergence211.value)
- debugwarn("this shouldn't happen. DivergentImplicit exception has been thrown with -Xdivergence211 turned on: "+ex)
- fail(Some("divergent implicit expansion"))
+ val result = wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos))
+ if (result.isFailure && !silent) {
+ val err = context.firstError
+ val errPos = err.map(_.errPos).getOrElse(pos)
+ val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
+ onError(errPos, errMsg)
}
+ result.tree
}
/** Find all views from type `tp` (in which `tpars` are free)
@@ -137,7 +123,7 @@ trait Implicits {
val tvars = tpars map (TypeVar untouchable _)
val tpSubsted = tp.subst(tpars, tvars)
- val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(false))
+ val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyTpe), true, context.makeImplicit(reportAmbiguousErrors = false))
search.allImplicitsPoly(tvars)
}
@@ -149,6 +135,16 @@ trait Implicits {
private val implicitsCache = new LinkedHashMap[Type, Infoss]
private val infoMapCache = new LinkedHashMap[Symbol, InfoMap]
private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]()
+ private val implicitSearchId = { var id = 1 ; () => try id finally id += 1 }
+
+ private def isInvalidConversionTarget(tpe: Type): Boolean = tpe match {
+ case Function1(_, out) => AnyRefClass.tpe <:< out
+ case _ => false
+ }
+ private def isInvalidConversionSource(tpe: Type): Boolean = tpe match {
+ case Function1(in, _) => in <:< NullClass.tpe
+ case _ => false
+ }
def resetImplicits() {
implicitsCache.clear()
@@ -157,7 +153,7 @@ trait Implicits {
}
/* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
- * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
+ * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate debruijn index types
* when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`,
* so we have to approximate (otherwise it is excluded a priori).
*/
@@ -177,7 +173,6 @@ trait Implicits {
def isFailure = false
def isAmbiguousFailure = false
- // only used when -Xdivergence211 is turned on
def isDivergent = false
final def isSuccess = !isFailure
}
@@ -186,7 +181,6 @@ trait Implicits {
override def isFailure = true
}
- // only used when -Xdivergence211 is turned on
lazy val DivergentSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
override def isFailure = true
override def isDivergent = true
@@ -204,6 +198,7 @@ trait Implicits {
*/
class ImplicitInfo(val name: Name, val pre: Type, val sym: Symbol) {
private var tpeCache: Type = null
+ private var isCyclicOrErroneousCache: TriState = TriState.Unknown
/** Computes member type of implicit from prefix `pre` (cached). */
def tpe: Type = {
@@ -211,7 +206,12 @@ trait Implicits {
tpeCache
}
- def isCyclicOrErroneous =
+ def isCyclicOrErroneous: Boolean = {
+ if (!isCyclicOrErroneousCache.isKnown) isCyclicOrErroneousCache = computeIsCyclicOrErroneous
+ isCyclicOrErroneousCache.booleanValue
+ }
+
+ private[this] final def computeIsCyclicOrErroneous =
try sym.hasFlag(LOCKED) || containsError(tpe)
catch { case _: CyclicReference => true }
@@ -226,20 +226,13 @@ trait Implicits {
case NullaryMethodType(restpe) =>
containsError(restpe)
case mt @ MethodType(_, restpe) =>
- (mt.paramTypes exists typeIsError) || containsError(restpe)
+ // OPT avoiding calling `mt.paramTypes` which creates a new list.
+ (mt.params exists symTypeIsError) || containsError(restpe)
case _ =>
tp.isError
}
- /** Todo reconcile with definition of stability given in Types.scala */
- private def isStable(tp: Type): Boolean = tp match {
- case TypeRef(pre, sym, _) =>
- sym.isPackageClass ||
- sym.isModuleClass && isStable(pre) /*||
- sym.isAliasType && isStable(tp.normalize)*/
- case _ => tp.isStable
- }
- def isStablePrefix = isStable(pre)
+ def isStablePrefix = pre.isStable
override def equals(other: Any) = other match {
case that: ImplicitInfo =>
@@ -249,7 +242,10 @@ trait Implicits {
case _ => false
}
override def hashCode = name.## + pre.## + sym.##
- override def toString = name + ": " + tpe
+ override def toString = (
+ if (tpeCache eq null) name + ": ?"
+ else name + ": " + tpe
+ )
}
/** A class which is used to track pending implicits to prevent infinite implicit searches.
@@ -281,16 +277,13 @@ trait Implicits {
object HasMember {
private val hasMemberCache = perRunCaches.newMap[Name, Type]()
def apply(name: Name): Type = hasMemberCache.getOrElseUpdate(name, memberWildcardType(name, WildcardType))
- def unapply(pt: Type): Option[Name] = pt match {
- case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name)
- case _ => None
}
- }
/** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
*/
object HasMethodMatching {
- val dummyMethod = NoSymbol.newTermSymbol(newTermName("typer$dummy"))
+ val dummyMethod = NoSymbol.newTermSymbol("typer$dummy") setInfo NullaryMethodType(AnyTpe)
+
def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
@@ -317,7 +310,7 @@ trait Implicits {
*/
object Function1 {
val Sym = FunctionClass(1)
- def unapply(tp: Type) = tp match {
+ def unapply(tp: Type) = tp baseType Sym match {
case TypeRef(_, Sym, arg1 :: arg2 :: _) => Some((arg1, arg2))
case _ => None
}
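Switching the extractor to `tp baseType Sym` matters because a candidate view may only be a subtype of Function1, in which case the argument and result types surface on its base type rather than on the type itself. A rough runtime-reflection illustration (Conv is a made-up class; this uses scala-reflect, not the typer's internal Type API):

import scala.reflect.runtime.universe._

class Conv extends (Int => String) { def apply(i: Int): String = i.toString }

val function1Sym = typeOf[Int => String].typeSymbol   // the Function1 class symbol
val convTpe      = typeOf[Conv]

convTpe.baseType(function1Sym)   // Int => String: exposes the (argument, result) pair
// matching TypeRef(_, Function1, args) directly against convTpe would only see Conv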
@@ -332,27 +325,33 @@ trait Implicits {
* (useful when we infer synthetic stuff and pass EmptyTree in the `tree` argument)
* If it's set to NoPosition, then position-based services will use `tree.pos`
*/
- class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition)
- extends Typer(context0) with ImplicitsContextErrors {
- printTyping(
- ptBlock("new ImplicitSearch",
- "tree" -> tree,
- "pt" -> pt,
- "isView" -> isView,
- "context0" -> context0,
- "undetparams" -> context.outer.undetparams
- )
- )
-// assert(tree.isEmpty || tree.pos.isDefined, tree)
+ class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition) extends Typer(context0) with ImplicitsContextErrors {
+ val searchId = implicitSearchId()
+ private def typingLog(what: String, msg: => String) =
+ typingStack.printTyping(tree, f"[search #$searchId] $what $msg")
+
+ import infer._
+ if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount)
+
+ /** The type parameters to instantiate */
+ val undetParams = if (isView) Nil else context.outer.undetparams
+ val wildPt = approximate(pt)
+
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+
+ def undet_s = if (undetParams.isEmpty) "" else undetParams.mkString(" inferring ", ", ", "")
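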
+ def tree_s = typeDebug ptTree tree
+ def ctx_s = fullSiteString(context)
+ typingLog("start", s"`$tree_s`$undet_s, searching for adaptation to pt=$pt $ctx_s")
+
def pos = if (pos0 != NoPosition) pos0 else tree.pos
def failure(what: Any, reason: String, pos: Position = this.pos): SearchResult = {
- if (settings.XlogImplicits.value)
+ if (settings.XlogImplicits)
reporter.echo(pos, what+" is not a valid implicit value for "+pt+" because:\n"+reason)
SearchFailure
}
-
- import infer._
/** Is implicit info `info1` better than implicit info `info2`?
*/
def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
@@ -360,7 +359,7 @@ trait Implicits {
(info2 == NoImplicitInfo) ||
(info1 != NoImplicitInfo) && {
if (info1.sym.isStatic && info2.sym.isStatic) {
- improvesCache get (info1, info2) match {
+ improvesCache get ((info1, info2)) match {
case Some(b) => if (Statistics.canEnable) Statistics.incCounter(improvesCachedCount); b
case None =>
val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
@@ -388,12 +387,12 @@ trait Implicits {
* if one or both are intersection types with a pair of overlapping parent types.
*/
private def dominates(dtor: Type, dted: Type): Boolean = {
- def core(tp: Type): Type = tp.normalize match {
- case RefinedType(parents, defs) => intersectionType(parents map core, tp.typeSymbol.owner)
+ def core(tp: Type): Type = tp.dealiasWiden match {
+ case RefinedType(parents, defs) => intersectionType(parents map core, tp.typeSymbol.owner)
case AnnotatedType(annots, tp, selfsym) => core(tp)
- case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
- case PolyType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
- case _ => tp
+ case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
+ case PolyType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
+ case _ => tp
}
def stripped(tp: Type): Type = {
// `t.typeSymbol` returns the symbol of the normalized type. If that normalized type
@@ -402,37 +401,26 @@ trait Implicits {
val syms = for (t <- tp; if t.typeSymbol.isTypeParameter) yield t.typeSymbol
deriveTypeWithWildcards(syms.distinct)(tp)
}
- def sum(xs: List[Int]) = (0 /: xs)(_ + _)
- def complexity(tp: Type): Int = tp.normalize match {
- case NoPrefix =>
- 0
- case SingleType(pre, sym) =>
- if (sym.isPackage) 0 else complexity(tp.normalize.widen)
- case TypeRef(pre, sym, args) =>
- complexity(pre) + sum(args map complexity) + 1
- case RefinedType(parents, _) =>
- sum(parents map complexity) + 1
- case _ =>
- 1
+ def complexity(tp: Type): Int = tp.dealias match {
+ case NoPrefix => 0
+ case SingleType(pre, sym) => if (sym.isPackage) 0 else complexity(tp.dealiasWiden)
+ case ThisType(sym) => if (sym.isPackage) 0 else 1
+ case TypeRef(pre, sym, args) => complexity(pre) + (args map complexity).sum + 1
+ case RefinedType(parents, _) => (parents map complexity).sum + 1
+ case _ => 1
}
def overlaps(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
case (RefinedType(parents, _), _) => parents exists (overlaps(_, tp2))
case (_, RefinedType(parents, _)) => parents exists (overlaps(tp1, _))
- case _ => tp1.typeSymbol == tp2.typeSymbol
+ case _ => tp1.typeSymbol == tp2.typeSymbol
}
val dtor1 = stripped(core(dtor))
val dted1 = stripped(core(dted))
overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1))
}
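The complexity measure above is what turns "same head symbol, strictly bigger type" into a divergence signal. A toy model over a simplified type shape (Tp and Ref are not the compiler's types) shows the intended ordering:

sealed trait Tp
final case class Ref(head: String, args: List[Tp]) extends Tp

// mirrors the TypeRef case: argument complexities plus one (package prefixes count as 0)
def complexity(tp: Tp): Int = tp match {
  case Ref(_, args) => (args map complexity).sum + 1
}

val int      = Ref("Int", Nil)
val listInt  = Ref("List", List(int))
val listList = Ref("List", List(listInt))

complexity(int)      // 1
complexity(listInt)  // 2
complexity(listList) // 3
// a new expected type List[List[Int]] dominates a pending List[Int]: overlapping head
// symbol and strictly larger complexity, so the expansion is cut off as divergent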
- if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount)
-
- /** The type parameters to instantiate */
- val undetParams = if (isView) List() else context.outer.undetparams
-
/** The expected type with all undetermined type parameters replaced with wildcards. */
def approximate(tp: Type) = deriveTypeWithWildcards(undetParams)(tp)
- val wildPt = approximate(pt)
/** Try to construct a typed tree from given implicit info with given
* expected type.
@@ -458,45 +446,21 @@ trait Implicits {
(context.openImplicits find { case OpenImplicit(info, tp, tree1) => !info.sym.isMacro && tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
//println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
- if (settings.Xdivergence211.value) DivergentSearchFailure
- else throw DivergentImplicit
+ DivergentSearchFailure
case None =>
- def pre211DivergenceLogic() = {
try {
context.openImplicits = OpenImplicit(info, pt, tree) :: context.openImplicits
// println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
- typedImplicit0(info, ptChecked, isLocal)
- } catch {
- case ex: DivergentImplicit =>
+ val result = typedImplicit0(info, ptChecked, isLocal)
+ if (result.isDivergent) {
//println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
- if (context.openImplicits.tail.isEmpty) {
- if (!pt.isErroneous && !info.sym.isMacro)
- DivergingImplicitExpansionError(tree, pt, info.sym)(context)
- SearchFailure
- } else {
- throw DivergentImplicit
- }
+ if (context.openImplicits.tail.isEmpty && !pt.isErroneous)
+ DivergingImplicitExpansionError(tree, pt, info.sym)(context)
+ }
+ result
} finally {
context.openImplicits = context.openImplicits.tail
}
- }
- def post211DivergenceLogic() = {
- try {
- context.openImplicits = OpenImplicit(info, pt, tree) :: context.openImplicits
- // println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
- val result = typedImplicit0(info, ptChecked, isLocal)
- if (result.isDivergent) {
- //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
- if (context.openImplicits.tail.isEmpty && !pt.isErroneous)
- DivergingImplicitExpansionError(tree, pt, info.sym)(context)
- }
- result
- } finally {
- context.openImplicits = context.openImplicits.tail
- }
- }
- if (settings.Xdivergence211.value) post211DivergenceLogic()
- else pre211DivergenceLogic()
}
}
@@ -512,10 +476,8 @@ trait Implicits {
val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null
val result = normSubType(tp, pt) || isView && {
pt match {
- case TypeRef(_, Function1.Sym, arg1 :: arg2 :: Nil) =>
- matchesPtView(tp, arg1, arg2, undet)
- case _ =>
- false
+ case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet)
+ case _ => false
}
}
if (Statistics.canEnable) Statistics.stopTimer(matchesPtNanos, start)
@@ -606,7 +568,7 @@ trait Implicits {
// side is a class, else we may not know enough.
case tr1 @ TypeRef(_, sym1, _) if sym1.isClass =>
tp2.dealiasWiden match {
- case TypeRef(_, sym2, _) => sym2.isClass && !(sym1 isWeakSubClass sym2)
+ case TypeRef(_, sym2, _) => ((sym1 eq ByNameParamClass) != (sym2 eq ByNameParamClass)) || (sym2.isClass && !(sym1 isWeakSubClass sym2))
case RefinedType(parents, decls) => decls.nonEmpty && tr1.member(decls.head.name) == NoSymbol
case _ => false
}
@@ -615,31 +577,21 @@ trait Implicits {
private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
if (Statistics.canEnable) Statistics.incCounter(plausiblyCompatibleImplicits)
- printTyping (
- ptBlock("typedImplicit0",
- "info.name" -> info.name,
- "ptChecked" -> ptChecked,
- "pt" -> wildPt,
- "orig" -> ptBlock("info",
- "undetParams" -> undetParams,
- "info.pre" -> info.pre
- ).replaceAll("\\n", "\n ")
- )
- )
-
- if (ptChecked || matchesPt(info))
- typedImplicit1(info, isLocal)
- else
- SearchFailure
+ val ok = ptChecked || matchesPt(info) && {
+ def word = if (isLocal) "local " else ""
+ typingLog("match", s"$word$info")
+ true
+ }
+ if (ok) typedImplicit1(info, isLocal) else SearchFailure
}
private def typedImplicit1(info: ImplicitInfo, isLocal: Boolean): SearchResult = {
if (Statistics.canEnable) Statistics.incCounter(matchingImplicits)
- val itree = atPos(pos.focus) {
- // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
- val isScalaDoc = context.tree == EmptyTree
+ // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
+ val isScalaDoc = context.tree == EmptyTree
+ val itree0 = atPos(pos.focus) {
if (isLocal && !isScalaDoc) {
// SI-4270 SI-5376 Always use an unattributed Ident for implicits in the local scope,
// rather than an attributed Select, to detect shadowing.
@@ -651,36 +603,50 @@ trait Implicits {
Select(gen.mkAttributedQualifier(info.pre), implicitMemberName)
}
}
- printTyping("typedImplicit1 %s, pt=%s, from implicit %s:%s".format(
- typeDebug.ptTree(itree), wildPt, info.name, info.tpe)
- )
+ val itree1 = if (isBlackbox(info.sym)) suppressMacroExpansion(itree0) else itree0
+ typingLog("considering", typeDebug.ptTree(itree1))
- def fail(reason: String): SearchResult = failure(itree, reason)
+ def fail(reason: String): SearchResult = failure(itree0, reason)
+ def fallback = typed1(itree1, EXPRmode, wildPt)
try {
- val itree1 =
- if (isView) {
- val arg1 :: arg2 :: _ = pt.typeArgs
+ val itree2 = if (!isView) fallback else pt match {
+ case Function1(arg1, arg2) =>
typed1(
- atPos(itree.pos)(Apply(itree, List(Ident("<argument>") setType approximate(arg1)))),
+ atPos(itree0.pos)(Apply(itree1, List(Ident("<argument>") setType approximate(arg1)))),
EXPRmode,
approximate(arg2)
- )
- }
- else
- typed1(itree, EXPRmode, wildPt)
-
- if (context.hasErrors)
- return fail(context.errBuffer.head.errMsg)
+ ) match {
+ // try to infer implicit parameters immediately in order to:
+ // 1) guide type inference for implicit views
+ // 2) discard ineligible views right away instead of risking spurious ambiguous implicits
+ //
+ // this is an improvement of the state of the art that brings consistency to implicit resolution rules
+ // (and also helps fundep materialization to be applicable to implicit views)
+ //
+ // there's one caveat though. we need to turn this behavior off for scaladoc
+ // because scaladoc usually doesn't know the entire story
+ // and is just interested in views that are potentially applicable
+ // for instance, if we have `class C[T]` and `implicit def conv[T: Numeric](c: C[T]) = ???`
+ // then Scaladoc will give us something of type `C[T]`, and it would like to know
+ // that `conv` is potentially available under such and such conditions
+ case tree if isImplicitMethodType(tree.tpe) && !isScalaDoc => applyImplicitArgs(tree)
+ case tree => tree
+ }
+ case _ => fallback
+ }
+ context.firstError match { // using match rather than foreach to avoid non local return.
+ case Some(err) =>
+ log("implicit adapt failed: " + err.errMsg)
+ return fail(err.errMsg)
+ case None =>
+ }
if (Statistics.canEnable) Statistics.incCounter(typedImplicits)
- printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt))
- val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
- else adapt(itree1, EXPRmode, wildPt)
+ val itree3 = if (isView) treeInfo.dissectApplied(itree2).callee
+ else adapt(itree2, EXPRmode, wildPt)
- printTyping("adapted implicit %s:%s to %s".format(
- itree1.symbol, itree2.tpe, wildPt)
- )
+ typingStack.showAdapt(itree0, itree3, pt, context)
def hasMatchingSymbol(tree: Tree): Boolean = (tree.symbol == info.sym) || {
tree match {
@@ -692,31 +658,29 @@ trait Implicits {
}
if (context.hasErrors)
- fail("hasMatchingSymbol reported error: " + context.errBuffer.head.errMsg)
- else if (isLocal && !hasMatchingSymbol(itree1))
+ fail("hasMatchingSymbol reported error: " + context.firstError.get.errMsg)
+ else if (itree3.isErroneous)
+ fail("error typechecking implicit candidate")
+ else if (isLocal && !hasMatchingSymbol(itree2))
fail("candidate implicit %s is shadowed by %s".format(
- info.sym.fullLocationString, itree1.symbol.fullLocationString))
+ info.sym.fullLocationString, itree2.symbol.fullLocationString))
else {
val tvars = undetParams map freshVar
def ptInstantiated = pt.instantiateTypeParams(undetParams, tvars)
- printInference("[search] considering %s (pt contains %s) trying %s against pt=%s".format(
- if (undetParams.isEmpty) "no tparams" else undetParams.map(_.name).mkString(", "),
- typeVarsInType(ptInstantiated) filterNot (_.isGround) match { case Nil => "no tvars" ; case tvs => tvs.mkString(", ") },
- itree2.tpe, pt
- ))
-
- if (matchesPt(itree2.tpe, ptInstantiated, undetParams)) {
+ if (matchesPt(itree3.tpe, ptInstantiated, undetParams)) {
if (tvars.nonEmpty)
- printTyping(ptLine("" + info.sym, "tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
+ typingLog("solve", ptLine("tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
- val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt),
- false, lubDepth(List(itree2.tpe, pt)))
+ val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt), upper = false, lubDepth(itree3.tpe :: pt :: Nil))
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
- checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
- if (context.hasErrors)
- return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + context.errBuffer.head.errMsg)
+ checkBounds(itree3, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
+ context.firstError match {
+ case Some(err) =>
+ return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + err.errMsg)
+ case None =>
+ }
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
@@ -727,7 +691,7 @@ trait Implicits {
if (okParams.isEmpty) EmptyTreeTypeSubstituter
else {
val subst = new TreeTypeSubstituter(okParams, okArgs)
- subst traverse itree2
+ subst traverse itree3
notifyUndetparamsInferred(okParams, okArgs)
subst
}
@@ -741,26 +705,27 @@ trait Implicits {
// duplicating the code here, but this is probably a
// hotspot (and you can't just call typed, need to force
// re-typecheck)
- // TODO: the return tree is ignored. This seems to make
- // no difference, but it's bad practice regardless.
-
-
- val checked = itree2 match {
- case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args)
- case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
+ //
+ // This is just called for the side effect of error detection,
+ // see SI-6966 to see what goes wrong if we use the result of this
+ // as the SearchResult.
+ itree3 match {
+ case TypeApply(fun, args) => typedTypeApply(itree3, EXPRmode, fun, args)
+ case Apply(TypeApply(fun, args), _) => typedTypeApply(itree3, EXPRmode, fun, args) // t2421c
case t => t
}
- if (context.hasErrors)
- fail("typing TypeApply reported errors for the implicit tree: " + context.errBuffer.head.errMsg)
- else {
- val result = new SearchResult(itree2, subst)
- if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
- printInference("[success] found %s for pt %s".format(result, ptInstantiated))
- result
+ context.firstError match {
+ case Some(err) =>
+ fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg)
+ case None =>
+ val result = new SearchResult(unsuppressMacroExpansion(itree3), subst)
+ if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
+ typingLog("success", s"inferred value of type $ptInstantiated is $result")
+ result
}
}
- else fail("incompatible: %s does not match expected type %s".format(itree2.tpe, ptInstantiated))
+ else fail("incompatible: %s does not match expected type %s".format(itree3.tpe, ptInstantiated))
}
}
catch {
@@ -842,7 +807,7 @@ trait Implicits {
private def isIneligible(info: ImplicitInfo) = (
info.isCyclicOrErroneous
- || isView && isPredefMemberNamed(info.sym, nme.conforms)
+ || isView && (info.sym eq Predef_conforms)
|| shadower.isShadowed(info.name)
|| (!context.macrosEnabled && info.sym.isTermMacro)
)
@@ -865,26 +830,6 @@ trait Implicits {
/** Preventing a divergent implicit from terminating implicit search,
* so that if there is a best candidate it can still be selected.
- *
- * The old way of handling divergence.
- * Only enabled when -Xdivergence211 is turned off.
- */
- private var divergence = false
- private val divergenceHandler: PartialFunction[Throwable, SearchResult] = {
- var remaining = 1;
- { case x: DivergentImplicit if remaining > 0 =>
- remaining -= 1
- divergence = true
- log("discarding divergent implicit during implicit search")
- SearchFailure
- }
- }
-
- /** Preventing a divergent implicit from terminating implicit search,
- * so that if there is a best candidate it can still be selected.
- *
- * The new way of handling divergence.
- * Only enabled when -Xdivergence211 is turned on.
*/
object DivergentImplicitRecovery {
// symbol of the implicit that caused the divergence.
@@ -897,7 +842,7 @@ trait Implicits {
if (search.isDivergent && countdown > 0) {
countdown -= 1
implicitSym = i.sym
- log("discarding divergent implicit ${implicitSym} during implicit search")
+ log(s"discarding divergent implicit $implicitSym during implicit search")
SearchFailure
} else search
}
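The recovery object above implements a small "swallow one divergence, remember the culprit" policy so a better candidate can still win. A self-contained toy version of the same pattern (Candidate, Outcome and Recovery are hypothetical, not the compiler's SearchResult machinery):

final case class Candidate(name: String)
final case class Outcome(isDivergent: Boolean, isFailure: Boolean)

object Recovery {
  private var countdown = 1
  private var culprit: Option[Candidate] = None

  def apply(outcome: Outcome, candidate: Candidate): Outcome =
    if (outcome.isDivergent && countdown > 0) {
      countdown -= 1                                   // tolerate exactly one divergent candidate
      culprit = Some(candidate)                        // remember it for the eventual error message
      Outcome(isDivergent = false, isFailure = true)   // downgrade to an ordinary failure
    } else outcome

  def culpritName: Option[String] = culprit.map(_.name)
}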
@@ -915,10 +860,7 @@ trait Implicits {
matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg)
}
if (eligible.nonEmpty)
- printInference("[search%s] %s with pt=%s in %s, eligible:\n %s".format(
- if (isView) " view" else "",
- tree, pt, context.owner.enclClass, eligible.mkString("\n "))
- )
+ printTyping(tree, eligible.size + s" eligible for pt=$pt at ${fullSiteString(context)}")
/** Faster implicit search. Overall idea:
* - prune aggressively
@@ -928,24 +870,15 @@ trait Implicits {
@tailrec private def rankImplicits(pending: Infos, acc: Infos): Infos = pending match {
case Nil => acc
case i :: is =>
- def pre211tryImplicitInfo(i: ImplicitInfo) =
- try typedImplicit(i, ptChecked = true, isLocal)
- catch divergenceHandler
-
- def post211tryImplicitInfo(i: ImplicitInfo) =
- DivergentImplicitRecovery(typedImplicit(i, ptChecked = true, isLocal), i)
-
- {
- if (settings.Xdivergence211.value) post211tryImplicitInfo(i)
- else pre211tryImplicitInfo(i)
- } match {
- // only used if -Xdivergence211 is turned on
+ DivergentImplicitRecovery(typedImplicit(i, ptChecked = true, isLocal), i) match {
case sr if sr.isDivergent =>
Nil
case sr if sr.isFailure =>
// We don't want errors that occur during checking implicit info
// to influence the check of further infos.
- context.condBufferFlush(_.kind != ErrorKinds.Divergent)
+ context.reportBuffer.retainErrors {
+ case err: DivergentImplicitTypeError => true
+ }
rankImplicits(is, acc)
case newBest =>
best = newBest
@@ -954,10 +887,7 @@ trait Implicits {
try improves(i, alt)
catch {
case e: CyclicReference =>
- if (printInfers) {
- println(i+" discarded because cyclic reference occurred")
- e.printStackTrace()
- }
+ debugwarn(s"Discarding $i during implicit search due to cyclic reference")
true
}
})
@@ -990,12 +920,11 @@ trait Implicits {
}
if (best.isFailure) {
- /** If there is no winner, and we witnessed and caught divergence,
- * now we can throw it for the error message.
+ /* If there is no winner, and we witnessed and caught divergence,
+ * now we can throw it for the error message.
*/
- if (divergence || DivergentImplicitRecovery.sym != null) {
- if (settings.Xdivergence211.value) DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context)
- else throw DivergentImplicit
+ if (DivergentImplicitRecovery.sym != null) {
+ DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context)
}
if (invalidImplicits.nonEmpty)
@@ -1053,8 +982,8 @@ trait Implicits {
*/
private def companionImplicitMap(tp: Type): InfoMap = {
- /** Populate implicit info map by traversing all parts of type `tp`.
- * Parameters as for `getParts`.
+ /* Populate implicit info map by traversing all parts of type `tp`.
+ * Parameters as for `getParts`.
*/
def getClassParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) = tp match {
case TypeRef(pre, sym, args) =>
@@ -1086,13 +1015,13 @@ trait Implicits {
}
}
- /** Populate implicit info map by traversing all parts of type `tp`.
- * This method is performance critical.
- * @param tp The type for which we want to traverse parts
- * @param infoMap The infoMap in which implicit infos corresponding to parts are stored
- * @param seen The types that were already visited previously when collecting parts for the given infoMap
- * @param pending The set of static symbols for which we are currently trying to collect their parts
- * in order to cache them in infoMapCache
+ /* Populate implicit info map by traversing all parts of type `tp`.
+ * This method is performance critical.
+ * @param tp The type for which we want to traverse parts
+ * @param infoMap The infoMap in which implicit infos corresponding to parts are stored
+ * @param seen The types that were already visited previously when collecting parts for the given infoMap
+ * @param pending The set of static symbols for which we are currently trying to collect their parts
+ * in order to cache them in infoMapCache
*/
def getParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) {
if (seen(tp))
@@ -1101,23 +1030,21 @@ trait Implicits {
tp match {
case TypeRef(pre, sym, args) =>
if (sym.isClass) {
- if (!((sym.name == tpnme.REFINE_CLASS_NAME) ||
- (sym.name startsWith tpnme.ANON_CLASS_NAME) ||
- (sym.name == tpnme.ROOT))) {
+ if (!sym.isAnonOrRefinementClass && !sym.isRoot) {
if (sym.isStatic && !(pending contains sym))
infoMap ++= {
infoMapCache get sym match {
case Some(imap) => imap
case None =>
val result = new InfoMap
- getClassParts(sym.tpe)(result, new mutable.HashSet(), pending + sym)
+ getClassParts(sym.tpeHK)(result, new mutable.HashSet(), pending + sym)
infoMapCache(sym) = result
result
}
}
else
getClassParts(tp)
- args foreach (getParts(_))
+ args foreach getParts
}
} else if (sym.isAliasType) {
getParts(tp.normalize) // SI-7180 Normalize needed to expand HK type refs
@@ -1145,9 +1072,9 @@ trait Implicits {
val infoMap = new InfoMap
getParts(tp)(infoMap, new mutable.HashSet(), Set())
- printInference(
- ptBlock("companionImplicitMap " + tp, infoMap.toSeq.map({ case (k, v) => ("" + k, v.mkString(", ")) }): _*)
- )
+ if (infoMap.nonEmpty)
+ printTyping(tree, infoMap.size + " implicits in companion scope")
+
infoMap
}
@@ -1179,13 +1106,6 @@ trait Implicits {
}
}
- private def TagSymbols = TagMaterializers.keySet
- private val TagMaterializers = Map[Symbol, Symbol](
- ClassTagClass -> materializeClassTag,
- WeakTypeTagClass -> materializeWeakTypeTag,
- TypeTagClass -> materializeTypeTag
- )
-
/** Creates a tree that will produce a tag of the requested flavor.
* An EmptyTree is returned if materialization fails.
*/
@@ -1204,8 +1124,10 @@ trait Implicits {
try {
val tree1 = typedPos(pos.focus)(arg)
- if (context.hasErrors) processMacroExpansionError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
- else new SearchResult(tree1, EmptyTreeTypeSubstituter)
+ context.firstError match {
+ case Some(err) => processMacroExpansionError(err.errPos, err.errMsg)
+ case None => new SearchResult(tree1, EmptyTreeTypeSubstituter)
+ }
} catch {
case ex: TypeError =>
processMacroExpansionError(ex.pos, ex.msg)
@@ -1222,8 +1144,8 @@ trait Implicits {
case ThisType(thisSym) =>
gen.mkAttributedThis(thisSym)
case _ =>
- // if ``pre'' is not a PDT, e.g. if someone wrote
- // implicitly[scala.reflect.macros.Context#TypeTag[Int]]
+ // if `pre` is not a PDT, e.g. if someone wrote
+ // implicitly[scala.reflect.macros.blackbox.Context#TypeTag[Int]]
// then we need to fail, because we don't know the prefix to use during type reification
// upd. we also need to fail silently, because this is a very common situation
// e.g. quite often we're searching for BaseUniverse#TypeTag, e.g. for a type tag in any universe
@@ -1236,8 +1158,8 @@ trait Implicits {
}
)
// todo. migrate hardcoded materialization in Implicits to corresponding implicit macros
- var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
- if (settings.XlogImplicits.value) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
+ val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
+ if (settings.XlogImplicits) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
if (context.macrosEnabled) success(materializer)
// don't call `failure` here. if macros are disabled, we just fail silently
      // otherwise -Xlog-implicits will spam the log with zillions of "macros are disabled"
@@ -1245,8 +1167,6 @@ trait Implicits {
else SearchFailure
}
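// Editor's illustration (not part of this patch): what the materializer above services,
// seen from the user's side. When no ClassTag/TypeTag is in scope, implicit search falls
// back to the corresponding materialize* macro and synthesizes the tag.
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag

object TagMaterializationDemo {
  def classTagOf[T](implicit ct: ClassTag[T]): ClassTag[T] = ct
  def typeTagOf[T](implicit tt: TypeTag[T]): TypeTag[T]    = tt

  def main(args: Array[String]): Unit = {
    println(classTagOf[Array[Int]])     // a materialized ClassTag (prints e.g. Array[int])
    println(typeTagOf[List[Int]].tpe)   // a materialized TypeTag  (prints e.g. List[Int])
  }
}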
- private val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
-
/** Creates a tree that calls the relevant factory method in object
* scala.reflect.Manifest for type 'tp'. An EmptyTree is returned if
* no manifest is found. todo: make this instantiate take type params as well?
@@ -1255,23 +1175,23 @@ trait Implicits {
val full = flavor == FullManifestClass
val opt = flavor == OptManifestClass
- /** Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
+ /* Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
if (args contains EmptyTree) EmptyTree
else typedPos(tree.pos.focus) {
val mani = gen.mkManifestFactoryCall(full, constructor, tparg, args.toList)
- if (settings.debug.value) println("generated manifest: "+mani) // DEBUG
+ if (settings.debug) println("generated manifest: "+mani) // DEBUG
mani
}
- /** Creates a tree representing one of the singleton manifests.*/
+ /* Creates a tree representing one of the singleton manifests.*/
def findSingletonManifest(name: String) = typedPos(tree.pos.focus) {
Select(gen.mkAttributedRef(FullManifestModule), name)
}
- /** Re-wraps a type in a manifest before calling inferImplicit on the result */
+ /* Re-wraps a type in a manifest before calling inferImplicit on the result */
def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) =
- inferImplicit(tree, appliedType(manifestClass, tp), true, false, context).tree
+ inferImplicit(tree, appliedType(manifestClass, tp), reportAmbiguous = true, isView = false, context).tree
def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = {
@@ -1313,8 +1233,8 @@ trait Implicits {
// looking for a manifest of a type parameter that hasn't been inferred by now,
// can't do much, but let's not fail
else if (undetParams contains sym) {
- // #3859: need to include the mapping from sym -> NothingClass.tpe in the SearchResult
- mot(NothingClass.tpe, sym :: from, NothingClass.tpe :: to)
+ // #3859: need to include the mapping from sym -> NothingTpe in the SearchResult
+ mot(NothingTpe, sym :: from, NothingTpe :: to)
} else {
// a manifest should have been found by normal searchImplicit
EmptyTree
@@ -1406,7 +1326,7 @@ trait Implicits {
val failstart = if (Statistics.canEnable) Statistics.startTimer(inscopeFailNanos) else null
val succstart = if (Statistics.canEnable) Statistics.startTimer(inscopeSucceedNanos) else null
- var result = searchImplicit(context.implicitss, true)
+ var result = searchImplicit(context.implicitss, isLocal = true)
if (result.isFailure) {
if (Statistics.canEnable) Statistics.stopTimer(inscopeFailNanos, failstart)
@@ -1421,34 +1341,44 @@ trait Implicits {
val wasAmbigious = result.isAmbiguousFailure // SI-6667, never search companions after an ambiguous error in in-scope implicits
result = materializeImplicit(pt)
-
// `materializeImplicit` does some preprocessing for `pt`
        // is it only meant for manifests/tags or do we need to do the same for `implicitsOfExpectedType`?
- if (result.isFailure) result = searchImplicit(implicitsOfExpectedType, false)
+ if (result.isFailure && !wasAmbigious)
+ result = searchImplicit(implicitsOfExpectedType, isLocal = false)
if (result.isFailure) {
context.updateBuffer(previousErrs)
if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
- if (wasAmbigious && settings.lint.value)
- reporter.warning(tree.pos,
- "Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. \n" +
- previousErrs.map(_.errMsg).mkString("\n"))
-
if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart)
if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
}
}
-
- if (result.isFailure && settings.debug.value)
- log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
+ if (result.isSuccess && isView) {
+ def maybeInvalidConversionError(msg: String) {
+ // We have to check context.ambiguousErrors even though we are calling "issueAmbiguousError"
+ // which ostensibly does exactly that before issuing the error. Why? I have no idea. Test is pos/t7690.
+ if (context.ambiguousErrors)
+ context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, msg))
+ }
+ if (isInvalidConversionTarget(pt)) {
+ maybeInvalidConversionError("the result type of an implicit conversion must be more specific than AnyRef")
+ result = SearchFailure
+ }
+ else if (settings.isScala211 && isInvalidConversionSource(pt)) {
+ maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion")
+ result = SearchFailure
+ }
+ }
+ if (result.isFailure)
+ debuglog("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
result
}
def allImplicits: List[SearchResult] = {
def search(iss: Infoss, isLocal: Boolean) = applicableInfos(iss, isLocal).values
- (search(context.implicitss, true) ++ search(implicitsOfExpectedType, false)).toList.filter(_.tree ne EmptyTree)
+ (search(context.implicitss, isLocal = true) ++ search(implicitsOfExpectedType, isLocal = false)).toList.filter(_.tree ne EmptyTree)
}
// find all implicits for some type that contains type variables
@@ -1494,13 +1424,15 @@ trait Implicits {
case None => Some("Missing argument `msg` on implicitNotFound annotation.")
})
+ // http://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html
+ private val Intersobralator = """\$\{\s*([^}\s]+)\s*\}""".r
class Message(sym: Symbol, msg: String) {
- // http://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html
- private def interpolate(text: String, vars: Map[String, String]) = {
- """\$\{([^}]+)\}""".r.replaceAllIn(text, (_: Regex.Match) match {
- case Regex.Groups(v) => java.util.regex.Matcher.quoteReplacement(vars.getOrElse(v, "")) // #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw)
- })}
+ private def interpolate(text: String, vars: Map[String, String]) =
+ Intersobralator.replaceAllIn(text, (_: Regex.Match) match {
+ case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "")
+ // #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw)
+ })
private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName)
@@ -1509,18 +1441,16 @@ trait Implicits {
interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
def validate: Option[String] = {
- import scala.util.matching.Regex; import scala.collection.breakOut
- // is there a shorter way to avoid the intermediate toList?
- val refs = """\$\{([^}]+)\}""".r.findAllIn(msg).matchData.map(_ group 1).toSet
+ val refs = Intersobralator.findAllMatchIn(msg).map(_ group 1).toSet
val decls = typeParamNames.toSet
(refs &~ decls) match {
case s if s.isEmpty => None
- case unboundNames =>
+ case unboundNames =>
val singular = unboundNames.size == 1
- Some("The type parameter"+( if(singular) " " else "s " )+ unboundNames.mkString(", ") +
- " referenced in the message of the @implicitNotFound annotation "+( if(singular) "is" else "are" )+
- " not defined by "+ sym +".")
+ val ess = if (singular) "" else "s"
+ val bee = if (singular) "is" else "are"
+ Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @implicitNotFound annotation $bee not defined by $sym.")
}
}
}
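// Editor's illustration (not part of this patch): a standalone sketch of the
// @implicitNotFound message interpolation above. The same pattern replaces ${T}
// placeholders with the rendered type arguments, quoting the replacement so that
// literal dollar signs (e.g. the interpreter's $iw) survive intact.
import scala.util.matching.Regex

object ImplicitNotFoundInterpolationSketch {
  private val Placeholder = """\$\{\s*([^}\s]+)\s*\}""".r   // same shape as Intersobralator

  def interpolateMsg(text: String, vars: Map[String, String]): String =
    Placeholder.replaceAllIn(text, m => Regex.quoteReplacement(vars.getOrElse(m.group(1), "")))

  def main(args: Array[String]): Unit = {
    val msg = "No implicit Ordering defined for ${T}."
    println(interpolateMsg(msg, Map("T" -> "java.io.File")))
    // No implicit Ordering defined for java.io.File.
  }
}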
@@ -1535,9 +1465,7 @@ object ImplicitsStats {
val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount)
val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount)
val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount)
- val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount)
val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer")
- val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount)
val plausiblyCompatibleImplicits
= Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount)
val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount)
@@ -1557,7 +1485,3 @@ object ImplicitsStats {
val implicitCacheAccs = Statistics.newCounter ("implicit cache accesses", "typer")
val implicitCacheHits = Statistics.newSubCounter("implicit cache hits", implicitCacheAccs)
}
-
-// only used when -Xdivergence211 is turned off
-class DivergentImplicit extends Exception
-object DivergentImplicit extends DivergentImplicit
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 55e0a954f0..dd0923a696 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -7,12 +7,11 @@ package scala.tools.nsc
package typechecker
import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
import scala.util.control.ControlThrowable
import symtab.Flags._
-import scala.annotation.tailrec
+import scala.reflect.internal.Depth
-/** This trait ...
+/** This trait contains methods related to type parameter inference.
*
* @author Martin Odersky
* @version 1.0
@@ -22,138 +21,64 @@ trait Infer extends Checkable {
import global._
import definitions._
- import typer.printInference
import typeDebug.ptBlock
-
-/* -- Type parameter inference utility functions --------------------------- */
-
- private def assertNonCyclic(tvar: TypeVar) =
- assert(tvar.constr.inst != tvar, tvar.origin)
-
- /** The formal parameter types corresponding to <code>formals</code>.
- * If <code>formals</code> has a repeated last parameter, a list of
- * (nargs - params.length + 1) copies of its type is returned.
- * By-name types are replaced with their underlying type.
+ import typeDebug.str.parentheses
+ import typingStack.{ printTyping }
+
+ /** The formal parameter types corresponding to `formals`.
+ * If `formals` has a repeated last parameter, a list of
+ * (numArgs - numFormals + 1) copies of its type is appended
+ * to the other formals. By-name types are replaced with their
+ * underlying type.
*
* @param removeByName allows keeping ByName parameters. Used in NamesDefaults.
* @param removeRepeated allows keeping repeated parameter (if there's one argument). Used in NamesDefaults.
*/
- def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
- val formals1 = if (removeByName) formals mapConserve {
- case TypeRef(_, ByNameParamClass, List(arg)) => arg
- case formal => formal
- } else formals
- if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
- val ft = formals1.last.dealiasWiden.typeArgs.head
- formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
- } else formals1
+ def formalTypes(formals: List[Type], numArgs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
+ val numFormals = formals.length
+ val formals1 = if (removeByName) formals mapConserve dropByName else formals
+ val expandLast = (
+ (removeRepeated || numFormals != numArgs)
+ && isVarArgTypes(formals1)
+ )
+ def lastType = formals1.last.dealiasWiden.typeArgs.head
+ def expanded(n: Int) = (1 to n).toList map (_ => lastType)
+
+ if (expandLast)
+ formals1.init ::: expanded(numArgs - numFormals + 1)
+ else
+ formals1
}
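// Editor's illustration (not part of this patch): a simplified sketch of the vararg
// expansion documented above. A repeated last formal T* contributes
// (numArgs - numFormals + 1) copies of T; plain strings stand in for types, and the
// by-name and flag handling of the real method is omitted.
object FormalTypesSketch {
  def expandFormals(formals: List[String], numArgs: Int, lastIsRepeated: Boolean): List[String] =
    if (lastIsRepeated && numArgs >= formals.length - 1)
      formals.init ::: List.fill(numArgs - formals.length + 1)(formals.last.stripSuffix("*"))
    else
      formals

  def main(args: Array[String]): Unit = {
    // e.g. def f(x: Int, ys: String*) applied to four arguments:
    println(expandFormals(List("Int", "String*"), numArgs = 4, lastIsRepeated = true))
    // List(Int, String, String, String)
  }
}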
- /** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types
- * for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding
- * unapply[Seq] call is assumed to have result type `resTp`.
- *
- * `formals` are the formal types before expanding a potential repeated parameter (must come last in `formals`, if at all)
- *
- * @param nbSubPats The number of arguments to the extractor pattern
- * @param effectiveNbSubPats `nbSubPats`, unless there is one sub-pattern which, after unwrapping
- * bind patterns, is a Tuple pattern, in which case it is the number of
- * elements. Used to issue warnings about binding a `TupleN` to a single value.
- * @throws TypeError when the unapply[Seq] definition is ill-typed
- * @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
- *
- * This is the spec currently implemented -- TODO: update it.
- *
- * 8.1.8 ExtractorPatterns
- *
- * An extractor pattern x(p1, ..., pn) where n ā‰„ 0 is of the same syntactic form as a constructor pattern.
- * However, instead of a case class, the stable identifier x denotes an object which has a member method named unapply or unapplySeq that matches the pattern.
- *
- * An `unapply` method with result type `R` in an object `x` matches the
- * pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either:
- * - `n = 0` and `R =:= Boolean`, or
- * - `n = 1` and `R <:< Option[T]`, for some type `T`.
- * The argument pattern `p1` is typed in turn with expected type `T`.
- * - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some
- * types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are
- * typed with expected types `T_1, ..., T_n`.
- *
- * An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)`
- * if it takes exactly one argument and its result type is of the form `Option[S]`,
- * where either:
- * - `S` is a subtype of `Seq[U]` for some element type `U`, (set `m = 0`)
- * - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`).
- *
- * The argument patterns `p_1, ..., p_n` are typed with expected types
- * `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
- *
+ /** Sorts the alternatives according to the given comparison function.
+ * Returns a list containing the best alternative as well as any which
+ * the best fails to improve upon.
*/
- def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int,
- unappSym: Symbol, effectiveNbSubPats: Int): (List[Type], List[Type]) = {
- val isUnapplySeq = unappSym.name == nme.unapplySeq
- val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
-
- def seqToRepeatedChecked(tp: Type) = {
- val toRepeated = seqToRepeated(tp)
- if (tp eq toRepeated) throw new TypeError("(the last tuple-component of) the result type of an unapplySeq must be a Seq[_]")
- else toRepeated
- }
-
- // empty list --> error, otherwise length == 1
- lazy val optionArgs = resTp.baseType(OptionClass).typeArgs
- // empty list --> not a ProductN, otherwise product element types
- def productArgs = getProductArgs(optionArgs.head)
-
- val formals =
- // convert Seq[T] to the special repeated argument type
- // so below we can use formalTypes to expand formals to correspond to the number of actuals
- if (isUnapplySeq) {
- if (optionArgs.nonEmpty)
- productArgs match {
- case Nil => List(seqToRepeatedChecked(optionArgs.head))
- case normalTps :+ seqTp => normalTps :+ seqToRepeatedChecked(seqTp)
- }
- else throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.fullLocationString} does not conform to Option[_]")
- } else {
- if (booleanExtractor && nbSubPats == 0) Nil
- else if (optionArgs.nonEmpty)
- if (nbSubPats == 1) {
- val productArity = productArgs.size
- if (settings.lint.value && productArity > 1 && productArity != effectiveNbSubPats)
- global.currentUnit.warning(pos,
- s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
- optionArgs
- }
- // TODO: update spec to reflect we allow any ProductN, not just TupleN
- else productArgs
- else
- throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.fullLocationString} does not conform to Option[_] or Boolean")
- }
-
- // for unapplySeq, replace last vararg by as many instances as required by nbSubPats
- val formalsExpanded =
- if (isUnapplySeq && formals.nonEmpty) formalTypes(formals, nbSubPats)
- else formals
+ private def bestAlternatives(alternatives: List[Symbol])(isBetter: (Symbol, Symbol) => Boolean): List[Symbol] = {
+ def improves(sym1: Symbol, sym2: Symbol) = (
+ (sym2 eq NoSymbol)
+ || sym2.isError
+ || (sym2 hasAnnotation BridgeClass)
+ || isBetter(sym1, sym2)
+ )
- if (formalsExpanded.lengthCompare(nbSubPats) != 0) (null, null)
- else (formals, formalsExpanded)
+ alternatives sortWith improves match {
+ case best :: rest if rest.nonEmpty => best :: rest.filterNot(alt => improves(best, alt))
+ case bests => bests
+ }
}
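// Editor's illustration (not part of this patch): the selection rule of bestAlternatives
// above, exercised on plain Ints instead of overloaded symbols. The best candidate is
// kept together with every candidate it fails to improve upon (the ties).
object BestAlternativesSketch {
  def bestAlternatives[A](alternatives: List[A])(isBetter: (A, A) => Boolean): List[A] =
    alternatives.sortWith(isBetter) match {
      case best :: rest if rest.nonEmpty => best :: rest.filterNot(alt => isBetter(best, alt))
      case bests                         => bests
    }

  def main(args: Array[String]): Unit = {
    // "better" = strictly greater: 9 wins and the second 9 remains as a tie.
    println(bestAlternatives(List(3, 9, 1, 9))(_ > _))   // List(9, 9)
  }
}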
- def actualTypes(actuals: List[Type], nformals: Int): List[Type] =
- if (nformals == 1 && !hasLength(actuals, 1))
- List(if (actuals.isEmpty) UnitClass.tpe else tupleType(actuals))
- else actuals
-
- def actualArgs(pos: Position, actuals: List[Tree], nformals: Int): List[Tree] = {
- val inRange = nformals == 1 && !hasLength(actuals, 1) && actuals.lengthCompare(MaxTupleArity) <= 0
- if (inRange && !phase.erasedTypes) List(atPos(pos)(gen.mkTuple(actuals)))
- else actuals
+ // we must not allow CyclicReference to be thrown when sym.info is called
+ // in checkAccessible, because that would mark the symbol erroneous, which it
+ // is not. But if it's a true CyclicReference then macro def will report it.
+ // See comments to TypeSigError for an explanation of this special case.
+ // [Eugene] is there a better way?
+ private object CheckAccessibleMacroCycle extends TypeCompleter {
+ val tree = EmptyTree
+ override def complete(sym: Symbol) = ()
}
/** A fresh type variable with given type parameter as origin.
- *
- * @param tparam ...
- * @return ...
*/
def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
@@ -170,50 +95,34 @@ trait Infer extends Checkable {
*/
object instantiate extends TypeMap {
private var excludedVars = immutable.Set[TypeVar]()
+ private def applyTypeVar(tv: TypeVar): Type = tv match {
+ case TypeVar(origin, constr) if !constr.instValid => throw new DeferredNoInstance(() => s"no unique instantiation of type variable $origin could be found")
+ case _ if excludedVars(tv) => throw new NoInstance("cyclic instantiation")
+ case TypeVar(_, constr) =>
+ excludedVars += tv
+ try apply(constr.inst)
+ finally excludedVars -= tv
+ }
def apply(tp: Type): Type = tp match {
- case WildcardType | BoundedWildcardType(_) | NoType =>
- throw new NoInstance("undetermined type")
- case tv @ TypeVar(origin, constr) if !tv.untouchable =>
- if (constr.inst == NoType) {
- throw new DeferredNoInstance(() =>
- "no unique instantiation of type variable " + origin + " could be found")
- } else if (excludedVars(tv)) {
- throw new NoInstance("cyclic instantiation")
- } else {
- excludedVars += tv
- val res = apply(constr.inst)
- excludedVars -= tv
- res
- }
- case _ =>
- mapOver(tp)
+ case WildcardType | BoundedWildcardType(_) | NoType => throw new NoInstance("undetermined type")
+ case tv: TypeVar if !tv.untouchable => applyTypeVar(tv)
+ case _ => mapOver(tp)
}
}
+ @inline final def falseIfNoInstance(body: => Boolean): Boolean =
+ try body catch { case _: NoInstance => false }
+
/** Is type fully defined, i.e. no embedded anytypes or wildcards in it?
- *
- * @param tp ...
- * @return ...
*/
private[typechecker] def isFullyDefined(tp: Type): Boolean = tp match {
- case WildcardType | BoundedWildcardType(_) | NoType =>
- false
- case NoPrefix | ThisType(_) | ConstantType(_) =>
- true
- case TypeRef(pre, sym, args) =>
- isFullyDefined(pre) && (args forall isFullyDefined)
- case SingleType(pre, sym) =>
- isFullyDefined(pre)
- case RefinedType(ts, decls) =>
- ts forall isFullyDefined
- case TypeVar(origin, constr) if (constr.inst == NoType) =>
- false
- case _ =>
- try {
- instantiate(tp); true
- } catch {
- case ex: NoInstance => false
- }
+ case WildcardType | BoundedWildcardType(_) | NoType => false
+ case NoPrefix | ThisType(_) | ConstantType(_) => true
+ case TypeRef(pre, _, args) => isFullyDefined(pre) && (args forall isFullyDefined)
+ case SingleType(pre, _) => isFullyDefined(pre)
+ case RefinedType(ts, _) => ts forall isFullyDefined
+ case TypeVar(_, constr) if constr.inst == NoType => false
+ case _ => falseIfNoInstance({ instantiate(tp) ; true })
}
/** Solve constraint collected in types `tvars`.
@@ -225,32 +134,17 @@ trait Infer extends Checkable {
* @param upper When `true` search for max solution else min.
* @throws NoInstance
*/
- def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Int], upper: Boolean, depth: Int): List[Type] = {
-
- if (tvars.nonEmpty)
- printInference("[solve types] solving for " + tparams.map(_.name).mkString(", ") + " in " + tvars.mkString(", "))
-
- if (!solve(tvars, tparams, variances, upper, depth)) {
- // no panic, it's good enough to just guess a solution, we'll find out
- // later whether it works. *ZAP* @M danger, Will Robinson! this means
- // that you should never trust inferred type arguments!
- //
- // Need to call checkBounds on the args/typars or type1 on the tree
- // for the expression that results from type inference see e.g., #2421:
- // implicit search had been ignoring this caveat
- // throw new DeferredNoInstance(() =>
- // "no solution exists for constraints"+(tvars map boundsString))
+ def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Variance], upper: Boolean, depth: Depth): List[Type] = {
+ if (tvars.isEmpty) Nil else {
+ printTyping("solving for " + parentheses((tparams, tvars).zipped map ((p, tv) => s"${p.name}: $tv")))
+ // !!! What should be done with the return value of "solve", which is at present ignored?
+ // The historical commentary says "no panic, it's good enough to just guess a solution,
+ // we'll find out later whether it works", meaning don't issue an error here when types
+ // don't conform to bounds. That means you can never trust the results of implicit search.
+ // For an example where this was not being heeded, SI-2421.
+ solve(tvars, tparams, variances, upper, depth)
+ tvars map instantiate
}
- for (tvar <- tvars ; if tvar.constr.inst == tvar) {
- if (tvar.origin.typeSymbol.info eq ErrorType)
- // this can happen if during solving a cyclic type parameter
- // such as T <: T gets completed. See #360
- tvar.constr.inst = ErrorType
- else
- abort(tvar.origin+" at "+tvar.origin.typeSymbol.owner)
- }
- tvars map instantiate
}
def skipImplicit(tp: Type) = tp match {
@@ -265,16 +159,15 @@ trait Infer extends Checkable {
* This method seems to be performance critical.
*/
def normalize(tp: Type): Type = tp match {
- case mt @ MethodType(params, restpe) if mt.isImplicit =>
- normalize(restpe)
- case mt @ MethodType(_, restpe) if !mt.isDependentMethodType =>
- functionType(mt.paramTypes, normalize(restpe))
- case NullaryMethodType(restpe) =>
- normalize(restpe)
- case ExistentialType(tparams, qtpe) =>
- newExistentialType(tparams, normalize(qtpe))
- case tp1 =>
- tp1 // @MAT aliases already handled by subtyping
+ case PolyType(_, restpe) =>
+ logResult(sm"""|Normalizing PolyType in infer:
+ | was: $restpe
+ | now""")(normalize(restpe))
+ case mt @ MethodType(_, restpe) if mt.isImplicit => normalize(restpe)
+ case mt @ MethodType(_, restpe) if !mt.isDependentMethodType => functionType(mt.paramTypes, normalize(restpe))
+ case NullaryMethodType(restpe) => normalize(restpe)
+ case ExistentialType(tparams, qtpe) => newExistentialType(tparams, normalize(qtpe))
+ case _ => tp // @MAT aliases already handled by subtyping
}
private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
@@ -286,12 +179,8 @@ trait Infer extends Checkable {
/* -- Error Messages --------------------------------------------------- */
def setError[T <: Tree](tree: T): T = {
- debuglog("set error: "+ tree)
- // this breaks -Ydebug pretty radically
- // if (settings.debug.value) { // DEBUG
- // println("set error: "+tree);
- // throw new Error()
- // }
+ // SI-7388, one can incur a cycle calling sym.toString
+ // (but it'd be nicer if that weren't so)
def name = {
val sym = tree.symbol
val nameStr = try sym.toString catch { case _: CyclicReference => sym.nameString }
@@ -301,7 +190,7 @@ trait Infer extends Checkable {
def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
def errorSym = if (tree.isType) errorClass else errorValue
- if (tree.hasSymbol)
+ if (tree.hasSymbolField)
tree setSymbol errorSym
tree setType ErrorType
@@ -311,102 +200,87 @@ trait Infer extends Checkable {
def issue(err: AbsTypeError): Unit = context.issue(err)
- def isPossiblyMissingArgs(found: Type, req: Type) = (
- false
- /** However it is that this condition is expected to imply
- * "is possibly missing args", it is too weak. It is
- * better to say nothing than to offer misleading guesses.
+ def explainTypes(tp1: Type, tp2: Type) = {
+ if (context.reportErrors)
+ withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
+ }
- (found.resultApprox ne found)
- && isWeaklyCompatible(found.resultApprox, req)
- */
- )
+ // When filtering sym down to the accessible alternatives leaves us empty handed.
+ private def checkAccessibleError(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = {
+ if (settings.debug) {
+ Console.println(context)
+ Console.println(tree)
+ Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
+ }
+ ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
+ if (settings.check.isDefault)
+ analyzer.lastAccessCheckDetails
+ else
+ ptBlock("because of an internal error (no accessible symbol)",
+ "sym.ownerChain" -> sym.ownerChain,
+ "underlyingSymbol(sym)" -> underlyingSymbol(sym),
+ "pre" -> pre,
+ "site" -> site,
+ "tree" -> tree,
+ "sym.accessBoundary(sym.owner)" -> sym.accessBoundary(sym.owner),
+ "context.owner" -> context.owner,
+ "context.outer.enclClass.owner" -> context.outer.enclClass.owner
+ )
+ ))(context)
- def explainTypes(tp1: Type, tp2: Type) =
- withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
+ setError(tree)
+ }
/* -- Tests & Checks---------------------------------------------------- */
- /** Check that <code>sym</code> is defined and accessible as a member of
- * tree <code>site</code> with type <code>pre</code> in current context.
+ /** Check that `sym` is defined and accessible as a member of
+ * tree `site` with type `pre` in current context.
+ * @PP: In case it's not abundantly obvious to anyone who might read
+ * this, the method does a lot more than "check" these things, as does
+ * nearly every method in the compiler, so don't act all shocked.
+ * This particular example "checks" its way to assigning both the
+ * symbol and type of the incoming tree, in addition to forcing lots
+ * of symbol infos on its way to transforming java raw types (but
+ * only of terms - why?)
*
* Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre,
* since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck)
*/
- def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree =
- if (sym.isError) {
- tree setSymbol sym setType ErrorType
- } else {
- val topClass = context.owner.enclosingTopLevelClass
- if (context.unit.exists)
- context.unit.depends += sym.enclosingTopLevelClass
-
- var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
- // Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG
- if (sym1 == NoSymbol && sym.isJavaDefined && context.unit.isJava) // don't try to second guess Java; see #4402
- sym1 = sym
-
- if (sym1 == NoSymbol) {
- if (settings.debug.value) {
- Console.println(context)
- Console.println(tree)
- Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
- }
- ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
- if (settings.check.isDefault)
- analyzer.lastAccessCheckDetails
- else
- ptBlock("because of an internal error (no accessible symbol)",
- "sym.ownerChain" -> sym.ownerChain,
- "underlyingSymbol(sym)" -> underlyingSymbol(sym),
- "pre" -> pre,
- "site" -> site,
- "tree" -> tree,
- "sym.accessBoundary(sym.owner)" -> sym.accessBoundary(sym.owner),
- "context.owner" -> context.owner,
- "context.outer.enclClass.owner" -> context.outer.enclClass.owner
- )
- ))(context)
- setError(tree)
- }
- else {
- if (context.owner.isTermMacro && (sym1 hasFlag LOCKED)) {
- // we must not let CyclicReference to be thrown from sym1.info
- // because that would mark sym1 erroneous, which it is not
- // but if it's a true CyclicReference then macro def will report it
- // see comments to TypeSigError for an explanation of this special case
- // [Eugene] is there a better way?
- val dummy = new TypeCompleter { val tree = EmptyTree; override def complete(sym: Symbol) {} }
- throw CyclicReference(sym1, dummy)
- }
+ def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = {
+ def malformed(ex: MalformedType, instance: Type): Type = {
+ val what = if (ex.msg contains "malformed type") "is malformed" else s"contains a ${ex.msg}"
+ val message = s"\n because its instance type $instance $what"
+ val error = AccessError(tree, sym, pre, context.enclClass.owner, message)
+ ErrorUtils.issueTypeError(error)(context)
+ ErrorType
+ }
+ def accessible = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super])) match {
+ case NoSymbol if sym.isJavaDefined && context.unit.isJava => sym // don't try to second guess Java; see #4402
+ case sym1 => sym1
+ }
+ // XXX So... what's this for exactly?
+ if (context.unit.exists)
+ context.unit.depends += sym.enclosingTopLevelClass
- if (sym1.isTerm)
- sym1.cookJavaRawInfo() // xform java rawtypes into existentials
-
- val owntype = {
- try pre.memberType(sym1)
- catch {
- case ex: MalformedType =>
- if (settings.debug.value) ex.printStackTrace
- val sym2 = underlyingSymbol(sym1)
- val itype = pre.memberType(sym2)
- ErrorUtils.issueTypeError(
- AccessError(tree, sym, pre, context.enclClass.owner,
- "\n because its instance type "+itype+
- (if ("malformed type: "+itype.toString==ex.msg) " is malformed"
- else " contains a "+ex.msg)))(context)
- ErrorType
- }
- }
- tree setSymbol sym1 setType {
+ if (sym.isError)
+ tree setSymbol sym setType ErrorType
+ else accessible match {
+ case NoSymbol => checkAccessibleError(tree, sym, pre, site)
+ case sym if context.owner.isTermMacro && (sym hasFlag LOCKED) => throw CyclicReference(sym, CheckAccessibleMacroCycle)
+ case sym =>
+ val sym1 = if (sym.isTerm) sym.cookJavaRawInfo() else sym // xform java rawtypes into existentials
+ val owntype = (
+ try pre memberType sym1
+ catch { case ex: MalformedType => malformed(ex, pre memberType underlyingSymbol(sym)) }
+ )
+ tree setSymbol sym1 setType (
pre match {
case _: SuperType => owntype map (tp => if (tp eq pre) site.symbol.thisType else tp)
case _ => owntype
}
- }
- }
+ )
}
-
+ }
/** "Compatible" means conforming after conversions.
* "Raising to a thunk" is not implicit; therefore, for purposes of applicability and
@@ -417,45 +291,38 @@ trait Infer extends Checkable {
* since that induces a tie between m(=>A) and m(=>A,B*) [SI-3761]
*/
private def isCompatible(tp: Type, pt: Type): Boolean = {
- def isCompatibleByName(tp: Type, pt: Type): Boolean = pt match {
- case TypeRef(_, ByNameParamClass, List(res)) if !isByNameParamType(tp) => isCompatible(tp, res)
- case _ => false
- }
+ def isCompatibleByName(tp: Type, pt: Type): Boolean = (
+ isByNameParamType(pt)
+ && !isByNameParamType(tp)
+ && isCompatible(tp, dropByName(pt))
+ )
val tp1 = normalize(tp)
- (tp1 weak_<:< pt) || isCoercible(tp1, pt) || isCompatibleByName(tp, pt)
+
+ ( (tp1 weak_<:< pt)
+ || isCoercible(tp1, pt)
+ || isCompatibleByName(tp, pt)
+ )
}
- def isCompatibleArgs(tps: List[Type], pts: List[Type]) =
- (tps corresponds pts)(isCompatible)
+ def isCompatibleArgs(tps: List[Type], pts: List[Type]) = (tps corresponds pts)(isCompatible)
- def isWeaklyCompatible(tp: Type, pt: Type): Boolean =
- pt.typeSymbol == UnitClass || // can perform unit coercion
- isCompatible(tp, pt) ||
- tp.isInstanceOf[MethodType] && // can perform implicit () instantiation
- tp.params.isEmpty && isCompatible(tp.resultType, pt)
+ def isWeaklyCompatible(tp: Type, pt: Type): Boolean = {
+ def isCompatibleNoParamsMethod = tp match {
+ case MethodType(Nil, restpe) => isCompatible(restpe, pt)
+ case _ => false
+ }
+ ( pt.typeSymbol == UnitClass // can perform unit coercion
+ || isCompatible(tp, pt)
+ || isCompatibleNoParamsMethod // can perform implicit () instantiation
+ )
+ }
- /** Like weakly compatible but don't apply any implicit conversions yet.
+ /* Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
- *
- * [Martin] I think Infer is also created by Erasure, with the default
- * implementation of isCoercible
- * [Paulp] (Assuming the above must refer to my comment on isCoercible)
- * Nope, I examined every occurrence of Inferencer in trunk. It
- * appears twice as a self-type, once at its definition, and once
- * where it is instantiated in Typers. There are no others.
- *
- % ack -A0 -B0 --no-filename '\bInferencer\b' src
- self: Inferencer =>
- self: Inferencer =>
- class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
- val infer = new Inferencer(context0) {
*/
def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
- /** This is overridden in the Typer.infer with some logic, but since
- * that's the only place in the compiler an Inferencer is ever created,
- * I suggest this should either be abstract or have the implementation.
- */
+ // Overridden at the point of instantiation, where inferView is visible.
def isCoercible(tp: Type, pt: Type): Boolean = false
/* -- Type instantiation------------------------------------------------ */
@@ -464,112 +331,99 @@ trait Infer extends Checkable {
* by existentially bound variables.
*/
def makeFullyDefined(tp: Type): Type = {
- val tparams = new ListBuffer[Symbol]
+ var tparams: List[Symbol] = Nil
def addTypeParam(bounds: TypeBounds): Type = {
val tparam = context.owner.newExistential(newTypeName("_"+tparams.size), context.tree.pos.focus) setInfo bounds
- tparams += tparam
+ tparams ::= tparam
tparam.tpe
}
val tp1 = tp map {
- case WildcardType =>
- addTypeParam(TypeBounds.empty)
- case BoundedWildcardType(bounds) =>
- addTypeParam(bounds)
- case t => t
+ case WildcardType => addTypeParam(TypeBounds.empty)
+ case BoundedWildcardType(bounds) => addTypeParam(bounds)
+ case t => t
}
- existentialAbstraction(tparams.toList, tp1)
+ if (tp eq tp1) tp
+ else existentialAbstraction(tparams.reverse, tp1)
}
+ def ensureFullyDefined(tp: Type): Type = if (isFullyDefined(tp)) tp else makeFullyDefined(tp)
/** Return inferred type arguments of polymorphic expression, given
- * its type parameters and result type and a prototype <code>pt</code>.
- * If no minimal type variables exist that make the
- * instantiated type a subtype of <code>pt</code>, return null.
- *
- * @param tparams ...
- * @param restpe ...
- * @param pt ...
- * @return ...
+ * type vars, its type parameters and result type and a prototype `pt`.
+ * If the type variables cannot be instantiated such that the type
+ * conforms to `pt`, return null.
*/
- private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean = false): (List[Type], List[TypeVar]) = {
- val tvars = tparams map freshVar
- val instResTp = restpe.instantiateTypeParams(tparams, tvars)
- if ( if (useWeaklyCompatible) isWeaklyCompatible(instResTp, pt) else isCompatible(instResTp, pt) ) {
- try {
- // If the restpe is an implicit method, and the expected type is fully defined
- // optimize type variables wrt to the implicit formals only; ignore the result type.
- // See test pos/jesper.scala
- val varianceType = restpe match {
- case mt: MethodType if mt.isImplicit && isFullyDefined(pt) =>
- MethodType(mt.params, AnyClass.tpe)
- case _ =>
- restpe
- }
- //println("try to solve "+tvars+" "+tparams)
- (solvedTypes(tvars, tparams, tparams map varianceInType(varianceType),
- false, lubDepth(List(restpe, pt))), tvars)
- } catch {
- case ex: NoInstance => (null, null)
- }
- } else (null, null)
+ private def exprTypeArgs(tvars: List[TypeVar], tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] = {
+ def restpeInst = restpe.instantiateTypeParams(tparams, tvars)
+ def conforms = if (useWeaklyCompatible) isWeaklyCompatible(restpeInst, pt) else isCompatible(restpeInst, pt)
+ // If the restpe is an implicit method, and the expected type is fully defined
+ // optimize type variables wrt to the implicit formals only; ignore the result type.
+ // See test pos/jesper.scala
+ def variance = restpe match {
+ case mt: MethodType if mt.isImplicit && isFullyDefined(pt) => MethodType(mt.params, AnyTpe)
+ case _ => restpe
+ }
+ def solve() = solvedTypes(tvars, tparams, tparams map varianceInType(variance), upper = false, lubDepth(restpe :: pt :: Nil))
+
+ if (conforms)
+ try solve() catch { case _: NoInstance => null }
+ else
+ null
}
+ /** Overload which allocates fresh type vars.
+ * The other one exists because apparently inferExprInstance needs access to the typevars
+ * after the call, and it's wasteful to return a tuple and throw it away almost every time.
+ */
+ private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] =
+ exprTypeArgs(tparams map freshVar, tparams, restpe, pt, useWeaklyCompatible)
/** Return inferred proto-type arguments of function, given
* its type and value parameters and result type, and a
- * prototype <code>pt</code> for the function result.
+ * prototype `pt` for the function result.
* Type arguments need to be either determined precisely by
* the prototype, or they are maximized, if they occur only covariantly
* in the value parameter list.
* If instantiation of a type parameter fails,
* take WildcardType for the proto-type argument.
- *
- * @param tparams ...
- * @param formals ...
- * @param restype ...
- * @param pt ...
- * @return ...
*/
- def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
- pt: Type): List[Type] = {
- /** Map type variable to its instance, or, if `variance` is covariant/contravariant,
- * to its upper/lower bound */
- def instantiateToBound(tvar: TypeVar, variance: Int): Type = try {
+ def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type, pt: Type): List[Type] = {
+ // Map type variable to its instance, or, if `variance` is variant,
+ // to its upper or lower bound
+ def instantiateToBound(tvar: TypeVar, variance: Variance): Type = {
lazy val hiBounds = tvar.constr.hiBounds
lazy val loBounds = tvar.constr.loBounds
- lazy val upper = glb(hiBounds)
- lazy val lower = lub(loBounds)
+ lazy val upper = glb(hiBounds)
+ lazy val lower = lub(loBounds)
def setInst(tp: Type): Type = {
tvar setInst tp
- assertNonCyclic(tvar)//debug
+ assert(tvar.constr.inst != tvar, tvar.origin)
instantiate(tvar.constr.inst)
}
- //Console.println("instantiate "+tvar+tvar.constr+" variance = "+variance);//DEBUG
- if (tvar.constr.inst != NoType)
+ if (tvar.constr.instValid)
instantiate(tvar.constr.inst)
- else if ((variance & COVARIANT) != 0 && hiBounds.nonEmpty)
- setInst(upper)
- else if ((variance & CONTRAVARIANT) != 0 && loBounds.nonEmpty)
+ else if (loBounds.nonEmpty && variance.isContravariant)
setInst(lower)
- else if (hiBounds.nonEmpty && loBounds.nonEmpty && upper <:< lower)
+ else if (hiBounds.nonEmpty && (variance.isPositive || loBounds.nonEmpty && upper <:< lower))
setInst(upper)
else
WildcardType
- } catch {
- case ex: NoInstance => WildcardType
}
+
val tvars = tparams map freshVar
if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
map2(tparams, tvars)((tparam, tvar) =>
- instantiateToBound(tvar, varianceInTypes(formals)(tparam)))
+ try instantiateToBound(tvar, varianceInTypes(formals)(tparam))
+ catch { case ex: NoInstance => WildcardType }
+ )
else
- tvars map (tvar => WildcardType)
+ tvars map (_ => WildcardType)
}
/** [Martin] Can someone comment this please? I have no idea what it's for
* and the code is not exactly readable.
*/
object AdjustedTypeArgs {
- val Result = scala.collection.mutable.LinkedHashMap
- type Result = scala.collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+ val Result = mutable.LinkedHashMap
+ type Result = mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
(m collect {case (p, Some(a)) => (p, a)}).unzip ))
@@ -586,7 +440,7 @@ trait Infer extends Checkable {
def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{
val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
val (okArgs, okTparams) = ok.unzip
- (okArgs, okTparams, m.values.map(_.getOrElse(NothingClass.tpe)), nok.keys)
+ (okArgs, okTparams, m.values.map(_.getOrElse(NothingTpe)), nok.keys)
})
}
@@ -608,7 +462,7 @@ trait Infer extends Checkable {
*
* Rewrite for repeated param types: Map T* entries to Seq[T].
* @return map from tparams to inferred arg, if inference was successful, tparams that map to None are considered left undetermined
- * type parameters that are inferred as `scala.Nothing` and that are not covariant in <code>restpe</code> are taken to be undetermined
+ * type parameters that are inferred as `scala.Nothing` and that are not covariant in `restpe` are taken to be undetermined
*/
def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]]
@@ -616,33 +470,32 @@ trait Infer extends Checkable {
foreach3(tparams, tvars, targs) { (tparam, tvar, targ) =>
val retract = (
targ.typeSymbol == NothingClass // only retract Nothings
- && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences
+ && (restpe.isWildcard || !varianceInType(restpe)(tparam).isPositive) // don't retract covariant occurrences
)
- // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat
buf += ((tparam,
if (retract) None
else Some(
if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
// this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
- else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ
+ else if (targ.typeSymbol.isModuleClass || tvar.constr.avoidWiden) targ
else targ.widen
)
))
}
- buf.result
+ buf.result()
}
/** Return inferred type arguments, given type parameters, formal parameters,
* argument types, result type and expected result type.
- * If this is not possible, throw a <code>NoInstance</code> exception.
- * Undetermined type arguments are represented by `definitions.NothingClass.tpe`.
+ * If this is not possible, throw a `NoInstance` exception.
+ * Undetermined type arguments are represented by `definitions.NothingTpe`.
* No check that inferred parameters conform to their bounds is made here.
*
* @param tparams the type parameters of the method
* @param formals the value parameter types of the method
- * @param restp the result type of the method
+ * @param restpe the result type of the method
* @param argtpes the argument types of the application
* @param pt the expected return type of the application
* @return @see adjustTypeArgs
@@ -689,35 +542,70 @@ trait Infer extends Checkable {
"argument expression's type is not compatible with formal parameter type" + foundReqMsg(tp1, pt1))
}
}
- val targs = solvedTypes(
- tvars, tparams, tparams map varianceInTypes(formals),
- false, lubDepth(formals) max lubDepth(argtpes)
- )
+ val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals), upper = false, lubDepth(formals) max lubDepth(argtpes))
+ // Can warn about inferring Any/AnyVal as long as they don't appear
+ // explicitly anywhere amongst the formal, argument, result, or expected type.
+ def canWarnAboutAny = !(pt :: restpe :: formals ::: argtpes exists (t => (t contains AnyClass) || (t contains AnyValClass)))
+ def argumentPosition(idx: Int): Position = context.tree match {
+ case x: ValOrDefDef => x.rhs match {
+ case Apply(fn, args) if idx < args.size => args(idx).pos
+ case _ => context.tree.pos
+ }
+ case _ => context.tree.pos
+ }
+ if (settings.warnInferAny.value && context.reportErrors && canWarnAboutAny) {
+ foreachWithIndex(targs) ((targ, idx) =>
+ targ.typeSymbol match {
+ case sym @ (AnyClass | AnyValClass) =>
+ context.unit.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.")
+ case _ =>
+ }
+ )
+ }
adjustTypeArgs(tparams, tvars, targs, restpe)
}
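// Editor's illustration (not part of this patch): the situation the new warning above
// targets. Mixing unrelated argument types makes inference fall back to Any, which the
// corresponding -Ywarn-infer-any / lint option flags as a likely programming error.
object InferAnyDemo {
  def same[A](x: A, y: A): Boolean = x == y

  def main(args: Array[String]): Unit = {
    // A is inferred as Any here; compiling with the warning enabled reports it.
    println(same(1, "one"))   // false
  }
}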
+ /** One must step carefully when assessing applicability due to
+ * complications from varargs, tuple-conversion, named arguments.
+   * This method is used to filter out inapplicable methods;
+   * its behavior is slightly configurable based on what stage of
+ * overloading resolution we're at.
+ *
+ * This method has boolean parameters, which is usually suboptimal
+ * but I didn't work out a better way. They don't have defaults,
+ * and the method's scope is limited.
+ */
+ private[typechecker] def isApplicableBasedOnArity(tpe: Type, argsCount: Int, varargsStar: Boolean, tuplingAllowed: Boolean): Boolean = followApply(tpe) match {
+ case OverloadedType(pre, alts) =>
+ alts exists (alt => isApplicableBasedOnArity(pre memberType alt, argsCount, varargsStar, tuplingAllowed))
+ case _ =>
+ val paramsCount = tpe.params.length
+ val simpleMatch = paramsCount == argsCount
+ val varargsTarget = isVarArgsList(tpe.params)
+ def varargsMatch = varargsTarget && (paramsCount - 1) <= argsCount
+ def tuplingMatch = tuplingAllowed && eligibleForTupleConversion(paramsCount, argsCount, varargsTarget)
+
+ // A varargs star call, e.g. (x, y:_*) can only match a varargs method
+ // with the same number of parameters. See SI-5859 for an example of what
+ // would fail were this not enforced before we arrived at isApplicable.
+ if (varargsStar)
+ varargsTarget && simpleMatch
+ else
+ simpleMatch || varargsMatch || tuplingMatch
+ }
+
private[typechecker] def followApply(tp: Type): Type = tp match {
case NullaryMethodType(restp) =>
val restp1 = followApply(restp)
if (restp1 eq restp) tp else restp1
case _ =>
- val appmeth = {
- //OPT cut down on #closures by special casing non-overloaded case
- // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
- val result = tp.nonPrivateMember(nme.apply)
- if ((result eq NoSymbol) || !result.isOverloaded && result.isPublic) result
- else result filter (_.isPublic)
+ //OPT cut down on #closures by special casing non-overloaded case
+ // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
+ tp nonPrivateMember nme.apply match {
+ case NoSymbol => tp
+ case sym if !sym.isOverloaded && sym.isPublic => OverloadedType(tp, sym.alternatives)
+ case sym => OverloadedType(tp, sym.filter(_.isPublic).alternatives)
}
- if (appmeth == NoSymbol) tp
- else OverloadedType(tp, appmeth.alternatives)
- }
-
- def hasExactlyNumParams(tp: Type, n: Int): Boolean = tp match {
- case OverloadedType(pre, alts) =>
- alts exists (alt => hasExactlyNumParams(pre.memberType(alt), n))
- case _ =>
- val len = tp.params.length
- len == n || isVarArgsList(tp.params) && len <= n + 1
}
/**
@@ -731,7 +619,7 @@ trait Infer extends Checkable {
* to the corresponding position in params
* - namesOK is false when there's an invalid use of named arguments
*/
- private def checkNames(argtpes: List[Type], params: List[Symbol]) = {
+ private def checkNames(argtpes: List[Type], params: List[Symbol]): (List[Type], Array[Int], Boolean) = {
val argPos = Array.fill(argtpes.length)(-1)
var positionalAllowed, namesOK = true
var index = 0
@@ -743,7 +631,7 @@ trait Infer extends Checkable {
if (pos == -1) {
if (positionalAllowed) { // treat assignment as positional argument
argPos(index) = index
- res = UnitClass.tpe
+ res = UnitTpe
} else // unknown parameter name
namesOK = false
} else if (argPos.contains(pos)) { // parameter specified twice
@@ -765,207 +653,190 @@ trait Infer extends Checkable {
(argtpes1, argPos, namesOK)
}
- /** don't do a () to (()) conversion for methods whose second parameter
- * is a varargs. This is a fairly kludgey way to address #3224.
- * We'll probably find a better way to do this by identifying
- * tupled and n-ary methods, but thiws is something for a future major revision.
+ /** True if the given parameter list can accept a tupled argument list,
+ * and the argument list can be tupled (based on its length.)
*/
- def isUnitForVarArgs(args: List[AnyRef], params: List[Symbol]): Boolean =
- args.isEmpty && hasLength(params, 2) && isVarArgsList(params)
+ def eligibleForTupleConversion(paramsCount: Int, argsCount: Int, varargsTarget: Boolean): Boolean = {
+ def canSendTuple = argsCount match {
+ case 0 => !varargsTarget // avoid () to (()) conversion - SI-3224
+ case 1 => false // can't tuple a single argument
+ case n => n <= MaxTupleArity // <= 22 arguments
+ }
+ def canReceiveTuple = paramsCount match {
+ case 1 => true
+ case 2 => varargsTarget
+ case _ => false
+ }
+ canSendTuple && canReceiveTuple
+ }
+ def eligibleForTupleConversion(formals: List[Type], argsCount: Int): Boolean = formals match {
+ case p :: Nil => eligibleForTupleConversion(1, argsCount, varargsTarget = isScalaRepeatedParamType(p))
+ case _ :: p :: Nil if isScalaRepeatedParamType(p) => eligibleForTupleConversion(2, argsCount, varargsTarget = true)
+ case _ => false
+ }
+
+ /** The type of an argument list after being coerced to a tuple.
+ * @pre: the argument list is eligible for tuple conversion.
+ */
+ private def typeAfterTupleConversion(argtpes: List[Type]): Type = (
+ if (argtpes.isEmpty) UnitTpe // aka "Tuple0"
+ else tupleType(argtpes map {
+ case NamedType(name, tp) => UnitTpe // not a named arg - only assignments here
+ case RepeatedType(tp) => tp // but probably shouldn't be tupling a call containing :_*
+ case tp => tp
+ })
+ )
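// Editor's illustration (not part of this patch): the tupling eligibility rule above as a
// standalone function, mirroring the added code. An argument list of length 0 or 2..22
// can be tupled (never length 1, and () only for a non-varargs target), and only a
// parameter list of arity 1, or arity 2 ending in a vararg, can receive the tuple.
object TupleConversionSketch {
  val MaxTupleArity = 22

  def eligibleForTupleConversion(paramsCount: Int, argsCount: Int, varargsTarget: Boolean): Boolean = {
    def canSendTuple = argsCount match {
      case 0 => !varargsTarget        // avoid () -> (()) for a varargs target (SI-3224)
      case 1 => false                 // a single argument is never tupled
      case n => n <= MaxTupleArity
    }
    def canReceiveTuple = paramsCount match {
      case 1 => true
      case 2 => varargsTarget
      case _ => false
    }
    canSendTuple && canReceiveTuple
  }

  def main(args: Array[String]): Unit = {
    println(eligibleForTupleConversion(paramsCount = 1, argsCount = 3, varargsTarget = false))  // true
    println(eligibleForTupleConversion(paramsCount = 1, argsCount = 1, varargsTarget = false))  // false
  }
}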
- /** Is there an instantiation of free type variables <code>undetparams</code>
- * such that function type <code>ftpe</code> is applicable to
- * <code>argtpes</code> and its result conform to <code>pt</code>?
+ /** If the argument list needs to be tupled for the parameter list,
+ * a list containing the type of the tuple. Otherwise, the original
+ * argument list.
+ */
+ def tupleIfNecessary(formals: List[Type], argtpes: List[Type]): List[Type] = {
+ if (eligibleForTupleConversion(formals, argtpes.size))
+ typeAfterTupleConversion(argtpes) :: Nil
+ else
+ argtpes
+ }
+
+ private def isApplicableToMethod(undetparams: List[Symbol], mt: MethodType, argtpes0: List[Type], pt: Type): Boolean = {
+ val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
+ def missingArgs = missingParams[Type](argtpes0, mt.params, x => Some(x) collect { case NamedType(n, _) => n })
+ def argsTupled = tupleIfNecessary(mt.paramTypes, argtpes0)
+ def argsPlusDefaults = missingArgs match {
+ case (args, _) if args forall (_.hasDefault) => argtpes0 ::: makeNamedTypes(args)
+ case _ => argsTupled
+ }
+ // If args eq the incoming arg types, fail; otherwise recurse with these args.
+ def tryWithArgs(args: List[Type]) = (
+ (args ne argtpes0)
+ && isApplicable(undetparams, mt, args, pt)
+ )
+ def tryInstantiating(args: List[Type]) = falseIfNoInstance {
+ val restpe = mt resultType args
+ val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, args, pt)
+ val restpeInst = restpe.instantiateTypeParams(okparams, okargs)
+ // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
+ exprTypeArgs(leftUndet, restpeInst, pt, useWeaklyCompatible = true) match {
+ case null => false
+ case _ => isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
+ }
+ }
+ def typesCompatible(args: List[Type]) = undetparams match {
+ case Nil => isCompatibleArgs(args, formals) && isWeaklyCompatible(mt resultType args, pt)
+ case _ => tryInstantiating(args)
+ }
+
+ // when using named application, the vararg param has to be specified exactly once
+ def reorderedTypesCompatible = checkNames(argtpes0, mt.params) match {
+ case (_, _, false) => false // names are not ok
+ case (_, pos, _) if !allArgsArePositional(pos) && !sameLength(formals, mt.params) => false // different length lists and all args not positional
+ case (args, pos, _) => typesCompatible(reorderArgs(args, pos))
+ }
+ compareLengths(argtpes0, formals) match {
+ case 0 if containsNamedType(argtpes0) => reorderedTypesCompatible // right number of args, wrong order
+ case 0 => typesCompatible(argtpes0) // fast track if no named arguments are used
+ case x if x > 0 => tryWithArgs(argsTupled) // too many args, try tupling
+ case _ => tryWithArgs(argsPlusDefaults) // too few args, try adding defaults or tupling
+ }
+ }
+
+ /** Is there an instantiation of free type variables `undetparams` such that
+ * function type `ftpe` is applicable to `argtpes0` and its result conform to `pt`?
*
- * @param undetparams ...
* @param ftpe the type of the function (often a MethodType)
- * @param argtpes the argument types; a NamedType(name, tp) for named
+ * @param argtpes0 the argument types; a NamedType(name, tp) for named
* arguments. For each NamedType, if `name` does not exist in `ftpe`, that
* type is set to `Unit`, i.e. the corresponding argument is treated as
* an assignment expression (@see checkNames).
- * @param pt ...
- * @return ...
*/
- private def isApplicable(undetparams: List[Symbol], ftpe: Type,
- argtpes0: List[Type], pt: Type): Boolean =
+ private def isApplicable(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = (
ftpe match {
- case OverloadedType(pre, alts) =>
- alts exists (alt => isApplicable(undetparams, pre.memberType(alt), argtpes0, pt))
- case ExistentialType(tparams, qtpe) =>
- isApplicable(undetparams, qtpe, argtpes0, pt)
- case mt @ MethodType(params, _) =>
- val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
-
- def tryTupleApply: Boolean = {
- // if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
- val tupleArgTpes = actualTypes(argtpes0 map {
- // no assignment is treated as named argument here
- case NamedType(name, tp) => UnitClass.tpe
- case tp => tp
- }, formals.length)
-
- !sameLength(argtpes0, tupleArgTpes) &&
- !isUnitForVarArgs(argtpes0, params) &&
- isApplicable(undetparams, ftpe, tupleArgTpes, pt)
- }
- def typesCompatible(argtpes: List[Type]) = {
- val restpe = ftpe.resultType(argtpes)
- if (undetparams.isEmpty) {
- isCompatibleArgs(argtpes, formals) && isWeaklyCompatible(restpe, pt)
- } else {
- try {
- val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
- // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
- (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, useWeaklyCompatible = true)._1 ne null) &&
- isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
- } catch {
- case ex: NoInstance => false
- }
- }
- }
-
- // very similar logic to doTypedApply in typechecker
- val lencmp = compareLengths(argtpes0, formals)
- if (lencmp > 0) tryTupleApply
- else if (lencmp == 0) {
- if (!argtpes0.exists(_.isInstanceOf[NamedType])) {
- // fast track if no named arguments are used
- typesCompatible(argtpes0)
- }
- else {
- // named arguments are used
- val (argtpes1, argPos, namesOK) = checkNames(argtpes0, params)
- // when using named application, the vararg param has to be specified exactly once
- ( namesOK && (isIdentity(argPos) || sameLength(formals, params)) &&
- // nb. arguments and names are OK, check if types are compatible
- typesCompatible(reorderArgs(argtpes1, argPos))
- )
- }
- }
- else {
- // not enough arguments, check if applicable using defaults
- val missing = missingParams[Type](argtpes0, params, {
- case NamedType(name, _) => Some(name)
- case _ => None
- })._1
- if (missing forall (_.hasDefault)) {
- // add defaults as named arguments
- val argtpes1 = argtpes0 ::: (missing map (p => NamedType(p.name, p.tpe)))
- isApplicable(undetparams, ftpe, argtpes1, pt)
- }
- else tryTupleApply
- }
-
- case NullaryMethodType(restpe) => // strip nullary method type, which used to be done by the polytype case below
- isApplicable(undetparams, restpe, argtpes0, pt)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, restpe)((tps1, restpe1) => isApplicable(tps1 ::: undetparams, restpe1, argtpes0, pt))
- case ErrorType =>
- true
- case _ =>
- false
+ case OverloadedType(pre, alts) => alts exists (alt => isApplicable(undetparams, pre memberType alt, argtpes0, pt))
+ case ExistentialType(_, qtpe) => isApplicable(undetparams, qtpe, argtpes0, pt)
+ case mt @ MethodType(_, _) => isApplicableToMethod(undetparams, mt, argtpes0, pt)
+ case NullaryMethodType(restpe) => isApplicable(undetparams, restpe, argtpes0, pt)
+ case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, restpe)((tps1, res1) => isApplicable(tps1 ::: undetparams, res1, argtpes0, pt))
+ case ErrorType => true
+ case _ => false
}
+ )
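
For orientation, a call-site sketch of the adaptations the applicability check above must account for: named arguments, defaults, and tupling. The object and method names are hypothetical and not part of this patch:

    object ApplicabilityDemo {
      def f(a: Int, b: String = "x") = a + b.length
      f(b = "hi", a = 1)   // named application: argument types are reordered before the check
      f(1)                 // too few arguments: the default for `b` is added as a named argument
      def g(p: (Int, String)) = p._1
      g(1, "hi")           // too many arguments for the single formal: retried after tupling
    }
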
/**
- * Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
- * The chance of TypeErrors should be reduced through context errors
+ * Are arguments of the given types applicable to `ftpe`? Type argument inference
+ * is tried twice: first with the given expected type, then with `WildcardType`.
*/
- private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type,
- argtpes0: List[Type], pt: Type): Boolean = {
- val silentContext = context.makeSilent(false)
- val typer0 = newTyper(silentContext)
- val res1 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, pt)
- if (pt != WildcardType && silentContext.hasErrors) {
- silentContext.flushBuffer()
- val res2 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, WildcardType)
- if (silentContext.hasErrors) false else res2
- } else res1
+ // Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
+ // The chance of TypeErrors should be reduced through context errors
+ private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = {
+ def applicableExpectingPt(pt: Type): Boolean = {
+ val silent = context.makeSilent(reportAmbiguousErrors = false)
+ val result = newTyper(silent).infer.isApplicable(undetparams, ftpe, argtpes0, pt)
+ if (silent.hasErrors && !pt.isWildcard)
+ applicableExpectingPt(WildcardType) // second try
+ else
+ result
+ }
+ applicableExpectingPt(pt)
}
- /** Is type <code>ftpe1</code> strictly more specific than type <code>ftpe2</code>
+ /** Is type `ftpe1` strictly more specific than type `ftpe2`
* when both are alternatives in an overloaded function?
* @see SLS (sec:overloading-resolution)
- *
- * @param ftpe1 ...
- * @param ftpe2 ...
- * @return ...
*/
- def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
- case OverloadedType(pre, alts) =>
- alts exists (alt => isAsSpecific(pre.memberType(alt), ftpe2))
- case et: ExistentialType =>
- isAsSpecific(ftpe1.skolemizeExistential, ftpe2)
- //et.withTypeVars(isAsSpecific(_, ftpe2))
- case NullaryMethodType(res) =>
- isAsSpecific(res, ftpe2)
- case mt: MethodType if mt.isImplicit =>
- isAsSpecific(ftpe1.resultType, ftpe2)
- case mt @ MethodType(params, _) if params.nonEmpty =>
- var argtpes = mt.paramTypes
- if (isVarArgsList(params) && isVarArgsList(ftpe2.params))
- argtpes = argtpes map (argtpe =>
- if (isRepeatedParamType(argtpe)) argtpe.typeArgs.head else argtpe)
- isApplicable(List(), ftpe2, argtpes, WildcardType)
- case PolyType(tparams, NullaryMethodType(res)) =>
- isAsSpecific(PolyType(tparams, res), ftpe2)
- case PolyType(tparams, mt: MethodType) if mt.isImplicit =>
- isAsSpecific(PolyType(tparams, mt.resultType), ftpe2)
- case PolyType(_, (mt @ MethodType(params, _))) if params.nonEmpty =>
- isApplicable(List(), ftpe2, mt.paramTypes, WildcardType)
- // case NullaryMethodType(res) =>
- // isAsSpecific(res, ftpe2)
- case ErrorType =>
- true
- case _ =>
- ftpe2 match {
- case OverloadedType(pre, alts) =>
- alts forall (alt => isAsSpecific(ftpe1, pre.memberType(alt)))
- case et: ExistentialType =>
- et.withTypeVars(isAsSpecific(ftpe1, _))
- case mt: MethodType =>
- !mt.isImplicit || isAsSpecific(ftpe1, mt.resultType)
- case NullaryMethodType(res) =>
- isAsSpecific(ftpe1, res)
- case PolyType(tparams, NullaryMethodType(res)) =>
- isAsSpecific(ftpe1, PolyType(tparams, res))
- case PolyType(tparams, mt: MethodType) =>
- !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, mt.resultType))
- case _ =>
- isAsSpecificValueType(ftpe1, ftpe2, List(), List())
- }
+ def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = {
+ def checkIsApplicable(argtpes: List[Type]) = isApplicable(Nil, ftpe2, argtpes, WildcardType)
+ def bothAreVarargs = isVarArgsList(ftpe1.params) && isVarArgsList(ftpe2.params)
+ def onRight = ftpe2 match {
+ case OverloadedType(pre, alts) => alts forall (alt => isAsSpecific(ftpe1, pre memberType alt))
+ case et: ExistentialType => et.withTypeVars(isAsSpecific(ftpe1, _))
+ case mt @ MethodType(_, restpe) => !mt.isImplicit || isAsSpecific(ftpe1, restpe)
+ case NullaryMethodType(res) => isAsSpecific(ftpe1, res)
+ case PolyType(tparams, NullaryMethodType(restpe)) => isAsSpecific(ftpe1, PolyType(tparams, restpe))
+ case PolyType(tparams, mt @ MethodType(_, restpe)) => !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, restpe))
+ case _ => isAsSpecificValueType(ftpe1, ftpe2, Nil, Nil)
+ }
+ ftpe1 match {
+ case OverloadedType(pre, alts) => alts exists (alt => isAsSpecific(pre memberType alt, ftpe2))
+ case et: ExistentialType => isAsSpecific(et.skolemizeExistential, ftpe2)
+ case NullaryMethodType(restpe) => isAsSpecific(restpe, ftpe2)
+ case mt @ MethodType(_, restpe) if mt.isImplicit => isAsSpecific(restpe, ftpe2)
+ case mt @ MethodType(_, _) if bothAreVarargs => checkIsApplicable(mt.paramTypes mapConserve repeatedToSingle)
+ case mt @ MethodType(params, _) if params.nonEmpty => checkIsApplicable(mt.paramTypes)
+ case PolyType(tparams, NullaryMethodType(restpe)) => isAsSpecific(PolyType(tparams, restpe), ftpe2)
+ case PolyType(tparams, mt @ MethodType(_, restpe)) if mt.isImplicit => isAsSpecific(PolyType(tparams, restpe), ftpe2)
+ case PolyType(_, mt @ MethodType(params, _)) if params.nonEmpty => checkIsApplicable(mt.paramTypes)
+ case ErrorType => true
+ case _ => onRight
+ }
}
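
As a reminder of what this relation decides, a minimal hypothetical overload pair in which one alternative is as specific as the other but not vice versa:

    object SpecificityDemo {
      def show(x: Any)    = "any"
      def show(x: String) = "string"
      show("hi")  // picks show(x: String): the Any overload also accepts a String argument,
                  // but the String overload does not accept an arbitrary Any, so the
                  // String alternative is strictly more specific
    }
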
- private def isAsSpecificValueType(tpe1: Type, tpe2: Type, undef1: List[Symbol], undef2: List[Symbol]): Boolean = (tpe1, tpe2) match {
- case (PolyType(tparams1, rtpe1), _) =>
+ private def isAsSpecificValueType(tpe1: Type, tpe2: Type, undef1: List[Symbol], undef2: List[Symbol]): Boolean = tpe1 match {
+ case PolyType(tparams1, rtpe1) =>
isAsSpecificValueType(rtpe1, tpe2, undef1 ::: tparams1, undef2)
- case (_, PolyType(tparams2, rtpe2)) =>
- isAsSpecificValueType(tpe1, rtpe2, undef1, undef2 ::: tparams2)
- case _ =>
- existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2)
+ case _ =>
+ tpe2 match {
+ case PolyType(tparams2, rtpe2) => isAsSpecificValueType(tpe1, rtpe2, undef1, undef2 ::: tparams2)
+ case _ => existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2)
+ }
}
-
-/*
- def isStrictlyMoreSpecific(ftpe1: Type, ftpe2: Type): Boolean =
- ftpe1.isError || isAsSpecific(ftpe1, ftpe2) &&
- (!isAsSpecific(ftpe2, ftpe1) ||
- !ftpe1.isInstanceOf[OverloadedType] && ftpe2.isInstanceOf[OverloadedType] ||
- phase.erasedTypes && covariantReturnOverride(ftpe1, ftpe2))
-*/
/** Is sym1 (or its companion class in case it is a module) a subclass of
* sym2 (or its companion class in case it is a module)?
*/
def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean = (
- (sym1 != sym2) && (sym1 != NoSymbol) && (
- (sym1 isSubClass sym2)
- || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
- || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
- )
+ (sym1 ne sym2)
+ && (sym1 ne NoSymbol)
+ && ( (sym1 isSubClass sym2)
+ || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
+ || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
+ )
)
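
A small hypothetical illustration of the companion handling above: the module class of `object Impl` is not itself a subclass of `Base`, but its companion `class Impl` is, so the test answers true.

    class Base
    class Impl extends Base
    object Impl  // counts as a proper "subclass or object" of Base via its companion class
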
/** is symbol `sym1` defined in a proper subclass of symbol `sym2`?
*/
- def isInProperSubClassOrObject(sym1: Symbol, sym2: Symbol) =
- sym2 == NoSymbol || isProperSubClassOrObject(sym1.owner, sym2.owner)
+ def isInProperSubClassOrObject(sym1: Symbol, sym2: Symbol) = (
+ (sym2 eq NoSymbol)
+ || isProperSubClassOrObject(sym1.safeOwner, sym2.owner)
+ )
def isStrictlyMoreSpecific(ftpe1: Type, ftpe2: Type, sym1: Symbol, sym2: Symbol): Boolean = {
// ftpe1 / ftpe2 are OverloadedTypes (possibly with one single alternative) if they
@@ -978,92 +849,36 @@ trait Infer extends Checkable {
(!phase.erasedTypes || covariantReturnOverride(ftpe1, ftpe2))) 1 else 0)
val subClassCount = (if (isInProperSubClassOrObject(sym1, sym2)) 1 else 0) -
(if (isInProperSubClassOrObject(sym2, sym1)) 1 else 0)
-// println("is more specific? "+sym1+":"+ftpe1+sym1.locationString+"/"+sym2+":"+ftpe2+sym2.locationString+":"+
-// specificCount+"/"+subClassCount)
specificCount + subClassCount > 0
}
}
-/*
- ftpe1.isError || {
- if (isAsSpecific(ftpe1, ftpe2))
- (!isAsSpecific(ftpe2, ftpe1) ||
- isProperSubClassOrObject(sym1.owner, sym2.owner) ||
- !ftpe1.isInstanceOf[OverloadedType] && ftpe2.isInstanceOf[OverloadedType] ||
- phase.erasedTypes && covariantReturnOverride(ftpe1, ftpe2))
- else
- !isAsSpecific(ftpe2, ftpe1) &&
- isProperSubClassOrObject(sym1.owner, sym2.owner)
- }
-*/
- private def covariantReturnOverride(ftpe1: Type, ftpe2: Type): Boolean = (ftpe1, ftpe2) match {
- case (MethodType(_, rtpe1), MethodType(_, rtpe2)) =>
- rtpe1 <:< rtpe2 || rtpe2.typeSymbol == ObjectClass
- case _ =>
- false
- }
-/*
- /** Is type `tpe1` a strictly better expression alternative than type `tpe2`?
- */
- def isStrictlyBetterExpr(tpe1: Type, tpe2: Type) = {
- isMethod(tpe2) && !isMethod(tpe1) ||
- isNullary(tpe1) && !isNullary(tpe2) ||
- isStrictlyBetter(tpe1, tpe2)
- }
- /** Is type `tpe1` a strictly better alternative than type `tpe2`?
- * non-methods are always strictly better than methods
- * nullary methods are always strictly better than non-nullary
- * if both are non-nullary methods, then tpe1 is strictly better than tpe2 if
- * - tpe1 specializes tpe2 and tpe2 does not specialize tpe1
- * - tpe1 and tpe2 specialize each other and tpe1 has a strictly better resulttype than
- * tpe2
- */
- def isStrictlyBetter(tpe1: Type, tpe2: Type) = {
- def isNullary(tpe: Type): Boolean = tpe match {
- case tp: RewrappingTypeProxy => isNullary(tp.underlying)
- case _ => tpe.paramSectionCount == 0 || tpe.params.isEmpty
- }
- def isMethod(tpe: Type): Boolean = tpe match {
- case tp: RewrappingTypeProxy => isMethod(tp.underlying)
- case MethodType(_, _) | PolyType(_, _) => true
- case _ => false
- }
- def hasStrictlyBetterResult =
- resultIsBetter(tpe1, tpe2, List(), List()) && !resultIsBetter(tpe2, tpe1, List(), List())
- if (!isMethod(tpe1))
- isMethod(tpe2) || hasStrictlyBetterResult
-
- isNullary(tpe1) && !isNullary(tpe2) ||
- is
-
- else if (isNullary(tpe1))
- isMethod(tpe2) && (!isNullary(tpe2) || hasStrictlyBetterResult)
- else
- specializes(tpe1, tpe2) && (!specializes(tpe2, tpe1) || hasStrictlyBetterResult)
+ private def covariantReturnOverride(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
+ case MethodType(_, rtpe1) =>
+ ftpe2 match {
+ case MethodType(_, rtpe2) => rtpe1 <:< rtpe2 || rtpe2.typeSymbol == ObjectClass
+ case _ => false
+ }
+ case _ => false
}
-*/
/** error if arguments not within bounds. */
- def checkBounds(tree: Tree, pre: Type, owner: Symbol,
- tparams: List[Symbol], targs: List[Type], prefix: String): Boolean =
- if ((targs exists (_.isErroneous)) || (tparams exists (_.isErroneous))) true
- else {
- //@M validate variances & bounds of targs wrt variances & bounds of tparams
- //@M TODO: better place to check this?
- //@M TODO: errors for getters & setters are reported separately
- val kindErrors = checkKindBounds(tparams, targs, pre, owner)
- kindErrors match {
- case Nil =>
- def notWithinBounds() = NotWithinBounds(tree, prefix, targs, tparams, Nil)
- isWithinBounds(pre, owner, tparams, targs) || {notWithinBounds(); false}
- case errors =>
- def kindBoundErrors() = KindBoundErrors(tree, prefix, targs, tparams, errors)
- (targs contains WildcardType) || {kindBoundErrors(); false}
- }
+ def checkBounds(tree: Tree, pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type], prefix: String): Boolean = {
+ def issueBoundsError() = { NotWithinBounds(tree, prefix, targs, tparams, Nil) ; false }
+ def issueKindBoundErrors(errs: List[String]) = { KindBoundErrors(tree, prefix, targs, tparams, errs) ; false }
+ //@M validate variances & bounds of targs wrt variances & bounds of tparams
+ //@M TODO: better place to check this?
+ //@M TODO: errors for getters & setters are reported separately
+ def check() = checkKindBounds(tparams, targs, pre, owner) match {
+ case Nil => isWithinBounds(pre, owner, tparams, targs) || issueBoundsError()
+ case errs => (targs contains WildcardType) || issueKindBoundErrors(errs)
}
+ targs.exists(_.isErroneous) || tparams.exists(_.isErroneous) || check()
+ }
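
A hypothetical example of the kind of violation this reports: an explicit type argument that falls outside the declared bound.

    object BoundsDemo {
      class Animal; class Dog extends Animal
      def pick[T <: Animal](x: T) = x
      pick(new Dog)          // fine: the inferred T = Dog is within the bound
      // pick[String]("no")  // rejected: type argument String is not within bound T <: Animal
    }
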
+
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
- checkKindBounds0(tparams, targs, pre, owner, true) map {
+ checkKindBounds0(tparams, targs, pre, owner, explainErrors = true) map {
case (targ, tparam, kindErrors) =>
kindErrors.errorMessage(targ, tparam)
}
@@ -1078,21 +893,13 @@ trait Infer extends Checkable {
* attempts fail, an error is produced.
*/
def inferArgumentInstance(tree: Tree, undetparams: List[Symbol], strictPt: Type, lenientPt: Type) {
- printInference(
- ptBlock("inferArgumentInstance",
- "tree" -> tree,
- "tree.tpe" -> tree.tpe,
- "undetparams" -> undetparams,
- "strictPt" -> strictPt,
- "lenientPt" -> lenientPt
- )
- )
- var targs = exprTypeArgs(undetparams, tree.tpe, strictPt)._1
+ printTyping(tree, s"inferring arg instance based on pt0=$strictPt, pt1=$lenientPt")
+ var targs = exprTypeArgs(undetparams, tree.tpe, strictPt, useWeaklyCompatible = false)
if ((targs eq null) || !(tree.tpe.subst(undetparams, targs) <:< strictPt))
- targs = exprTypeArgs(undetparams, tree.tpe, lenientPt)._1
+ targs = exprTypeArgs(undetparams, tree.tpe, lenientPt, useWeaklyCompatible = false)
substExpr(tree, undetparams, targs, lenientPt)
- printInference("[inferArgumentInstance] finished, targs = " + targs)
+ printTyping(tree, s"infer arg instance from pt0=$strictPt, pt1=$lenientPt; targs=$targs")
}
/** Infer type arguments `targs` for `tparams` of polymorphic expression in `tree`, given prototype `pt`.
@@ -1101,31 +908,28 @@ trait Infer extends Checkable {
* If passed, infers against specified type `treeTp` instead of `tree.tp`.
*/
def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, useWeaklyCompatible: Boolean = false): List[Symbol] = {
- val treeTp = if(treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
- val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
- printInference(
- ptBlock("inferExprInstance",
- "tree" -> tree,
- "tree.tpe"-> tree.tpe,
- "tparams" -> tparams,
- "pt" -> pt,
- "targs" -> targs,
- "tvars" -> tvars
- )
- )
+ val treeTp = if (treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
+ val tvars = tparams map freshVar
+ val targs = exprTypeArgs(tvars, tparams, treeTp, pt, useWeaklyCompatible)
+ def infer_s = map3(tparams, tvars, targs)((tparam, tvar, targ) => s"$tparam=$tvar/$targ") mkString ","
+ printTyping(tree, s"infer expr instance from pt=$pt, $infer_s")
+
+ // SI-7899 inferring by-name types is unsound. The correct behaviour is conditional because the hole is
+ // exploited in Scalaz (Free.scala), as seen in: run/t7899-regression.
+ def dropByNameIfStrict(tp: Type): Type = if (settings.inferByName) tp else dropByName(tp)
+ def targsStrict = if (targs eq null) null else targs mapConserve dropByNameIfStrict
if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
- substExpr(tree, tparams, targs, pt)
+ substExpr(tree, tparams, targsStrict, pt)
List()
} else {
- val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, tvars, targs)
- printInference(
- ptBlock("inferExprInstance/AdjustedTypeArgs",
- "okParams" -> okParams,
- "okArgs" -> okArgs,
- "leftUndet" -> leftUndet
- )
- )
+ val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, tvars, targsStrict)
+ def solved_s = map2(okParams, okArgs)((p, a) => s"$p=$a") mkString ","
+ def undet_s = leftUndet match {
+ case Nil => ""
+ case ps => ps.mkString(", undet=", ",", "")
+ }
+ printTyping(tree, s"infer solved $solved_s$undet_s")
substExpr(tree, okParams, okArgs, pt)
leftUndet
}
@@ -1133,30 +937,25 @@ trait Infer extends Checkable {
/** Substitute free type variables `undetparams` of polymorphic argument
* expression `tree` to `targs`; report an error if `targs` is null.
- *
- * @param tree ...
- * @param undetparams ...
- * @param targs ...
- * @param pt ...
*/
- private def substExpr(tree: Tree, undetparams: List[Symbol],
- targs: List[Type], pt: Type) {
+ private def substExpr(tree: Tree, undetparams: List[Symbol], targs: List[Type], pt: Type) {
if (targs eq null) {
if (!tree.tpe.isErroneous && !pt.isErroneous)
PolymorphicExpressionInstantiationError(tree, undetparams, pt)
- } else {
+ }
+ else {
new TreeTypeSubstituter(undetparams, targs).traverse(tree)
notifyUndetparamsInferred(undetparams, targs)
}
}
- /** Substitute free type variables <code>undetparams</code> of application
- * <code>fn(args)</code>, given prototype <code>pt</code>.
+ /** Substitute free type variables `undetparams` of application
+ * `fn(args)`, given prototype `pt`.
*
* @param fn the function that needs to be instantiated.
* @param undetparams the parameters that need to be determined
* @param args the actual arguments supplied in the call.
- * @param pt the expected type of the function application
+ * @param pt0 the expected type of the function application
* @return The type parameters that remain uninstantiated,
* and that thus have not been substituted.
*/
@@ -1166,20 +965,12 @@ trait Infer extends Checkable {
try {
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
val formals = formalTypes(mt.paramTypes, args.length)
- val argtpes = actualTypes(args map (x => elimAnonymousClass(x.tpe.deconst)), formals.length)
+ val argtpes = tupleIfNecessary(formals, args map (x => elimAnonymousClass(x.tpe.deconst)))
val restpe = fn.tpe.resultType(argtpes)
val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) =
methTypeArgs(undetparams, formals, restpe, argtpes, pt)
- printInference("[infer method] solving for %s in %s based on (%s)%s (%s)".format(
- undetparams.map(_.name).mkString(", "),
- fn.tpe,
- argtpes.mkString(", "),
- restpe,
- (okparams map (_.name), okargs).zipped.map(_ + "=" + _).mkString("solved: ", ", ", "")
- ))
-
if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")) {
val treeSubst = new TreeTypeSubstituter(okparams, okargs)
treeSubst traverseTrees fn :: args
@@ -1202,25 +993,22 @@ trait Infer extends Checkable {
}
}
- def widen(tp: Type): Type = abstractTypesToBounds(tp)
-
- /** Substitute free type variables <code>undetparams</code> of type constructor
- * <code>tree</code> in pattern, given prototype <code>pt</code>.
+ /** Substitute free type variables `undetparams` of type constructor
+ * `tree` in pattern, given prototype `pt`.
*
* @param tree the constructor that needs to be instantiated
* @param undetparams the undetermined type parameters
- * @param pt the expected result type of the instance
+ * @param pt0 the expected result type of the instance
*/
def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) {
- val pt = widen(pt0)
+ val pt = abstractTypesToBounds(pt0)
val ptparams = freeTypeParamsOfTerms(pt)
val ctorTp = tree.tpe
val resTp = ctorTp.finalResultType
debuglog("infer constr inst "+ tree +"/"+ undetparams +"/ pt= "+ pt +" pt0= "+ pt0 +" resTp: "+ resTp)
- /** Compute type arguments for undetermined params
- */
+ /* Compute type arguments for undetermined params */
def inferFor(pt: Type): Option[List[Type]] = {
val tvars = undetparams map freshVar
val resTpV = resTp.instantiateTypeParams(undetparams, tvars)
@@ -1232,13 +1020,16 @@ trait Infer extends Checkable {
val variances =
if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp)
else undetparams map varianceInTypes(ctorTp.paramTypes)
- val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt)))
+
+ // Note: this is the only place where solvedTypes (or, indirectly, solve) is called
+ // with upper = true.
+ val targs = solvedTypes(tvars, undetparams, variances, upper = true, lubDepth(resTp :: pt :: Nil))
// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
// no checkBounds here. If we enable it, test bug602 fails.
// TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams
Some(targs)
} catch ifNoInstance { msg =>
- debuglog("NO INST "+ (tvars, tvars map (_.constr)))
+ debuglog("NO INST "+ ((tvars, tvars map (_.constr))))
NoConstructorInstanceError(tree, resTp, pt, msg)
None
}
@@ -1272,109 +1063,68 @@ trait Infer extends Checkable {
}
} else None
- (inferFor(pt) orElse inferForApproxPt) map { targs =>
- new TreeTypeSubstituter(undetparams, targs).traverse(tree)
- notifyUndetparamsInferred(undetparams, targs)
- } getOrElse {
- debugwarn("failed inferConstructorInstance for "+ tree +" : "+ tree.tpe +" under "+ undetparams +" pt = "+ pt +(if(isFullyDefined(pt)) " (fully defined)" else " (not fully defined)"))
- // if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), pt)
- ConstrInstantiationError(tree, resTp, pt)
+ inferFor(pt) orElse inferForApproxPt match {
+ case Some(targs) =>
+ new TreeTypeSubstituter(undetparams, targs).traverse(tree)
+ notifyUndetparamsInferred(undetparams, targs)
+ case _ =>
+ def not = if (isFullyDefined(pt)) "" else "not "
+ devWarning(s"failed inferConstructorInstance for $tree: ${tree.tpe} undet=$undetparams, pt=$pt (${not}fully defined)")
+ ConstrInstantiationError(tree, resTp, pt)
}
}
-
- def instBounds(tvar: TypeVar): (Type, Type) = {
- val tparam = tvar.origin.typeSymbol
- val instType = toOrigin(tvar.constr.inst)
+ def instBounds(tvar: TypeVar): TypeBounds = {
+ val tparam = tvar.origin.typeSymbol
+ val instType = toOrigin(tvar.constr.inst)
+ val TypeBounds(lo, hi) = tparam.info.bounds
val (loBounds, hiBounds) =
- if (instType != NoType && isFullyDefined(instType)) (List(instType), List(instType))
+ if (isFullyDefined(instType)) (List(instType), List(instType))
else (tvar.constr.loBounds, tvar.constr.hiBounds)
- val lo = lub(tparam.info.bounds.lo :: loBounds map toOrigin)
- val hi = glb(tparam.info.bounds.hi :: hiBounds map toOrigin)
- (lo, hi)
+
+ TypeBounds(
+ lub(lo :: loBounds map toOrigin),
+ glb(hi :: hiBounds map toOrigin)
+ )
}
def isInstantiatable(tvars: List[TypeVar]) = {
val tvars1 = tvars map (_.cloneInternal)
// Note: right now it's not clear that solving is complete, or how it can be made complete!
// So we should come back to this and investigate.
- solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (x => COVARIANT), false)
+ solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), upper = false, Depth.AnyDepth)
}
- // this is quite nasty: it destructively changes the info of the syms of e.g., method type params (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped)
+ // this is quite nasty: it destructively changes the info of the syms of e.g., method type params
+ // (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped)
// the changes are rolled back by restoreTypeBounds, but might be unintentionally observed in the meantime
def instantiateTypeVar(tvar: TypeVar) {
- val tparam = tvar.origin.typeSymbol
- if (false &&
- tvar.constr.inst != NoType &&
- isFullyDefined(tvar.constr.inst) &&
- (tparam.info.bounds containsType tvar.constr.inst)) {
- context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
- tparam setInfo tvar.constr.inst
- tparam resetFlag DEFERRED
- debuglog("new alias of " + tparam + " = " + tparam.info)
- } else {
- val (lo, hi) = instBounds(tvar)
- if (lo <:< hi) {
- if (!((lo <:< tparam.info.bounds.lo) && (tparam.info.bounds.hi <:< hi)) // bounds were improved
- && tparam != lo.typeSymbolDirect && tparam != hi.typeSymbolDirect) { // don't create illegal cycles
- context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
- tparam setInfo TypeBounds(lo, hi)
- debuglog("new bounds of " + tparam + " = " + tparam.info)
- } else {
- debuglog("redundant: "+tparam+" "+tparam.info+"/"+lo+" "+hi)
- }
- } else {
- debuglog("inconsistent: "+tparam+" "+lo+" "+hi)
- }
- }
- }
-
- /** Does `tp` contain any types that cannot be checked at run-time (i.e., after erasure, will isInstanceOf[erased(tp)] imply conceptualIsInstanceOf[tp]?)
- * we should find a way to ask erasure: hey, is `tp` going to make it through you with all of its isInstanceOf resolving powers intact?
- * TODO: at the very least, reduce duplication wrt checkCheckable
- */
- def containsUnchecked(tp: Type): Boolean = {
- def check(tp: Type, bound: List[Symbol]): Boolean = {
- def isSurroundingTypeParam(sym: Symbol) = {
- val e = context.scope.lookupEntry(sym.name)
- ( (e ne null)
- && (e.sym == sym )
- && !e.sym.isTypeParameterOrSkolem
- && (e.owner == context.scope)
- )
- }
- def isLocalBinding(sym: Symbol) = (
- sym.isAbstractType && (
- (bound contains sym)
- || (sym.name == tpnme.WILDCARD)
- || isSurroundingTypeParam(sym)
- )
- )
- tp.normalize match {
- case SingleType(pre, _) =>
- check(pre, bound)
- case TypeRef(_, ArrayClass, arg :: _) =>
- check(arg, bound)
- case tp @ TypeRef(pre, sym, args) =>
- ( (sym.isAbstractType && !isLocalBinding(sym))
- || (args exists (x => !isLocalBinding(x.typeSymbol)))
- || check(pre, bound)
- )
- // case RefinedType(_, decls) if decls.nonEmpty =>
- // patternWarning(tp, "refinement ")
- case RefinedType(parents, _) =>
- parents exists (p => check(p, bound))
- case ExistentialType(quantified, tp1) =>
- check(tp1, bound ::: quantified)
- case _ =>
- false
+ val tparam = tvar.origin.typeSymbol
+ val TypeBounds(lo0, hi0) = tparam.info.bounds
+ val tb @ TypeBounds(lo1, hi1) = instBounds(tvar)
+ val enclCase = context.enclosingCaseDef
+ def enclCase_s = enclCase.toString.replaceAll("\\n", " ").take(60)
+
+ if (enclCase.savedTypeBounds.nonEmpty) log(
+ sm"""|instantiateTypeVar with nonEmpty saved type bounds {
+ | enclosing $enclCase_s
+ | saved ${enclCase.savedTypeBounds}
+ | tparam ${tparam.shortSymbolClass} ${tparam.defString}
+ |}""")
+
+ if (lo1 <:< hi1) {
+ if (lo1 <:< lo0 && hi0 <:< hi1) // bounds unimproved
+ log(s"redundant bounds: discarding TypeBounds($lo1, $hi1) for $tparam, no improvement on TypeBounds($lo0, $hi0)")
+ else if (tparam == lo1.typeSymbolDirect || tparam == hi1.typeSymbolDirect)
+ log(s"cyclical bounds: discarding TypeBounds($lo1, $hi1) for $tparam because $tparam appears as bounds")
+ else {
+ enclCase pushTypeBounds tparam
+ tparam setInfo logResult(s"updated bounds: $tparam from ${tparam.info} to")(tb)
}
}
- check(tp, Nil)
+ else log(s"inconsistent bounds: discarding TypeBounds($lo1, $hi1)")
}
-
/** Type intersection of simple type tp1 with general type tp2.
* The result eliminates some redundancies.
*/
@@ -1393,16 +1143,16 @@ trait Infer extends Checkable {
}
def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean): Type = {
- val pt = widen(pt0)
+ val pt = abstractTypesToBounds(pt0)
val ptparams = freeTypeParamsOfTerms(pt)
val tpparams = freeTypeParamsOfTerms(pattp)
def ptMatchesPattp = pt matchesPattern pattp.widen
def pattpMatchesPt = pattp matchesPattern pt
- /** If we can absolutely rule out a match we can fail early.
- * This is the case if the scrutinee has no unresolved type arguments
- * and is a "final type", meaning final + invariant in all type parameters.
+ /* If we can absolutely rule out a match we can fail early.
+ * This is the case if the scrutinee has no unresolved type arguments
+ * and is a "final type", meaning final + invariant in all type parameters.
*/
if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp) {
IncompatibleScrutineeTypeError(tree0, pattp, pt)
@@ -1438,9 +1188,9 @@ trait Infer extends Checkable {
}
tvars foreach instantiateTypeVar
}
- /** If the scrutinee has free type parameters but the pattern does not,
- * we have to flip the arguments so the expected type is treated as more
- * general when calculating the intersection. See run/bug2755.scala.
+ /* If the scrutinee has free type parameters but the pattern does not,
+ * we have to flip the arguments so the expected type is treated as more
+ * general when calculating the intersection. See run/bug2755.scala.
*/
if (tpparams.isEmpty && ptparams.nonEmpty) intersect(pattp, pt)
else intersect(pt, pattp)
@@ -1500,193 +1250,139 @@ trait Infer extends Checkable {
/* -- Overload Resolution ---------------------------------------------- */
-/*
- def checkNotShadowed(pos: Position, pre: Type, best: Symbol, eligible: List[Symbol]) =
- if (!phase.erasedTypes)
- for (alt <- eligible) {
- if (isProperSubClassOrObject(alt.owner, best.owner))
- error(pos,
- "erroneous reference to overloaded definition,\n"+
- "most specific definition is: "+best+best.locationString+" of type "+pre.memberType(best)+
- ",\nyet alternative definition "+alt+alt.locationString+" of type "+pre.memberType(alt)+
- "\nis defined in a subclass")
- }
-*/
-
- /** Assign <code>tree</code> the symbol and type of the alternative which
- * matches prototype <code>pt</code>, if it exists.
+ /** Assign `tree` the symbol and type of the alternative which
+ * matches prototype `pt`, if it exists.
* If several alternatives match `pt`, take the parameterless one.
* If no alternative matches `pt`, take the parameterless one anyway.
*/
- def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match {
- case OverloadedType(pre, alts) => tryTwice { isSecondTry =>
- val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
- val noAlternatives = alts0.isEmpty
- val alts1 = if (noAlternatives) alts else alts0
-
- //println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt)
- def improves(sym1: Symbol, sym2: Symbol): Boolean =
- sym2 == NoSymbol || sym2.hasAnnotation(BridgeClass) ||
- { val tp1 = pre.memberType(sym1)
- val tp2 = pre.memberType(sym2)
- (tp2 == ErrorType ||
- !global.typer.infer.isWeaklyCompatible(tp2, pt) && global.typer.infer.isWeaklyCompatible(tp1, pt) ||
- isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)) }
-
- val best = ((NoSymbol: Symbol) /: alts1) ((best, alt) =>
- if (improves(alt, best)) alt else best)
-
- val competing = alts1 dropWhile (alt => best == alt || improves(best, alt))
-
- if (best == NoSymbol) {
- if (settings.debug.value) {
- tree match {
- case Select(qual, _) =>
- Console.println("qual: " + qual + ":" + qual.tpe +
- " with decls " + qual.tpe.decls +
- " with members " + qual.tpe.members +
- " with members " + qual.tpe.member(newTermName("$minus")))
- case _ =>
- }
- }
- // todo: missing test case
- NoBestExprAlternativeError(tree, pt, isSecondTry)
- } else if (!competing.isEmpty) {
- if (noAlternatives) NoBestExprAlternativeError(tree, pt, isSecondTry)
- else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt, isSecondTry)
- else {
+ def inferExprAlternative(tree: Tree, pt: Type): Tree = {
+ def tryOurBests(pre: Type, alts: List[Symbol], isSecondTry: Boolean): Unit = {
+ val alts0 = alts filter (alt => isWeaklyCompatible(pre memberType alt, pt))
+ val alts1 = if (alts0.isEmpty) alts else alts0
+ val bests = bestAlternatives(alts1) { (sym1, sym2) =>
+ val tp1 = pre memberType sym1
+ val tp2 = pre memberType sym2
+
+ ( (tp2 eq ErrorType)
+ || isWeaklyCompatible(tp1, pt) && !isWeaklyCompatible(tp2, pt)
+ || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)
+ )
+ }
+ // todo: missing test case for bests.isEmpty
+ bests match {
+ case best :: Nil => tree setSymbol best setType (pre memberType best)
+ case best :: competing :: _ if alts0.nonEmpty =>
// SI-6912 Don't give up and leave an OverloadedType on the tree.
// Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try
// unless an error is issued. We're not issuing an error, in the assumption that it would be
// spurious in light of the erroneous expected type
- setError(tree)
- }
- } else {
-// val applicable = alts1 filter (alt =>
-// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
-// checkNotShadowed(tree.pos, pre, best, applicable)
- tree.setSymbol(best).setType(pre.memberType(best))
+ if (pt.isErroneous) setError(tree)
+ else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry)
+ case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry)
}
}
- }
-
- @inline private def inSilentMode(context: Context)(expr: => Boolean): Boolean = {
- val oldState = context.state
- context.setBufferErrors()
- val res = expr
- val contextWithErrors = context.hasErrors
- context.flushBuffer()
- context.restoreState(oldState)
- res && !contextWithErrors
+ tree.tpe match {
+ case OverloadedType(pre, alts) => tryTwice(tryOurBests(pre, alts, _)) ; tree
+ case _ => tree
+ }
}
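
A hypothetical sketch of the selection performed above: the expected type of the expression decides which alternative of an overloaded, unapplied reference is meant.

    object ExprAltDemo {
      def f: String = "no args"
      def f(x: Int): String = "one arg"
      val s: String = f  // the expected type String matches only the parameterless alternative
    }
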
// Checks against the name of the parameter and also any @deprecatedName.
private def paramMatchesName(param: Symbol, name: Name) =
param.name == name || param.deprecatedParamName.exists(_ == name)
- // Check the first parameter list the same way.
- private def methodMatchesName(method: Symbol, name: Name) = method.paramss match {
- case ps :: _ => ps exists (p => paramMatchesName(p, name))
- case _ => false
+ private def containsNamedType(argtpes: List[Type]): Boolean = argtpes match {
+ case Nil => false
+ case NamedType(_, _) :: _ => true
+ case _ :: rest => containsNamedType(rest)
}
-
- private def resolveOverloadedMethod(argtpes: List[Type], eligible: List[Symbol]) = {
+ private def namesOfNamedArguments(argtpes: List[Type]) =
+ argtpes collect { case NamedType(name, _) => name }
+
+ /** Given a list of argument types and eligible method overloads, whittle the
+ * list down to the methods which should be considered for specificity
+ * testing, taking into account:
+ * - named arguments at the call site (keep only methods with name-matching parameters)
+ * - if multiple methods are eligible, drop any methods which take default arguments
+ * - drop any whose arity cannot match under any conditions (allowing for
+ * overloaded applies, varargs, and tupling conversions)
+ * This method is conservative; it can tolerate some varieties of false positive,
+ * but no false negatives.
+ *
+ * @param eligible the overloaded method symbols
+ * @param argtpes the argument types at the call site
+ * @param varargsStar true if the call site has a `: _*` attached to the last argument
+ */
+ private def overloadsToConsiderBySpecificity(eligible: List[Symbol], argtpes: List[Type], varargsStar: Boolean): List[Symbol] = {
// If there are any foo=bar style arguments, and any of the overloaded
// methods has a parameter named `foo`, then only those methods are considered.
- val namesOfArgs = argtpes collect { case NamedType(name, _) => name }
- val namesMatch = (
- if (namesOfArgs.isEmpty) Nil
- else eligible filter { m =>
- namesOfArgs forall { name =>
- methodMatchesName(m, name)
- }
- }
- )
-
- if (namesMatch.nonEmpty) namesMatch
- else if (eligible.isEmpty || eligible.tail.isEmpty) eligible
- else eligible filter { alt =>
- // for functional values, the `apply` method might be overloaded
- val mtypes = followApply(alt.tpe) match {
- case OverloadedType(_, alts) => alts map (_.tpe)
- case t => t :: Nil
- }
- // Drop those that use a default; keep those that use vararg/tupling conversion.
- mtypes exists (t =>
- !t.typeSymbol.hasDefaultFlag && (
- compareLengths(t.params, argtpes) < 0 // tupling (*)
- || hasExactlyNumParams(t, argtpes.length) // same nb or vararg
- )
- )
- // (*) more arguments than parameters, but still applicable: tupling conversion works.
- // todo: should not return "false" when paramTypes = (Unit) no argument is given
- // (tupling would work)
+ val namesMatch = namesOfNamedArguments(argtpes) match {
+ case Nil => Nil
+ case names => eligible filter (m => names forall (name => m.info.params exists (p => paramMatchesName(p, name))))
}
+ if (namesMatch.nonEmpty)
+ namesMatch
+ else if (eligible.isEmpty || eligible.tail.isEmpty)
+ eligible
+ else
+ eligible filter (alt =>
+ !alt.hasDefault && isApplicableBasedOnArity(alt.tpe, argtpes.length, varargsStar, tuplingAllowed = true)
+ )
}
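
A hypothetical call site showing the pruning described above: named arguments keep only the alternatives with matching parameter names, and arity alone can rule alternatives out.

    object PruneDemo {
      def mk(width: Int, height: Int) = width * height
      def mk(side: Int)               = side * side
      mk(width = 2, height = 3)  // the named argument `width` keeps only the first alternative
      mk(4)                      // arity keeps only the second alternative
    }
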
- /** Assign <code>tree</code> the type of an alternative which is applicable
- * to <code>argtpes</code>, and whose result type is compatible with `pt`.
+ /** Assign `tree` the type of an alternative which is applicable
+ * to `argtpes`, and whose result type is compatible with `pt`.
* If several applicable alternatives exist, drop the alternatives which use
* default arguments, then select the most specialized one.
* If no applicable alternative exists, and pt != WildcardType, try again
* with pt = WildcardType.
* Otherwise, if there is no best alternative, error.
*
- * @param argtpes contains the argument types. If an argument is named, as
+ * @param argtpes0 contains the argument types. If an argument is named, as
* "a = 3", the corresponding type is `NamedType("a", Int)'. If the name
* of some NamedType does not exist in an alternative's parameter names,
* the type is replaced by `Unit`, i.e. the argument is treated as an
* assignment expression.
+ *
+ * @pre tree.tpe is an OverloadedType.
*/
- def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
- argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false, lastInferAttempt: Boolean = true): Unit = tree.tpe match {
- case OverloadedType(pre, alts) =>
- val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- tryTwice { isSecondTry =>
- debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
-
- def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || (
- isVarArgsList(alt.tpe.params)
- && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859
- )
- val applicable = resolveOverloadedMethod(argtpes,
- alts filter (alt =>
- varargsApplicableCheck(alt)
- && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt))
- )
- )
-
- def improves(sym1: Symbol, sym2: Symbol) = {
- // util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)
- sym2 == NoSymbol || sym2.isError || sym2.hasAnnotation(BridgeClass) ||
- isStrictlyMoreSpecific(followApply(pre.memberType(sym1)),
- followApply(pre.memberType(sym2)), sym1, sym2)
- }
-
- val best = ((NoSymbol: Symbol) /: applicable) ((best, alt) =>
- if (improves(alt, best)) alt else best)
- val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
- if (best == NoSymbol) {
- if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt, isSecondTry && lastInferAttempt)
- else inferMethodAlternative(tree, undetparams, argtpes, WildcardType, lastInferAttempt = isSecondTry)
- } else if (!competing.isEmpty) {
- AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt, isSecondTry && lastInferAttempt)
- } else {
-// checkNotShadowed(tree.pos, pre, best, applicable)
- tree.setSymbol(best).setType(pre.memberType(best))
- }
+ def inferMethodAlternative(tree: Tree, undetparams: List[Symbol], argtpes0: List[Type], pt0: Type): Unit = {
+ val OverloadedType(pre, alts) = tree.tpe
+ var varargsStar = false
+ val argtpes = argtpes0 mapConserve {
+ case RepeatedType(tp) => varargsStar = true ; tp
+ case tp => tp
+ }
+ def followType(sym: Symbol) = followApply(pre memberType sym)
+ def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = {
+ val applicable0 = alts filter (alt => context inSilentMode isApplicable(undetparams, followType(alt), argtpes, pt))
+ val applicable = overloadsToConsiderBySpecificity(applicable0, argtpes, varargsStar)
+ val ranked = bestAlternatives(applicable)((sym1, sym2) =>
+ isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2)
+ )
+ ranked match {
+ case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous
+ case best :: Nil => tree setSymbol best setType (pre memberType best) // success
+ case Nil if pt.isWildcard => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed
+ case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType
}
- case _ =>
+ }
+ // This potentially makes up to four attempts: tryTwice may execute
+ // with and without views enabled, and bestForExpectedType will try again
+ // with pt = WildcardType if it fails with pt != WildcardType.
+ tryTwice { isLastTry =>
+ val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
+ debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
+ bestForExpectedType(pt, isLastTry)
+ }
}
/** Try inference twice, once without views and once with views,
* unless views are already disabled.
- *
- * @param infer ...
*/
def tryTwice(infer: Boolean => Unit): Unit = {
if (context.implicitsEnabled) {
- val saved = context.state
+ val savedContextMode = context.contextMode
var fallback = false
context.setBufferErrors()
// We cache the current buffer because it is impossible to
@@ -1700,65 +1396,59 @@ trait Infer extends Checkable {
context.withImplicitsDisabled(infer(false))
if (context.hasErrors) {
fallback = true
- context.restoreState(saved)
+ context.contextMode = savedContextMode
context.flushBuffer()
infer(true)
}
} catch {
case ex: CyclicReference => throw ex
case ex: TypeError => // recoverable cyclic references
- context.restoreState(saved)
+ context.contextMode = savedContextMode
if (!fallback) infer(true) else ()
} finally {
- context.restoreState(saved)
+ context.contextMode = savedContextMode
context.updateBuffer(errorsToRestore)
}
}
else infer(true)
}
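
A hypothetical example of why the second attempt matters: with implicit views disabled no alternative applies, while the retry with views enabled finds the `Int => String` conversion.

    import scala.language.implicitConversions
    object ViewDemo {
      def len(s: String)           = s.length
      def len(s: String, pad: Int) = s.length + pad
      implicit def intToString(i: Int): String = i.toString
      len(42)  // applicable only once the implicit view is taken into account
    }
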
- /** Assign <code>tree</code> the type of all polymorphic alternatives
- * with <code>nparams</code> as the number of type parameters, if it exists.
+ /** Assign `tree` the type of all polymorphic alternatives
+ * which have the same number of type parameters as `argtypes` has elements,
+ * and for which all `argtypes` are within the corresponding type parameter bounds.
* If no such polymorphic alternative exists, error.
- *
- * @param tree ...
- * @param nparams ...
*/
def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = {
val OverloadedType(pre, alts) = tree.tpe
- val sym0 = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
- def fail(kind: PolyAlternativeErrorKind.ErrorType) =
- PolyAlternativeError(tree, argtypes, sym0, kind)
-
- if (sym0 == NoSymbol) return (
- if (alts exists (_.typeParams.nonEmpty))
- fail(PolyAlternativeErrorKind.WrongNumber)
- else fail(PolyAlternativeErrorKind.NoParams))
-
- val (resSym, resTpe) = {
- if (!sym0.isOverloaded)
- (sym0, pre.memberType(sym0))
- else {
- val sym = sym0 filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes))
- if (sym == NoSymbol) {
- if (argtypes forall (x => !x.isErroneous))
- fail(PolyAlternativeErrorKind.ArgsDoNotConform)
- return
- }
- else if (sym.isOverloaded) {
- val xs = sym.alternatives
- val tparams = new AsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams
- val bounds = tparams map (_.tpeHK) // see e.g., #1236
- val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), xs))
-
- (sym setInfo tpe, tpe)
- }
- else (sym, pre.memberType(sym))
- }
+ // Alternatives with a matching length type parameter list
+ val matchingLength = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
+ def allMonoAlts = alts forall (_.typeParams.isEmpty)
+ def errorKind = matchingLength match {
+ case NoSymbol if allMonoAlts => PolyAlternativeErrorKind.NoParams // no polymorphic method alternative
+ case NoSymbol => PolyAlternativeErrorKind.WrongNumber // wrong number of tparams
+ case _ => PolyAlternativeErrorKind.ArgsDoNotConform // didn't conform to bounds
+ }
+ def fail() = PolyAlternativeError(tree, argtypes, matchingLength, errorKind)
+ def finish(sym: Symbol, tpe: Type) = tree setSymbol sym setType tpe
+ // Alternatives which conform to bounds
+ def checkWithinBounds(sym: Symbol) = sym.alternatives match {
+ case Nil if argtypes.exists(_.isErroneous) =>
+ case Nil => fail()
+ case alt :: Nil => finish(alt, pre memberType alt)
+ case alts @ (hd :: _) =>
+ log(s"Attaching AntiPolyType-carrying overloaded type to $sym")
+ // Multiple alternatives which are within bounds; spin up an
+ // overloaded type which carries an "AntiPolyType" as a prefix.
+ val tparams = newAsSeenFromMap(pre, hd.owner) mapOver hd.typeParams
+ val bounds = tparams map (_.tpeHK) // see e.g., #1236
+ val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), alts))
+ finish(sym setInfo tpe, tpe)
+ }
+ matchingLength.alternatives match {
+ case Nil => fail()
+ case alt :: Nil => finish(alt, pre memberType alt)
+ case _ => checkWithinBounds(matchingLength filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes)))
}
- // Side effects tree with symbol and type
- tree setSymbol resSym setType resTpe
}
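
A hypothetical call site for the selection above: explicit type arguments pick between overloads purely by the number of type parameters, before value arguments are even considered.

    object PolyAltDemo {
      def wrap[T](x: T): List[T]            = List(x)
      def wrap[K, V](k: K, v: V): Map[K, V] = Map(k -> v)
      wrap[Int](1)               // one type argument: only the single-type-parameter alternative fits
      wrap[String, Int]("a", 1)  // two type arguments: only the two-type-parameter alternative fits
    }
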
}
}
-
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index d6ec5f2cb0..cf82d6baac 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -1,19 +1,21 @@
package scala.tools.nsc
package typechecker
+import java.lang.Math.min
import symtab.Flags._
import scala.tools.nsc.util._
-import scala.tools.nsc.util.ClassPath._
import scala.reflect.runtime.ReflectionUtils
import scala.collection.mutable.ListBuffer
-import scala.compat.Platform.EOL
+import scala.reflect.ClassTag
import scala.reflect.internal.util.Statistics
import scala.reflect.macros.util._
-import java.lang.{Class => jClass}
-import java.lang.reflect.{Array => jArray, Method => jMethod}
-import scala.reflect.internal.util.Collections._
import scala.util.control.ControlThrowable
-import scala.reflect.macros.runtime.AbortMacroException
+import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes}
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.compiler.DefaultMacroCompiler
+import scala.tools.reflect.FastTrack
+import scala.runtime.ScalaRunTime
+import Fingerprint._
/**
* Code to deal with macros, namely with:
@@ -27,7 +29,7 @@ import scala.reflect.macros.runtime.AbortMacroException
* Then fooBar needs to point to a static method of the following form:
*
* def fooBar[T: c.WeakTypeTag] // type tag annotation is optional
- * (c: scala.reflect.macros.Context)
+ * (c: scala.reflect.macros.blackbox.Context)
* (xs: c.Expr[List[T]])
* : c.Expr[T] = {
* ...
@@ -40,15 +42,22 @@ import scala.reflect.macros.runtime.AbortMacroException
* (Expr(elems))
* (TypeTag(Int))
*/
-trait Macros extends scala.tools.reflect.FastTrack with Traces {
+trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
self: Analyzer =>
import global._
import definitions._
import treeInfo.{isRepeatedParamType => _, _}
import MacrosStats._
+
def globalSettings = global.settings
+ protected def findMacroClassLoader(): ClassLoader = {
+ val classpath = global.classPath.asURLs
+ macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
+ ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+ }
+
/** `MacroImplBinding` and its companion module are responsible for
* serialization/deserialization of macro def -> impl bindings.
*
@@ -58,7 +67,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
*
* This solution is very simple, but unfortunately it's also lacking. If we use it, then
* signatures of macro defs become transitively dependent on scala-reflect.jar
- * (because they refer to macro impls, and macro impls refer to scala.reflect.macros.Context defined in scala-reflect.jar).
+ * (because they refer to macro impls, and macro impls refer to *box.Context defined in scala-reflect.jar).
* More details can be found in comments to https://issues.scala-lang.org/browse/SI-5940.
*
* Therefore we have to avoid putting macro impls into binding pickles and come up with our own serialization format.
@@ -71,64 +80,85 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* Includes a path to load the implementation via Java reflection,
* and various accounting information necessary when composing an argument list for the reflective invocation.
*/
- private case class MacroImplBinding(
- // Java class name of the class that contains the macro implementation
- // is used to load the corresponding object with Java reflection
- val className: String,
- // method name of the macro implementation
- // `className` and `methName` are all we need to reflectively invoke a macro implementation
- // because macro implementations cannot be overloaded
- val methName: String,
- // flattens the macro impl's parameter lists having symbols replaced with metadata
- // currently metadata is an index of the type parameter corresponding to that type tag (if applicable)
- // f.ex. for: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ???
- // `signature` will be equal to List(-1, -1, 0, 1)
- val signature: List[Int],
- // type arguments part of a macro impl ref (the right-hand side of a macro definition)
- // these trees don't refer to a macro impl, so we can pickle them as is
- val targs: List[Tree])
+ case class MacroImplBinding(
+ // Is this macro impl a bundle (a trait extending *box.Macro) or a vanilla def?
+ val isBundle: Boolean,
+ // Is this macro impl blackbox (i.e. having blackbox.Context in its signature)?
+ val isBlackbox: Boolean,
+ // Java class name of the class that contains the macro implementation
+ // is used to load the corresponding object with Java reflection
+ className: String,
+ // method name of the macro implementation
+ // `className` and `methName` are all we need to reflectively invoke a macro implementation
+ // because macro implementations cannot be overloaded
+ methName: String,
+ // flattens the macro impl's parameter lists, with symbols replaced by their fingerprints
+ // currently fingerprints are calculated solely from types of the symbols:
+ // * c.Expr[T] => LiftedTyped
+ // * c.Tree => LiftedUntyped
+ // * c.WeakTypeTag[T] => Tagged(index of the type parameter corresponding to that type tag)
+ // * everything else (e.g. *box.Context) => Other
+ // f.ex. for: def impl[T: WeakTypeTag, U, V: WeakTypeTag](c: blackbox.Context)(x: c.Expr[T], y: c.Tree): (U, V) = ???
+ // `signature` will be equal to List(List(Other), List(LiftedTyped, LiftedUntyped), List(Tagged(0), Tagged(2)))
+ signature: List[List[Fingerprint]],
+ // type arguments part of a macro impl ref (the right-hand side of a macro definition)
+ // these trees don't refer to a macro impl, so we can pickle them as is
+ targs: List[Tree]) {
+ // Was this binding derived from a `def ... = macro ???` definition?
+ def is_??? = {
+ val Predef_??? = currentRun.runDefinitions.Predef_???
+ className == Predef_???.owner.javaClassName && methName == Predef_???.name.encoded
+ }
+ def isWhitebox = !isBlackbox
+ }
/** Macro def -> macro impl bindings are serialized into a `macroImpl` annotation
* with synthetic content that carries the payload described in `MacroImplBinding`.
*
* For example, for a pair of macro definition and macro implementation:
- * def impl(c: scala.reflect.macros.Context): c.Expr[Unit] = c.literalUnit;
+ * def impl(c: scala.reflect.macros.blackbox.Context): c.Expr[Unit] = ???
* def foo: Unit = macro impl
*
* We will have the following annotation added on the macro definition `foo`:
*
* @scala.reflect.macros.internal.macroImpl(
* `macro`(
- * "signature" = List(-1),
+ * "macroEngine" = <current macro engine>,
+ * "isBundle" = false,
+ * "isBlackbox" = true,
+ * "signature" = List(Other),
* "methodName" = "impl",
- * "versionFormat" = 1,
* "className" = "Macros$"))
*/
- private object MacroImplBinding {
- val versionFormat = 1
-
+ def macroEngine = "v7.0 (implemented in Scala 2.11.0-M8)"
+ object MacroImplBinding {
def pickleAtom(obj: Any): Tree =
obj match {
case list: List[_] => Apply(Ident(ListModule), list map pickleAtom)
case s: String => Literal(Constant(s))
- case i: Int => Literal(Constant(i))
+ case d: Double => Literal(Constant(d))
+ case b: Boolean => Literal(Constant(b))
+ case f: Fingerprint => Literal(Constant(f.value))
}
def unpickleAtom(tree: Tree): Any =
tree match {
case Apply(list @ Ident(_), args) if list.symbol == ListModule => args map unpickleAtom
case Literal(Constant(s: String)) => s
- case Literal(Constant(i: Int)) => i
+ case Literal(Constant(d: Double)) => d
+ case Literal(Constant(b: Boolean)) => b
+ case Literal(Constant(i: Int)) => Fingerprint(i)
}
def pickle(macroImplRef: Tree): Tree = {
- val MacroImplReference(owner, macroImpl, targs) = macroImplRef
- val paramss = macroImpl.paramss
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+ val MacroImplReference(isBundle, isBlackbox, owner, macroImpl, targs) = macroImplRef
// todo. refactor when fixing SI-5498
def className: String = {
def loop(sym: Symbol): String = sym match {
- case sym if sym.owner.isPackageClass =>
+ case sym if sym.isTopLevel =>
val suffix = if (sym.isModuleClass) "$" else ""
sym.fullName + suffix
case sym =>
@@ -139,16 +169,25 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
loop(owner)
}
- def signature: List[Int] = {
- val transformed = transformTypeTagEvidenceParams(paramss, (param, tparam) => tparam)
- transformed.flatten map (p => if (p.isTerm) -1 else p.paramPos)
+ def signature: List[List[Fingerprint]] = {
+ def fingerprint(tpe: Type): Fingerprint = tpe.dealiasWiden match {
+ case TypeRef(_, RepeatedParamClass, underlying :: Nil) => fingerprint(underlying)
+ case ExprClassOf(_) => LiftedTyped
+ case TreeType() => LiftedUntyped
+ case _ => Other
+ }
+
+ val transformed = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => tparam)
+ mmap(transformed)(p => if (p.isTerm) fingerprint(p.info) else Tagged(p.paramPos))
}
val payload = List[(String, Any)](
- "versionFormat" -> versionFormat,
- "className" -> className,
- "methodName" -> macroImpl.name.toString,
- "signature" -> signature
+ "macroEngine" -> macroEngine,
+ "isBundle" -> isBundle,
+ "isBlackbox" -> isBlackbox,
+ "className" -> className,
+ "methodName" -> macroImpl.name.toString,
+ "signature" -> signature
)
// the shape of the nucleus is chosen arbitrarily. it doesn't carry any payload.
@@ -185,498 +224,294 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
val Apply(_, pickledPayload) = wrapped
val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap
- val pickleVersionFormat = payload("versionFormat").asInstanceOf[Int]
- if (versionFormat != pickleVersionFormat) throw new Error("macro impl binding format mismatch: expected $versionFormat, actual $pickleVersionFormat")
+ def fail(msg: String) = abort(s"bad macro impl binding: $msg")
+ def unpickle[T](field: String, clazz: Class[T]): T = {
+ def failField(msg: String) = fail(s"$field $msg")
+ if (!payload.contains(field)) failField("is supposed to be there")
+ val raw: Any = payload(field)
+ if (raw == null) failField(s"is not supposed to be null")
+ val expected = ScalaRunTime.box(clazz)
+ val actual = raw.getClass
+ if (!expected.isAssignableFrom(actual)) failField(s"has wrong type: expected $expected, actual $actual")
+ raw.asInstanceOf[T]
+ }
+
+ val macroEngine = unpickle("macroEngine", classOf[String])
+ if (self.macroEngine != macroEngine) typer.TyperErrorGen.MacroIncompatibleEngineError(macroEngine)
- val className = payload("className").asInstanceOf[String]
- val methodName = payload("methodName").asInstanceOf[String]
- val signature = payload("signature").asInstanceOf[List[Int]]
- MacroImplBinding(className, methodName, signature, targs)
+ val isBundle = unpickle("isBundle", classOf[Boolean])
+ val isBlackbox = unpickle("isBlackbox", classOf[Boolean])
+ val className = unpickle("className", classOf[String])
+ val methodName = unpickle("methodName", classOf[String])
+ val signature = unpickle("signature", classOf[List[List[Fingerprint]]])
+ MacroImplBinding(isBundle, isBlackbox, className, methodName, signature, targs)
}
}
- private def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
+ def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
val pickle = MacroImplBinding.pickle(macroImplRef)
macroDef withAnnotation AnnotationInfo(MacroImplAnnotation.tpe, List(pickle), Nil)
}
- private def loadMacroImplBinding(macroDef: Symbol): MacroImplBinding = {
- val Some(AnnotationInfo(_, List(pickle), _)) = macroDef.getAnnotation(MacroImplAnnotation)
- MacroImplBinding.unpickle(pickle)
- }
-
- /** Transforms parameters lists of a macro impl.
- * The `transform` function is invoked only for WeakTypeTag evidence parameters.
- *
- * The transformer takes two arguments: a value parameter from the parameter list
- * and a type parameter that is witnesses by the value parameter.
- *
- * If the transformer returns a NoSymbol, the value parameter is not included from the result.
- * If the transformer returns something else, this something else is included in the result instead of the value parameter.
- *
- * Despite of being highly esoteric, this function significantly simplifies signature analysis.
- * For example, it can be used to strip macroImpl.paramss from the evidences (necessary when checking def <-> impl correspondence)
- * or to streamline creation of the list of macro arguments.
- */
- private def transformTypeTagEvidenceParams(paramss: List[List[Symbol]], transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
- if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
- if (paramss.head.isEmpty || !(paramss.head.head.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
- def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
- case TypeRef(SingleType(SingleType(NoPrefix, c), universe), WeakTypeTagClass, targ :: Nil)
- if c == paramss.head.head && universe == MacroContextUniverse =>
- transform(param, targ.typeSymbol)
- case _ =>
- param
+ def loadMacroImplBinding(macroDef: Symbol): Option[MacroImplBinding] =
+ macroDef.getAnnotation(MacroImplAnnotation) collect {
+ case AnnotationInfo(_, List(pickle), _) => MacroImplBinding.unpickle(pickle)
}
- val transformed = paramss.last map transformTag filter (_ ne NoSymbol)
- if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
- }
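The doc comment above is easiest to read with a concrete signature in mind; a hypothetical impl whose last parameter list consists solely of `WeakTypeTag` evidence, which is exactly what the transformer visits:

    import scala.reflect.macros.blackbox

    object Impls {
      // Passing (_, _) => NoSymbol drops the evidence list entirely (as done for the
      // def <-> impl correspondence check); passing (_, tparam) => tparam replaces
      // `tag` with the type parameter T that it witnesses.
      def impl[T](c: blackbox.Context)(xs: c.Expr[List[T]])(implicit tag: c.WeakTypeTag[T]): c.Expr[T] =
        c.universe.reify(xs.splice.head)
    }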
- def computeMacroDefTypeFromMacroImpl(macroDdef: DefDef, macroImpl: Symbol): Type = {
- // Step I. Transform c.Expr[T] to T
- var runtimeType = macroImpl.tpe.finalResultType.dealias match {
- case TypeRef(_, ExprClass, runtimeType :: Nil) => runtimeType
- case _ => AnyTpe // so that macro impls with rhs = ??? don't screw up our inference
- }
+ def isBlackbox(expandee: Tree): Boolean = isBlackbox(dissectApplied(expandee).core.symbol)
+ def isBlackbox(macroDef: Symbol): Boolean = {
+ val fastTrackBoxity = fastTrack.get(macroDef).map(_.isBlackbox)
+ val bindingBoxity = loadMacroImplBinding(macroDef).map(_.isBlackbox)
+ fastTrackBoxity orElse bindingBoxity getOrElse false
+ }
- // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
- runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, loadMacroImplBinding(macroDdef.symbol).targs.map(_.tpe))
-
- // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
- def unsigma(tpe: Type): Type =
- transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol) match {
- case (implCtxParam :: Nil) :: implParamss =>
- val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
- object UnsigmaTypeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- val pre1 = pre match {
- case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
- ThisType(macroDdef.symbol.owner)
- case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
- implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+ def computeMacroDefTypeFromMacroImplRef(macroDdef: DefDef, macroImplRef: Tree): Type = {
+ macroImplRef match {
+ case MacroImplReference(_, _, _, macroImpl, targs) =>
+ // Step I. Transform c.Expr[T] to T and everything else to Any
+ var runtimeType = decreaseMetalevel(macroImpl.info.finalResultType)
+
+ // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
+ runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, targs map (_.tpe))
+
+ // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
+ def unsigma(tpe: Type): Type =
+ transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol) match {
+ case (implCtxParam :: Nil) :: implParamss =>
+ val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
+ object UnsigmaTypeMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = pre match {
+ case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
+ ThisType(macroDdef.symbol.owner)
+ case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
+ implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+ case _ =>
+ pre
+ }
+ val args1 = args map mapOver
+ TypeRef(pre1, sym, args1)
case _ =>
- pre
+ mapOver(tp)
}
- val args1 = args map mapOver
- TypeRef(pre1, sym, args1)
- case _ =>
- mapOver(tp)
- }
- }
-
- UnsigmaTypeMap(tpe)
- case _ =>
- tpe
- }
-
- unsigma(runtimeType)
- }
-
- /** A reference macro implementation signature compatible with a given macro definition.
- *
- * In the example above for the following macro def:
- * def foo[T](xs: List[T]): T = macro fooBar
- *
- * This function will return:
- * (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]]): c.Expr[T]
- *
- * Note that type tag evidence parameters are not included into the result.
- * Type tag context bounds for macro impl tparams are optional.
- * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
- *
- * @param macroDef The macro definition symbol
- * @param tparams The type parameters of the macro definition
- * @param vparamss The value parameters of the macro definition
- * @param retTpe The return type of the macro definition
- */
- private def macroImplSig(macroDef: Symbol, tparams: List[TypeDef], vparamss: List[List[ValDef]], retTpe: Type): (List[List[Symbol]], Type) = {
- // had to move method's body to an object because of the recursive dependencies between sigma and param
- object SigGenerator {
- def sigma(tpe: Type): Type = {
- class SigmaTypeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- val pre1 = pre match {
- case ThisType(sym) if sym == macroDef.owner =>
- SingleType(SingleType(SingleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue)
- case SingleType(NoPrefix, sym) =>
- mfind(vparamss)(_.symbol == sym) match {
- case Some(macroDefParam) => SingleType(SingleType(NoPrefix, param(macroDefParam)), ExprValue)
- case _ => pre
- }
- case _ =>
- pre
}
- TypeRef(pre1, sym, args map mapOver)
+
+ UnsigmaTypeMap(tpe)
case _ =>
- mapOver(tp)
+ tpe
}
- }
- new SigmaTypeMap() apply tpe
- }
-
- def makeParam(name: Name, pos: Position, tpe: Type, flags: Long = 0L) =
- macroDef.newValueParameter(name, pos, flags) setInfo tpe
- val ctxParam = makeParam(nme.macroContext, macroDef.pos, MacroContextClass.tpe, SYNTHETIC)
- def implType(isType: Boolean, origTpe: Type): Type =
- if (isRepeatedParamType(origTpe))
- appliedType(
- RepeatedParamClass.typeConstructor,
- List(implType(isType, sigma(origTpe.typeArgs.head))))
- else {
- val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr)
- typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe)))
- }
- val paramCache = scala.collection.mutable.Map[Symbol, Symbol]()
- def param(tree: Tree): Symbol =
- paramCache.getOrElseUpdate(tree.symbol, {
- val sym = tree.symbol
- makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe), sym.flags)
- })
-
- val paramss = List(ctxParam) :: mmap(vparamss)(param)
- val implRetTpe = typeRef(singleType(NoPrefix, ctxParam), getMember(MacroContextClass, tpnme.Expr), List(sigma(retTpe)))
+ unsigma(runtimeType)
+ case _ =>
+ ErrorType
}
-
- import SigGenerator._
- macroLogVerbose(sm"""
- |generating macroImplSigs for: $macroDef
- |tparams are: $tparams
- |vparamss are: $vparamss
- |retTpe is: $retTpe
- |macroImplSig is: $paramss, $implRetTpe
- """.trim)
- (paramss, implRetTpe)
}
- /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method,
+ /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method or a top-level macro bundle,
* and that that method is signature-wise compatible with the given macro definition.
*
- * @return Typechecked rhs of the given macro definition if everything is okay.
+ * @return Macro impl reference for the given macro definition if everything is okay.
* EmptyTree if an error occurs.
*/
- def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree =
- try new MacroTyper(typer, macroDdef).typed
- catch { case MacroBodyTypecheckException => EmptyTree }
-
- class MacroTyper(val typer: Typer, val macroDdef: DefDef) extends MacroErrors {
- // Phase I: sanity checks
- val macroDef = macroDdef.symbol
- macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
- assert(macroDef.isTermMacro, macroDdef)
- if (fastTrack contains macroDef) MacroDefIsFastTrack()
- if (!typer.checkFeature(macroDdef.pos, MacrosFeature, immediate = true)) MacroFeatureNotEnabled()
-
- // we use typed1 instead of typed, because otherwise adapt is going to mess us up
- // if adapt sees <qualifier>.<method>, it will want to perform eta-expansion and will fail
- // unfortunately, this means that we have to manually trigger macro expansion
- // because it's adapt which is responsible for automatic expansion during typechecking
- def typecheckRhs(rhs: Tree): Tree = {
- try {
- // interestingly enough, just checking isErroneous doesn't cut it
- // e.g. a "type arguments [U] do not conform to method foo's type parameter bounds" error
- // doesn't manifest itself as an error in the resulting tree
- val prevNumErrors = reporter.ERROR.count
- var rhs1 = typer.typed1(rhs, EXPRmode, WildcardType)
- def rhsNeedsMacroExpansion = rhs1.symbol != null && rhs1.symbol.isTermMacro && !rhs1.symbol.isErroneous
- while (rhsNeedsMacroExpansion) {
- rhs1 = macroExpand1(typer, rhs1) match {
- case Success(expanded) =>
- try {
- val typechecked = typer.typed1(expanded, EXPRmode, WildcardType)
- macroLogVerbose("typechecked1:%n%s%n%s".format(typechecked, showRaw(typechecked)))
- typechecked
- } finally {
- popMacroContext()
- }
- case Delay(delayed) =>
- typer.instantiate(delayed, EXPRmode, WildcardType)
- case Fallback(fallback) =>
- typer.typed1(fallback, EXPRmode, WildcardType)
- case Other(result) =>
- result
- }
- }
- val typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
- if (typecheckedWithErrors) MacroDefUntypeableBodyError()
- rhs1
- } catch {
- case ex: TypeError =>
- typer.reportTypeError(context, rhs.pos, ex)
- MacroDefUntypeableBodyError()
- }
- }
-
- // Phase II: typecheck the right-hand side of the macro def
- val typed = typecheckRhs(macroDdef.rhs)
- typed match {
- case MacroImplReference(_, meth, _) if meth == Predef_??? =>
- bindMacroImpl(macroDef, typed)
- MacroDefIsQmarkQmarkQmark()
- case MacroImplReference(owner, meth, targs) =>
- if (!meth.isMethod) MacroDefInvalidBodyError()
- if (!meth.isPublic) MacroImplNotPublicError()
- if (meth.isOverloaded) MacroImplOverloadedError()
- if (!owner.isStaticOwner && !owner.moduleClass.isStaticOwner) MacroImplNotStaticError()
- if (meth.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError(typed)
- bindMacroImpl(macroDef, typed)
- case _ =>
- MacroDefInvalidBodyError()
- }
-
- // Phase III: check compatibility between the macro def and its macro impl
- // this check ignores type tag evidence parameters, because type tag context bounds are optional
- // aXXX (e.g. aparamss) => characteristics of the macro impl ("a" stands for "actual")
- // rXXX (e.g. rparamss) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
- val macroImpl = typed.symbol
- val aparamss = transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol)
- val aret = macroImpl.tpe.finalResultType
- val macroDefRet =
- if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
- else computeMacroDefTypeFromMacroImpl(macroDdef, macroImpl)
- val (rparamss, rret) = macroImplSig(macroDef, macroDdef.tparams, macroDdef.vparamss, macroDefRet)
-
- val implicitParams = aparamss.flatten filter (_.isImplicit)
- if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
- if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
-
- val atparams = macroImpl.typeParams
- val atvars = atparams map freshVar
- def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
-
- try {
- map2(aparamss, rparamss)((aparams, rparams) => {
- if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
- if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
- })
-
- // cannot fuse these loops because if aparamss.flatten != rparamss.flatten
- // then `atpeToRtpe` is going to fail with an unsound substitution
- map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
- if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
- if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
- val aparamtpe = aparam.tpe.dealias match {
- case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
- case tpe => tpe
- }
- checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
- })
-
- checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
-
- val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
- val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, depth = maxLubDepth)
- val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
- boundsOk match {
- case SilentResultValue(true) => // do nothing, success
- case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
- }
- } catch {
- case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
- }
- }
+ def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree = pluginsTypedMacroBody(typer, macroDdef)
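Besides plain static methods, the updated doc comment above also admits top-level macro bundles. As a hedged sketch in the shape bundles ultimately took in 2.11 (the protocol was still evolving around this milestone), a bundle is a class taking the context as a constructor parameter, and the macro def simply names `Bundle.method`:

    import scala.language.experimental.macros
    import scala.reflect.macros.whitebox

    class Bundle(val c: whitebox.Context) {
      import c.universe._
      // bundle members see `c` as a field, so helper methods need no extra Context parameter
      def impl: Tree = q"()"
    }

    object Api {
      def foo: Unit = macro Bundle.impl
    }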
- /** Macro classloader that is used to resolve and run macro implementations.
- * Loads classes from from -cp (aka the library classpath).
- * Is also capable of detecting REPL and reusing its classloader.
+ /** Default implementation of `typedMacroBody`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsTypedMacroBody for more details)
*/
- lazy val macroClassloader: ClassLoader = {
- if (global.forMSIL)
- throw new UnsupportedOperationException("Scala reflection not available on this platform")
-
- val classpath = global.classPath.asURLs
- macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
- val loader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
-
- // a heuristic to detect the REPL
- if (global.settings.exposeEmptyPackage.value) {
- macroLogVerbose("macro classloader: initializing from a REPL classloader".format(global.classPath.asURLs))
- import scala.tools.nsc.interpreter._
- val virtualDirectory = global.settings.outputDirs.getSingleOutput.get
- new AbstractFileClassLoader(virtualDirectory, loader) {}
- } else {
- loader
- }
- }
+ def standardTypedMacroBody(typer: Typer, macroDdef: DefDef): Tree = {
+ val macroDef = macroDdef.symbol
+ assert(macroDef.isMacro, macroDdef)
- /** Produces a function that can be used to invoke macro implementation for a given macro definition:
- * 1) Looks up macro implementation symbol in this universe.
- * 2) Loads its enclosing class from the macro classloader.
- * 3) Loads the companion of that enclosing class from the macro classloader.
- * 4) Resolves macro implementation within the loaded companion.
- *
- * @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
- * `null` otherwise.
- */
- type MacroRuntime = MacroArgs => Any
- private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
- private def macroRuntime(macroDef: Symbol): MacroRuntime = {
- macroLogVerbose(s"looking for macro implementation: $macroDef")
+ macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
if (fastTrack contains macroDef) {
- macroLogVerbose("macro expansion is serviced by a fast track")
- fastTrack(macroDef)
+ macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
+ assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
+ EmptyTree
} else {
- macroRuntimesCache.getOrElseUpdate(macroDef, {
- val binding = loadMacroImplBinding(macroDef)
- val className = binding.className
- val methName = binding.methName
- macroLogVerbose(s"resolved implementation as $className.$methName")
-
- if (binding.className == Predef_???.owner.fullName.toString && binding.methName == Predef_???.name.encoded) {
- args => throw new AbortMacroException(args.c.enclosingPosition, "macro implementation is missing")
- } else {
- // I don't use Scala reflection here, because it seems to interfere with JIT magic
- // whenever you instantiate a mirror (and not do anything with in, just instantiate), performance drops by 15-20%
- // I'm not sure what's the reason - for me it's pure voodoo
- // upd. my latest experiments show that everything's okay
- // it seems that in 2.10.1 we can easily switch to Scala reflection
- try {
- macroLogVerbose(s"loading implementation class: $className")
- macroLogVerbose(s"classloader is: ${ReflectionUtils.show(macroClassloader)}")
- val implObj = ReflectionUtils.staticSingletonInstance(macroClassloader, className)
- // relies on the fact that macro impls cannot be overloaded
- // so every methName can resolve to at maximum one method
- val implMeths = implObj.getClass.getDeclaredMethods.find(_.getName == methName)
- val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
- macroLogVerbose(s"successfully loaded macro impl as ($implObj, $implMeth)")
- args => implMeth.invoke(implObj, ((args.c +: args.others) map (_.asInstanceOf[AnyRef])): _*)
- } catch {
- case ex: Exception =>
- macroLogVerbose(s"macro runtime failed to load: ${ex.toString}")
- macroDef setFlag IS_ERROR
- null
- }
- }
- })
+ def fail() = { if (macroDef != null) macroDef setFlag IS_ERROR; macroDdef setType ErrorType; EmptyTree }
+ def success(macroImplRef: Tree) = { bindMacroImpl(macroDef, macroImplRef); macroImplRef }
+
+ if (!typer.checkFeature(macroDdef.pos, currentRun.runDefinitions.MacrosFeature, immediate = true)) {
+ macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
+ fail()
+ } else {
+ val macroDdef1: macroDdef.type = macroDdef
+ val typer1: typer.type = typer
+ val macroCompiler = new {
+ val global: self.global.type = self.global
+ val typer: self.global.analyzer.Typer = typer1.asInstanceOf[self.global.analyzer.Typer]
+ val macroDdef: self.global.DefDef = macroDdef1
+ } with DefaultMacroCompiler
+ val macroImplRef = macroCompiler.resolveMacroImpl
+ if (macroImplRef.isEmpty) fail() else success(macroImplRef)
+ }
}
}
- private def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext =
+ def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext = {
new {
val universe: self.global.type = self.global
val callsiteTyper: universe.analyzer.Typer = typer.asInstanceOf[global.analyzer.Typer]
- val expandee = expandeeTree
+ val expandee = universe.analyzer.macroExpanderAttachment(expandeeTree).original orElse duplicateAndKeepPositions(expandeeTree)
} with UnaffiliatedMacroContext {
val prefix = Expr[Nothing](prefixTree)(TypeTag.Nothing)
override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, enclosingMacros.length - 1 /* exclude myself */)
}
+ }
/** Calculate the arguments to pass to a macro implementation when expanding the provided tree.
*/
case class MacroArgs(c: MacroContext, others: List[Any])
- private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = {
- val macroDef = expandee.symbol
- val prefixTree = expandee.collect{ case Select(qual, name) => qual }.headOption.getOrElse(EmptyTree)
- val context = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefixTree, expandee))
- var typeArgs = List[Tree]()
- val exprArgs = ListBuffer[List[Expr[_]]]()
- def collectMacroArgs(tree: Tree): Unit = tree match {
- case Apply(fn, args) =>
- // todo. infer precise typetag for this Expr, namely the declared type of the corresponding macro impl argument
- exprArgs.prepend(args map (arg => context.Expr[Nothing](arg)(TypeTag.Nothing)))
- collectMacroArgs(fn)
- case TypeApply(fn, args) =>
- typeArgs = args
- collectMacroArgs(fn)
- case _ =>
- }
- collectMacroArgs(expandee)
+ def macroArgs(typer: Typer, expandee: Tree): MacroArgs = pluginsMacroArgs(typer, expandee)
- val argcDoesntMatch = macroDef.paramss.length != exprArgs.length
- val nullaryArgsEmptyParams = exprArgs.isEmpty && macroDef.paramss == ListOfNil
- if (argcDoesntMatch && !nullaryArgsEmptyParams) { typer.TyperErrorGen.MacroPartialApplicationError(expandee) }
+ /** Default implementation of `macroArgs`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroArgs for more details)
+ */
+ def standardMacroArgs(typer: Typer, expandee: Tree): MacroArgs = {
+ val macroDef = expandee.symbol
+ val paramss = macroDef.paramss
+ val treeInfo.Applied(core, targs, argss) = expandee
+ val prefix = core match { case Select(qual, _) => qual; case _ => EmptyTree }
+ val context = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefix, expandee))
+
+ macroLogVerbose(sm"""
+ |context: $context
+ |prefix: $prefix
+ |targs: $targs
+ |argss: $argss
+ |paramss: $paramss
+ """.trim)
- val argss: List[List[Any]] = exprArgs.toList
- macroLogVerbose(s"context: $context")
- macroLogVerbose(s"argss: $argss")
+ import typer.TyperErrorGen._
+ val isNullaryArgsEmptyParams = argss.isEmpty && paramss == ListOfNil
+ if (paramss.length < argss.length) MacroTooManyArgumentListsError(expandee)
+ if (paramss.length > argss.length && !isNullaryArgsEmptyParams) MacroTooFewArgumentListsError(expandee)
- val preparedArgss: List[List[Any]] =
+ val macroImplArgs: List[Any] =
if (fastTrack contains macroDef) {
- if (fastTrack(macroDef) validate context) argss
- else typer.TyperErrorGen.MacroPartialApplicationError(expandee)
- } else {
- // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
- // consider the following example:
- //
- // class D[T] {
- // class C[U] {
- // def foo[V] = macro Impls.foo[T, U, V]
- // }
- // }
- //
- // val outer1 = new D[Int]
- // val outer2 = new outer1.C[String]
- // outer2.foo[Boolean]
- //
- // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
- // whereas V won't be resolved by asSeenFrom and need to be loaded directly from `expandee` which needs to contain a TypeApply node
- // also, macro implementation reference may contain a regular type as a type argument, then we pass it verbatim
- val binding = loadMacroImplBinding(macroDef)
- macroLogVerbose(s"binding: $binding")
- val tags = binding.signature filter (_ != -1) map (paramPos => {
- val targ = binding.targs(paramPos).tpe.typeSymbol
- val tpe = if (targ.isTypeParameterOrSkolem) {
- if (targ.owner == macroDef) {
- // doesn't work when macro def is compiled separately from its usages
- // then targ is not a skolem and isn't equal to any of macroDef.typeParams
- // val argPos = targ.deSkolemize.paramPos
- val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
- typeArgs(argPos).tpe
+ // Take a dry run of the fast track implementation
+ if (fastTrack(macroDef) validate expandee) argss.flatten
+ else MacroTooFewArgumentListsError(expandee)
+ }
+ else {
+ def calculateMacroArgs(binding: MacroImplBinding) = {
+ val signature = if (binding.isBundle) binding.signature else binding.signature.tail
+ macroLogVerbose(s"binding: $binding")
+
+ // STEP I: prepare value arguments of the macro expansion
+ // wrap argss in c.Expr if necessary (i.e. if corresponding macro impl param is of type c.Expr[T])
+ // expand varargs (nb! varargs can apply to any parameter section, not necessarily to the last one)
+ val trees = map3(argss, paramss, signature)((args, defParams, implParams) => {
+ val isVarargs = isVarArgsList(defParams)
+ if (isVarargs) {
+ if (defParams.length > args.length + 1) MacroTooFewArgumentsError(expandee)
+ } else {
+ if (defParams.length < args.length) MacroTooManyArgumentsError(expandee)
+ if (defParams.length > args.length) MacroTooFewArgumentsError(expandee)
+ }
+
+ val wrappedArgs = mapWithIndex(args)((arg, j) => {
+ val fingerprint = implParams(min(j, implParams.length - 1))
+ fingerprint match {
+ case LiftedTyped => context.Expr[Nothing](arg.duplicate)(TypeTag.Nothing) // TODO: SI-5752
+ case LiftedUntyped => arg.duplicate
+ case _ => abort(s"unexpected fingerprint $fingerprint in $binding with paramss being $paramss " +
+ s"corresponding to arg $arg in $argss")
+ }
+ })
+
+ if (isVarargs) {
+ val (normal, varargs) = wrappedArgs splitAt (defParams.length - 1)
+ normal :+ varargs // pack all varargs into a single Seq argument (varargs Scala style)
+ } else wrappedArgs
+ })
+ macroLogVerbose(s"trees: $trees")
+
+ // STEP II: prepare type arguments of the macro expansion
+ // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
+ // consider the following example:
+ //
+ // class D[T] {
+ // class C[U] {
+ // def foo[V] = macro Impls.foo[T, U, V]
+ // }
+ // }
+ //
+ // val outer1 = new D[Int]
+ // val outer2 = new outer1.C[String]
+ // outer2.foo[Boolean]
+ //
+ // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
+ // whereas V won't be resolved by asSeenFrom and needs to be loaded directly from `expandee`, which needs to contain a TypeApply node
+ // also, macro implementation reference may contain a regular type as a type argument, then we pass it verbatim
+ val tags = signature.flatten collect { case f if f.isTag => f.paramPos } map (paramPos => {
+ val targ = binding.targs(paramPos).tpe.typeSymbol
+ val tpe = if (targ.isTypeParameterOrSkolem) {
+ if (targ.owner == macroDef) {
+ // doesn't work when macro def is compiled separately from its usages
+ // then targ is not a skolem and isn't equal to any of macroDef.typeParams
+ // val argPos = targ.deSkolemize.paramPos
+ val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
+ targs(argPos).tpe
+ } else
+ targ.tpe.asSeenFrom(
+ if (prefix == EmptyTree) macroDef.owner.tpe else prefix.tpe,
+ macroDef.owner)
} else
- targ.tpe.asSeenFrom(
- if (prefixTree == EmptyTree) macroDef.owner.tpe else prefixTree.tpe,
- macroDef.owner)
- } else
- targ.tpe
- context.WeakTypeTag(tpe)
- })
- macroLogVerbose(s"tags: $tags")
-
- // transforms argss taking into account varargness of paramss
- // note that typetag context bounds are only declared on macroImpls
- // so this optional arglist might not match macroDef's paramlist
- // nb! varargs can apply to any parameter section, not necessarily to the last one
- mapWithIndex(argss :+ tags)((as, i) => {
- val mapsToParamss = macroDef.paramss.indices contains i
- if (mapsToParamss) {
- val ps = macroDef.paramss(i)
- if (isVarArgsList(ps)) {
- val (normal, varargs) = as splitAt (ps.length - 1)
- normal :+ varargs // pack all varargs into a single List argument
- } else as
- } else as
- })
+ targ.tpe
+ context.WeakTypeTag(tpe)
+ })
+ macroLogVerbose(s"tags: $tags")
+
+ // if present, tags always come in a separate parameter/argument list
+ // that's because macro impls can't have implicit parameters other than c.WeakTypeTag[T]
+ (trees :+ tags).flatten
+ }
+
+ val binding = loadMacroImplBinding(macroDef).get
+ if (binding.is_???) Nil
+ else calculateMacroArgs(binding)
}
- macroLogVerbose(s"preparedArgss: $preparedArgss")
- MacroArgs(context, preparedArgss.flatten)
+ macroLogVerbose(s"macroImplArgs: $macroImplArgs")
+ MacroArgs(context, macroImplArgs)
}
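As a hypothetical illustration of the varargs packing performed above: for a macro def `def sum(xs: Int*): Int = macro VarargsImpls.sumImpl`, a call like `sum(1, 2, 3)` produces three wrapped arguments that are packed into the single repeated parameter of the impl:

    import scala.reflect.macros.blackbox

    object VarargsImpls {
      // the repeated parameter receives all packed varargs from the call site
      def sumImpl(c: blackbox.Context)(xs: c.Expr[Int]*): c.Expr[Int] = {
        import c.universe._
        xs.foldLeft(reify(0))((acc, x) => reify(acc.splice + x.splice))
      }
    }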
/** Keeps track of macros in-flight.
- * See more informations in comments to `openMacros` in `scala.reflect.macros.Context`.
+ * See more information in the comments on `openMacros` in `scala.reflect.macros.whitebox.Context`.
*/
- private var _openMacros = List[MacroContext]()
+ var _openMacros = List[MacroContext]()
def openMacros = _openMacros
- private def pushMacroContext(c: MacroContext) = _openMacros ::= c
- private def popMacroContext() = _openMacros = _openMacros.tail
+ def pushMacroContext(c: MacroContext) = _openMacros ::= c
+ def popMacroContext() = _openMacros = _openMacros.tail
def enclosingMacroPosition = openMacros map (_.macroApplication.pos) find (_ ne NoPosition) getOrElse NoPosition
- private sealed abstract class MacroExpansionResult
- private case class Success(expanded: Tree) extends MacroExpansionResult
- private case class Delay(delayed: Tree) extends MacroExpansionResult
- private case class Fallback(fallback: Tree) extends MacroExpansionResult { currentRun.seenMacroExpansionsFallingBack = true }
- private case class Other(result: Tree) extends MacroExpansionResult
- private def Skip(expanded: Tree) = Other(expanded)
- private def Cancel(expandee: Tree) = Other(expandee)
- private def Failure(expandee: Tree) = Other(expandee)
-
/** Performs macro expansion:
- * 1) Checks whether the expansion needs to be delayed (see `mustDelayMacroExpansion`)
- * 2) Loads macro implementation using `macroMirror`
- * 3) Synthesizes invocation arguments for the macro implementation
- * 4) Checks that the result is a tree bound to this universe
- * 5) Typechecks the result against the return type of the macro definition
+ *
+ * ========= Expandable trees =========
+ *
+ * A term of one of the following shapes:
+ *
+ * Ident(<term macro>)
+ * Select(<any qualifier>, <term macro>)
+ * TypeApply(<any of the above>, <targs>)
+ * Apply(...Apply(<any of the above>, <args1>)...<argsN>)
+ *
+ * ========= Macro expansion =========
+ *
+ * First of all `macroExpandXXX`:
+ * 1) If necessary desugars the `expandee` to fit into the default expansion scheme
+ * that is understood by `macroExpandWithRuntime` / `macroExpandWithoutRuntime`
+ *
+ * Then `macroExpandWithRuntime`:
+ * 2) Checks whether the expansion needs to be delayed
+ * 3) Loads macro implementation using `macroMirror`
+ * 4) Synthesizes invocation arguments for the macro implementation
+ * 5) Checks that the result is a tree or an expr bound to this universe
+ *
+ * Finally `macroExpandXXX`:
+ * 6) Validates the expansion against the white list of supported tree shapes
+ * 7) Typechecks the result as required by the circumstances of the macro application
*
* If -Ymacro-debug-lite is enabled, you will get basic notifications about macro expansion
* along with macro expansions logged in a form that can be copy/pasted verbatim into the REPL.
@@ -687,123 +522,197 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
*
* @return
* the expansion result if the expansion has been successful,
- * the fallback method invocation if the expansion has been unsuccessful, but there is a fallback,
+ * the fallback tree if the expansion has been unsuccessful, but there is a fallback,
* the expandee unchanged if the expansion has been delayed,
* the expandee fully expanded if the expansion has been delayed before and has been expanded now,
* the expandee with an error marker set if the expansion has been cancelled due to malformed arguments or implementation,
* the expandee with an error marker set if there has been an error
*/
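To make the expandable shapes listed above concrete, here is a hedged sketch using runtime-universe quasiquotes (`obj` and `m` are placeholder names; the compiler builds the same shapes during typechecking):

    import scala.reflect.runtime.universe._

    object ExpandableShapes extends App {
      // A curried, type-applied selection matches the Apply(...Apply(TypeApply(Select(...)), ...)...) shape.
      val call = q"obj.m[Int](1)(2)"
      println(showRaw(call))
      // prints roughly:
      // Apply(Apply(TypeApply(Select(Ident(TermName("obj")), TermName("m")),
      //   List(Ident(TypeName("Int")))), List(Literal(Constant(1)))), List(Literal(Constant(2))))
    }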
- def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
- if (settings.Ymacronoexpand.value) return expandee // SI-6812
- val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
- if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
- try {
- macroExpand1(typer, expandee) match {
- case Success(expanded) =>
- try {
- def typecheck(phase: String, tree: Tree, pt: Type): Tree = {
- if (tree.isErroneous) return tree
- macroLogVerbose(s"typechecking against $phase $pt: $expanded")
- val numErrors = reporter.ERROR.count
- def hasNewErrors = reporter.ERROR.count > numErrors
- val result = typer.context.withImplicitsEnabled(typer.typed(tree, EXPRmode, pt))
- macroLogVerbose(s"""${if (hasNewErrors) "failed to typecheck" else "successfully typechecked"} against $phase $pt:\n$result""")
- result
- }
+ abstract class MacroExpander(val typer: Typer, val expandee: Tree) {
+ def onSuccess(expanded: Tree): Tree
+ def onFallback(expanded: Tree): Tree
+ def onSuppressed(expandee: Tree): Tree = expandee
+ def onDelayed(expanded: Tree): Tree = expanded
+ def onSkipped(expanded: Tree): Tree = expanded
+ def onFailure(expanded: Tree): Tree = { typer.infer.setError(expandee); expandee }
+
+ def apply(desugared: Tree): Tree = {
+ if (isMacroExpansionSuppressed(desugared)) onSuppressed(expandee)
+ else expand(desugared)
+ }
- var expectedTpe = expandee.tpe
- if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
- // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
- val expanded0 = duplicateAndKeepPositions(expanded)
- val expanded1 = typecheck("macro def return type", expanded0, expectedTpe)
- val expanded2 = typecheck("expected type", expanded1, pt)
- expanded2
- } finally {
- popMacroContext()
+ protected def expand(desugared: Tree): Tree = {
+ def showDetailed(tree: Tree) = showRaw(tree, printIds = true, printTypes = true)
+ def summary() = s"expander = $this, expandee = ${showDetailed(expandee)}, desugared = ${if (expandee == desugared) () else showDetailed(desugared)}"
+ if (macroDebugVerbose) println(s"macroExpand: ${summary()}")
+ linkExpandeeAndDesugared(expandee, desugared)
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
+ if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
+ try {
+ withInfoLevel(nodePrinters.InfoLevel.Quiet) { // verbose printing might cause recursive macro expansions
+ if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
+ val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
+ macroLogVerbose(s"cancelled macro expansion because of $reason: $expandee")
+ onFailure(typer.infer.setError(expandee))
+ } else try {
+ val expanded = {
+ val runtime = macroRuntime(expandee)
+ if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
+ else macroExpandWithoutRuntime(typer, expandee)
+ }
+ expanded match {
+ case Success(expanded) =>
+ // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
+ val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext()
+ if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1)
+ if (settings.Ymacroexpand.value == settings.MacroExpand.Discard) expandee.setType(expanded1.tpe)
+ else expanded1
+ case Fallback(fallback) => onFallback(fallback)
+ case Delayed(delayed) => onDelayed(delayed)
+ case Skipped(skipped) => onSkipped(skipped)
+ case Failure(failure) => onFailure(failure)
+ }
+ } catch {
+ case typer.TyperErrorGen.MacroExpansionException => onFailure(expandee)
}
- case Delay(delayed) =>
- // =========== THE SITUATION ===========
- //
- // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
- // then there are two possible situations we're in:
- //
- // 1) We're in POLYmode, when the typer tests the waters wrt type inference
- // (e.g. as in typedArgToPoly in doTypedApply).
- //
- // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
- // (e.g. if we're an argument to a function call, then this means that no previous argument lists
- // can determine our type variables for us).
- //
- // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
- // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
- //
- // Situation #2 requires measures to be taken. If we're in it, then noone's going to help us infer
- // the undetermined type params. Therefore we need to do something ourselves or otherwise this
- // expandee will forever remaing not expanded (see SI-5692).
- //
- // A traditional way out of this conundrum is to call `instantiate` and let the inferencer
- // try to find the way out. It works for simple cases, but sometimes, if the inferencer lacks
- // information, it will be forced to approximate.
- //
- // =========== THE PROBLEM ===========
- //
- // Consider the following example (thanks, Miles!):
- //
- // // Iso represents an isomorphism between two datatypes:
- // // 1) An arbitrary one (e.g. a random case class)
- // // 2) A uniform representation for all datatypes (e.g. an HList)
- // trait Iso[T, U] {
- // def to(t : T) : U
- // def from(u : U) : T
- // }
- // implicit def materializeIso[T, U]: Iso[T, U] = macro ???
- //
- // case class Foo(i: Int, s: String, b: Boolean)
- // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
- // foo(Foo(23, "foo", true))
- //
- // In the snippet above, even though we know that there's a fundep going from T to U
- // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
- // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
- // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
- val shouldInstantiate = typer.context.undetparams.nonEmpty && !inPolyMode(mode)
- if (shouldInstantiate) typer.instantiatePossiblyExpectingUnit(delayed, mode, pt)
- else delayed
- case Fallback(fallback) =>
- typer.context.withImplicitsEnabled(typer.typed(fallback, EXPRmode, pt))
- case Other(result) =>
- result
+ }
+ } finally {
+ if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
}
- } finally {
- if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
}
}
- /** Does the same as `macroExpand`, but without typechecking the expansion
- * Meant for internal use within the macro infrastructure, don't use it elsewhere.
+ /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
+ * @param outerPt Expected type that comes from enclosing context (something that's traditionally called `pt`).
+ * @param innerPt Expected type that comes from the signature of a macro def, possibly wildcarded to help type inference.
*/
- private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult =
- // verbose printing might cause recursive macro expansions, so I'm shutting it down here
- withInfoLevel(nodePrinters.InfoLevel.Quiet) {
- if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
- val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
- macroLogVerbose(s"cancelled macro expansion because of $reason: $expandee")
- return Cancel(typer.infer.setError(expandee))
+ class DefMacroExpander(typer: Typer, expandee: Tree, mode: Mode, outerPt: Type)
+ extends MacroExpander(typer, expandee) {
+ lazy val innerPt = {
+ val tp = if (isNullaryInvocation(expandee)) expandee.tpe.finalResultType else expandee.tpe
+ if (isBlackbox(expandee)) tp
+ else {
+ // approximation is necessary for whitebox macros to guide type inference
+ // read more in the comments for onDelayed below
+ val undetparams = tp collect { case tp if tp.typeSymbol.isTypeParameter => tp.typeSymbol }
+ deriveTypeWithWildcards(undetparams)(tp)
}
+ }
+ override def onSuccess(expanded0: Tree) = {
+ // prematurely annotate the tree with a macro expansion attachment
+ // so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup
+ linkExpandeeAndExpanded(expandee, expanded0)
- try {
- val runtime = macroRuntime(expandee.symbol)
- if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
- else macroExpandWithoutRuntime(typer, expandee)
- } catch {
- case typer.TyperErrorGen.MacroExpansionException => Failure(expandee)
+ def typecheck(label: String, tree: Tree, pt: Type): Tree = {
+ if (tree.isErrorTyped) tree
+ else {
+ if (macroDebugVerbose) println(s"$label (against pt = $pt): $tree")
+ // `macroExpandApply` is called from `adapt`, where implicit conversions are disabled
+ // therefore we need to re-enable the conversions back temporarily
+ val result = typer.context.withImplicitsEnabled(typer.typed(tree, mode, pt))
+ if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reportBuffer.errors}")
+ result
+ }
+ }
+
+ if (isBlackbox(expandee)) {
+ val expanded1 = atPos(enclosingMacroPosition.makeTransparent)(Typed(expanded0, TypeTree(innerPt)))
+ typecheck("blackbox typecheck", expanded1, outerPt)
+ } else {
+ val expanded1 = expanded0
+ val expanded2 = typecheck("whitebox typecheck #1", expanded1, outerPt)
+ typecheck("whitebox typecheck #2", expanded2, innerPt)
}
}
+ override def onDelayed(delayed: Tree) = {
+ // =========== THE SITUATION ===========
+ //
+ // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
+ // then there are two possible situations we're in:
+ // 1) We're in POLYmode, when the typer tests the waters wrt type inference
+ // (e.g. as in typedArgToPoly in doTypedApply).
+ // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
+ // (e.g. if we're an argument to a function call, then this means that no previous argument lists
+ // can determine our type variables for us).
+ //
+ // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
+ // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
+ //
+ // Situation #2 requires measures to be taken. If we're in it, then no one's going to help us infer
+ // the undetermined type params. Therefore we need to do something ourselves, otherwise this
+ // expandee will forever remain unexpanded (see SI-5692). A traditional way out of this conundrum
+ // is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases,
+ // but sometimes, if the inferencer lacks information, it will be forced to approximate.
+ //
+ // =========== THE PROBLEM ===========
+ //
+ // Consider the following example (thanks, Miles!):
+ //
+ // Iso represents an isomorphism between two datatypes:
+ // 1) An arbitrary one (e.g. a random case class)
+ // 2) A uniform representation for all datatypes (e.g. an HList)
+ //
+ // trait Iso[T, U] {
+ // def to(t : T) : U
+ // def from(u : U) : T
+ // }
+ // implicit def materializeIso[T, U]: Iso[T, U] = macro ???
+ //
+ // case class Foo(i: Int, s: String, b: Boolean)
+ // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
+ // foo(Foo(23, "foo", true))
+ //
+ // In the snippet above, even though we know that there's a fundep going from T to U
+ // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
+ // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
+ // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
+ //
+ // =========== THE SOLUTION (ENABLED ONLY FOR WHITEBOX MACROS) ===========
+ //
+ // To give materializers a chance to say their word before vanilla inference kicks in,
+ // we infer as much as possible (e.g. in the example above even though L is hopeless, C still can be inferred to Foo)
+ // and then trigger macro expansion with the undetermined type parameters still there.
+ // Thanks to that the materializer can take a look at what's going on and react accordingly.
+ val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode
+ if (shouldInstantiate) {
+ if (isBlackbox(expandee)) typer.instantiatePossiblyExpectingUnit(delayed, mode, outerPt)
+ else {
+ forced += delayed
+ typer.infer.inferExprInstance(delayed, typer.context.extractUndetparams(), outerPt, keepNothings = false)
+ macroExpand(typer, delayed, mode, outerPt)
+ }
+ } else delayed
+ }
+ override def onFallback(fallback: Tree) = typer.typed(fallback, mode, outerPt)
+ }
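The practical upshot of the two `onSuccess` branches, sketched with a hypothetical pair of impls that both expand to the literal `42`: a blackbox expansion is ascribed to the declared return type, while a whitebox expansion may expose a more precise type to the call site.

    import scala.language.experimental.macros
    import scala.reflect.macros.{blackbox, whitebox}

    object Boxity {
      def bbImpl(c: blackbox.Context): c.Tree = c.universe.Literal(c.universe.Constant(42))
      def wbImpl(c: whitebox.Context): c.Tree = c.universe.Literal(c.universe.Constant(42))

      // blackbox: the expansion is wrapped in Typed(_, innerPt), so `val x = opaque` gets type Any
      def opaque: Any = macro bbImpl
      // whitebox: the expansion is typechecked as-is first, so `val y = transparent` gets type Int
      def transparent: Any = macro wbImpl
    }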
+
+ /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
+ * @see DefMacroExpander
+ */
+ def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = pluginsMacroExpand(typer, expandee, mode, pt)
+
+ /** Default implementation of `macroExpand`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroExpand for more details)
+ */
+ def standardMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = {
+ val expander = new DefMacroExpander(typer, expandee, mode, pt)
+ expander(expandee)
+ }
+
+ sealed abstract class MacroStatus(val result: Tree)
+ case class Success(expanded: Tree) extends MacroStatus(expanded)
+ case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true }
+ case class Delayed(delayed: Tree) extends MacroStatus(delayed)
+ case class Skipped(skipped: Tree) extends MacroStatus(skipped)
+ case class Failure(failure: Tree) extends MacroStatus(failure)
+ def Delay(expanded: Tree) = Delayed(expanded)
+ def Skip(expanded: Tree) = Skipped(expanded)
/** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroExpansionResult = {
+ def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = {
val wasDelayed = isDelayed(expandee)
val undetparams = calculateUndetparams(expandee)
val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty
@@ -829,15 +738,41 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
def hasNewErrors = reporter.ERROR.count > numErrors
val expanded = { pushMacroContext(args.c); runtime(args) }
if (hasNewErrors) MacroGeneratedTypeError(expandee)
+ def validateResultingTree(expanded: Tree) = {
+ macroLogVerbose("original:")
+ macroLogLite("" + expanded + "\n" + showRaw(expanded))
+ val freeSyms = expanded.freeTerms ++ expanded.freeTypes
+ freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
+ // Macros might have spliced arguments with range positions into non-compliant
+ // locations, notably, under a tree without a range position. Or, they might
+ // splice a tree that `resetAttrs` has assigned NoPosition.
+ //
+ // Here, we just convert all positions in the tree to offset positions, and
+ // convert NoPositions to something sensible.
+ //
+ // Given that the IDE now sees the expandee (by using -Ymacro-expand:discard),
+ // this loss of position fidelity shouldn't cause any real problems.
+ //
+ // Alternatively, we could pursue a way to exclude macro expansions from position
+ // invariant checking, or find a way not to touch expansions that happen to validate.
+ //
+ // This would be useful for cases like:
+ //
+ // macro1 { macro2 { "foo" } }
+ //
+ // to allow `macro1` to see the range position of the "foo".
+ val expandedPos = enclosingMacroPosition.focus
+ def fixPosition(pos: Position) =
+ if (pos == NoPosition) expandedPos else pos.focus
+ expanded.foreach(t => t.pos = fixPosition(t.pos))
+
+ val result = atPos(enclosingMacroPosition.focus)(expanded)
+ Success(result)
+ }
expanded match {
- case expanded: Expr[_] =>
- macroLogVerbose("original:")
- macroLogLite("" + expanded.tree + "\n" + showRaw(expanded.tree))
- val freeSyms = expanded.tree.freeTerms ++ expanded.tree.freeTypes
- freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
- Success(atPos(enclosingMacroPosition.focus)(expanded.tree updateAttachment MacroExpansionAttachment(expandee)))
- case _ =>
- MacroExpansionIsNotExprError(expandee, expanded)
+ case expanded: Expr[_] if expandee.symbol.isTermMacro => validateResultingTree(expanded.tree)
+ case expanded: Tree if expandee.symbol.isTermMacro => validateResultingTree(expanded)
+ case _ => MacroExpansionHasInvalidTypeError(expandee, expanded)
}
} catch {
case ex: Throwable =>
@@ -858,7 +793,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
/** Expands a macro when a runtime (i.e. the macro implementation) cannot be loaded
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroExpansionResult = {
+ def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = {
import typer.TyperErrorGen._
val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee)
macroLogLite(s"falling back to: $fallbackSym")
@@ -886,10 +821,12 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
*/
var hasPendingMacroExpansions = false
- private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]
+ private val forced = perRunCaches.newWeakSet[Tree]
+ private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]()
private def isDelayed(expandee: Tree) = delayed contains expandee
private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
- delayed.get(expandee).getOrElse {
+ if (forced(expandee)) scala.collection.mutable.Set[Int]()
+ else delayed.getOrElse(expandee, {
val calculated = scala.collection.mutable.Set[Symbol]()
expandee foreach (sub => {
def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym
@@ -898,8 +835,8 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
})
macroLogVerbose("calculateUndetparams: %s".format(calculated))
calculated map (_.id)
- }
- private val undetparams = perRunCaches.newSet[Int]
+ })
+ private val undetparams = perRunCaches.newSet[Int]()
def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = {
undetparams ++= newUndets map (_.id)
if (macroDebugVerbose) newUndets foreach (sym => println("undetParam added: %s".format(sym)))
@@ -928,7 +865,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
new Transformer {
override def transform(tree: Tree) = super.transform(tree match {
// todo. expansion should work from the inside out
- case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty =>
+ case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty && !tree.isErroneous =>
val context = tree.attachments.get[MacroRuntimeAttachment].get.typerContext
delayed -= tree
context.implicitsEnabled = typer.context.implicitsEnabled
@@ -946,3 +883,22 @@ object MacrosStats {
val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer")
val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
}
+
+class Fingerprint private[Fingerprint](val value: Int) extends AnyVal {
+ def paramPos = { assert(isTag, this); value }
+ def isTag = value >= 0
+ override def toString = this match {
+ case Other => "Other"
+ case LiftedTyped => "Expr"
+ case LiftedUntyped => "Tree"
+ case _ => s"Tag($value)"
+ }
+}
+
+object Fingerprint {
+ def apply(value: Int) = new Fingerprint(value)
+ def Tagged(tparamPos: Int) = new Fingerprint(tparamPos)
+ val Other = new Fingerprint(-1)
+ val LiftedTyped = new Fingerprint(-2)
+ val LiftedUntyped = new Fingerprint(-3)
+}
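A hedged reading of the new signature encoding, on a hypothetical impl (for non-bundle impls, `standardMacroArgs` drops the leading entry for the context parameter list again via `binding.signature.tail`):

    import scala.reflect.macros.blackbox

    object FingerprintedImpls {
      // fingerprinting the parameter lists of this impl would yield roughly:
      //   List(List(Other),                      // (c: blackbox.Context)
      //        List(LiftedTyped, LiftedUntyped), // (x: c.Expr[Int], y: c.Tree)
      //        List(Tagged(0)))                  // (implicit t: c.WeakTypeTag[T]), 0 = position of T
      def impl[T](c: blackbox.Context)(x: c.Expr[Int], y: c.Tree)(implicit t: c.WeakTypeTag[T]): c.Tree = y
    }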
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 99557d1527..ec2b7d49f5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -6,7 +6,6 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ ojoin }
import scala.reflect.ClassTag
import scala.reflect.runtime.{ universe => ru }
@@ -22,74 +21,23 @@ trait MethodSynthesis {
import definitions._
import CODE._
- object synthesisUtil {
- type TT[T] = ru.TypeTag[T]
- type CT[T] = ClassTag[T]
-
- def ValOrDefDef(sym: Symbol, body: Tree) =
- if (sym.isLazy) ValDef(sym, body)
- else DefDef(sym, body)
-
- def applyTypeInternal(tags: List[TT[_]]): Type = {
- val symbols = tags map compilerSymbolFromTag
- val container :: args = symbols
- val tparams = container.typeConstructor.typeParams
-
- // Conservative at present - if manifests were more usable this could do a lot more.
- // [Eugene to Paul] all right, they are now. what do you have in mind?
- require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols)
- require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container)
- require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args)
-
- appliedType(container, args map (_.tpe): _*)
- }
-
- def companionType[T](implicit ct: CT[T]) =
- rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe
-
- // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]`
- def applyType[CC](implicit t1: TT[CC]): Type =
- applyTypeInternal(List(t1))
-
- def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type =
- applyTypeInternal(List(t1, t2))
-
- def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type =
- applyTypeInternal(List(t1, t2, t3))
-
- def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type =
- applyTypeInternal(List(t1, t2, t3, t4))
-
- def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = {
- val fnSymbol = compilerSymbolFromTag(t)
- val formals = compilerTypeFromTag(t).typeArguments
- assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t))
- val params = owner newSyntheticValueParams formals
- MethodType(params, formals.last)
+ /** The annotations amongst those found on the original symbol which
+ * should be propagated to this kind of accessor.
+ */
+ def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
+ def annotationFilter(ann: AnnotationInfo) = ann.metaAnnotations match {
+ case Nil if ann.defaultTargets.isEmpty => keepClean // no meta-annotations or default targets
+ case Nil => ann.defaultTargets contains category // default targets exist for ann
+ case metas => metas exists (_ matches category) // meta-annotations attached to ann
}
-
- /** The annotations amongst those found on the original symbol which
- * should be propagated to this kind of accessor.
- */
- def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
- initial filter { ann =>
- // There are no meta-annotation arguments attached to `ann`
- if (ann.metaAnnotations.isEmpty) {
- // A meta-annotation matching `annotKind` exists on `ann`'s definition.
- (ann.defaultTargets contains category) ||
- // `ann`'s definition has no meta-annotations, and `keepClean` is true.
- (ann.defaultTargets.isEmpty && keepClean)
- }
- // There are meta-annotation arguments, and one of them matches `annotKind`
- else ann.metaAnnotations exists (_ matches category)
- }
- }
- }
- import synthesisUtil._
+ initial filter annotationFilter
+ }
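
A hedged, user-level illustration of what the filter above decides, using scala.annotation.meta (the source of defaultTargets and meta-annotations); the `note` annotation is hypothetical:

import scala.annotation.StaticAnnotation
import scala.annotation.meta.{getter, setter}

class note extends StaticAnnotation           // hypothetical marker annotation

class Config {
  @(note @getter) val a = 1    // meta-annotation at the use site: note lands on the getter only
  @(note @setter) var b = 1    // ... or on the setter only
  @note val c = 1              // no meta-annotations: default targets / keepClean decide
}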
class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
def mkThis = This(clazz) setPos clazz.pos.focus
- def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(Select(mkThis, sym))
+ def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(
+ if (clazz.isClass) Select(This(clazz), sym) else Ident(sym)
+ )
private def isOverride(name: TermName) =
clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz))
@@ -99,19 +47,24 @@ trait MethodSynthesis {
overrideFlag | SYNTHETIC
}
def newMethodFlags(method: Symbol) = {
- val overrideFlag = if (isOverride(method.name)) OVERRIDE else 0L
+ val overrideFlag = if (isOverride(method.name.toTermName)) OVERRIDE else 0L
(method.flags | overrideFlag | SYNTHETIC) & ~DEFERRED
}
private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
- localTyper typed ValOrDefDef(method, f(method))
+ localTyper typed (
+ if (method.isLazy) ValDef(method, f(method))
+ else DefDef(method, f(method))
+ )
private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
- val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ val name1 = name.toTermName
+ val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
finishMethod(m setInfoAndEnter info, f)
}
private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = {
- val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ val name1 = name.toTermName
+ val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
finishMethod(m setInfoAndEnter infoFn(m), f)
}
private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = {
@@ -119,22 +72,9 @@ trait MethodSynthesis {
finishMethod(clazz.info.decls enter m, f)
}
- private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree =
- cloneInternal(original, f, original.name)
-
def clazzMember(name: Name) = clazz.info nonPrivateMember name
def typeInClazz(sym: Symbol) = clazz.thisType memberType sym
- /** Function argument takes the newly created method symbol of
- * the same type as `name` in clazz, and returns the tree to be
- * added to the template.
- */
- def overrideMethod(name: Name)(f: Symbol => Tree): Tree =
- overrideMethod(clazzMember(name))(f)
-
- def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree =
- cloneInternal(original, sym => f(sym setFlag OVERRIDE))
-
def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree =
cloneInternal(original, f, nameFn(original.name))
@@ -151,9 +91,9 @@ trait MethodSynthesis {
createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident)))
def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = {
- createMethod(name, List(IntClass.tpe), returnType) { m =>
+ createMethod(name, List(IntTpe), returnType) { m =>
val arg0 = Ident(m.firstParam)
- val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0)
+ val default = DEFAULT ==> Throw(IndexOutOfBoundsExceptionClass.tpe_*, fn(arg0, nme.toString_))
val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default
Match(arg0, cases)
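
A hedged sketch, in source form, of the tree shape createSwitchMethod builds after this change (the default case now throws with the scrutinee's toString as the message); the names below are illustrative only:

object SwitchShape {
  // e.g. a synthetic positional accessor over 0..2
  def element(n: Int): Any = n match {
    case 0 => "first"
    case 1 => "second"
    case 2 => "third"
    case _ => throw new IndexOutOfBoundsException(n.toString)   // the DEFAULT ==> Throw(...) above
  }
}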
@@ -174,7 +114,7 @@ trait MethodSynthesis {
/** There are two key methods in here.
*
- * 1) Enter methods such as enterGetterSetterare called
+ * 1) Enter methods such as enterGetterSetter are called
* from Namer with a tree which may generate further trees such as accessors or
* implicit wrappers. Some setup is performed. In general this creates symbols
* and enters them into the scope of the owner.
@@ -219,14 +159,46 @@ trait MethodSynthesis {
enterBeans(tree)
}
+ /** This is called for those ValDefs which addDerivedTrees ignores, but
+ * which might have a warnable annotation situation.
+ */
+ private def warnForDroppedAnnotations(tree: Tree) {
+ val annotations = tree.symbol.initialize.annotations
+ val targetClass = defaultAnnotationTarget(tree)
+ val retained = deriveAnnotations(annotations, targetClass, keepClean = true)
+
+ annotations filterNot (retained contains _) foreach (ann => issueAnnotationWarning(tree, ann, targetClass))
+ }
+ private def issueAnnotationWarning(tree: Tree, ann: AnnotationInfo, defaultTarget: Symbol) {
+ global.reporter.warning(ann.pos,
+ s"no valid targets for annotation on ${tree.symbol} - it is discarded unused. " +
+ s"You may specify targets with meta-annotations, e.g. @($ann @${defaultTarget.name})")
+ }
+
def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match {
case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) =>
// If we don't save the annotations, they seem to wander off.
val annotations = stat.symbol.initialize.annotations
- ( allValDefDerived(vd)
+ val trees = (
+ allValDefDerived(vd)
map (acc => atPos(vd.pos.focus)(acc derive annotations))
filterNot (_ eq EmptyTree)
)
+ // Verify each annotation landed safely somewhere, else warn.
+      // Skipping the check when isParamAccessor is set is a necessary simplification
+ // because there's a bunch of unwritten annotation code involving
+ // the propagation of annotations - constructor parameter annotations
+ // may need to make their way to parameters of the constructor as
+ // well as fields of the class, etc.
+ if (!mods.isParamAccessor) annotations foreach (ann =>
+ if (!trees.exists(_.symbol hasAnnotation ann.symbol))
+ issueAnnotationWarning(vd, ann, GetterTargetClass)
+ )
+
+ trees
+ case vd: ValDef =>
+ warnForDroppedAnnotations(vd)
+ vd :: Nil
case cd @ ClassDef(mods, _, _, _) if mods.isImplicit =>
val annotations = stat.symbol.initialize.annotations
// TODO: need to shuffle annotations between wrapper and class.
@@ -253,8 +225,7 @@ trait MethodSynthesis {
)
def beanAccessors(vd: ValDef): List[DerivedFromValDef] = {
val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil
- if (forMSIL) Nil
- else if (vd.symbol hasAnnotation BeanPropertyAttr)
+ if (vd.symbol hasAnnotation BeanPropertyAttr)
BeanGetter(vd) :: setter
else if (vd.symbol hasAnnotation BooleanBeanPropertyAttr)
BooleanBeanGetter(vd) :: setter
@@ -276,7 +247,7 @@ trait MethodSynthesis {
* So it's important that creating an instance of Derived does not have a side effect,
* or if it has a side effect, control that it is done only once.
*/
- trait Derived {
+ sealed trait Derived {
/** The tree from which we are deriving a synthetic member. Typically, that's
* given as an argument of the instance. */
@@ -305,22 +276,21 @@ trait MethodSynthesis {
def derivedTree: Tree
}
- trait DerivedFromMemberDef extends Derived {
+ sealed trait DerivedFromMemberDef extends Derived {
def tree: MemberDef
def enclClass: Symbol
// Final methods to make the rest easier to reason about.
final def mods = tree.mods
final def basisSym = tree.symbol
- final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra
}
- trait DerivedFromClassDef extends DerivedFromMemberDef {
+ sealed trait DerivedFromClassDef extends DerivedFromMemberDef {
def tree: ClassDef
final def enclClass = basisSym.owner.enclClass
}
- trait DerivedFromValDef extends DerivedFromMemberDef {
+ sealed trait DerivedFromValDef extends DerivedFromMemberDef {
def tree: ValDef
final def enclClass = basisSym.enclClass
@@ -359,10 +329,10 @@ trait MethodSynthesis {
logDerived(derivedTree)
}
}
- trait DerivedGetter extends DerivedFromValDef {
+ sealed trait DerivedGetter extends DerivedFromValDef {
// TODO
}
- trait DerivedSetter extends DerivedFromValDef {
+ sealed trait DerivedSetter extends DerivedFromValDef {
override def isSetter = true
private def setterParam = derivedSym.paramss match {
case (p :: Nil) :: _ => p
@@ -396,11 +366,11 @@ trait MethodSynthesis {
def name: TermName = tree.name.toTermName
}
- abstract class BaseGetter(tree: ValDef) extends DerivedGetter {
+ sealed abstract class BaseGetter(tree: ValDef) extends DerivedGetter {
def name = tree.name
def category = GetterTargetClass
def flagsMask = GetterFlags
- def flagsExtra = ACCESSOR | ( if (tree.mods.isMutable) 0 else STABLE )
+ def flagsExtra = ACCESSOR.toLong | ( if (tree.mods.isMutable) 0 else STABLE )
override def validate() {
assert(derivedSym != NoSymbol, tree)
@@ -411,12 +381,9 @@ trait MethodSynthesis {
}
}
case class Getter(tree: ValDef) extends BaseGetter(tree) {
- override def derivedSym = (
- if (mods.isDeferred) basisSym
- else basisSym.getter(enclClass)
- )
-
- override def derivedTree: DefDef = {
+ override def derivedSym = if (mods.isDeferred) basisSym else basisSym.getter(enclClass)
+ private def derivedRhs = if (mods.isDeferred) EmptyTree else fieldSelection
+ private def derivedTpt = {
// For existentials, don't specify a type for the getter, even one derived
// from the symbol! This leads to incompatible existentials for the field and
// the getter. Let the typer do all the work. You might think "why only for
@@ -425,29 +392,16 @@ trait MethodSynthesis {
// starts compiling (instead of failing like it's supposed to) because the typer
// expects to be able to identify escaping locals in typedDefDef, and fails to
// spot that brand of them. In other words it's an artifact of the implementation.
- val tpt = derivedSym.tpe.finalResultType match {
- case ExistentialType(_, _) => TypeTree()
- case _ if mods.isDeferred => TypeTree()
+ val tpt = derivedSym.tpe_*.finalResultType.widen match {
+ // Range position errors ensue if we don't duplicate this in some
+          // circumstances (at least: concrete vals with existential types).
+ case ExistentialType(_, _) => TypeTree() setOriginal (tree.tpt.duplicate setPos tree.tpt.pos.focus)
+ case _ if mods.isDeferred => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field
case tp => TypeTree(tp)
}
- tpt setPos derivedSym.pos.focus
- // keep type tree of original abstract field
- if (mods.isDeferred)
- tpt setOriginal tree.tpt
-
- // TODO - reconcile this with the DefDef creator in Trees (which
- // at this writing presented no way to pass a tree in for tpt.)
- atPos(derivedSym.pos) {
- DefDef(
- Modifiers(derivedSym.flags),
- derivedSym.name.toTermName,
- Nil,
- Nil,
- tpt,
- if (mods.isDeferred) EmptyTree else gen.mkCheckInit(fieldSelection)
- ) setSymbol derivedSym
- }
+ tpt setPos tree.tpt.pos.focus
}
+ override def derivedTree: DefDef = newDefDef(derivedSym, derivedRhs)(tpt = derivedTpt)
}
/** Implements lazy value accessors:
* - for lazy values of type Unit and all lazy fields inside traits,
@@ -458,7 +412,7 @@ trait MethodSynthesis {
case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) {
class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol)
extends ChangeOwnerTraverser(oldowner, newowner) {
-
+
override def traverse(tree: Tree) {
tree match {
case _: DefTree => change(tree.symbol.moduleClass)
@@ -478,8 +432,8 @@ trait MethodSynthesis {
if (tree.symbol.owner.isTrait || hasUnitType(basisSym)) rhs1
else gen.mkAssignAndReturn(basisSym, rhs1)
)
- derivedSym.setPos(tree.pos) // cannot set it at createAndEnterSymbol because basisSym can possible stil have NoPosition
- val ddefRes = atPos(tree.pos)(DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body)))
+      derivedSym setPos tree.pos // cannot set it at createAndEnterSymbol because basisSym can possibly still have NoPosition
+ val ddefRes = DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body))
// ValDef will have its position focused whereas DefDef will have original correct rangepos
// ideally positions would be correct at the creation time but lazy vals are really a special case
// here so for the sake of keeping api clean we fix positions manually in LazyValGetter
@@ -489,7 +443,7 @@ trait MethodSynthesis {
}
}
case class Setter(tree: ValDef) extends DerivedSetter {
- def name = nme.getterToSetter(tree.name)
+ def name = tree.setterName
def category = SetterTargetClass
def flagsMask = SetterFlags
def flagsExtra = ACCESSOR
@@ -497,7 +451,7 @@ trait MethodSynthesis {
override def derivedSym = basisSym.setter(enclClass)
}
case class Field(tree: ValDef) extends DerivedFromValDef {
- def name = nme.getterToLocal(tree.name)
+ def name = tree.localName
def category = FieldTargetClass
def flagsMask = FieldFlags
def flagsExtra = PrivateLocal
@@ -528,7 +482,7 @@ trait MethodSynthesis {
def flagsExtra = 0
override def derivedSym = enclClass.info decl name
}
- trait AnyBeanGetter extends BeanAccessor with DerivedGetter {
+ sealed trait AnyBeanGetter extends BeanAccessor with DerivedGetter {
def category = BeanGetterTargetClass
override def validate() {
if (derivedSym == NoSymbol) {
@@ -558,7 +512,7 @@ trait MethodSynthesis {
// No Symbols available.
private def beanAccessorsFromNames(tree: ValDef) = {
- val ValDef(mods, name, tpt, _) = tree
+ val ValDef(mods, _, _, _) = tree
val hasBP = mods hasAnnotationNamed tpnme.BeanPropertyAnnot
val hasBoolBP = mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot
@@ -575,9 +529,6 @@ trait MethodSynthesis {
}
protected def enterBeans(tree: ValDef) {
- if (forMSIL)
- return
-
val ValDef(mods, name, _, _) = tree
val beans = beanAccessorsFromNames(tree)
if (beans.nonEmpty) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
deleted file mode 100644
index d650762ac1..0000000000
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package typechecker
-
-/** Mode constants.
- */
-trait Modes {
- /** NOmode, EXPRmode and PATTERNmode are mutually exclusive.
- */
- final val NOmode = 0x000
- final val EXPRmode = 0x001
- final val PATTERNmode = 0x002
-
- /** TYPEmode needs a comment. <-- XXX.
- */
- final val TYPEmode = 0x004
-
- /** SCCmode is orthogonal to above. When set we are
- * in the this or super constructor call of a constructor.
- */
- final val SCCmode = 0x008
-
- /** FUNmode is orthogonal to above.
- * When set we are looking for a method or constructor.
- */
- final val FUNmode = 0x010
-
- /** POLYmode is orthogonal to above.
- * When set expression types can be polymorphic.
- */
- final val POLYmode = 0x020
-
- /** QUALmode is orthogonal to above. When set
- * expressions may be packages and Java statics modules.
- */
- final val QUALmode = 0x040
-
- /** TAPPmode is set for the function/type constructor
- * part of a type application. When set we do not decompose PolyTypes.
- */
- final val TAPPmode = 0x080
-
- /** SUPERCONSTRmode is set for the super
- * in a superclass constructor call super.<init>.
- */
- final val SUPERCONSTRmode = 0x100
-
- /** SNDTRYmode indicates that an application is typed for the 2nd time.
- * In that case functions may no longer be coerced with implicit views.
- */
- final val SNDTRYmode = 0x200
-
- /** LHSmode is set for the left-hand side of an assignment.
- */
- final val LHSmode = 0x400
-
- /** STARmode is set when star patterns are allowed.
- * (This was formerly called REGPATmode.)
- */
- final val STARmode = 0x1000
-
- /** ALTmode is set when we are under a pattern alternative.
- */
- final val ALTmode = 0x2000
-
- /** HKmode is set when we are typing a higher-kinded type.
- * adapt should then check kind-arity based on the prototypical type's
- * kind arity. Type arguments should not be inferred.
- */
- final val HKmode = 0x4000 // @M: could also use POLYmode | TAPPmode
-
- /** BYVALmode is set when we are typing an expression
- * that occurs in a by-value position. An expression e1 is in by-value
- * position within expression e2 iff it will be reduced to a value at that
- * position during the evaluation of e2. Examples are by-value function
- * arguments or the conditional of an if-then-else clause.
- * This mode has been added to support continuations.
- */
- final val BYVALmode = 0x8000
-
- /** TYPEPATmode is set when we are typing a type in a pattern.
- */
- final val TYPEPATmode = 0x10000
-
- /** RETmode is set when we are typing a return expression.
- */
- final val RETmode = 0x20000
-
- final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode
-
- final def onlyStickyModes(mode: Int) =
- mode & StickyModes
-
- final def forFunMode(mode: Int) =
- mode & (StickyModes | SCCmode) | FUNmode | POLYmode | BYVALmode
-
- final def forTypeMode(mode: Int) =
- if (inAnyMode(mode, PATTERNmode | TYPEPATmode)) TYPEmode | TYPEPATmode
- else TYPEmode
-
- final def inAllModes(mode: Int, required: Int) = (mode & required) == required
- final def inAnyMode(mode: Int, required: Int) = (mode & required) != 0
- final def inNoModes(mode: Int, prohibited: Int) = (mode & prohibited) == 0
- final def inHKMode(mode: Int) = (mode & HKmode) != 0
- final def inFunMode(mode: Int) = (mode & FUNmode) != 0
- final def inPolyMode(mode: Int) = (mode & POLYmode) != 0
- final def inPatternMode(mode: Int) = (mode & PATTERNmode) != 0
- final def inExprModeOr(mode: Int, others: Int) = (mode & (EXPRmode | others)) != 0
- final def inExprModeButNot(mode: Int, prohibited: Int) =
- (mode & (EXPRmode | prohibited)) == EXPRmode
-
- /** Translates a mask of mode flags into something readable.
- */
- private val modeNameMap = Map[Int, String](
- (1 << 0) -> "EXPRmode",
- (1 << 1) -> "PATTERNmode",
- (1 << 2) -> "TYPEmode",
- (1 << 3) -> "SCCmode",
- (1 << 4) -> "FUNmode",
- (1 << 5) -> "POLYmode",
- (1 << 6) -> "QUALmode",
- (1 << 7) -> "TAPPmode",
- (1 << 8) -> "SUPERCONSTRmode",
- (1 << 9) -> "SNDTRYmode",
- (1 << 10) -> "LHSmode",
- (1 << 11) -> "<DOES NOT EXIST mode>",
- (1 << 12) -> "STARmode",
- (1 << 13) -> "ALTmode",
- (1 << 14) -> "HKmode",
- (1 << 15) -> "BYVALmode",
- (1 << 16) -> "TYPEPATmode"
- )
- def modeString(mode: Int): String =
- if (mode == 0) "NOmode"
- else (modeNameMap filterKeys (bit => inAllModes(mode, bit))).values mkString " "
-}
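
For readers unfamiliar with the idiom, the deleted trait encodes typer modes as raw Int bit masks; a minimal standalone sketch of that style of mode testing, with constants and helper names copied from the removed file:

object ModeBits {
  final val EXPRmode    = 0x001
  final val PATTERNmode = 0x002
  final val TYPEmode    = 0x004

  def inAllModes(mode: Int, required: Int)   = (mode & required) == required
  def inAnyMode(mode: Int, required: Int)    = (mode & required) != 0
  def inNoModes(mode: Int, prohibited: Int)  = (mode & prohibited) == 0
}
// e.g. ModeBits.inAnyMode(ModeBits.EXPRmode | ModeBits.TYPEmode, ModeBits.TYPEmode) == true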
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index bb938074cb..27e8698676 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -8,9 +8,8 @@ package typechecker
import scala.collection.mutable
import scala.annotation.tailrec
-import scala.ref.WeakReference
import symtab.Flags._
-import scala.tools.nsc.io.AbstractFile
+import scala.language.postfixOps
/** This trait declares methods to create symbols and to enter them into scopes.
*
@@ -23,7 +22,7 @@ trait Namers extends MethodSynthesis {
import global._
import definitions._
- private var _lockedCount = 0
+ var _lockedCount = 0
def lockedCount = this._lockedCount
/** Replaces any Idents for which cond is true with fresh TypeTrees().
@@ -36,7 +35,8 @@ trait Namers extends MethodSynthesis {
}
def apply(tree: Tree) = {
val r = transform(tree)
- if (r.exists(_.isEmpty)) TypeTree()
+ if (r exists { case tt: TypeTree => tt.isEmpty case _ => false })
+ TypeTree()
else r
}
}
@@ -49,10 +49,10 @@ trait Namers extends MethodSynthesis {
private class NormalNamer(context: Context) extends Namer(context)
def newNamer(context: Context): Namer = new NormalNamer(context)
- def newNamerFor(context: Context, tree: Tree): Namer =
- newNamer(context.makeNewScope(tree, tree.symbol))
abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer =>
+ // overridden by the presentation compiler
+ def saveDefaultGetter(meth: Symbol, default: Symbol) { }
import NamerErrorGen._
val typer = newTyper(context)
@@ -107,8 +107,8 @@ trait Namers extends MethodSynthesis {
}
protected def owner = context.owner
- private def contextFile = context.unit.source.file
- private def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = {
+ def contextFile = context.unit.source.file
+ def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = {
case ex: TypeError =>
// H@ need to ensure that we handle only cyclic references
TypeSigError(tree, ex)
@@ -122,10 +122,31 @@ trait Namers extends MethodSynthesis {
|| (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor)
|| (vd.name startsWith nme.OUTER)
|| (context.unit.isJava)
+ || isEnumConstant(vd)
)
+
def noFinishGetterSetter(vd: ValDef) = (
(vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this]
- || vd.symbol.isModuleVar)
+ || vd.symbol.isModuleVar
+ || isEnumConstant(vd))
+
+ /** Determines whether this field holds an enum constant.
+ * To qualify, the following conditions must be met:
+ * - The field's class has the ENUM flag set
+ * - The field's class extends java.lang.Enum
+ * - The field has the ENUM flag set
+ * - The field is static
+ * - The field is stable
+ */
+ def isEnumConstant(vd: ValDef) = {
+ val ownerHasEnumFlag =
+ // Necessary to check because scalac puts Java's static members into the companion object
+ // while Scala's enum constants live directly in the class.
+        // We don't check for clazz.superClass == JavaEnumClass, because this causes an illegal
+ // cyclic reference error. See the commit message for details.
+ if (context.unit.isJava) owner.companionClass.hasEnumFlag else owner.hasEnumFlag
+ vd.mods.hasAllFlags(ENUM | STABLE | STATIC) && ownerHasEnumFlag
+ }
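
A hedged restatement of the check above, away from the compiler's flag machinery (flag values are passed in here rather than taken from symtab.Flags), together with the Java shape it is meant to recognize:

object EnumConstantCheck {
  // Target input (Java source): enum Color { RED, GREEN, BLUE }
  // Such constants carry ENUM | STABLE | STATIC, and enterValDef further down in
  // this diff then assigns them a ConstantType, matching the bytecode path.
  def looksLikeEnumConstant(fieldFlags: Long, ownerHasEnumFlag: Boolean)
                           (ENUM: Long, STABLE: Long, STATIC: Long): Boolean = {
    val required = ENUM | STABLE | STATIC
    (fieldFlags & required) == required && ownerHasEnumFlag
  }
}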
def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): T =
if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym
@@ -135,7 +156,8 @@ trait Namers extends MethodSynthesis {
setPrivateWithin(tree, sym, tree.mods)
def inConstructorFlag: Long = {
- val termOwnedContexts: List[Context] = context.enclosingContextChain.takeWhile(_.owner.isTerm)
+ val termOwnedContexts: List[Context] =
+ context.enclosingContextChain.takeWhile(c => c.owner.isTerm && !c.owner.isAnonymousFunction)
val constructorNonSuffix = termOwnedContexts exists (c => c.owner.isConstructor && !c.inConstructorSuffix)
val earlyInit = termOwnedContexts exists (_.owner.isEarlyInitialized)
if (constructorNonSuffix || earlyInit) INCONSTRUCTOR else 0L
@@ -150,7 +172,7 @@ trait Namers extends MethodSynthesis {
sym reset NoType setFlag newFlags setPos pos
sym.moduleClass andAlso (updatePosFlags(_, pos, moduleClassFlags(flags)))
- if (sym.owner.isPackageClass) {
+ if (sym.isTopLevel) {
companionSymbolOf(sym, context) andAlso { companion =>
val assignNoType = companion.rawInfo match {
case _: SymLoader => true
@@ -173,21 +195,24 @@ trait Namers extends MethodSynthesis {
else innerNamer
}
+ // FIXME - this logic needs to be thoroughly explained
+    // and justified. I know it's wrong with respect to package
+ // objects, but I think it's also wrong in other ways.
protected def conflict(newS: Symbol, oldS: Symbol) = (
( !oldS.isSourceMethod
|| nme.isSetterName(newS.name)
- || newS.owner.isPackageClass
+ || newS.isTopLevel
) &&
!( // @M: allow repeated use of `_` for higher-order type params
(newS.owner.isTypeParameter || newS.owner.isAbstractType)
// FIXME: name comparisons not successful, are these underscores
// sometimes nme.WILDCARD and sometimes tpnme.WILDCARD?
- && (newS.name.toString == nme.WILDCARD.toString)
+ && (newS.name string_== nme.WILDCARD)
)
)
private def allowsOverload(sym: Symbol) = (
- sym.isSourceMethod && sym.owner.isClass && !sym.owner.isPackageClass
+ sym.isSourceMethod && sym.owner.isClass && !sym.isTopLevel
)
private def inCurrentScope(m: Symbol): Boolean = {
@@ -200,6 +225,19 @@ trait Namers extends MethodSynthesis {
/** Enter symbol into given scope and return symbol itself */
def enterInScope(sym: Symbol, scope: Scope): Symbol = {
+ // FIXME - this is broken in a number of ways.
+ //
+ // 1) If "sym" allows overloading, that is not itself sufficient to skip
+ // the check, because "prev.sym" also must allow overloading.
+ //
+ // 2) There is nothing which reconciles a package's scope with
+ // the package object's scope. This is the source of many bugs
+ // with e.g. defining a case class in a package object. When
+ // compiling against classes, the class symbol is created in the
+ // package and in the package object, and the conflict is undetected.
+ // There is also a non-deterministic outcome for situations like
+ // an object with the same name as a method in the package object.
+
// allow for overloaded methods
if (!allowsOverload(sym)) {
val prev = scope.lookupEntry(sym.name)
@@ -226,7 +264,12 @@ trait Namers extends MethodSynthesis {
validate(sym2.companionClass)
}
- def enterSym(tree: Tree): Context = {
+ def enterSym(tree: Tree): Context = pluginsEnterSym(this, tree)
+
+ /** Default implementation of `enterSym`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsEnterSym for more details)
+ */
+ def standardEnterSym(tree: Tree): Context = {
def dispatch() = {
var returnContext = this.context
tree match {
@@ -239,7 +282,7 @@ trait Namers extends MethodSynthesis {
case DocDef(_, defn) => enterSym(defn)
case tree @ Import(_, _) =>
assignSymbol(tree)
- returnContext = context.makeNewImport(tree)
+ returnContext = context.make(tree)
case _ =>
}
returnContext
@@ -275,10 +318,13 @@ trait Namers extends MethodSynthesis {
}
private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = {
- sym.name.toTermName match {
+ if (isPastTyper) sym.name.toTermName match {
case nme.IMPORT | nme.OUTER | nme.ANON_CLASS_NAME | nme.ANON_FUN_NAME | nme.CONSTRUCTOR => ()
case _ =>
- log("[+symbol] " + sym.debugLocationString)
+ tree match {
+ case md: DefDef => log("[+symbol] " + sym.debugLocationString)
+ case _ =>
+ }
}
tree.symbol = sym
sym
@@ -289,7 +335,7 @@ trait Namers extends MethodSynthesis {
* be transferred to the symbol as they are, supply a mask containing
* the flags to keep.
*/
- private def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = {
+ def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = {
val pos = tree.pos
val isParameter = tree.mods.isParameter
val flags = tree.mods.flags & mask
@@ -300,21 +346,21 @@ trait Namers extends MethodSynthesis {
case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => owner.newConstructor(pos, flags)
case DefDef(_, _, _, _, _, _) => owner.newMethod(name.toTermName, pos, flags)
case ClassDef(_, _, _, _) => owner.newClassSymbol(name.toTypeName, pos, flags)
- case ModuleDef(_, _, _) => owner.newModule(name, pos, flags)
+ case ModuleDef(_, _, _) => owner.newModule(name.toTermName, pos, flags)
case PackageDef(pid, _) => createPackageSymbol(pos, pid)
case ValDef(_, _, _, _) =>
- if (isParameter) owner.newValueParameter(name, pos, flags)
- else owner.newValue(name, pos, flags)
+ if (isParameter) owner.newValueParameter(name.toTermName, pos, flags)
+ else owner.newValue(name.toTermName, pos, flags)
}
}
- private def createFieldSymbol(tree: ValDef): TermSymbol =
- owner.newValue(nme.getterToLocal(tree.name), tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
+ def createFieldSymbol(tree: ValDef): TermSymbol =
+ owner.newValue(tree.localName, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
- private def createImportSymbol(tree: Tree) =
+ def createImportSymbol(tree: Tree) =
NoSymbol.newImport(tree.pos) setInfo completerOf(tree)
/** All PackageClassInfoTypes come from here. */
- private def createPackageSymbol(pos: Position, pid: RefTree): Symbol = {
+ def createPackageSymbol(pos: Position, pid: RefTree): Symbol = {
val pkgOwner = pid match {
case Ident(_) => if (owner.isEmptyPackageClass) rootMirror.RootClass else owner
case Select(qual: RefTree, _) => createPackageSymbol(pos, qual).moduleClass
@@ -335,11 +381,10 @@ trait Namers extends MethodSynthesis {
}
private def enterClassSymbol(tree: ClassDef, clazz: ClassSymbol): Symbol = {
- val file = contextFile
if (clazz.sourceFile != null && clazz.sourceFile != contextFile)
- debugwarn("!!! Source mismatch in " + clazz + ": " + clazz.sourceFile + " vs. " + contextFile)
+ devWarning(s"Source file mismatch in $clazz: ${clazz.sourceFile} vs. $contextFile")
- clazz.sourceFile = contextFile
+ clazz.associatedFile = contextFile
if (clazz.sourceFile != null) {
assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(clazz), clazz.sourceFile)
currentRun.symSource(clazz) = clazz.sourceFile
@@ -353,7 +398,7 @@ trait Namers extends MethodSynthesis {
val existing = context.scope.lookup(tree.name)
val isRedefinition = (
existing.isType
- && existing.owner.isPackageClass
+ && existing.isTopLevel
&& context.scope == existing.owner.info.decls
&& currentRun.canRedefine(existing)
)
@@ -366,21 +411,19 @@ trait Namers extends MethodSynthesis {
else assignAndEnterSymbol(tree) setFlag inConstructorFlag
}
clazz match {
- case csym: ClassSymbol if csym.owner.isPackageClass => enterClassSymbol(tree, csym)
- case _ => clazz
+ case csym: ClassSymbol if csym.isTopLevel => enterClassSymbol(tree, csym)
+ case _ => clazz
}
}
/** Given a ClassDef or ModuleDef, verifies there isn't a companion which
* has been defined in a separate file.
*/
- private def validateCompanionDefs(tree: ImplDef) {
- val sym = tree.symbol
- if (sym eq NoSymbol) return
-
+ def validateCompanionDefs(tree: ImplDef) {
+ val sym = tree.symbol orElse { return }
val ctx = if (context.owner.isPackageObjectClass) context.outer else context
- val module = if (sym.isModule) sym else ctx.scope lookup tree.name.toTermName
- val clazz = if (sym.isClass) sym else ctx.scope lookup tree.name.toTypeName
+ val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name
+ val clazz = if (sym.isClass) sym else ctx.scope lookupClass tree.name
val fails = (
module.isModule
&& clazz.isClass
@@ -408,11 +451,10 @@ trait Namers extends MethodSynthesis {
sym
}
- /** Enter a module symbol. The tree parameter can be either
- * a module definition or a class definition.
+ /** Enter a module symbol.
*/
def enterModuleSymbol(tree : ModuleDef): Symbol = {
- var m: Symbol = context.scope lookupAll tree.name find (_.isModule) getOrElse NoSymbol
+ var m: Symbol = context.scope lookupModule tree.name
val moduleFlags = tree.mods.flags | MODULE
if (m.isModule && !m.isPackage && inCurrentScope(m) && (currentRun.canRedefine(m) || m.isSynthetic)) {
updatePosFlags(m, tree.pos, moduleFlags)
@@ -426,8 +468,8 @@ trait Namers extends MethodSynthesis {
m.moduleClass setFlag moduleClassFlags(moduleFlags)
setPrivateWithin(tree, m.moduleClass)
}
- if (m.owner.isPackageClass && !m.isPackage) {
- m.moduleClass.sourceFile = contextFile
+ if (m.isTopLevel && !m.isPackage) {
+ m.moduleClass.associatedFile = contextFile
currentRun.symSource(m) = m.moduleClass.sourceFile
registerTopLevelSym(m)
}
@@ -450,7 +492,13 @@ trait Namers extends MethodSynthesis {
* class definition tree.
* @return the companion object symbol.
*/
- def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = {
+ def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol =
+ pluginsEnsureCompanionObject(this, cdef, creator)
+
+ /** Default implementation of `ensureCompanionObject`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsEnsureCompanionObject for more details)
+ */
+ def standardEnsureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = {
val m = companionSymbolOf(cdef.symbol, context)
// @luc: not sure why "currentRun.compiles(m)" is needed, things breaks
// otherwise. documentation welcome.
@@ -489,7 +537,7 @@ trait Namers extends MethodSynthesis {
typer.permanentlyHiddenWarning(pos, to0, e.sym)
else if (context ne context.enclClass) {
val defSym = context.prefix.member(to) filter (
- sym => sym.exists && context.isAccessible(sym, context.prefix, false))
+ sym => sym.exists && context.isAccessible(sym, context.prefix, superAccess = false))
defSym andAlso (typer.permanentlyHiddenWarning(pos, to0, _))
}
@@ -509,7 +557,7 @@ trait Namers extends MethodSynthesis {
if (from != nme.WILDCARD && base != ErrorType) {
if (isValid(from)) {
// for Java code importing Scala objects
- if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) {
+ if (!nme.isModuleName(from) || isValid(from.dropModule)) {
typer.TyperErrorGen.NotAMemberError(tree, expr, from)
}
}
@@ -546,8 +594,8 @@ trait Namers extends MethodSynthesis {
val sym = copyDef.symbol
val lazyType = completerOf(copyDef)
- /** Assign the types of the class parameters to the parameters of the
- * copy method. See comment in `Unapplies.caseClassCopyMeth` */
+ /* Assign the types of the class parameters to the parameters of the
+ * copy method. See comment in `Unapplies.caseClassCopyMeth` */
def assignParamTypes() {
val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
@@ -587,28 +635,13 @@ trait Namers extends MethodSynthesis {
}
}
- def enterIfNotThere(sym: Symbol) {
- val scope = context.scope
- @tailrec def search(e: ScopeEntry) {
- if ((e eq null) || (e.owner ne scope))
- scope enter sym
- else if (e.sym ne sym) // otherwise, aborts since we found sym
- search(e.tail)
- }
- search(scope lookupEntry sym.name)
- }
-
def enterValDef(tree: ValDef) {
if (noEnterGetterSetter(tree))
assignAndEnterFinishedSymbol(tree)
else
enterGetterSetter(tree)
- // When java enums are read from bytecode, they are known to have
- // constant types by the jvm flag and assigned accordingly. When
- // they are read from source, the java parser marks them with the
- // STABLE flag, and now we receive that signal.
- if (tree.symbol hasAllFlags STABLE | JAVA)
+ if (isEnumConstant(tree))
tree.symbol setInfo ConstantType(Constant(tree.symbol))
}
@@ -620,7 +653,7 @@ trait Namers extends MethodSynthesis {
// via "x$lzy" as can be seen in test #3927.
val sym = (
if (owner.isClass) createFieldSymbol(tree)
- else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, tree.mods.flags & ~IMPLICIT)
+ else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, (tree.mods.flags | ARTIFACT) & ~IMPLICIT)
)
enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor)
}
@@ -641,7 +674,7 @@ trait Namers extends MethodSynthesis {
case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) =>
assignAndEnterFinishedSymbol(tree)
case DefDef(mods, name, tparams, _, _, _) =>
- val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE else 0
+ val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
if (name == nme.copy && sym.isSynthetic)
@@ -651,15 +684,12 @@ trait Namers extends MethodSynthesis {
}
def enterClassDef(tree: ClassDef) {
- val ClassDef(mods, name, tparams, impl) = tree
+ val ClassDef(mods, _, _, impl) = tree
val primaryConstructorArity = treeInfo.firstConstructorArgs(impl.body).size
tree.symbol = enterClassSymbol(tree)
tree.symbol setInfo completerOf(tree)
if (mods.isCase) {
- if (primaryConstructorArity > MaxFunctionArity)
- MaxParametersCaseClassError(tree)
-
val m = ensureCompanionObject(tree, caseModuleDef)
m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree))
}
@@ -672,7 +702,7 @@ trait Namers extends MethodSynthesis {
m.updateAttachment(new ConstructorDefaultsAttachment(tree, null))
}
val owner = tree.symbol.owner
- if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
+ if (settings.lint && owner.isPackageObjectClass && !mods.isImplicit) {
context.unit.warning(tree.pos,
"it is not recommended to define classes/objects inside of package objects.\n" +
"If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead."
@@ -690,22 +720,9 @@ trait Namers extends MethodSynthesis {
validateCompanionDefs(tree)
}
- // this logic is needed in case typer was interrupted half
- // way through and then comes back to do the tree again. In
- // that case the definitions that were already attributed as
- // well as any default parameters of such methods need to be
- // re-entered in the current scope.
- protected def enterExistingSym(sym: Symbol): Context = {
- if (forInteractive && sym != null && sym.owner.isTerm) {
- enterIfNotThere(sym)
- if (sym.isLazy)
- sym.lazyAccessor andAlso enterIfNotThere
-
- for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
- defAtt.defaultGetters foreach enterIfNotThere
- }
- this.context
- }
+ // Hooks which are overridden in the presentation compiler
+ def enterExistingSym(sym: Symbol): Context = this.context
+ def enterIfNotThere(sym: Symbol) { }
def enterSyntheticSym(tree: Tree): Symbol = {
enterSym(tree)
@@ -715,41 +732,55 @@ trait Namers extends MethodSynthesis {
// --- Lazy Type Assignment --------------------------------------------------
- def initializeLowerBounds(tp: Type): Type = {
+ def findCyclicalLowerBound(tp: Type): Symbol = {
tp match {
case TypeBounds(lo, _) =>
// check that lower bound is not an F-bound
- for (TypeRef(_, sym, _) <- lo)
- sym.initialize
+ // but carefully: class Foo[T <: Bar[_ >: T]] should be allowed
+ for (tp1 @ TypeRef(_, sym, _) <- lo) {
+ if (settings.breakCycles) {
+ if (!sym.maybeInitialize) {
+ log(s"Cycle inspecting $lo for possible f-bounds: ${sym.fullLocationString}")
+ return sym
+ }
+ }
+ else sym.initialize
+ }
case _ =>
}
- tp
+ NoSymbol
}
def monoTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym =>
+ // this early test is there to avoid infinite baseTypes when
+ // adding setters and getters --> bug798
+ // It is a def in an attempt to provide some insulation against
+ // uninitialized symbols misleading us. It is not a certainty
+ // this accomplishes anything, but performance is a non-consideration
+ // on these flag checks so it can't hurt.
+ def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential
logAndValidate(sym) {
- val tp = initializeLowerBounds(typeSig(tree))
+ val tp = typeSig(tree)
+
+ findCyclicalLowerBound(tp) andAlso { sym =>
+ if (needsCycleCheck) {
+ // neg/t1224: trait C[T] ; trait A { type T >: C[T] <: C[C[T]] }
+ // To avoid an infinite loop on the above, we cannot break all cycles
+ log(s"Reinitializing info of $sym to catch any genuine cycles")
+ sym reset sym.info
+ sym.initialize
+ }
+ }
sym setInfo {
if (sym.isJavaDefined) RestrictJavaArraysMap(tp)
else tp
}
- // this early test is there to avoid infinite baseTypes when
- // adding setters and getters --> bug798
- val needsCycleCheck = (sym.isAliasType || sym.isAbstractType) && !sym.isParameter
- if (needsCycleCheck && !typer.checkNonCyclic(tree.pos, tp))
- sym setInfo ErrorType
+ if (needsCycleCheck) {
+ log(s"Needs cycle check: ${sym.debugLocationString}")
+ if (!typer.checkNonCyclic(tree.pos, tp))
+ sym setInfo ErrorType
+ }
}
- // tree match {
- // case ClassDef(_, _, _, impl) =>
- // val parentsOK = (
- // treeInfo.isInterface(sym, impl.body)
- // || (sym eq ArrayClass)
- // || (sym isSubClass AnyValClass)
- // )
- // if (!parentsOK)
- // ensureParent(sym, AnyRefClass)
- // case _ => ()
- // }
}
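
The two shapes contrasted in the comments above, as a hedged standalone sketch: the first is the F-bound-with-lower-bound form the comment says must remain allowed, the second the neg/t1224-style genuine cycle, left commented out because it should not compile:

trait Bar[T]
class Foo[T <: Bar[_ >: T]]     // allowed: the lower bound inside Bar refers back to T

trait C[T]
// trait A { type T >: C[T] <: C[C[T]] }   // rejected as a cyclic reference (neg/t1224)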
def moduleClassTypeCompleter(tree: ModuleDef) = {
@@ -764,7 +795,7 @@ trait Namers extends MethodSynthesis {
def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
logAndValidate(sym) {
sym setInfo {
- val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+ val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitTpe)
else NullaryMethodType(typeSig(tree))
pluginsTypeSigAccessor(tp, typer, tree, sym)
}
@@ -807,31 +838,28 @@ trait Namers extends MethodSynthesis {
case _ =>
false
}
-
- val tpe1 = dropRepeatedParamType(tpe.deconst)
- val tpe2 = tpe1.widen
-
- // This infers Foo.type instead of "object Foo"
- // See Infer#adjustTypeArgs for the polymorphic case.
- if (tpe.typeSymbolDirect.isModuleClass) tpe1
- else if (sym.isVariable || sym.isMethod && !sym.hasAccessorFlag)
- if (tpe2 <:< pt) tpe2 else tpe1
- else if (isHidden(tpe)) tpe2
- // In an attempt to make pattern matches involving method local vals
- // compilable into switches, for a time I had a more generous condition:
- // `if (sym.isFinal || sym.isLocal) tpe else tpe1`
- // This led to issues with expressions like classOf[List[_]] which apparently
- // depend on being deconst-ed here, so this is again the original:
- else if (!sym.isFinal) tpe1
- else tpe
+ val shouldWiden = (
+ !tpe.typeSymbolDirect.isModuleClass // Infer Foo.type instead of "object Foo"
+ && (tpe.widen <:< pt) // Don't widen our way out of conforming to pt
+ && ( sym.isVariable
+ || sym.isMethod && !sym.hasAccessorFlag
+ || isHidden(tpe)
+ )
+ )
+ dropIllegalStarTypes(
+ if (shouldWiden) tpe.widen
+ else if (sym.isFinal) tpe // "final val" allowed to retain constant type
+ else tpe.deconst
+ )
}
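
A hedged user-level illustration of the widening policy encoded above; the inferred types noted in the comments reflect ordinary Scala inference, not a run of this exact patch:

object WideningDemo {
  final val a = 1    // keeps the constant type Int(1): "final val" retains constant type
  val b = 1          // non-final val: deconst-ed, inferred as Int
  var c = "hi"       // variable: widened, inferred as String
  def d = 1          // non-accessor method: widened, inferred as Int
}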
/** Computes the type of the body in a ValDef or DefDef, and
* assigns the type to the tpt's node. Returns the type.
*/
private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = {
- val rhsTpe =
- if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
- else defnTyper.computeType(tree.rhs, pt)
+ val rhsTpe = tree match {
+ case ddef: DefDef if tree.symbol.isTermMacro => defnTyper.computeMacroDefType(ddef, pt)
+ case _ => defnTyper.computeType(tree.rhs, pt)
+ }
val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt)
tree.tpt defineType defnTpe setPos tree.pos.focus
@@ -841,7 +869,7 @@ trait Namers extends MethodSynthesis {
// owner is the class with the self type
def enterSelf(self: ValDef) {
val ValDef(_, name, tpt, _) = self
- if (self eq emptyValDef)
+ if (self eq noSelfType)
return
val hasName = name != nme.WILDCARD
@@ -851,7 +879,7 @@ trait Namers extends MethodSynthesis {
val sym = (
if (hasType || hasName) {
- owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe
+ owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe_*
val selfSym = owner.thisSym setPos self.pos
if (hasName) selfSym setName name else selfSym
}
@@ -866,16 +894,11 @@ trait Namers extends MethodSynthesis {
private def templateSig(templ: Template): Type = {
val clazz = context.owner
def checkParent(tpt: Tree): Type = {
- val tp = tpt.tpe
- val inheritsSelf = tp.typeSymbol == owner
- if (inheritsSelf)
- InheritsItselfError(tpt)
-
- if (inheritsSelf || tp.isError) AnyRefClass.tpe
- else tp
+ if (tpt.tpe.isError) AnyRefTpe
+ else tpt.tpe
}
- val parents = typer.parentTypes(templ) map checkParent
+ val parents = typer.typedParentTypes(templ) map checkParent
enterSelf(templ.self)
@@ -901,11 +924,10 @@ trait Namers extends MethodSynthesis {
val modClass = companionSymbolOf(clazz, context).moduleClass
modClass.attachments.get[ClassForCaseCompanionAttachment] foreach { cma =>
val cdef = cma.caseClass
- def hasCopy(decls: Scope) = (decls lookup nme.copy) != NoSymbol
+ def hasCopy = (decls containsName nme.copy) || parents.exists(_ member nme.copy exists)
+
// SI-5956 needs (cdef.symbol == clazz): there can be multiple class symbols with the same name
- if (cdef.symbol == clazz && !hasCopy(decls) &&
- !parents.exists(p => hasCopy(p.typeSymbol.info.decls)) &&
- !parents.flatMap(_.baseClasses).distinct.exists(bc => hasCopy(bc.info.decls)))
+ if (cdef.symbol == clazz && !hasCopy)
addCopyMethod(cdef, templateNamer)
}
}
@@ -951,9 +973,9 @@ trait Namers extends MethodSynthesis {
// Assign the moduleClass info (templateSig returns a ClassInfoType)
val clazz = moduleSym.moduleClass
clazz setInfo pluginsTp
- // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+ // clazz.tpe_* returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
// (clazz.info would the ClassInfoType, which is not what should be assigned to the module symbol)
- clazz.tpe
+ clazz.tpe_*
}
/**
@@ -997,7 +1019,7 @@ trait Namers extends MethodSynthesis {
var vparamSymss = enterValueParams(vparamss)
- /**
+ /*
* Creates a method type using tparamSyms and vparamsSymss as argument symbols and `respte` as result type.
* All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
* so the resulting type is a valid external method type, it does not contain (references to) skolems.
@@ -1031,7 +1053,7 @@ trait Namers extends MethodSynthesis {
res.substSym(tparamSkolems, tparamSyms)
}
- /**
+ /*
* Creates a schematic method type which has WildcardTypes for non specified
* return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
* type schema is
@@ -1055,7 +1077,7 @@ trait Namers extends MethodSynthesis {
// def overriddenSymbol = meth.nextOverriddenSymbol
- /**
+ /*
* If `meth` doesn't have an explicit return type, extracts the return type from the method
   * overridden by `meth` (if there's a unique one). This type is later used as the expected
* type for computing the type of the rhs. The resulting type references type skolems for
@@ -1093,6 +1115,9 @@ trait Namers extends MethodSynthesis {
overriddenTp = overriddenTp.resultType
}
+ // SI-7668 Substitute parameters from the parent method with those of the overriding method.
+ overriddenTp = overriddenTp.substSym(overridden.paramss.flatten, vparamss.flatten.map(_.symbol))
+
overriddenTp match {
case NullaryMethodType(rtpe) => overriddenTp = rtpe
case MethodType(List(), rtpe) => overriddenTp = rtpe
@@ -1111,7 +1136,7 @@ trait Namers extends MethodSynthesis {
}
if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe
+ tpt defineType context.enclClass.owner.tpe_*
tpt setPos meth.pos.focus
}
@@ -1136,7 +1161,7 @@ trait Namers extends MethodSynthesis {
}
}
- addDefaultGetters(meth, vparamss, tparams, overriddenSymbol(methResTp))
+ addDefaultGetters(meth, ddef, vparamss, tparams, overriddenSymbol(methResTp))
// fast track macros, i.e. macros defined inside the compiler, are hardcoded
// hence we make use of that and let them have whatever right-hand side they need
@@ -1147,7 +1172,7 @@ trait Namers extends MethodSynthesis {
// because @macroImpl annotation only gets assigned during typechecking
// otherwise macro defs wouldn't be able to robustly coexist with their clients
// because a client could be typechecked before a macro def that it uses
- if (meth.isTermMacro) {
+ if (meth.isMacro) {
typer.computeMacroDefType(ddef, resTpFromOverride)
}
@@ -1178,7 +1203,12 @@ trait Namers extends MethodSynthesis {
* typechecked, the corresponding param would not yet have the "defaultparam"
* flag.
*/
- private def addDefaultGetters(meth: Symbol, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
+ private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
+ val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetLocalAttrs(ddef.duplicate)
+ // having defs here is important to make sure that there's no sneaky tree sharing
+ // in methods with multiple default parameters
+ def rtparams = rtparams0.map(_.duplicate)
+ def rvparamss = rvparamss0.map(_.map(_.duplicate))
val methOwner = meth.owner
val isConstr = meth.isConstructor
val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
@@ -1186,8 +1216,8 @@ trait Namers extends MethodSynthesis {
// value parameters of the base class (whose defaults might be overridden)
var baseParamss = (vparamss, overridden.tpe.paramss) match {
// match empty and missing parameter list
- case (Nil, List(Nil)) => Nil
- case (List(Nil), Nil) => ListOfNil
+ case (Nil, ListOfNil) => Nil
+ case (ListOfNil, Nil) => ListOfNil
case (_, paramss) => paramss
}
assert(
@@ -1210,23 +1240,36 @@ trait Namers extends MethodSynthesis {
//
vparamss.foldLeft(Nil: List[List[ValDef]]) { (previous, vparams) =>
assert(!overrides || vparams.length == baseParamss.head.length, ""+ meth.fullName + ", "+ overridden.fullName)
+ val rvparams = rvparamss(previous.length)
var baseParams = if (overrides) baseParamss.head else Nil
- for (vparam <- vparams) {
+ map2(vparams, rvparams)((vparam, rvparam) => {
val sym = vparam.symbol
// true if the corresponding parameter of the base class has a default argument
val baseHasDefault = overrides && baseParams.head.hasDefault
if (sym.hasDefault) {
- // generate a default getter for that argument
+ // Create a "default getter", i.e. a DefDef that will calculate vparam.rhs
+ // for those who are going to call meth without providing an argument corresponding to vparam.
+ // After the getter is created, a corresponding synthetic symbol is created and entered into the parent namer.
+ //
+ // In the ideal world, this DefDef would be a simple one-liner that just returns vparam.rhs,
+ // but in scalac things are complicated in two different ways.
+ //
+ // 1) Because the underlying language is quite sophisticated, we must allow for those sophistications in our getter.
+ // Namely: a) our getter has to copy type parameters from the associated method (or the associated class
+ // if meth is a constructor), because vparam.rhs might refer to one of them, b) our getter has to copy
+ // preceding value parameter lists from the associated method, because again vparam.rhs might refer to one of them.
+ //
+ // 2) Because we have already assigned symbols to type and value parameters that we have to copy, we must jump through
+        // hoops in order to destroy them and allow subsequent naming to create new symbols for our getter. Previously this
+ // was done in an overly brutal way akin to resetAllAttrs, but now we utilize a resetLocalAttrs-based approach.
+ // Still far from ideal, but at least enables things like run/macro-default-params that were previously impossible.
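
As a hedged source-level sketch of the getter this block creates (the `$default$N` name follows nme.defaultGetterName; the method below is purely illustrative):

object DefaultGetterShape {
  def greet[T](prefix: String, suffix: String = prefix + "!") = prefix + suffix
  // Conceptually the namer adds a getter that copies the type parameters and the
  // preceding value parameter list, since the default's rhs may refer to them:
  //   def greet$default$2[T](prefix: String): String = prefix + "!"
}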
+
val oflag = if (baseHasDefault) OVERRIDE else 0
val name = nme.defaultGetterName(meth.name, posCounter)
- // Create trees for the defaultGetter. Uses tools from Unapplies.scala
- var deftParams = tparams map copyUntyped[TypeDef]
- val defvParamss = mmap(previous) { p =>
- // in the default getter, remove the default parameter
- val p1 = atPos(p.pos.focus) { ValDef(p.mods &~ DEFAULTPARAM, p.name, p.tpt.duplicate, EmptyTree) }
- UnTyper.traverse(p1)
- p1
+ var defTparams = rtparams
+ val defVparamss = mmap(rvparamss.take(previous.length)){ rvp =>
+ copyValDef(rvp)(mods = rvp.mods &~ DEFAULTPARAM, rhs = EmptyTree)
}
val parentNamer = if (isConstr) {
@@ -1248,7 +1291,8 @@ trait Namers extends MethodSynthesis {
return // fix #3649 (prevent crash in erroneous source code)
}
}
- deftParams = cdef.tparams map copyUntypedInvariant
+ val ClassDef(_, _, rtparams, _) = resetLocalAttrs(cdef.duplicate)
+ defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT)))
nmr
}
else ownerNamer getOrElse {
@@ -1259,47 +1303,45 @@ trait Namers extends MethodSynthesis {
nmr
}
- // If the parameter type mentions any type parameter of the method, let the compiler infer the
- // return type of the default getter => allow "def foo[T](x: T = 1)" to compile.
- // This is better than always using Wildcard for inferring the result type, for example in
- // def f(i: Int, m: Int => Int = identity _) = m(i)
- // if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable.
- val names = deftParams map { case TypeDef(_, name, _, _) => name }
- val subst = new TypeTreeSubstituter(names contains _)
-
- val defTpt = subst(copyUntyped(vparam.tpt match {
- // default getter for by-name params
- case AppliedTypeTree(_, List(arg)) if sym.hasFlag(BYNAMEPARAM) => arg
- case t => t
- }))
- val defRhs = copyUntyped(vparam.rhs)
+ val defTpt =
+ // don't mess with tpt's of case copy default getters, because assigning something other than TypeTree()
+ // will break the carefully orchestrated naming/typing logic that involves enterCopyMethod and caseClassCopyMeth
+ if (meth.isCaseCopy) TypeTree()
+ else {
+ // If the parameter type mentions any type parameter of the method, let the compiler infer the
+ // return type of the default getter => allow "def foo[T](x: T = 1)" to compile.
+ // This is better than always using Wildcard for inferring the result type, for example in
+ // def f(i: Int, m: Int => Int = identity _) = m(i)
+ // if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable.
+ // TODO: this is a very brittle approach; I sincerely hope that Denys's research into hygiene
+ // will open the doors to a much better way of doing this kind of stuff
+ val tparamNames = defTparams map { case TypeDef(_, name, _, _) => name }
+ val eraseAllMentionsOfTparams = new TypeTreeSubstituter(tparamNames contains _)
+ eraseAllMentionsOfTparams(rvparam.tpt match {
+ // default getter for by-name params
+ case AppliedTypeTree(_, List(arg)) if sym.hasFlag(BYNAMEPARAM) => arg
+ case t => t
+ })
+ }
+ val defRhs = rvparam.rhs
val defaultTree = atPos(vparam.pos.focus) {
- DefDef(
- Modifiers(meth.flags & DefaultGetterFlags) | SYNTHETIC | DEFAULTPARAM | oflag,
- name, deftParams, defvParamss, defTpt, defRhs)
+ DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags)) | oflag, name, defTparams, defVparamss, defTpt, defRhs)
}
if (!isConstr)
methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
- if (forInteractive && default.owner.isTerm) {
- // save the default getters as attachments in the method symbol. if compiling the
- // same local block several times (which can happen in interactive mode) we might
- // otherwise not find the default symbol, because the second time it the method
- // symbol will be re-entered in the scope but the default parameter will not.
- val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
- case Some(att) => att.defaultGetters += default
- case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
- }
- }
- } else if (baseHasDefault) {
+ if (default.owner.isTerm)
+ saveDefaultGetter(meth, default)
+ }
+ else if (baseHasDefault) {
// the parameter does not have a default itself, but the
// corresponding parameter in the base class does.
sym.setFlag(DEFAULTPARAM)
}
posCounter += 1
if (overrides) baseParams = baseParams.tail
- }
+ })
if (overrides) baseParamss = baseParamss.tail
previous :+ vparams
}
@@ -1358,20 +1400,22 @@ trait Namers extends MethodSynthesis {
private def importSig(imp: Import) = {
val Import(expr, selectors) = imp
val expr1 = typer.typedQualifier(expr)
- typer checkStable expr1
+
if (expr1.symbol != null && expr1.symbol.isRootPackage)
RootImportError(imp)
if (expr1.isErrorTyped)
ErrorType
else {
+ if (!treeInfo.isStableIdentifierPattern(expr1))
+ typer.TyperErrorGen.UnstableTreeError(expr1)
+
val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
checkSelectors(newImport)
transformed(imp) = newImport
// copy symbol and type attributes back into old expression
// so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
+ expr setSymbol expr1.symbol setType expr1.tpe
ImportType(expr1)
}
}
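
A hedged user-level example of the stability requirement enforced above: an import qualifier must be a stable identifier, so a var-backed prefix is rejected.

object StabilityDemo {
  object Stable { val x = 1 }
  var unstable = Stable

  import Stable.x           // fine: Stable is a stable path
  // import unstable.x      // rejected: "stable identifier required" (unstable is a var)
  def use = x
}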
@@ -1393,7 +1437,9 @@ trait Namers extends MethodSynthesis {
if (!cdef.symbol.hasAbstractFlag)
namer.enterSyntheticSym(caseModuleApplyMeth(cdef))
- namer.enterSyntheticSym(caseModuleUnapplyMeth(cdef))
+ val primaryConstructorArity = treeInfo.firstConstructorArgs(cdef.impl.body).size
+ if (primaryConstructorArity <= MaxTupleArity)
+ namer.enterSyntheticSym(caseModuleUnapplyMeth(cdef))
}
def addCopyMethod(cdef: ClassDef, namer: Namer) {
@@ -1407,12 +1453,12 @@ trait Namers extends MethodSynthesis {
*/
def typeSig(tree: Tree): Type = {
// log("typeSig " + tree)
- /** For definitions, transform Annotation trees to AnnotationInfos, assign
- * them to the sym's annotations. Type annotations: see Typer.typedAnnotated
- * We have to parse definition annotations here (not in the typer when traversing
- * the MemberDef tree): the typer looks at annotations of certain symbols; if
- * they were added only in typer, depending on the compilation order, they may
- * or may not be visible.
+ /* For definitions, transform Annotation trees to AnnotationInfos, assign
+ * them to the sym's annotations. Type annotations: see Typer.typedAnnotated
+ * We have to parse definition annotations here (not in the typer when traversing
+ * the MemberDef tree): the typer looks at annotations of certain symbols; if
+ * they were added only in typer, depending on the compilation order, they may
+ * or may not be visible.
*/
def annotate(annotated: Symbol) = {
// typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter
@@ -1425,7 +1471,7 @@ trait Namers extends MethodSynthesis {
annCtx.setReportErrors()
// need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892.
AnnotationInfo lazily {
- beforeTyper(newTyper(annCtx) typedAnnotation ann)
+ enteringTyper(newTyper(annCtx) typedAnnotation ann)
}
}
if (ainfos.nonEmpty) {
@@ -1477,12 +1523,6 @@ trait Namers extends MethodSynthesis {
tpe
}
- def ensureParent(clazz: Symbol, parent: Symbol) = {
- val info0 = clazz.info
- val info1 = includeParent(info0, parent)
- if (info0 ne info1) clazz setInfo info1
- }
-
class LogTransitions[S](onEnter: S => String, onExit: S => String) {
val enabled = settings.debug.value
@inline final def apply[T](entity: S)(body: => T): T = {
@@ -1512,8 +1552,8 @@ trait Namers extends MethodSynthesis {
private object RestrictJavaArraysMap extends TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, ArrayClass, List(elemtp))
- if elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectClass.tpe) =>
- TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, ObjectClass.tpe))))
+ if elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectTpe) =>
+ TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, ObjectTpe))))
case _ =>
mapOver(tp)
}
@@ -1535,7 +1575,7 @@ trait Namers extends MethodSynthesis {
AbstractMemberWithModiferError(sym, flag)
}
def checkNoConflict(flag1: Int, flag2: Int) {
- if (sym hasAllFlags flag1 | flag2)
+ if (sym hasAllFlags flag1.toLong | flag2)
IllegalModifierCombination(sym, flag1, flag2)
}
if (sym.isImplicit) {
@@ -1543,7 +1583,7 @@ trait Namers extends MethodSynthesis {
fail(ImplicitConstr)
if (!(sym.isTerm || (sym.isClass && !sym.isTrait)))
fail(ImplicitNotTermOrClass)
- if (sym.owner.isPackageClass)
+ if (sym.isTopLevel)
fail(ImplicitAtToplevel)
}
if (sym.isClass) {
@@ -1609,7 +1649,7 @@ trait Namers extends MethodSynthesis {
val tree: Tree
}
- def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter {
+ def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter with FlagAgnosticCompleter {
val tree = t
def completeImpl(sym: Symbol) = c(sym)
}
@@ -1651,7 +1691,7 @@ trait Namers extends MethodSynthesis {
// @M an abstract type's type parameters are entered.
// TODO: change to isTypeMember ?
if (defnSym.isAbstractType)
- newNamerFor(ctx, tree) enterSyms tparams //@M
+ newNamer(ctx.makeNewScope(tree, tree.symbol)) enterSyms tparams //@M
restp complete sym
}
}
@@ -1689,13 +1729,6 @@ trait Namers extends MethodSynthesis {
}
}
- @deprecated("Use underlyingSymbol instead", "2.10.0")
- def underlying(member: Symbol): Symbol = underlyingSymbol(member)
- @deprecated("Use `companionSymbolOf` instead", "2.10.0")
- def companionClassOf(module: Symbol, ctx: Context): Symbol = companionSymbolOf(module, ctx)
- @deprecated("Use `companionSymbolOf` instead", "2.10.0")
- def companionModuleOf(clazz: Symbol, ctx: Context): Symbol = companionSymbolOf(clazz, ctx)
-
/** The companion class or companion module of `original`.
* Calling .companionModule does not work for classes defined inside methods.
*
@@ -1705,11 +1738,23 @@ trait Namers extends MethodSynthesis {
* call this method?
*/
def companionSymbolOf(original: Symbol, ctx: Context): Symbol = {
+ val owner = original.owner
+ // SI-7264 Force the info of owners from previous compilation runs.
+ // Doing this generally would trigger cycles; that's what we also
+ // use the lower-level scan through the current Context as a fall back.
+ if (!currentRun.compiles(owner)) owner.initialize
original.companionSymbol orElse {
- ctx.lookup(original.name.companionName, original.owner).suchThat(sym =>
+ ctx.lookup(original.name.companionName, owner).suchThat(sym =>
(original.isTerm || sym.hasModuleFlag) &&
(sym isCoDefinedWith original)
)
}
}
+
+ /** A version of `Symbol#linkedClassOfClass` that works with local companions, ala `companionSymbolOf`. */
+ final def linkedClassOfClassOf(original: Symbol, ctx: Context): Symbol =
+ if (original.isModuleClass)
+ companionSymbolOf(original.sourceModule, ctx)
+ else
+ companionSymbolOf(original, ctx).moduleClass
}
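
The companionSymbolOf / linkedClassOfClassOf helpers above exist because Symbol#companionModule does not find companions of definitions that are local to a method; the namer falls back to a scope lookup through the given Context. A minimal sketch of the source-level situation this covers, with illustrative names only:

    object LocalCompanionDemo {
      def demo(): Int = {
        // Both definitions are local to `demo`, so linking the class to its
        // companion object (e.g. to host the synthetic apply/unapply) relies
        // on the scope-based lookup rather than the top-level companion machinery.
        case class Foo(x: Int)
        object Foo { val marker = 42 }
        Foo(Foo.marker).x
      }
    }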
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 70f2f41ec7..46ff98875f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -8,7 +8,6 @@ package typechecker
import symtab.Flags._
import scala.collection.mutable
-import scala.ref.WeakReference
import scala.reflect.ClassTag
/**
@@ -20,6 +19,7 @@ trait NamesDefaults { self: Analyzer =>
import global._
import definitions._
import NamesDefaultsErrorsGen._
+ import treeInfo.WildcardStarArg
// Default getters of constructors are added to the companion object in the
// typeCompleter of the constructor (methodSig). To compute the signature,
@@ -42,13 +42,11 @@ trait NamesDefaults { self: Analyzer =>
blockTyper: Typer
) { }
- val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null)
-
- def nameOf(arg: Tree) = arg match {
- case AssignOrNamedArg(Ident(name), rhs) => Some(name)
- case _ => None
+ private def nameOfNamedArg(arg: Tree) = Some(arg) collect { case AssignOrNamedArg(Ident(name), _) => name }
+ def isNamedArg(arg: Tree) = arg match {
+ case AssignOrNamedArg(Ident(_), _) => true
+ case _ => false
}
- def isNamed(arg: Tree) = nameOf(arg).isDefined
/** @param pos maps indices from old to new */
def reorderArgs[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
@@ -58,13 +56,13 @@ trait NamesDefaults { self: Analyzer =>
}
/** @param pos maps indices from new to old (!) */
- def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
+ private def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
val argsArray = args.toArray
(argsArray.indices map (i => argsArray(pos(i)))).toList
}
/** returns `true` if every element is equal to its index */
- def isIdentity(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
+ def allArgsArePositional(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
/**
* Transform a function application into a Block, and assigns typer.context
@@ -107,14 +105,14 @@ trait NamesDefaults { self: Analyzer =>
* @return the transformed application (a Block) together with the NamedApplyInfo.
* if isNamedApplyBlock(tree), returns the existing context.namedApplyBlockInfo
*/
- def transformNamedApplication(typer: Typer, mode: Int, pt: Type)
+ def transformNamedApplication(typer: Typer, mode: Mode, pt: Type)
(tree: Tree, argPos: Int => Int): Tree = {
import typer._
import typer.infer._
val context = typer.context
import context.unit
- /**
+ /*
* Transform a function into a block, and passing context.namedApplyBlockInfo to
* the new block as side-effect.
*
@@ -164,14 +162,14 @@ trait NamesDefaults { self: Analyzer =>
// never used for constructor calls, they always have a stable qualifier
def blockWithQualifier(qual: Tree, selected: Name) = {
- val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo uncheckedBounds(qual.tpe) setPos (qual.pos.makeTransparent)
+ val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), newFlags = ARTIFACT) setInfo uncheckedBounds(qual.tpe) setPos (qual.pos.makeTransparent)
blockTyper.context.scope enter sym
val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
// it stays in Vegas: SI-5720, SI-5727
qual changeOwner (blockTyper.context.owner -> sym)
val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name)))
- var baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
+ val baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
// setSymbol below is important because the 'selected' function might be overloaded. by
// assigning the correct method symbol, typedSelect will just assign the type. the reason
// to still call 'typed' is to correctly infer singleton types, SI-5259.
@@ -207,7 +205,7 @@ trait NamesDefaults { self: Analyzer =>
if (module == NoSymbol) None
else {
val ref = atPos(pos.focus)(gen.mkAttributedRef(pre, module))
- if (module.isStable && pre.isStable) // fixes #4524. the type checker does the same for
+ if (treeInfo.admitsTypeSelection(ref)) // fixes #4524. the type checker does the same for
ref.setType(singleType(pre, module)) // typedSelect, it calls "stabilize" on the result.
Some(ref)
}
@@ -262,7 +260,7 @@ trait NamesDefaults { self: Analyzer =>
}
}
- /**
+ /*
* For each argument (arg: T), create a local value
* x$n: T = arg
*
@@ -284,8 +282,8 @@ trait NamesDefaults { self: Analyzer =>
val repeated = isScalaRepeatedParamType(paramTpe)
val argTpe = (
if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
- case _ => seqType(arg.tpe)
+ case WildcardStarArg(expr) => expr.tpe
+ case _ => seqType(arg.tpe)
}
else {
// TODO In 83c9c764b, we tried to a stable type here to fix SI-7234. But the resulting TypeTree over a
@@ -294,7 +292,7 @@ trait NamesDefaults { self: Analyzer =>
arg.tpe
}
).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo {
+ val s = context.owner.newValue(unit.freshTermName(), arg.pos, newFlags = ARTIFACT) setInfo {
val tp = if (byName) functionType(Nil, argTpe) else argTpe
uncheckedBounds(tp)
}
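
These x$n temporaries are what keep argument expressions evaluated in call-site order when named arguments are reordered. A rough sketch of the observable behaviour, with illustrative names:

    object NamedArgOrderDemo {
      def f(a: Int, b: Int): Int = a - b

      // A call such as f(b = 2, a = 1) is conceptually expanded into a block
      // of synthetic vals, one per argument in call-site order, after which
      // the method is applied to them in definition-site order:
      //   { val x$1 = 2; val x$2 = 1; f(x$2, x$1) }
      val viaNames: Int = f(b = 2, a = 1)   // -1, same as f(1, 2)
    }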
@@ -311,11 +309,8 @@ trait NamesDefaults { self: Analyzer =>
} else {
new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502
if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) =>
- expr
- case _ =>
- val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
- blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
+ case WildcardStarArg(expr) => expr
+ case _ => blockTyper typed gen.mkSeqApply(resetLocalAttrs(arg))
} else arg
}
Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
@@ -334,7 +329,7 @@ trait NamesDefaults { self: Analyzer =>
assert(isNamedApplyBlock(transformedFun), transformedFun)
val NamedApplyInfo(qual, targs, vargss, blockTyper) =
context.namedApplyBlockInfo.get._2
- val existingBlock @ Block(stats, funOnly) = transformedFun
+ val Block(stats, funOnly) = transformedFun
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
@@ -382,7 +377,9 @@ trait NamesDefaults { self: Analyzer =>
}
}
- def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOf _): (List[Symbol], Boolean) = {
+ def makeNamedTypes(syms: List[Symbol]) = syms map (sym => NamedType(sym.name, sym.tpe))
+
+ def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOfNamedArg _): (List[Symbol], Boolean) = {
val namedArgs = args.dropWhile(arg => {
val n = argName(arg)
n.isEmpty || params.forall(p => p.name != n.get)
@@ -417,7 +414,7 @@ trait NamesDefaults { self: Analyzer =>
// TODO #3649 can create spurious errors when companion object is gone (because it becomes unlinked from scope)
if (defGetter == NoSymbol) None // prevent crash in erroneous trees, #3649
else {
- var default1 = qual match {
+ var default1: Tree = qual match {
case Some(q) => gen.mkAttributedSelect(q.duplicate, defGetter)
case None => gen.mkAttributedRef(defGetter)
@@ -463,20 +460,6 @@ trait NamesDefaults { self: Analyzer =>
} else NoSymbol
}
- private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = {
- val savedParams = context.extractUndetparams()
- val savedReporting = context.ambiguousErrors
-
- context.setAmbiguousErrors(false)
- try fn(savedParams)
- finally {
- context.setAmbiguousErrors(savedReporting)
- //@M note that we don't get here when an ambiguity was detected (during the computation of res),
- // as errorTree throws an exception
- context.undetparams = savedParams
- }
- }
-
/** A full type check is very expensive; let's make sure there's a name
* somewhere which could potentially be ambiguous before we go that route.
*/
@@ -491,12 +474,10 @@ trait NamesDefaults { self: Analyzer =>
// def f[T](x: T) = x
// var x = 0
// f(x = 1) << "x = 1" typechecks with expected type WildcardType
- savingUndeterminedTParams(context) { udp =>
+ val udp = context.undetparams
+ context.savingUndeterminedTypeParams(reportAmbiguous = false) {
val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
- override def apply(tp: Type): Type = super.apply(tp match {
- case TypeRef(_, ByNameParamClass, x :: Nil) => x
- case _ => tp
- })
+ override def apply(tp: Type): Type = super.apply(dropByName(tp))
}
// This throws an exception which is caught in `tryTypedApply` (as it
// uses `silent`) - unfortunately, tryTypedApply recovers from the
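
The probe above exists because of the ambiguity already spelled out in the comment (def f[T](x: T) = x; var x = 0; f(x = 1)). A self-contained restatement of that shape, as an illustration only:

    object NamedArgOrAssignment {
      def f[T](x: T): T = x
      var x = 0

      // In argument position, "x = 1" can be read either as a named argument
      // for parameter x or as an assignment to the local var x (of type Unit).
      // The check above type checks the assignment reading with undetermined
      // type parameters replaced by WildcardType before committing to either
      // interpretation.
      // f(x = 1)   // the ambiguous shape this machinery disambiguates
    }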
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
new file mode 100644
index 0000000000..41c656f8ce
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -0,0 +1,374 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package typechecker
+
+import scala.collection.mutable
+import symtab.Flags
+import Mode._
+
+ /**
+ *
+ * A pattern match such as
+ *
+ * x match { case Foo(a, b) => ...}
+ *
+ * might match an instance of any of the following definitions of Foo.
+ * Note the analogous treatment between case classes and unapplies.
+ *
+ * case class Foo(xs: Int*)
+ * case class Foo(a: Int, xs: Int*)
+ * case class Foo(a: Int, b: Int)
+ * case class Foo(a: Int, b: Int, xs: Int*)
+ *
+ * object Foo { def unapplySeq(x: Any): Option[Seq[Int]] }
+ * object Foo { def unapplySeq(x: Any): Option[(Int, Seq[Int])] }
+ * object Foo { def unapply(x: Any): Option[(Int, Int)] }
+ * object Foo { def unapplySeq(x: Any): Option[(Int, Int, Seq[Int])] }
+ */
+
+trait PatternTypers {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ private object FixedAndRepeatedTypes {
+ def unapply(types: List[Type]) = types match {
+ case init :+ last if isRepeatedParamType(last) => Some((init, dropRepeated(last)))
+ case _ => Some((types, NoType))
+ }
+ }
+
+ trait PatternTyper {
+ self: Typer =>
+
+ import TyperErrorGen._
+ import infer._
+
+ private def unit = context.unit
+
+ // If the tree's symbol's type does not define an extractor, maybe the tree's type does.
+ // this is the case when we encounter an arbitrary tree as the target of an unapply call
+ // (rather than something that looks like a constructor call.) (for now, this only happens
+ // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
+ // more commonplace)
+ private def hasUnapplyMember(tpe: Type): Boolean = reallyExists(unapplyMember(tpe))
+ private def hasUnapplyMember(sym: Symbol): Boolean = hasUnapplyMember(sym.tpe_*)
+ private def hasUnapplyMember(fun: Tree): Boolean = hasUnapplyMember(fun.symbol) || hasUnapplyMember(fun.tpe)
+
+ // ad-hoc overloading resolution to deal with unapplies and case class constructors
+ // If some but not all alternatives survive filtering the tree's symbol with `p`,
+ // then update the tree's symbol and type to exclude the filtered out alternatives.
+ private def inPlaceAdHocOverloadingResolution(fun: Tree)(p: Symbol => Boolean): Tree = fun.symbol filter p match {
+ case sym if sym.exists && (sym ne fun.symbol) => fun setSymbol sym modifyType (tp => filterOverloadedAlts(tp)(p))
+ case _ => fun
+ }
+ private def filterOverloadedAlts(tpe: Type)(p: Symbol => Boolean): Type = tpe match {
+ case OverloadedType(pre, alts) => overloadedType(pre, alts filter p)
+ case tp => tp
+ }
+
+ def typedConstructorPattern(fun0: Tree, pt: Type): Tree = {
+ // Do some ad-hoc overloading resolution and update the tree's symbol and type
+ // do not update the symbol if the tree's symbol's type does not define an unapply member
+ // (e.g. since it's some method that returns an object with an unapply member)
+ val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember)
+ val caseClass = fun.tpe.typeSymbol.linkedClassOfClass
+ val member = unapplyMember(fun.tpe)
+ def resultType = (fun.tpe memberType member).finalResultType
+ def isEmptyType = resultOfMatchingMethod(resultType, nme.isEmpty)()
+ def isOkay = (
+ resultType.isErroneous
+ || (resultType <:< BooleanTpe)
+ || (isEmptyType <:< BooleanTpe)
+ || member.isMacro
+ || member.isOverloaded // the whole overloading situation is off the rails
+ )
+
+ // Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala
+ // A case class with 23+ params has no unapply method.
+ // A case class constructor may be overloaded with unapply methods in the companion.
+ if (caseClass.isCase && !member.isOverloaded)
+ logResult(s"convertToCaseConstructor($fun, $caseClass, pt=$pt)")(convertToCaseConstructor(fun, caseClass, pt))
+ else if (!reallyExists(member))
+ CaseClassConstructorError(fun, s"${fun.symbol} is not a case class, nor does it have an unapply/unapplySeq member")
+ else if (isOkay)
+ fun
+ else if (isEmptyType == NoType)
+ CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean`")
+ else
+ CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean` (found: def isEmpty: $isEmptyType)")
+ }
+
+ def typedArgsForFormals(args: List[Tree], formals: List[Type], mode: Mode): List[Tree] = {
+ def typedArgWithFormal(arg: Tree, pt: Type) = {
+ val newMode = if (isByNameParamType(pt)) mode.onlySticky else mode.onlySticky | BYVALmode
+ typedArg(arg, mode, newMode, dropByName(pt))
+ }
+ val FixedAndRepeatedTypes(fixed, elem) = formals
+ val front = (args, fixed).zipped map typedArgWithFormal
+ def rest = context withinStarPatterns (args drop front.length map (typedArgWithFormal(_, elem)))
+
+ elem match {
+ case NoType => front
+ case _ => front ::: rest
+ }
+ }
+
+ private def boundedArrayType(bound: Type): Type = {
+ val tparam = context.owner freshExistential "" setInfo (TypeBounds upper bound)
+ newExistentialType(tparam :: Nil, arrayType(tparam.tpe_*))
+ }
+
+ protected def typedStarInPattern(tree: Tree, mode: Mode, pt: Type) = {
+ val Typed(expr, tpt) = tree
+ val exprTyped = typed(expr, mode)
+ val baseClass = exprTyped.tpe.typeSymbol match {
+ case ArrayClass => ArrayClass
+ case _ => SeqClass
+ }
+ val starType = baseClass match {
+ case ArrayClass if isPrimitiveValueType(pt) || !isFullyDefined(pt) => arrayType(pt)
+ case ArrayClass => boundedArrayType(pt)
+ case _ => seqType(pt)
+ }
+ val exprAdapted = adapt(exprTyped, mode, starType)
+ exprAdapted.tpe baseType baseClass match {
+ case TypeRef(_, _, elemtp :: Nil) => treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
+ case _ => setError(tree)
+ }
+ }
+
+ protected def typedInPattern(tree: Typed, mode: Mode, pt: Type) = {
+ val Typed(expr, tpt) = tree
+ val tptTyped = typedType(tpt, mode)
+ val tpe = tptTyped.tpe
+ val exprTyped = typed(expr, mode, tpe.deconst)
+ val extractor = extractorForUncheckedType(tpt.pos, tpe)
+
+ val canRemedy = tpe match {
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case RefinedType(parents, _) if parents exists isUncheckable => false
+ case _ => extractor.nonEmpty
+ }
+
+ val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy)
+ val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) setType ownType
+
+ extractor match {
+ case EmptyTree => treeTyped
+ case _ => wrapClassTagUnapply(treeTyped, extractor, tpe)
+ }
+ }
+ private class VariantToSkolemMap extends TypeMap(trackVariance = true) {
+ private val skolemBuffer = mutable.ListBuffer[TypeSymbol]()
+
+ // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
+ // Test case which presently requires the exclusion is run/gadts.scala.
+ def eligible(tparam: Symbol) = (
+ tparam.isTypeParameterOrSkolem
+ && tparam.owner.isTerm
+ && (settings.strictInference || !variance.isInvariant)
+ )
+
+ def skolems = try skolemBuffer.toList finally skolemBuffer.clear()
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case tp @ TypeRef(NoPrefix, tpSym, Nil) if eligible(tpSym) =>
+ val bounds = (
+ if (variance.isInvariant) tpSym.tpeHK.bounds
+ else if (variance.isPositive) TypeBounds.upper(tpSym.tpeHK)
+ else TypeBounds.lower(tpSym.tpeHK)
+ )
+ // origin must be the type param so we can deskolemize
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?" + tpSym.name), tpSym, bounds)
+ skolemBuffer += skolem
+ logResult(s"Created gadt skolem $skolem: ${skolem.tpe_*} to stand in for $tpSym")(skolem.tpe_*)
+ case tp1 => tp1
+ }
+ }
+
+ /*
+ * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
+ * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
+ *
+ * Consider the following example:
+ *
+ * class AbsWrapperCov[+A]
+ * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
+ *
+ * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
+ * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
+ * wrapped // : Wrapped[_ <: T]
+ * }
+ *
+ * this method should type check if and only if Wrapped is covariant in its type parameter
+ *
+ * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
+ * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
+ * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
+ *
+ * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
+ * we can simply replace skolems that represent method type parameters as seen from the method's body
+ * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
+ * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
+ *
+ * see test/files/../t5189*.scala
+ */
+ private def convertToCaseConstructor(tree: Tree, caseClass: Symbol, ptIn: Type): Tree = {
+ def untrustworthyPt = (
+ ptIn =:= AnyTpe
+ || ptIn =:= NothingTpe
+ || settings.strictInference && ptIn.typeSymbol != caseClass
+ )
+ val variantToSkolem = new VariantToSkolemMap
+ val caseClassType = tree.tpe.prefix memberType caseClass
+ val caseConstructorType = caseClassType memberType caseClass.primaryConstructor
+ val tree1 = TypeTree(caseConstructorType) setOriginal tree
+ val pt = if (untrustworthyPt) caseClassType else ptIn
+
+ // have to open up the existential and put the skolems in scope
+ // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
+ val ptSafe = logResult(s"case constructor from (${tree.summaryString}, $caseClassType, $pt)")(variantToSkolem(pt))
+ val freeVars = variantToSkolem.skolems
+
+ // use "tree" for the context, not context.tree: don't make another CaseDef context,
+ // as instantiateTypeVar's bounds would end up there
+ val ctorContext = context.makeNewScope(tree, context.owner)
+ freeVars foreach ctorContext.scope.enter
+ newTyper(ctorContext).infer.inferConstructorInstance(tree1, caseClass.typeParams, ptSafe)
+
+ // simplify types without losing safety,
+ // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
+ val extrapolator = new ExistentialExtrapolation(freeVars)
+ def extrapolate(tp: Type) = extrapolator extrapolate tp
+
+ // once the containing CaseDef has been type checked (see typedCase),
+ // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
+ tree1 modifyType {
+ case MethodType(ctorArgs, restpe) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
+ copyMethodType(tree1.tpe, ctorArgs map (_ modifyInfo extrapolate), extrapolate(restpe)) // no need to clone ctorArgs, this is OUR method type
+ case tp => tp
+ }
+ }
+
+ def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
+ def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
+ def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
+
+ if (args.length > MaxTupleArity)
+ return duplErrorTree(TooManyArgsPatternError(fun))
+
+ def freshArgType(tp: Type): Type = tp match {
+ case MethodType(param :: _, _) => param.tpe
+ case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe))(polyType)
+ case OverloadedType(_, _) => OverloadedUnapplyError(fun) ; ErrorType
+ case _ => UnapplyWithSingleArgError(fun) ; ErrorType
+ }
+ val unapplyMethod = unapplyMember(fun.tpe)
+ val unapplyType = fun.tpe memberType unapplyMethod
+ val unapplyParamType = firstParamType(unapplyType)
+ def isSeq = unapplyMethod.name == nme.unapplySeq
+
+ def extractor = extractorForUncheckedType(fun.pos, unapplyParamType)
+ def canRemedy = unapplyParamType match {
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case RefinedType(parents, _) if parents exists isUncheckable => false
+ case _ => extractor.nonEmpty
+ }
+
+ def freshUnapplyArgType(): Type = {
+ val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree))
+ val unapplyContext = context.makeNewScope(context.tree, context.owner)
+ freeVars foreach unapplyContext.scope.enter
+ val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy)
+ // turn any unresolved type variables in freevars into existential skolems
+ val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
+ pattp.substSym(freeVars, skolems)
+ }
+
+ val unapplyArg = (
+ context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, Flags.SYNTHETIC) setInfo (
+ if (isApplicableSafe(Nil, unapplyType, pt :: Nil, WildcardType)) pt
+ else freshUnapplyArgType()
+ )
+ )
+ val unapplyArgTree = Ident(unapplyArg) updateAttachment SubpatternsAttachment(args)
+
+ // clearing the type is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapplyMethod), unapplyArgTree :: Nil))
+
+ def makeTypedUnApply() = {
+ // the greatest lower bound (intersection) of the expected type and the inferred type of the argument to unapply
+ val glbType = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil)
+ val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass)
+ val formals = patmat.alignPatterns(fun1, args).unexpandedFormals
+ val args1 = typedArgsForFormals(args, formals, mode)
+ val result = UnApply(fun1, args1) setPos tree.pos setType glbType
+
+ if (wrapInTypeTest)
+ wrapClassTagUnapply(result, extractor, glbType)
+ else
+ result
+ }
+
+ if (fun1.tpe.isErroneous)
+ duplErrTree
+ else if (unapplyMethod.isMacro && !fun1.isInstanceOf[Apply]) {
+ if (isBlackbox(unapplyMethod)) duplErrorTree(BlackboxExtractorExpansion(tree))
+ else duplErrorTree(WrongShapeExtractorExpansion(tree))
+ } else
+ makeTypedUnApply()
+ }
+
+ def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
+ // TODO: disable when in unchecked match
+ // we don't create a new Context for a Match, so find the CaseDef,
+ // then go out one level and navigate back to the match that has this case
+ val args = List(uncheckedPattern)
+ val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
+ // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
+ // and re-typechecks of the target of the unapply call in PATTERNmode,
+ // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object,
+ // but an arbitrary tree as is the case here
+ val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
+
+ log(sm"""
+ |wrapClassTagUnapply {
+ | pattern: $uncheckedPattern
+ | extract: $classTagExtractor
+ | pt: $pt
+ | res: $res
+ |}""".trim)
+
+ res
+ }
+
+ // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
+ // return the corresponding extractor (an instance of ClassTag[`pt`])
+ def extractorForUncheckedType(pos: Position, pt: Type): Tree = {
+ if (isPastTyper || (pt eq NoType)) EmptyTree else {
+ pt match {
+ case RefinedType(parents, decls) if !decls.isEmpty || (parents exists isUncheckable) => return EmptyTree
+ case _ =>
+ }
+ // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
+ // but at least make a proper type before passing it elsewhere
+ val pt1 = pt.dealiasWiden match {
+ case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
+ case pt1 => pt1
+ }
+ if (isCheckable(pt1)) EmptyTree
+ else resolveClassTag(pos, pt1) match {
+ case tree if unapplyMember(tree.tpe).exists => tree
+ case _ => devWarning(s"Cannot create runtime type test for $pt1") ; EmptyTree
+ }
+ }
+ }
+ }
+} \ No newline at end of file
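
For reference, a small, self-contained illustration of the extractor shapes enumerated in the header comment of this new file; the object and method names below are illustrative, not part of the compiler:

    object ExtractorShapesDemo {
      // Case-class constructor pattern: the synthesized unapply is used.
      case class Pair(a: Int, b: Int)

      // Hand-written unapplySeq, the variable-arity shape from the list above.
      object Xs {
        def unapplySeq(x: Any): Option[Seq[Int]] = x match {
          case xs: List[_] => Some(xs.collect { case i: Int => i })
          case _           => None
        }
      }

      def describe(x: Any): String = x match {
        case Pair(a, b)          => s"pair($a, $b)"
        case Xs(a, b, rest @ _*) => s"seq starting with $a, $b (+${rest.size} more)"
        case _                   => "no match"
      }
    }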
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 081f7a8696..4ba8d56da0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -59,7 +59,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
override def changesBaseClasses = false
override def transformInfo(sym: Symbol, tp: Type): Type = {
- if (sym.isModule && !sym.isStatic) sym setFlag (lateMETHOD | STABLE)
+ // !!! This is a sketchy way to do things.
+ // It would be better to replace the module symbol with a method symbol
+ // rather than creating this module/method hybrid which must be special
+ // cased all over the place. Look for the call sites which use(d) some
+ // variation of "isMethod && !isModule", which to an observer looks like
+ // a nonsensical condition. (It is now "isModuleNotMethod".)
+ if (sym.isModule && !sym.isStatic) {
+ sym setFlag lateMETHOD | STABLE
+ // Note that, as far as we can see, it works equally well
+ // to set the METHOD flag here and dump lateMETHOD, but it does
+ // mean that under separate compilation the typer will see
+ // modules as methods (albeit stable ones with singleton types.)
+ // So for now lateMETHOD lives while we try to convince ourselves
+ // we can live without it or deliver that info some other way.
+ log(s"Stabilizing module method for ${sym.fullLocationString}")
+ }
super.transformInfo(sym, tp)
}
@@ -71,7 +86,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
)
- def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.normalize, tp2.normalize) match {
+ def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match {
case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
rtp1 <:< rtp2
case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
@@ -95,28 +110,29 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
- var localTyper: analyzer.Typer = typer;
+ var localTyper: analyzer.Typer = typer
var currentApplication: Tree = EmptyTree
var inPattern: Boolean = false
+ @inline final def savingInPattern[A](body: => A): A = {
+ val saved = inPattern
+ try body finally inPattern = saved
+ }
+
var checkedCombinations = Set[List[Type]]()
// only one overloaded alternative is allowed to define default arguments
- private def checkOverloadedRestrictions(clazz: Symbol): Unit = {
+ private def checkOverloadedRestrictions(clazz: Symbol, defaultClass: Symbol): Unit = {
// Using the default getters (such as methodName$default$1) as a cheap way of
// finding methods with default parameters. This way, we can limit the members to
// those with the DEFAULTPARAM flag, and infer the methods. Looking for the methods
// directly requires inspecting the parameter list of every one. That modification
// shaved 95% off the time spent in this method.
- val defaultGetters = clazz.info.findMembers(0L, DEFAULTPARAM)
+ val defaultGetters = defaultClass.info.findMembers(excludedFlags = PARAM, requiredFlags = DEFAULTPARAM)
val defaultMethodNames = defaultGetters map (sym => nme.defaultGetterToMethod(sym.name))
defaultMethodNames.toList.distinct foreach { name =>
- val methods = clazz.info.findMember(name, 0L, METHOD, false).alternatives
- def hasDefaultParam(tpe: Type): Boolean = tpe match {
- case MethodType(params, restpe) => (params exists (_.hasDefault)) || hasDefaultParam(restpe)
- case _ => false
- }
- val haveDefaults = methods filter (sym => hasDefaultParam(sym.info) && !nme.isProtectedAccessorName(sym.name))
+ val methods = clazz.info.findMember(name, 0L, requiredFlags = METHOD, stableOnly = false).alternatives
+ val haveDefaults = methods filter (sym => mexists(sym.info.paramss)(_.hasDefault) && !nme.isProtectedAccessorName(sym.name))
if (haveDefaults.lengthCompare(1) > 0) {
val owners = haveDefaults map (_.owner)
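
The restriction enforced here (only one overloaded alternative may define default arguments) can be stated with a small example; the names are illustrative and the rejected variant is left commented out:

    object OverloadedDefaultsDemo {
      // Accepted: only one alternative declares defaults, so only one set of
      // greet$default$n getters is synthesized.
      def greet(name: String = "world"): String = s"hello, $name"
      def greet(name: String, punct: Char): String = s"hello, $name$punct"

      // Rejected by the check above, since both alternatives would synthesize
      // greet$default$1 in the same scope:
      // def greet(name: String = "world", excited: Boolean = false): String = ???
    }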
@@ -133,7 +149,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
}
- if (settings.lint.value) {
+
+ // Check for doomed attempt to overload applyDynamic
+ if (clazz isSubClass DynamicClass) {
+ for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.typeParams.length)) {
+ unit.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)")
+ }
+ }
+
+ // This has become noisy with implicit classes.
+ if (settings.lint && settings.developer) {
clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
// implicit classes leave both a module symbol and a method symbol as residue
val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule)
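
The new applyDynamic restriction is easiest to see in user code; a sketch, with illustrative class names, of what is accepted versus rejected:

    import scala.language.dynamics

    object ApplyDynamicOverloadDemo {
      // Accepted: the overloads differ in their number of type parameters,
      // matching the shapes suggested by the error message above.
      class Ok extends Dynamic {
        def applyDynamic[T1](method: String)(arg: T1): String = s"$method($arg)"
        def applyDynamic[T1, T2](method: String)(a: T1, b: T2): String = s"$method($a, $b)"
      }

      // Rejected by the check above (both overloads have one type parameter):
      // class Bad extends Dynamic {
      //   def applyDynamic[T](method: String)(arg: T): String = ???
      //   def applyDynamic[T](method: String)(a: T, b: T): String = ???
      // }
    }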
@@ -187,7 +212,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
// Delaying calling memberType as long as possible
- if (inherited ne NoSymbol) {
+ if (inherited.exists) {
val jtpe = toJavaRepeatedParam(self memberType member)
// this is a bit tortuous: we look for non-private members or bridges
// if we find a bridge everything is OK. If we find another member,
@@ -241,7 +266,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case class MixinOverrideError(member: Symbol, msg: String)
- var mixinOverrideErrors = new ListBuffer[MixinOverrideError]()
+ val mixinOverrideErrors = new ListBuffer[MixinOverrideError]()
def printMixinOverrideErrors() {
mixinOverrideErrors.toList match {
@@ -273,21 +298,26 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
else "")
}
- /** Check that all conditions for overriding `other` by `member`
- * of class `clazz` are met.
+ /* Check that all conditions for overriding `other` by `member`
+ * of class `clazz` are met.
*/
- def checkOverride(member: Symbol, other: Symbol) {
+ def checkOverride(pair: SymbolPair) {
+ import pair._
+ val member = low
+ val other = high
+ def memberTp = lowType
+ def otherTp = highType
+
debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString))
- def memberTp = self.memberType(member)
- def otherTp = self.memberType(other)
- def noErrorType = other.tpe != ErrorType && member.tpe != ErrorType
+ def noErrorType = !pair.isErroneous
def isRootOrNone(sym: Symbol) = sym != null && sym.isRoot || sym == NoSymbol
- def isNeitherInClass = (member.owner != clazz) && (other.owner != clazz)
+ def isNeitherInClass = member.owner != pair.base && other.owner != pair.base
+
def objectOverrideErrorMsg = (
- "overriding " + other.fullLocationString + " with " + member.fullLocationString + ":\n" +
+ "overriding " + high.fullLocationString + " with " + low.fullLocationString + ":\n" +
"an overriding object must conform to the overridden object's class bound" +
- analyzer.foundReqMsg(classBoundAsSeen(member.tpe), classBoundAsSeen(other.tpe))
+ analyzer.foundReqMsg(pair.lowClassBound, pair.highClassBound)
)
def overrideErrorMsg(msg: String): String = {
@@ -299,7 +329,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
infoStringWithLocation(other),
infoStringWithLocation(member)
)
- else if (settings.debug.value)
+ else if (settings.debug)
analyzer.foundReqMsg(member.tpe, other.tpe)
else ""
@@ -353,8 +383,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
- /** Is the intersection between given two lists of overridden symbols empty?
- */
+ /* Is the intersection between given two lists of overridden symbols empty? */
def intersectionIsEmpty(syms1: List[Symbol], syms2: List[Symbol]) =
!(syms1 exists (syms2 contains _))
@@ -378,12 +407,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (!isOverrideAccessOK) {
overrideAccessError()
} else if (other.isClass) {
- overrideError("cannot be used here - class definitions cannot be overridden");
+ overrideError("cannot be used here - class definitions cannot be overridden")
} else if (!other.isDeferred && member.isClass) {
- overrideError("cannot be used here - classes can only override abstract types");
+ overrideError("cannot be used here - classes can only override abstract types")
} else if (other.isEffectivelyFinal) { // (1.2)
- overrideError("cannot override final member");
- } else if (!other.isDeferredOrDefault && !member.isAnyOverride && !member.isSynthetic) { // (*)
+ overrideError("cannot override final member")
+ } else if (!other.isDeferredOrDefault && !other.hasFlag(DEFAULTMETHOD) && !member.isAnyOverride && !member.isSynthetic) { // (*)
// (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
// the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
if (isNeitherInClass && !(other.owner isSubClass member.owner))
@@ -400,7 +429,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
// !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here.
// !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches.
- if (!settings.overrideVars.value)
+ if (!settings.overrideVars)
overrideError("cannot override a mutable variable")
}
else if (member.isAnyOverride &&
@@ -418,13 +447,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
member.isValue && !member.isLazy) {
overrideError("must be declared lazy to override a concrete lazy value")
} else if (other.isDeferred && member.isTermMacro && member.extendedOverriddenSymbols.forall(_.isDeferred)) { // (1.9)
- overrideError("cannot override an abstract method")
+ overrideError("cannot be used here - term macros cannot override abstract methods")
} else if (other.isTermMacro && !member.isTermMacro) { // (1.10)
- overrideError("cannot override a macro")
+ overrideError("cannot be used here - only term macros can override term macros")
} else {
checkOverrideTypes()
checkOverrideDeprecated()
- if (settings.warnNullaryOverride.value) {
+ if (settings.warnNullaryOverride) {
if (other.paramss.isEmpty && !member.paramss.isEmpty) {
unit.warning(member.pos, "non-nullary method overrides nullary method")
}
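
A minimal pair that triggers the "non-nullary method overrides nullary method" warning checked above; the names are illustrative and the exact lint flag spelling is assumed rather than shown here:

    trait Nullary { def size: Int }      // declared without a parameter list
    class NonNullary extends Nullary {
      def size(): Int = 0                // non-nullary override: warned, not rejected
    }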
@@ -432,76 +461,72 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
- def checkOverrideTypes() {
- if (other.isAliasType) {
- //if (!member.typeParams.isEmpty) (1.5) @MAT
- // overrideError("may not be parameterized");
- //if (!other.typeParams.isEmpty) (1.5) @MAT
- // overrideError("may not override parameterized type");
- // @M: substSym
-
- if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
- overrideTypeError();
+ //if (!member.typeParams.isEmpty) (1.5) @MAT
+ // overrideError("may not be parameterized");
+ //if (!other.typeParams.isEmpty) (1.5) @MAT
+ // overrideError("may not override parameterized type");
+ // @M: substSym
+ def checkOverrideAlias() {
+ if (pair.sameKind && lowType.substSym(low.typeParams, high.typeParams) =:= highType) ()
+ else overrideTypeError() // (1.6)
+ }
+ //if (!member.typeParams.isEmpty) // (1.7) @MAT
+ // overrideError("may not be parameterized");
+ def checkOverrideAbstract() {
+ if (!(highInfo.bounds containsType lowType)) { // (1.7.1)
+ overrideTypeError(); // todo: do an explaintypes with bounds here
+ explainTypes(_.bounds containsType _, highInfo, lowType)
}
- else if (other.isAbstractType) {
- //if (!member.typeParams.isEmpty) // (1.7) @MAT
- // overrideError("may not be parameterized");
- val otherTp = self.memberInfo(other)
-
- if (!(otherTp.bounds containsType memberTp)) { // (1.7.1)
- overrideTypeError(); // todo: do an explaintypes with bounds here
- explainTypes(_.bounds containsType _, otherTp, memberTp)
- }
-
- // check overriding (abstract type --> abstract type or abstract type --> concrete type member (a type alias))
- // making an abstract type member concrete is like passing a type argument
- val kindErrors = typer.infer.checkKindBounds(List(other), List(memberTp), self, member.owner) // (1.7.2)
-
- if(!kindErrors.isEmpty)
+ // check overriding (abstract type --> abstract type or abstract type --> concrete type member (a type alias))
+ // making an abstract type member concrete is like passing a type argument
+ typer.infer.checkKindBounds(high :: Nil, lowType :: Nil, rootType, low.owner) match { // (1.7.2)
+ case Nil =>
+ case kindErrors =>
unit.error(member.pos,
"The kind of "+member.keyString+" "+member.varianceString + member.nameString+
" does not conform to the expected kind of " + other.defString + other.locationString + "." +
kindErrors.toList.mkString("\n", ", ", ""))
-
- // check a type alias's RHS corresponds to its declaration
- // this overlaps somewhat with validateVariance
- if(member.isAliasType) {
- // println("checkKindBounds" + ((List(member), List(memberTp.normalize), self, member.owner)))
- val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.normalize), self, member.owner)
-
- if(!kindErrors.isEmpty)
+ }
+ // check a type alias's RHS corresponds to its declaration
+ // this overlaps somewhat with validateVariance
+ if (low.isAliasType) {
+ typer.infer.checkKindBounds(low :: Nil, lowType.normalize :: Nil, rootType, low.owner) match {
+ case Nil =>
+ case kindErrors =>
unit.error(member.pos,
- "The kind of the right-hand side "+memberTp.normalize+" of "+member.keyString+" "+
- member.varianceString + member.nameString+ " does not conform to its expected kind."+
+ "The kind of the right-hand side "+lowType.normalize+" of "+low.keyString+" "+
+ low.varianceString + low.nameString+ " does not conform to its expected kind."+
kindErrors.toList.mkString("\n", ", ", ""))
- } else if (member.isAbstractType) {
- if (memberTp.isVolatile && !otherTp.bounds.hi.isVolatile)
- overrideError("is a volatile type; cannot override a type with non-volatile upper bound")
- }
- } else if (other.isTerm) {
- other.cookJavaRawInfo() // #2454
- val memberTp = self.memberType(member)
- val otherTp = self.memberType(other)
- if (!overridesTypeInPrefix(memberTp, otherTp, self)) { // 8
- overrideTypeError()
- explainTypes(memberTp, otherTp)
}
-
- if (member.isStable && !otherTp.isVolatile) {
- if (memberTp.isVolatile)
- overrideError("has a volatile type; cannot override a member with non-volatile type")
- else memberTp.normalize.resultType match {
- case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) =>
- // might mask some inconsistencies -- check overrides
- checkedCombinations += rt.parents
- val tsym = rt.typeSymbol;
- if (tsym.pos == NoPosition) tsym setPos member.pos
- checkAllOverrides(tsym, typesOnly = true)
- case _ =>
- }
+ }
+ else if (low.isAbstractType && lowType.isVolatile && !highInfo.bounds.hi.isVolatile)
+ overrideError("is a volatile type; cannot override a type with non-volatile upper bound")
+ }
+ def checkOverrideTerm() {
+ other.cookJavaRawInfo() // #2454
+ if (!overridesTypeInPrefix(lowType, highType, rootType)) { // 8
+ overrideTypeError()
+ explainTypes(lowType, highType)
+ }
+ if (low.isStable && !highType.isVolatile) {
+ if (lowType.isVolatile)
+ overrideError("has a volatile type; cannot override a member with non-volatile type")
+ else lowType.normalize.resultType match {
+ case rt: RefinedType if !(rt =:= highType) && !(checkedCombinations contains rt.parents) =>
+ // might mask some inconsistencies -- check overrides
+ checkedCombinations += rt.parents
+ val tsym = rt.typeSymbol
+ if (tsym.pos == NoPosition) tsym setPos member.pos
+ checkAllOverrides(tsym, typesOnly = true)
+ case _ =>
}
}
}
+ def checkOverrideTypes() {
+ if (high.isAliasType) checkOverrideAlias()
+ else if (high.isAbstractType) checkOverrideAbstract()
+ else if (high.isTerm) checkOverrideTerm()
+ }
def checkOverrideDeprecated() {
if (other.hasDeprecatedOverridingAnnotation) {
@@ -514,10 +539,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val opc = new overridingPairs.Cursor(clazz)
while (opc.hasNext) {
- //Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
- if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden);
+ if (!opc.high.isClass)
+ checkOverride(opc.currentPair)
- opc.next
+ opc.next()
}
printMixinOverrideErrors()
@@ -549,15 +574,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def uncurryAndErase(tp: Type) = erasure.erasure(sym)(uncurry.transformInfo(sym, tp))
val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
- afterErasure(tp1 matches tp2)
+ exitingErasure(tp1 matches tp2)
})
def ignoreDeferred(member: Symbol) = (
(member.isAbstractType && !member.isFBounded) || (
- member.isJavaDefined &&
- // the test requires afterErasure so shouldn't be
+ // the test requires exitingErasure so shouldn't be
// done if the compiler has no erasure phase available
- (currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol)
+ member.isJavaDefined
+ && (currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol)
)
)
@@ -578,8 +603,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def stubImplementations: List[String] = {
// Grouping missing methods by the declaring class
val regrouped = missingMethods.groupBy(_.owner).toList
- def membersStrings(members: List[Symbol]) =
- members.sortBy("" + _.name) map (m => m.defStringSeenAs(clazz.tpe memberType m) + " = ???")
+ def membersStrings(members: List[Symbol]) = {
+ members foreach fullyInitializeSymbol
+ members.sortBy(_.name) map (m => m.defStringSeenAs(clazz.tpe_* memberType m) + " = ???")
+ }
if (regrouped.tail.isEmpty)
membersStrings(regrouped.head._2)
@@ -718,16 +745,19 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
else if (clazz.isTrait && !(clazz isSubClass AnyValClass)) {
// For non-AnyVal classes, prevent abstract methods in interfaces that override
// final members in Object; see #4431
- for (decl <- clazz.info.decls.iterator) {
- val overridden = decl.overriddenSymbol(ObjectClass)
+ for (decl <- clazz.info.decls) {
+ // Have to use matchingSymbol, not a method involving overridden symbols,
+ // because the scala type system understands that an abstract method here does not
+ // override a concrete method in Object. The jvm, however, does not.
+ val overridden = decl.matchingSymbol(ObjectClass, ObjectTpe)
if (overridden.isFinal)
unit.error(decl.pos, "trait cannot redefine final method from class AnyRef")
}
}
- /** Returns whether there is a symbol declared in class `inclazz`
- * (which must be different from `clazz`) whose name and type
- * seen as a member of `class.thisType` matches `member`'s.
+ /* Returns whether there is a symbol declared in class `inclazz`
+ * (which must be different from `clazz`) whose name and type
+ * seen as a member of `class.thisType` matches `member`'s.
*/
def hasMatchingSym(inclazz: Symbol, member: Symbol): Boolean = {
val isVarargs = hasRepeatedParam(member.tpe)
@@ -739,22 +769,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
matches(member.tpe) || (isVarargs && matches(varargsType))
}
- /** The rules for accessing members which have an access boundary are more
- * restrictive in java than scala. Since java has no concept of package nesting,
- * a member with "default" (package-level) access can only be accessed by members
- * in the exact same package. Example:
+ /* The rules for accessing members which have an access boundary are more
+ * restrictive in java than scala. Since java has no concept of package nesting,
+ * a member with "default" (package-level) access can only be accessed by members
+ * in the exact same package. Example:
*
- * package a.b;
- * public class JavaClass { void foo() { } }
+ * package a.b;
+ * public class JavaClass { void foo() { } }
*
- * The member foo() can be accessed only from members of package a.b, and not
- * nested packages like a.b.c. In the analogous scala class:
+ * The member foo() can be accessed only from members of package a.b, and not
+ * nested packages like a.b.c. In the analogous scala class:
*
- * package a.b
- * class ScalaClass { private[b] def foo() = () }
+ * package a.b
+ * class ScalaClass { private[b] def foo() = () }
*
- * The member IS accessible to classes in package a.b.c. The javaAccessCheck logic
- * is restricting the set of matching signatures according to the above semantics.
+ * The member IS accessible to classes in package a.b.c. The javaAccessCheck logic
+ * is restricting the set of matching signatures according to the above semantics.
*/
def javaAccessCheck(sym: Symbol) = (
!inclazz.isJavaDefined // not a java defined member
@@ -774,7 +804,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
- def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix);
+ def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix)
nonMatching match {
case Nil =>
issueError("")
@@ -801,7 +831,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
for (i <- 0 until seenTypes.length)
seenTypes(i) = Nil
- /** validate all base types of a class in reverse linear order. */
+ /* validate all base types of a class in reverse linear order. */
def register(tp: Type): Unit = {
// if (clazz.fullName.endsWith("Collection.Projection"))
// println("validate base type "+tp)
@@ -829,7 +859,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case tp1 :: tp2 :: _ =>
unit.error(clazz.pos, "illegal inheritance;\n " + clazz +
" inherits different type instances of " + baseClass +
- ":\n" + tp1 + " and " + tp2);
+ ":\n" + tp1 + " and " + tp2)
explainTypes(tp1, tp2)
explainTypes(tp2, tp1)
}
@@ -838,161 +868,14 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Variance Checking --------------------------------------------------------
- private val ContraVariance = -1
- private val NoVariance = 0
- private val CoVariance = 1
- private val AnyVariance = 2
-
- private val escapedPrivateLocals = new mutable.HashSet[Symbol]
-
- val varianceValidator = new Traverser {
-
- /** Validate variance of info of symbol `base` */
- private def validateVariance(base: Symbol) {
- // A flag for when we're in a refinement, meaning method parameter types
- // need to be checked.
- var inRefinement = false
-
- def varianceString(variance: Int): String =
- if (variance == 1) "covariant"
- else if (variance == -1) "contravariant"
- else "invariant";
-
- /** The variance of a symbol occurrence of `tvar`
- * seen at the level of the definition of `base`.
- * The search proceeds from `base` to the owner of `tvar`.
- * Initially the state is covariant, but it might change along the search.
- */
- def relativeVariance(tvar: Symbol): Int = {
- val clazz = tvar.owner
- var sym = base
- var state = CoVariance
- while (sym != clazz && state != AnyVariance) {
- //Console.println("flip: " + sym + " " + sym.isParameter());//DEBUG
- // Flip occurrences of type parameters and parameters, unless
- // - it's a constructor, or case class factory or extractor
- // - it's a type parameter of tvar's owner.
- if (sym.isParameter && !sym.owner.isConstructor && !sym.owner.isCaseApplyOrUnapply &&
- !(tvar.isTypeParameterOrSkolem && sym.isTypeParameterOrSkolem &&
- tvar.owner == sym.owner)) state = -state;
- else if (!sym.owner.isClass ||
- sym.isTerm && ((sym.isPrivateLocal || sym.isProtectedLocal || sym.isSuperAccessor /* super accessors are implicitly local #4345*/) && !(escapedPrivateLocals contains sym))) {
- // return AnyVariance if `sym` is local to a term
- // or is private[this] or protected[this]
- state = AnyVariance
- } else if (sym.isAliasType) {
- // return AnyVariance if `sym` is an alias type
- // that does not override anything. This is OK, because we always
- // expand aliases for variance checking.
- // However, if `sym` does override a type in a base class
- // we have to assume NoVariance, as there might then be
- // references to the type parameter that are not variance checked.
- state = if (sym.isOverridingSymbol) NoVariance else AnyVariance
- }
- sym = sym.owner
- }
- state
- }
-
- /** Validate that the type `tp` is variance-correct, assuming
- * the type occurs itself at variance position given by `variance`
- */
- def validateVariance(tp: Type, variance: Int): Unit = tp match {
- case ErrorType =>
- case WildcardType =>
- case BoundedWildcardType(bounds) =>
- validateVariance(bounds, variance)
- case NoType =>
- case NoPrefix =>
- case ThisType(_) =>
- case ConstantType(_) =>
- // case DeBruijnIndex(_, _) =>
- case SingleType(pre, sym) =>
- validateVariance(pre, variance)
- case TypeRef(pre, sym, args) =>
-// println("validate "+sym+" at "+relativeVariance(sym))
- if (sym.isAliasType/* && relativeVariance(sym) == AnyVariance*/)
- validateVariance(tp.normalize, variance)
- else if (sym.variance != NoVariance) {
- val v = relativeVariance(sym)
- if (v != AnyVariance && sym.variance != v * variance) {
- //Console.println("relativeVariance(" + base + "," + sym + ") = " + v);//DEBUG
- def tpString(tp: Type) = tp match {
- case ClassInfoType(parents, _, clazz) => "supertype "+intersectionType(parents, clazz.owner)
- case _ => "type "+tp
- }
- unit.error(base.pos,
- varianceString(sym.variance) + " " + sym +
- " occurs in " + varianceString(v * variance) +
- " position in " + tpString(base.info) + " of " + base);
- }
- }
- validateVariance(pre, variance)
- // @M for higher-kinded typeref, args.isEmpty
- // However, these args respect variances by construction anyway
- // -- the interesting case is in type application, see checkKindBounds in Infer
- if (args.nonEmpty)
- validateVarianceArgs(args, variance, sym.typeParams)
- case ClassInfoType(parents, decls, symbol) =>
- validateVariances(parents, variance)
- case RefinedType(parents, decls) =>
- validateVariances(parents, variance)
- val saved = inRefinement
- inRefinement = true
- for (sym <- decls)
- validateVariance(sym.info, if (sym.isAliasType) NoVariance else variance)
- inRefinement = saved
- case TypeBounds(lo, hi) =>
- validateVariance(lo, -variance)
- validateVariance(hi, variance)
- case mt @ MethodType(formals, result) =>
- if (inRefinement)
- validateVariances(mt.paramTypes, -variance)
- validateVariance(result, variance)
- case NullaryMethodType(result) =>
- validateVariance(result, variance)
- case PolyType(tparams, result) =>
- // type parameters will be validated separately, because they are defined explicitly.
- validateVariance(result, variance)
- case ExistentialType(tparams, result) =>
- validateVariances(tparams map (_.info), variance)
- validateVariance(result, variance)
- case AnnotatedType(annots, tp, selfsym) =>
- if (!annots.exists(_ matches uncheckedVarianceClass))
- validateVariance(tp, variance)
- }
-
- def validateVariances(tps: List[Type], variance: Int) {
- tps foreach (tp => validateVariance(tp, variance))
- }
-
- def validateVarianceArgs(tps: List[Type], variance: Int, tparams: List[Symbol]) {
- foreach2(tps, tparams)((tp, tparam) => validateVariance(tp, variance * tparam.variance))
- }
-
- validateVariance(base.info, CoVariance)
+ object varianceValidator extends VarianceValidator {
+ private def tpString(tp: Type) = tp match {
+ case ClassInfoType(parents, _, clazz) => "supertype "+intersectionType(parents, clazz.owner)
+ case _ => "type "+tp
}
-
- override def traverse(tree: Tree) {
- tree match {
- case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
- validateVariance(tree.symbol)
- super.traverse(tree)
- // ModuleDefs need not be considered because they have been eliminated already
- case ValDef(_, _, _, _) =>
- if (!tree.symbol.hasLocalFlag)
- validateVariance(tree.symbol)
- case DefDef(_, _, tparams, vparamss, _, _) =>
- // No variance check for object-private/protected methods/values.
- if (!tree.symbol.hasLocalFlag) {
- validateVariance(tree.symbol)
- traverseTrees(tparams)
- traverseTreess(vparamss)
- }
- case Template(_, _, _) =>
- super.traverse(tree)
- case _ =>
- }
+ override def issueVarianceError(base: Symbol, sym: Symbol, required: Variance) {
+ currentRun.currentUnit.error(base.pos,
+ s"${sym.variance} $sym occurs in $required position in ${tpString(base.info)} of $base")
}
}
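
For reference, a minimal user-level sketch (hypothetical code, not part of this patch) of the kind of definition the variance validator above accepts and rejects; the rejected line is what issueVarianceError would report:

    class Box[+A] {
      def get: Option[A] = None    // fine: A occurs in covariant (result) position
      // def put(a: A): Unit = ()  // rejected: covariant type A occurs in contravariant position in type A of value a
    }
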
@@ -1041,7 +924,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val e = currentLevel.scope.lookupEntry(sym.name)
if ((e ne null) && sym == e.sym) {
var l = currentLevel
- while (l.scope != e.owner) l = l.outer;
+ while (l.scope != e.owner) l = l.outer
val symindex = symIndex(sym)
if (l.maxindex < symindex) {
l.refpos = pos
@@ -1057,8 +940,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def apply(tp: Type) = mapOver(tp).normalize
}
- def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint.value) (fn, args) match {
- case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == Option_apply =>
+ def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint) (fn, args) match {
+ case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == currentRun.runDefinitions.Option_apply =>
unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567
case _ =>
}
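
For reference, a hypothetical snippet (not part of this patch) that triggers the SI-6567 check above: the argument to Option.apply is adapted by an implicit view, so a null is wrapped instead of turning into None:

    import scala.language.implicitConversions

    case class Wrapped(s: String)

    object OptionApplyLint {
      implicit def wrap(s: String): Wrapped = Wrapped(s)
      val s: String = null
      // With -Xlint: "Suspicious application of an implicit view (...) in the argument to Option.apply."
      val w: Option[Wrapped] = Option(s)   // evaluates to Some(Wrapped(null)), not None
    }
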
@@ -1067,164 +950,166 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case Object_eq | Object_ne | Object_== | Object_!= | Any_== | Any_!= => true
case _ => false
}
- def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
- case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
- def isReferenceOp = fn.symbol == Object_eq || fn.symbol == Object_ne
- def isNew(tree: Tree) = tree match {
- case Function(_, _)
- | Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
- case _ => false
- }
- def underlyingClass(tp: Type): Symbol = {
- val sym = tp.widen.typeSymbol
- if (sym.isAbstractType) underlyingClass(sym.info.bounds.hi)
- else sym
- }
- val actual = underlyingClass(args.head.tpe)
- val receiver = underlyingClass(qual.tpe)
- def onTrees[T](f: List[Tree] => T) = f(List(qual, args.head))
- def onSyms[T](f: List[Symbol] => T) = f(List(receiver, actual))
-
- // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol`
- def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(args.head.tpe.widen)
-
- /** Symbols which limit the warnings we can issue since they may be value types */
- val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
-
- // Whether def equals(other: Any) has known behavior: it is the default
- // inherited from java.lang.Object, or it is a synthetically generated
- // case equals. TODO - more cases are warnable if the target is a synthetic
- // equals.
- def isUsingWarnableEquals = {
- val m = receiver.info.member(nme.equals_)
- ((m == Object_equals) || (m == Any_equals) || isMethodCaseEquals(m))
- }
- def isMethodCaseEquals(m: Symbol) = m.isSynthetic && m.owner.isCase
- def isCaseEquals = isMethodCaseEquals(receiver.info.member(nme.equals_))
- // Whether this == or != is one of those defined in Any/AnyRef or an overload from elsewhere.
- def isUsingDefaultScalaOp = {
- val s = fn.symbol
- (s == Object_==) || (s == Object_!=) || (s == Any_==) || (s == Any_!=)
- }
- def haveSubclassRelationship = (actual isSubClass receiver) || (receiver isSubClass actual)
-
- // Whether the operands+operator represent a warnable combo (assuming anyrefs)
- // Looking for comparisons performed with ==/!= in combination with either an
- // equals method inherited from Object or a case class synthetic equals (for
- // which we know the logic.)
- def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
- def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
- def isEitherValueClass = actual.isDerivedValueClass || receiver.isDerivedValueClass
- def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
- def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
- def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
- def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
- // test is behind a platform guard
- def isJavaNumber(s: Symbol) = !forMSIL && (s isSubClass JavaNumberClass)
- // includes java.lang.Number if appropriate [SI-5779]
- def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s)
- def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
- // used to short-circuit unrelatedTypes check if both sides are special
- def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
- // unused
- def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
- val nullCount = onSyms(_ filter (_ == NullClass) size)
- def isNonsenseValueClassCompare = (
- !haveSubclassRelationship
- && isUsingDefaultScalaOp
- && isEitherValueClass
- && !isCaseEquals
- )
+ /** Check the sensibility of using the given `equals` to compare `qual` and `other`. */
+ private def checkSensibleEquals(pos: Position, qual: Tree, name: Name, sym: Symbol, other: Tree) = {
+ def isReferenceOp = sym == Object_eq || sym == Object_ne
+ def isNew(tree: Tree) = tree match {
+ case Function(_, _) | Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
+ case _ => false
+ }
+ def underlyingClass(tp: Type): Symbol = {
+ val sym = tp.widen.typeSymbol
+ if (sym.isAbstractType) underlyingClass(sym.info.bounds.hi)
+ else sym
+ }
+ val actual = underlyingClass(other.tpe)
+ val receiver = underlyingClass(qual.tpe)
+ def onTrees[T](f: List[Tree] => T) = f(List(qual, other))
+ def onSyms[T](f: List[Symbol] => T) = f(List(receiver, actual))
+
+ // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol`
+ def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(other.tpe.widen)
+
+ /* Symbols which limit the warnings we can issue since they may be value types */
+ val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
+
+ // Whether def equals(other: Any) has known behavior: it is the default
+ // inherited from java.lang.Object, or it is a synthetically generated
+ // case equals. TODO - more cases are warnable if the target is a synthetic
+ // equals.
+ def isUsingWarnableEquals = {
+ val m = receiver.info.member(nme.equals_)
+ ((m == Object_equals) || (m == Any_equals) || isMethodCaseEquals(m))
+ }
+ def isMethodCaseEquals(m: Symbol) = m.isSynthetic && m.owner.isCase
+ def isCaseEquals = isMethodCaseEquals(receiver.info.member(nme.equals_))
+ // Whether this == or != is one of those defined in Any/AnyRef or an overload from elsewhere.
+ def isUsingDefaultScalaOp = sym == Object_== || sym == Object_!= || sym == Any_== || sym == Any_!=
+ def haveSubclassRelationship = (actual isSubClass receiver) || (receiver isSubClass actual)
+
+ // Whether the operands+operator represent a warnable combo (assuming anyrefs)
+ // Looking for comparisons performed with ==/!= in combination with either an
+ // equals method inherited from Object or a case class synthetic equals (for
+ // which we know the logic.)
+ def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
+ def isEitherNullable = (NullTpe <:< receiver.info) || (NullTpe <:< actual.info)
+ def isEitherValueClass = actual.isDerivedValueClass || receiver.isDerivedValueClass
+ def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
+ def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
+ def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
+ def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
+ def isJavaNumber(s: Symbol) = s isSubClass JavaNumberClass
+ // includes java.lang.Number if appropriate [SI-5779]
+ def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s)
+ def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
+ // used to short-circuit unrelatedTypes check if both sides are special
+ def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
+ val nullCount = onSyms(_ filter (_ == NullClass) size)
+ def isNonsenseValueClassCompare = (
+ !haveSubclassRelationship
+ && isUsingDefaultScalaOp
+ && isEitherValueClass
+ && !isCaseEquals
+ )
- def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
- val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
- unit.warning(pos, "comparing "+what+" using `"+name.decode+"' will always yield " + msg)
- }
- def nonSensible(pre: String, alwaysEqual: Boolean) =
- nonSensibleWarning(pre+"values of types "+typesString, alwaysEqual)
- def nonSensiblyEq() = nonSensible("", true)
- def nonSensiblyNeq() = nonSensible("", false)
- def nonSensiblyNew() = nonSensibleWarning("a fresh object", false)
-
- def unrelatedMsg = name match {
- case nme.EQ | nme.eq => "never compare equal"
- case _ => "always compare unequal"
- }
- def unrelatedTypes() = {
- val weaselWord = if (isEitherValueClass) "" else " most likely"
- unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
- }
+ // Have we already determined that the comparison is non-sensible? I mean, non-sensical?
+ var isNonSensible = false
+
+ def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
+ val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
+ unit.warning(pos, s"comparing $what using `${name.decode}' will always yield $msg")
+ isNonSensible = true
+ }
+ def nonSensible(pre: String, alwaysEqual: Boolean) =
+ nonSensibleWarning(s"${pre}values of types $typesString", alwaysEqual)
+ def nonSensiblyEq() = nonSensible("", alwaysEqual = true)
+ def nonSensiblyNeq() = nonSensible("", alwaysEqual = false)
+ def nonSensiblyNew() = nonSensibleWarning("a fresh object", alwaysEqual = false)
+
+ def unrelatedMsg = name match {
+ case nme.EQ | nme.eq => "never compare equal"
+ case _ => "always compare unequal"
+ }
+ def unrelatedTypes() = if (!isNonSensible) {
+ val weaselWord = if (isEitherValueClass) "" else " most likely"
+ unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
+ }
- if (nullCount == 2) // null == null
+ if (nullCount == 2) // null == null
+ nonSensiblyEq()
+ else if (nullCount == 1) {
+ if (onSyms(_ exists isPrimitiveValueClass)) // null == 5
+ nonSensiblyNeq()
+ else if (onTrees( _ exists isNew)) // null == new AnyRef
+ nonSensiblyNew()
+ }
+ else if (isBoolean(receiver)) {
+ if (!isBoolean(actual) && !isMaybeValue(actual)) // true == 5
+ nonSensiblyNeq()
+ }
+ else if (isUnit(receiver)) {
+ if (isUnit(actual)) // () == ()
nonSensiblyEq()
- else if (nullCount == 1) {
- if (onSyms(_ exists isPrimitiveValueClass)) // null == 5
- nonSensiblyNeq()
- else if (onTrees( _ exists isNew)) // null == new AnyRef
- nonSensiblyNew()
- }
- else if (isBoolean(receiver)) {
- if (!isBoolean(actual) && !isMaybeValue(actual)) // true == 5
+ else if (!isUnit(actual) && !isMaybeValue(actual)) // () == "abc"
+ nonSensiblyNeq()
+ }
+ else if (isNumeric(receiver)) {
+ if (!isNumeric(actual))
+ if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual)) // 5 == "abc"
nonSensiblyNeq()
- }
- else if (isUnit(receiver)) {
- if (isUnit(actual)) // () == ()
- nonSensiblyEq()
- else if (!isUnit(actual) && !isMaybeValue(actual)) // () == "abc"
+ }
+ else if (isWarnable && !isCaseEquals) {
+ if (isNew(qual)) // new X == y
+ nonSensiblyNew()
+ else if (isNew(other) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y
+ nonSensiblyNew()
+ else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) { // object X, Y; X == Y
+ if (isEitherNullable)
+ nonSensible("non-null ", false)
+ else
nonSensiblyNeq()
}
- else if (isNumeric(receiver)) {
- if (!isNumeric(actual) && !forMSIL)
- if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual)) // 5 == "abc"
- nonSensiblyNeq()
+ }
+
+ // warn if one but not the other is a derived value class
+ // this is especially important to enable transitioning from
+ // regular to value classes without silent failures.
+ if (isNonsenseValueClassCompare)
+ unrelatedTypes()
+ // possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
+ else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
+ // better to have lubbed and lost
+ def warnIfLubless(): Unit = {
+ val common = global.lub(List(actual.tpe, receiver.tpe))
+ if (ObjectTpe <:< common)
+ unrelatedTypes()
}
- else if (isWarnable && !isCaseEquals) {
- if (isNew(qual)) // new X == y
- nonSensiblyNew()
- else if (isNew(args.head) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y
- nonSensiblyNew()
- else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) { // object X, Y; X == Y
- if (isEitherNullable)
- nonSensible("non-null ", false)
- else
- nonSensiblyNeq()
+ // warn if actual has a case parent that is not same as receiver's;
+ // if actual is not a case, then warn if no common supertype, as below
+ if (isCaseEquals) {
+ def thisCase = receiver.info.member(nme.equals_).owner
+ actual.info.baseClasses.find(_.isCase) match {
+ case Some(p) if p != thisCase => nonSensible("case class ", false)
+ case None =>
+ // stronger message on (Some(1) == None)
+ //if (receiver.isCase && receiver.isEffectivelyFinal && !(receiver isSubClass actual)) nonSensiblyNeq()
+ //else
+ // if a class, it must be super to thisCase (and receiver) since not <: thisCase
+ if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
+ else if (!haveSubclassRelationship) warnIfLubless()
+ case _ =>
}
}
-
- // warn if one but not the other is a derived value class
- // this is especially important to enable transitioning from
- // regular to value classes without silent failures.
- if (isNonsenseValueClassCompare)
- unrelatedTypes()
- // possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
- else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
- // better to have lubbed and lost
- def warnIfLubless(): Unit = {
- val common = global.lub(List(actual.tpe, receiver.tpe))
- if (ObjectClass.tpe <:< common)
- unrelatedTypes()
- }
- // warn if actual has a case parent that is not same as receiver's;
- // if actual is not a case, then warn if no common supertype, as below
- if (isCaseEquals) {
- def thisCase = receiver.info.member(nme.equals_).owner
- actual.info.baseClasses.find(_.isCase) match {
- case Some(p) if p != thisCase => nonSensible("case class ", false)
- case None =>
- // stronger message on (Some(1) == None)
- //if (receiver.isCase && receiver.isEffectivelyFinal && !(receiver isSubClass actual)) nonSensiblyNeq()
- //else
- // if a class, it must be super to thisCase (and receiver) since not <: thisCase
- if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
- else if (!haveSubclassRelationship) warnIfLubless()
- case _ =>
- }
- }
- // warn only if they have no common supertype below Object
- else if (!haveSubclassRelationship) {
- warnIfLubless()
- }
+ // warn only if they have no common supertype below Object
+ else if (!haveSubclassRelationship) {
+ warnIfLubless()
}
+ }
+ }
+ /** Sensibility check examines flavors of equals. */
+ def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
+ case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
+ checkSensibleEquals(pos, qual, name, fn.symbol, args.head)
case _ =>
}
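
For reference, hypothetical comparisons (not part of this patch) that trip the sensibility warnings built above; the messages shown are approximate:

    object SensibleEqualsDemo {
      val n: Int = 5
      val a = n == "abc"          // comparing values of types Int and String using `==' will always yield false
      val b = () == "abc"         // comparing values of types Unit and String using `==' will always yield false
      val c = (new AnyRef) == n   // comparing a fresh object using `==' will always yield false
      val d = null == null        // comparing values of types Null and Null ... will always yield true
    }
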
@@ -1249,8 +1134,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
/* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
def toConstructor(pos: Position, tpe: Type): Tree = {
- var rtpe = tpe.finalResultType
- assert(rtpe.typeSymbol hasFlag CASE, tpe);
+ val rtpe = tpe.finalResultType
+ assert(rtpe.typeSymbol hasFlag CASE, tpe)
localTyper.typedOperator {
atPos(pos) {
Select(New(TypeTree(rtpe)), rtpe.typeSymbol.primaryConstructor)
@@ -1268,57 +1153,61 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
finally popLevel()
}
- /** Eliminate ModuleDefs.
- * - A top level object is replaced with their module class.
- * - An inner object is transformed into a module var, created on first access.
+ /** Eliminate ModuleDefs. In all cases the ModuleDef (carrying a module symbol) is
+ * replaced with a ClassDef (carrying the corresponding module class symbol) with additional
+ * trees created as follows:
*
- * In both cases, this transformation returns the list of replacement trees:
- * - Top level: the module class accessor definition
- * - Inner: a class definition, declaration of module var, and module var accessor
+ * 1) A statically reachable object (either top-level or nested only in objects) receives
+ * no additional trees.
+ * 2) An inner object which matches an existing member (e.g. implements an interface)
+ * receives an accessor DefDef to implement the interface.
+ * 3) An inner object otherwise receives a private ValDef which declares a module var
+ * (the field which holds the module class - it has a name like Foo$module) and an
+ * accessor for that field. The instance is created lazily, on first access.
*/
- private def eliminateModuleDefs(tree: Tree): List[Tree] = {
- val ModuleDef(mods, name, impl) = tree
- val sym = tree.symbol
- val classSym = sym.moduleClass
- val cdef = ClassDef(mods | MODULE, name.toTypeName, Nil, impl) setSymbol classSym setType NoType
-
- def findOrCreateModuleVar() = localTyper.typedPos(tree.pos) {
- // See SI-5012, SI-6712.
+ private def eliminateModuleDefs(moduleDef: Tree): List[Tree] = exitingRefchecks {
+ val ModuleDef(_, _, impl) = moduleDef
+ val module = moduleDef.symbol
+ val site = module.owner
+ val moduleName = module.name.toTermName
+ // The typer doesn't take kindly to seeing this ClassDef; we have to
+ // set NoType so it will be ignored.
+ val cdef = ClassDef(module.moduleClass, impl) setType NoType
+
+ // Create the module var unless the immediate owner is a class and
+ // the module var already exists there. See SI-5012, SI-6712.
+ def findOrCreateModuleVar() = {
val vsym = (
- if (sym.owner.isTerm) NoSymbol
- else sym.enclClass.info.decl(nme.moduleVarName(sym.name.toTermName))
+ if (site.isTerm) NoSymbol
+ else site.info decl nme.moduleVarName(moduleName)
)
- // In case we are dealing with local symbol then we already have
- // to correct error with forward reference
- if (vsym == NoSymbol) gen.mkModuleVarDef(sym)
- else ValDef(vsym)
+ vsym orElse (site newModuleVarSymbol module)
}
- def createStaticModuleAccessor() = afterRefchecks {
- val method = (
- sym.owner.newMethod(sym.name.toTermName, sym.pos, (sym.flags | STABLE) & ~MODULE)
- setInfoAndEnter NullaryMethodType(sym.moduleClass.tpe)
- )
- localTyper.typedPos(tree.pos)(gen.mkModuleAccessDef(method, sym))
+ def newInnerObject() = {
+ // Create the module var unless it is already in the module owner's scope.
+ // The lookup is on module.enclClass and not module.owner lest there be a
+ // nullary method between us and the class; see SI-5012.
+ val moduleVar = findOrCreateModuleVar()
+ val rhs = gen.newModule(module, moduleVar.tpe)
+ val body = if (site.isTrait) rhs else gen.mkAssignAndReturn(moduleVar, rhs)
+ val accessor = DefDef(module, body.changeOwner(moduleVar -> module))
+
+ ValDef(moduleVar) :: accessor :: Nil
}
- def createInnerModuleAccessor(vdef: Tree) = List(
- vdef,
- localTyper.typedPos(tree.pos) {
- val vsym = vdef.symbol
- afterRefchecks {
- val rhs = gen.newModule(sym, vsym.tpe)
- val body = if (sym.owner.isTrait) rhs else gen.mkAssignAndReturn(vsym, rhs)
- DefDef(sym, body.changeOwner(vsym -> sym))
- }
- }
- )
- transformTrees(cdef :: {
- if (!sym.isStatic)
- createInnerModuleAccessor(findOrCreateModuleVar)
- else if (sym.isOverridingSymbol)
- List(createStaticModuleAccessor())
+ def matchingInnerObject() = {
+ val newFlags = (module.flags | STABLE) & ~MODULE
+ val newInfo = NullaryMethodType(module.moduleClass.tpe)
+ val accessor = site.newMethod(moduleName, module.pos, newFlags) setInfoAndEnter newInfo
+
+ DefDef(accessor, Select(This(site), module)) :: Nil
+ }
+ val newTrees = cdef :: (
+ if (module.isStatic)
+ if (module.isOverridingSymbol) matchingInnerObject() else Nil
else
- Nil
- })
+ newInnerObject()
+ )
+ transformTrees(newTrees map localTyper.typedPos(moduleDef.pos))
}
def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
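
For orientation, a hypothetical user-level sketch of case (3) described in the comment above; the names (Inner$, Inner$module) are illustrative, not the exact trees the compiler emits:

    class Outer {
      object Inner { val x = 1 }
    }
    // is lowered to roughly:
    //   class Outer {
    //     class Inner$ { val x = 1 }                   // the ClassDef carrying the module class
    //     private[this] var Inner$module: Inner$ = _   // the module var (ValDef)
    //     def Inner: Inner$ = {                        // the accessor; instance created on first access
    //       if (Inner$module eq null) Inner$module = new Inner$
    //       Inner$module
    //     }
    //   }
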
@@ -1332,7 +1221,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
case ValDef(_, _, _, _) =>
- val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check
+ val tree1 = transform(tree) // important to do before forward reference check
if (tree1.symbol.isLazy) tree1 :: Nil
else {
val lazySym = tree.symbol.lazyAccessorOrSelf
@@ -1353,7 +1242,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
catch {
case ex: TypeError =>
unit.error(tree0.pos, ex.getMessage())
- if (settings.explaintypes.value) {
+ if (settings.explaintypes) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds)
(argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ))
(argtps, bounds).zipped map ((targ, bound) => explainTypes(targ, bound.hi))
@@ -1376,22 +1265,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
false
}
- /** If symbol is deprecated, and the point of reference is not enclosed
- * in either a deprecated member or a scala bridge method, issue a warning.
- */
- private def checkDeprecated(sym: Symbol, pos: Position) {
+ // Note: if a symbol has both @deprecated and @migration annotations and both
+ // warnings are enabled, only the first one checked here will be emitted.
+ // I assume that's a consequence of some code trying to avoid noise by suppressing
+ // warnings after the first, but I think it'd be better if we didn't have to
+ // arbitrarily choose one as more important than the other.
+ private def checkUndesiredProperties(sym: Symbol, pos: Position) {
+ // If symbol is deprecated, and the point of reference is not enclosed
+ // in either a deprecated member or a scala bridge method, issue a warning.
if (sym.isDeprecated && !currentOwner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
unit.deprecationWarning(pos, "%s%s is deprecated%s".format(
sym, sym.locationString, sym.deprecationMessage map (": " + _) getOrElse "")
)
}
- }
-
- /** Similar to deprecation: check if the symbol is marked with @migration
- * indicating it has changed semantics between versions.
- */
- private def checkMigration(sym: Symbol, pos: Position) = {
- if (sym.hasMigrationAnnotation) {
+ // Similar to deprecation: check if the symbol is marked with @migration
+ // indicating it has changed semantics between versions.
+ if (sym.hasMigrationAnnotation && settings.Xmigration.value != NoScalaVersion) {
val changed = try
settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
catch {
@@ -1403,9 +1292,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (changed)
unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
}
- }
-
- private def checkCompileTimeOnly(sym: Symbol, pos: Position) = {
+ // See an explanation of compileTimeOnly in its scaladoc at scala.annotation.compileTimeOnly.
if (sym.isCompileTimeOnly) {
def defaultMsg =
sm"""Reference to ${sym.fullLocationString} should not have survived past type checking,
@@ -1474,7 +1361,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// if the unnormalized type is accessible, that's good enough
if (inaccessible.isEmpty) ()
// or if the normalized type is, that's good too
- else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.normalize, member).isEmpty) ()
+ else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.dealiasWiden, member).isEmpty) ()
// otherwise warn about the inaccessible syms in the unnormalized type
else inaccessible foreach (sym => warnLessAccessible(sym, member))
}
@@ -1485,6 +1372,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
member.typeParams.map(_.info.bounds.hi.widen) foreach checkAccessibilityOfType
}
+ private def checkByNameRightAssociativeDef(tree: DefDef) {
+ tree match {
+ case DefDef(_, name, _, params :: _, _, _) =>
+ if (settings.lint && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol)))
+ unit.warning(tree.pos,
+ "by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.")
+ case _ =>
+ }
+ }
+
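
A hypothetical definition (not from this patch) that the new SI-1980 lint above warns about: a right-associative operator with a by-name parameter, whose argument is still evaluated before the call when written infix:

    class LazyStream[A] {
      // with -Xlint: "by-name parameters will be evaluated eagerly when called as a
      // right-associative infix operator. For more details, see SI-1980."
      def #::(hd: => A): LazyStream[A] = this
    }
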
/** Check that a deprecated val or def does not override a
* concrete, non-deprecated method. If it does, then
* deprecation is meaningless.
@@ -1517,8 +1414,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case TypeRef(pre, sym, args) =>
tree match {
case tt: TypeTree if tt.original == null => // SI-7783 don't warn about inferred types
- case _ =>
- checkDeprecated(sym, tree.pos)
+ // FIXME: reconcile this check with one in resetAllAttrs
+ case _ => checkUndesiredProperties(sym, tree.pos)
}
if(sym.isJavaDefined)
sym.typeParams foreach (_.cookJavaRawInfo())
@@ -1560,7 +1457,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
applyChecks(sym.annotations)
// validate implicitNotFoundMessage
analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn =>
- unit.warning(tree.pos, "Invalid implicitNotFound message for %s%s:\n%s".format(sym, sym.locationString, warn))
+ unit.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn")
}
case tpt@TypeTree() =>
@@ -1588,9 +1485,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case TypeApply(fun, targs) =>
isClassTypeAccessible(fun)
case Select(module, apply) =>
- // Fixes SI-5626. Classes in refinement types cannot be constructed with `new`. In this case,
- // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
- module.symbol.companionClass.isClass
+ ( // SI-4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()`;
+ // {expr; Outer}.Inner() must not be rewritten to `new Outer.Inner()`.
+ treeInfo.isQualifierSafeToElide(module) &&
+ // SI-5626 Classes in refinement types cannot be constructed with `new`. In this case,
+ // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
+ module.symbol.companionClass.isClass
+ )
}
val doTransform =
@@ -1625,7 +1526,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
transform(qual)
case Apply(fn, args) =>
- // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability analyses in the pattern matcher
+ // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability
+ // analyses in the pattern matcher
if (!inPattern) {
checkImplicitViewOptionApply(tree.pos, fn, args)
checkSensible(tree.pos, fn, args)
@@ -1634,33 +1536,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
tree
}
private def transformSelect(tree: Select): Tree = {
- val Select(qual, name) = tree
+ val Select(qual, _) = tree
val sym = tree.symbol
- /** Note: if a symbol has both @deprecated and @migration annotations and both
- * warnings are enabled, only the first one checked here will be emitted.
- * I assume that's a consequence of some code trying to avoid noise by suppressing
- * warnings after the first, but I think it'd be better if we didn't have to
- * arbitrarily choose one as more important than the other.
- */
- checkDeprecated(sym, tree.pos)
- if(settings.Xmigration.value != NoScalaVersion)
- checkMigration(sym, tree.pos)
- checkCompileTimeOnly(sym, tree.pos)
+ checkUndesiredProperties(sym, tree.pos)
checkDelayedInitSelect(qual, sym, tree.pos)
- if (sym eq NoSymbol) {
- unit.warning(tree.pos, "Select node has NoSymbol! " + tree + " / " + tree.tpe)
- }
- else if (currentClass != sym.owner && sym.hasLocalFlag) {
- var o = currentClass
- var hidden = false
- while (!hidden && o != sym.owner && o != sym.owner.moduleClass && !o.isPackage) {
- hidden = o.isTerm || o.isPrivateLocal
- o = o.owner
- }
- if (!hidden) escapedPrivateLocals += sym
- }
+ if (!sym.exists)
+ devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe)
+ else if (sym.hasLocalFlag)
+ varianceValidator.checkForEscape(sym, currentClass)
def checkSuper(mix: Name) =
// term should have been eliminated by super accessors
@@ -1676,7 +1561,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
private def transformIf(tree: If): Tree = {
val If(cond, thenpart, elsepart) = tree
def unitIfEmpty(t: Tree): Tree =
- if (t == EmptyTree) Literal(Constant()).setPos(tree.pos).setType(UnitClass.tpe) else t
+ if (t == EmptyTree) Literal(Constant(())).setPos(tree.pos).setType(UnitTpe) else t
cond.tpe match {
case ConstantType(value) =>
@@ -1693,8 +1578,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// on Unit, in which case we had better let it slide.
val isOk = (
sym.isGetter
- || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
|| (sym.name containsName nme.DEFAULT_GETTER_STRING)
+ || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
)
if (!isOk)
unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead")
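
A hypothetical example (not from this patch) of the warning above, emitted under the corresponding -Ywarn-nullary-unit lint option:

    class Flusher {
      def flush: Unit = println("flushing")   // side-effecting nullary methods are discouraged: suggest defining as `def flush()` instead
      def drain(): Unit = println("draining") // no warning: declared with an empty parameter list
    }
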
@@ -1703,10 +1588,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Verify classes extending AnyVal meet the requirements
private def checkAnyValSubclass(clazz: Symbol) = {
- if ((clazz isSubClass AnyValClass) && !isPrimitiveValueClass(clazz)) {
+ if (clazz.isDerivedValueClass) {
if (clazz.isTrait)
unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
- else if ((clazz != AnyValClass) && clazz.hasFlag(ABSTRACT))
+ else if (clazz.hasAbstractFlag)
unit.error(clazz.pos, "`abstract' modifier cannot be used with value classes")
}
}
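
Hypothetical definitions (not from this patch) illustrating the value-class restrictions enforced above:

    class Meters(val value: Double) extends AnyVal       // ok: a concrete class deriving from AnyVal
    // trait Marker extends AnyVal                        // error: Only classes (not traits) are allowed to extend AnyVal
    // abstract class Metric(val n: Int) extends AnyVal   // error: `abstract' modifier cannot be used with value classes
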
@@ -1729,21 +1614,29 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
checkDeprecatedOvers(tree)
checkInfiniteLoop(tree.asInstanceOf[ValOrDefDef])
- if (settings.warnNullaryUnit.value)
+ if (settings.warnNullaryUnit)
checkNullaryMethodReturnType(sym)
- if (settings.warnInaccessible.value) {
+ if (settings.warnInaccessible) {
if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic)
checkAccessibilityOfReferencedTypes(tree)
}
+ tree match {
+ case dd: DefDef => checkByNameRightAssociativeDef(dd)
+ case _ =>
+ }
tree
case Template(parents, self, body) =>
localTyper = localTyper.atOwner(tree, currentOwner)
validateBaseTypes(currentOwner)
- checkOverloadedRestrictions(currentOwner)
+ checkOverloadedRestrictions(currentOwner, currentOwner)
+ // SI-7870 default getters for constructors live in the companion module
+ checkOverloadedRestrictions(currentOwner, currentOwner.companionModule)
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
checkAnyValSubclass(currentOwner)
+ if (currentOwner.isDerivedValueClass)
+ currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler!
if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc")
@@ -1795,12 +1688,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
enterReference(tree.pos, tpt.tpe.typeSymbol)
tree
- case Typed(_, Ident(tpnme.WILDCARD_STAR)) if !isRepeatedParamArg(tree) =>
+ case treeInfo.WildcardStarArg(_) if !isRepeatedParamArg(tree) =>
unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
"(such annotations are only allowed in arguments to *-parameters)")
tree
case Ident(name) =>
+ checkUndesiredProperties(sym, tree.pos)
transformCaseApply(tree,
if (name != nme.WILDCARD && name != tpnme.WILDCARD_STAR) {
assert(sym != NoSymbol, "transformCaseApply: name = " + name.debugString + " tree = " + tree + " / " + tree.getClass) //debug
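
A hypothetical snippet (not from this patch) showing where the `: _*` annotation handled by the WildcardStarArg case above is and is not allowed:

    object VarargsDemo {
      def sum(xs: Int*): Int = xs.sum
      val ys = List(1, 2, 3)
      val ok = sum(ys: _*)   // allowed: argument to a *-parameter
      // val bad = (ys: _*)  // error: no `: _*' annotation allowed here
    }
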
@@ -1820,19 +1714,33 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ => tree
}
+
// skip refchecks in patterns....
result = result match {
case CaseDef(pat, guard, body) =>
- inPattern = true
- val pat1 = transform(pat)
- inPattern = false
+ val pat1 = savingInPattern {
+ inPattern = true
+ transform(pat)
+ }
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case LabelDef(_, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
- val old = inPattern
- inPattern = true
- val res = deriveLabelDef(result)(transform) // TODO SI-7756 Too broad! The code from the original case body should be fully refchecked!
- inPattern = old
- res
+ savingInPattern {
+ inPattern = true
+ deriveLabelDef(result)(transform)
+ }
+ case Apply(fun, args) if fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol) =>
+ savingInPattern {
+ // SI-7756 If we were in a translated pattern, we can now switch out of pattern mode, as the label apply signals
+ // that we are in the user-supplied code in the case body.
+ //
+ // Relies on the translation of:
+ // (null: Any) match { case x: List[_] => x; x.reverse; case _ => }'
+ // to:
+ // <synthetic> val x2: List[_] = (x1.asInstanceOf[List[_]]: List[_]);
+ // matchEnd4({ x2; x2.reverse}) // case body is an argument to a label apply.
+ inPattern = false
+ super.transform(result)
+ }
case ValDef(_, _, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
deriveValDef(result)(transform) // SI-7716 Don't refcheck the tpt of the synthetic val that holds the selector.
case _ =>
@@ -1841,14 +1749,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
result match {
case ClassDef(_, _, _, _)
| TypeDef(_, _, _, _) =>
- if (result.symbol.isLocal || result.symbol.owner.isPackageClass)
+ if (result.symbol.isLocal || result.symbol.isTopLevel)
varianceValidator.traverse(result)
+ case tt @ TypeTree() if tt.original != null =>
+ varianceValidator.traverse(tt.original) // See SI-7872
case _ =>
}
result
} catch {
case ex: TypeError =>
- if (settings.debug.value) ex.printStackTrace()
+ if (settings.debug) ex.printStackTrace()
unit.error(tree.pos, ex.getMessage())
tree
} finally {
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 64c5b41638..995f98cc2c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -4,7 +4,165 @@ package typechecker
trait StdAttachments {
self: Analyzer =>
- type UnaffiliatedMacroContext = scala.reflect.macros.runtime.Context
+ import global._
+
+ /** Carries information necessary to expand the host tree.
+ * At times we need to store this info, because macro expansion can be delayed until its targs are inferred.
+ * After a macro application has been successfully expanded, this attachment is destroyed.
+ */
+ type UnaffiliatedMacroContext = scala.reflect.macros.contexts.Context
type MacroContext = UnaffiliatedMacroContext { val universe: self.global.type }
case class MacroRuntimeAttachment(delayed: Boolean, typerContext: Context, macroContext: Option[MacroContext])
-}
\ No newline at end of file
+
+ /** Scratchpad for the macro expander, which is used to store all intermediate data except the details about the runtime.
+ */
+ case class MacroExpanderAttachment(original: Tree, desugared: Tree)
+
+ /** Loads underlying MacroExpanderAttachment from a macro expandee or returns a default value for that attachment.
+ */
+ def macroExpanderAttachment(tree: Tree): MacroExpanderAttachment =
+ tree.attachments.get[MacroExpanderAttachment] getOrElse {
+ tree match {
+ case Apply(fn, _) if tree.isInstanceOf[ApplyToImplicitArgs] => macroExpanderAttachment(fn)
+ case _ => MacroExpanderAttachment(tree, EmptyTree)
+ }
+ }
+
+ /** Links a macro expandee with its desugared form
+ * by annotating them both with a `MacroExpanderAttachment`.
+ */
+ def linkExpandeeAndDesugared(expandee: Tree, desugared: Tree): Unit = {
+ val metadata = MacroExpanderAttachment(expandee, desugared)
+ expandee updateAttachment metadata
+ desugared updateAttachment metadata
+ }
+
+ /** Is added by the macro engine to originals and results of macro expansions.
+ * Stores the original expandee as it entered the `macroExpand` function.
+ */
+ case class MacroExpansionAttachment(expandee: Tree, expanded: Any)
+
+ /** Determines whether the target is either an original or a result of a macro expansion.
+ * The parameter is of type `Any`, because macros can expand both into trees and into annotations.
+ */
+ def hasMacroExpansionAttachment(any: Any): Boolean = any match {
+ case tree: Tree => tree.attachments.get[MacroExpansionAttachment].isDefined
+ case _ => false
+ }
+
+ /** Returns the original tree of the macro expansion if the argument is a macro expansion or EmptyTree otherwise.
+ */
+ def macroExpandee(tree: Tree): Tree = tree.attachments.get[MacroExpansionAttachment].map(_.expandee).getOrElse(EmptyTree)
+
+ /** After macro expansion is completed, links the expandee and the expansion result by annotating them both with a `MacroExpansionAttachment`.
+ * The `expanded` parameter is of type `Any`, because macros can expand both into trees and into annotations.
+ */
+ def linkExpandeeAndExpanded(expandee: Tree, expanded: Any): Unit = {
+ val metadata = MacroExpansionAttachment(expandee, expanded)
+ expandee updateAttachment metadata
+ expanded match {
+ case expanded: Tree => expanded updateAttachment metadata
+ case _ => // do nothing
+ }
+ }
+
+ /** When present, suppresses macro expansion for the host.
+ * This is occasionally necessary, e.g. to prohibit eta-expansion of macros.
+ *
+ * Does not affect expandability of child nodes, there's context.withMacrosDisabled for that
+ * (but think thrice before using that API - see the discussion at https://github.com/scala/scala/pull/1639).
+ */
+ case object SuppressMacroExpansionAttachment
+
+ /** Suppresses macro expansion of the tree by putting SuppressMacroExpansionAttachment on it.
+ */
+ def suppressMacroExpansion(tree: Tree) = tree.updateAttachment(SuppressMacroExpansionAttachment)
+
+ /** Unsuppresses macro expansion of the tree by removing SuppressMacroExpansionAttachment from it and its children.
+ */
+ def unsuppressMacroExpansion(tree: Tree): Tree = {
+ tree.removeAttachment[SuppressMacroExpansionAttachment.type]
+ tree match {
+ // see the comment to `isMacroExpansionSuppressed` to learn why we need
+ // a special traversal strategy here
+ case Apply(fn, _) => unsuppressMacroExpansion(fn)
+ case TypeApply(fn, _) => unsuppressMacroExpansion(fn)
+ case _ => // do nothing
+ }
+ tree
+ }
+
+ /** Determines whether a tree should not be expanded, because someone has put SuppressMacroExpansionAttachment on it or one of its children.
+ */
+ def isMacroExpansionSuppressed(tree: Tree): Boolean =
+ ( settings.Ymacroexpand.value == settings.MacroExpand.None // SI-6812
+ || tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined
+ || (tree match {
+ // we have to account for the fact that during typechecking an expandee might become wrapped,
+ // i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
+ // in that case the expandee itself will no longer be suppressed and we need to look at the core
+ case Apply(fn, _) => isMacroExpansionSuppressed(fn)
+ case TypeApply(fn, _) => isMacroExpansionSuppressed(fn)
+ case _ => false
+ })
+ )
+
+ /** After being synthesized by the parser, primary constructors aren't fully baked yet.
+ * A call to super in such constructors is just a fill-me-in-later dummy resolved later
+ * by `parentTypes`. This attachment coordinates `parentTypes` and `typedTemplate` and
+ * allows them to complete the synthesis.
+ */
+ case class SuperArgsAttachment(argss: List[List[Tree]])
+
+ /** Convenience method for `SuperArgsAttachment`.
+ * Compared with `MacroRuntimeAttachment` this attachment has a different usage pattern,
+ * so it really benefits from a dedicated extractor.
+ */
+ def superArgs(tree: Tree): Option[List[List[Tree]]] =
+ tree.attachments.get[SuperArgsAttachment] collect { case SuperArgsAttachment(argss) => argss }
+
+ /** Determines whether the given tree has an associated SuperArgsAttachment.
+ */
+ def hasSuperArgs(tree: Tree): Boolean = superArgs(tree).nonEmpty
+
+ /** @see markMacroImplRef
+ */
+ case object MacroImplRefAttachment
+
+ /** Marks the tree as a macro impl reference, which is a naked reference to a method.
+ *
+ * This is necessary for typechecking macro impl references (see `DefaultMacroCompiler.defaultResolveMacroImpl`),
+ * because otherwise typing a naked reference will result in the "follow this method with `_' if you want to
+ * treat it as a partially applied function" errors.
+ *
+ * This mark suppresses adapt except for when the annottee is a macro application.
+ */
+ def markMacroImplRef(tree: Tree): Tree = tree.updateAttachment(MacroImplRefAttachment)
+
+ /** Unmarks the tree as a macro impl reference (see `markMacroImplRef` for more information).
+ *
+ * This is necessary when a tree that was previously deemed to be a macro impl reference,
+ * typechecks to be a macro application. Then we need to unmark it, expand it and try to treat
+ * its expansion as a macro impl reference.
+ */
+ def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type]
+
+ /** Determines whether a tree should or should not be adapted,
+ * because someone has put MacroImplRefAttachment on it.
+ */
+ def isMacroImplRef(tree: Tree): Boolean = tree.attachments.get[MacroImplRefAttachment.type].isDefined
+
+ /** Since mkInvoke, the applyDynamic/selectDynamic/etc desugarer, is disconnected
+ * from typedNamedApply, the applyDynamicNamed argument rewriter, the latter
+ * doesn't know whether it needs to apply the rewriting because the application
+ * has just been desugared or it needs to hold on because it's already performed
+ * a desugaring on this tree. This has led to SI-8006.
+ *
+ * This attachment solves the problem by providing a means of communication
+ * between the two Dynamic desugarers, which solves the aforementioned issue.
+ */
+ case object DynamicRewriteAttachment
+ def markDynamicRewrite(tree: Tree): Tree = tree.updateAttachment(DynamicRewriteAttachment)
+ def unmarkDynamicRewrite(tree: Tree): Tree = tree.removeAttachment[DynamicRewriteAttachment.type]
+ def isDynamicRewrite(tree: Tree): Boolean = tree.attachments.get[DynamicRewriteAttachment.type].isDefined
+}
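
For context, a user-level sketch (hypothetical, using the public macro API of this era) of the situation SuppressMacroExpansionAttachment exists for: a reference to a macro that is not an application, such as an attempted eta-expansion, must not be expanded:

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object Macros {
      def id(x: Int): Int = macro idImpl
      def idImpl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = x
    }
    // val f = Macros.id _   // rejected: macros cannot be eta-expanded
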
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index bad49385aa..b706e1af6b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -1,9 +1,11 @@
+
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
import scala.collection.{ mutable, immutable }
@@ -28,7 +30,7 @@ import symtab.Flags._
*/
abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers {
import global._
- import definitions.{ UnitClass, ObjectClass, isRepeatedParamType, isByNameParamType, Any_asInstanceOf }
+ import definitions._
import analyzer.{ restrictionError }
/** the following two members override abstract members in Transform */
@@ -60,11 +62,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val clazz = qual.symbol
val supername = nme.superName(name)
val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse {
- debuglog("add super acc " + sym + sym.locationString + " to `" + clazz);//debug
- val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE) setAlias sym
+ debuglog(s"add super acc ${sym.fullLocationString} to $clazz")
+ val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE | ARTIFACT) setAlias sym
val tpe = clazz.thisType memberType sym match {
- case t if sym.isModule && !sym.isMethod => NullaryMethodType(t)
- case t => t
+ case t if sym.isModuleNotMethod => NullaryMethodType(t)
+ case t => t
}
acc setInfoAndEnter (tpe cloneInfo acc)
// Diagnostic for SI-7091
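
For orientation, a hypothetical user-level example of what the accessor created above supports: a `super` call from a trait is routed through a generated super accessor (conceptually `super$log`) in the class that eventually mixes the trait in:

    trait Logging {
      def log(msg: String): Unit = println(msg)
    }
    trait PrefixedLogging extends Logging {
      override def log(msg: String): Unit = super.log("[prefixed] " + msg)
    }
    class Service extends PrefixedLogging
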
@@ -108,11 +110,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val clazz = sup.symbol
if (sym.isDeferred) {
- val member = sym.overridingSymbol(clazz);
+ val member = sym.overridingSymbol(clazz)
if (mix != tpnme.EMPTY || member == NoSymbol ||
!(member.isAbstractOverride && member.isIncompleteIn(clazz)))
unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
- "unless it is overridden by a member declared `abstract' and `override'");
+ "unless it is overridden by a member declared `abstract' and `override'")
} else if (mix == tpnme.EMPTY && !sym.owner.isTrait){
// SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
@@ -165,18 +167,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym))
}
}
- if (settings.verbose.value && forScaladoc && !sym.isAnonymousClass) {
- println("========== scaladoc of "+sym+" =============================")
- println(toJavaDoc(expandedDocComment(sym)))
- for (member <- sym.info.members) {
- println(member+":"+sym.thisType.memberInfo(member)+"\n"+
- toJavaDoc(expandedDocComment(member, sym)))
- for ((useCase, comment, pos) <- useCases(member, sym)) {
- println("usecase "+useCase+":"+useCase.info)
- println(toJavaDoc(comment))
- }
- }
- }
super.transform(tree)
}
transformClassDef
@@ -203,7 +193,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
transformTemplate
case TypeApply(sel @ Select(This(_), name), args) =>
- mayNeedProtectedAccessor(sel, args, false)
+ mayNeedProtectedAccessor(sel, args, goToSuper = false)
// set a flag for all type parameters with `@specialized` annotation so it can be pickled
case typeDef: TypeDef if typeDef.symbol.deSkolemize.hasAnnotation(definitions.SpecializedClass) =>
@@ -231,7 +221,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// also exists in a superclass, because they may be surprised
// to find out that a constructor parameter will shadow a
// field. See SI-4762.
- if (settings.lint.value) {
+ if (settings.lint) {
if (sym.isPrivateLocal && sym.paramss.isEmpty) {
qual.symbol.ancestors foreach { parent =>
parent.info.decls filterNot (x => x.isPrivate || x.hasLocalFlag) foreach { m2 =>
@@ -260,9 +250,9 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
}).asInstanceOf[Select]
debuglog("alias replacement: " + tree + " ==> " + result); //debug
- localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
+ localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, beforeRefChecks = true))
} else {
- /**
+ /*
* A trait which extends a class and accesses a protected member
* of that class cannot implement the necessary accessor method
* because its implementation is in an implementation class (e.g.
@@ -279,20 +269,21 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
&& sym.enclClass != currentClass
&& !sym.owner.isPackageClass // SI-7091 no accessor needed package owned (ie, top level) symbols
&& !sym.owner.isTrait
- && (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
- && (qual.symbol.info.member(sym.name) ne NoSymbol)
- && !needsProtectedAccessor(sym, tree.pos))
+ && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass
+ && qual.symbol.info.member(sym.name).exists
+ && !needsProtectedAccessor(sym, tree.pos)
+ )
if (shouldEnsureAccessor) {
log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
ensureAccessor(sel)
}
else
- mayNeedProtectedAccessor(sel, EmptyTree.asList, false)
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = false)
}
case Super(_, mix) =>
if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
- if (!settings.overrideVars.value)
+ if (!settings.overrideVars)
unit.error(tree.pos, "super may be not be used on " + sym.accessedOrSelf)
} else if (isDisallowed(sym)) {
unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
@@ -300,16 +291,16 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
transformSuperSelect(sel)
case _ =>
- mayNeedProtectedAccessor(sel, EmptyTree.asList, true)
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = true)
}
}
transformSelect
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) if tree.symbol.isMethodWithExtension =>
- treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, withInvalidOwner(transform(rhs)))
+ case DefDef(_, _, _, _, _, _) if tree.symbol.isMethodWithExtension =>
+ deriveDefDef(tree)(rhs => withInvalidOwner(transform(rhs)))
case TypeApply(sel @ Select(qual, name), args) =>
- mayNeedProtectedAccessor(sel, args, true)
+ mayNeedProtectedAccessor(sel, args, goToSuper = true)
case Assign(lhs @ Select(qual, name), rhs) =>
def transformAssign = {
@@ -317,8 +308,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
lhs.symbol.isJavaDefined &&
needsProtectedAccessor(lhs.symbol, tree.pos)) {
debuglog("Adding protected setter for " + tree)
- val setter = makeSetter(lhs);
- debuglog("Replaced " + tree + " with " + setter);
+ val setter = makeSetter(lhs)
+ debuglog("Replaced " + tree + " with " + setter)
transform(localTyper.typed(Apply(setter, List(qual, rhs))))
} else
super.transform(tree)
@@ -377,14 +368,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
* typed.
*/
private def makeAccessor(tree: Select, targs: List[Tree]): Tree = {
- val Select(qual, name) = tree
+ val Select(qual, _) = tree
val sym = tree.symbol
val clazz = hostForAccessorOf(sym, currentClass)
assert(clazz != NoSymbol, sym)
debuglog("Decided for host class: " + clazz)
- val accName = nme.protName(sym.originalName)
+ val accName = nme.protName(sym.unexpandedName)
val hasArgs = sym.tpe.paramSectionCount > 0
val memberType = refChecks.toScalaRepeatedParam(sym.tpe) // fix for #2413
@@ -402,7 +393,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
val protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s)) orElse {
- val newAcc = clazz.newMethod(nme.protName(sym.originalName), tree.pos)
+ val newAcc = clazz.newMethod(nme.protName(sym.unexpandedName), tree.pos, newFlags = ARTIFACT)
newAcc setInfoAndEnter accType(newAcc)
val code = DefDef(newAcc, {
@@ -413,7 +404,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
args.foldLeft(base)(Apply(_, _))
})
- debuglog("" + code)
+ debuglog("created protected accessor: " + code)
storeAccessorDefinition(clazz, code)
newAcc
}
@@ -425,7 +416,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case _ => mkApply(TypeApply(selection, targs))
}
}
- debuglog("Replaced " + tree + " with " + res)
+ debuglog(s"Replaced $tree with $res")
if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res)
}
@@ -462,12 +453,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
assert(clazz != NoSymbol, field)
debuglog("Decided for host class: " + clazz)
- val accName = nme.protSetterName(field.originalName)
+ val accName = nme.protSetterName(field.unexpandedName)
val protectedAccessor = clazz.info decl accName orElse {
- val protAcc = clazz.newMethod(accName, field.pos)
+ val protAcc = clazz.newMethod(accName, field.pos, newFlags = ARTIFACT)
val paramTypes = List(clazz.typeOfThis, field.tpe)
val params = protAcc newSyntheticValueParams paramTypes
- val accessorType = MethodType(params, UnitClass.tpe)
+ val accessorType = MethodType(params, UnitTpe)
protAcc setInfoAndEnter accessorType
val obj :: value :: Nil = params
@@ -496,9 +487,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
def accessibleThroughSubclassing =
validCurrentOwner && clazz.thisSym.isSubClass(sym.owner) && !clazz.isTrait
- def packageAccessBoundry(sym: Symbol) =
- sym.accessBoundary(sym.enclosingPackageClass)
-
val isCandidate = (
sym.isProtected
&& sym.isJavaDefined
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 242eb9c9fe..9516f94135 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -6,10 +6,10 @@
package scala.tools.nsc
package typechecker
-import symtab.Flags
+import scala.collection.{ mutable, immutable }
import symtab.Flags._
-import scala.collection.mutable
import scala.collection.mutable.ListBuffer
+import scala.language.postfixOps
/** Synthetic method implementations for case classes and case objects.
*
@@ -94,13 +94,13 @@ trait SyntheticMethods extends ast.TreeDSL {
// like Tags and Arrays which are not robust and infer things
// which they shouldn't.
val accessorLub = (
- if (opt.experimental) {
- global.weakLub(accessors map (_.tpe.finalResultType))._1 match {
+ if (settings.Xexperimental) {
+ global.weakLub(accessors map (_.tpe.finalResultType)) match {
case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
case tp => tp
}
}
- else AnyClass.tpe
+ else AnyTpe
)
def forwardToRuntime(method: Symbol): Tree =
@@ -121,70 +121,60 @@ trait SyntheticMethods extends ast.TreeDSL {
(m0 ne meth) && !m0.isDeferred && !m0.isSynthetic && (m0.owner != AnyValClass) && (typeInClazz(m0) matches typeInClazz(meth))
}
}
- def readConstantValue[T](name: String, default: T = null.asInstanceOf[T]): T = {
- clazzMember(newTermName(name)).info match {
- case NullaryMethodType(ConstantType(Constant(value))) => value.asInstanceOf[T]
- case _ => default
- }
- }
def productIteratorMethod = {
createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis))
)
}
- def projectionMethod(accessor: Symbol, num: Int) = {
- createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor))
- }
- /** Common code for productElement and (currently disabled) productElementName
- */
+ /* Common code for productElement and (currently disabled) productElementName */
def perElementMethod(name: Name, returnType: Type)(caseFn: Symbol => Tree): Tree =
createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx)))
- // def productElementNameMethod = perElementMethod(nme.productElementName, StringClass.tpe)(x => LIT(x.name.toString))
+ // def productElementNameMethod = perElementMethod(nme.productElementName, StringTpe)(x => LIT(x.name.toString))
var syntheticCanEqual = false
- /** The canEqual method for case classes.
- * def canEqual(that: Any) = that.isInstanceOf[This]
+ /* The canEqual method for case classes.
+ * def canEqual(that: Any) = that.isInstanceOf[This]
*/
def canEqualMethod: Tree = {
syntheticCanEqual = true
- createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
+ createMethod(nme.canEqual_, List(AnyTpe), BooleanTpe)(m =>
Ident(m.firstParam) IS_OBJ classExistentialType(clazz))
}
- /** that match { case _: this.C => true ; case _ => false }
- * where `that` is the given method's first parameter.
+ /* that match { case _: this.C => true ; case _ => false }
+ * where `that` is the given method's first parameter.
*
- * An isInstanceOf test is insufficient because it has weaker
- * requirements than a pattern match. Given an inner class Foo and
- * two different instantiations of the container, an x.Foo and and a y.Foo
- * are both .isInstanceOf[Foo], but the one does not match as the other.
+ * An isInstanceOf test is insufficient because it has weaker
+ * requirements than a pattern match. Given an inner class Foo and
+ * two different instantiations of the container, an x.Foo and a y.Foo
+ * are both .isInstanceOf[Foo], but one does not match as the other.
*/
def thatTest(eqmeth: Symbol): Tree = {
Match(
Ident(eqmeth.firstParam),
List(
CaseDef(Typed(Ident(nme.WILDCARD), TypeTree(clazz.tpe)), EmptyTree, TRUE),
- CaseDef(WILD.empty, EmptyTree, FALSE)
+ CaseDef(Ident(nme.WILDCARD), EmptyTree, FALSE)
)
)
}
- /** (that.asInstanceOf[this.C])
- * where that is the given methods first parameter.
+ /* (that.asInstanceOf[this.C])
+ * where `that` is the given method's first parameter.
*/
def thatCast(eqmeth: Symbol): Tree =
gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe)
- /** The equality method core for case classes and inline clases.
- * 1+ args:
- * (that.isInstanceOf[this.C]) && {
- * val x$1 = that.asInstanceOf[this.C]
- * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
- * }
- * Drop canBuildFrom part if class is final and canBuildFrom is synthesized
+ /* The equality method core for case classes and inline classes.
+ * 1+ args:
+ * (that.isInstanceOf[this.C]) && {
+ * val x$1 = that.asInstanceOf[this.C]
+ * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
+ * }
+ * Drop the canEqual part if the class is final and canEqual is synthesized
*/
def equalsCore(eqmeth: Symbol, accessors: List[Symbol]) = {
val otherName = context.unit.freshTermName(clazz.name + "$")
@@ -199,18 +189,18 @@ trait SyntheticMethods extends ast.TreeDSL {
)
}
- /** The equality method for case classes.
- * 0 args:
- * def equals(that: Any) = that.isInstanceOf[this.C] && that.asInstanceOf[this.C].canEqual(this)
- * 1+ args:
- * def equals(that: Any) = (this eq that.asInstanceOf[AnyRef]) || {
- * (that.isInstanceOf[this.C]) && {
- * val x$1 = that.asInstanceOf[this.C]
- * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
- * }
- * }
+ /* The equality method for case classes.
+ * 0 args:
+ * def equals(that: Any) = that.isInstanceOf[this.C] && that.asInstanceOf[this.C].canEqual(this)
+ * 1+ args:
+ * def equals(that: Any) = (this eq that.asInstanceOf[AnyRef]) || {
+ * (that.isInstanceOf[this.C]) && {
+ * val x$1 = that.asInstanceOf[this.C]
+ * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
+ * }
+ * }
*/
- def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+ def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyTpe), BooleanTpe) { m =>
if (accessors.isEmpty)
if (clazz.isFinal) thatTest(m)
else thatTest(m) AND ((thatCast(m) DOT nme.canEqual_)(mkThis))
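
As a concrete reference point, the synthesized members described in the comments above amount to roughly the following hand-written code for a small case class. This only illustrates the shape (using a pattern match instead of the isInstanceOf/cast spelling in the comment); it is not the trees createMethod actually builds, and `Point` is a made-up example.

    // what canEqualMethod and equalsCaseClassMethod produce, by hand, for
    // `case class Point(x: Int, y: Int)` written as a plain class:
    class Point(val x: Int, val y: Int) {
      def canEqual(that: Any): Boolean = that.isInstanceOf[Point]
      override def equals(that: Any): Boolean =
        (this eq that.asInstanceOf[AnyRef]) || (that match {
          case p: Point => x == p.x && y == p.y && (p canEqual this)
          case _        => false
        })
    }
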
@@ -218,30 +208,35 @@ trait SyntheticMethods extends ast.TreeDSL {
(mkThis ANY_EQ Ident(m.firstParam)) OR equalsCore(m, accessors)
}
- /** The equality method for value classes
- * def equals(that: Any) = (this.asInstanceOf[AnyRef]) eq that.asInstanceOf[AnyRef]) || {
- * (that.isInstanceOf[this.C]) && {
- * val x$1 = that.asInstanceOf[this.C]
- * (this.underlying == that.underlying
+ /* The equality method for value classes
+ * def equals(that: Any) = (this.asInstanceOf[AnyRef] eq that.asInstanceOf[AnyRef]) || {
+ * (that.isInstanceOf[this.C]) && {
+ * val x$1 = that.asInstanceOf[this.C]
+ * (this.underlying == that.underlying)
*/
- def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+ def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyTpe), BooleanTpe) { m =>
equalsCore(m, List(clazz.derivedValueClassUnbox))
}
- /** The hashcode method for value classes
+ /* The hashcode method for value classes
* def hashCode(): Int = this.underlying.hashCode
*/
- def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
+ def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntTpe) { m =>
Select(mkThisSelect(clazz.derivedValueClassUnbox), nme.hashCode_)
}
- /** The _1, _2, etc. methods to implement ProductN, disabled
- * until we figure out how to introduce ProductN without cycles.
+ /* The _1, _2, etc. methods to implement ProductN, disabled
+ * until we figure out how to introduce ProductN without cycles.
*/
- def productNMethods = {
+ /****
+ def productNMethods = {
val accs = accessors.toIndexedSeq
1 to arity map (num => productProj(arity, num) -> (() => projectionMethod(accs(num - 1), num)))
}
+ def projectionMethod(accessor: Symbol, num: Int) = {
+ createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor))
+ }
+ ****/
// methods for both classes and objects
def productMethods = {
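
Likewise, for a value class the two synthetics above come down to roughly this hand-written equivalent. It is illustrative only (a user-defined equals/hashCode would normally suppress the synthesis), and `Meters` is a made-up example.

    class Meters(val underlying: Int) extends AnyVal {
      // equalsDerivedValueClassMethod: equalsCore over the single unboxed member
      override def equals(that: Any): Boolean = that match {
        case m: Meters => underlying == m.underlying
        case _         => false
      }
      // hashCodeDerivedValueClassMethod: hash of the underlying value
      override def hashCode(): Int = underlying.hashCode
    }
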
@@ -259,19 +254,20 @@ trait SyntheticMethods extends ast.TreeDSL {
def hashcodeImplementation(sym: Symbol): Tree = {
sym.tpe.finalResultType.typeSymbol match {
- case UnitClass | NullClass => Literal(Constant(0))
- case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
- case IntClass | ShortClass | ByteClass | CharClass => Ident(sym)
- case LongClass => callStaticsMethod("longHash")(Ident(sym))
- case DoubleClass => callStaticsMethod("doubleHash")(Ident(sym))
- case FloatClass => callStaticsMethod("floatHash")(Ident(sym))
- case _ => callStaticsMethod("anyHash")(Ident(sym))
+ case UnitClass | NullClass => Literal(Constant(0))
+ case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
+ case IntClass => Ident(sym)
+ case ShortClass | ByteClass | CharClass => Select(Ident(sym), nme.toInt)
+ case LongClass => callStaticsMethod("longHash")(Ident(sym))
+ case DoubleClass => callStaticsMethod("doubleHash")(Ident(sym))
+ case FloatClass => callStaticsMethod("floatHash")(Ident(sym))
+ case _ => callStaticsMethod("anyHash")(Ident(sym))
}
}
def specializedHashcode = {
- createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
- val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntClass.tpe
+ createMethod(nme.hashCode_, Nil, IntTpe) { m =>
+ val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntTpe
val valdef = ValDef(accumulator, Literal(Constant(0xcafebabe)))
val mixes = accessors map (acc =>
Assign(
@@ -313,11 +309,11 @@ trait SyntheticMethods extends ast.TreeDSL {
// Object_equals -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ Ident(m.firstParam)))
)
- /** If you serialize a singleton and then deserialize it twice,
- * you will have two instances of your singleton unless you implement
- * readResolve. Here it is implemented for all objects which have
- * no implementation and which are marked serializable (which is true
- * for all case objects.)
+ /* If you serialize a singleton and then deserialize it twice,
+ * you will have two instances of your singleton unless you implement
+ * readResolve. Here it is implemented for all objects which have
+ * no implementation and which are marked serializable (which is true
+ * for all case objects.)
*/
def needsReadResolve = (
clazz.isModuleClass
@@ -335,18 +331,20 @@ trait SyntheticMethods extends ast.TreeDSL {
else Nil
)
- /** Always generate overrides for equals and hashCode in value classes,
- * so they can appear in universal traits without breaking value semantics.
+ /* Always generate overrides for equals and hashCode in value classes,
+ * so they can appear in universal traits without breaking value semantics.
*/
def impls = {
def shouldGenerate(m: Symbol) = {
!hasOverridingImplementation(m) || {
clazz.isDerivedValueClass && (m == Any_hashCode || m == Any_equals) && {
- if (settings.lint.value) {
- (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
- currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
- }
- }
+ // Without a means to suppress this warning, I've thought better of it.
+ //
+ // if (settings.lint) {
+ // (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
+ // currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
+ // }
+ // }
true
}
}
@@ -359,7 +357,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// This method should be generated as private, but apparently if it is, then
// it is name mangled afterward. (Wonder why that is.) So it's only protected.
// For sure special methods like "readResolve" should not be mangled.
- List(createMethod(nme.readResolve, Nil, ObjectClass.tpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
+ List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
}
else Nil
)
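
A hand-written counterpart of that synthesized member, for reference. The compiler builds it with createMethod and REF(clazz.sourceModule) and intends it as protected (per the comment above); the modifier and the `Settings` object below are only indicative.

    object Settings extends Serializable {
      // deserializing Settings twice still yields the single canonical instance,
      // because Java serialization replaces the read object with this result
      // (the synthesized version ends up protected rather than private)
      private def readResolve(): Object = Settings
    }
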
@@ -368,11 +366,11 @@ trait SyntheticMethods extends ast.TreeDSL {
catch { case _: TypeError if reporter.hasErrors => Nil }
}
- /** If this case class has any less than public accessors,
- * adds new accessors at the correct locations to preserve ordering.
- * Note that this must be done before the other method synthesis
- * because synthesized methods need refer to the new symbols.
- * Care must also be taken to preserve the case accessor order.
+ /* If this case class has any less-than-public accessors,
+ * adds new accessors at the correct locations to preserve ordering.
+ * Note that this must be done before the other method synthesis
+ * because synthesized methods need to refer to the new symbols.
+ * Care must also be taken to preserve the case accessor order.
*/
def caseTemplateBody(): List[Tree] = {
val lb = ListBuffer[Tree]()
@@ -382,7 +380,7 @@ trait SyntheticMethods extends ast.TreeDSL {
val original = ddef.symbol
val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc =>
newAcc.makePublic
- newAcc resetFlag (ACCESSOR | PARAMACCESSOR)
+ newAcc resetFlag (ACCESSOR | PARAMACCESSOR | OVERRIDE)
ddef.rhs.duplicate
}
// TODO: shouldn't the next line be: `original resetFlag CASEACCESSOR`?
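
To make the per-field choices in hashcodeImplementation concrete, the same mapping written out by hand for a sample class follows. The Statics calls correspond to the callStaticsMethod names in the hunk above; the final accumulator mixing done by specializedHashcode is elided, and `Sample` is a made-up example.

    import scala.runtime.Statics

    class Sample(i: Int, s: Short, l: Long, d: Double, o: String) {
      // one hash value per accessor, as chosen by hashcodeImplementation
      def fieldHashes: List[Int] = List(
        i,                     // Int: used as is
        s.toInt,               // Short/Byte/Char: widened with .toInt
        Statics.longHash(l),   // Long
        Statics.doubleHash(d), // Double
        Statics.anyHash(o)     // anything else
      )
    }
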
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index d82fbd7c77..90ec3a89b8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -10,16 +10,19 @@ trait Tags {
trait Tag {
self: Typer =>
- private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = beforeTyper {
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+
+ private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper {
def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree))
wrapper(inferImplicit(
EmptyTree,
taggedTp,
- /*reportAmbiguous =*/ true,
- /*isView =*/ false,
- /*context =*/ context,
- /*saveAmbiguousDivergent =*/ true,
- /*pos =*/ pos
+ reportAmbiguous = true,
+ isView = false,
+ context,
+ saveAmbiguousDivergent = true,
+ pos
).tree)
}
@@ -30,7 +33,7 @@ trait Tags {
* However we found out that we don't really need this concept, so it got removed.
*
* @param pos Position for error reporting. Please, provide meaningful value.
- * @param tp Type we're looking a ClassTag for, e.g. resolveClassTag(pos, IntClass.tpe) will look for ClassTag[Int].
+ * @param tp Type we're looking up a ClassTag for, e.g. resolveClassTag(pos, IntTpe) will look for ClassTag[Int].
* @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no class tag in scope.
* If false then materialization macros are prohibited from running.
*
@@ -49,7 +52,7 @@ trait Tags {
* @param pre Prefix that represents a universe this type tag will be bound to.
* If `pre` is set to `NoType`, then any type tag in scope will do, regardless of its affiliation.
* If `pre` is set to `NoType`, and tag resolution involves materialization, then `mkRuntimeUniverseRef` will be used.
- * @param tp Type we're looking a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntClass.tpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
+ * @param tp Type we're looking up a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntTpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
* @param concrete If true then the result must not contain unresolved (i.e. not spliced) type parameters and abstract type members.
* If false then the function will always succeed (abstract types will be reified as free types).
* @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no type tag in scope.
@@ -69,4 +72,4 @@ trait Tags {
resolveTag(pos, taggedTp, allowMaterialization)
}
}
-}
\ No newline at end of file
+}
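
For context, these resolvers back the implicit searches that ordinary tag-based user code triggers; nothing in the sketch below is compiler-internal, it only shows the two kinds of tags being requested.

    import scala.reflect.ClassTag
    import scala.reflect.runtime.universe.TypeTag

    // needs a ClassTag[T]: found (or materialized) the way resolveClassTag describes
    def newArrayOf[T: ClassTag](n: Int): Array[T] = new Array[T](n)

    // needs a TypeTag[T]: found the way resolveTypeTag describes
    def describe[T: TypeTag]: String = implicitly[TypeTag[T]].tpe.toString

    // newArrayOf[Int](3)   // Array(0, 0, 0)
    // describe[List[Int]]  // "List[Int]"
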
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 88d10f1d72..fd8f9bebba 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -6,19 +6,72 @@
package scala.tools.nsc
package typechecker
-import scala.tools.nsc.symtab.Flags._
import scala.collection.mutable
import mutable.ListBuffer
import util.returning
+import scala.reflect.internal.util.shortClassOfInstance
+import scala.reflect.internal.util.StringOps._
abstract class TreeCheckers extends Analyzer {
import global._
- private def classstr(x: AnyRef) = (x.getClass.getName split """\\.|\\$""").last
+ override protected def onTreeCheckerError(pos: Position, msg: String) {
+ if (settings.fatalWarnings)
+ currentUnit.warning(pos, "\n** Error during internal checking:\n" + msg)
+ }
+
+ case class DiffResult[T](lost: List[T], gained: List[T]) {
+ def isEmpty = lost.isEmpty && gained.isEmpty
+ def lost_s = if (lost.isEmpty) "" else lost.mkString("lost: ", ", ", "")
+ def gained_s = if (gained.isEmpty) "" else gained.mkString("gained: ", ", ", "")
+ override def toString = ojoin(lost_s, gained_s)
+ }
+
+ def diffList[T](xs: List[T], ys: List[T]): DiffResult[T] =
+ DiffResult(xs filterNot ys.contains, ys filterNot xs.contains)
+
+ def diffTrees(t1: Tree, t2: Tree): DiffResult[Tree] =
+ diffList(t1 filter (_ ne t1), t2 filter (_ ne t2))
+
+ def diffTemplates(t1: Template, t2: Template): String = {
+ val parents = diffList(t1.parents, t2.parents).toString match { case "" => "" case s => "parents " + s }
+ val stats = diffList(t1.body, t2.body).toString match { case "" => "" case s => "stats " + s }
+ oempty(parents, stats) mkString ", "
+ }
+
+ def diff(t1: Tree, t2: Tree): String = (t1, t2) match {
+ case (_: Literal, _: Literal) => ""
+ case (t1: ImplDef, t2: ImplDef) => diff(t1.impl, t2.impl)
+ case (t1: Template, t2: Template) => diffTemplates(t1, t2)
+ case _ => diffTrees(t1, t2).toString // "<error: different tree classes>"
+ }
+
+ private def clean_s(s: String) = s.replaceAllLiterally("scala.collection.", "s.c.")
private def typestr(x: Type) = " (tpe = " + x + ")"
- private def treestr(t: Tree) = t + " [" + classstr(t) + "]" + typestr(t.tpe)
+ private def treestr(t: Tree) = t + " [" + classString(t) + "]" + typestr(t.tpe)
private def ownerstr(s: Symbol) = "'" + s + "'" + s.locationString
private def wholetreestr(t: Tree) = nodeToString(t) + "\n"
+ private def truncate(str: String, len: Int): String = (
+ if (str.length <= len) str
+ else (str takeWhile (_ != '\n') take len - 3) + "..."
+ )
+ private def signature(sym: Symbol) = clean_s(sym match {
+ case null => "null"
+ case _: ClassSymbol => sym.name + ": " + sym.tpe_*
+ case _ => sym.defString
+ })
+ private def classString(x: Any) = x match {
+ case null => ""
+ case t: Tree => t.shortClass
+ case s: Symbol => s.shortSymbolClass
+ case x: AnyRef => shortClassOfInstance(x)
+ }
+ private def nonPackageOwners(s: Symbol) = s.ownerChain drop 1 takeWhile (!_.hasPackageFlag)
+ private def nonPackageOwnersPlusOne(s: Symbol) = nonPackageOwners(s) ::: (s.ownerChain dropWhile (!_.hasPackageFlag) take 1)
+ private def ownersString(s: Symbol) = nonPackageOwnersPlusOne(s) match {
+ case Nil => "NoSymbol"
+ case xs => xs mkString " -> "
+ }
private def beststr(t: Tree) = "<" + {
if (t.symbol != null && t.symbol != NoSymbol) "sym=" + ownerstr(t.symbol)
@@ -26,51 +79,55 @@ abstract class TreeCheckers extends Analyzer {
else t match {
case x: DefTree => "name=" + x.name
case x: RefTree => "reference=" + x.name
- case _ => "clazz=" + classstr(t)
+ case _ => "clazz=" + classString(t)
}
} + ">"
/** This is a work in progress, don't take it too seriously.
*/
object SymbolTracker extends Traverser {
- type PhaseMap = mutable.HashMap[Symbol, List[Tree]]
+ type PhaseMap = mutable.Map[Symbol, List[Tree]]
+ def symbolTreeMap[T <: Tree]() = mutable.Map[Symbol, List[T]]() withDefaultValue Nil
- val maps = ListBuffer[(Phase, PhaseMap)]()
- def prev = maps.init.last._2
- def latest = maps.last._2
- val defSyms = mutable.HashMap[Symbol, List[DefTree]]()
+ var maps: List[(Phase, PhaseMap)] = ((NoPhase, null)) :: Nil
+ def prev = maps.tail.head._2
+ def latest = maps.head._2
+ val defSyms = symbolTreeMap[DefTree]()
val newSyms = mutable.HashSet[Symbol]()
val movedMsgs = new ListBuffer[String]
def sortedNewSyms = newSyms.toList.distinct sortBy (_.name.toString)
- def inPrev(sym: Symbol) = {
- (maps.size >= 2) && (prev contains sym)
- }
- def record(sym: Symbol, tree: Tree) = {
- if (latest contains sym) latest(sym) = latest(sym) :+ tree
- else latest(sym) = List(tree)
+ def record(tree: Tree) {
+ val sym = tree.symbol
+ if ((sym eq null) || (sym eq NoSymbol)) return
- if (inPrev(sym)) {
- val prevTrees = prev(sym)
+ val prevMap = maps.tail.head._2
+ val prevTrees = if (prevMap eq null) Nil else prevMap(sym)
- if (prevTrees exists (t => (t eq tree) || (t.symbol == sym))) ()
- else if (prevTrees exists (_.symbol.owner == sym.owner.implClass)) {
- errorFn("Noticed " + ownerstr(sym) + " moving to implementation class.")
- }
- else {
- val s1 = (prevTrees map wholetreestr).sorted.distinct
- val s2 = wholetreestr(tree)
- if (s1 contains s2) ()
- else movedMsgs += ("\n** %s moved:\n** Previously:\n%s\n** Currently:\n%s".format(ownerstr(sym), s1 mkString ", ", s2))
- }
+ tree match {
+ case t: DefTree => defSyms(sym) ::= t
+ case _ =>
+ }
+
+ if (prevTrees.isEmpty)
+ newSyms += sym
+ else if (prevTrees exists (t => (t eq tree) || (t.symbol == sym)))
+ ()
+ else if (prevTrees exists (_.symbol.owner == sym.owner.implClass))
+ errorFn("Noticed " + ownerstr(sym) + " moving to implementation class.")
+ else {
+ val s1 = (prevTrees map wholetreestr).sorted.distinct
+ val s2 = wholetreestr(tree)
+ if (s1 contains s2) ()
+ else movedMsgs += ("\n** %s moved:\n** Previously:\n%s\n** Currently:\n%s".format(ownerstr(sym), s1 mkString ", ", s2))
}
- else newSyms += sym
}
+
def reportChanges(): Unit = {
// new symbols
if (newSyms.nonEmpty) {
informFn(newSyms.size + " new symbols.")
- val toPrint = if (settings.debug.value) sortedNewSyms mkString " " else ""
+ val toPrint = if (settings.debug) sortedNewSyms mkString " " else ""
newSyms.clear()
if (toPrint != "")
@@ -89,74 +146,63 @@ abstract class TreeCheckers extends Analyzer {
}
def check(ph: Phase, unit: CompilationUnit): Unit = {
- if (maps.isEmpty || maps.last._1 != ph)
- maps += ((ph, new PhaseMap))
-
+ maps match {
+ case ((`ph`, _)) :: _ =>
+ case _ => maps ::= ((ph, symbolTreeMap[Tree]()))
+ }
traverse(unit.body)
reportChanges()
}
- override def traverse(tree: Tree): Unit = {
- val sym = tree.symbol
- if (sym != null && sym != NoSymbol) {
- record(sym, tree)
- tree match {
- case x: DefTree =>
- if (defSyms contains sym) defSyms(sym) = defSyms(sym) :+ x
- else defSyms(sym) = List(x)
- case _ => ()
- }
- }
-
+ override def traverse(tree: Tree) {
+ record(tree)
super.traverse(tree)
}
}
lazy val tpeOfTree = mutable.HashMap[Tree, Type]()
+ private lazy val reportedAlready = mutable.HashSet[(Tree, Symbol)]()
+
+ def posstr(p: Position): String = (
+ if (p eq null) "" else {
+ try p.source.path + ":" + p.line
+ catch { case _: UnsupportedOperationException => p.toString }
+ }
+ )
+
- def posstr(p: Position) =
- try p.source.path + ":" + p.line
- catch { case _: UnsupportedOperationException => p.toString }
+ def errorFn(pos: Position, msg: Any): Unit = currentUnit.warning(pos, "[check: %s] %s".format(phase.prev, msg))
+ def errorFn(msg: Any): Unit = errorFn(NoPosition, msg)
- private var hasError: Boolean = false
- def errorFn(msg: Any): Unit = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))}
- def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
def informFn(msg: Any) {
- if (settings.verbose.value || settings.debug.value)
+ if (settings.verbose || settings.debug)
println("[check: %s] %s".format(phase.prev, msg))
}
def assertFn(cond: Boolean, msg: => Any) =
if (!cond) errorFn(msg)
- private def wrap[T](msg: => Any)(body: => Unit) {
+ private def wrap[T](msg: => Any)(body: => T): T = {
try body
catch { case x: Throwable =>
Console.println("Caught " + x)
Console.println(msg)
x.printStackTrace
+ null.asInstanceOf[T]
}
}
def checkTrees() {
- if (settings.verbose.value)
+ if (settings.verbose)
Console.println("[consistency check at the beginning of phase " + phase + "]")
currentRun.units foreach (x => wrap(x)(check(x)))
}
- def printingTypings[T](body: => T): T = {
- val saved = global.printTypings
- global.printTypings = true
- val result = body
- global.printTypings = saved
- result
- }
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
- hasError = false
val unit0 = currentUnit
currentRun.currentUnit = unit
body
- currentRun.advanceUnit
+ currentRun.advanceUnit()
assertFn(currentUnit == unit, "currentUnit is " + currentUnit + ", but unit is " + unit)
currentRun.currentUnit = unit0
}
@@ -164,35 +210,37 @@ abstract class TreeCheckers extends Analyzer {
informProgress("checking "+unit)
val context = rootContext(unit)
context.checking = true
- tpeOfTree.clear
+ tpeOfTree.clear()
SymbolTracker.check(phase, unit)
val checker = new TreeChecker(context)
runWithUnit(unit) {
checker.precheck.traverse(unit.body)
checker.typed(unit.body)
checker.postcheck.traverse(unit.body)
- if (hasError) unit.warning(NoPosition, "TreeCheckers detected non-compliant trees in " + unit)
}
}
override def newTyper(context: Context): Typer = new TreeChecker(context)
class TreeChecker(context0: Context) extends Typer(context0) {
- override protected def finishMethodSynthesis(templ: Template, clazz: Symbol, context: Context): Template = {
- // If we don't intercept this all the synthetics get added at every phase,
- // with predictably unfortunate results.
- templ
- }
+ // If we don't intercept this all the synthetics get added at every phase,
+ // with predictably unfortunate results.
+ override protected def finishMethodSynthesis(templ: Template, clazz: Symbol, context: Context): Template = templ
// XXX check for tree.original on TypeTrees.
- private def treesDiffer(t1: Tree, t2: Tree) =
- errorFn(t1.pos, "trees differ\n old: " + treestr(t1) + "\n new: " + treestr(t2))
+ private def treesDiffer(t1: Tree, t2: Tree): Unit = {
+ def len1 = t1.toString.length
+ def len2 = t2.toString.length
+ def name = t1 match {
+ case t: NameTree => t.name
+ case _ => t1.summaryString
+ }
+ def summary = s"${t1.shortClass} $name differs, bytes $len1 -> $len2, "
+ errorFn(t1.pos, summary + diff(t1, t2))
+ }
+
private def typesDiffer(tree: Tree, tp1: Type, tp2: Type) =
errorFn(tree.pos, "types differ\n old: " + tp1 + "\n new: " + tp2 + "\n tree: " + tree)
- private def ownersDiffer(tree: Tree, shouldBe: Symbol) = {
- val sym = tree.symbol
- errorFn(tree.pos, sym + " has wrong owner: " + ownerstr(sym.owner) + ", should be: " + ownerstr(shouldBe))
- }
/** XXX Disabled reporting of position errors until there is less noise. */
private def noPos(t: Tree) =
@@ -204,30 +252,46 @@ abstract class TreeCheckers extends Analyzer {
if (t.symbol == NoSymbol)
errorFn(t.pos, "no symbol: " + treestr(t))
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = returning(tree) {
- case EmptyTree | TypeTree() => ()
- case _ if tree.tpe != null =>
- tpeOfTree.getOrElseUpdate(tree, {
- val saved = tree.tpe
- tree.tpe = null
- saved
- })
- wrap(tree)(super.typed(tree, mode, pt) match {
- case _: Literal => ()
- case x if x ne tree => treesDiffer(tree, x)
- case _ => ()
- })
- case _ => ()
+ private def passThrough(tree: Tree) = tree match {
+ case EmptyTree | TypeTree() => true
+ case _ => tree.tpe eq null
+ }
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = (
+ if (passThrough(tree))
+ super.typed(tree, mode, pt)
+ else
+ checkedTyped(tree, mode, pt)
+ )
+ private def checkedTyped(tree: Tree, mode: Mode, pt: Type): Tree = {
+ val typed = wrap(tree)(super.typed(tree, mode, pt))
+
+ if (tree ne typed)
+ treesDiffer(tree, typed)
+
+ tree
}
object precheck extends TreeStackTraverser {
- override def traverse(tree: Tree) {
- checkSymbolRefsRespectScope(tree)
+ private var enclosingMemberDefs: List[MemberDef] = Nil
+ private def pushMemberDef[T](md: MemberDef)(body: => T): T = {
+ enclosingMemberDefs ::= md
+ try body finally enclosingMemberDefs = enclosingMemberDefs.tail
+ }
+ override def traverse(tree: Tree): Unit = tree match {
+ case md: MemberDef => pushMemberDef(md)(traverseInternal(tree))
+ case _ => traverseInternal(tree)
+ }
+
+ private def traverseInternal(tree: Tree) {
+ if (!tree.canHaveAttrs)
+ return
+
+ checkSymbolRefsRespectScope(enclosingMemberDefs takeWhile (md => !md.symbol.hasPackageFlag), tree)
checkReturnReferencesDirectlyEnclosingDef(tree)
val sym = tree.symbol
def accessed = sym.accessed
- def fail(msg: String) = errorFn(tree.pos, msg + classstr(tree) + " / " + tree)
+ def fail(msg: String) = errorFn(tree.pos, msg + tree.shortClass + " / " + tree)
tree match {
case DefDef(_, _, _, _, _, _) =>
@@ -236,7 +300,7 @@ abstract class TreeCheckers extends Analyzer {
case _: ConstantType => ()
case _ =>
checkSym(tree)
- /** XXX: lots of syms show up here with accessed == NoSymbol. */
+ /* XXX: lots of syms show up here with accessed == NoSymbol. */
if (accessed != NoSymbol) {
val agetter = accessed.getter(sym.owner)
val asetter = accessed.setter(sym.owner)
@@ -263,15 +327,14 @@ abstract class TreeCheckers extends Analyzer {
else if (currentOwner.ownerChain takeWhile (_ != sym) exists (_ == NoSymbol))
return fail("tree symbol "+sym+" does not point to enclosing class; tree = ")
- /** XXX: temporary while Import nodes are arriving untyped. */
+ /* XXX: temporary while Import nodes are arriving untyped. */
case Import(_, _) =>
return
case _ =>
}
-
- if (tree.pos == NoPosition && tree != EmptyTree)
+ if (tree.pos == NoPosition)
noPos(tree)
- else if (tree.tpe == null && phase.id > currentRun.typerPhase.id)
+ else if (tree.tpe == null && isPastTyper)
noType(tree)
else if (tree.isDef) {
checkSym(tree)
@@ -284,7 +347,7 @@ abstract class TreeCheckers extends Analyzer {
def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner
if (sym.owner != currentOwner) {
- val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse fail("DefTree can't find owner: ")
+ val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse { fail("DefTree can't find owner: ") ; NoSymbol }
if (sym.owner != expected)
fail(sm"""|
| currentOwner chain: ${currentOwner.ownerChain take 3 mkString " -> "}
@@ -296,58 +359,87 @@ abstract class TreeCheckers extends Analyzer {
super.traverse(tree)
}
- private def checkSymbolRefsRespectScope(tree: Tree) {
- def symbolOf(t: Tree): Symbol = Option(tree.symbol).getOrElse(NoSymbol)
- def definedSymbolOf(t: Tree): Symbol = if (t.isDef) symbolOf(t) else NoSymbol
- val info = Option(symbolOf(tree).info).getOrElse(NoType)
- val referencedSymbols: List[Symbol] = {
- val directRef = tree match {
- case _: RefTree => symbolOf(tree).toOption
- case _ => None
+ private def checkSymbolRefsRespectScope(enclosingMemberDefs: List[MemberDef], tree: Tree) {
+ def symbolOf(t: Tree): Symbol = if (t.symbol eq null) NoSymbol else t.symbol
+ def typeOf(t: Tree): Type = if (t.tpe eq null) NoType else t.tpe
+ def infoOf(t: Tree): Type = symbolOf(t).info
+ def referencesInType(tp: Type) = tp collect { case TypeRef(_, sym, _) => sym }
+ // Accessors are known to steal the type of the underlying field without cloning existential symbols at the new owner.
+ // This happens in Namer#accessorTypeCompleter. We just look the other way here.
+ if (symbolOf(tree).isAccessor)
+ return
+
+ val treeSym = symbolOf(tree)
+ val treeInfo = infoOf(tree)
+ val treeTpe = typeOf(tree)
+
+ def isOk(sym: Symbol) = treeSym hasTransOwner sym.enclosingSuchThat(x => !x.isTypeParameterOrSkolem) // account for higher order type params
+ def isEligible(sym: Symbol) = (sym ne NoSymbol) && (
+ sym.isTypeParameter
+ || sym.isLocal
+ )
+ val referencedSymbols = (treeSym :: referencesInType(treeInfo)).distinct filter (sym => isEligible(sym) && !isOk(sym))
+ def mk[T](what: String, x: T, str: T => String = (x: T) => "" + x): ((Any, String)) =
+ x -> s"%10s %-20s %s".format(what, classString(x), truncate(str(x), 80).trim)
+
+ def encls = enclosingMemberDefs.filterNot(_.symbol == treeSym).zipWithIndex map { case (md, i) => mk(s"encl(${i+1})", md.symbol, signature) }
+
+ def mkErrorMsg(outOfScope: Symbol): String = {
+
+ def front = List(
+ mk[Tree]("tree", tree),
+ mk[Position]("position", tree.pos, posstr),
+ mk("with sym", treeSym, signature)
+ )
+ def tpes = treeTpe match {
+ case NoType => Nil
+ case _ => mk[Type]("and tpe", treeTpe) :: Nil
+ }
+ def ref = mk[Symbol]("ref to", outOfScope, (s: Symbol) => s.nameString + " (" + s.debugFlagString + ")")
+
+ val pairs = front ++ tpes ++ encls ++ (ref :: Nil)
+ val width = pairs.map(_._2.length).max
+ val fmt = "%-" + width + "s"
+ val lines = pairs map {
+ case (s: Symbol, msg) => fmt.format(msg) + " in " + ownersString(s)
+ case (x, msg) => fmt.format(msg)
}
- def referencedSyms(tp: Type) = (tp collect {
- case TypeRef(_, sym, _) => sym
- }).toList
- val indirectRefs = referencedSyms(info)
- (indirectRefs ++ directRef).distinct
+ lines.mkString("Out of scope symbol reference {\n", "\n", "\n}")
}
- for {
- sym <- referencedSymbols
- // Accessors are known to steal the type of the underlying field without cloning existential symbols at the new owner.
- // This happens in Namer#accessorTypeCompleter. We just look the other way here.
- if !tree.symbol.isAccessor
- if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
- } errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to a out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
+
+ referencedSymbols foreach (sym =>
+ if (!reportedAlready((tree, sym))) {
+ errorFn("\n" + mkErrorMsg(sym))
+ reportedAlready += ((tree, sym))
+ }
+ )
}
- private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree) {
- tree match {
- case _: Return =>
- path.collectFirst {
- case dd: DefDef => dd
- } match {
- case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
- case Some(dd) =>
- if (tree.symbol != dd.symbol) errorFn(s"Return symbol (${tree.symbol}} does not reference directly enclosing DefDef (${dd.symbol})")
- }
- case _ =>
- }
+ private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree): Unit = tree match {
+ case _: Return =>
+ path collectFirst { case dd: DefDef => dd } match {
+ case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
+ case Some(dd) if tree.symbol != dd.symbol => errorFn(s"Return symbol (${tree.symbol}) does not reference directly enclosing DefDef (${dd.symbol})")
+ case _ =>
+ }
+ case _ =>
}
}
object postcheck extends Traverser {
- override def traverse(tree: Tree) {
- tree match {
- case EmptyTree | TypeTree() => ()
- case _ =>
- tpeOfTree get tree foreach { oldtpe =>
- if (oldtpe =:= tree.tpe) ()
- else typesDiffer(tree, oldtpe, tree.tpe)
-
- tree.tpe = oldtpe
- super.traverse(tree)
- }
- }
+ override def traverse(tree: Tree): Unit = tree match {
+ case EmptyTree | TypeTree() => ()
+ case _ =>
+ tpeOfTree get tree foreach { oldtpe =>
+ if (tree.tpe eq null)
+ errorFn(s"tree.tpe=null for " + tree.shortClass + " (symbol: " + classString(tree.symbol) + " " + signature(tree.symbol) + "), last seen tpe was " + oldtpe)
+ else if (oldtpe =:= tree.tpe)
+ ()
+ else
+ typesDiffer(tree, oldtpe, tree.tpe)
+
+ super.traverse(tree setType oldtpe)
+ }
}
}
}
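
A standalone rendition of the DiffResult/diffList helpers added above, with a worked example; TreeCheckers applies the same logic to trees and template stats rather than strings.

    case class DiffResult[T](lost: List[T], gained: List[T])

    def diffList[T](xs: List[T], ys: List[T]): DiffResult[T] =
      DiffResult(xs filterNot ys.contains, ys filterNot xs.contains)

    // diffList(List("a", "b", "c"), List("b", "c", "d"))
    //   == DiffResult(lost = List("a"), gained = List("d"))
    // (the TreeCheckers version also renders the non-empty "lost: ..." and
    //  "gained: ..." parts via ojoin in its toString)
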
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 2270e812eb..b801b644fb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -8,7 +8,6 @@ package typechecker
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlThrowable
import scala.util.control.Exception.ultimately
import symtab.Flags._
import PartialFunction._
@@ -37,15 +36,6 @@ trait TypeDiagnostics {
import global._
import definitions._
- import global.typer.{ infer, context }
-
- /** The common situation of making sure nothing is erroneous could be
- * nicer if Symbols, Types, and Trees all implemented some common interface
- * in which isErroneous and similar would be placed.
- */
- def noErroneousTypes(tps: Type*) = tps forall (x => !x.isErroneous)
- def noErroneousSyms(syms: Symbol*) = syms forall (x => !x.isErroneous)
- def noErroneousTrees(trees: Tree*) = trees forall (x => !x.isErroneous)
/** For errors which are artifacts of the implementation: such messages
* indicate that the restriction may be lifted in the future.
@@ -58,7 +48,7 @@ trait TypeDiagnostics {
/** A map of Positions to addendums - if an error involves a position in
* the map, the addendum should also be printed.
*/
- private var addendums = perRunCaches.newMap[Position, () => String]()
+ private val addendums = perRunCaches.newMap[Position, () => String]()
private var isTyperInPattern = false
/** Devising new ways of communicating error info out of
@@ -119,6 +109,22 @@ trait TypeDiagnostics {
case x => x.toString
}
+ /**
+ * [a, b, c] => "(a, b, c)"
+ * [a, B] => "(param1, param2)"
+ * [a, B, c] => "(param1, ..., param3)"
+ */
+ final def exampleTuplePattern(names: List[Name]): String = {
+ val arity = names.length
+ val varPatterNames: Option[List[String]] = sequence(names map {
+ case name if nme.isVariableName(name) => Some(name.decode)
+ case _ => None
+ })
+ def parenthesize(a: String) = s"($a)"
+ def genericParams = (Seq("param1") ++ (if (arity > 2) Seq("...") else Nil) ++ Seq(s"param$arity"))
+ parenthesize(varPatterNames.getOrElse(genericParams).mkString(", "))
+ }
+
def alternatives(tree: Tree): List[Type] = tree.tpe match {
case OverloadedType(pre, alternatives) => alternatives map pre.memberType
case _ => Nil
@@ -136,7 +142,7 @@ trait TypeDiagnostics {
else if (!member.isDeferred) member.accessed
else {
val getter = if (member.isSetter) member.getter(member.owner) else member
- val flags = if (getter.setter(member.owner) != NoSymbol) DEFERRED | MUTABLE else DEFERRED
+ val flags = if (getter.setter(member.owner) != NoSymbol) DEFERRED.toLong | MUTABLE else DEFERRED
getter.owner.newValue(getter.name.toTermName, getter.pos, flags) setInfo getter.tpe.resultType
}
@@ -153,7 +159,7 @@ trait TypeDiagnostics {
def defaultMessage = moduleMessage + preResultString + tree.tpe
def applyMessage = defaultMessage + tree.symbol.locationString
- if ((sym eq null) || (sym eq NoSymbol)) {
+ if (!tree.hasExistingSymbol) {
if (isTyperInPattern) patternMessage
else exprMessage
}
@@ -174,18 +180,13 @@ trait TypeDiagnostics {
case xs => " where " + (disambiguate(xs map (_.existentialToString)) mkString ", ")
}
- def varianceWord(sym: Symbol): String =
- if (sym.variance == 1) "covariant"
- else if (sym.variance == -1) "contravariant"
- else "invariant"
-
def explainAlias(tp: Type) = {
// Don't automatically normalize standard aliases; they still will be
// expanded if necessary to disambiguate simple identifiers.
- if ((tp eq tp.normalize) || tp.typeSymbolDirect.isInDefaultNamespace) ""
- else {
+ val deepDealias = DealiasedType(tp)
+ if (tp eq deepDealias) "" else {
// A sanity check against expansion being identical to original.
- val s = "" + DealiasedType(tp)
+ val s = "" + deepDealias
if (s == "" + tp) ""
else "\n (which expands to) " + s
}
@@ -223,12 +224,12 @@ trait TypeDiagnostics {
// force measures than comparing normalized Strings were producing error messages
// like "and java.util.ArrayList[String] <: java.util.ArrayList[String]" but there
// should be a cleaner way to do this.
- if (found.normalize.toString == tp.normalize.toString) ""
+ if (found.dealiasWiden.toString == tp.dealiasWiden.toString) ""
else " (and %s <: %s)".format(found, tp)
)
val explainDef = {
val prepend = if (isJava) "Java-defined " else ""
- "%s%s is %s in %s.".format(prepend, reqsym, varianceWord(param), param)
+ "%s%s is %s in %s.".format(prepend, reqsym, param.variance, param)
}
// Don't suggest they change the class declaration if it's somewhere
// under scala.* or defined in a java class, because attempting either
@@ -248,11 +249,11 @@ trait TypeDiagnostics {
|| ((arg <:< reqArg) && param.isCovariant)
|| ((reqArg <:< arg) && param.isContravariant)
)
- val invariant = param.variance == 0
+ val invariant = param.variance.isInvariant
if (conforms) Some("")
- else if ((arg <:< reqArg) && invariant) mkMsg(true) // covariant relationship
- else if ((reqArg <:< arg) && invariant) mkMsg(false) // contravariant relationship
+ else if ((arg <:< reqArg) && invariant) mkMsg(isSubtype = true) // covariant relationship
+ else if ((reqArg <:< arg) && invariant) mkMsg(isSubtype = false) // contravariant relationship
else None // we assume in other cases our ham-fisted advice will merely serve to confuse
}
val messages = relationships.flatten
@@ -268,7 +269,7 @@ trait TypeDiagnostics {
// For found/required errors where AnyRef would have sufficed:
// explain in greater detail.
def explainAnyVsAnyRef(found: Type, req: Type): String = {
- if (AnyRefClass.tpe <:< req) notAnyRefMessage(found) else ""
+ if (AnyRefTpe <:< req) notAnyRefMessage(found) else ""
}
// TODO - figure out how to avoid doing any work at all
@@ -300,8 +301,8 @@ trait TypeDiagnostics {
case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
})+ "`"
- "\nNote: if you intended to match against the class, try "+ caseString
-
+ if (!clazz.exists) ""
+ else "\nNote: if you intended to match against the class, try "+ caseString
}
case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
@@ -309,7 +310,6 @@ trait TypeDiagnostics {
// distinguished from the other types in the same error message
private val savedName = sym.name
def restoreName() = sym.name = savedName
- def isAltered = sym.name != savedName
def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString))
/** Prepend java.lang, scala., or Predef. if this type originated
@@ -442,6 +442,122 @@ trait TypeDiagnostics {
def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) =
contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
+ object checkUnused {
+ val ignoreNames = Set[TermName]("readResolve", "readObject", "writeObject", "writeReplace")
+
+ class UnusedPrivates extends Traverser {
+ val defnTrees = ListBuffer[MemberDef]()
+ val targets = mutable.Set[Symbol]()
+ val setVars = mutable.Set[Symbol]()
+ val treeTypes = mutable.Set[Type]()
+
+ def defnSymbols = defnTrees.toList map (_.symbol)
+ def localVars = defnSymbols filter (t => t.isLocal && t.isVar)
+
+ def qualifiesTerm(sym: Symbol) = (
+ (sym.isModule || sym.isMethod || sym.isPrivateLocal || sym.isLocal)
+ && !nme.isLocalName(sym.name)
+ && !sym.isParameter
+ && !sym.isParamAccessor // could improve this, but it's a pain
+ && !sym.isEarlyInitialized // lots of false positives in the way these are encoded
+ && !(sym.isGetter && sym.accessed.isEarlyInitialized)
+ )
+ def qualifiesType(sym: Symbol) = !sym.isDefinedInPackage
+ def qualifies(sym: Symbol) = (
+ (sym ne null)
+ && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym))
+ )
+
+ override def traverse(t: Tree): Unit = {
+ t match {
+ case t: MemberDef if qualifies(t.symbol) => defnTrees += t
+ case t: RefTree if t.symbol ne null => targets += t.symbol
+ case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol
+ case _ =>
+ }
+ // Only record type references which don't originate within the
+ // definition of the class being referenced.
+ if (t.tpe ne null) {
+ for (tp <- t.tpe ; if !treeTypes(tp) && !currentOwner.ownerChain.contains(tp.typeSymbol)) {
+ tp match {
+ case NoType | NoPrefix =>
+ case NullaryMethodType(_) =>
+ case MethodType(_, _) =>
+ case _ =>
+ log(s"$tp referenced from $currentOwner")
+ treeTypes += tp
+ }
+ }
+ // e.g. val a = new Foo ; new a.Bar ; don't let a be reported as unused.
+ t.tpe.prefix foreach {
+ case SingleType(_, sym) => targets += sym
+ case _ =>
+ }
+ }
+ super.traverse(t)
+ }
+ def isUnusedType(m: Symbol): Boolean = (
+ m.isType
+ && !m.isTypeParameterOrSkolem // would be nice to improve this
+ && (m.isPrivate || m.isLocal)
+ && !(treeTypes.exists(tp => tp exists (t => t.typeSymbolDirect == m)))
+ )
+ def isUnusedTerm(m: Symbol): Boolean = (
+ (m.isTerm)
+ && (m.isPrivate || m.isLocal)
+ && !targets(m)
+ && !(m.name == nme.WILDCARD) // e.g. val _ = foo
+ && !ignoreNames(m.name.toTermName) // serialization methods
+ && !isConstantType(m.info.resultType) // subject to constant inlining
+ && !treeTypes.exists(_ contains m) // e.g. val a = new Foo ; new a.Bar
+ )
+ def unusedTypes = defnTrees.toList filter (t => isUnusedType(t.symbol))
+ def unusedTerms = defnTrees.toList filter (v => isUnusedTerm(v.symbol))
+ // local vars which are never set, except those already returned in unused
+ def unsetVars = localVars filter (v => !setVars(v) && !isUnusedTerm(v))
+ }
+
+ def apply(unit: CompilationUnit) = {
+ warnUnusedImports(unit)
+
+ val p = new UnusedPrivates
+ p traverse unit.body
+ val unused = p.unusedTerms
+ unused foreach { defn: DefTree =>
+ val sym = defn.symbol
+ val isDefaultGetter = sym.name containsName nme.DEFAULT_GETTER_STRING
+ val pos = (
+ if (defn.pos.isDefined) defn.pos
+ else if (sym.pos.isDefined) sym.pos
+ else sym match {
+ case sym: TermSymbol => sym.referenced.pos
+ case _ => NoPosition
+ }
+ )
+ val why = if (sym.isPrivate) "private" else "local"
+ val what = (
+ if (isDefaultGetter) "default argument"
+ else if (sym.isConstructor) "constructor"
+ else if (sym.isVar || sym.isGetter && sym.accessed.isVar) "var"
+ else if (sym.isVal || sym.isGetter && sym.accessed.isVal) "val"
+ else if (sym.isSetter) "setter"
+ else if (sym.isMethod) "method"
+ else if (sym.isModule) "object"
+ else "term"
+ )
+ unit.warning(pos, s"$why $what in ${sym.owner} is never used")
+ }
+ p.unsetVars foreach { v =>
+ unit.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val")
+ }
+ p.unusedTypes foreach { t =>
+ val sym = t.symbol
+ val why = if (sym.isPrivate) "private" else "local"
+ unit.warning(t.pos, s"$why ${sym.fullLocationString} is never used")
+ }
+ }
+ }
+
object checkDead {
private val exprStack: mutable.Stack[Symbol] = mutable.Stack(NoSymbol)
// The method being applied to `tree` when `apply` is called.
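
The kind of code the new UnusedPrivates traverser reports, for reference. The warning texts in the comments paraphrase the unit.warning calls in the hunk above, the enabling compiler flag is not part of this hunk, and `Example` is a made-up class.

    class Example {
      private val cache = Map.empty[String, Int] // ~ "private val in class Example is never used"

      def run(): Int = {
        val tmp = 42    // ~ "local val in method run is never used"
        var counter = 0 // read but never reassigned ~ "local var counter ... is never set - it could be a val"
        counter
      }
    }
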
@@ -466,17 +582,13 @@ trait TypeDiagnostics {
// Error suppression will squash some of these warnings unless we circumvent it.
// It is presumed if you are using a -Y option you would really like to hear
// the warnings you've requested.
- if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK)
- context.warning(tree.pos, "dead code following this construct", true)
+ if (settings.warnDeadCode && context.unit.exists && treeOK(tree) && exprOK)
+ context.warning(tree.pos, "dead code following this construct", force = true)
tree
}
// The checkDead call from typedArg is more selective.
- def inMode(mode: Int, tree: Tree): Tree = {
- val modeOK = (mode & (EXPRmode | BYVALmode | POLYmode)) == (EXPRmode | BYVALmode)
- if (modeOK) apply(tree)
- else tree
- }
+ def inMode(mode: Mode, tree: Tree): Tree = if (mode.typingMonoExprByValue) apply(tree) else tree
}
private def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded
@@ -497,7 +609,7 @@ trait TypeDiagnostics {
/** Report a type error.
*
- * @param pos0 The position where to report the error
+ * @param pos The position where to report the error
* @param ex The exception that caused the error
*/
def reportTypeError(context0: Context, pos: Position, ex: TypeError) {
@@ -506,7 +618,7 @@ trait TypeDiagnostics {
// but it seems that throwErrors excludes some of the errors that should actually be
// buffered, causing TypeErrors to fly around again. This needs some more investigation.
if (!context0.reportErrors) throw ex
- if (settings.debug.value) ex.printStackTrace()
+ if (settings.debug) ex.printStackTrace()
ex match {
case CyclicReference(sym, info: TypeCompleter) =>
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
index 60399f53bf..cb1f1f4568 100644
--- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
@@ -4,7 +4,7 @@
*/
package scala.tools.nsc
-package interpreter
+package typechecker
import java.lang.{ reflect => r }
import r.TypeVariable
@@ -12,16 +12,12 @@ import scala.reflect.NameTransformer
import NameTransformer._
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
-import typechecker.DestructureTypes
-import scala.reflect.internal.util.StringOps.ojoin
-import scala.language.implicitConversions
/** A more principled system for turning types into strings.
*/
trait StructuredTypeStrings extends DestructureTypes {
val global: Global
import global._
- import definitions._
case class LabelAndType(label: String, typeName: String) { }
object LabelAndType {
@@ -33,13 +29,11 @@ trait StructuredTypeStrings extends DestructureTypes {
else elems.mkString(ldelim, mdelim, rdelim)
)
}
- val NoGrouping = Grouping("", "", "", false)
- val ListGrouping = Grouping("(", ", ", ")", false)
- val ProductGrouping = Grouping("(", ", ", ")", true)
- val ParamGrouping = Grouping("(", ", ", ")", true)
- val BlockGrouping = Grouping(" { ", "; ", "}", false)
+ val NoGrouping = Grouping("", "", "", labels = false)
+ val ListGrouping = Grouping("(", ", ", ")", labels = false)
+ val ProductGrouping = Grouping("(", ", ", ")", labels = true)
+ val BlockGrouping = Grouping(" { ", "; ", "}", labels = false)
- private implicit def lowerName(n: Name): String = "" + n
private def str(level: Int)(body: => String): String = " " * level + body
private def block(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
val l1 = str(level)(name + grouping.ldelim)
@@ -49,7 +43,6 @@ trait StructuredTypeStrings extends DestructureTypes {
l1 +: l2 :+ l3 mkString "\n"
}
private def maybeBlock(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
- import grouping._
val threshold = 70
val try1 = str(level)(name + grouping.join(nodes map (_.show(0, grouping.labels)): _*))
@@ -57,10 +50,9 @@ trait StructuredTypeStrings extends DestructureTypes {
else block(level, grouping)(name, nodes)
}
private def shortClass(x: Any) = {
- if (opt.debug) {
+ if (settings.debug) {
val name = (x.getClass.getName split '.').last
- val isAnon = name.reverse takeWhile (_ != '$') forall (_.isDigit)
- val str = if (isAnon) name else (name split '$').last
+ val str = if (TypeStrings.isAnonClass(x.getClass)) name else (name split '$').last
" // " + str
}
@@ -72,7 +64,7 @@ trait StructuredTypeStrings extends DestructureTypes {
def nodes: List[TypeNode]
def show(indent: Int, showLabel: Boolean): String = maybeBlock(indent, grouping)(mkPrefix(showLabel), nodes)
- def show(indent: Int): String = show(indent, true)
+ def show(indent: Int): String = show(indent, showLabel = true)
def show(): String = show(0)
def withLabel(l: String): this.type = modifyNameInfo(_.copy(label = l))
@@ -120,11 +112,6 @@ trait StructuredTypeStrings extends DestructureTypes {
override def emptyTypeName = "Nil"
override def typeName = "List"
}
- class TypeScope(nodes: List[TypeNode]) extends TypeProduct(nodes) {
- override def grouping = BlockGrouping
- override def typeName = "Scope"
- override def emptyTypeName = "EmptyScope"
- }
object TypeEmpty extends TypeNode {
override def grouping = NoGrouping
@@ -146,7 +133,7 @@ trait StructuredTypeStrings extends DestructureTypes {
def wrapAtom[U](value: U) = new TypeAtom(value)
}
- def show(tp: Type): String = intoNodes(tp).show
+ def show(tp: Type): String = intoNodes(tp).show()
}
@@ -158,11 +145,11 @@ trait StructuredTypeStrings extends DestructureTypes {
* "definition" is when you want strings like
*/
trait TypeStrings {
+ private type JClass = java.lang.Class[_]
private val ObjectClass = classOf[java.lang.Object]
private val primitives = Set[String]("byte", "char", "short", "int", "long", "float", "double", "boolean", "void")
- private val primitiveMap = primitives.toList map { x =>
+ private val primitiveMap = (primitives.toList map { x =>
val key = x match {
- case "void" => "Void"
case "int" => "Integer"
case "char" => "Character"
case s => s.capitalize
@@ -173,7 +160,12 @@ trait TypeStrings {
}
("java.lang." + key) -> ("scala." + value)
- } toMap
+ }).toMap
+
+ def isAnonClass(cl: Class[_]) = {
+ val xs = cl.getName.reverse takeWhile (_ != '$')
+ xs.nonEmpty && xs.forall(_.isDigit)
+ }
def scalaName(s: String): String = {
if (s endsWith MODULE_SUFFIX_STRING) s.init + ".type"
@@ -184,18 +176,16 @@ trait TypeStrings {
// Trying to put humpty dumpty back together again.
def scalaName(clazz: JClass): String = {
val name = clazz.getName
- val isAnon = clazz.isScalaAnonymous
val enclClass = clazz.getEnclosingClass
def enclPre = enclClass.getName + MODULE_SUFFIX_STRING
def enclMatch = name startsWith enclPre
scalaName(
- if (enclClass == null || isAnon || !enclMatch) name
+ if (enclClass == null || isAnonClass(clazz) || !enclMatch) name
else enclClass.getName + "." + (name stripPrefix enclPre)
)
}
- def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass)
- def anyClass(x: Any): JClass = if (x == null) null else x.getClass
+ def anyClass(x: Any): JClass = if (x == null) null else x.getClass
private def brackets(tps: String*): String =
if (tps.isEmpty) ""
@@ -212,14 +202,8 @@ trait TypeStrings {
}
private def tparamString[T: ru.TypeTag] : String = {
- def typeArguments: List[ru.Type] = {
- import ru.TypeRefTag // otherwise the pattern match will be unchecked
- // because TypeRef is an abstract type
- ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
- }
- // [Eugene to Paul] need to use not the `rootMirror`, but a mirror with the REPL's classloader
- // how do I get to it? acquiring context classloader seems unreliable because of multithreading
- def typeVariables: List[java.lang.Class[_]] = typeArguments map (targ => ru.rootMirror.runtimeClass(targ))
+ import ru._ // get TypeRefTag in scope so that pattern match works (TypeRef is an abstract type)
+ def typeArguments: List[ru.Type] = ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
brackets(typeArguments map (jc => tvarString(List(jc))): _*)
}
@@ -231,7 +215,6 @@ trait TypeStrings {
* practice to rely on toString for correctness) generated the VALID string
* representation of the type.
*/
- def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T]
def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value))
def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz)
def fromTag[T: ru.TypeTag : ClassTag] : String = scalaName(classTag[T].runtimeClass) + tparamString[T]
@@ -251,13 +234,6 @@ trait TypeStrings {
case (res, (k, v)) => res.replaceAll(k, v)
}
}
-
- val typeTransforms = List(
- "java.lang." -> "",
- "scala.collection.immutable." -> "immutable.",
- "scala.collection.mutable." -> "mutable.",
- "scala.collection.generic." -> "generic."
- )
}
object TypeStrings extends TypeStrings { }
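
A standalone copy of the new isAnonClass check with a couple of worked cases; JVM anonymous classes get names ending in "$<digits>", which is what the reversed takeWhile detects.

    object AnonCheck {
      def isAnonClass(cl: Class[_]): Boolean = {
        val xs = cl.getName.reverse takeWhile (_ != '$')
        xs.nonEmpty && xs.forall(_.isDigit)
      }

      // isAnonClass((new Runnable { def run() = () }).getClass) // true: name like "...$$anon$1"
      // isAnonClass(classOf[String])                            // false: "java.lang.String"
    }
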
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 40313bdb5d..5d0d5392dd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -9,13 +9,15 @@
// Added: Thu Apr 12 18:23:58 2007
//todo: disallow C#D in superclass
//todo: treat :::= correctly
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
-import scala.collection.mutable
-import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance }
import mutable.ListBuffer
import symtab.Flags._
+import Mode._
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -24,16 +26,15 @@ import symtab.Flags._
* @author Martin Odersky
* @version 1.0
*/
-trait Typers extends Modes with Adaptations with Tags {
+trait Typers extends Adaptations with Tags with TypersTracking with PatternTypers {
self: Analyzer =>
import global._
import definitions._
import TypersStats._
- final def forArgMode(fun: Tree, mode: Int) =
- if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
- else mode
+ final def forArgMode(fun: Tree, mode: Mode) =
+ if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode
// namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
// is cached here and re-used in typedDefDef / typedValDef
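
Throughout this file the patch replaces the raw `mode: Int` bit mask (and the Modes mixin) with the `Mode` type brought in via `import Mode._`, so call sites can say `mode.inFunMode` or `mode.typingExprNotLhs` instead of spelling out bit arithmetic. A rough, self-contained sketch of that pattern; the flag values and predicate names below are illustrative, not the compiler's actual Mode definitions:

  final class Mode private (val bits: Int) extends AnyVal {
    def |(that: Mode): Mode               = new Mode(bits | that.bits)
    def inAll(required: Mode): Boolean    = (bits & required.bits) == required.bits
    def inNone(prohibited: Mode): Boolean = (bits & prohibited.bits) == 0
    // corresponds to checks that used to read ((mode & (EXPRmode | LHSmode)) == EXPRmode)
    def typingExprNotLhs: Boolean = inAll(Mode.EXPRmode) && inNone(Mode.LHSmode)
    def typingExprNotFun: Boolean = inAll(Mode.EXPRmode) && inNone(Mode.FUNmode)
  }
  object Mode {
    val NOmode   = new Mode(0x000)
    val EXPRmode = new Mode(0x001)
    val FUNmode  = new Mode(0x010)
    val LHSmode  = new Mode(0x400)
  }

  object ModeDemo extends App {
    import Mode._
    println((EXPRmode | FUNmode).typingExprNotFun) // false: FUNmode is set
    println(EXPRmode.typingExprNotLhs)             // true
  }

The named predicates keep the bit twiddling in one place, which is what lets the hunks below shrink so many multi-line mode conditions to a single call.
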
@@ -42,70 +43,84 @@ trait Typers extends Modes with Adaptations with Tags {
final val shortenImports = false
+ // allows override of the behavior of the resetTyper method w.r.t. comments
+ def resetDocComments() = {
+ clearDocComments()
+ }
+
def resetTyper() {
//println("resetTyper called")
resetContexts()
resetImplicits()
transformed.clear()
- clearDocComments()
+ resetDocComments()
}
- object UnTyper extends Traverser {
- override def traverse(tree: Tree) = {
- if (tree != EmptyTree) tree.tpe = null
- if (tree.hasSymbol) tree.symbol = NoSymbol
- super.traverse(tree)
+ sealed abstract class SilentResult[+T] {
+ @inline final def fold[U](none: => U)(f: T => U): U = this match {
+ case SilentResultValue(value) => f(value)
+ case _ => none
+ }
+ @inline final def map[U](f: T => U): SilentResult[U] = this match {
+ case SilentResultValue(value) => SilentResultValue(f(value))
+ case x: SilentTypeError => x
}
+ @inline final def filter(p: T => Boolean): SilentResult[T] = this match {
+ case SilentResultValue(value) if !p(value) => SilentTypeError(TypeErrorWrapper(new TypeError(NoPosition, "!p")))
+ case _ => this
}
-/* needed for experimental version where early types can be type arguments
- class EarlyMap(clazz: Symbol) extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(NoPrefix, sym, List()) if (sym hasFlag PRESUPER) =>
- TypeRef(ThisType(clazz), sym, List())
- case _ =>
- mapOver(tp)
+ @inline final def orElse[T1 >: T](f: Seq[AbsTypeError] => T1): T1 = this match {
+ case SilentResultValue(value) => value
+ case s : SilentTypeError => f(s.reportableErrors)
}
}
-*/
+ class SilentTypeError private(val errors: List[AbsTypeError]) extends SilentResult[Nothing] {
+ def err: AbsTypeError = errors.head
+ def reportableErrors = errors match {
+ case (e1: AmbiguousImplicitTypeError) +: _ =>
+ List(e1) // DRYer error reporting for neg/t6436b.scala
+ case all =>
+ all
+ }
+ }
+ object SilentTypeError {
+ def apply(errors: AbsTypeError*): SilentTypeError = new SilentTypeError(errors.toList)
+ def unapply(error: SilentTypeError): Option[AbsTypeError] = error.errors.headOption
+ }
- sealed abstract class SilentResult[+T]
- case class SilentTypeError(err: AbsTypeError) extends SilentResult[Nothing] { }
case class SilentResultValue[+T](value: T) extends SilentResult[T] { }
def newTyper(context: Context): Typer = new NormalTyper(context)
+
private class NormalTyper(context : Context) extends Typer(context)
// A transient flag to mark members of anonymous classes
// that are turned private by typedBlock
private final val SYNTHETIC_PRIVATE = TRANS_FLAG
- private def isPastTyper = phase.id > currentRun.typerPhase.id
-
- // To enable decent error messages when the typer crashes.
- // TODO - this only catches trees which go through def typed,
- // but there are all kinds of back ways - typedClassDef, etc. etc.
- // Funnel everything through one doorway.
- var lastTreeToTyper: Tree = EmptyTree
-
- // when true:
- // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
- // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
- // this is disabled by: -Xoldpatmat or interactive compilation (we run it for scaladoc due to SI-5933)
- private def newPatternMatching = opt.virtPatmat && !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
+ private final val InterpolatorCodeRegex = """\$\{.*?\}""".r
+ private final val InterpolatorIdentRegex = """\$[$\w]+""".r // note that \w doesn't include $
- abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
+ abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors {
import context0.unit
- import typeDebug.{ ptTree, ptBlock, ptLine }
+ import typeDebug.{ ptTree, ptBlock, ptLine, inGreen, inRed }
import TyperErrorGen._
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
val infer = new Inferencer(context0) {
- override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281
- tp.isError || pt.isError ||
- context0.implicitsEnabled && // this condition prevents chains of views
- inferView(EmptyTree, tp, pt, false) != EmptyTree
- }
+ // See SI-3281 re undoLog
+ override def isCoercible(tp: Type, pt: Type) = undoLog undo viewExists(tp, pt)
}
+ /** Overridden to false in scaladoc and/or interactive. */
+ def canAdaptConstantTypeToLiteral = true
+ def canTranslateEmptyListToNil = true
+ def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree
+
+ def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree =
+ typed(docDef.definition, mode, pt)
+
/** Find implicit arguments and pass them to given tree.
*/
def applyImplicitArgs(fun: Tree): Tree = fun.tpe match {
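
The enriched SilentResult above turns the typer's silent-mode plumbing into a small combinator API: callers chain `fold`, `map`, `filter` and `orElse` on the result of `silent(...)` instead of pattern matching on SilentResultValue/SilentTypeError at every call site (see, for example, the adaptToArguments and adaptToMemberWithArgs hunks further down). A stripped-down sketch of the same shape, outside the compiler and with strings standing in for AbsTypeError:

  sealed abstract class SilentResult[+T] {
    def map[U](f: T => U): SilentResult[U] = this match {
      case SilentValue(v) => SilentValue(f(v))
      case e: SilentError => e
    }
    def filter(p: T => Boolean): SilentResult[T] = this match {
      case SilentValue(v) if !p(v) => SilentError(List(s"predicate failed for $v"))
      case other                   => other
    }
    def orElse[T1 >: T](recover: List[String] => T1): T1 = this match {
      case SilentValue(v)    => v
      case SilentError(errs) => recover(errs)
    }
  }
  final case class SilentValue[+T](value: T) extends SilentResult[T]
  final case class SilentError(errors: List[String]) extends SilentResult[Nothing]

  object SilentDemo extends App {
    def silently(x: Int): SilentResult[Int] =
      if (x >= 0) SilentValue(x) else SilentError(List(s"negative input: $x"))

    // mirrors `silent(...) map (...) orElse (errs => fallback)` in the patch
    println(silently(21).map(_ * 2) orElse (_ => -1))                          // 42
    println(silently(-3) orElse (errs => { println(errs.mkString("; ")); 0 })) // logs the error, then 0
  }
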
@@ -115,10 +130,7 @@ trait Typers extends Modes with Adaptations with Tags {
// paramFailed cannot be initialized with params.exists(_.tpe.isError) because that would
// hide some valid errors for params preceding the erroneous one.
var paramFailed = false
-
- def mkPositionalArg(argTree: Tree, paramName: Name) = argTree
- def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree)))
- var mkArg: (Tree, Name) => Tree = mkPositionalArg
+ var mkArg: (Name, Tree) => Tree = (_, tree) => tree
// DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1))
//
@@ -129,28 +141,27 @@ trait Typers extends Modes with Adaptations with Tags {
for(ar <- argResultsBuff)
paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
- val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context)
+ val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, isView = false, context)
argResultsBuff += res
if (res.isSuccess) {
- argBuff += mkArg(res.tree, param.name)
+ argBuff += mkArg(param.name, res.tree)
} else {
- mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
+ mkArg = gen.mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
if (!param.hasDefault && !paramFailed) {
- context.errBuffer.find(_.kind == ErrorKinds.Divergent) match {
- case Some(divergentImplicit) if !settings.Xdivergence211.value =>
+ context.reportBuffer.errors.collectFirst {
+ case dte: DivergentImplicitTypeError => dte
+ } match {
+ case Some(divergent) =>
// DivergentImplicit error has higher priority than "no implicit found"
// no need to issue the problem again if we are still in silent mode
if (context.reportErrors) {
- context.issue(divergentImplicit)
- context.condBufferFlush(_.kind == ErrorKinds.Divergent)
- }
- case Some(divergentImplicit: DivergentImplicitTypeError) if settings.Xdivergence211.value =>
- if (context.reportErrors) {
- context.issue(divergentImplicit.withPt(paramTp))
- context.condBufferFlush(_.kind == ErrorKinds.Divergent)
+ context.issue(divergent.withPt(paramTp))
+ context.reportBuffer.clearErrors {
+ case dte: DivergentImplicitTypeError => true
+ }
}
- case None =>
+ case _ =>
NoImplicitFoundError(fun, param)
}
paramFailed = true
@@ -176,10 +187,17 @@ trait Typers extends Modes with Adaptations with Tags {
fun
}
+ def viewExists(from: Type, to: Type): Boolean = (
+ !from.isError
+ && !to.isError
+ && context.implicitsEnabled
+ && (inferView(EmptyTree, from, to, reportAmbiguous = false) != EmptyTree)
+ )
+
def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
- inferView(tree, from, to, reportAmbiguous, true)
+ inferView(tree, from, to, reportAmbiguous, saveErrors = true)
- /** Infer an implicit conversion (``view'') between two types.
+ /** Infer an implicit conversion (`view`) between two types.
* @param tree The tree which needs to be converted.
* @param from The source type of the conversion
* @param to The target type of the conversion
@@ -194,12 +212,12 @@ trait Typers extends Modes with Adaptations with Tags {
debuglog("infer view from "+from+" to "+to)//debug
if (isPastTyper) EmptyTree
else from match {
- case MethodType(_, _) => EmptyTree
+ case MethodType(_, _) => EmptyTree
case OverloadedType(_, _) => EmptyTree
- case PolyType(_, _) => EmptyTree
- case _ =>
+ case PolyType(_, _) => EmptyTree
+ case _ =>
def wrapImplicit(from: Type): Tree = {
- val result = inferImplicit(tree, functionType(from :: Nil, to), reportAmbiguous, true, context, saveErrors)
+ val result = inferImplicit(tree, functionType(from.withoutAnnotations :: Nil, to), reportAmbiguous, isView = true, context, saveAmbiguousDivergent = saveErrors)
if (result.subst != EmptyTreeTypeSubstituter) {
result.subst traverse tree
notifyUndetparamsInferred(result.subst.from, result.subst.to)
@@ -237,32 +255,6 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => tp
}
- /** Check that <code>tree</code> is a stable expression.
- *
- * @param tree ...
- * @return ...
- */
- def checkStable(tree: Tree): Tree = (
- if (treeInfo.isExprSafeToInline(tree)) tree
- else if (tree.isErrorTyped) tree
- else UnstableTreeError(tree)
- )
-
- /** Would tree be a stable (i.e. a pure expression) if the type
- * of its symbol was not volatile?
- */
- protected def isStableExceptVolatile(tree: Tree) = {
- tree.hasSymbol && tree.symbol != NoSymbol && tree.tpe.isVolatile &&
- { val savedTpe = tree.symbol.info
- val savedSTABLE = tree.symbol getFlag STABLE
- tree.symbol setInfo AnyRefClass.tpe
- tree.symbol setFlag STABLE
- val result = treeInfo.isExprSafeToInline(tree)
- tree.symbol setInfo savedTpe
- tree.symbol setFlag savedSTABLE
- result
- }
- }
private def errorNotClass(tpt: Tree, found: Type) = { ClassTypeRequiredError(tpt, found); false }
private def errorNotStable(tpt: Tree, found: Type) = { TypeNotAStablePrefixError(tpt, found); false }
@@ -294,16 +286,11 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- /** Check that type <code>tp</code> is not a subtype of itself.
- *
- * @param pos ...
- * @param tp ...
- * @return <code>true</code> if <code>tp</code> is not a subtype of itself.
+ /** Check that type `tp` is not a subtype of itself.
*/
def checkNonCyclic(pos: Position, tp: Type): Boolean = {
def checkNotLocked(sym: Symbol) = {
- sym.initialize
- sym.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
+ sym.initialize.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
}
tp match {
case TypeRef(pre, sym, args) =>
@@ -314,12 +301,6 @@ trait Typers extends Modes with Adaptations with Tags {
case SingleType(pre, sym) =>
checkNotLocked(sym)
-/*
- case TypeBounds(lo, hi) =>
- var ok = true
- for (t <- lo) ok = ok & checkNonCyclic(pos, t)
- ok
-*/
case st: SubType =>
checkNonCyclic(pos, st.supertype)
case ct: CompoundType =>
@@ -330,19 +311,19 @@ trait Typers extends Modes with Adaptations with Tags {
}
def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try {
- if (!lockedSym.lock(CyclicReferenceError(pos, lockedSym))) false
+ if (!lockedSym.lock(CyclicReferenceError(pos, tp, lockedSym))) false
else checkNonCyclic(pos, tp)
} finally {
lockedSym.unlock()
}
def checkNonCyclic(sym: Symbol) {
- if (!checkNonCyclic(sym.pos, sym.tpe)) sym.setInfo(ErrorType)
+ if (!checkNonCyclic(sym.pos, sym.tpe_*)) sym.setInfo(ErrorType)
}
def checkNonCyclic(defn: Tree, tpt: Tree) {
if (!checkNonCyclic(defn.pos, tpt.tpe, defn.symbol)) {
- tpt.tpe = ErrorType
+ tpt setType ErrorType
defn.symbol.setInfo(ErrorType)
}
}
@@ -373,28 +354,13 @@ trait Typers extends Modes with Adaptations with Tags {
private var scope: Scope = _
private var hiddenSymbols: List[Symbol] = _
- /** Check that type <code>tree</code> does not refer to private
+ /** Check that type `tree` does not refer to private
* components unless itself is wrapped in something private
- * (<code>owner</code> tells where the type occurs).
- *
- * @param owner ...
- * @param tree ...
- * @return ...
+ * (`owner` tells where the type occurs).
*/
def privates[T <: Tree](owner: Symbol, tree: T): T =
check(owner, EmptyScope, WildcardType, tree)
- /** Check that type <code>tree</code> does not refer to entities
- * defined in scope <code>scope</code>.
- *
- * @param scope ...
- * @param pt ...
- * @param tree ...
- * @return ...
- */
- def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T =
- check(NoSymbol, scope, pt, tree)
-
private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = {
this.owner = owner
this.scope = scope
@@ -407,7 +373,7 @@ trait Typers extends Modes with Adaptations with Tags {
check(owner, scope, pt, tree setType tp1.typeSymbol.classBound)
else if (owner == NoSymbol)
tree setType packSymbols(hiddenSymbols.reverse, tp1)
- else if (!phase.erasedTypes) { // privates
+ else if (!isPastTyper) { // privates
val badSymbol = hiddenSymbols.head
SymbolEscapesScopeError(tree, badSymbol)
} else tree
@@ -470,7 +436,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
/** The qualifying class
- * of a this or super with prefix <code>qual</code>.
+ * of a this or super with prefix `qual`.
* packageOk is equal false when qualifying class symbol
*/
def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean) =
@@ -511,11 +477,6 @@ trait Typers extends Modes with Adaptations with Tags {
}
@inline
- final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = {
- f(newTyper(c))
- }
-
- @inline
final def withSavedContext[T](c: Context)(f: => T) = {
val savedErrors = c.flushAndReturnBuffer()
val res = f
@@ -533,8 +494,6 @@ trait Typers extends Modes with Adaptations with Tags {
typer1
} else this
- final val xtypes = false
-
/** Is symbol defined and not stale?
*/
def reallyExists(sym: Symbol) = {
@@ -553,15 +512,21 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /** Does the context of tree <code>tree</code> require a stable type?
+ /** Does the context of tree `tree` require a stable type?
*/
- private def isStableContext(tree: Tree, mode: Int, pt: Type) =
- isNarrowable(tree.tpe) && ((mode & (EXPRmode | LHSmode)) == EXPRmode) &&
- (xtypes ||
- (pt.isStable ||
- (mode & QUALmode) != 0 && !tree.symbol.isConstant ||
- pt.typeSymbol.isAbstractType && pt.bounds.lo.isStable && !(tree.tpe <:< pt)) ||
- pt.typeSymbol.isRefinementClass && !(tree.tpe <:< pt))
+ private def isStableContext(tree: Tree, mode: Mode, pt: Type) = {
+ def ptSym = pt.typeSymbol
+ def expectsStable = (
+ pt.isStable
+ || mode.inQualMode && !tree.symbol.isConstant
+ || !(tree.tpe <:< pt) && (ptSym.isAbstractType && pt.bounds.lo.isStable || ptSym.isRefinementClass)
+ )
+
+ ( isNarrowable(tree.tpe)
+ && mode.typingExprNotLhs
+ && expectsStable
+ )
+ }
/** Make symbol accessible. This means:
* If symbol refers to package object, insert `.package` as second to last selector.
@@ -572,11 +537,13 @@ trait Typers extends Modes with Adaptations with Tags {
* @return modified tree and new prefix type
*/
private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) =
- if (isInPackageObject(sym, pre.typeSymbol)) {
+ if (context.isInPackageObject(sym, pre.typeSymbol)) {
if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) {
// short cut some aliases. It seems pattern matching needs this
// to notice exhaustiveness and to generate good code when
// List extractors are mixed with :: patterns. See Test5 in lists.scala.
+ //
+ // TODO SI-6609 Eliminate this special case once the old pattern matcher is removed.
def dealias(sym: Symbol) =
(atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType)
sym.name match {
@@ -605,66 +572,59 @@ trait Typers extends Modes with Adaptations with Tags {
(checkAccessible(tree, sym, pre, site), pre)
}
- /** Is `sym` defined in package object of package `pkg`?
- */
- private def isInPackageObject(sym: Symbol, pkg: Symbol) = {
- def isInPkgObj(sym: Symbol) =
- !sym.owner.isPackage && {
- sym.owner.isPackageObjectClass &&
- sym.owner.owner == pkg ||
- pkg.isInitialized && {
- // need to be careful here to not get a cyclic reference during bootstrap
- val pkgobj = pkg.info.member(nme.PACKAGEkw)
- pkgobj.isInitialized &&
- (pkgobj.info.member(sym.name).alternatives contains sym)
- }
- }
- pkg.isPackageClass && {
- if (sym.isOverloaded) sym.alternatives forall isInPkgObj
- else isInPkgObj(sym)
- }
- }
-
/** Post-process an identifier or selection node, performing the following:
- * 1. Check that non-function pattern expressions are stable
+ * 1. Check that non-function pattern expressions are stable (ignoring volatility concerns -- SI-6815)
+ * (and narrow the type of modules: a module reference in a pattern has type Foo.type, not "object Foo")
* 2. Check that packages and static modules are not used as values
* 3. Turn tree type into stable type if possible and required by context.
* 4. Give getClass calls a more precise type based on the type of the target of the call.
*/
- private def stabilize(tree: Tree, pre: Type, mode: Int, pt: Type): Tree = {
- if (tree.symbol.isOverloaded && !inFunMode(mode))
+ private def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+ // Side effect time! Don't be an idiot like me and think you
+ // can move "val sym = tree.symbol" before this line, because
+ // inferExprAlternative side-effects the tree's symbol.
+ if (tree.symbol.isOverloaded && !mode.inFunMode)
inferExprAlternative(tree, pt)
val sym = tree.symbol
- def fail() = NotAValueError(tree, sym)
+ val isStableIdPattern = mode.typingPatternNotConstructor && tree.isTerm
- if (tree.isErrorTyped) tree
- else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1)
- if (sym.isValue) {
- val tree1 = checkStable(tree)
- // A module reference in a pattern has type Foo.type, not "object Foo"
- if (sym.isModule && !sym.isMethod) tree1 setType singleType(pre, sym)
- else tree1
- }
- else fail()
- } else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && !sym.isValue && !phase.erasedTypes) { // (2)
- fail()
- } else {
- if (sym.isStable && pre.isStable && !isByNameParamType(tree.tpe) &&
- (isStableContext(tree, mode, pt) || sym.isModule && !sym.isMethod))
- tree.setType(singleType(pre, sym))
- // To fully benefit from special casing the return type of
- // getClass, we have to catch it immediately so expressions
- // like x.getClass().newInstance() are typed with the type of x.
- else if ( isGetClass(tree.symbol)
- // TODO: If the type of the qualifier is inaccessible, we can cause private types
- // to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
- // so for now it requires the type symbol be public.
- && pre.typeSymbol.isPublic)
- tree setType MethodType(Nil, getClassReturnType(pre))
- else
- tree
- }
+ def isModuleTypedExpr = (
+ treeInfo.admitsTypeSelection(tree)
+ && (isStableContext(tree, mode, pt) || sym.isModuleNotMethod)
+ )
+ def isStableValueRequired = (
+ isStableIdPattern
+ || mode.in(all = EXPRmode, none = QUALmode) && !phase.erasedTypes
+ )
+ // To fully benefit from special casing the return type of
+ // getClass, we have to catch it immediately so expressions like
+ // x.getClass().newInstance() are typed with the type of x. TODO: If the
+ // type of the qualifier is inaccessible, we can cause private types to
+ // escape scope here, e.g. pos/t1107. I'm not sure how to properly handle
+ // this so for now it requires the type symbol be public.
+ def isGetClassCall = isGetClass(sym) && pre.typeSymbol.isPublic
+
+ def narrowIf(tree: Tree, condition: Boolean) =
+ if (condition) tree setType singleType(pre, sym) else tree
+
+ def checkStable(tree: Tree): Tree =
+ if (treeInfo.isStableIdentifierPattern(tree)) tree
+ else UnstableTreeError(tree)
+
+ if (tree.isErrorTyped)
+ tree
+ else if (!sym.isValue && isStableValueRequired) // (2)
+ NotAValueError(tree, sym)
+ else if (isStableIdPattern) // (1)
+ // A module reference in a pattern has type Foo.type, not "object Foo"
+ narrowIf(checkStable(tree), sym.isModuleNotMethod)
+ else if (isModuleTypedExpr) // (3)
+ narrowIf(tree, true)
+ else if (isGetClassCall) // (4)
+ tree setType MethodType(Nil, getClassReturnType(pre))
+ else
+ tree
}
private def isNarrowable(tpe: Type): Boolean = unwrapWrapperTypes(tpe) match {
@@ -672,22 +632,21 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => !phase.erasedTypes
}
- /**
- * @param tree ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- def stabilizeFun(tree: Tree, mode: Int, pt: Type): Tree = {
+ def stabilizeFun(tree: Tree, mode: Mode, pt: Type): Tree = {
val sym = tree.symbol
val pre = tree match {
case Select(qual, _) => qual.tpe
- case _ => NoPrefix
+ case _ => NoPrefix
+ }
+ def stabilizable = (
+ pre.isStable
+ && sym.tpe.params.isEmpty
+ && (isStableContext(tree, mode, pt) || sym.isModule)
+ )
+ tree.tpe match {
+ case MethodType(_, _) if stabilizable => tree setType MethodType(Nil, singleType(pre, sym)) // TODO: should this be a NullaryMethodType?
+ case _ => tree
}
- if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.params.isEmpty &&
- (isStableContext(tree, mode, pt) || sym.isModule))
- tree.setType(MethodType(List(), singleType(pre, sym))) // TODO: should this be a NullaryMethodType?
- else tree
}
/** The member with given name of given qualifier tree */
@@ -730,14 +689,10 @@ trait Typers extends Modes with Adaptations with Tags {
context.namedApplyBlockInfo = context1.namedApplyBlockInfo
if (context1.hasErrors) {
stopStats()
- SilentTypeError(context1.errBuffer.head)
+ SilentTypeError(context1.errors: _*)
} else {
// If we have a successful result, emit any warnings it created.
- if (context1.hasWarnings) {
- context1.flushAndReturnWarningsBuffer() foreach {
- case (pos, msg) => unit.warning(pos, msg)
- }
- }
+ context1.flushAndIssueWarnings()
SilentResultValue(result)
}
} else {
@@ -775,7 +730,7 @@ trait Typers extends Modes with Adaptations with Tags {
featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse
val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name
def action(): Boolean = {
- def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, true, false, context).isSuccess
+ def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, reportAmbiguous = true, isView = false, context).isSuccess
def hasOption = settings.language.value exists (s => s == featureName || s == "_")
val OK = hasImport || hasOption
if (!OK) {
@@ -849,10 +804,12 @@ trait Typers extends Modes with Adaptations with Tags {
* (14) When in mode EXPRmode, apply a view
* If all this fails, error
*/
- protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = {
+ protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree = {
+ def hasUndets = context.undetparams.nonEmpty
+ def hasUndetsInMonoMode = hasUndets && !mode.inPolyMode
def adaptToImplicitMethod(mt: MethodType): Tree = {
- if (context.undetparams.nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
+ if (hasUndets) { // (9) -- should revisit dropped condition `hasUndetsInMonoMode`
// dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
// needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt,
@@ -864,29 +821,28 @@ trait Typers extends Modes with Adaptations with Tags {
// avoid throwing spurious DivergentImplicit errors
if (context.hasErrors)
- return setError(tree)
-
- withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree)){ typer1 =>
- if (original != EmptyTree && pt != WildcardType)
- typer1.silent(tpr => {
- val withImplicitArgs = tpr.applyImplicitArgs(tree)
- if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
- else tpr.typed(withImplicitArgs, mode, pt)
- }) match {
- case SilentResultValue(result) =>
- result
- case _ =>
+ setError(tree)
+ else
+ withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 =>
+ if (original != EmptyTree && pt != WildcardType) (
+ typer1 silent { tpr =>
+ val withImplicitArgs = tpr.applyImplicitArgs(tree)
+ if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
+ else tpr.typed(withImplicitArgs, mode, pt)
+ }
+ orElse { _ =>
val resetTree = resetLocalAttrs(original)
debuglog(s"fallback on implicits: ${tree}/$resetTree")
- val tree1 = typed(resetTree, mode, WildcardType)
+ val tree1 = typed(resetTree, mode)
// Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
// we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
- tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
+ tree1 setType pluginsTyped(tree1.tpe, this, tree1, mode, pt)
if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
- }
- else
- typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
- }
+ }
+ )
+ else
+ typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
+ )
}
def instantiateToMethodType(mt: MethodType): Tree = {
@@ -895,174 +851,78 @@ trait Typers extends Modes with Adaptations with Tags {
case Block(_, tree1) => tree1.symbol
case _ => tree.symbol
}
- if (!meth.isConstructor && !meth.isTermMacro && isFunctionType(pt)) { // (4.2)
- debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
+ if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
+ debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt")
checkParamsConvertible(tree, tree.tpe)
val tree0 = etaExpand(context.unit, tree, this)
- // println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
-
- if (context.undetparams.nonEmpty) {
- // #2624: need to infer type arguments for eta expansion of a polymorphic method
- // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
- // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
- // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
- // (note that (3) does not call typed to do the polymorphic type instantiation --
- // it is called after the tree has been typed with a polymorphic expected result type)
- instantiate(typed(tree0, mode, WildcardType), mode, pt)
- } else
+
+ // #2624: need to infer type arguments for eta expansion of a polymorphic method
+ // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
+ // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
+ // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
+ // (note that (3) does not call typed to do the polymorphic type instantiation --
+ // it is called after the tree has been typed with a polymorphic expected result type)
+ if (hasUndets)
+ instantiate(typed(tree0, mode), mode, pt)
+ else
typed(tree0, mode, pt)
- } else if (!meth.isConstructor && mt.params.isEmpty) { // (4.3)
- adapt(typed(Apply(tree, List()) setPos tree.pos), mode, pt, original)
- } else if (context.implicitsEnabled) {
+ }
+ else if (!meth.isConstructor && mt.params.isEmpty) // (4.3)
+ adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original)
+ else if (context.implicitsEnabled)
MissingArgsForMethodTpeError(tree, meth)
- } else {
+ else
setError(tree)
- }
}
def adaptType(): Tree = {
- if (inFunMode(mode)) {
- // todo. the commented line below makes sense for typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
- // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
- // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
- // tree setType tree.tpe.normalize
+ // @M When not typing a type constructor (!context.inTypeConstructorAllowed)
+ // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+ // and thus parameterized types must be applied to their type arguments
+ // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
+ def properTypeRequired = (
+ tree.hasSymbolField
+ && !context.inTypeConstructorAllowed
+ && !(tree.symbol.isJavaDefined && context.unit.isJava)
+ )
+ // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
+ // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
+ // @M: removed check for tree.hasSymbolField and replace tree.symbol by tree.tpe.symbol
+ // (TypeTree's must also be checked here, and they don't directly have a symbol)
+ def kindArityMismatch = (
+ context.inTypeConstructorAllowed
+ && !sameLength(tree.tpe.typeParams, pt.typeParams)
+ )
+ // Note that we treat Any and Nothing as kind-polymorphic.
+ // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
+ // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
+ def kindArityMismatchOk = tree.tpe.typeSymbol match {
+ case NothingClass | AnyClass => true
+ case _ => pt == WildcardType
+ }
+
+ // todo. It would make sense when mode.inFunMode to instead use
+ // tree setType tree.tpe.normalize
+ // when typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
+ // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
+ // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
+ if (mode.inFunMode)
tree
- } else if (tree.hasSymbol && !tree.symbol.typeParams.isEmpty && !inHKMode(mode) &&
- !(tree.symbol.isJavaDefined && context.unit.isJava)) { // (7)
- // @M When not typing a higher-kinded type ((mode & HKmode) == 0)
- // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
- // and thus parameterized types must be applied to their type arguments
- // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
+ else if (properTypeRequired && tree.symbol.typeParams.nonEmpty) // (7)
MissingTypeParametersError(tree)
- } else if ( // (7.1) @M: check kind-arity
- // @M: removed check for tree.hasSymbol and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol)
- (inHKMode(mode)) &&
- // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
- // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
- !sameLength(tree.tpe.typeParams, pt.typeParams) &&
- !(tree.tpe.typeSymbol == AnyClass ||
- tree.tpe.typeSymbol == NothingClass ||
- pt == WildcardType)) {
- // Check that the actual kind arity (tree.symbol.typeParams.length) conforms to the expected
- // kind-arity (pt.typeParams.length). Full checks are done in checkKindBounds in Infer.
- // Note that we treat Any and Nothing as kind-polymorphic.
- // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
- // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
+ else if (kindArityMismatch && !kindArityMismatchOk) // (7.1) @M: check kind-arity
KindArityMismatchError(tree, pt)
- } else tree match { // (6)
+ else tree match { // (6)
case TypeTree() => tree
case _ => TypeTree(tree.tpe) setOriginal tree
}
}
- /**
- * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
- * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
- *
- * Consider the following example:
- *
- * class AbsWrapperCov[+A]
- * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
- *
- * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
- * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
- * wrapped // : Wrapped[_ <: T]
- * }
- *
- * this method should type check if and only if Wrapped is covariant in its type parameter
- *
- * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
- * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
- * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
- *
- * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
- * we can simply replace skolems that represent method type parameters as seen from the method's body
- * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
- * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
- *
- * see test/files/../t5189*.scala
- */
- def adaptConstrPattern(): Tree = { // (5)
- def hasUnapplyMember(tp: Type) = reallyExists(unapplyMember(tp))
- val overloadedExtractorOfObject = tree.symbol filter (sym => hasUnapplyMember(sym.tpe))
- // if the tree's symbol's type does not define an extractor, maybe the tree's type does
- // this is the case when we encounter an arbitrary tree as the target of an unapply call (rather than something that looks like a constructor call)
- // (for now, this only happens due to wrapClassTagUnapply, but when we support parameterized extractors, it will become more common place)
- val extractor = overloadedExtractorOfObject orElse unapplyMember(tree.tpe)
- if (extractor != NoSymbol) {
- // if we did some ad-hoc overloading resolution, update the tree's symbol
- // do not update the symbol if the tree's symbol's type does not define an unapply member
- // (e.g. since it's some method that returns an object with an unapply member)
- if (overloadedExtractorOfObject != NoSymbol)
- tree setSymbol overloadedExtractorOfObject
-
- tree.tpe match {
- case OverloadedType(pre, alts) => tree.tpe = overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
- case _ =>
- }
- val unapply = unapplyMember(extractor.tpe)
- val clazz = unapplyParameterType(unapply)
-
- if (unapply.isCase && clazz.isCase && !(clazz.ancestors exists (_.isCase))) {
- // convert synthetic unapply of case class to case class constructor
- val prefix = tree.tpe.prefix
- val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
- .setOriginal(tree)
-
- val skolems = new mutable.ListBuffer[TypeSymbol]
- object variantToSkolem extends VariantTypeMap {
- def apply(tp: Type) = mapOver(tp) match {
- case TypeRef(NoPrefix, tpSym, Nil) if variance != 0 && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
- // must initialize or tpSym.tpe might see random type params!!
- // without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
- // TODO: why is that??
- tpSym.initialize
- val bounds = if (variance == 1) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
- // origin must be the type param so we can deskolemize
- val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
- // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
- skolems += skolem
- skolem.tpe
- case tp1 => tp1
- }
- }
-
- // have to open up the existential and put the skolems in scope
- // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
- val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
- val freeVars = skolems.toList
-
- // use "tree" for the context, not context.tree: don't make another CaseDef context,
- // as instantiateTypeVar's bounds would end up there
- val ctorContext = context.makeNewScope(tree, context.owner)
- freeVars foreach ctorContext.scope.enter
- newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
-
- // simplify types without losing safety,
- // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
- val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
- val extrapolated = tree1.tpe match {
- case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
- ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
- copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
- case tp => tp
- }
-
- // once the containing CaseDef has been type checked (see typedCase),
- // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
- tree1 setType extrapolated
- } else {
- tree
- }
- } else {
- CaseClassConstructorError(tree)
- }
- }
-
def insertApply(): Tree = {
- assert(!inHKMode(mode), modeString(mode)) //@M
+ assert(!context.inTypeConstructorAllowed, mode) //@M
val adapted = adaptToName(tree, nme.apply)
- def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+ def stabilize0(pre: Type): Tree = stabilize(adapted, pre, MonoQualifierModes, WildcardType)
+
// TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize
val qual = adapted match {
case This(_) =>
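
The kindArityMismatch check in the rewritten adaptType above (case 7.1) is the machinery behind a familiar user-facing error: a type whose kind arity does not match the expected type constructor. A small user-level illustration (assumed example code, not part of this patch):

  object KindArityDemo {
    trait Functor[F[_]] // F is expected to take exactly one type parameter

    class ListFunctor extends Functor[List]  // compiles: List takes one type parameter
    // class MapFunctor extends Functor[Map] // rejected: Map takes two type parameters,
    //                                       // so its kind arity does not match F[_]
  }
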
@@ -1083,31 +943,199 @@ trait Typers extends Modes with Adaptations with Tags {
Select(qual setPos tree.pos.makeTransparent, nme.apply)
}
}
+ def adaptConstant(value: Constant): Tree = {
+ val sym = tree.symbol
+ if (sym != null && sym.isDeprecated) {
+ val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
+ unit.deprecationWarning(tree.pos, msg)
+ }
+ treeCopy.Literal(tree, value)
+ }
+
+ // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
+ // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
+ // Here's my hypothesis why this happens. The pattern matcher defines a variable of type
+ //
+ // val x: T = expr
+ //
+ // where T is the type of expr, but T contains existential skolems ts.
+ // In that case, this value definition does not typecheck.
+ // The value definition
+ //
+ // val x: T forSome { ts } = expr
+ //
+ // would typecheck. Or one can simply leave out the type of the `val`:
+ //
+ // val x = expr
+ //
+ // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
+ // type is an existential type.
+ //
+ // The reasons for both failures have to do with the way we (don't) transform
+ // skolem types along with the trees that contain them. We'd need a
+ // radically different approach to do it. But before investing a lot of time
+ // to do this (I have already sunk 3 full days with, in the end, futile attempts
+ // to consistently transform skolems and fix 6029), I'd like to
+ // investigate ways to avoid skolems completely.
+ //
+ // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
+ // (which is the return type of the macro definition instantiated in the context of expandee):
+ //
+ // Test.scala:2: error: type mismatch;
+ // found : $u.Expr[Class[_ <: Object]]
+ // required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
+ // scala.reflect.runtime.universe.reify(new Object().getClass)
+ // ^
+ // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
+ // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
+ //
+ def adaptMismatchedSkolems() = {
+ def canIgnoreMismatch = (
+ !context.reportErrors && isPastTyper
+ || tree.attachments.get[MacroExpansionAttachment].isDefined
+ )
+ def bound = pt match {
+ case ExistentialType(qs, _) => qs
+ case _ => Nil
+ }
+ def msg = sm"""
+ |Recovering from existential or skolem type error in
+ | $tree
+ |with type: ${tree.tpe}
+ | pt: $pt
+ | context: ${context.tree}
+ | adapted
+ """.trim
+
+ val boundOrSkolems = if (canIgnoreMismatch) bound ++ pt.skolemsExceptMethodTypeParams else Nil
+ boundOrSkolems match {
+ case Nil => AdaptTypeError(tree, tree.tpe, pt) ; setError(tree)
+ case _ => logResult(msg)(adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt)))
+ }
+ }
+
+ def fallbackAfterVanillaAdapt(): Tree = {
+ def isPopulatedPattern = {
+ if ((tree.symbol ne null) && tree.symbol.isModule)
+ inferModulePattern(tree, pt)
+
+ isPopulated(tree.tpe, approximateAbstracts(pt))
+ }
+ if (mode.inPatternMode && isPopulatedPattern)
+ return tree
+
+ val tree1 = constfold(tree, pt) // (10) (11)
+ if (tree1.tpe <:< pt)
+ return adapt(tree1, mode, pt, original)
+
+ if (mode.typingExprNotFun) {
+ // The <: Any requirement inhibits attempts to adapt continuation types
+ // to non-continuation types.
+ if (tree.tpe <:< AnyTpe) pt.dealias match {
+ case TypeRef(_, UnitClass, _) => // (12)
+ if (settings.warnValueDiscard)
+ context.unit.warning(tree.pos, "discarded non-Unit value")
+ return typedPos(tree.pos, mode, pt)(Block(List(tree), Literal(Constant(()))))
+ case TypeRef(_, sym, _) if isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt) =>
+ if (settings.warnNumericWiden)
+ context.unit.warning(tree.pos, "implicit numeric widening")
+ return typedPos(tree.pos, mode, pt)(Select(tree, "to" + sym.name))
+ case _ =>
+ }
+ if (pt.dealias.annotations.nonEmpty && canAdaptAnnotations(tree, this, mode, pt)) // (13)
+ return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
+
+ if (hasUndets)
+ return instantiate(tree, mode, pt)
+
+ if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
+ // (14); the condition prevents chains of views
+ debuglog("inferring view from " + tree.tpe + " to " + pt)
+ inferView(tree, tree.tpe, pt, reportAmbiguous = true) match {
+ case EmptyTree =>
+ case coercion =>
+ def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
+ if (settings.logImplicitConv)
+ unit.echo(tree.pos, msg)
+
+ debuglog(msg)
+ val silentContext = context.makeImplicit(context.ambiguousErrors)
+ val res = newTyper(silentContext).typed(
+ new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
+ silentContext.firstError match {
+ case Some(err) => context.issue(err)
+ case None => return res
+ }
+ }
+ }
+ }
+
+ debuglog("error tree = " + tree)
+ if (settings.debug && settings.explaintypes)
+ explainTypes(tree.tpe, pt)
+
+ if (tree.tpe.isErroneous || pt.isErroneous)
+ setError(tree)
+ else
+ adaptMismatchedSkolems()
+ }
+
+ def vanillaAdapt(tree: Tree) = {
+ def applyPossible = {
+ def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
+ def hasPolymorphicApply = applyMeth.alternatives exists (_.tpe.typeParams.nonEmpty)
+ def hasMonomorphicApply = applyMeth.alternatives exists (_.tpe.paramSectionCount > 0)
+
+ dyna.acceptsApplyDynamic(tree.tpe) || (
+ if (mode.inTappMode)
+ tree.tpe.typeParams.isEmpty && hasPolymorphicApply
+ else
+ hasMonomorphicApply
+ )
+ }
+ def shouldInsertApply(tree: Tree) = mode.typingExprFun && {
+ tree.tpe match {
+ case _: MethodType | _: OverloadedType | _: PolyType => false
+ case _ => applyPossible
+ }
+ }
+ if (tree.isType)
+ adaptType()
+ else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree) && !isMacroExpansionSuppressed(tree))
+ macroExpand(this, tree, mode, pt)
+ else if (mode.typingConstructorPattern)
+ typedConstructorPattern(tree, pt)
+ else if (shouldInsertApply(tree))
+ insertApply()
+ else if (hasUndetsInMonoMode) { // (9)
+ assert(!context.inTypeConstructorAllowed, context) //@M
+ instantiatePossiblyExpectingUnit(tree, mode, pt)
+ }
+ else if (tree.tpe <:< pt)
+ tree
+ else
+ fallbackAfterVanillaAdapt()
+ }
// begin adapt
- tree.tpe match {
+ if (isMacroImplRef(tree)) {
+ if (treeInfo.isMacroApplication(tree)) adapt(unmarkMacroImplRef(tree), mode, pt, original)
+ else tree
+ } else tree.tpe match {
case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
adaptAnnotations(tree, this, mode, pt)
- case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
- val sym = tree.symbol
- if (sym != null && sym.isDeprecated) {
- val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
- unit.deprecationWarning(tree.pos, msg)
- }
- treeCopy.Literal(tree, value)
- case OverloadedType(pre, alts) if !inFunMode(mode) => // (1)
+ case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0)
+ adaptConstant(value)
+ case OverloadedType(pre, alts) if !mode.inFunMode => // (1)
inferExprAlternative(tree, pt)
adapt(tree, mode, pt, original)
case NullaryMethodType(restpe) => // (2)
adapt(tree setType restpe, mode, pt, original)
- case TypeRef(_, ByNameParamClass, List(arg)) if ((mode & EXPRmode) != 0) => // (2)
+ case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2)
adapt(tree setType arg, mode, pt, original)
- case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] &&
- ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
- case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original)
- case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3)
+ case tp if mode.typingExprNotLhs && isExistentialType(tp) =>
+ adapt(tree setType tp.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
+ case PolyType(tparams, restpe) if mode.inNone(TAPPmode | PATTERNmode) && !context.inTypeConstructorAllowed => // (3)
// assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function,
// we're in HKmode since a higher-kinded type is expected --> hence, don't implicitly apply it to type params!
// ticket #2197 triggered turning the assert into a guard
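
fallbackAfterVanillaAdapt above gathers the end-of-the-line adaptations: value discard to Unit (12), implicit numeric widening, and implicit view insertion (14). At the user level they look like the following illustrative, self-contained example (the -Ywarn-* flags only affect warnings, not the adaptations themselves):

  import scala.language.implicitConversions

  object AdaptationsDemo extends App {
    def answer(): Int = 42

    // (12) value discard: an Int expression against expected type Unit is wrapped
    // in a block that discards the value; -Ywarn-value-discard reports it.
    val u: Unit = answer()

    // numeric widening: against expected type Long the typer rewrites the call to
    // answer().toLong; -Ywarn-numeric-widen reports it.
    val l: Long = answer()

    // (14) implicit view: with no other adaptation left, an implicit conversion
    // (a "view") from Int to Meters is searched for and inserted.
    final case class Meters(value: Double)
    implicit def intToMeters(i: Int): Meters = Meters(i.toDouble)
    val m: Meters = answer() // becomes intToMeters(answer())

    println((u, l, m)) // ((),42,Meters(42.0))
  }
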
@@ -1116,176 +1144,24 @@ trait Typers extends Modes with Adaptations with Tags {
// -- are we sure we want to expand aliases this early?
// -- what caused this change in behaviour??
val tparams1 = cloneSymbols(tparams)
- val tree1 = if (tree.isType) tree
- else TypeApply(tree, tparams1 map (tparam =>
- TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
+ val tree1 = (
+ if (tree.isType) tree
+ else TypeApply(tree, tparams1 map (tparam => TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
+ )
context.undetparams ++= tparams1
notifyUndetparamsAdded(tparams1)
adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
- case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
- adaptToImplicitMethod(mt)
- case mt: MethodType if (((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) &&
- (context.undetparams.isEmpty || inPolyMode(mode))) && !(tree.symbol != null && tree.symbol.isTermMacro) =>
+ case mt: MethodType if mode.typingExprNotFunNotLhs && mt.isImplicit => // (4.1)
+ adaptToImplicitMethod(mt)
+ case mt: MethodType if mode.typingExprNotFunNotLhs && !hasUndetsInMonoMode && !treeInfo.isMacroApplicationOrBlock(tree) =>
instantiateToMethodType(mt)
-
case _ =>
- def shouldInsertApply(tree: Tree) = inAllModes(mode, EXPRmode | FUNmode) && (tree.tpe match {
- case _: MethodType | _: OverloadedType | _: PolyType => false
- case _ => applyPossible
- })
- def applyPossible = {
- def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
- dyna.acceptsApplyDynamic(tree.tpe) || (
- if ((mode & TAPPmode) != 0)
- tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
- else
- applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
- )
- }
- if (tree.isType)
- adaptType()
- else if (
- inExprModeButNot(mode, FUNmode) && !tree.isDef && // typechecking application
- tree.symbol != null && tree.symbol.isTermMacro && // of a macro
- !isMacroExpansionSuppressed(tree))
- macroExpand(this, tree, mode, pt)
- else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
- adaptConstrPattern()
- else if (shouldInsertApply(tree))
- insertApply()
- else if (context.undetparams.nonEmpty && !inPolyMode(mode)) { // (9)
- assert(!inHKMode(mode), modeString(mode)) //@M
- instantiatePossiblyExpectingUnit(tree, mode, pt)
- } else if (tree.tpe <:< pt) {
- tree
- } else {
- def fallBack: Tree = {
- if (inPatternMode(mode)) {
- if ((tree.symbol ne null) && tree.symbol.isModule)
- inferModulePattern(tree, pt)
- if (isPopulated(tree.tpe, approximateAbstracts(pt)))
- return tree
- }
- val tree1 = constfold(tree, pt) // (10) (11)
- if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
- else {
- if (inExprModeButNot(mode, FUNmode)) {
- pt.dealias match {
- case TypeRef(_, sym, _) =>
- // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
- // infinite expansion if pt is constant type ()
- if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12)
- if (settings.warnValueDiscard.value)
- context.unit.warning(tree.pos, "discarded non-Unit value")
- return typedPos(tree.pos, mode, pt) {
- Block(List(tree), Literal(Constant()))
- }
- } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) {
- if (settings.warnNumericWiden.value)
- context.unit.warning(tree.pos, "implicit numeric widening")
- return typedPos(tree.pos, mode, pt) {
- Select(tree, "to" + sym.name)
- }
- }
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
- case _ =>
- }
- if (!context.undetparams.isEmpty) {
- return instantiate(tree, mode, pt)
- }
- if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
- // (14); the condition prevents chains of views
- debuglog("inferring view from " + tree.tpe + " to " + pt)
- val coercion = inferView(tree, tree.tpe, pt, true)
- // convert forward views of delegate types into closures wrapped around
- // the delegate's apply method (the "Invoke" method, which was translated into apply)
- if (forMSIL && coercion != null && isCorrespondingDelegate(tree.tpe, pt)) {
- val meth: Symbol = tree.tpe.member(nme.apply)
- debuglog("replacing forward delegate view with: " + meth + ":" + meth.tpe)
- return typed(Select(tree, meth), mode, pt)
- }
- if (coercion != EmptyTree) {
- def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
- if (settings.logImplicitConv.value)
- unit.echo(tree.pos, msg)
-
- debuglog(msg)
- val silentContext = context.makeImplicit(context.ambiguousErrors)
- val res = newTyper(silentContext).typed(
- new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
- if (silentContext.hasErrors) context.issue(silentContext.errBuffer.head) else return res
- }
- }
- }
- if (settings.debug.value) {
- log("error tree = " + tree)
- if (settings.explaintypes.value) explainTypes(tree.tpe, pt)
- }
-
- val found = tree.tpe
- if (!found.isErroneous && !pt.isErroneous) {
- if ((!context.reportErrors && isPastTyper) || tree.attachments.get[MacroExpansionAttachment].isDefined) {
- val (bound, req) = pt match {
- case ExistentialType(qs, tpe) => (qs, tpe)
- case _ => (Nil, pt)
- }
- val boundOrSkolems = bound ++ pt.skolemsExceptMethodTypeParams
- if (boundOrSkolems.nonEmpty) {
- // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
- // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
- // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
- //
- // val x: T = expr
- //
- // where T is the type of expr, but T contains existential skolems ts.
- // In that case, this value definition does not typecheck.
- // The value definition
- //
- // val x: T forSome { ts } = expr
- //
- // would typecheck. Or one can simply leave out the type of the `val`:
- //
- // val x = expr
- //
- // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
- // type is an existential type.
- //
- // The reason for both failures have to do with the way we (don't) transform
- // skolem types along with the trees that contain them. We'd need a
- // radically different approach to do it. But before investing a lot of time to
- // to do this (I have already sunk 3 full days with in the end futile attempts
- // to consistently transform skolems and fix 6029), I'd like to
- // investigate ways to avoid skolems completely.
- //
- // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
- // (which is the return type of the macro definition instantiated in the context of expandee):
- //
- // Test.scala:2: error: type mismatch;
- // found : $u.Expr[Class[_ <: Object]]
- // required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
- // scala.reflect.runtime.universe.reify(new Object().getClass)
- // ^
- // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
- // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
- //
- log("recovering from existential or skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
- return adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt))
- }
- }
- // create an actual error
- AdaptTypeError(tree, found, pt)
- }
- setError(tree)
- }
- }
- fallBack
- }
+ vanillaAdapt(tree)
}
}
- def instantiate(tree: Tree, mode: Int, pt: Type): Tree = {
+ def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = {
inferExprInstance(tree, context.extractUndetparams(), pt)
adapt(tree, mode, pt)
}
@@ -1293,19 +1169,17 @@ trait Typers extends Modes with Adaptations with Tags {
* with expected type Unit, but if that fails, try again with pt = WildcardType
* and discard the expression.
*/
- def instantiateExpectingUnit(tree: Tree, mode: Int): Tree = {
+ def instantiateExpectingUnit(tree: Tree, mode: Mode): Tree = {
val savedUndetparams = context.undetparams
- silent(_.instantiate(tree, mode, UnitClass.tpe)) match {
- case SilentResultValue(t) => t
- case _ =>
- context.undetparams = savedUndetparams
- val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant())))
- typed(valueDiscard, mode, UnitClass.tpe)
+ silent(_.instantiate(tree, mode, UnitTpe)) orElse { _ =>
+ context.undetparams = savedUndetparams
+ val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant(()))))
+ typed(valueDiscard, mode, UnitTpe)
}
}
- def instantiatePossiblyExpectingUnit(tree: Tree, mode: Int, pt: Type): Tree = {
- if (inExprModeButNot(mode, FUNmode) && pt.typeSymbol == UnitClass)
+ def instantiatePossiblyExpectingUnit(tree: Tree, mode: Mode, pt: Type): Tree = {
+ if (mode.typingExprNotFun && pt.typeSymbol == UnitClass)
instantiateExpectingUnit(tree, mode)
else
instantiate(tree, mode, pt)
@@ -1341,7 +1215,7 @@ trait Typers extends Modes with Adaptations with Tags {
inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match {
case EmptyTree => qual
case coercion =>
- if (settings.logImplicitConv.value)
+ if (settings.logImplicitConv)
unit.echo(qual.pos,
"applied implicit conversion from %s to %s = %s".format(
qual.tpe, searchTemplate, coercion.symbol.defString))
@@ -1364,43 +1238,36 @@ trait Typers extends Modes with Adaptations with Tags {
def doAdapt(restpe: Type) =
//util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ")
adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors)
- if (pt != WildcardType) {
- silent(_ => doAdapt(pt)) match {
- case SilentResultValue(result) if result != qual =>
- result
- case _ =>
- debuglog("fallback on implicits in adaptToArguments: "+qual+" . "+name)
- doAdapt(WildcardType)
- }
- } else
+
+ if (pt == WildcardType)
doAdapt(pt)
+ else silent(_ => doAdapt(pt)) filter (_ != qual) orElse (_ =>
+ logResult(s"fallback on implicits in adaptToArguments: $qual.$name")(doAdapt(WildcardType))
+ )
}
/** Try to apply an implicit conversion to `qual` so that it contains
* a method `name`. If that's ambiguous try taking arguments into
* account using `adaptToArguments`.
*/
- def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
- def onError(reportError: => Tree): Tree = {
- context.tree match {
- case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
- silent(_.typedArgs(args.map(_.duplicate), mode)) match {
- case SilentResultValue(args) =>
- if (args exists (_.isErrorTyped))
- reportError
- else
- adaptToArguments(qual, name, args, WildcardType, reportAmbiguous, saveErrors)
- case _ =>
- reportError
- }
- case _ =>
- reportError
- }
- }
- silent(_.adaptToMember(qual, HasMember(name), false)) match {
- case SilentResultValue(res) => res
- case SilentTypeError(err) => onError({if (reportAmbiguous) { context.issue(err) }; setError(tree)})
+ def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ def onError(reportError: => Tree): Tree = context.tree match {
+ case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
+ ( silent (_.typedArgs(args.map(_.duplicate), mode))
+ filter (xs => !(xs exists (_.isErrorTyped)))
+ map (xs => adaptToArguments(qual, name, xs, WildcardType, reportAmbiguous, saveErrors))
+ orElse ( _ => reportError)
+ )
+ case _ =>
+ reportError
}
+
+ silent(_.adaptToMember(qual, HasMember(name), reportAmbiguous = false)) orElse (errs =>
+ onError {
+ if (reportAmbiguous) errs foreach (context issue _)
+ setError(tree)
+ }
+ )
}
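
For illustration, a sketch of the adaptation performed by `adaptToMemberWithArgs`; `Rich` and `enrich` are hypothetical:

    object AdaptSketch {
      class Rich(val s: String) { def shout(n: Int): String = (s.toUpperCase + "!") * n }
      implicit def enrich(s: String): Rich = new Rich(s)

      // `String` has no member `shout`, so the typer searches for an implicit view on the
      // qualifier; if the member-only search is ambiguous, the argument types (here: Int)
      // are taken into account, as in `adaptToArguments` above.
      val r: String = "hey".shout(2)
    }
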
  /** Try to apply an implicit conversion to `qual` so that it contains a
@@ -1411,13 +1278,6 @@ trait Typers extends Modes with Adaptations with Tags {
if (member(qual, name) != NoSymbol) qual
else adaptToMember(qual, HasMember(name))
- private def typePrimaryConstrBody(clazz : Symbol, cbody: Tree, tparams: List[Symbol], enclTparams: List[Symbol], vparamss: List[List[ValDef]]): Tree = {
- // XXX: see about using the class's symbol....
- enclTparams foreach (sym => context.scope.enter(sym))
- namer.enterValueParams(vparamss)
- typed(cbody)
- }
-
private def validateNoCaseAncestor(clazz: Symbol) = {
if (!phase.erasedTypes) {
for (ancestor <- clazz.ancestors find (_.isCase)) {
@@ -1500,17 +1360,25 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(clazz.pos, "value class may not be a "+
(if (clazz.owner.isTerm) "local class" else "member of another class"))
if (!clazz.isPrimitiveValueClass) {
- clazz.info.decls.toList.filter(acc => acc.isMethod && acc.isParamAccessor) match {
- case List(acc) =>
- def isUnderlyingAcc(sym: Symbol) =
- sym == acc || acc.hasAccessorFlag && sym == acc.accessed
- if (acc.accessBoundary(clazz) != rootMirror.RootClass)
- unit.error(acc.pos, "value class needs to have a publicly accessible val parameter")
- else if (acc.tpe.typeSymbol.isDerivedValueClass)
- unit.error(acc.pos, "value class may not wrap another user-defined value class")
- checkEphemeral(clazz, body filterNot (stat => isUnderlyingAcc(stat.symbol)))
- case x =>
- unit.error(clazz.pos, "value class needs to have exactly one public val parameter")
+ clazz.primaryConstructor.paramss match {
+ case List(List(param)) =>
+ val decls = clazz.info.decls
+ val paramAccessor = clazz.constrParamAccessors.head
+ if (paramAccessor.isMutable)
+ unit.error(paramAccessor.pos, "value class parameter must not be a var")
+ val accessor = decls.toList.find(x => x.isMethod && x.accessedOrSelf == paramAccessor)
+ accessor match {
+ case None =>
+ unit.error(paramAccessor.pos, "value class parameter must be a val and not be private[this]")
+ case Some(acc) if acc.isProtectedLocal =>
+ unit.error(paramAccessor.pos, "value class parameter must not be protected[this]")
+ case Some(acc) =>
+ if (acc.tpe.typeSymbol.isDerivedValueClass)
+ unit.error(acc.pos, "value class may not wrap another user-defined value class")
+ checkEphemeral(clazz, body filterNot (stat => stat.symbol != null && stat.symbol.accessedOrSelf == paramAccessor))
+ }
+ case _ =>
+ unit.error(clazz.pos, "value class needs to have exactly one val parameter")
}
}
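
For reference, a few value-class shapes and how the checks above classify them (a sketch; the rejected variants are kept as comments, with the error strings matching the calls to `unit.error` above):

    class Meters(val value: Double) extends AnyVal       // accepted: exactly one public val parameter
    // class M1(var value: Double) extends AnyVal        // "value class parameter must not be a var"
    // class M2(value: Double) extends AnyVal            // "value class parameter must be a val and not be private[this]"
    // class M3(val a: Int, val b: Int) extends AnyVal   // "value class needs to have exactly one val parameter"
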
@@ -1519,126 +1387,255 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(tparam.pos, "type parameter of value class may not be specialized")
}
- def parentTypes(templ: Template): List[Tree] =
- if (templ.parents.isEmpty) List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
- else try {
- val clazz = context.owner
- // Normalize supertype and mixins so that supertype is always a class, not a trait.
- var supertpt = typedTypeConstructor(templ.parents.head)
- val firstParent = supertpt.tpe.typeSymbol
- var mixins = templ.parents.tail map typedType
- // If first parent is a trait, make it first mixin and add its superclass as first parent
- while ((supertpt.tpe.typeSymbol ne null) && supertpt.tpe.typeSymbol.initialize.isTrait) {
- val supertpt1 = typedType(supertpt)
- if (!supertpt1.isErrorTyped) {
- mixins = supertpt1 :: mixins
- supertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
+ /** Typechecks a parent type reference.
+ *
+ * This typecheck is harder than it might look, because it should honor early
+ * definitions and also perform type argument inference with the help of super call
+ * arguments provided in `encodedtpt`.
+ *
+     * The method is called in batches (a batch is one call per parent type referenced),
+ * two batches per definition: once from namer, when entering a ClassDef or a ModuleDef
+ * and once from typer, when typechecking the definition.
+ *
+ * ***Arguments***
+ *
+ * `encodedtpt` represents the parent type reference wrapped in an `Apply` node
+     * which indicates value arguments (i.e. type macro arguments or super constructor call arguments).
+ * If no value arguments are provided by the user, the `Apply` node is still
+ * there, but its `args` will be set to `Nil`.
+ * This argument is synthesized by `tools.nsc.ast.Parsers.templateParents`.
+ *
+ * `templ` is an enclosing template, which contains a primary constructor synthesized by the parser.
+ * Such a constructor is a DefDef which contains early initializers and maybe a super constructor call
+ * (I wrote "maybe" because trait constructors don't call super constructors).
+ * This argument is synthesized by `tools.nsc.ast.Trees.Template`.
+ *
+     * `inMixinPosition` indicates whether the reference is in a mixin position, i.e. not the first
+     * in the list of parents (and therefore cannot be a class).
+ *
+ * ***Return value and side effects***
+ *
+ * Returns a `TypeTree` representing a resolved parent type.
+     * If the typechecked parent reference implies a non-nullary and non-empty argument list,
+     * this argument list is attached to the returned value in a `SuperArgsAttachment`.
+     * The attachment is necessary for the subsequent typecheck to fix up a super constructor call
+ * in the body of the primary constructor (see `typedTemplate` for details).
+ *
+ * This method might invoke `typedPrimaryConstrBody`, hence it might cause the side effects
+ * described in the docs of that method. It might also attribute the Super(_, _) reference
+ * (if present) inside the primary constructor of `templ`.
+ *
+ * ***Example***
+ *
+ * For the following definition:
+ *
+ * class D extends {
+ * val x = 2
+ * val y = 4
+ * } with B(x)(3) with C(y) with T
+ *
+ * this method will be called six times:
+ *
+ * (3 times from the namer)
+ * typedParentType(Apply(Apply(Ident(B), List(Ident(x))), List(3)), templ, inMixinPosition = false)
+ * typedParentType(Apply(Ident(C), List(Ident(y))), templ, inMixinPosition = true)
+ * typedParentType(Apply(Ident(T), List()), templ, inMixinPosition = true)
+ *
+ * (3 times from the typer)
+ * <the same three calls>
+ */
+ private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = {
+ val app = treeInfo.dissectApplied(encodedtpt)
+ val (treeInfo.Applied(core, _, argss), decodedtpt) = ((app, app.callee))
+ val argssAreTrivial = argss == Nil || argss == ListOfNil
+
+ // we cannot avoid cyclic references with `initialize` here, because when type macros arrive,
+      // we'll have to check the probe for isTypeMacro anyway.
+      // Therefore I think it's reasonable to trade a more specific "inherits itself" error
+ // for a generic, yet understandable "cyclic reference" error
+ var probe = typedTypeConstructor(core.duplicate).tpe.typeSymbol
+ if (probe == null) probe = NoSymbol
+ probe.initialize
+
+ if (probe.isTrait || inMixinPosition) {
+ if (!argssAreTrivial) {
+ if (probe.isTrait) ConstrArgsInParentWhichIsTraitError(encodedtpt, probe)
+ else () // a class in a mixin position - this warrants an error in `validateParentClasses`
+ // therefore here we do nothing, e.g. don't check that the # of ctor arguments
+ // matches the # of ctor parameters or stuff like that
+ }
+ typedType(decodedtpt)
+ } else {
+ val supertpt = typedTypeConstructor(decodedtpt)
+ val supertparams = if (supertpt.hasSymbolField) supertpt.symbol.typeParams else Nil
+ def inferParentTypeArgs: Tree = {
+ typedPrimaryConstrBody(templ) {
+ val supertpe = PolyType(supertparams, appliedType(supertpt.tpe, supertparams map (_.tpeHK)))
+ val supercall = New(supertpe, mmap(argss)(_.duplicate))
+ val treeInfo.Applied(Select(ctor, nme.CONSTRUCTOR), _, _) = supercall
+ ctor setType supertpe // this is an essential hack, otherwise it will occasionally fail to typecheck
+ atPos(supertpt.pos.focus)(supercall)
+ } match {
+ case EmptyTree => MissingTypeArgumentsParentTpeError(supertpt); supertpt
+ case tpt => TypeTree(tpt.tpe) setPos supertpt.pos // SI-7224: don't .focus positions of the TypeTree of a parent that exists in source
}
}
- if (supertpt.tpe.typeSymbol == AnyClass && firstParent.isTrait)
- supertpt.tpe = AnyRefClass.tpe
- // Determine
- // - supertparams: Missing type parameters from supertype
- // - supertpe: Given supertype, polymorphic in supertparams
- val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else List()
- var supertpe = supertpt.tpe
- if (!supertparams.isEmpty)
- supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpeHK)))
+ val supertptWithTargs = if (supertparams.isEmpty || context.unit.isJava) supertpt else inferParentTypeArgs
- // A method to replace a super reference by a New in a supercall
- def transformSuperCall(scall: Tree): Tree = (scall: @unchecked) match {
- case Apply(fn, args) =>
- treeCopy.Apply(scall, transformSuperCall(fn), args map (_.duplicate))
- case Select(Super(_, _), nme.CONSTRUCTOR) =>
- treeCopy.Select(
- scall,
- atPos(supertpt.pos.focus)(New(TypeTree(supertpe)) setType supertpe),
- nme.CONSTRUCTOR)
- }
+ // this is the place where we tell the typer what argss should be used for the super call
+ // if argss are nullary or empty, then (see the docs for `typedPrimaryConstrBody`)
+ // the super call dummy is already good enough, so we don't need to do anything
+ if (argssAreTrivial) supertptWithTargs else supertptWithTargs updateAttachment SuperArgsAttachment(argss)
+ }
+ }
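
A minimal sketch of the type argument inference that `inferParentTypeArgs` enables; `Box` and `IntBox` are hypothetical:

    class Box[T](val contents: T)
    // `Box` is written without type arguments; typechecking the synthesized super call
    // Box(42) against PolyType(List(T), Box[T]) infers T = Int, so the resolved parent
    // TypeTree carries the type Box[Int].
    class IntBox extends Box(42)
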
+ /** Typechecks the mishmash of trees that happen to be stuffed into the primary constructor of a given template.
+ * Before commencing the typecheck, replaces the `pendingSuperCall` dummy with the result of `actualSuperCall`.
+ * `actualSuperCall` can return `EmptyTree`, in which case the dummy is replaced with a literal unit.
+ *
+ * ***Return value and side effects***
+ *
+ * If a super call is present in the primary constructor and is not erased by the transform, returns it typechecked.
+ * Otherwise (e.g. if the primary constructor is missing or the super call isn't there) returns `EmptyTree`.
+ *
+ * As a side effect, this method attributes the underlying fields of early vals.
+ * Early vals aren't typechecked anywhere else, so it's essential to call `typedPrimaryConstrBody`
+ * at least once per definition. It'd be great to disentangle this logic at some point.
+ *
+ * ***Example***
+ *
+ * For the following definition:
+ *
+ * class D extends {
+ * val x = 2
+ * val y = 4
+ * } with B(x)(3) with C(y) with T
+ *
+ * the primary constructor of `templ` will be:
+ *
+ * Block(List(
+     *      ValDef(NoMods, x, TypeTree(), 2),
+     *      ValDef(NoMods, y, TypeTree(), 4),
+     *      global.pendingSuperCall),
+ * Literal(Constant(())))
+ *
+ * Note the `pendingSuperCall` part. This is the representation of a fill-me-in-later supercall dummy,
+ * which encodes the fact that supercall argss are unknown during parsing and need to be transplanted
+ * from one of the parent types. Read more about why the argss are unknown in `tools.nsc.ast.Trees.Template`.
+ */
+ private def typedPrimaryConstrBody(templ: Template)(actualSuperCall: => Tree): Tree =
treeInfo.firstConstructor(templ.body) match {
- case constr @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
- // Convert constructor body to block in environment and typecheck it
+ case ctor @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
val (preSuperStats, superCall) = {
val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
(stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
}
- val cstats1 = if (superCall == EmptyTree) preSuperStats else preSuperStats :+ superCall
- val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall match {
- case Apply(_, _) if supertparams.nonEmpty => transformSuperCall(superCall)
- case _ => cunit.duplicate
- })
- val outercontext = context.outer
-
+ val superCall1 = (superCall match {
+ case global.pendingSuperCall => actualSuperCall
+ case EmptyTree => EmptyTree
+ }) orElse cunit
+ val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
+ val clazz = context.owner
assert(clazz != NoSymbol, templ)
- val cscope = outercontext.makeNewScope(constr, outercontext.owner)
- val cbody2 = newTyper(cscope) // called both during completion AND typing.
- .typePrimaryConstrBody(clazz,
- cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate)))
-
- superCall match {
- case Apply(_, _) =>
- val treeInfo.Applied(_, _, argss) = superCall
- val sarg = argss.flatten.headOption.getOrElse(EmptyTree)
- if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
- ConstrArgsInTraitParentTpeError(sarg, firstParent)
- if (!supertparams.isEmpty)
- supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos
- case _ =>
- if (!supertparams.isEmpty)
- MissingTypeArgumentsParentTpeError(supertpt)
+ val cscope = context.outer.makeNewScope(ctor, context.outer.owner)
+ val cbody2 = { // called both during completion AND typing.
+ val typer1 = newTyper(cscope)
+ // XXX: see about using the class's symbol....
+ clazz.unsafeTypeParams foreach (sym => typer1.context.scope.enter(sym))
+ typer1.namer.enterValueParams(vparamss map (_.map(_.duplicate)))
+ typer1.typed(cbody1)
}
val preSuperVals = treeInfo.preSuperFields(templ.body)
if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
- debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
+ devWarning("Wanted to zip empty presuper val list with " + preSuperStats)
else
- map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
+ map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt setType ldef.symbol.tpe)
+ if (superCall1 == cunit) EmptyTree
+ else cbody2 match {
+ case Block(_, expr) => expr
+ case tree => tree
+ }
case _ =>
- if (!supertparams.isEmpty)
- MissingTypeArgumentsParentTpeError(supertpt)
- }
-/* experimental: early types as type arguments
- val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
- val earlyMap = new EarlyMap(clazz)
- List.mapConserve(supertpt :: mixins){ tpt =>
- val tpt1 = checkNoEscaping.privates(clazz, tpt)
- if (hasEarlyTypes) tpt1 else tpt1 setType earlyMap(tpt1.tpe)
+ EmptyTree
}
-*/
- //Console.println("parents("+clazz") = "+supertpt :: mixins);//DEBUG
+ /** Makes sure that the first type tree in the list of parent types is always a class.
+     * If the first parent is a trait, prepends its supertype to the list until it's a class.
+ */
+ private def normalizeFirstParent(parents: List[Tree]): List[Tree] = {
+ @annotation.tailrec
+ def explode0(parents: List[Tree]): List[Tree] = {
+ val supertpt :: rest = parents // parents is always non-empty here - it only grows
+ if (supertpt.tpe.typeSymbol == AnyClass) {
+ supertpt setType AnyRefTpe
+ parents
+ } else if (treeInfo isTraitRef supertpt) {
+ val supertpt1 = typedType(supertpt)
+ def supersuper = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
+ if (supertpt1.isErrorTyped) rest
+ else explode0(supersuper :: supertpt1 :: rest)
+ } else parents
+ }
+
+ def explode(parents: List[Tree]) =
+ if (treeInfo isTraitRef parents.head) explode0(parents)
+ else parents
+
+ if (parents.isEmpty) Nil else explode(parents)
+ }
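
A sketch of the normalization above with hypothetical types: if the first parent is a trait, its superclass is prepended until the head of the list is a class.

    abstract class Animal
    trait Furry extends Animal
    trait Marker

    // Written parents:    Furry, Marker
    // Normalized parents: Animal, Furry, Marker   (the trait's superclass is prepended
    //                                              so that the first parent is always a class)
    class Cat extends Furry with Marker
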
- // Certain parents are added in the parser before it is known whether
- // that class also declared them as parents. For instance, this is an
- // error unless we take corrective action here:
- //
- // case class Foo() extends Serializable
- //
- // So we strip the duplicates before typer.
- def fixDuplicates(remaining: List[Tree]): List[Tree] = remaining match {
- case Nil => Nil
- case x :: xs =>
- val sym = x.symbol
- x :: fixDuplicates(
- if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
- else xs
- )
- }
+ /** Certain parents are added in the parser before it is known whether
+ * that class also declared them as parents. For instance, this is an
+ * error unless we take corrective action here:
+ *
+ * case class Foo() extends Serializable
+ *
+ * So we strip the duplicates before typer.
+ */
+ private def fixDuplicateSyntheticParents(parents: List[Tree]): List[Tree] = parents match {
+ case Nil => Nil
+ case x :: xs =>
+ val sym = x.symbol
+ x :: fixDuplicateSyntheticParents(
+ if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
+ else xs
+ )
+ }
- fixDuplicates(supertpt :: mixins) mapConserve (tpt => checkNoEscaping.privates(clazz, tpt))
- }
- catch {
- case ex: TypeError =>
- // fallback in case of cyclic errors
- // @H none of the tests enter here but I couldn't rule it out
- log("Type error calculating parents in template " + templ)
- log("Error: " + ex)
- ParentTypesError(templ, ex)
- List(TypeTree(AnyRefClass.tpe))
- }
+ def typedParentTypes(templ: Template): List[Tree] = templ.parents match {
+ case Nil => List(atPos(templ.pos)(TypeTree(AnyRefTpe)))
+ case first :: rest =>
+ try {
+ val supertpts = fixDuplicateSyntheticParents(normalizeFirstParent(
+ typedParentType(first, templ, inMixinPosition = false) +:
+ (rest map (typedParentType(_, templ, inMixinPosition = true)))))
+
+          // when type argument inference is required for the targs of a super call,
+          // typedParentType calls typedPrimaryConstrBody to do the inferring typecheck.
+          // As a side effect, that typecheck also assigns types to the fields underlying early vals.
+          // However, if inference is not required, the typecheck doesn't happen,
+          // and therefore early fields are left with unassigned type trees.
+          // Here we detect this situation and take preventive measures.
+ if (treeInfo.hasUntypedPreSuperFields(templ.body))
+ typedPrimaryConstrBody(templ)(EmptyTree)
+
+ supertpts mapConserve (tpt => checkNoEscaping.privates(context.owner, tpt))
+ }
+ catch {
+ case ex: TypeError =>
+ // fallback in case of cyclic errors
+ // @H none of the tests enter here but I couldn't rule it out
+ // upd. @E when a definition inherits itself, we end up here
+ // because `typedParentType` triggers `initialize` for parent types symbols
+ log("Type error calculating parents in template " + templ)
+ log("Error: " + ex)
+ ParentTypesError(templ, ex)
+ List(TypeTree(AnyRefTpe))
+ }
+ }
/** <p>Check that</p>
* <ul>
@@ -1678,30 +1675,29 @@ trait Typers extends Modes with Adaptations with Tags {
if (psym.isFinal)
pending += ParentFinalInheritanceError(parent, psym)
- if (psym.hasDeprecatedInheritanceAnnotation) {
+ val sameSourceFile = context.unit.source.file == psym.sourceFile
+
+ if (psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile) {
val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
unit.deprecationWarning(parent.pos, msg)
}
if (psym.isSealed && !phase.erasedTypes)
- if (context.unit.source.file == psym.sourceFile)
+ if (sameSourceFile)
psym addChild context.owner
else
pending += ParentSealedInheritanceError(parent, psym)
+ val parentTypeOfThis = parent.tpe.dealias.typeOfThis
- if (!(selfType <:< parent.tpe.typeOfThis) &&
+ if (!(selfType <:< parentTypeOfThis) &&
!phase.erasedTypes &&
!context.owner.isSynthetic && // don't check synthetic concrete classes for virtuals (part of DEVIRTUALIZE)
- !settings.noSelfCheck.value && // setting to suppress this very check
!selfType.isErroneous &&
!parent.tpe.isErroneous)
{
- //Console.println(context.owner);//DEBUG
- //Console.println(context.owner.unsafeTypeParams);//DEBUG
- //Console.println(List.fromArray(context.owner.info.closure));//DEBUG
pending += ParentSelfTypeConformanceError(parent, selfType)
- if (settings.explaintypes.value) explainTypes(selfType, parent.tpe.typeOfThis)
+ if (settings.explaintypes) explainTypes(selfType, parentTypeOfThis)
}
if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError))
@@ -1715,13 +1711,6 @@ trait Typers extends Modes with Adaptations with Tags {
for (p <- parents) validateParentClass(p, superclazz)
}
-/*
- if (settings.Xshowcls.value != "" &&
- settings.Xshowcls.value == context.owner.fullName)
- println("INFO "+context.owner+
- ", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+
- ", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType)))
-*/
pending.foreach(ErrorUtils.issueTypeError)
}
@@ -1731,7 +1720,7 @@ trait Typers extends Modes with Adaptations with Tags {
for (tparam <- clazz.typeParams) {
if (classinfo.expansiveRefs(tparam) contains tparam) {
val newinfo = ClassInfoType(
- classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefClass.tpe))),
+ classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefTpe))),
classinfo.decls,
clazz)
clazz.setInfo {
@@ -1745,27 +1734,26 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /**
- * @param cdef ...
- * @return ...
- */
def typedClassDef(cdef: ClassDef): Tree = {
-// attributes(cdef)
val clazz = cdef.symbol
val typedMods = typedModifiers(cdef.mods)
assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
- val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, parentTypes(cdef.impl))
+ val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl))
val impl2 = finishMethodSynthesis(impl1, clazz, context)
if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
checkEphemeral(clazz, impl2.body)
- if ((clazz != ClassfileAnnotationClass) &&
- (clazz isNonBottomSubClass ClassfileAnnotationClass))
- restrictionWarning(cdef.pos, unit,
- "subclassing Classfile does not\n"+
- "make your annotation visible at runtime. If that is what\n"+
- "you want, you must write the annotation class in Java.")
+
+ if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) {
+ if (!clazz.owner.isPackageClass)
+ unit.error(clazz.pos, "inner classes cannot be classfile annotations")
+ else restrictionWarning(cdef.pos, unit,
+ """|subclassing Classfile does not
+ |make your annotation visible at runtime. If that is what
+ |you want, you must write the annotation class in Java.""".stripMargin)
+ }
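
A sketch of the two cases distinguished above; `runtimeMarker` is a hypothetical annotation:

    // A top-level subclass only draws the restriction warning: the annotation will not be
    // visible at runtime unless it is written in Java.
    class runtimeMarker extends scala.annotation.ClassfileAnnotation

    object Enclosing {
      // An inner subclass is rejected with "inner classes cannot be classfile annotations".
      // class innerMarker extends scala.annotation.ClassfileAnnotation
    }
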
+
if (!isPastTyper) {
for (ann <- clazz.getAnnotation(DeprecatedAttr)) {
val m = companionSymbolOf(clazz, context)
@@ -1777,10 +1765,6 @@ trait Typers extends Modes with Adaptations with Tags {
.setType(NoType)
}
- /**
- * @param mdef ...
- * @return ...
- */
def typedModuleDef(mdef: ModuleDef): Tree = {
// initialize all constructors of the linked class: the type completer (Namer.methodSig)
// might add default getters to this object. example: "object T; class T(x: Int = 1)"
@@ -1798,17 +1782,20 @@ trait Typers extends Modes with Adaptations with Tags {
|| clazz.isSerializable
)
val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, {
- parentTypes(mdef.impl) ++ (
+ typedParentTypes(mdef.impl) ++ (
if (noSerializable) Nil
else {
clazz.makeSerializable()
- List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
+ List(TypeTree(SerializableTpe) setPos clazz.pos.focus)
}
)
})
val impl2 = finishMethodSynthesis(impl1, clazz, context)
+ if (settings.isScala211 && mdef.symbol == PredefModule)
+ ensurePredefParentsAreInSameSourceFile(impl2)
+
// SI-5954. On second compile of a companion class contained in a package object we end up
// with some confusion of names which leads to having two symbols with the same name in the
// same owner. Until that can be straightened out we will warn on companion objects in package
@@ -1828,9 +1815,7 @@ trait Typers extends Modes with Adaptations with Tags {
def pkgObjectWarning(m : Symbol, mdef : ModuleDef, restricted : String) = {
val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
- val pos = if (m.pos.isDefined) m.pos else mdef.pos
- debugwarn(s"${m} should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
- debugwarn(pos.lineContent + (if (pos.isDefined) " " * (pos.column - 1) + "^" else ""))
+ context.warning(if (m.pos.isDefined) m.pos else mdef.pos, s"${m} should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
}
}
@@ -1839,6 +1824,12 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
+
+ private def ensurePredefParentsAreInSameSourceFile(template: Template) = {
+ val parentSyms = template.parents map (_.symbol) filterNot (_ == AnyRefClass)
+ if (parentSyms exists (_.associatedFile != PredefModule.associatedFile))
+ unit.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.")
+ }
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
   *  all the time, it is exposed here; the module/class typing methods go through it.
* ...but it turns out it's also the ideal spot for namer/typer coordination for
@@ -1862,20 +1853,17 @@ trait Typers extends Modes with Adaptations with Tags {
}
protected def enterSym(txt: Context, tree: Tree): Context =
- if (txt eq context) namer.enterSym(tree)
- else newNamer(txt).enterSym(tree)
+ if (txt eq context) namer enterSym tree
+ else newNamer(txt) enterSym tree
- /**
- * @param templ ...
- * @param parents1 ...
- * <li> <!-- 2 -->
- * Check that inner classes do not inherit from Annotation
- * </li>
- * @return ...
+ /** <!-- 2 --> Check that inner classes do not inherit from Annotation
*/
- def typedTemplate(templ: Template, parents1: List[Tree]): Template = {
+ def typedTemplate(templ0: Template, parents1: List[Tree]): Template = {
+ val templ = templ0
+ // please FIXME: uncommenting this line breaks everything
+ // val templ = treeCopy.Template(templ0, templ0.body, templ0.self, templ0.parents)
val clazz = context.owner
- clazz.annotations.map(_.completeInfo)
+ clazz.annotations.map(_.completeInfo())
if (templ.symbol == NoSymbol)
templ setSymbol clazz.newLocalDummy(templ.pos)
val self1 = templ.self match {
@@ -1901,25 +1889,41 @@ trait Typers extends Modes with Adaptations with Tags {
)
// the following is necessary for templates generated later
assert(clazz.info.decls != EmptyScope, clazz)
- enterSyms(context.outer.make(templ, clazz, clazz.info.decls), templ.body)
+ val body1 = pluginsEnterStats(this, templ.body)
+ enterSyms(context.outer.make(templ, clazz, clazz.info.decls), body1)
+    if (!templ.isErrorTyped) // if `typedParentTypes` has invalidated the template, don't validate it anymore
validateParentClasses(parents1, selfType)
if (clazz.isCase)
validateNoCaseAncestor(clazz)
+ if (clazz.isTrait && hasSuperArgs(parents1.head))
+ ConstrArgsInParentOfTraitError(parents1.head, clazz)
- if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.owner.isPackageClass)
+ if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel)
unit.error(clazz.pos, "inner classes cannot be classfile annotations")
if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
- val body =
- if (isPastTyper || reporter.hasErrors) templ.body
- else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
+ val body2 = {
+ val body2 =
+ if (isPastTyper || reporter.hasErrors) body1
+ else body1 flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
+ val primaryCtor = treeInfo.firstConstructor(body2)
+ val primaryCtor1 = primaryCtor match {
+ case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) =>
+ val argss = superArgs(parents1.head) getOrElse Nil
+ val pos = wrappingPos(parents1.head.pos, primaryCtor :: argss.flatten).makeTransparent
+ val superCall = atPos(pos)(PrimarySuperCall(argss))
+ deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos
+ case _ => primaryCtor
+ }
+ body2 mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
+ }
- val body1 = typedStats(body, templ.symbol)
+ val body3 = typedStats(body2, templ.symbol)
if (clazz.info.firstParent.typeSymbol == AnyValClass)
- validateDerivedValueClass(clazz, body1)
+ validateDerivedValueClass(clazz, body3)
if (clazz.isTrait) {
for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) {
@@ -1927,28 +1931,24 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe
+ treeCopy.Template(templ, parents1, self1, body3) setType clazz.tpe_*
}
/** Remove definition annotations from modifiers (they have been saved
- * into the symbol's ``annotations'' in the type completer / namer)
+ * into the symbol's `annotations` in the type completer / namer)
*
* However reification does need annotation definitions to proceed.
* Unfortunately, AnnotationInfo doesn't provide enough info to reify it in general case.
* The biggest problem is with the "atp: Type" field, which cannot be reified in some situations
* that involve locally defined annotations. See more about that in Reifiers.scala.
*
- * That's why the original tree gets saved into ``original'' field of AnnotationInfo (happens elsewhere).
+ * That's why the original tree gets saved into `original` field of AnnotationInfo (happens elsewhere).
* The field doesn't get pickled/unpickled and exists only during a single compilation run.
* This simultaneously allows us to reify annotations and to preserve backward compatibility.
*/
def typedModifiers(mods: Modifiers): Modifiers =
mods.copy(annotations = Nil) setPositions mods.positions
- /**
- * @param vdef ...
- * @return ...
- */
def typedValDef(vdef: ValDef): ValDef = {
val sym = vdef.symbol
val valDefTyper = {
@@ -1965,7 +1965,7 @@ trait Typers extends Modes with Adaptations with Tags {
val sym = vdef.symbol.initialize
val typedMods = typedModifiers(vdef.mods)
- sym.annotations.map(_.completeInfo)
+ sym.annotations.map(_.completeInfo())
val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
@@ -2000,10 +2000,6 @@ trait Typers extends Modes with Adaptations with Tags {
}
/** Enter all aliases of local parameter accessors.
- *
- * @param clazz ...
- * @param vparamss ...
- * @param rhs ...
*/
def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs")
@@ -2053,7 +2049,7 @@ trait Typers extends Modes with Adaptations with Tags {
orElse (superAcc getter superAcc.owner)
filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias)
)
- if (alias.exists && !alias.accessed.isVariable) {
+ if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) {
val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match {
case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed
case acc => acc
@@ -2122,14 +2118,14 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(pos, msg)
false
}
- /** Have to examine all parameters in all lists.
+ /* Have to examine all parameters in all lists.
*/
def paramssTypes(tp: Type): List[List[Type]] = tp match {
case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe)
case PolyType(_, restpe) => paramssTypes(restpe)
case _ => Nil
}
- def resultType = meth.tpe.finalResultType
+ def resultType = meth.tpe_*.finalResultType
def nthParamPos(n1: Int, n2: Int) =
try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos }
@@ -2141,10 +2137,10 @@ trait Typers extends Modes with Adaptations with Tags {
val sym = paramType.typeSymbol
def paramPos = nthParamPos(listIdx, paramIdx)
- /** Not enough to look for abstract types; have to recursively check the bounds
- * of each abstract type for more abstract types. Almost certainly there are other
- * exploitable type soundness bugs which can be seen by bounding a type parameter
- * by an abstract type which itself is bounded by an abstract type.
+ /* Not enough to look for abstract types; have to recursively check the bounds
+ * of each abstract type for more abstract types. Almost certainly there are other
+ * exploitable type soundness bugs which can be seen by bounding a type parameter
+ * by an abstract type which itself is bounded by an abstract type.
*/
def checkAbstract(tp0: Type, what: String): Boolean = {
def check(sym: Symbol): Boolean = !sym.isAbstractType || {
@@ -2168,51 +2164,6 @@ trait Typers extends Modes with Adaptations with Tags {
failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
}
- def typedUseCase(useCase: UseCase) {
- def stringParser(str: String): syntaxAnalyzer.Parser = {
- val file = new BatchSourceFile(context.unit.source.file, str) {
- override def positionInUltimateSource(pos: Position) = {
- pos.withSource(context.unit.source, useCase.pos.start)
- }
- }
- val unit = new CompilationUnit(file)
- new syntaxAnalyzer.UnitParser(unit)
- }
- val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
- val enclClass = context.enclClass.owner
- def defineAlias(name: Name) =
- if (context.scope.lookup(name) == NoSymbol) {
- lookupVariable(name.toString.substring(1), enclClass) match {
- case Some(repl) =>
- silent(_.typedTypeConstructor(stringParser(repl).typ())) match {
- case SilentResultValue(tpt) =>
- val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
- val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
- val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
- alias setInfo newInfo
- context.scope.enter(alias)
- case _ =>
- }
- case _ =>
- }
- }
- for (tree <- trees; t <- tree)
- t match {
- case Ident(name) if name startsWith '$' => defineAlias(name)
- case _ =>
- }
- useCase.aliases = context.scope.toList
- namer.enterSyms(trees)
- typedStats(trees, NoSymbol)
- useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
- if (settings.debug.value)
- useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))
- }
-
- /**
- * @param ddef ...
- * @return ...
- */
def typedDefDef(ddef: DefDef): DefDef = {
val meth = ddef.symbol.initialize
@@ -2231,13 +2182,13 @@ trait Typers extends Modes with Adaptations with Tags {
val tparams1 = ddef.tparams mapConserve typedTypeDef
val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef)
- meth.annotations.map(_.completeInfo)
+ meth.annotations.map(_.completeInfo())
for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
if (isRepeatedParamType(vparam1.symbol.tpe))
StarParamNotLastError(vparam1)
- var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
+ val tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
checkNonCyclic(ddef, tpt1)
ddef.tpt.setType(tpt1.tpe)
val typedMods = typedModifiers(ddef.mods)
@@ -2249,7 +2200,7 @@ trait Typers extends Modes with Adaptations with Tags {
meth.owner.isAnonOrRefinementClass))
InvalidConstructorDefError(ddef)
typed(ddef.rhs)
- } else if (meth.isTermMacro) {
+ } else if (meth.isMacro) {
// typechecking macro bodies is sort of unconventional
// that's why we employ our custom typing scheme orchestrated outside of the typer
transformedOr(ddef.rhs, typedMacroBody(this, ddef))
@@ -2305,10 +2256,10 @@ trait Typers extends Modes with Adaptations with Tags {
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
val typedMods = typedModifiers(tdef.mods)
- tdef.symbol.annotations.map(_.completeInfo)
+ tdef.symbol.annotations.map(_.completeInfo())
// @specialized should not be pickled when compiling with -no-specialize
- if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) {
+ if (settings.nospecialization && currentRun.compiles(tdef.symbol)) {
tdef.symbol.removeAnnotation(definitions.SpecializedClass)
tdef.symbol.deSkolemize.removeAnnotation(definitions.SpecializedClass)
}
@@ -2332,7 +2283,7 @@ trait Typers extends Modes with Adaptations with Tags {
case ldef @ LabelDef(_, _, _) =>
if (ldef.symbol == NoSymbol)
ldef.symbol = namer.enterInScope(
- context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitClass.tpe))
+ context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitTpe))
case _ =>
}
}
@@ -2341,7 +2292,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (!nme.isLoopHeaderLabel(ldef.symbol.name) || isPastTyper) {
val restpe = ldef.symbol.tpe.resultType
val rhs1 = typed(ldef.rhs, restpe)
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ ldef.params foreach (param => param setType param.symbol.tpe)
deriveLabelDef(ldef)(_ => rhs1) setType restpe
}
else {
@@ -2349,29 +2300,24 @@ trait Typers extends Modes with Adaptations with Tags {
val rhs1 = typed(ldef.rhs)
val restpe = rhs1.tpe
if (restpe == initpe) { // stable result, no need to check again
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ ldef.params foreach (param => param setType param.symbol.tpe)
treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
} else {
context.scope.unlink(ldef.symbol)
val sym2 = namer.enterInScope(
context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ ldef.params foreach (param => param setType param.symbol.tpe)
deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe
}
}
}
- /**
- * @param block ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- def typedBlock(block: Block, mode: Int, pt: Type): Block = {
+ def typedBlock(block0: Block, mode: Mode, pt: Type): Block = {
val syntheticPrivates = new ListBuffer[Symbol]
try {
- namer.enterSyms(block.stats)
+ namer.enterSyms(block0.stats)
+ val block = treeCopy.Block(block0, pluginsEnterStats(this, block0.stats), block0.expr)
for (stat <- block.stats) enterLabelDef(stat)
if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
@@ -2430,7 +2376,7 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => stat::Nil
})
val stats2 = typedStats(stats1, context.owner)
- val expr1 = typed(block.expr, mode & ~(FUNmode | QUALmode), pt)
+ val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode), pt)
treeCopy.Block(block, stats2, expr1)
.setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst)
} finally {
@@ -2440,12 +2386,6 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /**
- * @param cdef ...
- * @param pattpe ...
- * @param pt ...
- * @return ...
- */
def typedCase(cdef: CaseDef, pattpe: Type, pt: Type): CaseDef = {
// verify no _* except in last position
for (Apply(_, xs) <- cdef.pat ; x <- xs dropRight 1 ; if treeInfo isStar x)
@@ -2460,85 +2400,74 @@ trait Typers extends Modes with Adaptations with Tags {
// list, so substitute the final result type of the method, i.e. the type
// of the case class.
if (pat1.tpe.paramSectionCount > 0)
- pat1 setType pat1.tpe.finalResultType
-
- if (forInteractive) {
- for (bind @ Bind(name, _) <- cdef.pat)
- if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
- namer.enterIfNotThere(bind.symbol)
+ pat1 modifyType (_.finalResultType)
+
+ for (bind @ Bind(name, _) <- cdef.pat) {
+ val sym = bind.symbol
+ if (name.toTermName != nme.WILDCARD && sym != null) {
+ if (sym == NoSymbol) {
+ if (context.scope.lookup(name) == NoSymbol)
+ namer.enterInScope(context.owner.newErrorSymbol(name))
+ } else
+ namer.enterIfNotThere(sym)
+ }
}
val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
- else typed(cdef.guard, BooleanClass.tpe)
+ else typed(cdef.guard, BooleanTpe)
var body1: Tree = typed(cdef.body, pt)
- val contextWithTypeBounds = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
- if (contextWithTypeBounds.savedTypeBounds.nonEmpty) {
- body1.tpe = contextWithTypeBounds restoreTypeBounds body1.tpe
-
+ if (context.enclosingCaseDef.savedTypeBounds.nonEmpty) {
+ body1 modifyType context.enclosingCaseDef.restoreTypeBounds
// insert a cast if something typechecked under the GADT constraints,
      // but not in real life (i.e., now that we've reset the method's type skolems'
// infos back to their pre-GADT-constraint state)
- if (isFullyDefined(pt) && !(body1.tpe <:< pt))
- body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.normalize))
-
+ if (isFullyDefined(pt) && !(body1.tpe <:< pt)) {
+ log(s"Adding cast to pattern because ${body1.tpe} does not conform to expected type $pt")
+ body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden))
+ }
}
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
}
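
A minimal GADT sketch of the situation the inserted cast handles; `Expr` and `eval` are hypothetical:

    object GadtSketch {
      sealed trait Expr[T]
      case class IntLit(i: Int)      extends Expr[Int]
      case class BoolLit(b: Boolean) extends Expr[Boolean]

      def eval[T](e: Expr[T]): T = e match {
        case IntLit(i)  => i   // typechecks as Int under the GADT constraint T = Int;
                               // once the constraint is dropped, a cast to T is inserted
        case BoolLit(b) => b
      }
    }
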
- // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
- // the flags are used to avoid accidentally deskolemizing unrelated skolems of skolems
- object deskolemizeGADTSkolems extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case TypeRef(pre, sym, args) if sym.isGADTSkolem =>
- typeRef(NoPrefix, sym.deSkolemize, args)
- case tp1 => tp1
- }
- }
-
def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
cases mapConserve { cdef =>
newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
}
- def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
+ def adaptCase(cdef: CaseDef, mode: Mode, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
- def ptOrLub(tps: List[Type], pt: Type ) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
- def ptOrLubPacked(trees: List[Tree], pt: Type) = if (isFullyDefined(pt)) (pt, false) else weakLub(trees map (c => packedType(c, context.owner).deconst))
+ def packedTypes(trees: List[Tree]): List[Type] = trees map (c => packedType(c, context.owner).deconst)
// takes untyped sub-trees of a match and type checks them
- def typedMatch(selector: Tree, cases: List[CaseDef], mode: Int, pt: Type, tree: Tree = EmptyTree): Match = {
- val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
+ def typedMatch(selector: Tree, cases: List[CaseDef], mode: Mode, pt: Type, tree: Tree = EmptyTree): Match = {
+ val selector1 = checkDead(typedByValueExpr(selector))
val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector)
val casesTyped = typedCases(cases, selectorTp, pt)
- val (resTp, needAdapt) =
- if (opt.virtPatmat) ptOrLubPacked(casesTyped, pt)
- else ptOrLub(casesTyped map (_.tpe), pt)
-
- val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp))
+ def finish(cases: List[CaseDef], matchType: Type) =
+ treeCopy.Match(tree, selector1, cases) setType matchType
- val matchTyped = treeCopy.Match(tree, selector1, casesAdapted) setType resTp
- if (!newPatternMatching) // TODO: remove this in 2.11 -- only needed for old pattern matcher
- new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(matchTyped)
- matchTyped
+ if (isFullyDefined(pt))
+ finish(casesTyped, pt)
+ else packedTypes(casesTyped) match {
+ case packed if sameWeakLubAsLub(packed) => finish(casesTyped, lub(packed))
+ case packed =>
+ val lub = weakLub(packed)
+ finish(casesTyped map (adaptCase(_, mode, lub)), lub)
+ }
}
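
A sketch of the result type computation above when the expected type is not fully defined: the match is typed at the (weak) least upper bound of the packed case types.

    object LubSketch {
      val scrutinee: Any = 1
      // With no expected type, the case result types Int and String are lubbed,
      // so the whole match (and `result`) gets type Any.
      val result = scrutinee match {
        case i: Int => i
        case _      => "other"
      }
    }
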
- // match has been typed -- virtualize it if we're feeling experimental
- // (virtualized matches are expanded during type checking so they have the full context available)
- // otherwise, do nothing: matches are translated during phase `patmat` (unless -Xoldpatmat)
- def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = {
- import patmat.{vpmName, PureMatchTranslator, OptimizingMatchTranslator}
+ // match has been typed -- virtualize it during type checking so the full context is available
+ def virtualizedMatch(match_ : Match, mode: Mode, pt: Type) = {
+ import patmat.{ vpmName, PureMatchTranslator }
// TODO: add fallback __match sentinel to predef
val matchStrategy: Tree =
- if (!(newPatternMatching && opt.experimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
- else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
- case SilentResultValue(ms) => ms
- case _ => null
- }
+ if (!(settings.Xexperimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
+ else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match)), reportAmbiguousErrors = false) orElse (_ => null)
if (matchStrategy ne null) // virtualize
typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt)
@@ -2568,13 +2497,11 @@ trait Typers extends Modes with Adaptations with Tags {
* an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later)
* however, note that pattern matching codegen is designed to run *before* uncurry
*/
- def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Int, pt0: Type): Tree = {
- assert(pt0.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt0.")
-
- val pt = deskolemizeGADTSkolems(pt0)
- val targs = pt.normalize.typeArgs
+ def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Mode, pt: Type): Tree = {
+ assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.")
+ val targs = pt.dealiasWiden.typeArgs
- // if targs.head isn't fully defined, we can translate --> error
+ // if targs.head isn't fully defined, we can't translate --> error
targs match {
case argTp :: _ if isFullyDefined(argTp) => // ok
case _ => // uh-oh
@@ -2586,18 +2513,16 @@ trait Typers extends Modes with Adaptations with Tags {
val argTp :: resTp :: Nil = targs
// targs must conform to Any for us to synthesize an applyOrElse (fallback to apply otherwise -- typically for @cps annotated targs)
- val targsValidParams = targs forall (_ <:< AnyClass.tpe)
+ val targsValidParams = targs forall (_ <:< AnyTpe)
- val anonClass = (context.owner
- newAnonymousFunctionClass tree.pos
- addAnnotation AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List()))
+ val anonClass = context.owner newAnonymousFunctionClass tree.pos addAnnotation SerialVersionUIDAnnotation
import CODE._
val Match(sel, cases) = tree
// need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
- val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE_typed)).duplicate.asInstanceOf[CaseDef])
+ val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE)).duplicate.asInstanceOf[CaseDef])
// must generate a new tree every time
def selector: Tree = gen.mkUnchecked(
@@ -2701,8 +2626,15 @@ trait Typers extends Modes with Adaptations with Tags {
default -> gen.scalaFunctionConstr(List(A1Tpt), B1Tpt)
)
}
- val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
- val defdef = DefDef(methodSym, Modifiers(methodSym.flags), originals, rhs)
+ def newParam(param: Symbol): ValDef = {
+ val vd = ValDef(param, EmptyTree)
+ val tt @ TypeTree() = vd.tpt
+ tt setOriginal (originals(param) setPos param.pos.focus)
+ vd
+ }
+
+ val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
+ val defdef = newDefDef(methodSym, rhs)(vparamss = mapParamss(methodSym)(newParam), tpt = TypeTree(B1.tpe))
(defdef, matchResTp)
}
@@ -2714,12 +2646,12 @@ trait Typers extends Modes with Adaptations with Tags {
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
methodBodyTyper.context.scope enter paramSym
- methodSym setInfo MethodType(List(paramSym), BooleanClass.tpe)
+ methodSym setInfo MethodType(List(paramSym), BooleanTpe)
- val defaultCase = mkDefaultCase(FALSE_typed)
- val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanClass.tpe)
+ val defaultCase = mkDefaultCase(FALSE)
+ val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanTpe)
- DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanClass.tpe))
+ DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanTpe))
}
// only used for @cps annotated partial functions
@@ -2728,7 +2660,7 @@ trait Typers extends Modes with Adaptations with Tags {
val methodSym = anonClass.newMethod(nme.apply, tree.pos, FINAL | OVERRIDE)
val paramSym = mkParam(methodSym)
- methodSym setInfo MethodType(List(paramSym), AnyClass.tpe)
+ methodSym setInfo MethodType(List(paramSym), AnyTpe)
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
// should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
@@ -2764,7 +2696,7 @@ trait Typers extends Modes with Adaptations with Tags {
members foreach (m => anonClass.info.decls enter m.symbol)
val typedBlock = typedPos(tree.pos, mode, pt) {
- Block(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
+ Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), List())
))
}
@@ -2776,31 +2708,200 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
+ /** Synthesize and type check the implementation of a type with a Single Abstract Method
+ *
+ * `{ (p1: T1, ..., pN: TN) => body } : S`
+ *
+ * expands to (where `S` is the expected type that defines a single abstract method named `apply`)
+ *
+ * `{
+ * def apply$body(p1: T1, ..., pN: TN): T = body
+ * new S {
+ * def apply(p1: T1, ..., pN: TN): T = apply$body(p1,..., pN)
+ * }
+ * }`
+ *
+     * If `T` is not fully defined, it is inferred by type checking
+     * `apply$body` without a result type before type checking the block.
+     * The method's inferred result type is used instead of `T`. [See test/files/pos/sammy_poly.scala]
+ *
+ * The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `samClassTp`,
+ * and `resPt` is derived from `samClassTp` -- it may be fully defined, or not...
+ *
+ * The function's body is put in a method outside of the class definition to enforce scoping.
+ * S's members should not be in scope in `body`.
+ *
+     * The restriction on implicit arguments (neither S's constructor nor sam may take an implicit argument list)
+     * is largely to keep the implementation of type inference (the computation of `samClassTpFullyDefined`) simple.
+ *
+ * NOTE: it would be nicer to not have to type check `apply$body` separately when `T` is not fully defined.
+ * However T must be fully defined before we type the instantiation, as it'll end up as a parent type,
+ * which must be fully defined. Would be nice to have some kind of mechanism to insert type vars in a block of code,
+ * and have the instantiation of the first occurrence propagate to the rest of the block.
+ */
+ def synthesizeSAMFunction(sam: Symbol, fun: Function, resPt: Type, samClassTp: Type, mode: Mode): Tree = {
+ // assert(fun.vparams forall (vp => isFullyDefined(vp.tpt.tpe))) -- by construction, as we take them from sam's info
+ val sampos = fun.pos
+
+ // if the expected sam type is fully defined, use it for the method's result type
+ // otherwise, NoType, so that type inference will determine the method's result type
+ // resPt is syntactically contained in samClassTp, so if the latter is fully defined, so is the former
+ // ultimately, we want to fully define samClassTp as it is used as the superclass of our anonymous class
+ val samDefTp = if (isFullyDefined(resPt)) resPt else NoType
+ val bodyName = newTermName(sam.name + "$body")
+
+ // `def '${sam.name}\$body'($p1: $T1, ..., $pN: $TN): $resPt = $body`
+ val samBodyDef =
+ DefDef(NoMods,
+ bodyName,
+ Nil,
+ List(fun.vparams.map(_.duplicate)), // must duplicate as we're also using them for `samDef`
+ TypeTree(samDefTp) setPos sampos.focus,
+ fun.body)
+
+ // If we need to enter the sym for the body def before type checking the block,
+ // we'll create a nested context, as explained below.
+ var nestedTyper = this
+
+ // Type check body def before classdef to fully determine samClassTp (if necessary).
+ // As `samClassTp` determines a parent type for the class,
+ // we can't type check `block` in one go unless `samClassTp` is fully defined.
+ val samClassTpFullyDefined =
+ if (isFullyDefined(samClassTp)) samClassTp
+ else try {
+ // This creates a symbol for samBodyDef with a type completer that'll be triggered immediately below.
+ // The symbol is entered in the same scope used for the block below, and won't thus be reentered later.
+ // It has to be a new scope, though, or we'll "get ambiguous reference to overloaded definition" [pos/sammy_twice.scala]
+          // makeSilent: [pos/nonlocal-unchecked.scala -- when translating all functions to sams]
+ val nestedCtx = enterSym(context.makeNewScope(context.tree, context.owner).makeSilent(), samBodyDef)
+ nestedTyper = newTyper(nestedCtx)
+
+ // NOTE: this `samBodyDef.symbol.info` runs the type completer set up by the enterSym above
+ val actualSamType = samBodyDef.symbol.info
+
+ // we're trying to fully define the type arguments for this type constructor
+ val samTyCon = samClassTp.typeSymbol.typeConstructor
+
+ // the unknowns
+ val tparams = samClassTp.typeSymbol.typeParams
+ // ... as typevars
+ val tvars = tparams map freshVar
+
+ // 1. Recover partial information:
+ // - derive a type from samClassTp that has the corresponding tparams for type arguments that aren't fully defined
+ // - constrain typevars to be equal to type args that are fully defined
+ val samClassTpMoreDefined = appliedType(samTyCon,
+ (samClassTp.typeArgs, tparams, tvars).zipped map {
+ case (a, _, tv) if isFullyDefined(a) => tv =:= a; a
+ case (_, p, _) => p.typeConstructor
+ })
+
+ // the method type we're expecting the synthesized sam to have, based on the expected sam type,
+ // where fully defined type args to samClassTp have been preserved,
+ // with the unknown args replaced by their corresponding type param
+ val expectedSamType = samClassTpMoreDefined.memberInfo(sam)
- /**
- * @param fun ...
- * @param mode ...
- * @param pt ...
- * @return ...
+ // 2. make sure the body def's actual type (formals and result) conforms to
+ // sam's expected type (in terms of the typevars that represent the sam's class's type params)
+ actualSamType <:< expectedSamType.substituteTypes(tparams, tvars)
+
+ // solve constraints tracked by tvars
+ val targs = solvedTypes(tvars, tparams, tparams map varianceInType(sam.info), upper = false, lubDepth(sam.info :: Nil))
+
+ debuglog(s"sam infer: $samClassTp --> ${appliedType(samTyCon, targs)} by $actualSamType <:< $expectedSamType --> $targs for $tparams")
+
+ // a fully defined samClassTp
+ appliedType(samTyCon, targs)
+ } catch {
+ case _: NoInstance | _: TypeError =>
+ devWarning(sampos, s"Could not define type $samClassTp using ${samBodyDef.symbol.rawInfo} <:< ${samClassTp memberInfo sam} (for $sam)")
+ samClassTp
+ }
+
+ // `final override def ${sam.name}($p1: $T1, ..., $pN: $TN): $resPt = ${sam.name}\$body'($p1, ..., $pN)`
+ val samDef =
+ DefDef(Modifiers(FINAL | OVERRIDE | SYNTHETIC),
+ sam.name.toTermName,
+ Nil,
+ List(fun.vparams),
+ TypeTree(samBodyDef.tpt.tpe) setPos sampos.focus,
+ Apply(Ident(bodyName), fun.vparams map (p => Ident(p.name)))
+ )
+
+ val serializableParentAddendum =
+ if (typeIsSubTypeOfSerializable(samClassTp)) Nil
+ else List(TypeTree(SerializableTpe))
+
+ val classDef =
+ ClassDef(Modifiers(FINAL), tpnme.ANON_FUN_NAME, tparams = Nil,
+ gen.mkTemplate(
+ parents = TypeTree(samClassTpFullyDefined) :: serializableParentAddendum,
+ self = emptyValDef,
+ constrMods = NoMods,
+ vparamss = ListOfNil,
+ body = List(samDef),
+ superPos = sampos.focus
+ )
+ )
+
+ // type checking the whole block, so that everything is packaged together nicely
+ // and we don't have to create any symbols by hand
+ val block =
+ nestedTyper.typedPos(sampos, mode, samClassTpFullyDefined) {
+ Block(
+ samBodyDef,
+ classDef,
+ Apply(Select(New(Ident(tpnme.ANON_FUN_NAME)), nme.CONSTRUCTOR), Nil)
+ )
+ }
+
+ classDef.symbol addAnnotation SerialVersionUIDAnnotation
+ block
+ }
+
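A rough source-level picture of what synthesizeSAMFunction builds (the SAM type `Adder` and all names below are hypothetical; only the shape of the expansion matters, and function literals for SAM types need -Xexperimental in this series):

abstract class Adder { def add(x: Int): Int }   // hypothetical SAM type

object SamExpansionSketch {
  // For a source expression like `(y: Int) => y + 1` expected to be an `Adder`,
  // the block assembled above corresponds roughly to:
  val a: Adder = {
    def add$body(y: Int): Int = y + 1                        // the `$bodyName` DefDef
    final class $anon extends Adder with Serializable {      // the synthesized ClassDef
      final override def add(y: Int): Int = add$body(y)      // the forwarding samDef
    }
    new $anon()                                              // the constructor call in the Block
  }
}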
+ /** Type check a function literal.
+ *
+ * Based on the expected type pt, potentially synthesize an instance of
+ * - PartialFunction,
+ * - a type with a Single Abstract Method (under -Xexperimental for now).
*/
- private def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
+ private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = {
val numVparams = fun.vparams.length
- if (numVparams > definitions.MaxFunctionArity)
- return MaxFunctionArityError(fun)
-
- def decompose(pt: Type): (Symbol, List[Type], Type) =
- if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed
- ( pt.normalize.typeArgs.length - 1 == numVparams
- || fun.vparams.exists(_.tpt.isEmpty)
- ))
- (pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last)
- else
- (FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
+ val FunctionSymbol =
+ if (numVparams > definitions.MaxFunctionArity) NoSymbol
+ else FunctionClass(numVparams)
+
+ /* The Single Abstract Member of pt, unless pt is the built-in function type of the expected arity,
+ * as `(a => a): Int => Int` should not (yet) get the sam treatment.
+ */
+ val sam =
+ if (!settings.Xexperimental || pt.typeSymbol == FunctionSymbol) NoSymbol
+ else samOf(pt)
- val (clazz, argpts, respt) = decompose(pt)
- if (argpts.lengthCompare(numVparams) != 0)
+ /* The SAM case comes first so that this works:
+ * abstract class MyFun extends (Int => Int)
+ * (a => a): MyFun
+ *
+ * Note that the arity of the sam must correspond to the arity of the function.
+ */
+ val samViable = sam.exists && sameLength(sam.info.params, fun.vparams)
+ val (argpts, respt) =
+ if (samViable) {
+ val samInfo = pt memberInfo sam
+ (samInfo.paramTypes, samInfo.resultType)
+ } else {
+ pt baseType FunctionSymbol match {
+ case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
+ case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType)
+ }
+ }
+
+ if (!FunctionSymbol.exists)
+ MaxFunctionArityError(fun)
+ else if (argpts.lengthCompare(numVparams) != 0)
WrongNumberOfParametersError(fun, argpts)
else {
+ var issuedMissingParameterTypeError = false
foreach2(fun.vparams, argpts) { (vparam, argpt) =>
if (vparam.tpt.isEmpty) {
vparam.tpt.tpe =
@@ -2808,19 +2909,18 @@ trait Typers extends Modes with Adaptations with Tags {
else {
fun match {
case etaExpansion(vparams, fn, args) =>
- silent(_.typed(fn, forFunMode(mode), pt)) match {
- case SilentResultValue(fn1) if context.undetparams.isEmpty =>
- // if context,undetparams is not empty, the function was polymorphic,
+ silent(_.typed(fn, mode.forFunMode, pt)) filter (_ => context.undetparams.isEmpty) map { fn1 =>
+ // if context.undetparams is not empty, the function was polymorphic,
// so we need the missing arguments to infer its type. See #871
//println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams)
val ftpe = normalize(fn1.tpe) baseType FunctionClass(numVparams)
if (isFunctionType(ftpe) && isFullyDefined(ftpe))
return typedFunction(fun, mode, ftpe)
- case _ =>
}
case _ =>
}
- MissingParameterTypeError(fun, vparam, pt)
+ MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError)
+ issuedMissingParameterTypeError = true
ErrorType
}
if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus
@@ -2830,7 +2930,7 @@ trait Typers extends Modes with Adaptations with Tags {
fun.body match {
// translate `x => x match { <cases> }` : PartialFunction to
// `new PartialFunction { def applyOrElse(x, default) = x match { <cases> } def isDefinedAt(x) = ... }`
- case Match(sel, cases) if (sel ne EmptyTree) && newPatternMatching && (pt.typeSymbol == PartialFunctionClass) =>
+ case Match(sel, cases) if (sel ne EmptyTree) && (pt.typeSymbol == PartialFunctionClass) =>
// go to outer context -- must discard the context that was created for the Function since we're discarding the function
// thus, its symbol, which serves as the current context.owner, is not the right owner
// you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
@@ -2839,22 +2939,26 @@ trait Typers extends Modes with Adaptations with Tags {
if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe
outerTyper.synthesizePartialFunction(p.name, p.pos, fun.body, mode, pt)
+
+ // Use synthesizeSAMFunction to expand `(p1: T1, ..., pN: TN) => body`
+ // to an instance of the corresponding anonymous subclass of `pt`.
+ case _ if samViable =>
+ newTyper(context.outer).synthesizeSAMFunction(sam, fun, respt, pt, mode)
+
+ // regular Function
case _ =>
val vparamSyms = fun.vparams map { vparam =>
enterSym(context, vparam)
if (context.retyping) context.scope enter vparam.symbol
vparam.symbol
}
- val vparams = fun.vparams mapConserve (typedValDef)
- // for (vparam <- vparams) {
- // checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
- // }
+ val vparams = fun.vparams mapConserve typedValDef
val formals = vparamSyms map (_.tpe)
val body1 = typed(fun.body, respt)
val restpe = packedType(body1, fun.symbol).deconst.resultType
- val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
- // body = checkNoEscaping.locals(context.scope, restpe, body)
- treeCopy.Function(fun, vparams, body1).setType(funtpe)
+ val funtpe = appliedType(FunctionSymbol, formals :+ restpe: _*)
+
+ treeCopy.Function(fun, vparams, body1) setType funtpe
}
}
}
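As a plain-Scala illustration of the decomposition above (argpts/respt are read off the expected type), parameter types of a function literal can be omitted exactly when the expected type supplies them:

object ExpectedTypeSketch {
  // `pt baseType Function1` yields (List(Int), Int), so `x` needs no annotation:
  val inc: Int => Int = x => x + 1

  // The class from the comment on the SAM case above; it is ordinary Scala and,
  // under -Xexperimental in this series, `(a => a): MyFun` is also accepted.
  abstract class MyFun extends (Int => Int)

  // With no useful expected type, e.g. `val bad = x => x + 1`,
  // MissingParameterTypeError is reported instead.
}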
@@ -2872,13 +2976,8 @@ trait Typers extends Modes with Adaptations with Tags {
val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil))
templ.removeAttachment[CompoundTypeTreeOriginalAttachment]
templ updateAttachment att.copy(stats = stats1)
- for (stat <- stats1 if stat.isDef) {
- val member = stat.symbol
- if (!(context.owner.ancestors forall
- (bc => member.matchingSymbol(bc, context.owner.thisType) == NoSymbol))) {
- member setFlag OVERRIDE
- }
- }
+ for (stat <- stats1 if stat.isDef && stat.symbol.isOverridingSymbol)
+ stat.symbol setFlag OVERRIDE
}
}
@@ -2886,17 +2985,6 @@ trait Typers extends Modes with Adaptations with Tags {
case Some(imp1: Import) => imp1
case _ => log("unhandled import: "+imp+" in "+unit); imp
}
- private def isWarnablePureExpression(tree: Tree) = tree match {
- case EmptyTree | Literal(Constant(())) => false
- case _ =>
- !tree.isErrorTyped && (treeInfo isExprSafeToInline tree) && {
- val sym = tree.symbol
- (sym == null) || !(sym.isModule || sym.isLazy) || {
- debuglog("'Pure' but side-effecting expression in statement position: " + tree)
- false
- }
- }
- }
def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
val inBlock = exprOwner == context.owner
@@ -2911,7 +2999,7 @@ trait Typers extends Modes with Adaptations with Tags {
case imp @ Import(_, _) =>
imp.symbol.initialize
if (!imp.symbol.isError) {
- context = context.makeNewImport(imp)
+ context = context.make(imp)
typedImport(imp)
} else EmptyTree
case _ =>
@@ -2925,7 +3013,7 @@ trait Typers extends Modes with Adaptations with Tags {
} else newTyper(context.make(stat, exprOwner))
// XXX this creates a spurious dead code warning if an exception is thrown
// in a constructor, even if it is the only thing in the constructor.
- val result = checkDead(localTyper.typed(stat, EXPRmode | BYVALmode, WildcardType))
+ val result = checkDead(localTyper.typedByValueExpr(stat))
if (treeInfo.isSelfOrSuperConstrCall(result)) {
context.inConstructorSuffix = true
@@ -2933,7 +3021,7 @@ trait Typers extends Modes with Adaptations with Tags {
ConstructorsOrderError(stat)
}
- if (isWarnablePureExpression(result)) context.warning(stat.pos,
+ if (treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos,
"a pure expression does nothing in statement position; " +
"you may be omitting necessary parentheses"
)
@@ -2942,8 +3030,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /** 'accessor' and 'accessed' are so similar it becomes very difficult to
- * follow the logic, so I renamed one to something distinct.
+ /* 'accessor' and 'accessed' are so similar it becomes very difficult to
+ * follow the logic, so I renamed one to something distinct.
*/
def accesses(looker: Symbol, accessed: Symbol) = accessed.hasLocalFlag && (
(accessed.isParamAccessor)
@@ -2960,7 +3048,7 @@ trait Typers extends Modes with Adaptations with Tags {
(e.sym.isType || inBlock || (e.sym.tpe matches e1.sym.tpe)))
// default getters are defined twice when multiple overloads have defaults. an
// error for this is issued in RefChecks.checkDefaultsInOverloaded
- if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefaultFlag &&
+ if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefault &&
!e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) {
log("Double definition detected:\n " +
((e.sym.getClass, e.sym.info, e.sym.ownerChain)) + "\n " +
@@ -2984,7 +3072,7 @@ trait Typers extends Modes with Adaptations with Tags {
// SI-5877 The decls of a package include decls of the package object. But we don't want to add
// the corresponding synthetics to the package class, only to the package object class.
def shouldAdd(sym: Symbol) =
- inBlock || !isInPackageObject(sym, context.owner)
+ inBlock || !context.isInPackageObject(sym, context.owner)
for (sym <- scope if shouldAdd(sym))
for (tree <- context.unit.synthetics get sym) {
newStats += typedStat(tree) // might add even more synthetics to the scope
@@ -3004,7 +3092,7 @@ trait Typers extends Modes with Adaptations with Tags {
def matches(stat: Tree, synt: Tree) = (stat, synt) match {
// synt is default arg for stat
case (DefDef(_, statName, _, _, _, _), DefDef(mods, syntName, _, _, _, _)) =>
- mods.hasDefaultFlag && syntName.toString.startsWith(statName.toString)
+ mods.hasDefault && syntName.toString.startsWith(statName.toString)
// synt is companion module
case (ClassDef(_, className, _, _), ModuleDef(_, moduleName, _)) =>
@@ -3037,42 +3125,14 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedArg(arg: Tree, mode: Int, newmode: Int, pt: Type): Tree = {
- val typedMode = onlyStickyModes(mode) | newmode
- val t = withCondConstrTyper((mode & SCCmode) != 0)(_.typed(arg, typedMode, pt))
+ def typedArg(arg: Tree, mode: Mode, newmode: Mode, pt: Type): Tree = {
+ val typedMode = mode.onlySticky | newmode
+ val t = withCondConstrTyper(mode.inSccMode)(_.typed(arg, typedMode, pt))
checkDead.inMode(typedMode, t)
}
- def typedArgs(args: List[Tree], mode: Int) =
- args mapConserve (arg => typedArg(arg, mode, 0, WildcardType))
-
- /** Type trees in `args0` against corresponding expected type in `adapted0`.
- *
- * The mode in which each argument is typed is derived from `mode` and
- * whether the arg was originally by-name or var-arg (need `formals0` for that)
- * the default is by-val, of course.
- *
- * (docs reverse-engineered -- AM)
- */
- def typedArgs(args0: List[Tree], mode: Int, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
- val sticky = onlyStickyModes(mode)
- def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = {
- if (args.isEmpty || adapted.isEmpty) Nil
- else {
- // No formals left or * indicates varargs.
- val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head)
- val typedMode = sticky | (
- if (isVarArgs) STARmode | BYVALmode
- else if (isByNameParamType(formals.head)) 0
- else BYVALmode
- )
- val tree = typedArg(args.head, mode, typedMode, adapted.head)
- // formals may be empty, so don't call tail
- tree :: loop(args.tail, formals drop 1, adapted.tail)
- }
- }
- loop(args0, formals0, adapted0)
- }
+ def typedArgs(args: List[Tree], mode: Mode) =
+ args mapConserve (arg => typedArg(arg, mode, NOmode, WildcardType))
/** Does function need to be instantiated, because a missing parameter
* in an argument closure overlaps with an uninstantiated formal?
@@ -3114,26 +3174,25 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
// TODO_NMT: check the assumption that args nonEmpty
def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
def preSelectOverloaded(fun: Tree): Tree = {
- if (fun.hasSymbol && fun.symbol.isOverloaded) {
+ if (fun.hasSymbolField && fun.symbol.isOverloaded) {
// remove alternatives with wrong number of parameters without looking at types.
- // less expensive than including them in inferMethodAlternatvie (see below).
+ // less expensive than including them in inferMethodAlternative (see below).
def shapeType(arg: Tree): Type = arg match {
case Function(vparams, body) =>
- functionType(vparams map (vparam => AnyClass.tpe), shapeType(body))
+ functionType(vparams map (_ => AnyTpe), shapeType(body))
case AssignOrNamedArg(Ident(name), rhs) =>
NamedType(name, shapeType(rhs))
case _ =>
- NothingClass.tpe
+ NothingTpe
}
val argtypes = args map shapeType
val pre = fun.symbol.tpe.prefix
-
var sym = fun.symbol filter { alt =>
// must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
// now fixed by using isWeaklyCompatible in exprTypeArgs
@@ -3145,20 +3204,19 @@ trait Typers extends Modes with Adaptations with Tags {
// Types: "refs = Array(Map(), Map())". I determined that inference fails if there are at
// least two invariant type parameters. See the test case I checked in to help backstop:
// pos/isApplicableSafe.scala.
- isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
+ isApplicableSafe(context.undetparams, followApply(pre memberType alt), argtypes, pt)
}
if (sym.isOverloaded) {
- val sym1 = sym filter (alt => {
// eliminate functions that would result from tupling transforms
// keeps alternatives with repeated params
- hasExactlyNumParams(followApply(alt.tpe), argtypes.length) ||
- // also keep alts which define at least one default
- alt.tpe.paramss.exists(_.exists(_.hasDefault))
- })
+ val sym1 = sym filter (alt =>
+ isApplicableBasedOnArity(pre memberType alt, argtypes.length, varargsStar = false, tuplingAllowed = false)
+ || alt.tpe.params.exists(_.hasDefault)
+ )
if (sym1 != NoSymbol) sym = sym1
}
if (sym == NoSymbol) fun
- else adapt(fun setSymbol sym setType pre.memberType(sym), forFunMode(mode), WildcardType)
+ else adapt(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode, WildcardType)
} else fun
}
@@ -3167,28 +3225,30 @@ trait Typers extends Modes with Adaptations with Tags {
fun.tpe match {
case OverloadedType(pre, alts) =>
def handleOverloaded = {
- val undetparams = context.extractUndetparams()
-
- val argtpes = new ListBuffer[Type]
- val amode = forArgMode(fun, mode)
- val args1 = args map {
- case arg @ AssignOrNamedArg(Ident(name), rhs) =>
- // named args: only type the righthand sides ("unknown identifier" errors otherwise)
- val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
- argtpes += NamedType(name, rhs1.tpe.deconst)
- // the assign is untyped; that's ok because we call doTypedApply
- atPos(arg.pos) { new AssignOrNamedArg(arg.lhs, rhs1) }
- case arg =>
- val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
- argtpes += arg1.tpe.deconst
- arg1
+ val undetparams = context.undetparams
+ val (args1, argTpes) = context.savingUndeterminedTypeParams() {
+ val amode = forArgMode(fun, mode)
+ def typedArg0(tree: Tree) = typedArg(tree, amode, BYVALmode, WildcardType)
+ args.map {
+ case arg @ AssignOrNamedArg(Ident(name), rhs) =>
+ // named args: only type the righthand sides ("unknown identifier" errors otherwise)
+ val rhs1 = typedArg0(rhs)
+ // the assign is untyped; that's ok because we call doTypedApply
+ val arg1 = treeCopy.AssignOrNamedArg(arg, arg.lhs, rhs1)
+ (arg1, NamedType(name, rhs1.tpe.deconst))
+ case arg @ treeInfo.WildcardStarArg(repeated) =>
+ val arg1 = typedArg0(arg)
+ (arg1, RepeatedType(arg1.tpe.deconst))
+ case arg =>
+ val arg1 = typedArg0(arg)
+ (arg1, arg1.tpe.deconst)
+ }.unzip
}
- context.undetparams = undetparams
if (context.hasErrors)
setError(tree)
else {
- inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
- doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+ inferMethodAlternative(fun, undetparams, argTpes, pt)
+ doTypedApply(tree, adapt(fun, mode.forFunMode, WildcardType), args1, mode, pt)
}
}
handleOverloaded
@@ -3196,65 +3256,62 @@ trait Typers extends Modes with Adaptations with Tags {
case mt @ MethodType(params, _) =>
val paramTypes = mt.paramTypes
// repeat vararg as often as needed, remove by-name
- val formals = formalTypes(paramTypes, args.length)
+ val argslen = args.length
+ val formals = formalTypes(paramTypes, argslen)
- /** Try packing all arguments into a Tuple and apply `fun`
- * to that. This is the last thing which is tried (after
- * default arguments)
+ /* Try packing all arguments into a Tuple and apply `fun`
+ * to that. This is the last thing which is tried (after
+ * default arguments)
*/
- def tryTupleApply: Option[Tree] = {
- // if 1 formal, 1 arg (a tuple), otherwise unmodified args
- val tupleArgs = actualArgs(tree.pos.makeTransparent, args, formals.length)
-
- if (!sameLength(tupleArgs, args) && !isUnitForVarArgs(args, params)) {
+ def tryTupleApply: Tree = (
+ if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) {
+ val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args)))
// expected one argument, but got 0 or >1 ==> try applying to tuple
// the inner "doTypedApply" does "extractUndetparams" => restore when it fails
val savedUndetparams = context.undetparams
- silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) match {
- case SilentResultValue(t) =>
+ silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t =>
// Depending on user options, may warn or error here if
// a Unit or tuple was inserted.
- Some(t) filter (tupledTree =>
- !inExprModeButNot(mode, FUNmode)
- || tupledTree.symbol == null
- || checkValidAdaptation(tupledTree, args)
+ val keepTree = (
+ !mode.typingExprNotFun
+ || t.symbol == null
+ || checkValidAdaptation(t, args)
)
- case _ =>
- context.undetparams = savedUndetparams
- None
- }
- } else None
- }
+ if (keepTree) t else EmptyTree
+ } orElse { _ => context.undetparams = savedUndetparams ; EmptyTree }
+ }
+ else EmptyTree
+ )
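The user-visible effect of tryTupleApply is scalac's argument auto-tupling; a minimal sketch (whether the adaptation is kept, warned about, or rejected depends on settings such as -Yno-adapted-args):

object TuplingSketch {
  def sum(p: (Int, Int)): Int = p._1 + p._2
  // One formal of tuple type but two arguments: the call is re-typed as `sum((1, 2))`,
  // and checkValidAdaptation above decides whether that adaptation is acceptable.
  val n = sum(1, 2)
}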
- /** Treats an application which uses named or default arguments.
- * Also works if names + a vararg used: when names are used, the vararg
- * parameter has to be specified exactly once. Note that combining varargs
- * and defaults is ruled out by typedDefDef.
+ /* Treats an application which uses named or default arguments.
+       * Also works if names + a vararg are used: when names are used, the vararg
+ * parameter has to be specified exactly once. Note that combining varargs
+ * and defaults is ruled out by typedDefDef.
*/
def tryNamesDefaults: Tree = {
val lencmp = compareLengths(args, formals)
def checkNotMacro() = {
- if (fun.symbol != null && fun.symbol.filter(sym => sym != null && sym.isTermMacro && !sym.isErroneous) != NoSymbol)
- tryTupleApply getOrElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
+ if (treeInfo.isMacroApplication(fun))
+ tryTupleApply orElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
}
if (mt.isErroneous) duplErrTree
- else if (inPatternMode(mode)) {
+ else if (mode.inPatternMode) {
// #2064
duplErrorTree(WrongNumberOfArgsError(tree, fun))
} else if (lencmp > 0) {
- tryTupleApply getOrElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun))
+ tryTupleApply orElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun))
} else if (lencmp == 0) {
// we don't need defaults. names were used, so this application is transformed
// into a block (@see transformNamedApplication in NamesDefaults)
val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
if (namelessArgs exists (_.isErroneous)) {
duplErrTree
- } else if (!isIdentity(argPos) && !sameLength(formals, params))
- // !isIdentity indicates that named arguments are used to re-order arguments
+ } else if (!allArgsArePositional(argPos) && !sameLength(formals, params))
+ // !allArgsArePositional indicates that named arguments are used to re-order arguments
duplErrorTree(MultipleVarargError(tree))
- else if (isIdentity(argPos) && !isNamedApplyBlock(fun)) {
+ else if (allArgsArePositional(argPos) && !isNamedApplyBlock(fun)) {
// if there's no re-ordering, and fun is not transformed, no need to transform
// more than an optimization, e.g. important in "synchronized { x = update-x }"
checkNotMacro()
@@ -3297,69 +3354,47 @@ trait Typers extends Modes with Adaptations with Tags {
if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic
doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt)
} else {
- tryTupleApply getOrElse duplErrorTree(NotEnoughArgsError(tree, fun, missing))
+ tryTupleApply orElse duplErrorTree(NotEnoughArgsError(tree, fun, missing))
}
}
}
}
if (!sameLength(formals, args) || // wrong nb of arguments
- (args exists isNamed) || // uses a named argument
+ (args exists isNamedArg) || // uses a named argument
isNamedApplyBlock(fun)) { // fun was transformed to a named apply block =>
// integrate this application into the block
- if (dyna.isApplyDynamicNamed(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt)
+ if (dyna.isApplyDynamicNamed(fun) && isDynamicRewrite(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt)
else tryNamesDefaults
} else {
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
def handleMonomorphicCall: Tree = {
- // In order for checkDead not to be misled by the unfortunate special
- // case of AnyRef#synchronized (which is implemented with signature T => T
- // but behaves as if it were (=> T) => T) we need to know what is the actual
- // target of a call. Since this information is no longer available from
- // typedArg, it is recorded here.
- val args1 =
- // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
- // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
- // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
- // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
- // casting breaks SI-6145,
- // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
- if (!phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol))
+ // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
+ // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
+ // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
+ // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
+ // casting breaks SI-6145,
+ // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
+ def noExpectedType = !phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol)
+
+ val args1 = (
+ if (noExpectedType)
typedArgs(args, forArgMode(fun, mode))
else
- typedArgs(args, forArgMode(fun, mode), paramTypes, formals)
+ typedArgsForFormals(args, paramTypes, forArgMode(fun, mode))
+ )
// instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
// val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
// precise(foo) : foo.type => foo.type
- val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
+ val restpe = mt.resultType(args1 map (arg => gen stableTypeFor arg orElse arg.tpe))
def ifPatternSkipFormals(tp: Type) = tp match {
- case MethodType(_, rtp) if (inPatternMode(mode)) => rtp
+ case MethodType(_, rtp) if (mode.inPatternMode) => rtp
case _ => tp
}
- // Replace the Delegate-Chainer methods += and -= with corresponding
- // + and - calls, which are translated in the code generator into
- // Combine and Remove
- if (forMSIL) {
- fun match {
- case Select(qual, name) =>
- if (isSubType(qual.tpe, DelegateClass.tpe)
- && (name == encode("+=") || name == encode("-="))) {
- val n = if (name == encode("+=")) nme.PLUS else nme.MINUS
- val f = Select(qual, n)
- // the compiler thinks, the PLUS method takes only one argument,
- // but he thinks it's an instance method -> still two ref's on the stack
- // -> translated by backend
- val rhs = treeCopy.Apply(tree, f, args)
- return typed(Assign(qual, rhs))
- }
- case _ => ()
- }
- }
-
- /**
+ /*
* This is translating uses of List() into Nil. This is less
* than ideal from a consistency standpoint, but it shouldn't be
* altered without due caution.
@@ -3367,7 +3402,7 @@ trait Typers extends Modes with Adaptations with Tags {
* forced during kind-arity checking, so it is guarded by additional
* tests to ensure we're sufficiently far along.
*/
- if (args.isEmpty && !forInteractive && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
+ if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
atPos(tree.pos)(gen.mkNil setType restpe)
else
constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
@@ -3381,7 +3416,7 @@ trait Typers extends Modes with Adaptations with Tags {
doTypedApply(tree, fun, args, mode, pt)
} else {
def handlePolymorphicCall = {
- assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns
+ assert(!mode.inPatternMode, mode) // this case cannot arise for patterns
val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
if (targ == WildcardType) tparam.tpeHK else targ)
@@ -3407,9 +3442,8 @@ trait Typers extends Modes with Adaptations with Tags {
// define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
// returns those undetparams which have not been instantiated.
val undetparams = inferMethodInstance(fun, tparams, args1, pt)
- val result = doTypedApply(tree, fun, args1, mode, pt)
- context.undetparams = undetparams
- result
+ try doTypedApply(tree, fun, args1, mode, pt)
+ finally context.undetparams = undetparams
}
}
handlePolymorphicCall
@@ -3423,157 +3457,45 @@ trait Typers extends Modes with Adaptations with Tags {
if (!tree.isErrorTyped) setError(tree) else tree
// @H change to setError(treeCopy.Apply(tree, fun, args))
- case otpe if inPatternMode(mode) && unapplyMember(otpe).exists =>
+ // SI-7877 `isTerm` needed to exclude `class T[A] { def unapply(..) }; ... case T[X] =>`
+ case HasUnapply(unapply) if mode.inPatternMode && fun.isTerm =>
doTypedUnapply(tree, fun0, fun, args, mode, pt)
case _ =>
- duplErrorTree(ApplyWithoutArgsError(tree, fun))
- }
- }
-
- def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
- def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
- def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
-
- val otpe = fun.tpe
-
- if (args.length > MaxTupleArity)
- return duplErrorTree(TooManyArgsPatternError(fun))
-
- //
- def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
- case MethodType(param :: _, _) =>
- (Nil, param.tpe)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
- // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
- case OverloadedType(_, _) =>
- OverloadedUnapplyError(fun)
- (Nil, ErrorType)
- case _ =>
- UnapplyWithSingleArgError(fun)
- (Nil, ErrorType)
- }
-
- val unapp = unapplyMember(otpe)
- val unappType = otpe.memberType(unapp)
- val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
- val arg = Ident(argDummy) setType pt
-
- val uncheckedTypeExtractor =
- if (unappType.paramTypes.nonEmpty)
- extractorForUncheckedType(tree.pos, unappType.paramTypes.head)
- else None
-
- if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
- val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
- val unapplyContext = context.makeNewScope(context.tree, context.owner)
- freeVars foreach unapplyContext.scope.enter
-
- val typer1 = newTyper(unapplyContext)
- val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty)
-
- // turn any unresolved type variables in freevars into existential skolems
- val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
- arg.tpe = pattp.substSym(freeVars, skolems)
- argDummy setInfo arg.tpe
- }
-
- // setType null is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
-
- if (fun1.tpe.isErroneous) duplErrTree
- else {
- val resTp = fun1.tpe.finalResultType.normalize
- val nbSubPats = args.length
- val (formals, formalsExpanded) =
- extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
- if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
- else {
- val args1 = typedArgs(args, mode, formals, formalsExpanded)
- // This used to be the following (failing) assert:
- // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
- // I modified as follows. See SI-1048.
- val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
-
- val itype = glb(List(pt1, arg.tpe))
- arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
- val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
-
- // if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
- // skip if the unapply's type is not a method type with (at least, but really it should be exactly) one argument
- // also skip if we already wrapped a classtag extractor (so we don't keep doing that forever)
- if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply
- else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head)
- }
+ if (treeInfo.isMacroApplication(tree)) duplErrorTree(MacroTooManyArgumentListsError(tree, fun.symbol))
+ else duplErrorTree(ApplyWithoutArgsError(tree, fun))
}
}
- def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
- // TODO: disable when in unchecked match
- // we don't create a new Context for a Match, so find the CaseDef, then go out one level and navigate back to the match that has this case
- // val thisCase = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
- // val unchecked = thisCase.outer.tree.collect{case Match(selector, cases) if cases contains thisCase => selector} match {
- // case List(Typed(_, tpt)) if tpt.tpe hasAnnotation UncheckedClass => true
- // case t => println("outer tree: "+ (t, thisCase, thisCase.outer.tree)); false
- // }
- // println("wrapClassTagUnapply"+ (!isPastTyper && infer.containsUnchecked(pt), pt, uncheckedPattern))
- // println("wrapClassTagUnapply: "+ extractor)
- // println(util.Position.formatMessage(uncheckedPattern.pos, "made unchecked type test into a checked one", true))
-
- val args = List(uncheckedPattern)
- val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
- // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
- // and re-typechecks of the target of the unapply call in PATTERNmode,
- // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object,
- // but an arbitrary tree as is the case here
- doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
- }
-
- // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
- // return the corresponding extractor (an instance of ClassTag[`pt`])
- def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (!opt.virtPatmat || isPastTyper) None else {
- // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
- pt.normalize.typeConstructor match {
- // if at least one of the types in an intersection is checkable, use the checkable ones
- // this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike`
- // Coll is an abstract type, but SeqLike of course is not
- case RefinedType(parents, _) if (parents.length >= 2) && (parents.exists(tp => !infer.containsUnchecked(tp))) =>
- None
-
- case ptCheckable if infer.containsUnchecked(ptCheckable) =>
- val classTagExtractor = resolveClassTag(pos, ptCheckable)
-
- if (classTagExtractor != EmptyTree && unapplyMember(classTagExtractor.tpe) != NoSymbol)
- Some(classTagExtractor)
- else None
-
- case _ => None
- }
- }
-
/**
* Convert an annotation constructor call into an AnnotationInfo.
- *
- * @param annClass the expected annotation class
*/
- def typedAnnotation(ann: Tree, mode: Int = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = {
- lazy val annotationError = AnnotationInfo(ErrorType, Nil, Nil)
+ def typedAnnotation(ann: Tree, mode: Mode = EXPRmode, selfsym: Symbol = NoSymbol): AnnotationInfo = {
var hasError: Boolean = false
val pending = ListBuffer[AbsTypeError]()
+ def finish(res: AnnotationInfo): AnnotationInfo = {
+ if (hasError) {
+ pending.foreach(ErrorUtils.issueTypeError)
+ ErroneousAnnotation
+ }
+ else res
+ }
+
def reportAnnotationError(err: AbsTypeError) = {
pending += err
hasError = true
- annotationError
+ ErroneousAnnotation
}
- /** Calling constfold right here is necessary because some trees (negated
- * floats and literals in particular) are not yet folded.
+ /* Calling constfold right here is necessary because some trees (negated
+ * floats and literals in particular) are not yet folded.
*/
def tryConst(tr: Tree, pt: Type): Option[LiteralAnnotArg] = {
- val const: Constant = typed(constfold(tr), EXPRmode, pt) match {
+        // The typed tree may be relevantly different from the tree `tr`,
+        // e.g. it may have undergone an implicit conversion.
+ val ttree = typed(constfold(tr), pt)
+ val const: Constant = ttree match {
case l @ Literal(c) if !l.isErroneous => c
case tree => tree.tpe match {
case ConstantType(c) => c
@@ -3582,29 +3504,36 @@ trait Typers extends Modes with Adaptations with Tags {
}
if (const == null) {
- reportAnnotationError(AnnotationNotAConstantError(tr)); None
+ reportAnnotationError(AnnotationNotAConstantError(ttree)); None
} else if (const.value == null) {
reportAnnotationError(AnnotationArgNullError(tr)); None
} else
Some(LiteralAnnotArg(const))
}
- /** Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails,
- * an error message is reported and None is returned.
+ /* Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails,
+ * an error message is reported and None is returned.
*/
def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) =>
reportAnnotationError(ArrayConstantsError(tree)); None
case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
- val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true)
+ val annInfo = typedAnnotation(ann, mode, NoSymbol)
+ val annType = annInfo.tpe
+
+ if (!annType.typeSymbol.isSubClass(pt.typeSymbol))
+ reportAnnotationError(AnnotationTypeMismatchError(tpt, annType, annType))
+ else if (!annType.typeSymbol.isSubClass(ClassfileAnnotationClass))
+ reportAnnotationError(NestedAnnotationError(ann, annType))
+
if (annInfo.atp.isErroneous) { hasError = true; None }
else Some(NestedAnnotArg(annInfo))
// use of Array.apply[T: ClassTag](xs: T*): Array[T]
// and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
case Apply(fun, args) =>
- val typedFun = typed(fun, forFunMode(mode), WildcardType)
+ val typedFun = typed(fun, mode.forFunMode)
if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply)
pt match {
case TypeRef(_, ArrayClass, targ :: _) =>
@@ -3632,50 +3561,48 @@ trait Typers extends Modes with Adaptations with Tags {
}
// begin typedAnnotation
- val (fun, argss) = {
- def extract(fun: Tree, outerArgss: List[List[Tree]]):
- (Tree, List[List[Tree]]) = fun match {
- case Apply(f, args) =>
- extract(f, args :: outerArgss)
- case Select(New(tpt), nme.CONSTRUCTOR) =>
- (fun, outerArgss)
- case _ =>
- reportAnnotationError(UnexpectedTreeAnnotation(fun))
- (setError(fun), outerArgss)
- }
- extract(ann, List())
- }
-
- val res = if (fun.isErroneous) annotationError
- else {
- val typedFun @ Select(New(tpt), _) = typed(fun, forFunMode(mode), WildcardType)
- val annType = tpt.tpe
+ val treeInfo.Applied(fun0, targs, argss) = ann
+ if (fun0.isErroneous)
+ return finish(ErroneousAnnotation)
+ val typedFun0 = typed(fun0, mode.forFunMode)
+ val typedFunPart = (
+        // If there are dummy type arguments in the typedFun part, it suggests we
+ // must type the actual constructor call, not only the select. The value
+ // arguments are how the type arguments will be inferred.
+ if (targs.isEmpty && typedFun0.exists(t => t.tpe != null && isDummyAppliedType(t.tpe)))
+ logResult(s"Retyped $typedFun0 to find type args")(typed(argss.foldLeft(fun0)(Apply(_, _))))
+ else
+ typedFun0
+ )
+ val treeInfo.Applied(typedFun @ Select(New(annTpt), _), _, _) = typedFunPart
+ val annType = annTpt.tpe
- if (typedFun.isErroneous) annotationError
+ finish(
+ if (typedFun.isErroneous)
+ ErroneousAnnotation
else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) {
// annotation to be saved as java classfile annotation
val isJava = typedFun.symbol.owner.isJavaDefined
- if (!annType.typeSymbol.isNonBottomSubClass(annClass)) {
- reportAnnotationError(AnnotationTypeMismatchError(tpt, annClass.tpe, annType))
- } else if (argss.length > 1) {
+ if (argss.length > 1) {
reportAnnotationError(MultipleArgumentListForAnnotationError(ann))
- } else {
+ }
+ else {
val annScope = annType.decls
.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
- val names = new scala.collection.mutable.HashSet[Symbol]
- def hasValue = names exists (_.name == nme.value)
+ val names = mutable.Set[Symbol]()
names ++= (if (isJava) annScope.iterator
else typedFun.tpe.params.iterator)
+
+ def hasValue = names exists (_.name == nme.value)
val args = argss match {
- case List(List(arg)) if !isNamed(arg) && hasValue =>
- List(new AssignOrNamedArg(Ident(nme.value), arg))
- case as :: _ => as
+ case (arg :: Nil) :: Nil if !isNamedArg(arg) && hasValue => gen.mkNamedArg(nme.value, arg) :: Nil
+ case args :: Nil => args
}
val nvPairs = args map {
case arg @ AssignOrNamedArg(Ident(name), rhs) =>
val sym = if (isJava) annScope.lookup(name)
- else typedFun.tpe.params.find(p => p.name == name).getOrElse(NoSymbol)
+ else findSymbol(typedFun.tpe.params)(_.name == name)
if (sym == NoSymbol) {
reportAnnotationError(UnknownAnnotationNameError(arg, name))
(nme.ERROR, None)
@@ -3699,47 +3626,33 @@ trait Typers extends Modes with Adaptations with Tags {
reportAnnotationError(AnnotationMissingArgError(ann, annType, sym))
}
- if (hasError) annotationError
+ if (hasError) ErroneousAnnotation
else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos))
}
- } else if (requireJava) {
- reportAnnotationError(NestedAnnotationError(ann, annType))
- } else {
+ }
+ else {
val typedAnn = if (selfsym == NoSymbol) {
// local dummy fixes SI-5544
val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos)))
- localTyper.typed(ann, mode, annClass.tpe)
- } else {
- // Since a selfsym is supplied, the annotation should have
- // an extra "self" identifier in scope for type checking.
- // This is implemented by wrapping the rhs
- // in a function like "self => rhs" during type checking,
- // and then stripping the "self =>" and substituting
- // in the supplied selfsym.
+ localTyper.typed(ann, mode, annType)
+ }
+ else {
+ // Since a selfsym is supplied, the annotation should have an extra
+ // "self" identifier in scope for type checking. This is implemented
+ // by wrapping the rhs in a function like "self => rhs" during type
+ // checking, and then stripping the "self =>" and substituting in
+ // the supplied selfsym.
val funcparm = ValDef(NoMods, nme.self, TypeTree(selfsym.info), EmptyTree)
- val func = Function(List(funcparm), ann.duplicate)
- // The .duplicate of annot.constr
- // deals with problems that
- // accur if this annotation is
- // later typed again, which
- // the compiler sometimes does.
- // The problem is that "self"
- // ident's within annot.constr
- // will retain the old symbol
- // from the previous typing.
- val fun1clazz = FunctionClass(1)
- val funcType = typeRef(fun1clazz.tpe.prefix,
- fun1clazz,
- List(selfsym.info, annClass.tpe))
-
- (typed(func, mode, funcType): @unchecked) match {
- case t @ Function(List(arg), rhs) =>
- val subs =
- new TreeSymSubstituter(List(arg.symbol),List(selfsym))
- subs(rhs)
- }
+          // The .duplicate of annot.constr deals with problems that occur
+          // if this annotation is later typed again, which the compiler
+          // sometimes does. The problem is that "self" idents within
+ // annot.constr will retain the old symbol from the previous typing.
+ val func = Function(funcparm :: Nil, ann.duplicate)
+ val funcType = appliedType(FunctionClass(1), selfsym.info, annType)
+ val Function(arg :: Nil, rhs) = typed(func, mode, funcType)
+
+ rhs.substituteSymbols(arg.symbol :: Nil, selfsym :: Nil)
}
-
def annInfo(t: Tree): AnnotationInfo = t match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
AnnotationInfo(annType, args, List()).setOriginal(typedAnn).setPos(t.pos)
@@ -3763,36 +3676,33 @@ trait Typers extends Modes with Adaptations with Tags {
if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
unit.deprecationWarning(ann.pos, "@deprecated now takes two arguments; see the scaladoc.")
- if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) annotationError
+ if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation
else annInfo(typedAnn)
}
- }
-
- if (hasError) {
- pending.foreach(ErrorUtils.issueTypeError)
- annotationError
- } else res
+ )
}
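Two user-visible behaviours encoded above, shown with ordinary annotated definitions (the methods themselves are only illustrative):

object AnnotationSketch {
  // The `hasValue` branch lets a single positional argument to a Java (classfile)
  // annotation stand for its `value` element:
  @SuppressWarnings(Array("unchecked"))
  def cast(xs: List[Any]): List[String] = xs.asInstanceOf[List[String]]

  // The check near the end of typedAnnotation warns when @deprecated is given
  // fewer than two arguments; this is the form it asks for:
  @deprecated("use cast instead", "2.11.0")
  def oldCast(xs: List[Any]): List[String] = cast(xs)
}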
/** Compute an existential type from raw hidden symbols `syms` and type `tp`
*/
- def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, Some(context0.owner))
-
- def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
- ctx.owner.isTerm &&
- (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) ||
- {
- var ctx1 = ctx.outer
- while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) ctx1 = ctx1.outer
- (ctx1 != NoContext) && isReferencedFrom(ctx1, sym)
- }
-
- def isCapturedExistential(sym: Symbol) =
- (sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
- val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
- try !isReferencedFrom(context, sym)
- finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
- }
+ def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, context0.owner)
+
+ def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = (
+ ctx.owner.isTerm && (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) || {
+ var ctx1 = ctx.outer
+ while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope))
+ ctx1 = ctx1.outer
+
+ (ctx1 != NoContext) && isReferencedFrom(ctx1, sym)
+ }
+ )
+
+ def isCapturedExistential(sym: Symbol) = (
+ (sym hasAllFlags EXISTENTIAL | CAPTURED) && {
+ val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
+ try !isReferencedFrom(context, sym)
+ finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
+ }
+ )
def packCaptured(tpe: Type): Type = {
val captured = mutable.Set[Symbol]()
@@ -3804,26 +3714,31 @@ trait Typers extends Modes with Adaptations with Tags {
/** convert local symbols and skolems to existentials */
def packedType(tree: Tree, owner: Symbol): Type = {
- def defines(tree: Tree, sym: Symbol) =
- sym.isExistentialSkolem && sym.unpackLocation == tree ||
- tree.isDef && tree.symbol == sym
- def isVisibleParameter(sym: Symbol) =
- sym.isParameter && (sym.owner == owner) && (sym.isType || !owner.isAnonymousFunction)
+ def defines(tree: Tree, sym: Symbol) = (
+ sym.isExistentialSkolem && sym.unpackLocation == tree
+ || tree.isDef && tree.symbol == sym
+ )
+ def isVisibleParameter(sym: Symbol) = (
+ sym.isParameter
+ && (sym.owner == owner)
+ && (sym.isType || !owner.isAnonymousFunction)
+ )
def containsDef(owner: Symbol, sym: Symbol): Boolean =
(!sym.hasPackageFlag) && {
var o = sym.owner
while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner
o == owner && !isVisibleParameter(sym)
}
- var localSyms = scala.collection.immutable.Set[Symbol]()
- var boundSyms = scala.collection.immutable.Set[Symbol]()
+ var localSyms = immutable.Set[Symbol]()
+ var boundSyms = immutable.Set[Symbol]()
def isLocal(sym: Symbol): Boolean =
if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false
else if (owner == NoSymbol) tree exists (defines(_, sym))
else containsDef(owner, sym) || isRawParameter(sym) || isCapturedExistential(sym)
def containsLocal(tp: Type): Boolean =
tp exists (t => isLocal(t.typeSymbol) || isLocal(t.termSymbol))
- val normalizeLocals = new TypeMap {
+
+ val dealiasLocals = new TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType && containsLocal(tp) && (tp.dealias ne tp)) apply(tp.dealias)
@@ -3876,25 +3791,25 @@ trait Typers extends Modes with Adaptations with Tags {
for (sym <- remainingSyms) addLocals(sym.existentialBound)
}
- val normalizedTpe = normalizeLocals(tree.tpe)
- addLocals(normalizedTpe)
- packSymbols(localSyms.toList, normalizedTpe)
+ val dealiasedType = dealiasLocals(tree.tpe)
+ addLocals(dealiasedType)
+ packSymbols(localSyms.toList, dealiasedType)
}
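A plain-Scala example of why packedType exists: a value of a local class escaping its defining block gets its type packed into an existential over the local symbols rather than referring to them directly.

object PackedTypeSketch {
  def leak = {
    class Local { def n: Int = 1 }
    new Local   // the inferred result type of `leak` is an existential hiding `Local`
  }
}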
def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) =
if (!checkClassType(tpt) && noGen) tpt
else atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
- protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = {
+ protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Mode): Tree = {
for (wc <- tree.whereClauses)
- if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL }
+ if (wc.symbol == NoSymbol) { namer enterSym wc; wc.symbol setFlag EXISTENTIAL }
else context.scope enter wc.symbol
val whereClauses1 = typedStats(tree.whereClauses, context.owner)
- for (vd @ ValDef(_, _, _, _) <- tree.whereClauses)
+ for (vd @ ValDef(_, _, _, _) <- whereClauses1)
if (vd.symbol.tpe.isVolatile)
AbstractionFromVolatileTypeError(vd)
val tpt1 = typedType(tree.tpt, mode)
- existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) => {
+ existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) => {
val original = tpt1 match {
case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses))
case _ => {
@@ -3908,7 +3823,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
// lifted out of typed1 because it's needed in typedImplicit0
- protected def typedTypeApply(tree: Tree, mode: Int, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
+ protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
case OverloadedType(pre, alts) =>
inferPolyAlternatives(fun, args map (_.tpe))
val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
@@ -3917,7 +3832,7 @@ trait Typers extends Modes with Adaptations with Tags {
// as we don't know which alternative to choose... here we do
map2Conserve(args, tparams) {
//@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ (arg, tparam) => typedHigherKindedType(arg, mode, Kind.FromParams(tparam.typeParams))
}
} else // @M: there's probably something wrong when args.length != tparams.length... (triggered by bug #320)
// Martin, I'm using fake trees, because, if you use args or arg.map(typedType),
@@ -3932,13 +3847,13 @@ trait Typers extends Modes with Adaptations with Tags {
if (sameLength(tparams, args)) {
val targs = args map (_.tpe)
checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
- if (fun.symbol == Predef_classOf)
- typedClassOf(tree, args.head, true)
+ if (isPredefClassOf(fun.symbol))
+ typedClassOf(tree, args.head, noGen = true)
else {
if (!isPastTyper && fun.symbol == Any_isInstanceOf && targs.nonEmpty) {
val scrutineeType = fun match {
case Select(qual, _) => qual.tpe
- case _ => AnyClass.tpe
+ case _ => AnyTpe
}
checkCheckable(tree, targs.head, scrutineeType, inPattern = false)
}
@@ -3997,17 +3912,17 @@ trait Typers extends Modes with Adaptations with Tags {
// else false
}
- def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
def argToBinding(arg: Tree): Tree = arg match {
case AssignOrNamedArg(i @ Ident(name), rhs) =>
- atPos(i.pos.withEnd(rhs.pos.endOrPoint)) {
+ atPos(i.pos.withEnd(rhs.pos.end)) {
gen.mkTuple(List(atPos(i.pos)(CODE.LIT(name.toString)), rhs))
}
case _ =>
gen.mkTuple(List(CODE.LIT(""), arg))
}
- val t = treeCopy.Apply(orig, fun, args map argToBinding)
+ val t = treeCopy.Apply(orig, unmarkDynamicRewrite(fun), args map argToBinding)
wrapErrors(t, _.typed(t, mode, pt))
}
@@ -4031,11 +3946,14 @@ trait Typers extends Modes with Adaptations with Tags {
*
*/
def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] = {
- log(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)")
+ debuglog(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)")
val treeInfo.Applied(treeSelection, _, _) = tree
- def isDesugaredApply = treeSelection match {
- case Select(`qual`, nme.apply) => true
- case _ => false
+ def isDesugaredApply = {
+ val protoQual = macroExpandee(qual) orElse qual
+ treeSelection match {
+ case Select(`protoQual`, nme.apply) => true
+ case _ => false
+ }
}
acceptsApplyDynamicWithType(qual, name) map { tp =>
// If tp == NoType, pass only explicit type arguments to applyXXX. Not used at all
@@ -4047,20 +3965,20 @@ trait Typers extends Modes with Adaptations with Tags {
def applyOp(args: List[Tree]) = if (hasNamed(args)) nme.applyDynamicNamed else nme.applyDynamic
def matches(t: Tree) = isDesugaredApply || treeInfo.dissectApplied(t).core == treeSelection
- /** Note that the trees which arrive here are potentially some distance from
- * the trees of direct interest. `cxTree` is some enclosing expression which
- * may apparently be arbitrarily larger than `tree`; and `tree` itself is
- * too small, having at least in some cases lost its explicit type parameters.
- * This logic is designed to use `tree` to pinpoint the immediately surrounding
- * Apply/TypeApply/Select node, and only then creates the dynamic call.
- * See SI-6731 among others.
+ /* Note that the trees which arrive here are potentially some distance from
+ * the trees of direct interest. `cxTree` is some enclosing expression which
+ * may apparently be arbitrarily larger than `tree`; and `tree` itself is
+ * too small, having at least in some cases lost its explicit type parameters.
+ * This logic is designed to use `tree` to pinpoint the immediately surrounding
+ * Apply/TypeApply/Select node, and only then creates the dynamic call.
+ * See SI-6731 among others.
*/
def findSelection(t: Tree): Option[(TermName, Tree)] = t match {
case Apply(fn, args) if hasStar(args) => DynamicVarArgUnsupported(tree, applyOp(args)) ; None
case Apply(fn, args) if matches(fn) => Some((applyOp(args), fn))
case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs))
case _ if matches(t) => Some((nme.selectDynamic, t))
- case _ => t.children flatMap findSelection headOption
+ case _ => (t.children flatMap findSelection).headOption
}
findSelection(cxTree) match {
case Some((opName, treeInfo.Applied(_, targs, _))) =>
@@ -4069,48 +3987,38 @@ trait Typers extends Modes with Adaptations with Tags {
val nameStringLit = atPos(treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent) {
Literal(Constant(name.decode))
}
- atPos(qual.pos)(Apply(fun, List(nameStringLit)))
+ markDynamicRewrite(atPos(qual.pos)(Apply(fun, List(nameStringLit))))
case _ =>
setError(tree)
}
}
}
-
- def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = {
- silent(typeTree) match {
- case SilentResultValue(r) => r
- case SilentTypeError(err) => DynamicRewriteError(tree, err)
- }
- }
+ def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err.head))
}
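The rewrites that mkInvoke/findSelection perform correspond to scala.Dynamic at the source level; a small self-contained example of the selectDynamic/updateDynamic/applyDynamic cases:

import scala.language.dynamics

class Bag extends Dynamic {
  private var store = Map.empty[String, Any]
  def selectDynamic(name: String): Any = store(name)
  def updateDynamic(name: String)(value: Any): Unit = store += name -> value
  def applyDynamic(name: String)(args: Any*): String = s"$name(${args.mkString(", ")})"
}

object DynamicSketch {
  val b = new Bag
  b.x = 42              // ~> b.updateDynamic("x")(42)      (the Assign case)
  val v = b.x           // ~> b.selectDynamic("x")          (the bare selection case)
  val r = b.call(1, 2)  // ~> b.applyDynamic("call")(1, 2)  (the Apply case)
}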
- final def deindentTyping() = context.typingIndentLevel -= 2
- final def indentTyping() = context.typingIndentLevel += 2
- @inline final def printTyping(s: => String) = {
- if (printTypings)
- println(context.typingIndent + s.replaceAll("\n", "\n" + context.typingIndent))
- }
- @inline final def printInference(s: => String) = {
- if (printInfers)
- println(s)
- }
+ def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
+ // Lookup in the given class using the root mirror.
+ def lookupInOwner(owner: Symbol, name: Name): Symbol =
+ if (mode.inQualMode) rootMirror.missingHook(owner, name) else NoSymbol
- def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
- def isPatternMode = inPatternMode(mode)
+ // Lookup in the given qualifier. Used in last-ditch efforts by typedIdent and typedSelect.
+ def lookupInRoot(name: Name): Symbol = lookupInOwner(rootMirror.RootClass, name)
+ def lookupInEmpty(name: Name): Symbol = rootMirror.EmptyPackageClass.info member name
- //Console.println("typed1("+tree.getClass()+","+Integer.toHexString(mode)+","+pt+")")
- //@M! get the type of the qualifier in a Select tree, otherwise: NoType
- def prefixType(fun: Tree): Type = fun match {
- case Select(qualifier, _) => qualifier.tpe
-// case Ident(name) => ??
- case _ => NoType
- }
+ def lookupInQualifier(qual: Tree, name: Name): Symbol = (
+ if (name == nme.ERROR || qual.tpe.widen.isErroneous)
+ NoSymbol
+ else lookupInOwner(qual.tpe.typeSymbol, name) orElse {
+ NotAMemberError(tree, qual, name)
+ NoSymbol
+ }
+ )
def typedAnnotated(atd: Annotated): Tree = {
val ann = atd.annot
val arg1 = typed(atd.arg, mode, pt)
- /** mode for typing the annotation itself */
- val annotMode = mode & ~TYPEmode | EXPRmode
+ /* mode for typing the annotation itself */
+ val annotMode = (mode &~ TYPEmode) | EXPRmode
def resultingTypeTree(tpe: Type) = {
// we need symbol-ful originals for reification
@@ -4129,7 +4037,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (ann.tpe == null) {
// an annotated type
val selfsym =
- if (!settings.selfInAnnots.value)
+ if (!settings.selfInAnnots)
NoSymbol
else
arg1.tpe.selfsym orElse {
@@ -4165,7 +4073,7 @@ trait Typers extends Modes with Adaptations with Tags {
// Erroneous annotations were already reported in typedAnnotation
arg1 // simply drop erroneous annotations
else {
- ann.tpe = atype
+ ann setType atype
resultingTypeTree(atype)
}
} else {
@@ -4176,7 +4084,7 @@ trait Typers extends Modes with Adaptations with Tags {
else {
if (ann.tpe == null) {
val annotInfo = typedAnnotation(ann, annotMode)
- ann.tpe = arg1.tpe.withAnnotation(annotInfo)
+ ann setType arg1.tpe.withAnnotation(annotInfo)
}
val atype = ann.tpe
Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype
@@ -4200,7 +4108,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (name != tpnme.WILDCARD) namer.enterInScope(sym)
else context.scope.enter(sym)
- tree setSymbol sym setType sym.tpe
+ tree setSymbol sym setType sym.tpeHK
case name: TermName =>
val sym =
@@ -4208,7 +4116,9 @@ trait Typers extends Modes with Adaptations with Tags {
else context.owner.newValue(name, tree.pos)
if (name != nme.WILDCARD) {
- if ((mode & ALTmode) != 0) VariableInPatternAlternativeError(tree)
+ if (context.inPatAlternative)
+ VariableInPatternAlternativeError(tree)
+
namer.enterInScope(sym)
}
@@ -4233,16 +4143,16 @@ trait Typers extends Modes with Adaptations with Tags {
def typedArrayValue(tree: ArrayValue) = {
val elemtpt1 = typedType(tree.elemtpt, mode)
- val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
- treeCopy.ArrayValue(tree, elemtpt1, elems1)
- .setType(
- (if (isFullyDefined(pt) && !phase.erasedTypes) pt
- else arrayType(elemtpt1.tpe)).notNull)
+ val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
+ // see run/t6126 for an example where `pt` does not suffice (tagged types)
+ val tpe1 = if (isFullyDefined(pt) && !phase.erasedTypes) pt else arrayType(elemtpt1.tpe)
+
+ treeCopy.ArrayValue(tree, elemtpt1, elems1) setType tpe1
}
def typedAssign(lhs: Tree, rhs: Tree): Tree = {
// see SI-7617 for an explanation of why macro expansion is suppressed
- def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode, WildcardType)
+ def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode)
val lhs1 = unsuppressMacroExpansion(typedLhs(suppressMacroExpansion(lhs)))
val varsym = lhs1.symbol
@@ -4257,7 +4167,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (treeInfo.mayBeVarGetter(varsym)) {
lhs1 match {
case treeInfo.Applied(Select(qual, name), _, _) =>
- val sel = Select(qual, nme.getterToSetter(name.toTermName)) setPos lhs.pos
+ val sel = Select(qual, name.setterName) setPos lhs.pos
val app = Apply(sel, List(rhs)) setPos tree.pos
return typed(app, mode, pt)
@@ -4268,12 +4178,12 @@ trait Typers extends Modes with Adaptations with Tags {
// // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?!
// (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) {
if (varsym.isVariable || varsym.isValue && phase.erasedTypes) {
- val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe)
- treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe
+ val rhs1 = typedByValueExpr(rhs, lhs1.tpe)
+ treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe
}
else if(dyna.isDynamicallyUpdatable(lhs1)) {
- val rhs1 = typed(rhs, EXPRmode | BYVALmode, WildcardType)
- val t = atPos(lhs1.pos.withEnd(rhs1.pos.endOrPoint)) {
+ val rhs1 = typedByValueExpr(rhs)
+ val t = atPos(lhs1.pos.withEnd(rhs1.pos.end)) {
Apply(lhs1, List(rhs1))
}
dyna.wrapErrors(t, _.typed1(t, mode, pt))
@@ -4281,51 +4191,53 @@ trait Typers extends Modes with Adaptations with Tags {
else fail()
}
- def typedIf(tree: If) = {
- val cond1 = checkDead(typed(tree.cond, EXPRmode | BYVALmode, BooleanClass.tpe))
- val thenp = tree.thenp
- val elsep = tree.elsep
- if (elsep.isEmpty) { // in the future, should be unnecessary
- val thenp1 = typed(thenp, UnitClass.tpe)
- treeCopy.If(tree, cond1, thenp1, elsep) setType thenp1.tpe
- } else {
- var thenp1 = typed(thenp, pt)
- var elsep1 = typed(elsep, pt)
- def thenTp = packedType(thenp1, context.owner)
- def elseTp = packedType(elsep1, context.owner)
-
- // println("typedIf: "+(thenp1.tpe, elsep1.tpe, ptOrLub(List(thenp1.tpe, elsep1.tpe)),"\n", thenTp, elseTp, thenTp =:= elseTp))
- val (owntype, needAdapt) =
- // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
- // in the special (though common) case where the types are equal, it pays to pack before comparing
- // especially virtpatmat needs more aggressive unification of skolemized types
- // this breaks src/library/scala/collection/immutable/TrieIterator.scala
- if ( opt.virtPatmat && !isPastTyper
- && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this)
- && thenTp =:= elseTp
- ) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
- // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
- else ptOrLub(thenp1.tpe :: elsep1.tpe :: Nil, pt)
-
- if (needAdapt) { //isNumericValueType(owntype)) {
- thenp1 = adapt(thenp1, mode, owntype)
- elsep1 = adapt(elsep1, mode, owntype)
- }
- treeCopy.If(tree, cond1, thenp1, elsep1) setType owntype
- }
- }
-
- // under -Xexperimental (and not -Xoldpatmat), and when there's a suitable __match in scope, virtualize the pattern match
+ def typedIf(tree: If): If = {
+ val cond1 = checkDead(typedByValueExpr(tree.cond, BooleanTpe))
+ // One-legged ifs don't need a lot of analysis
+ if (tree.elsep.isEmpty)
+ return treeCopy.If(tree, cond1, typed(tree.thenp, UnitTpe), tree.elsep) setType UnitTpe
+
+ val thenp1 = typed(tree.thenp, pt)
+ val elsep1 = typed(tree.elsep, pt)
+
+ // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
+ // in the special (though common) case where the types are equal, it pays to pack before comparing
+ // especially virtpatmat needs more aggressive unification of skolemized types
+ // this breaks src/library/scala/collection/immutable/TrieIterator.scala
+      // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
+ def samePackedTypes = (
+ !isPastTyper
+ && thenp1.tpe.annotations.isEmpty
+ && elsep1.tpe.annotations.isEmpty
+ && packedType(thenp1, context.owner) =:= packedType(elsep1, context.owner)
+ )
+ def finish(ownType: Type) = treeCopy.If(tree, cond1, thenp1, elsep1) setType ownType
+ // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
+ // @PP: This was doing the samePackedTypes check BEFORE the isFullyDefined check,
+ // which based on everything I see everywhere else was a bug. I reordered it.
+ if (isFullyDefined(pt))
+ finish(pt)
+ // Important to deconst, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
+ else thenp1.tpe.deconst :: elsep1.tpe.deconst :: Nil match {
+ case tp :: _ if samePackedTypes => finish(tp)
+ case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
+ case tpes =>
+ val lub = weakLub(tpes)
+ treeCopy.If(tree, cond1, adapt(thenp1, mode, lub), adapt(elsep1, mode, lub)) setType lub
+ }
+ }
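The SI-6331 remark above can be illustrated with a hedged sketch of the user-visible behaviour: without `deconst`, both branches below would keep the constant type Int(0) and the conditional could be folded to 0 without ever evaluating the diverging condition. The demo names are hypothetical.

object DeconstDemo {
  // Both branches have the constant type Int(0); deconst widens them to Int,
  // so the if-expression keeps type Int and the condition is still evaluated.
  def boom: Int = if (???) 0 else 0

  def main(args: Array[String]): Unit =
    try println(boom)
    catch { case _: NotImplementedError => println("the condition was evaluated, as expected") }
}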
+
+ // When there's a suitable __match in scope, virtualize the pattern match
// otherwise, type the Match and leave it until phase `patmat` (immediately after typer)
// empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it
def typedVirtualizedMatch(tree: Match): Tree = {
val selector = tree.selector
val cases = tree.cases
if (selector == EmptyTree) {
- if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass))
+ if (pt.typeSymbol == PartialFunctionClass)
synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt)
else {
- val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
+ val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1
val params = for (i <- List.range(0, arity)) yield
atPos(tree.pos.focusStart) {
ValDef(Modifiers(PARAM | SYNTHETIC),
@@ -4333,7 +4245,11 @@ trait Typers extends Modes with Adaptations with Tags {
}
val ids = for (p <- params) yield Ident(p.name)
val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
- val body = treeCopy.Match(tree, selector1, cases)
+ // SI-8120 If we don't duplicate the cases, the original Match node will share trees with ones that
+ // receive symbols owned by this function. However if, after a silent mode session, we discard
+            // this Function and try a different approach (e.g. applying a view to the receiver) we end up
+            // with orphaned symbols which blow up far down the pipeline (or can be detected with -Ycheck:typer).
+ val body = treeCopy.Match(tree, selector1, (cases map duplicateAndKeepPositions).asInstanceOf[List[CaseDef]])
typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
}
} else
@@ -4352,9 +4268,9 @@ trait Typers extends Modes with Adaptations with Tags {
val DefDef(_, name, _, _, restpt, _) = enclMethod.tree
if (restpt.tpe eq null) {
ReturnWithoutTypeError(tree, enclMethod.owner)
- } else {
- context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
+ }
+ else {
+ val expr1 = context withinReturnExpr typedByValueExpr(expr, restpt.tpe)
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
@@ -4364,7 +4280,7 @@ trait Typers extends Modes with Adaptations with Tags {
unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
- val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+ val tp = pluginsTypedReturn(NothingTpe, this, res, restpt.tpe)
res.setType(tp)
}
}
@@ -4381,7 +4297,7 @@ trait Typers extends Modes with Adaptations with Tags {
// given a dealiased type.
val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias)
if (checkStablePrefixClassType(tpt0))
- if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
+ if (tpt0.hasSymbolField && !tpt0.symbol.typeParams.isEmpty) {
context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
notifyUndetparamsAdded(context.undetparams)
TypeTree().setOriginal(tpt0)
@@ -4390,8 +4306,8 @@ trait Typers extends Modes with Adaptations with Tags {
else tpt0
}
- /** If current tree <tree> appears in <val x(: T)? = <tree>>
- * return `tp with x.type' else return `tp`.
+ /* If current tree <tree> appears in <val x(: T)? = <tree>>
+ * return `tp with x.type' else return `tp`.
*/
def narrowRhs(tp: Type) = { val sym = context.tree.symbol
context.tree match {
@@ -4411,7 +4327,7 @@ trait Typers extends Modes with Adaptations with Tags {
NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR)
setError(tpt)
}
- else if (!( tp == sym.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable
+ else if (!( tp == sym.typeOfThis // when there's no explicit self type -- with (#3612) or without self variable
// sym.thisSym.tpe == tp.typeOfThis (except for objects)
|| narrowRhs(tp) <:< tp.typeOfThis
|| phase.erasedTypes
@@ -4441,36 +4357,15 @@ trait Typers extends Modes with Adaptations with Tags {
else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
case MethodType(formals, _) =>
if (isFunctionType(pt)) expr1
- else expr1 match {
- case Select(qual, name) if (forMSIL &&
- pt != WildcardType &&
- pt != ErrorType &&
- isSubType(pt, DelegateClass.tpe)) =>
- val scalaCaller = newScalaCaller(pt)
- addScalaCallerInfo(scalaCaller, expr1.symbol)
- val n: Name = scalaCaller.name
- val del = Ident(DelegateClass) setType DelegateClass.tpe
- val f = Select(del, n)
- //val f1 = TypeApply(f, List(Ident(pt.symbol) setType pt))
- val args: List[Tree] = if(expr1.symbol.isStatic) List(Literal(Constant(null)))
- else List(qual) // where the scala-method is located
- val rhs = Apply(f, args)
- typed(rhs)
- case _ =>
- adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
- }
+ else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
case ErrorType =>
expr1
case _ =>
UnderscoreEtaError(expr1)
}
- /**
- * @param args ...
- * @return ...
- */
- def tryTypedArgs(args: List[Tree], mode: Int): Option[List[Tree]] = {
- val c = context.makeSilent(false)
+ def tryTypedArgs(args: List[Tree], mode: Mode): Option[List[Tree]] = {
+ val c = context.makeSilent(reportAmbiguousErrors = false)
c.retyping = true
try {
val res = newTyper(c).typedArgs(args, mode)
@@ -4479,184 +4374,156 @@ trait Typers extends Modes with Adaptations with Tags {
case ex: CyclicReference =>
throw ex
case te: TypeError =>
- // @H some of typer erros can still leak,
+ // @H some of typer errors can still leak,
// for instance in continuations
None
- } finally {
- c.flushBuffer()
}
}
- /** Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to
- * insert an implicit conversion.
+ /* Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to
+ * insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
- def onError(typeError: AbsTypeError): Tree = {
- if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start)
+ def onError(typeErrors: Seq[AbsTypeError]): Tree = {
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start)
- // If the problem is with raw types, copnvert to existentials and try again.
- // See #4712 for a case where this situation arises,
- if ((fun.symbol ne null) && fun.symbol.isJavaDefined) {
- val newtpe = rawToExistential(fun.tpe)
- if (fun.tpe ne newtpe) {
- // println("late cooking: "+fun+":"+fun.tpe) // DEBUG
- return tryTypedApply(fun setType newtpe, args)
- }
+          // If the problem is with raw types, convert to existentials and try again.
+          // See #4712 for a case where this situation arises.
+ if ((fun.symbol ne null) && fun.symbol.isJavaDefined) {
+ val newtpe = rawToExistential(fun.tpe)
+ if (fun.tpe ne newtpe) {
+ // println("late cooking: "+fun+":"+fun.tpe) // DEBUG
+ return tryTypedApply(fun setType newtpe, args)
}
+ }
+ def treesInResult(tree: Tree): List[Tree] = tree :: (tree match {
+ case Block(_, r) => treesInResult(r)
+ case Match(_, cases) => cases
+ case CaseDef(_, _, r) => treesInResult(r)
+ case Annotated(_, r) => treesInResult(r)
+ case If(_, t, e) => treesInResult(t) ++ treesInResult(e)
+ case Try(b, catches, _) => treesInResult(b) ++ catches
+ case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r)
+ case Select(qual, name) => treesInResult(qual)
+ case Apply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult)
+ case TypeApply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult)
+ case _ => Nil
+ })
+ def errorInResult(tree: Tree) = treesInResult(tree) exists (err => typeErrors.exists(_.errPos == err.pos))
- def treesInResult(tree: Tree): List[Tree] = tree :: (tree match {
- case Block(_, r) => treesInResult(r)
- case Match(_, cases) => cases
- case CaseDef(_, _, r) => treesInResult(r)
- case Annotated(_, r) => treesInResult(r)
- case If(_, t, e) => treesInResult(t) ++ treesInResult(e)
- case Try(b, catches, _) => treesInResult(b) ++ catches
- case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r)
- case _ => Nil
- })
- def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == typeError.errPos)
-
- val retry = (typeError.errPos != null) && (fun :: tree :: args exists errorInResult)
- printTyping {
- val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ")
- if (retry) "second try: " + funStr
- else "no second try: " + funStr + " because error not in result: " + typeError.errPos+"!="+tree.pos
- }
- if (retry) {
- val Select(qual, name) = fun
- tryTypedArgs(args, forArgMode(fun, mode)) match {
- case Some(args1) =>
- val qual1 =
- if (!pt.isError) adaptToArguments(qual, name, args1, pt, true, true)
- else qual
- if (qual1 ne qual) {
- val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
- return typed1(tree1, mode | SNDTRYmode, pt)
- }
- case _ => ()
- }
+ val retry = (typeErrors.forall(_.errPos != null)) && (fun :: tree :: args exists errorInResult)
+ typingStack.printTyping({
+ val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ")
+ if (retry) "second try: " + funStr
+ else "no second try: " + funStr + " because error not in result: " + typeErrors.head.errPos+"!="+tree.pos
+ })
+ if (retry) {
+ val Select(qual, name) = fun
+ tryTypedArgs(args, forArgMode(fun, mode)) match {
+ case Some(args1) =>
+ val qual1 =
+ if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true)
+ else qual
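A short illustration of what `issueTryWarnings` reports, assuming ordinary user code (the names below are made up): an unguarded catch-all pattern draws the "catches all Throwables" warning, while spelling out the `Throwable` bound clears it, and a `try` with neither `catch` nor `finally` is flagged as equivalent to a plain block.

object TryWarningsDemo {
  def parse(s: String): Int =
    try s.toInt
    catch {
      // case e => -1          // would warn: this catches all Throwables
      case e: Throwable => -1  // explicit `: Throwable` clears the warning
    }

  def main(args: Array[String]): Unit =
    println(parse("42") + parse("oops"))  // prints 41
}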
+ if (qual1 ne qual) {
+ val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
+ return context withinSecondTry typed1(tree1, mode, pt)
+ }
+ case _ => ()
}
- issue(typeError)
- setError(treeCopy.Apply(tree, fun, args))
+ }
+ typeErrors foreach issue
+ setError(treeCopy.Apply(tree, fun, args))
}
- silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
- case SilentResultValue(t) =>
- t
- case SilentTypeError(err) =>
- onError(err)
- }
+ silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError
}
def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
+ // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)`
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
- if (args.isEmpty && stableApplication && isPatternMode) {
- // treat stable function applications f() as expressions.
- //
- // [JZ] According to Martin, this is related to the old pattern matcher, which
- // needs to typecheck after a the translation of `x.f` to `x.f()` in a prior
- // compilation phase. As part of SI-7377, this has been tightened with `args.isEmpty`,
- // but we should remove it altogether in Scala 2.11.
- typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
- } else {
- val funpt = if (isPatternMode) pt else WildcardType
- val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
- val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
-
- def onError(reportError: => Tree): Tree = {
- fun match {
- case Select(qual, name)
- if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
- val qual1 = typedQualifier(qual)
- if (treeInfo.isVariableOrGetter(qual1)) {
- if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
- convertToAssignment(fun, qual1, name, args)
- } else {
- if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
- reportError
- }
- case _ =>
- if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
- reportError
- }
- }
- silent(_.typed(fun, forFunMode(mode), funpt),
- if ((mode & EXPRmode) != 0) false else context.ambiguousErrors,
- if ((mode & EXPRmode) != 0) tree else context.tree) match {
- case SilentResultValue(fun1) =>
- val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
- def isImplicitMethod(tpe: Type) = tpe match {
- case mt: MethodType => mt.isImplicit
- case _ => false
- }
- val useTry = (
- !isPastTyper
- && fun2.isInstanceOf[Select]
- && !isImplicitMethod(fun2.tpe)
- && ((fun2.symbol eq null) || !fun2.symbol.isConstructor)
- && (mode & (EXPRmode | SNDTRYmode)) == EXPRmode
- )
- val res =
- if (useTry) tryTypedApply(fun2, args)
- else doTypedApply(tree, fun2, args, mode, pt)
-
- /*
- if (fun2.hasSymbol && fun2.symbol.isConstructor && (mode & EXPRmode) != 0) {
- res.tpe = res.tpe.notNull
- }
- */
- // TODO: In theory we should be able to call:
- //if (fun2.hasSymbol && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) {
- // But this causes cyclic reference for Array class in Cleanup. It is easy to overcome this
- // by calling ArrayClass.info here (or some other place before specialize).
- if (fun2.symbol == Array_apply && !res.isErrorTyped) {
- val checked = gen.mkCheckInit(res)
- // this check is needed to avoid infinite recursion in Duplicators
- // (calling typed1 more than once for the same tree)
- if (checked ne res) typed { atPos(tree.pos)(checked) }
- else res
- } else
- res
- case SilentTypeError(err) =>
- onError({issue(err); setError(tree)})
- }
+ val funpt = if (mode.inPatternMode) pt else WildcardType
+ val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
+ val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
+
+ def onError(reportError: => Tree): Tree = fun match {
+ case Select(qual, name) if !mode.inPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
+ val qual1 = typedQualifier(qual)
+ if (treeInfo.isVariableOrGetter(qual1)) {
+ if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
+ convertToAssignment(fun, qual1, name, args)
+ }
+ else {
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ case _ =>
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ val silentResult = silent(
+ op = _.typed(fun, mode.forFunMode, funpt),
+ reportAmbiguousErrors = !mode.inExprMode && context.ambiguousErrors,
+ newtree = if (mode.inExprMode) tree else context.tree
+ )
+ silentResult match {
+ case SilentResultValue(fun1) =>
+ val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
+ if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
+ val noSecondTry = (
+ isPastTyper
+ || context.inSecondTry
+ || (fun2.symbol ne null) && fun2.symbol.isConstructor
+ || isImplicitMethodType(fun2.tpe)
+ )
+ val isFirstTry = fun2 match {
+ case Select(_, _) => !noSecondTry && mode.inExprMode
+ case _ => false
+ }
+ if (isFirstTry)
+ tryTypedApply(fun2, args)
+ else
+ doTypedApply(tree, fun2, args, mode, pt)
+ case err: SilentTypeError =>
+ onError({
+ err.reportableErrors foreach issue
+ args foreach (arg => typed(arg, mode, ErrorType))
+ setError(tree)
+ })
}
}
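To make the `convertToAssignment` fallback in `onError` above concrete, here is a hedged sketch of the rewrite it enables at the source level; the collection and variable names are illustrative only.

object OpAssignDemo {
  def main(args: Array[String]): Unit = {
    val counts = scala.collection.mutable.Map("a" -> 1)
    // Int has no `+=`, so typing counts("a").+= fails; because the name is an
    // op-assignment and the qualifier is a getter, the typer retries the expression as
    // counts("a") = counts("a") + 1, i.e. counts.update("a", counts("a") + 1).
    counts("a") += 1

    var total = 0
    total += 1  // same fallback for a local var: rewritten to total = total + 1
    println((counts("a"), total))  // prints (2,1)
  }
}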
- def typedApply(tree: Apply) = {
- val fun = tree.fun
- val args = tree.args
- fun match {
- case Block(stats, expr) =>
- typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
- case _ =>
- normalTypedApply(tree, fun, args) match {
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe != null &&
- tpt.tpe.typeSymbol == ArrayClass &&
- args.length == 1 &&
- erasure.GenericArray.unapply(tpt.tpe).isDefined) => // !!! todo simplify by using extractor
- // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
- // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len), where Array HK gets applied (N-1) times
- // [Eugene] no more MaxArrayDims. ClassTags are flexible enough to allow creation of arrays of arbitrary dimensionality (w.r.t JVM restrictions)
- val Some((level, componentType)) = erasure.GenericArray.unapply(tpt.tpe)
- val tagType = List.iterate(componentType, level)(tpe => appliedType(ArrayClass.toTypeConstructor, List(tpe))).last
- atPos(tree.pos) {
- val tag = resolveClassTag(tree.pos, tagType)
- if (tag.isEmpty) MissingClassTagError(tree, tagType)
- else typed(new ApplyToImplicitArgs(Select(tag, nme.newArray), args))
+ // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
+ // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len)
+ // where Array HK gets applied (N-1) times
+ object ArrayInstantiation {
+ def unapply(tree: Apply) = tree match {
+ case Apply(Select(New(tpt), name), arg :: Nil) if tpt.tpe != null && tpt.tpe.typeSymbol == ArrayClass =>
+ Some(tpt.tpe) collect {
+ case erasure.GenericArray(level, componentType) =>
+ val tagType = (1 until level).foldLeft(componentType)((res, _) => arrayType(res))
+
+ resolveClassTag(tree.pos, tagType) match {
+ case EmptyTree => MissingClassTagError(tree, tagType)
+ case tag => atPos(tree.pos)(new ApplyToImplicitArgs(Select(tag, nme.newArray), arg :: Nil))
}
- case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => //SI-5696
- TooManyArgumentListsForConstructor(tree)
- case tree1 =>
- tree1
}
+ case _ => None
}
}
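A hedged sketch of the source-level effect of `ArrayInstantiation` (the helper below is not from this patch): when the element type is abstract, `new Array[T](n)` is elaborated into a call on the implicit `ClassTag` evidence, roughly `classTag[T].newArray(n)`, which is exactly why the context bound is required.

import scala.reflect.ClassTag

object ArrayInstantiationDemo {
  // new Array[T](n) is rewritten by the typer into, roughly,
  // implicitly[ClassTag[T]].newArray(n).
  def zeros[T: ClassTag](n: Int): Array[T] = new Array[T](n)

  def main(args: Array[String]): Unit =
    println(zeros[Int](3).mkString("[", ", ", "]"))  // prints [0, 0, 0]
}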
+ def typedApply(tree: Apply) = tree match {
+ case Apply(Block(stats, expr), args) =>
+ typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
+ case Apply(fun, args) =>
+ normalTypedApply(tree, fun, args) match {
+ case ArrayInstantiation(tree1) => typed(tree1, mode, pt)
+ case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => TooManyArgumentListsForConstructor(tree) //SI-5696
+ case tree1 => tree1
+ }
+ }
+
def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = {
val prefix = name.toTermName stripSuffix nme.EQL
def mkAssign(vble: Tree): Tree =
@@ -4710,8 +4577,6 @@ trait Typers extends Modes with Adaptations with Tags {
case This(_) => qual1.symbol
case _ => qual1.tpe.typeSymbol
}
- //println(clazz+"/"+qual1.tpe.typeSymbol+"/"+qual1)
-
def findMixinSuper(site: Type): Type = {
var ps = site.parents filter (_.typeSymbol.name == mix)
if (ps.isEmpty)
@@ -4719,11 +4584,6 @@ trait Typers extends Modes with Adaptations with Tags {
if (ps.isEmpty) {
debuglog("Fatal: couldn't find site " + site + " in " + site.parents.map(_.typeSymbol.name))
if (phase.erasedTypes && context.enclClass.owner.isImplClass) {
- // println(qual1)
- // println(clazz)
- // println(site)
- // println(site.parents)
- // println(mix)
// the reference to super class got lost during erasure
restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class")
ErrorType
@@ -4741,7 +4601,7 @@ trait Typers extends Modes with Adaptations with Tags {
val owntype = (
if (!mix.isEmpty) findMixinSuper(clazz.tpe)
- else if ((mode & SUPERCONSTRmode) != 0) clazz.info.firstParent
+ else if (context.inSuperInit) clazz.info.firstParent
else intersectionType(clazz.info.parents)
)
treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
@@ -4755,14 +4615,28 @@ trait Typers extends Modes with Adaptations with Tags {
if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
}
- /** Attribute a selection where <code>tree</code> is <code>qual.name</code>.
- * <code>qual</code> is already attributed.
- *
- * @param qual ...
- * @param name ...
- * @return ...
+ /* Attribute a selection where `tree` is `qual.name`.
+ * `qual` is already attributed.
*/
def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = {
+ val t = typedSelectInternal(tree, qual, name)
+ // Checking for OverloadedTypes being handed out after overloading
+ // resolution has already happened.
+ if (isPastTyper) t.tpe match {
+ case OverloadedType(pre, alts) =>
+ if (alts forall (s => (s.owner == ObjectClass) || (s.owner == AnyClass) || isPrimitiveValueClass(s.owner))) ()
+ else if (settings.debug) printCaller(
+ s"""|Select received overloaded type during $phase, but typer is over.
+ |If this type reaches the backend, we are likely doomed to crash.
+ |$t has these overloads:
+ |${alts map (s => " " + s.defStringSeenAs(pre memberType s)) mkString "\n"}
+ |""".stripMargin
+ )("")
+ case _ =>
+ }
+ t
+ }
+ def typedSelectInternal(tree: Tree, qual: Tree, name: Name): Tree = {
def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map { t =>
dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
}
@@ -4771,82 +4645,65 @@ trait Typers extends Modes with Adaptations with Tags {
// symbol not found? --> try to convert implicitly to a type that does have the required
// member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an
// xml member to StringContext, which in turn has an unapply[Seq] method)
- if (name != nme.CONSTRUCTOR && inExprModeOr(mode, PATTERNmode)) {
- val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, true, true)
+ if (name != nme.CONSTRUCTOR && mode.inAny(EXPRmode | PATTERNmode)) {
+ val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = true, saveErrors = true)
if ((qual1 ne qual) && !qual1.isErrorTyped)
return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
NoSymbol
}
if (phase.erasedTypes && qual.isInstanceOf[Super] && tree.symbol != NoSymbol)
- qual.tpe = tree.symbol.owner.tpe
+ qual setType tree.symbol.owner.tpe
if (!reallyExists(sym)) {
def handleMissing: Tree = {
- if (context.unit.isJava && name.isTypeName) {
- // SI-3120 Java uses the same syntax, A.B, to express selection from the
- // value A and from the type A. We have to try both.
- val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
- if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
- }
-
- // try to expand according to Dynamic rules.
- asDynamicCall foreach (x => return x)
-
- debuglog(
- "qual = " + qual + ":" + qual.tpe +
- "\nSymbol=" + qual.tpe.termSymbol + "\nsymbol-info = " + qual.tpe.termSymbol.info +
- "\nscope-id = " + qual.tpe.termSymbol.info.decls.hashCode() + "\nmembers = " + qual.tpe.members +
- "\nname = " + name + "\nfound = " + sym + "\nowner = " + context.enclClass.owner)
-
- def makeInteractiveErrorTree = {
- val tree1 = tree match {
- case Select(_, _) => treeCopy.Select(tree, qual, name)
- case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
- }
- setError(tree1)
- }
-
- if (name == nme.ERROR && forInteractive)
- return makeInteractiveErrorTree
-
- if (!qual.tpe.widen.isErroneous) {
- if ((mode & QUALmode) != 0) {
- val lastTry = rootMirror.missingHook(qual.tpe.typeSymbol, name)
- if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
+ def errorTree = missingSelectErrorTree(tree, qual, name)
+ def asTypeSelection = (
+ if (context.unit.isJava && name.isTypeName) {
+ // SI-3120 Java uses the same syntax, A.B, to express selection from the
+ // value A and from the type A. We have to try both.
+ atPos(tree.pos)(gen.convertToSelectFromType(qual, name)) match {
+ case EmptyTree => None
+ case tree1 => Some(typed1(tree1, mode, pt))
+ }
}
- NotAMemberError(tree, qual, name)
- }
-
- if (forInteractive) makeInteractiveErrorTree else setError(tree)
+ else None
+ )
+ debuglog(s"""
+ |qual=$qual:${qual.tpe}
+ |symbol=${qual.tpe.termSymbol.defString}
+ |scope-id=${qual.tpe.termSymbol.info.decls.hashCode}
+ |members=${qual.tpe.members mkString ", "}
+ |name=$name
+ |found=$sym
+ |owner=${context.enclClass.owner}
+ """.stripMargin)
+
+ // 1) Try converting a term selection on a java class into a type selection.
+ // 2) Try expanding according to Dynamic rules.
+ // 3) Try looking up the name in the qualifier.
+ asTypeSelection orElse asDynamicCall getOrElse (lookupInQualifier(qual, name) match {
+ case NoSymbol => setError(errorTree)
+ case found => typed1(tree setSymbol found, mode, pt)
+ })
}
handleMissing
- } else {
+ }
+ else {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
}
val (result, accessibleError) = silent(_.makeAccessible(tree1, sym, qual.tpe, qual)) match {
+ case SilentTypeError(err: AccessTypeError) =>
+ (tree1, Some(err))
case SilentTypeError(err) =>
- if (err.kind != ErrorKinds.Access) {
- context issue err
- return setError(tree)
- }
- else (tree1, Some(err))
+ context issue err
+ return setError(tree)
case SilentResultValue(treeAndPre) =>
(stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None)
}
- def isPotentialNullDeference() = {
- !isPastTyper &&
- !sym.isConstructor &&
- !(qual.tpe <:< NotNullClass.tpe) && !qual.tpe.isNotNull &&
- !(List(Any_isInstanceOf, Any_asInstanceOf) contains result.symbol) // null.is/as is not a dereference
- }
- // unit is null here sometimes; how are we to know when unit might be null? (See bug #2467.)
- if (settings.warnSelectNullable.value && isPotentialNullDeference && unit != null)
- unit.warning(tree.pos, "potential null pointer dereference: "+tree)
-
result match {
// could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs.nonEmpty => // TODO: somehow the new qual is not checked in refchecks
@@ -4861,7 +4718,7 @@ trait Typers extends Modes with Adaptations with Tags {
case _ if accessibleError.isDefined =>
// don't adapt constructor, SI-6074
val qual1 = if (name == nme.CONSTRUCTOR) qual
- else adaptToMemberWithArgs(tree, qual, name, mode, false, false)
+ else adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = false, saveErrors = false)
if (!qual1.isErrorTyped && (qual1 ne qual))
typed(Select(qual1, name) setPos tree.pos, mode, pt)
else
@@ -4876,320 +4733,115 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedSelectOrSuperCall(tree: Select) = {
- val qual = tree.qualifier
- val name = tree.name
- qual match {
- case _: Super if name == nme.CONSTRUCTOR =>
- val qual1 =
- typed(qual, EXPRmode | QUALmode | POLYmode | SUPERCONSTRmode, WildcardType)
- // the qualifier type of a supercall constructor is its first parent class
- typedSelect(tree, qual1, nme.CONSTRUCTOR)
- case _ =>
- if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
- var qual1 = checkDead(typedQualifier(qual, mode))
- if (name.isTypeName) qual1 = checkStable(qual1)
-
- val tree1 = // temporarily use `filter` and an alternative for `withFilter`
- if (name == nme.withFilter)
- silent(_ => typedSelect(tree, qual1, name)) match {
- case SilentResultValue(result) =>
- result
- case _ =>
- silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
- case SilentResultValue(result2) =>
- unit.deprecationWarning(
- tree.pos, "`withFilter' method does not yet exist on " + qual1.tpe.widen +
- ", using `filter' method instead")
- result2
- case SilentTypeError(err) =>
- WithFilterError(tree, err)
- }
- }
- else
- typedSelect(tree, qual1, name)
-
- if (tree.isInstanceOf[PostfixSelect])
- checkFeature(tree.pos, PostfixOpsFeature, name.decode)
- if (tree1.symbol != null && tree1.symbol.isOnlyRefinementMember)
- checkFeature(tree1.pos, ReflectiveCallsFeature, tree1.symbol.toString)
-
- if (qual1.hasSymbolWhich(_.isRootPackage)) treeCopy.Ident(tree1, name)
- else tree1
+ // temporarily use `filter` as an alternative for `withFilter`
+ def tryWithFilterAndFilter(tree: Select, qual: Tree): Tree = {
+ def warn() = unit.deprecationWarning(tree.pos, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead")
+ silent(_ => typedSelect(tree, qual, nme.withFilter)) orElse { _ =>
+ silent(_ => typed1(Select(qual, nme.filter) setPos tree.pos, mode, pt)) match {
+ case SilentResultValue(res) => warn() ; res
+ case SilentTypeError(err) => WithFilterError(tree, err)
+ }
}
}
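The fallback implemented by `tryWithFilterAndFilter` can be observed from user code with a hypothetical container that defines `map` and `filter` but no `withFilter`: the guard of a `for` comprehension desugars to a `withFilter` call, and the typer retries it as `filter`, emitting the deprecation warning quoted above.

class Box[A](private val elems: List[A]) {
  def map[B](f: A => B): Box[B]       = new Box(elems map f)
  def filter(p: A => Boolean): Box[A] = new Box(elems filter p)
  override def toString               = elems.mkString("Box(", ", ", ")")
}

object WithFilterFallbackDemo {
  def main(args: Array[String]): Unit = {
    val b = new Box(List(1, 2, 3, 4))
    // Desugars to b.withFilter(_ % 2 == 0).map(_ * 10); Box has no withFilter,
    // so the typer falls back to filter (with a deprecation warning).
    val r = for (x <- b if x % 2 == 0) yield x * 10
    println(r)  // Box(20, 40)
  }
}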
+ def typedSelectOrSuperCall(tree: Select) = tree match {
+ case Select(qual @ Super(_, _), nme.CONSTRUCTOR) =>
+ // the qualifier type of a supercall constructor is its first parent class
+ typedSelect(tree, typedSelectOrSuperQualifier(qual), nme.CONSTRUCTOR)
+ case Select(qual, name) =>
+ if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
+ val qualTyped = checkDead(typedQualifier(qual, mode))
+ val qualStableOrError = (
+ if (qualTyped.isErrorTyped || !name.isTypeName || treeInfo.admitsTypeSelection(qualTyped))
+ qualTyped
+ else
+ UnstableTreeError(qualTyped)
+ )
+ val tree1 = name match {
+ case nme.withFilter => tryWithFilterAndFilter(tree, qualStableOrError)
+ case _ => typedSelect(tree, qualStableOrError, name)
+ }
+ def sym = tree1.symbol
+ if (tree.isInstanceOf[PostfixSelect])
+ checkFeature(tree.pos, PostfixOpsFeature, name.decode)
+ if (sym != null && sym.isOnlyRefinementMember && !sym.isMacro)
+ checkFeature(tree1.pos, ReflectiveCallsFeature, sym.toString)
+
+ qualStableOrError.symbol match {
+ case s: Symbol if s.isRootPackage => treeCopy.Ident(tree1, name)
+ case _ => tree1
+ }
+ }
- /** Attribute an identifier consisting of a simple name or an outer reference.
+ /* A symbol qualifies if:
+ * - it exists
+ * - it is not stale (stale symbols are made to disappear here)
+ * - if we are in a constructor pattern, method definitions do not qualify
+ * unless they are stable. Otherwise, 'case x :: xs' would find the :: method.
+ */
+ def qualifies(sym: Symbol) = (
+ sym.hasRawInfo
+ && reallyExists(sym)
+ && !(mode.typingConstructorPattern && sym.isMethod && !sym.isStable)
+ )
+
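A small example of the constructor-pattern restriction encoded in `qualifies` above, using only standard library types: in `case x :: rest`, the name `::` must resolve to the case class used as an extractor, not to the `::` method that `List` also exposes, which is exactly what the stability check rules out.

object QualifiesDemo {
  def describe(xs: List[Int]): String = xs match {
    // `::` here resolves to the scala.collection.immutable.:: case class,
    // not to the List#:: method, thanks to the check above.
    case x :: rest => s"head=$x, tail size=${rest.size}"
    case Nil       => "empty"
  }

  def main(args: Array[String]): Unit =
    println(describe(List(1, 2, 3)))  // prints head=1, tail size=2
}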
+ /* Attribute an identifier consisting of a simple name or an outer reference.
*
- * @param tree The tree representing the identifier.
- * @param name The name of the identifier.
- * Transformations: (1) Prefix class members with this.
- * (2) Change imported symbols to selections
+ * @param tree The tree representing the identifier.
+ * @param name The name of the identifier.
+ * Transformations: (1) Prefix class members with this.
+ * (2) Change imported symbols to selections
*/
def typedIdent(tree: Tree, name: Name): Tree = {
- var errorContainer: AbsTypeError = null
- def ambiguousError(msg: String) = {
- assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
- errorContainer = AmbiguousIdentError(tree, name, msg)
- }
- def identError(tree: AbsTypeError) = {
- assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
- errorContainer = tree
- }
-
- var defSym: Symbol = tree.symbol // the directly found symbol
- var pre: Type = NoPrefix // the prefix type of defSym, if a class member
- var qual: Tree = EmptyTree // the qualifier tree if transformed tree is a select
- var inaccessibleSym: Symbol = NoSymbol // the first symbol that was found but that was discarded
- // for being inaccessible; used for error reporting
- var inaccessibleExplanation: String = ""
-
- // If a special setting is given, the empty package will be checked as a
- // last ditch effort before failing. This method sets defSym and returns
- // true if a member of the given name exists.
- def checkEmptyPackage(): Boolean = {
- defSym = rootMirror.EmptyPackageClass.tpe.nonPrivateMember(name)
- defSym != NoSymbol
- }
- def startingIdentContext = (
- // ignore current variable scope in patterns to enforce linearity
- if ((mode & (PATTERNmode | TYPEPATmode)) == 0) context
- else context.outer
- )
- // A symbol qualifies if it exists and is not stale. Stale symbols
- // are made to disappear here. In addition,
- // if we are in a constructor of a pattern, we ignore all definitions
- // which are methods (note: if we don't do that
- // case x :: xs in class List would return the :: method)
- // unless they are stable or are accessors (the latter exception is for better error messages).
- def qualifies(sym: Symbol): Boolean = {
- sym.hasRawInfo && // this condition avoids crashing on self-referential pattern variables
- reallyExists(sym) &&
- ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR))
- }
-
- if (defSym == NoSymbol) {
- var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope
-
- var cx = startingIdentContext
- while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { // cx.scope eq null arises during FixInvalidSyms in Duplicators
- pre = cx.enclClass.prefix
- defEntry = cx.scope.lookupEntry(name)
- if ((defEntry ne null) && qualifies(defEntry.sym)) {
- // Right here is where SI-1987, overloading in package objects, can be
- // seen to go wrong. There is an overloaded symbol, but when referring
- // to the unqualified identifier from elsewhere in the package, only
- // the last definition is visible. So overloading mis-resolves and is
- // definition-order dependent, bad things. See run/t1987.scala.
- //
- // I assume the actual problem involves how/where these symbols are entered
- // into the scope. But since I didn't figure out how to fix it that way, I
- // catch it here by looking up package-object-defined symbols in the prefix.
- if (isInPackageObject(defEntry.sym, pre.typeSymbol)) {
- defSym = pre.member(defEntry.sym.name)
- if (defSym ne defEntry.sym) {
- qual = gen.mkAttributedQualifier(pre)
- log(sm"""
- | !!! Overloaded package object member resolved incorrectly.
- | prefix: $pre
- | Discarded: ${defEntry.sym.defString}
- | Using: ${defSym.defString}
- """)
- }
- }
- else
- defSym = defEntry.sym
- }
- else {
- cx = cx.enclClass
- val foundSym = pre.member(name) filter qualifies
- defSym = foundSym filter (context.isAccessible(_, pre, false))
- if (defSym == NoSymbol) {
- if ((foundSym ne NoSymbol) && (inaccessibleSym eq NoSymbol)) {
- inaccessibleSym = foundSym
- inaccessibleExplanation = analyzer.lastAccessCheckDetails
- }
- cx = cx.outer
- }
- }
- }
-
- val symDepth = if (defEntry eq null) cx.depth
- else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel)
- var impSym: Symbol = NoSymbol // the imported symbol
- var imports = context.imports // impSym != NoSymbol => it is imported from imports.head
+ // setting to enable unqualified idents in empty package (used by the repl)
+ def inEmptyPackage = if (settings.exposeEmptyPackage) lookupInEmpty(name) else NoSymbol
- // Java: A single-type-import declaration d in a compilation unit c of package p
- // that imports a type named n shadows, throughout c, the declarations of:
- //
- // 1) any top level type named n declared in another compilation unit of p
- //
- // A type-import-on-demand declaration never causes any other declaration to be shadowed.
- //
-    //  Scala: Bindings of different kinds have a precedence defined on them:
-    //
-    //  1) Definitions and declarations that are local, inherited, or made available by a
-    //     package clause in the same compilation unit where the definition occurs have
- // highest precedence.
- // 2) Explicit imports have next highest precedence.
- def depthOk(imp: ImportInfo) = (
- imp.depth > symDepth
- || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symDepth)
- )
- while (!reallyExists(impSym) && !imports.isEmpty && depthOk(imports.head)) {
- impSym = imports.head.importedSymbol(name)
- if (!impSym.exists) imports = imports.tail
- }
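The precedence rules spelled out in the comment being removed here still hold and can be demonstrated with a hedged sketch (objects A and B are hypothetical): an explicit import binds more tightly than a wildcard import, so no ambiguity is reported.

object A { val answer = "from A" }
object B { val answer = "from B" }

object PrecedenceDemo {
  import A._        // wildcard import brings in A.answer
  import B.answer   // explicit import: higher precedence than the wildcard

  def main(args: Array[String]): Unit =
    println(answer) // prints "from B"
}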
+ def issue(err: AbsTypeError) = {
+ // Avoiding some spurious error messages: see SI-2388.
+ val suppress = reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)
+ if (!suppress)
+ ErrorUtils.issueTypeError(err)
- // detect ambiguous definition/import,
- // update `defSym` to be the final resolved symbol,
- // update `pre` to be `sym`s prefix type in case it is an imported member,
- // and compute value of:
-
- if (defSym.exists && impSym.exists) {
- // imported symbols take precedence over package-owned symbols in different
- // compilation units. Defined symbols take precedence over erroneous imports.
- if (defSym.isDefinedInPackage &&
- (!currentRun.compiles(defSym) ||
- context.unit.exists && defSym.sourceFile != context.unit.source.file))
- defSym = NoSymbol
- else if (impSym.isError || impSym.name == nme.CONSTRUCTOR)
- impSym = NoSymbol
- }
- if (defSym.exists) {
- if (impSym.exists)
- ambiguousError(
- "it is both defined in "+defSym.owner +
- " and imported subsequently by \n"+imports.head)
- else if (!defSym.owner.isClass || defSym.owner.isPackageClass || defSym.isTypeParameterOrSkolem)
- pre = NoPrefix
- else
- qual = atPos(tree.pos.focusStart)(gen.mkAttributedQualifier(pre))
- } else {
- if (impSym.exists) {
- var impSym1: Symbol = NoSymbol
- var imports1 = imports.tail
-
- /** It's possible that seemingly conflicting identifiers are
- * identifiably the same after type normalization. In such cases,
- * allow compilation to proceed. A typical example is:
- * package object foo { type InputStream = java.io.InputStream }
- * import foo._, java.io._
- */
- def ambiguousImport() = {
- // The types of the qualifiers from which the ambiguous imports come.
- // If the ambiguous name is a value, these must be the same.
- def t1 = imports.head.qual.tpe
- def t2 = imports1.head.qual.tpe
- // The types of the ambiguous symbols, seen as members of their qualifiers.
- // If the ambiguous name is a monomorphic type, we can relax this far.
- def mt1 = t1 memberType impSym
- def mt2 = t2 memberType impSym1
- def characterize = List(
- s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}",
- s"member type 1: $mt1",
- s"member type 2: $mt2",
- s"$impSym == $impSym1 ${impSym == impSym1}",
- s"${impSym.debugLocationString} ${impSym.getClass}",
- s"${impSym1.debugLocationString} ${impSym1.getClass}"
- ).mkString("\n ")
-
- // The symbol names are checked rather than the symbols themselves because
- // each time an overloaded member is looked up it receives a new symbol.
- // So foo.member("x") != foo.member("x") if x is overloaded. This seems
- // likely to be the cause of other bugs too...
- if (t1 =:= t2 && impSym.name == impSym1.name)
- log(s"Suppressing ambiguous import: $t1 =:= $t2 && $impSym == $impSym1")
- // Monomorphism restriction on types is in part because type aliases could have the
- // same target type but attach different variance to the parameters. Maybe it can be
- // relaxed, but doesn't seem worth it at present.
- else if (mt1 =:= mt2 && name.isTypeName && impSym.isMonomorphicType && impSym1.isMonomorphicType)
- log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $impSym and $impSym1 are equivalent")
- else {
- log(s"Import is genuinely ambiguous:\n " + characterize)
- ambiguousError(s"it is imported twice in the same scope by\n${imports.head}\nand ${imports1.head}")
- }
- }
- while (errorContainer == null && !imports1.isEmpty &&
- (!imports.head.isExplicitImport(name) ||
- imports1.head.depth == imports.head.depth)) {
- impSym1 = imports1.head.importedSymbol(name)
- if (reallyExists(impSym1)) {
- if (imports1.head.isExplicitImport(name)) {
- if (imports.head.isExplicitImport(name) ||
- imports1.head.depth != imports.head.depth) ambiguousImport()
- impSym = impSym1
- imports = imports1
- } else if (!imports.head.isExplicitImport(name) &&
- imports1.head.depth == imports.head.depth) ambiguousImport()
- }
- imports1 = imports1.tail
- }
- defSym = impSym
- val qual0 = imports.head.qual
- if (!(shortenImports && qual0.symbol.isPackage)) // optimization: don't write out package prefixes
- qual = atPos(tree.pos.focusStart)(resetPos(qual0.duplicate))
- pre = qual.tpe
- }
- else if (settings.exposeEmptyPackage.value && checkEmptyPackage())
- log("Allowing empty package member " + name + " due to settings.")
- else {
- if ((mode & QUALmode) != 0) {
- val lastTry = rootMirror.missingHook(rootMirror.RootClass, name)
- if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
- }
- if (settings.debug.value) {
- log(context.imports)//debug
- }
- if (inaccessibleSym eq NoSymbol) {
- // Avoiding some spurious error messages: see SI-2388.
- if (reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)) ()
- else identError(SymbolNotFoundError(tree, name, context.owner, startingIdentContext))
- } else
- identError(InferErrorGen.AccessError(
- tree, inaccessibleSym, context.enclClass.owner.thisType, context.enclClass.owner,
- inaccessibleExplanation
- ))
- defSym = context.owner.newErrorSymbol(name)
- }
- }
- }
- if (errorContainer != null) {
- ErrorUtils.issueTypeError(errorContainer)
setError(tree)
- } else {
- if (defSym.owner.isPackageClass)
- pre = defSym.owner.thisType
-
- // Inferring classOf type parameter from expected type.
- if (defSym.isThisSym) {
- typed1(This(defSym.owner) setPos tree.pos, mode, pt)
- }
+ }
+ // ignore current variable scope in patterns to enforce linearity
+ val startContext = if (mode.typingPatternOrTypePat) context.outer else context
+ val nameLookup = tree.symbol match {
+ case NoSymbol => startContext.lookupSymbol(name, qualifies)
+ case sym => LookupSucceeded(EmptyTree, sym)
+ }
+ import InferErrorGen._
+ nameLookup match {
+ case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg))
+ case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg))
+ case LookupNotFound =>
+ inEmptyPackage orElse lookupInRoot(name) match {
+ case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext))
+ case sym => typed1(tree setSymbol sym, mode, pt)
+ }
+ case LookupSucceeded(qual, sym) =>
+ (// this -> Foo.this
+ if (sym.isThisSym)
+ typed1(This(sym.owner) setPos tree.pos, mode, pt)
// Inferring classOf type parameter from expected type. Otherwise an
// actual call to the stubbed classOf method is generated, returning null.
- else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
+ else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
typedClassOf(tree, TypeTree(pt.typeArgs.head))
else {
- val tree1 = (
- if (qual == EmptyTree) tree
- // atPos necessary because qualifier might come from startContext
- else atPos(tree.pos)(Select(qual, name) setAttachments tree.attachments)
- )
- val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
- // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
- val tree3 = stabilize(tree2, pre2, mode, pt)
+ val pre1 = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe
+ val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name))
+ val (tree2, pre2) = makeAccessible(tree1, sym, pre1, qual)
              // SI-5967 Important to replace param type A* with Seq[A] when seen from a reference, to avoid
// inference errors in pattern matching.
- tree3 setType dropRepeatedParamType(tree3.tpe)
+ stabilize(tree2, pre2, mode, pt) modifyType dropIllegalStarTypes
+ }) setAttachments tree.attachments
}
}
- }
def typedIdentOrWildcard(tree: Ident) = {
val name = tree.name
if (Statistics.canEnable) Statistics.incCounter(typedIdentCount)
- if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
- (name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
+ if ((name == nme.WILDCARD && mode.typingPatternNotConstructor) ||
+ (name == tpnme.WILDCARD && mode.inTypeMode))
tree setType makeFullyDefined(pt)
else
typedIdent(tree, name)
@@ -5216,40 +4868,70 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedAppliedTypeTree(tree: AppliedTypeTree) = {
- val tpt = tree.tpt
- val args = tree.args
- val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
+ val tpt = tree.tpt
+ val args = tree.args
+ val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
+ def isPoly = tpt1.tpe.isInstanceOf[PolyType]
+ def isComplete = tpt1.symbol.rawInfo.isComplete
+
if (tpt1.isErrorTyped) {
tpt1
- } else if (!tpt1.hasSymbol) {
+ } else if (!tpt1.hasSymbolField) {
AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
val tparams = tpt1.symbol.typeParams
+
if (sameLength(tparams, args)) {
// @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
- val args1 =
- if (!tpt1.symbol.rawInfo.isComplete)
- args mapConserve (typedHigherKindedType(_, mode))
- // if symbol hasn't been fully loaded, can't check kind-arity
- else map2Conserve(args, tparams) { (arg, tparam) =>
- //@M! the polytype denotes the expected kind
- typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ val args1 = map2Conserve(args, tparams) { (arg, tparam) =>
+ def ptParams = Kind.FromParams(tparam.typeParams)
+
+ // if symbol hasn't been fully loaded, can't check kind-arity except when we're in a pattern,
+ // where we can (we can't take part in F-Bounds) and must (SI-8023)
+ val pt = if (mode.typingPatternOrTypePat) {
+ tparam.initialize; ptParams
}
+ else if (isComplete) ptParams
+ else Kind.Wildcard
+
+ typedHigherKindedType(arg, mode, pt)
+ }
val argtypes = args1 map (_.tpe)
- foreach2(args, tparams)((arg, tparam) => arg match {
- // note: can't use args1 in selector, because Bind's got replaced
- case Bind(_, _) =>
- if (arg.symbol.isAbstractType)
- arg.symbol setInfo // XXX, feedback. don't trackSymInfo here!
- TypeBounds(
- lub(List(arg.symbol.info.bounds.lo, tparam.info.bounds.lo.subst(tparams, argtypes))),
- glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
- case _ =>
- })
+ foreach2(args, tparams) { (arg, tparam) =>
+ // note: can't use args1 in selector, because Binds got replaced
+ val asym = arg.symbol
+ def abounds = asym.info.bounds
+ def tbounds = tparam.info.bounds
+ def enhanceBounds(): Unit = {
+ val TypeBounds(lo0, hi0) = abounds
+ val TypeBounds(lo1, hi1) = tbounds.subst(tparams, argtypes)
+ val lo = lub(List(lo0, lo1))
+ val hi = glb(List(hi0, hi1))
+ if (!(lo =:= lo0 && hi =:= hi0))
+ asym setInfo logResult(s"Updating bounds of ${asym.fullLocationString} in $tree from '$abounds' to")(TypeBounds(lo, hi))
+ }
+ if (asym != null && asym.isAbstractType) {
+ // See pos/t1786 to follow what's happening here.
+ def canEnhanceIdent = (
+ asym.hasCompleteInfo
+ && tparam.exists /* sometimes it is NoSymbol */
+ && tparam.hasCompleteInfo /* SI-2940 */
+ && !tparam.isFBounded /* SI-2251 */
+ && !tparam.isHigherOrderTypeParameter
+ && !(abounds.hi <:< tbounds.hi)
+ && asym.isSynthetic /* this limits us to placeholder tparams, excluding named ones */
+ )
+ arg match {
+ case Bind(_, _) => enhanceBounds()
+ case Ident(name) if canEnhanceIdent => enhanceBounds()
+ case _ =>
+ }
+ }
+ }
val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
- if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
+ if (isPoly) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
TypeTreeWithDeferredRefCheck(){ () =>
// wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
// we can't simply use original in refchecks because it does not contains types
@@ -5262,7 +4944,7 @@ trait Typers extends Modes with Adaptations with Tags {
AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
//Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}")
- if (settings.debug.value) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
+ if (settings.debug) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams)
}
}
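To relate the kind-arity checking above to user-level code, here is a hedged sketch using only standard types (the method name is made up): each type argument must have the kind its type parameter expects, so a unary type constructor is accepted where `M[_]` is declared and a binary one is not.

object KindArityDemo {
  // M must be a unary type constructor: its kind has to match M[_].
  def firstOf[M[_], A](m: M[A])(first: M[A] => A): A = first(m)

  def main(args: Array[String]): Unit = {
    // List has kind * -> *, so it is accepted for M.
    println(firstOf[List, Int](List(1, 2, 3))(_.head))
    // firstOf[Map, Int](...) would be rejected: Map takes two type parameters,
    // so its kind does not match the expected kind of M.
  }
}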
@@ -5271,7 +4953,8 @@ trait Typers extends Modes with Adaptations with Tags {
val sym: Symbol = tree.symbol
if ((sym ne null) && (sym ne NoSymbol)) sym.initialize
- def typedPackageDef(pdef: PackageDef) = {
+ def typedPackageDef(pdef0: PackageDef) = {
+ val pdef = treeCopy.PackageDef(pdef0, pdef0.pid, pluginsEnterStats(this, pdef0.stats))
val pid1 = typedQualifier(pdef.pid).asInstanceOf[RefTree]
assert(sym.moduleClass ne NoSymbol, sym)
val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
@@ -5279,142 +4962,100 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.PackageDef(tree, pid1, stats1) setType NoType
}
- def typedDocDef(docdef: DocDef) = {
- if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
- val comment = docdef.comment
- fillDocComment(sym, comment)
- val typer1 = newTyper(context.makeNewScope(tree, context.owner))
- for (useCase <- comment.useCases) {
- typer1.silent(_.typedUseCase(useCase)) match {
- case SilentTypeError(err) =>
- unit.warning(useCase.pos, err.errMsg)
- case _ =>
- }
- for (useCaseSym <- useCase.defined) {
- if (sym.name != useCaseSym.name)
- unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
- }
- }
- }
- typed(docdef.definition, mode, pt)
- }
-
- /**
+ /*
* The typer with the correct context for a method definition. If the method is a default getter for
* a constructor default, the resulting typer has a constructor context (fixes SI-5543).
*/
def defDefTyper(ddef: DefDef) = {
- val isConstrDefaultGetter = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+ val isConstrDefaultGetter = ddef.mods.hasDefault && sym.owner.isModuleClass &&
nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(isConstrDefaultGetter)
}
def typedAlternative(alt: Alternative) = {
- val alts1 = alt.trees mapConserve (alt => typed(alt, mode | ALTmode, pt))
- treeCopy.Alternative(tree, alts1) setType pt
+ context withinPatAlternative (
+ treeCopy.Alternative(tree, alt.trees mapConserve (alt => typed(alt, mode, pt))) setType pt
+ )
}
-
def typedStar(tree: Star) = {
- if ((mode & STARmode) == 0 && !isPastTyper)
+ if (!context.starPatterns && !isPastTyper)
StarPatternWithVarargParametersError(tree)
- treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
- }
- def typedUnApply(tree: UnApply) = {
- val fun1 = typed(tree.fun)
- val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args), tree.args.length)
- val args1 = map2(tree.args, tpes)(typedPattern)
- treeCopy.UnApply(tree, fun1, args1) setType pt
+ treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
}
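[editor's sketch] For context, `typedAlternative` and `typedStar` above handle the pattern forms shown in this ordinary user-level sketch (not part of the patch):

object PatternShapes {
  def describe(xs: List[Int]): String = xs match {
    case 1 :: _ | 2 :: _   => "starts with 1 or 2"    // Alternative pattern
    case Seq(_, rest @ _*) => s"${rest.length} more"  // Star (vararg) pattern
    case _                 => "empty"
  }
}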
-
- def typedTry(tree: Try) = {
- var block1 = typed(tree.block, pt)
- var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt)
-
- for (cdef <- catches1 if !isPastTyper && cdef.guard.isEmpty) {
- def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
+ def issueTryWarnings(tree: Try): Try = {
+ def checkForCatchAll(cdef: CaseDef) {
def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
- cdef.pat match {
+ def warn(name: Name) = {
+ val msg = s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning."
+ context.warning(cdef.pat.pos, msg)
+ }
+ if (cdef.guard.isEmpty) cdef.pat match {
case Bind(name, i @ Ident(_)) if unbound(i) => warn(name)
- case i @ Ident(name) if unbound(i) => warn(name)
- case _ =>
+ case i @ Ident(name) if unbound(i) => warn(name)
+ case _ =>
}
}
-
- val finalizer1 =
- if (tree.finalizer.isEmpty) tree.finalizer
- else typed(tree.finalizer, UnitClass.tpe)
- val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)), pt)
- if (needAdapt) {
- block1 = adapt(block1, mode, owntype)
- catches1 = catches1 map (adaptCase(_, mode, owntype))
+ if (!isPastTyper) tree match {
+ case Try(_, Nil, fin) =>
+ if (fin eq EmptyTree)
+ context.warning(tree.pos, "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.")
+ case Try(_, catches, _) =>
+ catches foreach checkForCatchAll
}
+ tree
+ }
- treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
+ def typedTry(tree: Try) = {
+ val Try(block, catches, fin) = tree
+ val block1 = typed(block, pt)
+ val catches1 = typedCases(catches, ThrowableTpe, pt)
+ val fin1 = if (fin.isEmpty) fin else typed(fin, UnitTpe)
+
+ def finish(ownType: Type) = treeCopy.Try(tree, block1, catches1, fin1) setType ownType
+
+ issueTryWarnings(
+ if (isFullyDefined(pt))
+ finish(pt)
+ else block1 :: catches1 map (_.tpe.deconst) match {
+ case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
+ case tpes =>
+ val lub = weakLub(tpes)
+ val block2 = adapt(block1, mode, lub)
+ val catches2 = catches1 map (adaptCase(_, mode, lub))
+ treeCopy.Try(tree, block2, catches2, fin1) setType lub
+ }
+ )
}
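[editor's sketch] For reference, the warnings issued by `issueTryWarnings` above fire on user code like the following (warning texts quoted from the patch):

object TryWarningExamples {
  def noHandler(): Int =
    try 1                                    // warns: try without catch or finally
  def catchAll(): Int =
    try 1 catch { case e => 0 }              // warns: "This catches all Throwables..."
  def explicit(): Int =
    try 1 catch { case e: Throwable => 0 }   // explicit Throwable type: no warning
}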
def typedThrow(tree: Throw) = {
- val expr1 = typed(tree.expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
- treeCopy.Throw(tree, expr1) setType NothingClass.tpe
+ val expr1 = typedByValueExpr(tree.expr, ThrowableTpe)
+ treeCopy.Throw(tree, expr1) setType NothingTpe
}
def typedTyped(tree: Typed) = {
- val expr = tree.expr
- val tpt = tree.tpt
- tpt match {
- case Function(List(), EmptyTree) =>
- // find out whether the programmer is trying to eta-expand a macro def
- // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
- // that typecheck must not trigger macro expansions, so we explicitly prohibit them
- // however we cannot do `context.withMacrosDisabled`
- // because `expr` might contain nested macro calls (see SI-6673)
- val exprTyped = typed1(suppressMacroExpansion(expr), mode, pt)
- exprTyped match {
- case macroDef if macroDef.symbol != null && macroDef.symbol.isTermMacro && !macroDef.symbol.isErroneous =>
- MacroEtaError(exprTyped)
- case _ =>
- typedEta(checkDead(exprTyped))
- }
-
- case Ident(tpnme.WILDCARD_STAR) =>
- val exprTyped = typed(expr, onlyStickyModes(mode), WildcardType)
- def subArrayType(pt: Type) =
- if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
- else {
- val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
- newExistentialType(List(tparam), arrayType(tparam.tpe))
- }
-
- val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
- case ArrayClass => (adapt(exprTyped, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
- case _ => (adapt(exprTyped, onlyStickyModes(mode), seqType(pt)), SeqClass)
- }
- exprAdapted.tpe.baseType(baseClass) match {
- case TypeRef(_, _, List(elemtp)) =>
- treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
- case _ =>
- setError(tree)
+ if (treeInfo isWildcardStarType tree.tpt)
+ typedStarInPattern(tree, mode.onlySticky, pt)
+ else if (mode.inPatternMode)
+ typedInPattern(tree, mode.onlySticky, pt)
+ else tree match {
+ // find out whether the programmer is trying to eta-expand a macro def
+ // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
+ // that typecheck must not trigger macro expansions, so we explicitly prohibit them
+ // however we cannot do `context.withMacrosDisabled`
+ // because `expr` might contain nested macro calls (see SI-6673)
+ //
+ // Note: apparently `Function(Nil, EmptyTree)` is the secret parser marker
+ // which means trailing underscore.
+ case Typed(expr, Function(Nil, EmptyTree)) =>
+ typed1(suppressMacroExpansion(expr), mode, pt) match {
+ case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef)
+ case exprTyped => typedEta(checkDead(exprTyped))
}
-
- case _ =>
- val tptTyped = typedType(tpt, mode)
- val exprTyped = typed(expr, onlyStickyModes(mode), tptTyped.tpe.deconst)
- val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
-
- if (isPatternMode) {
- val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
-
- // make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
- val ptDefined = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
- val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, ptDefined, canRemedy = uncheckedTypeExtractor.nonEmpty)
- treeTyped setType ownType
-
- uncheckedTypeExtractor match {
- case None => treeTyped
- case Some(extractor) => wrapClassTagUnapply(treeTyped, extractor, tptTyped.tpe)
- }
- } else
- treeTyped setType tptTyped.tpe
+ case Typed(expr, tpt) =>
+ val tpt1 = typedType(tpt, mode) // type the ascribed type first
+ val expr1 = typed(expr, mode.onlySticky, tpt1.tpe.deconst) // then type the expression with tpt1 as the expected type
+ treeCopy.Typed(tree, expr1, tpt1) setType tpt1.tpe
}
}
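[editor's sketch] The `Typed(expr, Function(Nil, EmptyTree))` case above is the parser's encoding of a trailing underscore; a hedged sketch of the tree shapes `typedTyped` distinguishes:

object TypedShapes {
  def plus(x: Int, y: Int): Int = x + y
  val etaExpanded = plus _        // Typed(expr, Function(Nil, EmptyTree)): eta-expansion marker
  val ascribed    = (1 + 1): Int  // ordinary Typed(expr, tpt) ascription
  // eta-expanding a macro def (`someMacro _`) would instead be reported as MacroEtaError
}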
@@ -5430,8 +5071,8 @@ trait Typers extends Modes with Adaptations with Tags {
//val undets = context.undetparams
// @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
- val fun1 = typed(fun, forFunMode(mode) | TAPPmode, WildcardType)
- val tparams = fun1.symbol.typeParams
+ val fun1 = typed(fun, mode.forFunMode | TAPPmode)
+ val tparams = if (fun1.symbol == null) Nil else fun1.symbol.typeParams
//@M TODO: val undets_fun = context.undetparams ?
// "do args first" (by restoring the context.undetparams) in order to maintain context.undetparams on the function side.
@@ -5441,8 +5082,7 @@ trait Typers extends Modes with Adaptations with Tags {
// @M maybe the well-kindedness check should be done when checking the type arguments conform to the type parameters' bounds?
val args1 = if (sameLength(args, tparams)) map2Conserve(args, tparams) {
- //@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ (arg, tparam) => typedHigherKindedType(arg, mode, Kind.FromParams(tparam.typeParams))
}
else {
//@M this branch is correctly hit for an overloaded polymorphic type. It also has to handle erroneous cases.
@@ -5460,10 +5100,9 @@ trait Typers extends Modes with Adaptations with Tags {
def typedApplyDynamic(tree: ApplyDynamic) = {
assert(phase.erasedTypes)
- val reflectiveCalls = !(settings.refinementMethodDispatch.value == "invoke-dynamic")
- val qual1 = typed(tree.qual, AnyRefClass.tpe)
- val args1 = tree.args mapConserve (arg => if (reflectiveCalls) typed(arg, AnyRefClass.tpe) else typed(arg))
- treeCopy.ApplyDynamic(tree, qual1, args1) setType (if (reflectiveCalls) AnyRefClass.tpe else tree.symbol.info.resultType)
+ val qual1 = typed(tree.qual, AnyRefTpe)
+ val args1 = tree.args mapConserve (arg => typed(arg, AnyRefTpe))
+ treeCopy.ApplyDynamic(tree, qual1, args1) setType AnyRefTpe
}
def typedReferenceToBoxed(tree: ReferenceToBoxed) = {
@@ -5475,20 +5114,72 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.ReferenceToBoxed(tree, id1) setType tpe
}
+ // Warn about likely interpolated strings which are missing their interpolators
+ def warnMissingInterpolator(lit: Literal): Unit = if (!isPastTyper) {
+ // attempt to avoid warning about trees munged by macros
+ def isMacroExpansion = {
+ // context.tree is not the expandee; it is plain new SC(ps).m(args)
+ //context.tree exists (t => (t.pos includes lit.pos) && hasMacroExpansionAttachment(t))
+ // testing pos works and may suffice
+ //openMacros exists (_.macroApplication.pos includes lit.pos)
+ // tests whether the lit belongs to the expandee of an open macro
+ openMacros exists (_.macroApplication.attachments.get[MacroExpansionAttachment] match {
+ case Some(MacroExpansionAttachment(_, t: Tree)) => t exists (_ == lit)
+ case _ => false
+ })
+ }
+ // attempt to avoid warning about the special interpolated message string
+ // for implicitNotFound or any standard interpolation (with embedded $$).
+ def isRecognizablyNotForInterpolation = context.enclosingApply.tree match {
+ case Apply(Select(Apply(RefTree(_, nme.StringContext), _), _), _) => true
+ case Apply(Select(New(RefTree(_, tpnme.implicitNotFound)), _), _) => true
+ case _ => isMacroExpansion
+ }
+ def requiresNoArgs(tp: Type): Boolean = tp match {
+ case PolyType(_, restpe) => requiresNoArgs(restpe)
+ case MethodType(Nil, restpe) => requiresNoArgs(restpe) // may be a curried method - can't tell yet
+ case MethodType(p :: _, _) => p.isImplicit // implicit method requires no args
+ case _ => true // catches all others including NullaryMethodType
+ }
+ def isPlausible(m: Symbol) = m.alternatives exists (m => requiresNoArgs(m.info))
+
+ def maybeWarn(s: String): Unit = {
+ def warn(message: String) = context.unit.warning(lit.pos, s"$message Did you forget the interpolator?")
+ def suspiciousSym(name: TermName) = context.lookupSymbol(name, _ => true).symbol
+ def suspiciousExpr = InterpolatorCodeRegex findFirstIn s
+ def suspiciousIdents = InterpolatorIdentRegex findAllIn s map (s => suspiciousSym(s drop 1))
+
+ // heuristics - no warning on e.g. a string with only "$asInstanceOf"
+ if (s contains ' ') (
+ if (suspiciousExpr.nonEmpty)
+ warn("That looks like an interpolated expression!") // "${...}"
+ else
+ suspiciousIdents find isPlausible foreach (sym => warn(s"`$$${sym.name}` looks like an interpolated identifier!")) // "$id"
+ )
+ }
+ lit match {
+ case Literal(Constant(s: String)) if !isRecognizablyNotForInterpolation => maybeWarn(s)
+ case _ =>
+ }
+ }
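[editor's sketch] What the new missing-interpolator check above flags under -Xlint, assuming the warning text given in the patch:

object InterpolatorLint {
  val name = "world"
  val s1 = "Hello, $name and ${name.length}"  // likely missing interpolator: warns under -Xlint
  val s2 = s"Hello, $name"                    // real interpolator: no warning
  val s3 = "price: 100$"                      // no identifier or expression after '$': no warning
}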
+
def typedLiteral(tree: Literal) = {
- val value = tree.value
- tree setType (
- if (value.tag == UnitTag) UnitClass.tpe
- else ConstantType(value))
+ if (settings.lint) warnMissingInterpolator(tree)
+
+ tree setType (if (tree.value.tag == UnitTag) UnitTpe else ConstantType(tree.value))
}
def typedSingletonTypeTree(tree: SingletonTypeTree) = {
- val ref1 = checkStable(
- context.withImplicitsDisabled(
- typed(tree.ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe)
- )
- )
- tree setType ref1.tpe.resultType
+ val refTyped =
+ context.withImplicitsDisabled {
+ typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
+ }
+
+ if (!refTyped.isErrorTyped)
+ tree setType refTyped.tpe.resultType
+
+ if (treeInfo.admitsTypeSelection(refTyped)) tree
+ else UnstableTreeError(refTyped)
}
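[editor's sketch] The stability requirement that `typedSingletonTypeTree` now enforces via `admitsTypeSelection`, in user-level terms (error name from the patch):

object Stable { val x: Int = 1 }
class SingletonTypeExamples {
  val ok: Stable.x.type = Stable.x  // stable path: accepted
  var v: Int = 1
  // val bad: v.type = v            // `v` is a var, not a stable path: rejected (UnstableTreeError)
}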
def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
@@ -5498,8 +5189,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedTypeBoundsTree(tree: TypeBoundsTree) = {
- val lo1 = typedType(tree.lo, mode)
- val hi1 = typedType(tree.hi, mode)
+ val lo1 = if (tree.lo.isEmpty) TypeTree(NothingTpe) else typedType(tree.lo, mode)
+ val hi1 = if (tree.hi.isEmpty) TypeTree(AnyTpe) else typedType(tree.hi, mode)
treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
}
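[editor's sketch] The defaults supplied above for empty bounds correspond to the usual desugaring in user code:

trait BoundsDefaults {
  type A                      // same as: type A >: Nothing <: Any
  type B >: Nothing <: Any
  def f[T](x: T): T           // T's omitted bounds likewise default to >: Nothing <: Any
}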
@@ -5520,11 +5211,13 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => tree
}
}
- else
+ else {
// we should get here only when something before failed
// and we try again (@see tryTypedApply). In that case we can assign
// whatever type to tree; we just have to survive until a real error message is issued.
- tree setType AnyClass.tpe
+ devWarning(tree.pos, s"Assigning Any type to TypeTree because tree.original is null: tree is $tree/${System.identityHashCode(tree)}, sym=${tree.symbol}, tpe=${tree.tpe}")
+ tree setType AnyTpe
+ }
}
def typedFunction(fun: Function) = {
if (fun.symbol == NoSymbol)
@@ -5533,104 +5226,126 @@ trait Typers extends Modes with Adaptations with Tags {
typerWithLocalContext(context.makeNewScope(fun, fun.symbol))(_.typedFunction(fun, mode, pt))
}
- // begin typed1
- //if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
- tree match {
- case tree: Ident => typedIdentOrWildcard(tree)
- case tree: Select => typedSelectOrSuperCall(tree)
- case tree: Apply => typedApply(tree)
+ // Trees only allowed during pattern mode.
+ def typedInPatternMode(tree: Tree): Tree = tree match {
+ case tree: Alternative => typedAlternative(tree)
+ case tree: Star => typedStar(tree)
+ case _ => abort(s"unexpected tree in pattern mode: ${tree.getClass}\n$tree")
+ }
+
+ def typedTypTree(tree: TypTree): Tree = tree match {
case tree: TypeTree => typedTypeTree(tree)
- case tree: Literal => typedLiteral(tree)
- case tree: This => typedThis(tree)
- case tree: ValDef => typedValDef(tree)
- case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
- case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
- case tree: If => typedIf(tree)
- case tree: TypeApply => typedTypeApply(tree)
case tree: AppliedTypeTree => typedAppliedTypeTree(tree)
- case tree: Bind => typedBind(tree)
- case tree: Function => typedFunction(tree)
- case tree: Match => typedVirtualizedMatch(tree)
- case tree: New => typedNew(tree)
- case tree: Assign => typedAssign(tree.lhs, tree.rhs)
- case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
- case tree: Super => typedSuper(tree)
case tree: TypeBoundsTree => typedTypeBoundsTree(tree)
- case tree: Typed => typedTyped(tree)
- case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
- case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
- case tree: TypeDef => typedTypeDef(tree)
- case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
- case tree: PackageDef => typedPackageDef(tree)
- case tree: DocDef => typedDocDef(tree)
- case tree: Annotated => typedAnnotated(tree)
case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
case tree: CompoundTypeTree => typedCompoundTypeTree(tree)
case tree: ExistentialTypeTree => typedExistentialTypeTree(tree)
- case tree: Return => typedReturn(tree)
- case tree: Try => typedTry(tree)
- case tree: Throw => typedThrow(tree)
- case tree: Alternative => typedAlternative(tree)
- case tree: Star => typedStar(tree)
- case tree: UnApply => typedUnApply(tree)
- case tree: ArrayValue => typedArrayValue(tree)
- case tree: ApplyDynamic => typedApplyDynamic(tree)
- case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
- case tree: Import => assert(forInteractive, "!forInteractive") ; tree setType tree.symbol.tpe // should not happen in normal circumstances.
- case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+ case _ => abort(s"unexpected type-representing tree: ${tree.getClass}\n$tree")
+ }
+
+ def typedMemberDef(tree: MemberDef): Tree = tree match {
+ case tree: ValDef => typedValDef(tree)
+ case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
+ case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
+ case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
+ case tree: TypeDef => typedTypeDef(tree)
+ case tree: PackageDef => typedPackageDef(tree)
+ case _ => abort(s"unexpected member def: ${tree.getClass}\n$tree")
+ }
+
+ // Trees not allowed during pattern mode.
+ def typedOutsidePatternMode(tree: Tree): Tree = tree match {
+ case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
+ case tree: If => typedIf(tree)
+ case tree: TypeApply => typedTypeApply(tree)
+ case tree: Function => typedFunction(tree)
+ case tree: Match => typedVirtualizedMatch(tree)
+ case tree: New => typedNew(tree)
+ case tree: Assign => typedAssign(tree.lhs, tree.rhs)
+ case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
+ case tree: Super => typedSuper(tree)
+ case tree: Annotated => typedAnnotated(tree)
+ case tree: Return => typedReturn(tree)
+ case tree: Try => typedTry(tree)
+ case tree: Throw => typedThrow(tree)
+ case tree: ArrayValue => typedArrayValue(tree)
+ case tree: ApplyDynamic => typedApplyDynamic(tree)
+ case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
+ case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
+ case tree: DocDef => typedDocDef(tree, mode, pt)
+ case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+ }
+
+ // Trees allowed in or out of pattern mode.
+ def typedInAnyMode(tree: Tree): Tree = tree match {
+ case tree: Ident => typedIdentOrWildcard(tree)
+ case tree: Bind => typedBind(tree)
+ case tree: Apply => typedApply(tree)
+ case tree: Select => typedSelectOrSuperCall(tree)
+ case tree: Literal => typedLiteral(tree)
+ case tree: Typed => typedTyped(tree)
+ case tree: This => typedThis(tree) // SI-6104
+ case tree: UnApply => abort(s"unexpected UnApply $tree") // turns out UnApply never reaches here
+ case _ =>
+ if (mode.inPatternMode)
+ typedInPatternMode(tree)
+ else
+ typedOutsidePatternMode(tree)
+ }
+
+ // begin typed1
+ tree match {
+ case tree: TypTree => typedTypTree(tree)
+ case tree: MemberDef => typedMemberDef(tree)
+ case _ => typedInAnyMode(tree)
}
}
- /**
- * @param tree ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
lastTreeToTyper = tree
- indentTyping()
-
- val ptPlugins = pluginsPt(pt, this, tree, mode)
-
+ def body = (
+ if (printTypings && !phase.erasedTypes && !noPrintTyping(tree))
+ typingStack.nextTyped(tree, mode, pt, context)(typedInternal(tree, mode, pt))
+ else
+ typedInternal(tree, mode, pt)
+ )
val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
- try {
- if (context.retyping &&
- (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
+ try body
+ finally if (Statistics.canEnable) Statistics.popTimer(byTypeStack, startByType)
+ }
+
+ private def typedInternal(tree: Tree, mode: Mode, pt: Type): Tree = {
+ val ptPlugins = pluginsPt(pt, this, tree, mode)
+ def retypingOk = (
+ context.retyping
+ && (tree.tpe ne null)
+ && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))
+ )
+ def runTyper(): Tree = {
+ if (retypingOk) {
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
-
val alreadyTyped = tree.tpe ne null
- var tree1: Tree = if (alreadyTyped) tree else {
- printTyping(
- ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
- "undetparams" -> context.undetparams,
- "implicitsEnabled" -> context.implicitsEnabled,
- "enrichmentEnabled" -> context.enrichmentEnabled,
- "mode" -> modeString(mode),
- "silent" -> context.bufferErrors,
- "context.owner" -> context.owner
- )
- )
- typed1(tree, mode, dropExistential(ptPlugins))
- }
+ val shouldPrint = !alreadyTyped && !phase.erasedTypes
+ val ptWild = if (mode.inPatternMode)
+ ptPlugins // SI-5022 don't widen pt for patterns as types flow from it to the case body.
+ else
+ dropExistential(ptPlugins) // FIXME: document why this is done.
+ val tree1: Tree = if (alreadyTyped) tree else typed1(tree, mode, ptWild)
+ if (shouldPrint)
+ typingStack.showTyped(tree1)
+
// Can happen during erroneous compilation - error(s) have been
// reported, but we need to avoid causing an NPE with this tree
if (tree1.tpe eq null)
return setError(tree)
- if (!alreadyTyped) {
- printTyping("typed %s: %s%s".format(
- ptTree(tree1), tree1.tpe,
- if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")
- )
- }
+ tree1 modifyType (pluginsTyped(_, this, tree1, mode, ptPlugins))
- tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
val result =
if (tree1.isEmpty) tree1
else {
@@ -5638,84 +5353,82 @@ trait Typers extends Modes with Adaptations with Tags {
if (hasPendingMacroExpansions) macroExpandAll(this, result) else result
}
- if (!alreadyTyped) {
- printTyping("adapted %s: %s to %s, %s".format(
- tree1, tree1.tpe.widen, ptPlugins, context.undetparamsString)
- ) //DEBUG
- }
- if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
+ if (shouldPrint)
+ typingStack.showAdapt(tree1, result, ptPlugins, context)
+
+ if (!isPastTyper)
+ signalDone(context.asInstanceOf[analyzer.Context], tree, result)
+
result
- } catch {
+ }
+
+ try runTyper() catch {
case ex: TypeError =>
- tree.tpe = null
+ tree.clearType()
// The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere.
- printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG
-
+ typingStack.printTyping(tree, "caught %s: while typing %s".format(ex, tree)) //DEBUG
reportTypeError(context, tree.pos, ex)
setError(tree)
case ex: Exception =>
- if (settings.debug.value) // @M causes cyclic reference error
- Console.println("exception when typing "+tree+", pt = "+ptPlugins)
+ // @M causes cyclic reference error
+ devWarning(s"exception when typing $tree, pt=$ptPlugins")
if (context != null && context.unit.exists && tree != null)
- logError("AT: " + (tree.pos).dbgString, ex)
+ logError("AT: " + tree.pos, ex)
throw ex
}
- finally {
- deindentTyping()
- if (Statistics.canEnable) Statistics.popTimer(byTypeStack, startByType)
- }
}
def atOwner(owner: Symbol): Typer =
- newTyper(context.make(context.tree, owner))
+ newTyper(context.make(owner = owner))
def atOwner(tree: Tree, owner: Symbol): Typer =
newTyper(context.make(tree, owner))
- /** Types expression or definition <code>tree</code>.
- *
- * @param tree ...
- * @return ...
+ /** Types expression or definition `tree`.
*/
def typed(tree: Tree): Tree = {
- val ret = typed(tree, EXPRmode, WildcardType)
+ val ret = typed(tree, context.defaultModeForTyped, WildcardType)
ret
}
- def typedPos(pos: Position, mode: Int, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
+ def typedByValueExpr(tree: Tree, pt: Type = WildcardType): Tree = typed(tree, EXPRmode | BYVALmode, pt)
+
+ def typedPos(pos: Position, mode: Mode, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
def typedPos(pos: Position)(tree: Tree) = typed(atPos(pos)(tree))
// TODO: see if this formulation would impose any penalty, since
// it makes for a lot less casting.
// def typedPos[T <: Tree](pos: Position)(tree: T): T = typed(atPos(pos)(tree)).asInstanceOf[T]
- /** Types expression <code>tree</code> with given prototype <code>pt</code>.
- *
- * @param tree ...
- * @param pt ...
- * @return ...
+ /** Types expression `tree` with given prototype `pt`.
*/
def typed(tree: Tree, pt: Type): Tree =
- typed(tree, EXPRmode, pt)
+ typed(tree, context.defaultModeForTyped, pt)
+
+ def typed(tree: Tree, mode: Mode): Tree =
+ typed(tree, mode, WildcardType)
- /** Types qualifier <code>tree</code> of a select node.
- * E.g. is tree occurs in a context like <code>tree.m</code>.
+ /** Types qualifier `tree` of a select node.
+ * E.g. if tree occurs in a context like `tree.m`.
*/
- def typedQualifier(tree: Tree, mode: Int, pt: Type): Tree =
- typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
+ def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree =
+ typed(tree, PolyQualifierModes | mode.onlyTypePat, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
- /** Types qualifier <code>tree</code> of a select node.
- * E.g. is tree occurs in a context like <code>tree.m</code>.
+ /** Types qualifier `tree` of a select node.
+ * E.g. if tree occurs in a context like `tree.m`.
*/
- def typedQualifier(tree: Tree, mode: Int): Tree =
+ def typedQualifier(tree: Tree, mode: Mode): Tree =
typedQualifier(tree, mode, WildcardType)
def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType)
/** Types function part of an application */
- def typedOperator(tree: Tree): Tree =
- typed(tree, EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType)
+ def typedOperator(tree: Tree): Tree = typed(tree, OperatorModes)
- /** Types a pattern with prototype <code>pt</code> */
+ // the qualifier type of a supercall constructor is its first parent class
+ private def typedSelectOrSuperQualifier(qual: Tree) =
+ context withinSuperInit typed(qual, PolyQualifierModes)
+
+ /** Types a pattern with prototype `pt` */
def typedPattern(tree: Tree, pt: Type): Tree = {
// We disable implicits because otherwise some constructs will
// type check which should not. The pattern matcher does not
@@ -5737,30 +5450,28 @@ trait Typers extends Modes with Adaptations with Tags {
// TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))) match {
case tpt if tpt.isType => PatternMustBeValue(tpt, pt); tpt
- case pat => pat
+ case pat => pat
}
}
/** Types a (fully parameterized) type tree */
- def typedType(tree: Tree, mode: Int): Tree =
- typed(tree, forTypeMode(mode), WildcardType)
+ def typedType(tree: Tree, mode: Mode): Tree =
+ typed(tree, mode.forTypeMode, WildcardType)
/** Types a (fully parameterized) type tree */
def typedType(tree: Tree): Tree = typedType(tree, NOmode)
- /** Types a higher-kinded type tree -- pt denotes the expected kind*/
- def typedHigherKindedType(tree: Tree, mode: Int, pt: Type): Tree =
- if (pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's *
- else typed(tree, HKmode, pt)
-
- def typedHigherKindedType(tree: Tree, mode: Int): Tree =
- typed(tree, HKmode, WildcardType)
+ /** Types a higher-kinded type tree -- pt denotes the expected kind and must be either `Kind.Wildcard` or `Kind.FromParams` */
+ def typedHigherKindedType(tree: Tree, mode: Mode, pt: Type): Tree =
+ if (pt != Kind.Wildcard && pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's *
+ else context withinTypeConstructorAllowed typed(tree, NOmode, pt)
- def typedHigherKindedType(tree: Tree): Tree = typedHigherKindedType(tree, NOmode)
+ def typedHigherKindedType(tree: Tree, mode: Mode): Tree =
+ context withinTypeConstructorAllowed typed(tree)
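[editor's sketch] For context, the kind check above (`Kind.FromParams` built from the type parameter's own type parameters) is what validates higher-kinded type arguments such as the one in this sketch:

trait Functor[F[_]] {
  def map[A, B](fa: F[A])(f: A => B): F[B]
}
object ListFunctor extends Functor[List] {  // List is checked against the expected kind * -> *
  def map[A, B](fa: List[A])(f: A => B): List[B] = fa map f
}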
/** Types a type constructor tree used in a new or supertype */
- def typedTypeConstructor(tree: Tree, mode: Int): Tree = {
- val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType)
+ def typedTypeConstructor(tree: Tree, mode: Mode): Tree = {
+ val result = typed(tree, mode.forTypeMode | FUNmode, WildcardType)
// get rid of type aliases for the following check (#1241)
result.tpe.dealias match {
@@ -5781,7 +5492,7 @@ trait Typers extends Modes with Adaptations with Tags {
def computeType(tree: Tree, pt: Type): Type = {
// macros employ different logic of `computeType`
- assert(!context.owner.isTermMacro, context.owner)
+ assert(!context.owner.isMacro, context.owner)
val tree1 = typed(tree, pt)
transformed(tree) = tree1
val tpe = packedType(tree1, context.owner)
@@ -5789,60 +5500,63 @@ trait Typers extends Modes with Adaptations with Tags {
tpe
}
- def computeMacroDefType(tree: Tree, pt: Type): Type = {
- assert(context.owner.isTermMacro, context.owner)
- assert(tree.symbol.isTermMacro, tree.symbol)
- assert(tree.isInstanceOf[DefDef], tree.getClass)
- val ddef = tree.asInstanceOf[DefDef]
+ def computeMacroDefType(ddef: DefDef, pt: Type): Type = {
+ assert(context.owner.isMacro, context.owner)
+ assert(ddef.symbol.isMacro, ddef.symbol)
- val tree1 =
+ val rhs1 =
if (transformed contains ddef.rhs) {
// macro defs are typechecked in `methodSig` (by calling this method) in order to establish their link to macro implementation asap
// if a macro def doesn't have explicitly specified return type, this method will be called again by `assignTypeToTree`
// here we guard against this case
transformed(ddef.rhs)
} else {
- val tree1 = typedMacroBody(this, ddef)
- transformed(ddef.rhs) = tree1
- tree1
+ val rhs1 = typedMacroBody(this, ddef)
+ transformed(ddef.rhs) = rhs1
+ rhs1
}
- val isMacroBodyOkay = !tree.symbol.isErroneous && !(tree1 exists (_.isErroneous)) && tree1 != EmptyTree
+ val isMacroBodyOkay = !ddef.symbol.isErroneous && !(rhs1 exists (_.isErroneous)) && rhs1 != EmptyTree
val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty
- if (isMacroBodyOkay && shouldInheritMacroImplReturnType) computeMacroDefTypeFromMacroImpl(ddef, tree1.symbol) else AnyClass.tpe
- }
-
- def transformedOr(tree: Tree, op: => Tree): Tree = transformed.get(tree) match {
- case Some(tree1) => transformed -= tree; tree1
- case None => op
+ if (isMacroBodyOkay && shouldInheritMacroImplReturnType) {
+ val commonMessage = "macro defs must have explicitly specified return types"
+ def reportFailure() = {
+ ddef.symbol.setFlag(IS_ERROR)
+ unit.error(ddef.pos, commonMessage)
+ }
+ def reportWarning(inferredType: Type) = {
+ val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12"
+ unit.deprecationWarning(ddef.pos, s"$commonMessage ($explanation)")
+ }
+ computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match {
+ case ErrorType => ErrorType
+ case NothingTpe => NothingTpe
+ case NoType => reportFailure(); AnyTpe
+ case tpe => reportWarning(tpe); tpe
+ }
+ } else AnyTpe
}
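[editor's sketch] A hedged sketch of the two macro-definition shapes distinguished above, using the 2.10-style scala.reflect.macros.Context API referenced elsewhere in this diff:

import scala.language.experimental.macros
import scala.reflect.macros.Context

object MacroReturnTypes {
  def impl(c: Context): c.Expr[Int] = c.universe.reify(42)

  def explicit: Int = macro impl  // explicit return type: fine
  def inferred    = macro impl    // no return type: deprecation warning, scheduled to stop working in 2.12
}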
- def transformedOrTyped(tree: Tree, mode: Int, pt: Type): Tree = transformed.get(tree) match {
- case Some(tree1) => transformed -= tree; tree1
- case None => typed(tree, mode, pt)
+ def transformedOr(tree: Tree, op: => Tree): Tree = transformed remove tree match {
+ case Some(tree1) => tree1
+ case _ => op
}
-/*
- def convertToTypeTree(tree: Tree): Tree = tree match {
- case TypeTree() => tree
- case _ => TypeTree(tree.tpe)
+ def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = transformed remove tree match {
+ case Some(tree1) => tree1
+ case _ => typed(tree, mode, pt)
}
-*/
}
}
object TypersStats {
import scala.reflect.internal.TypesStats._
- import scala.reflect.internal.BaseTypeSeqsStats._
val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
val typedSelectCount = Statistics.newCounter("#typechecked selections")
val typedApplyCount = Statistics.newCounter("#typechecked applications")
val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount)
val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount)
val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount)
- val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
- val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
- val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos)
val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos)
val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos)
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
new file mode 100644
index 0000000000..550fd4e68d
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
@@ -0,0 +1,168 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package typechecker
+
+import scala.collection.mutable
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
+import mutable.ListBuffer
+import Mode._
+
+trait TypersTracking {
+ self: Analyzer =>
+
+ import global._
+ import typeDebug._
+
+ // To enable decent error messages when the typer crashes.
+ // TODO - this only catches trees which go through def typed,
+ // but there are all kinds of back ways - typedClassDef, etc. etc.
+ // Funnel everything through one doorway.
+ var lastTreeToTyper: Tree = EmptyTree
+
+ def fullSiteString(context: Context): String = {
+ def owner_long_s = (
+ if (settings.debug.value) {
+ def flags_s = context.owner.debugFlagString match {
+ case "" => ""
+ case s => " with flags " + inLightMagenta(s)
+ }
+ s", a ${context.owner.shortSymbolClass}$flags_s"
+ }
+ else ""
+ )
+ def marker = if (context.bufferErrors) "silent" else "site"
+ def undet_s = context.undetparams match {
+ case Nil => ""
+ case ps => ps.mkString(" solving: ", ",", "")
+ }
+ def implicits_s = (
+ if (context.enrichmentEnabled)
+ if (context.implicitsEnabled) ""
+ else inLightRed("enrichment only")
+ else inLightRed("implicits disabled")
+ )
+
+ s"($marker$undet_s: ${context.siteString}$owner_long_s) $implicits_s"
+ }
+
+ object typingStack {
+ val out = new java.io.PrintWriter(System.err, true)
+
+ // TODO - account for colors so the color of a multiline string
+ // doesn't infect the connector lines
+ private def currentIndent = "| " * depth
+
+ private var trees: List[Frame] = Nil
+ private var depth = 0
+ private def atLowerIndent[T](body: => T): T = {
+ depth -= 1
+ try body finally depth += 1
+ }
+ private def resetIfEmpty(s: String) = if (trees.isEmpty) resetColor(s) else s
+
+ private def truncAndOneLine(s: String): String = {
+ val s1 = s.replaceAll("\\s+", " ")
+ if (s1.length < 60 || settings.debug.value) s1 else s1.take(57) + "..."
+ }
+
+ private class Frame(val tree: Tree) { }
+ private def greenType(tp: Type): String = tpe_s(tp, inGreen)
+ private def greenType(tree: Tree): String = tree match {
+ case null => "[exception]"
+ case md: MemberDef if md.tpe == NoType => inBlue(s"[${md.keyword} ${md.name}]") + " " + greenType(md.symbol.tpe)
+ case _ if tree.tpe.isComplete => greenType(tree.tpe)
+ case _ => "<?>"
+ }
+ def indented(s: String): String =
+ if (s == "") "" else currentIndent + s.replaceAll("\n", "\n" + currentIndent)
+
+ @inline final def runWith[T](t: Tree)(body: => T): T = {
+ push(t)
+ try body finally pop(t)
+ }
+ def push(t: Tree): Unit = {
+ trees ::= new Frame(t)
+ depth += 1
+ }
+ def pop(t: Tree): Unit = {
+ val frame = trees.head
+ assert(frame.tree eq t, ((frame.tree, t)))
+ trees = trees.tail
+ depth -= 1
+ }
+ def show(s: String) { if (s != "") out.println(s) }
+
+ def showPush(tree: Tree, context: Context) {
+ showPush(tree, NOmode, WildcardType, context)
+ }
+ def showPush(tree: Tree, mode: Mode, pt: Type, context: Context) {
+ def tree_s = truncAndOneLine(ptTree(tree))
+ def pt_s = if (pt.isWildcard || context.inTypeConstructorAllowed) "" else s": pt=$pt"
+ def all_s = List(tree_s, pt_s, mode, fullSiteString(context)) filterNot (_ == "") mkString " "
+
+ atLowerIndent(show(indented("""|-- """ + all_s)))
+ }
+ def showPop(typedTree: Tree): Tree = {
+ val s = greenType(typedTree)
+ show(resetIfEmpty(indented("""\-> """ + s)))
+ typedTree
+ }
+ def showAdapt(original: Tree, adapted: Tree, pt: Type, context: Context) {
+ if (!noPrintAdapt(original, adapted)) {
+ def tree_s1 = inLightCyan(truncAndOneLine(ptTree(original)))
+ def pt_s = if (pt.isWildcard) "" else s" based on pt $pt"
+ def tree_s2 = adapted match {
+ case tt: TypeTree => "is now a TypeTree(" + tpe_s(tt.tpe, inCyan) + ")"
+ case _ => "adapted to " + inCyan(truncAndOneLine(ptTree(adapted))) + pt_s
+ }
+ show(indented(s"[adapt] $tree_s1 $tree_s2"))
+ }
+ }
+ def showTyped(tree: Tree) {
+ def class_s = tree match {
+ case _: RefTree => ""
+ case _ => " " + tree.shortClass
+ }
+ if (!noPrintTyping(tree))
+ show(indented(s"[typed$class_s] " + truncAndOneLine(ptTree(tree))))
+ }
+
+ def nextTyped(tree: Tree, mode: Mode, pt: Type, context: Context)(body: => Tree): Tree =
+ nextTypedInternal(tree, showPush(tree, mode, pt, context))(body)
+
+ def nextTypedInternal(tree: Tree, pushFn: => Unit)(body: => Tree): Tree = (
+ if (noPrintTyping(tree))
+ body
+ else
+ runWith(tree) { pushFn ; showPop(body) }
+ )
+
+ @inline final def printTyping(tree: Tree, s: => String) = {
+ if (printTypings && !noPrintTyping(tree))
+ show(indented(s))
+ }
+ @inline final def printTyping(s: => String) = {
+ if (printTypings)
+ show(indented(s))
+ }
+ }
+ def tpe_s(tp: Type, colorize: String => String): String = tp match {
+ case OverloadedType(pre, alts) => alts map (alt => tpe_s(pre memberType alt, colorize)) mkString " <and> "
+ case _ => colorize(tp.toLongString)
+ }
+ // def sym_s(s: Symbol) = if (s eq null) "" + s else s.getClass.getName split '.' last;
+
+ // Some trees which are typed with mind-numbing frequency and
+ // which add nothing by being printed. Did () type to Unit? Let's
+ // gamble on yes.
+ private def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t))
+ def noPrintTyping(t: Tree) = (t.tpe ne null) || !printingOk(t)
+ def noPrintAdapt(tree1: Tree, tree2: Tree) = !printingOk(tree1) || (
+ (tree1.tpe == tree2.tpe)
+ && (tree1.symbol == tree2.symbol)
+ )
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 31c5a61a8c..ffac29b4b8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -12,8 +12,7 @@ import symtab.Flags._
* @author Martin Odersky
* @version 1.0
*/
-trait Unapplies extends ast.TreeDSL
-{
+trait Unapplies extends ast.TreeDSL {
self: Analyzer =>
import global._
@@ -21,8 +20,8 @@ trait Unapplies extends ast.TreeDSL
import CODE.{ CASE => _, _ }
import treeInfo.{ isRepeatedParamType, isByNameParamType }
- private val unapplyParamName = nme.x_0
-
+ private def unapplyParamName = nme.x_0
+ private def caseMods = Modifiers(SYNTHETIC | CASE)
// In the typeCompleter (templateSig) of a case class (resp. its module),
// synthetic `copy` (resp. `apply`, `unapply`) methods are added. To compute
@@ -31,51 +30,17 @@ trait Unapplies extends ast.TreeDSL
// moduleClass symbol of the companion module.
class ClassForCaseCompanionAttachment(val caseClass: ClassDef)
- /** returns type list for return type of the extraction
- * @see extractorFormalTypes
+ /** Returns unapply or unapplySeq if available, without further checks.
*/
- def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, args: List[Tree]) = {
- assert(ufn.isMethod, ufn)
- val nbSubPats = args.length
- //Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
- ufn.name match {
- case nme.unapply | nme.unapplySeq =>
- val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn, treeInfo.effectivePatternArity(args))
- if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
- else formals
- case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
- }
- }
+ def directUnapplyMember(tp: Type): Symbol = (tp member nme.unapply) orElse (tp member nme.unapplySeq)
- /** returns type of the unapply method returning T_0...T_n
- * for n == 0, boolean
- * for n == 1, Some[T0]
- * else Some[Product[Ti]]
+ /** Filters out unapplies with multiple (non-implicit) parameter lists,
+ * as they cannot be used as extractors
*/
- def unapplyReturnTypeExpected(argsLength: Int) = argsLength match {
- case 0 => BooleanClass.tpe
- case 1 => optionType(WildcardType)
- case n => optionType(productType((List fill n)(WildcardType)))
- }
+ def unapplyMember(tp: Type): Symbol = directUnapplyMember(tp) filter (sym => !hasMultipleNonImplicitParamLists(sym))
- /** returns unapply or unapplySeq if available */
- def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match {
- case NoSymbol => tp member nme.unapplySeq
- case unapp => unapp
- }
- /** returns unapply member's parameter type. */
- def unapplyParameterType(extractor: Symbol) = extractor.tpe.params match {
- case p :: Nil => p.tpe.typeSymbol
- case _ => NoSymbol
- }
-
- def copyUntyped[T <: Tree](tree: T): T =
- returning[T](tree.duplicate)(UnTyper traverse _)
-
- def copyUntypedInvariant(td: TypeDef): TypeDef = {
- val copy = treeCopy.TypeDef(td, td.mods &~ (COVARIANT | CONTRAVARIANT), td.name, td.tparams, td.rhs)
-
- returning[TypeDef](copy.duplicate)(UnTyper traverse _)
+ object HasUnapply {
+ def unapply(tp: Type): Option[Symbol] = unapplyMember(tp).toOption
}
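[editor's sketch] The distinction drawn by `unapplyMember` above: an unapply with an extra non-implicit parameter list no longer counts as an extractor (object names are illustrative):

object Even {
  def unapply(n: Int): Option[Int] =                    // single parameter list: usable extractor
    if (n % 2 == 0) Some(n) else None
}
object Chatty {
  def unapply(n: Int)(verbose: Boolean): Option[Int] =  // second non-implicit list: filtered out
    if (verbose) Some(n) else None
}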
private def toIdent(x: DefTree) = Ident(x.name) setPos x.pos.focus
@@ -87,8 +52,15 @@ trait Unapplies extends ast.TreeDSL
}
private def constrParamss(cdef: ClassDef): List[List[ValDef]] = {
- val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor cdef.impl.body
- mmap(vparamss)(copyUntyped[ValDef])
+ val ClassDef(_, _, _, Template(_, _, body)) = resetLocalAttrs(cdef.duplicate)
+ val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor body
+ vparamss
+ }
+
+ private def constrTparamsInvariant(cdef: ClassDef): List[TypeDef] = {
+ val ClassDef(_, _, tparams, _) = resetLocalAttrs(cdef.duplicate)
+ val tparamsInvariant = tparams.map(tparam => copyTypeDef(tparam)(mods = tparam.mods &~ (COVARIANT | CONTRAVARIANT)))
+ tparamsInvariant
}
/** The return value of an unapply method of a case class C[Ts]
@@ -97,25 +69,19 @@ trait Unapplies extends ast.TreeDSL
*/
private def caseClassUnapplyReturnValue(param: Name, caseclazz: ClassDef) = {
def caseFieldAccessorValue(selector: ValDef): Tree = {
- val accessorName = selector.name
- val privateLocalParamAccessor = caseclazz.impl.body.collectFirst {
- case dd: ValOrDefDef if dd.name == accessorName && dd.mods.isPrivateLocal => dd.symbol
- }
- privateLocalParamAccessor match {
- case None =>
- // Selecting by name seems to be the most straight forward way here to
- // avoid forcing the symbol of the case class in order to list the accessors.
- val maybeRenamedAccessorName = caseAccessorName(caseclazz.symbol, accessorName)
- Ident(param) DOT maybeRenamedAccessorName
- case Some(sym) =>
- // But, that gives a misleading error message in neg/t1422.scala, where a case
- // class has an illegal private[this] parameter. We can detect this by checking
- // the modifiers on the param accessors.
- //
- // We just generate a call to that param accessor here, which gives us an inaccessible
- // symbol error, as before.
- Ident(param) DOT sym
+ // Selecting by name seems to be the most straightforward way here to
+ // avoid forcing the symbol of the case class in order to list the accessors.
+ def selectByName = Ident(param) DOT caseAccessorName(caseclazz.symbol, selector.name)
+ // But, that gives a misleading error message in neg/t1422.scala, where a case
+ // class has an illegal private[this] parameter. We can detect this by checking
+ // the modifiers on the param accessors.
+ // We just generate a call to that param accessor here, which gives us an inaccessible
+ // symbol error, as before.
+ def localAccessor = caseclazz.impl.body find {
+ case t @ ValOrDefDef(mods, selector.name, _, _) => mods.isPrivateLocal
+ case _ => false
}
+ localAccessor.fold(selectByName)(Ident(param) DOT _.symbol)
}
// Working with trees, rather than symbols, to avoid cycles like SI-5082
@@ -128,11 +94,16 @@ trait Unapplies extends ast.TreeDSL
/** The module corresponding to a case class; overrides toString to show the module's name
*/
def caseModuleDef(cdef: ClassDef): ModuleDef = {
- // > MaxFunctionArity is caught in Namers, but for nice error reporting instead of
- // an abrupt crash we trim the list here.
- def primaries = constrParamss(cdef).head take MaxFunctionArity map (_.tpt)
- def inheritFromFun = !cdef.mods.hasAbstractFlag && cdef.tparams.isEmpty && constrParamss(cdef).length == 1
- def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
+ val params = constrParamss(cdef)
+ def inheritFromFun = !cdef.mods.hasAbstractFlag && cdef.tparams.isEmpty && (params match {
+ case List(ps) if ps.length <= MaxFunctionArity => true
+ case _ => false
+ })
+ def createFun = {
+ def primaries = params.head map (_.tpt)
+ gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
+ }
+
def parents = if (inheritFromFun) List(createFun) else Nil
def toString = DefDef(
Modifiers(OVERRIDE | FINAL | SYNTHETIC),
@@ -149,15 +120,13 @@ trait Unapplies extends ast.TreeDSL
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, ListOfNil, body, cdef.impl.pos.focus))
+ gen.mkTemplate(parents, noSelfType, NoMods, Nil, body, cdef.impl.pos.focus))
}
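[editor's sketch] For intuition, `inheritFromFun` above decides whether a case class companion gets a FunctionN parent; a sketch of the two cases as the code stands in this patch:

case class Point(x: Int, y: Int)   // no type params, one param list of <= MaxFunctionArity:
                                   // companion extends (Int, Int) => Point
case class Box[A](value: A)        // type parameters present: companion gets no FunctionN parent
object CompanionAsFunction {
  val mk: (Int, Int) => Point = Point  // relies on the synthetic Function2 parent
}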
- private val caseMods = Modifiers(SYNTHETIC | CASE)
-
/** The apply method corresponding to a case class
*/
def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
- val tparams = cdef.tparams map copyUntypedInvariant
+ val tparams = constrTparamsInvariant(cdef)
val cparamss = constrParamss(cdef)
def classtpe = classType(cdef, tparams)
atPos(cdef.pos.focus)(
@@ -173,7 +142,7 @@ trait Unapplies extends ast.TreeDSL
/** The unapply method corresponding to a case class
*/
def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = {
- val tparams = cdef.tparams map copyUntypedInvariant
+ val tparams = constrTparamsInvariant(cdef)
val method = constrParamss(cdef) match {
case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq
case _ => nme.unapply
@@ -228,7 +197,7 @@ trait Unapplies extends ast.TreeDSL
treeCopy.ValDef(vd, Modifiers(flags), vd.name, tpt, rhs)
}
- val tparams = cdef.tparams map copyUntypedInvariant
+ val tparams = constrTparamsInvariant(cdef)
val paramss = classParamss match {
case Nil => Nil
case ps :: pss =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
deleted file mode 100644
index ea436a71fb..0000000000
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package typechecker
-
-import symtab.Flags.{ VarianceFlags => VARIANCES, _ }
-
-/** Variances form a lattice, 0 <= COVARIANT <= Variances, 0 <= CONTRAVARIANT <= VARIANCES
- */
-trait Variances {
-
- val global: Global
- import global._
-
- /** Flip between covariant and contravariant */
- private def flip(v: Int): Int = {
- if (v == COVARIANT) CONTRAVARIANT
- else if (v == CONTRAVARIANT) COVARIANT
- else v
- }
-
- /** Map everything below VARIANCES to 0 */
- private def cut(v: Int): Int =
- if (v == VARIANCES) v else 0
-
- /** Compute variance of type parameter `tparam` in types of all symbols `sym`. */
- def varianceInSyms(syms: List[Symbol])(tparam: Symbol): Int =
- (VARIANCES /: syms) ((v, sym) => v & varianceInSym(sym)(tparam))
-
- /** Compute variance of type parameter `tparam` in type of symbol `sym`. */
- def varianceInSym(sym: Symbol)(tparam: Symbol): Int =
- if (sym.isAliasType) cut(varianceInType(sym.info)(tparam))
- else varianceInType(sym.info)(tparam)
-
- /** Compute variance of type parameter `tparam` in all types `tps`. */
- def varianceInTypes(tps: List[Type])(tparam: Symbol): Int =
- (VARIANCES /: tps) ((v, tp) => v & varianceInType(tp)(tparam))
-
- /** Compute variance of type parameter `tparam` in all type arguments
- * <code>tps</code> which correspond to formal type parameters `tparams1`.
- */
- def varianceInArgs(tps: List[Type], tparams1: List[Symbol])(tparam: Symbol): Int = {
- var v: Int = VARIANCES;
- for ((tp, tparam1) <- tps zip tparams1) {
- val v1 = varianceInType(tp)(tparam)
- v = v & (if (tparam1.isCovariant) v1
- else if (tparam1.isContravariant) flip(v1)
- else cut(v1))
- }
- v
- }
-
- /** Compute variance of type parameter `tparam` in all type annotations `annots`. */
- def varianceInAttribs(annots: List[AnnotationInfo])(tparam: Symbol): Int = {
- (VARIANCES /: annots) ((v, annot) => v & varianceInAttrib(annot)(tparam))
- }
-
- /** Compute variance of type parameter `tparam` in type annotation `annot`. */
- def varianceInAttrib(annot: AnnotationInfo)(tparam: Symbol): Int = {
- varianceInType(annot.atp)(tparam)
- }
-
- /** Compute variance of type parameter <code>tparam</code> in type <code>tp</code>. */
- def varianceInType(tp: Type)(tparam: Symbol): Int = tp match {
- case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) =>
- VARIANCES
- case BoundedWildcardType(bounds) =>
- varianceInType(bounds)(tparam)
- case SingleType(pre, sym) =>
- varianceInType(pre)(tparam)
- case TypeRef(pre, sym, args) =>
- if (sym == tparam) COVARIANT
- // tparam cannot occur in tp's args if tp is a type constructor (those don't have args)
- else if (tp.isHigherKinded) varianceInType(pre)(tparam)
- else varianceInType(pre)(tparam) & varianceInArgs(args, sym.typeParams)(tparam)
- case TypeBounds(lo, hi) =>
- flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam)
- case RefinedType(parents, defs) =>
- varianceInTypes(parents)(tparam) & varianceInSyms(defs.toList)(tparam)
- case MethodType(params, restpe) =>
- flip(varianceInSyms(params)(tparam)) & varianceInType(restpe)(tparam)
- case NullaryMethodType(restpe) =>
- varianceInType(restpe)(tparam)
- case PolyType(tparams, restpe) =>
- flip(varianceInSyms(tparams)(tparam)) & varianceInType(restpe)(tparam)
- case ExistentialType(tparams, restpe) =>
- varianceInSyms(tparams)(tparam) & varianceInType(restpe)(tparam)
- case AnnotatedType(annots, tp, _) =>
- varianceInAttribs(annots)(tparam) & varianceInType(tp)(tparam)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
index 5c6f525c6f..e6f95eb0d6 100644
--- a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
@@ -8,15 +8,7 @@ package util
import scala.reflect.internal.Chars._
-abstract class CharArrayReader { self =>
-
- val buf: Array[Char]
-
- def decodeUni: Boolean = true
-
- /** An error routine to call on bad unicode escapes \\uxxxx. */
- protected def error(offset: Int, msg: String): Unit
-
+trait CharArrayReaderData {
/** the last read character */
var ch: Char = _
@@ -29,13 +21,32 @@ abstract class CharArrayReader { self =>
/** The start offset of the line before the current one */
var lastLineStartOffset: Int = 0
- private var lastUnicodeOffset = -1
+ protected var lastUnicodeOffset = -1
+
+ def copyFrom(cd: CharArrayReaderData): this.type = {
+ this.ch = cd.ch
+ this.charOffset = cd.charOffset
+ this.lineStartOffset = cd.lineStartOffset
+ this.lastLineStartOffset = cd.lastLineStartOffset
+ this.lastUnicodeOffset = cd.lastUnicodeOffset
+ this
+ }
+}
+
+abstract class CharArrayReader extends CharArrayReaderData { self =>
+
+ val buf: Array[Char]
+
+ def decodeUni: Boolean = true
+
+ /** An error routine to call on bad unicode escapes \\uxxxx. */
+ protected def error(offset: Int, msg: String): Unit
/** Is last character a unicode escape \\uxxxx? */
def isUnicodeEscape = charOffset == lastUnicodeOffset
/** Advance one character; reducing CR;LF pairs to just LF */
- final def nextChar() {
+ final def nextChar(): Unit = {
if (charOffset >= buf.length) {
ch = SU
} else {
@@ -43,7 +54,10 @@ abstract class CharArrayReader { self =>
ch = c
charOffset += 1
if (c == '\\') potentialUnicode()
- else if (c < ' ') { skipCR(); potentialLineEnd() }
+ if (ch < ' ') {
+ skipCR()
+ potentialLineEnd()
+ }
}
}
@@ -63,7 +77,7 @@ abstract class CharArrayReader { self =>
}
/** Interpret \\uxxxx escapes */
- private def potentialUnicode() {
+ private def potentialUnicode() = {
def evenSlashPrefix: Boolean = {
var p = charOffset - 2
while (p >= 0 && buf(p) == '\\') p -= 1
@@ -94,13 +108,17 @@ abstract class CharArrayReader { self =>
}
/** replace CR;LF by LF */
- private def skipCR() {
- if (ch == CR)
- if (charOffset < buf.length && buf(charOffset) == LF) {
- charOffset += 1
- ch = LF
+ private def skipCR() =
+ if (ch == CR && charOffset < buf.length)
+ buf(charOffset) match {
+ case LF =>
+ charOffset += 1
+ ch = LF
+ case '\\' =>
+ if (lookaheadReader.getu == LF)
+ potentialUnicode()
+ case _ =>
}
- }
/** Handle line ends */
private def potentialLineEnd() {
@@ -121,5 +139,6 @@ abstract class CharArrayReader { self =>
def error(offset: Int, msg: String) = self.error(offset, msg)
/** A mystery why CharArrayReader.nextChar() returns Unit */
def getc() = { nextChar() ; ch }
+ def getu() = { require(buf(charOffset) == '\\') ; ch = '\\' ; charOffset += 1 ; potentialUnicode() ; ch }
}
}
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index a62c87e713..d2ba61cc0b 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -11,9 +11,9 @@ import java.net.URL
import scala.collection.{ mutable, immutable }
import io.{ File, Directory, Path, Jar, AbstractFile }
import scala.reflect.internal.util.StringOps.splitWhere
-import scala.reflect.ClassTag
import Jar.isJarOrZip
import File.pathSeparator
+import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
import java.net.MalformedURLException
import java.util.regex.PatternSyntaxException
import scala.reflect.runtime.ReflectionUtils
@@ -26,18 +26,16 @@ import scala.reflect.runtime.ReflectionUtils
* @author Stepan Koltsov
*/
object ClassPath {
+ import scala.language.postfixOps
+
/** Expand single path entry */
private def expandS(pattern: String): List[String] = {
val wildSuffix = File.separator + "*"
- /** Get all subdirectories, jars, zips out of a directory. */
+ /* Get all subdirectories, jars, zips out of a directory. */
def lsDir(dir: Directory, filt: String => Boolean = _ => true) =
dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
- def basedir(s: String) =
- if (s contains File.separator) s.substring(0, s.lastIndexOf(File.separator))
- else "."
-
if (pattern == "*") lsDir(Directory("."))
else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
else if (pattern contains '*') {
@@ -59,22 +57,6 @@ object ClassPath {
/** Split the classpath, apply a transformation function, and reassemble it. */
def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
- /** Split the classpath, filter according to predicate, and reassemble. */
- def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*)
-
- /** Split the classpath and map them into Paths */
- def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute)
-
- /** Make all classpath components absolute. */
- def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*)
-
- /** Join the paths as a classpath */
- def fromPaths(paths: Path*): String = join(paths map (_.path): _*)
- def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*)
-
- /** Split the classpath and map them into URLs */
- def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL)
-
/** Expand path and possibly expanding stars */
def expandPath(path: String, expandStar: Boolean = true): List[String] =
if (expandStar) split(path) flatMap expandS
@@ -100,9 +82,6 @@ object ClassPath {
)
}
- /** A useful name filter. */
- def isTraitImplementation(name: String) = ReflectionUtils.isTraitImplementation(name)
-
def specToURL(spec: String): Option[URL] =
try Some(new URL(spec))
catch { case _: MalformedURLException => None }
@@ -116,6 +95,12 @@ object ClassPath {
*/
def isValidName(name: String): Boolean = true
+ /** Filters for assessing validity of various entities.
+ */
+ def validClassFile(name: String) = endsClass(name) && isValidName(name)
+ def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
+ def validSourceFile(name: String) = endsScala(name) || endsJava(name)
+
/** From the representation to its identifier.
*/
def toBinaryName(rep: T): String
@@ -127,31 +112,29 @@ object ClassPath {
/** Creators for sub classpaths which preserve this context.
*/
def sourcesInPath(path: String): List[ClassPath[T]] =
- for (file <- expandPath(path, false) ; dir <- Option(AbstractFile getDirectory file)) yield
+ for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield
new SourcePath[T](dir, this)
def contentsOfDirsInPath(path: String): List[ClassPath[T]] =
- for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
+ for (dir <- expandPath(path, expandStar = false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
newClassPath(entry)
- def classesAtAllURLS(path: String): List[ClassPath[T]] =
- (path split " ").toList flatMap classesAtURL
-
- def classesAtURL(spec: String) =
- for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield
- newClassPath(location)
-
def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] =
- classesInPathImpl(path, true).toIndexedSeq
+ classesInPathImpl(path, expand = true).toIndexedSeq
- def classesInPath(path: String) = classesInPathImpl(path, false)
+ def classesInPath(path: String) = classesInPathImpl(path, expand = false)
// Internal
private def classesInPathImpl(path: String, expand: Boolean) =
for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
newClassPath(dir)
+
+ def classesInManifest(used: Boolean) =
+ if (used) for (url <- manifests) yield newClassPath(AbstractFile getResources url) else Nil
}
+ def manifests = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF").filter(_.getProtocol() == "jar").toList
+
class JavaContext extends ClassPathContext[AbstractFile] {
def toBinaryName(rep: AbstractFile) = {
val name = rep.name
@@ -161,9 +144,7 @@ object ClassPath {
def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
}
- object DefaultJavaContext extends JavaContext {
- override def isValidName(name: String) = !ReflectionUtils.scalacShouldntLoadClassfile(name)
- }
+ object DefaultJavaContext extends JavaContext
private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
@@ -217,8 +198,7 @@ abstract class ClassPath[T] {
def sourcepaths: IndexedSeq[AbstractFile]
/**
- * Represents classes which can be loaded with a ClassfileLoader/MsilFileLoader
- * and / or a SourcefileLoader.
+ * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
*/
case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
def name: String = binary match {
@@ -231,16 +211,16 @@ abstract class ClassPath[T] {
/** Filters for assessing validity of various entities.
*/
- def validClassFile(name: String) = endsClass(name) && context.isValidName(name)
- def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
- def validSourceFile(name: String) = endsScala(name) || endsJava(name)
+ def validClassFile(name: String) = context.validClassFile(name)
+ def validPackage(name: String) = context.validPackage(name)
+ def validSourceFile(name: String) = context.validSourceFile(name)
/**
* Find a ClassRep given a class name of the form "package.subpackage.ClassName".
* Does not support nested classes on .NET
*/
def findClass(name: String): Option[AnyClassRep] =
- splitWhere(name, _ == '.', true) match {
+ splitWhere(name, _ == '.', doDropIndex = true) match {
case Some((pkg, rest)) =>
val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
rep map {
@@ -284,7 +264,7 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
else if (f.isDirectory && validPackage(f.name))
packageBuf += new SourcePath[T](f, context)
}
- (packageBuf.result, classBuf.result)
+ (packageBuf.result(), classBuf.result())
}
lazy val (packages, classes) = traverse()
@@ -297,7 +277,7 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) Nil else List(dir.toURL)
+ def asURLs = if (dir.file == null) List(new URL(name)) else List(dir.toURL)
def asClasspathString = dir.path
val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
@@ -305,13 +285,26 @@ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[Ab
private def traverse() = {
val classBuf = immutable.Vector.newBuilder[ClassRep]
val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath]
- dir foreach { f =>
- if (!f.isDirectory && validClassFile(f.name))
- classBuf += ClassRep(Some(f), None)
- else if (f.isDirectory && validPackage(f.name))
- packageBuf += new DirectoryClassPath(f, context)
+ dir foreach {
+ f =>
+        // Optimization: we assume the file has not changed since `dir` called
+        // `Path.apply` and categorized existing files as `Directory` or `File`.
+ val isDirectory = f match {
+ case pf: io.PlainFile => pf.givenPath match {
+ case _: io.Directory => true
+ case _: io.File => false
+ case _ => f.isDirectory
+ }
+ case _ =>
+ f.isDirectory
+ }
+ if (!isDirectory && validClassFile(f.name))
+ classBuf += ClassRep(Some(f), None)
+ else if (isDirectory && validPackage(f.name))
+ packageBuf += new DirectoryClassPath(f, context)
}
- (packageBuf.result, classBuf.result)
+ (packageBuf.result(), classBuf.result())
}
lazy val (packages, classes) = traverse()
@@ -409,15 +402,3 @@ class JavaClassPath(
containers: IndexedSeq[ClassPath[AbstractFile]],
context: JavaContext)
extends MergedClassPath[AbstractFile](containers, context) { }
-
-object JavaClassPath {
- def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = {
- val containers = {
- for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield
- new DirectoryClassPath(f, context)
- }
- new JavaClassPath(containers.toIndexedSeq, context)
- }
- def fromURLs(urls: Seq[URL]): JavaClassPath =
- fromURLs(urls, ClassPath.DefaultJavaContext)
-}
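
The ClassPath changes above move the name filters into ClassPathContext and add classpath entries discovered through META-INF/MANIFEST.MF resources on jar URLs. The wildcard handling in `expandS` is easiest to see in isolation; a hedged sketch using plain java.io.File (illustrative names, not the compiler's own helpers):

```scala
import java.io.File

// Sketch of '*' expansion: a bare "*" or an entry ending in "/*" expands to the
// directories, jars and zips found in that directory; anything else passes through.
def expandStar(entry: String): List[String] =
  if (entry == "*" || entry.endsWith(File.separator + "*")) {
    val dir = new File(if (entry == "*") "." else entry.dropRight(2))
    Option(dir.listFiles).toList.flatten
      .filter(f => f.isDirectory || f.getName.endsWith(".jar") || f.getName.endsWith(".zip"))
      .map(_.getPath)
  } else List(entry)
```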
diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
deleted file mode 100644
index 9cf2c535df..0000000000
--- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
+++ /dev/null
@@ -1,144 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.input.{ Reader }
-import scala.util.parsing.input.CharArrayReader.EofCh
-import scala.collection.mutable.ListBuffer
-
-/** A simple command line parser to replace the several different
- * simple ones spread around trunk.
- *
- * XXX Note this has been completely obsolesced by scala.tools.cmd.
- * I checked it back in as part of rolling partest back a month
- * rather than go down the rabbit hole of unravelling dependencies.
- */
-
-trait ParserUtil extends Parsers {
- protected implicit class ParserPlus[+T](underlying: Parser[T]) {
- def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b }
- def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a }
- }
-}
-
-case class CommandLine(
- args: List[String],
- unaryArguments: List[String],
- binaryArguments: List[String]
-) {
- def this(args: List[String]) = this(args, Nil, Nil)
- def this(args: Array[String]) = this(args.toList, Nil, Nil)
- def this(line: String) = this(CommandLineParser tokenize line, Nil, Nil)
-
- def withUnaryArgs(xs: List[String]) = copy(unaryArguments = xs)
- def withBinaryArgs(xs: List[String]) = copy(binaryArguments = xs)
-
- def originalArgs = args
- def assumeBinary = true
- def enforceArity = true
- def onlyKnownOptions = false
-
- val Terminator = "--"
- val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
-
- def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
- def errorFn(msg: String) = println(msg)
-
- /** argMap is option -> argument (or "" if it is a unary argument)
- * residualArgs are what is left after removing the options and their args.
- */
- lazy val (argMap, residualArgs) = {
- val residualBuffer = new ListBuffer[String]
-
- def stripQuotes(s: String) = {
- def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
- if (List('"', '\'') exists isQuotedBy) s.tail.init else s
- }
-
- def isValidOption(s: String) = !onlyKnownOptions || (unaryArguments contains s) || (binaryArguments contains s)
- def isOption(s: String) = (s startsWith "-") && (isValidOption(s) || { unknownOption(s) ; false })
- def isUnary(s: String) = isOption(s) && (unaryArguments contains s)
- def isBinary(s: String) = isOption(s) && !isUnary(s) && (assumeBinary || (binaryArguments contains s))
-
- def unknownOption(opt: String) =
- errorFn("Option '%s' not recognized.".format(opt))
- def missingArg(opt: String, what: String) =
- errorFn("Option '%s' requires argument, found %s instead.".format(opt, what))
-
- def loop(args: List[String]): Map[String, String] = {
- def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
- if (args.isEmpty) return Map()
- val hd :: rest = args
- if (rest.isEmpty) {
- if (isBinary(hd) && enforceArity)
- missingArg(hd, "EOF")
-
- if (isOption(hd)) mapForUnary(hd) else residual(args)
- }
- else
- if (hd == Terminator) residual(rest)
- else {
- val hd1 :: hd2 :: rest = args
-
- if (hd2 == Terminator) mapForUnary(hd1) ++ residual(rest)
- else if (isUnary(hd1)) mapForUnary(hd1) ++ loop(hd2 :: rest)
- else if (isBinary(hd1)) {
- // Disabling this check so
- // --scalacopts "-verbose" works. We can't tell if it's quoted,
- // the shell does us in.
- //
- // if (isOption(hd2) && enforceArity)
- // missingArg(hd1, hd2)
-
- Map(hd1 -> hd2) ++ loop(rest)
- }
- else { residual(List(hd1)) ++ loop(hd2 :: rest) }
- }
- }
-
- (loop(args), residualBuffer map stripQuotes toList)
- }
-
- def isSet(arg: String) = args contains arg
- def get(arg: String) = argMap get arg
- def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse
- def apply(arg: String) = argMap(arg)
-
- override def toString() = "CommandLine(\n%s)\n" format (args map (" " + _ + "\n") mkString)
-}
-
-object CommandLineParser extends RegexParsers with ParserUtil {
- override def skipWhitespace = false
-
- def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x))
- def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _)
- def escaped(ch: Char): Parser[String] = "\\" + ch
- def mkQuoted(ch: Char): Parser[String] = (
- elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString)
- | failure("Unmatched %s in input." format ch)
- )
-
- /** Apparently windows can't deal with the quotes sticking around. */
- lazy val squoted: Parser[String] = mkQuoted('\'') // ^^ (x => "'%s'" format x)
- lazy val dquoted: Parser[String] = mkQuoted('"') // ^^ (x => "\"" + x + "\"")
- lazy val token: Parser[String] = """\S+""".r
-
- lazy val argument: Parser[String] = squoted | dquoted | token
- lazy val commandLine: Parser[List[String]] = phrase(repsep(argument, whiteSpace))
-
- class ParseException(msg: String) extends RuntimeException(msg)
-
- def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x))
- def tokenize(line: String, errorFn: String => Unit): List[String] = {
- parse(commandLine, line.trim) match {
- case Success(args, _) => args
- case NoSuccess(msg, rest) => errorFn(msg) ; Nil
- }
- }
- def apply(line: String) = new CommandLine(tokenize(line))
-}
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index dde53dc640..ba44126df2 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -74,7 +74,7 @@ object DocStrings {
else idx :: findAll(str, idx)(p)
}
- /** Produces a string index, which is a list of ``sections'', i.e
+ /** Produces a string index, which is a list of `sections`, i.e
* pairs of start/end positions of all tagged sections in the string.
* Every section starts with an at sign and extends to the next at sign,
* or to the end of the comment string, but excluding the final two
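
The comment fixed above describes the string index of a doc comment: (start, end) pairs for each '@'-tagged section, where a section runs from one tag to the next or to the end of the comment, minus the closing delimiter. A rough, hypothetical sketch of that pairing (the real implementation works on line starts and is more careful):

```scala
// Rough sketch only: pair each '@' position with the next one (or the end of the
// comment body, excluding the trailing "*/").
def sectionIndex(doc: String): List[(Int, Int)] = {
  val end    = if (doc.endsWith("*/")) doc.length - 2 else doc.length
  val starts = (0 until end).filter(doc.charAt(_) == '@').toList
  starts.zip(starts.drop(1) :+ end)
}
```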
diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala
index 34344263e8..1608ffa425 100644
--- a/src/compiler/scala/tools/nsc/util/Exceptional.scala
+++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala
@@ -3,8 +3,6 @@ package util
import java.util.concurrent.ExecutionException
import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
-import scala.reflect.internal.util.StringOps._
-import scala.language.implicitConversions
object Exceptional {
def unwrap(x: Throwable): Throwable = x match {
diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
deleted file mode 100644
index 5421843438..0000000000
--- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-import scala.collection.mutable
-
-trait FreshNameCreator {
- /** Do not call before after type checking ends.
- * PP: I think that directive needs to lose a word somewhere.
- */
- def newName(): String
- def newName(prefix: String): String
-
- @deprecated("use newName(prefix)", "2.9.0")
- def newName(pos: scala.reflect.internal.util.Position, prefix: String): String = newName(prefix)
- @deprecated("use newName()", "2.9.0")
- def newName(pos: scala.reflect.internal.util.Position): String = newName()
-}
-
-object FreshNameCreator {
- class Default extends FreshNameCreator {
- protected var counter = 0
- protected val counters = mutable.HashMap[String, Int]() withDefaultValue 0
-
- /**
- * Create a fresh name with the given prefix. It is guaranteed
- * that the returned name has never been returned by a previous
- * call to this function (provided the prefix does not end in a digit).
- */
- def newName(prefix: String): String = {
- val safePrefix = prefix.replaceAll("""[<>]""", """\$""")
- counters(safePrefix) += 1
-
- safePrefix + counters(safePrefix)
- }
- def newName(): String = {
- counter += 1
- "$" + counter + "$"
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
index b7ed7903bc..58a5442465 100644
--- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package util
import scala.reflect.internal.Chars._
@@ -14,74 +15,32 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
def this(buf: IndexedSeq[Char], decodeUni: Boolean, error: String => Unit) =
this(buf, 0, /* 1, 1, */ decodeUni, error)
- /** produce a duplicate of this char array reader which starts reading
- * at current position, independent of what happens to original reader
- */
- def dup: JavaCharArrayReader = clone().asInstanceOf[JavaCharArrayReader]
-
- /** layout constant
- */
- val tabinc = 8
-
/** the line and column position of the current character
*/
var ch: Char = _
var bp = start
- var oldBp = -1
- var oldCh: Char = _
-
- //private var cline: Int = _
- //private var ccol: Int = _
def cpos = bp
var isUnicode: Boolean = _
- var lastLineStartPos: Int = 0
- var lineStartPos: Int = 0
- var lastBlankLinePos: Int = 0
-
- private var onlyBlankChars = false
- //private var nextline = startline
- //private var nextcol = startcol
-
- private def markNewLine() {
- lastLineStartPos = lineStartPos
- if (onlyBlankChars) lastBlankLinePos = lineStartPos
- lineStartPos = bp
- onlyBlankChars = true
- //nextline += 1
- //nextcol = 1
- }
-
- def hasNext: Boolean = if (bp < buf.length) true
- else {
- false
- }
- def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals
+ def hasNext = bp < buf.length
def next(): Char = {
- //cline = nextline
- //ccol = nextcol
val buf = this.buf.asInstanceOf[collection.mutable.WrappedArray[Char]].array
if(!hasNext) {
ch = SU
return SU // there is an endless stream of SU's at the end
}
- oldBp = bp
- oldCh = ch
ch = buf(bp)
isUnicode = false
bp = bp + 1
ch match {
case '\t' =>
- // nextcol = ((nextcol - 1) / tabinc * tabinc) + tabinc + 1;
case CR =>
- if (bp < buf.size && buf(bp) == LF) {
+ if (bp < buf.length && buf(bp) == LF) {
ch = LF
bp += 1
}
- markNewLine()
case LF | FF =>
- markNewLine()
case '\\' =>
def evenSlashPrefix: Boolean = {
var p = bp - 2
@@ -90,34 +49,23 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
}
def udigit: Int = {
val d = digit2int(buf(bp), 16)
- if (d >= 0) { bp += 1; /* nextcol = nextcol + 1 */ }
- else error("error in unicode escape");
+ if (d >= 0) bp += 1
+ else error("error in unicode escape")
d
}
- // nextcol += 1
if (buf(bp) == 'u' && decodeUni && evenSlashPrefix) {
do {
bp += 1 //; nextcol += 1
- } while (buf(bp) == 'u');
+ } while (buf(bp) == 'u')
val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
ch = code.asInstanceOf[Char]
isUnicode = true
}
case _ =>
- if (ch > ' ') onlyBlankChars = false
- // nextcol += 1
}
ch
}
- def rewind() {
- if (oldBp == -1) throw new IllegalArgumentException
- bp = oldBp
- ch = oldCh
- oldBp = -1
- oldCh = 'x'
- }
-
def copy: JavaCharArrayReader =
new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error)
}
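
The escape handling that survives the cleanup above folds four hex digits into a character (`udigit << 12 | udigit << 8 | udigit << 4 | udigit`). An equivalent standalone sketch:

```scala
// Equivalent of the four-digit fold above: \uXXXX carries exactly four hex digits.
def decodeUnicodeEscape(hexDigits: String): Char = {
  require(hexDigits.length == 4, "a unicode escape has exactly four hex digits")
  Integer.parseInt(hexDigits, 16).toChar
}

decodeUnicodeEscape("0041")   // 'A'
```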
diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
deleted file mode 100644
index 77a19d3ead..0000000000
--- a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
+++ /dev/null
@@ -1,170 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2006-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-// $Id$
-
-package scala.tools.nsc
-package util
-
-import java.io.File
-import java.net.URL
-import java.util.StringTokenizer
-import scala.util.Sorting
-import scala.collection.mutable
-import scala.tools.nsc.io.{ AbstractFile, MsilFile }
-import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Assembly }
-import ClassPath.ClassPathContext
-import scala.reflect.runtime.ReflectionUtils.isTraitImplementation
-
-/** Keeping the MSIL classpath code in its own file is important to make sure
- * we don't accidentally introduce a dependency on msil.jar in the jvm.
- */
-
-object MsilClassPath {
- def collectTypes(assemFile: AbstractFile) = {
- var res: Array[MSILType] = MSILType.EmptyTypes
- val assem = Assembly.LoadFrom(assemFile.path)
- if (assem != null) {
- // DeclaringType == null: true for non-inner classes
- res = assem.GetTypes() filter (_.DeclaringType == null)
- Sorting.stableSort(res, (t1: MSILType, t2: MSILType) => (t1.FullName compareTo t2.FullName) < 0)
- }
- res
- }
-
- /** On the java side this logic is in PathResolver, but as I'm not really
- * up to folding MSIL into that, I am encapsulating it here.
- */
- def fromSettings(settings: Settings): MsilClassPath = {
- val context =
- if (settings.inline.value) new MsilContext
- else new MsilContext { override def isValidName(name: String) = !isTraitImplementation(name) }
-
- import settings._
- new MsilClassPath(assemextdirs.value, assemrefs.value, sourcepath.value, context)
- }
-
- class MsilContext extends ClassPathContext[MsilFile] {
- def toBinaryName(rep: MsilFile) = rep.msilType.Name
- def newClassPath(assemFile: AbstractFile) = new AssemblyClassPath(MsilClassPath collectTypes assemFile, "", this)
- }
-
- private def assembleEntries(ext: String, user: String, source: String, context: MsilContext): List[ClassPath[MsilFile]] = {
- import ClassPath._
- val etr = new mutable.ListBuffer[ClassPath[MsilFile]]
- val names = new mutable.HashSet[String]
-
- // 1. Assemblies from -Xassem-extdirs
- for (dirName <- expandPath(ext, expandStar = false)) {
- val dir = AbstractFile.getDirectory(dirName)
- if (dir ne null) {
- for (file <- dir) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += context.newClassPath(file)
- }
- }
- }
- }
-
- // 2. Assemblies from -Xassem-path
- for (fileName <- expandPath(user, expandStar = false)) {
- val file = AbstractFile.getFile(fileName)
- if (file ne null) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += context.newClassPath(file)
- }
- }
- }
-
- def check(n: String) {
- if (!names.contains(n))
- throw new AssertionError("Cannot find assembly "+ n +
- ". Use -Xassem-extdirs or -Xassem-path to specify its location")
- }
- check("mscorlib.dll")
- check("scalaruntime.dll")
-
- // 3. Source path
- for (dirName <- expandPath(source, expandStar = false)) {
- val file = AbstractFile.getDirectory(dirName)
- if (file ne null) etr += new SourcePath[MsilFile](file, context)
- }
-
- etr.toList
- }
-}
-import MsilClassPath._
-
-/**
- * A assembly file (dll / exe) containing classes and namespaces
- */
-class AssemblyClassPath(types: Array[MSILType], namespace: String, val context: MsilContext) extends ClassPath[MsilFile] {
- def name = {
- val i = namespace.lastIndexOf('.')
- if (i < 0) namespace
- else namespace drop (i + 1)
- }
- def asURLs = List(new java.net.URL(name))
- def asClasspathString = sys.error("Unknown") // I don't know what if anything makes sense here?
-
- private lazy val first: Int = {
- var m = 0
- var n = types.length - 1
- while (m < n) {
- val l = (m + n) / 2
- val res = types(l).FullName.compareTo(namespace)
- if (res < 0) m = l + 1
- else n = l
- }
- if (types(m).FullName.startsWith(namespace)) m else types.length
- }
-
- lazy val classes = {
- val cls = new mutable.ListBuffer[ClassRep]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- // CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..)
- if (types(i).Namespace == namespace)
- cls += ClassRep(Some(new MsilFile(types(i))), None)
- i += 1
- }
- cls.toIndexedSeq
- }
-
- lazy val packages = {
- val nsSet = new mutable.HashSet[String]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- val subns = types(i).Namespace
- if (subns.length > namespace.length) {
- // example: namespace = "System", subns = "System.Reflection.Emit"
- // => find second "." and "System.Reflection" to nsSet.
- val end = subns.indexOf('.', namespace.length + 1)
- nsSet += (if (end < 0) subns
- else subns.substring(0, end))
- }
- i += 1
- }
- val xs = for (ns <- nsSet.toList)
- yield new AssemblyClassPath(types, ns, context)
-
- xs.toIndexedSeq
- }
-
- val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
-
- override def toString() = "assembly classpath "+ namespace
-}
-
-/**
- * The classpath when compiling with target:msil. Binary files are represented as
- * MSILType values.
- */
-class MsilClassPath(ext: String, user: String, source: String, context: MsilContext)
-extends MergedClassPath[MsilFile](MsilClassPath.assembleEntries(ext, user, source, context), context) { } \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/util/MultiHashMap.scala b/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
deleted file mode 100644
index 67987c6e52..0000000000
--- a/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.tools.nsc.util
-
-import scala.collection.{ mutable, immutable }
-
-/** A hashmap with set-valued values, and an empty set as default value
- */
-class MultiHashMap[K, V] extends mutable.HashMap[K, immutable.Set[V]] {
- override def default(key: K): immutable.Set[V] = Set()
-}
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
deleted file mode 100644
index 1f6fa68f57..0000000000
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ /dev/null
@@ -1,168 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import java.lang.{ ClassLoader => JClassLoader }
-import java.lang.reflect.{ Constructor, Modifier, Method }
-import java.io.{ File => JFile }
-import java.net.{ URLClassLoader => JURLClassLoader }
-import java.net.URL
-import scala.reflect.runtime.ReflectionUtils.unwrapHandler
-import ScalaClassLoader._
-import scala.util.control.Exception.{ catching }
-import scala.language.implicitConversions
-import scala.reflect.{ ClassTag, classTag }
-
-trait HasClassPath {
- def classPathURLs: Seq[URL]
-}
-
-/** A wrapper around java.lang.ClassLoader to lower the annoyance
- * of java reflection.
- */
-trait ScalaClassLoader extends JClassLoader {
- /** Executing an action with this classloader as context classloader */
- def asContext[T](action: => T): T = {
- val saved = contextLoader
- try { setContext(this) ; action }
- finally setContext(saved)
- }
- def setAsContext() { setContext(this) }
-
- /** Load and link a class with this classloader */
- def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, false)
- /** Load, link and initialize a class with this classloader */
- def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, true)
-
- private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] =
- catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
- Class.forName(path, initialize, this).asInstanceOf[Class[T]]
-
- /** Create an instance of a class with this classloader */
- def create(path: String): AnyRef =
- tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull
-
- def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] =
- classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]])
-
- /** The actual bytes for a class file, or an empty array if it can't be found. */
- def classBytes(className: String): Array[Byte] = classAsStream(className) match {
- case null => Array()
- case stream => io.Streamable.bytes(stream)
- }
-
- /** An InputStream representing the given class name, or null if not found. */
- def classAsStream(className: String) =
- getResourceAsStream(className.replaceAll("""\.""", "/") + ".class")
-
- /** Run the main method of a class to be loaded by this classloader */
- def run(objectName: String, arguments: Seq[String]) {
- val clsToRun = tryToInitializeClass(objectName) getOrElse (
- throw new ClassNotFoundException(objectName)
- )
- val method = clsToRun.getMethod("main", classOf[Array[String]])
- if (!Modifier.isStatic(method.getModifiers))
- throw new NoSuchMethodException(objectName + ".main is not static")
-
- try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) // !!! : AnyRef shouldn't be necessary
- catch unwrapHandler({ case ex => throw ex })
- }
-
- /** A list comprised of this classloader followed by all its
- * (non-null) parent classloaders, if any.
- */
- def loaderChain: List[ScalaClassLoader] = this :: (getParent match {
- case null => Nil
- case p => p.loaderChain
- })
-}
-
-/** Methods for obtaining various classloaders.
- * appLoader: the application classloader. (Also called the java system classloader.)
- * extLoader: the extension classloader.
- * bootLoader: the boot classloader.
- * contextLoader: the context classloader.
- */
-object ScalaClassLoader {
- /** Returns loaders which are already ScalaClassLoaders unaltered,
- * and translates java.net.URLClassLoaders into scala URLClassLoaders.
- * Otherwise creates a new wrapper.
- */
- implicit def apply(cl: JClassLoader): ScalaClassLoader = cl match {
- case cl: ScalaClassLoader => cl
- case cl: JURLClassLoader => new URLClassLoader(cl.getURLs.toSeq, cl.getParent)
- case _ => new JClassLoader(cl) with ScalaClassLoader
- }
- def contextLoader = apply(Thread.currentThread.getContextClassLoader)
- def appLoader = apply(JClassLoader.getSystemClassLoader)
- def extLoader = apply(appLoader.getParent)
- def bootLoader = apply(null)
- def contextChain = loaderChain(contextLoader)
-
- def pathToErasure[T: ClassTag] = pathToClass(classTag[T].runtimeClass)
- def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class"
- def locate[T: ClassTag] = contextLoader getResource pathToErasure[T]
-
- /** Tries to guess the classpath by type matching the context classloader
- * and its parents, looking for any classloaders which will reveal their
- * classpath elements as urls. It it can't find any, creates a classpath
- * from the supplied string.
- */
- def guessClassPathString(default: String = ""): String = {
- val classpathURLs = contextChain flatMap {
- case x: HasClassPath => x.classPathURLs
- case x: JURLClassLoader => x.getURLs.toSeq
- case _ => Nil
- }
- if (classpathURLs.isEmpty) default
- else JavaClassPath.fromURLs(classpathURLs).asClasspathString
- }
-
- def loaderChain(head: JClassLoader) = {
- def loop(cl: JClassLoader): List[JClassLoader] =
- if (cl == null) Nil else cl :: loop(cl.getParent)
-
- loop(head)
- }
- def setContext(cl: JClassLoader) =
- Thread.currentThread.setContextClassLoader(cl)
- def savingContextLoader[T](body: => T): T = {
- val saved = contextLoader
- try body
- finally setContext(saved)
- }
-
- class URLClassLoader(urls: Seq[URL], parent: JClassLoader)
- extends JURLClassLoader(urls.toArray, parent)
- with ScalaClassLoader
- with HasClassPath {
-
- private var classloaderURLs: Seq[URL] = urls
- private def classpathString = ClassPath.fromURLs(urls: _*)
- def classPathURLs: Seq[URL] = classloaderURLs
- def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs
-
- /** Override to widen to public */
- override def addURL(url: URL) = {
- classloaderURLs :+= url
- super.addURL(url)
- }
- def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n")
- }
-
- def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader =
- new URLClassLoader(urls, parent)
-
- /** True if supplied class exists in supplied path */
- def classExists(urls: Seq[URL], name: String): Boolean =
- fromURLs(urls) tryToLoadClass name isDefined
-
- /** Finding what jar a clazz or instance came from */
- def origin(x: Any): Option[URL] = originOfClass(x.getClass)
- def originOfClass(x: Class[_]): Option[URL] =
- Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation))
-}
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index 2b87280c24..b804bfb842 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -3,17 +3,17 @@
* @author Martin Odersky
*/
-package scala.tools
+package scala
+package tools
package nsc
package util
-import java.io.{File, FileInputStream, PrintStream}
+import java.io.PrintStream
import java.lang.Long.toHexString
import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
import scala.reflect.internal.{Flags, Names}
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import interpreter.ByteCode.scalaSigBytesForPath
object ShowPickled extends Names {
import PickleFormat._
@@ -94,7 +94,6 @@ object ShowPickled extends Names {
case ANNOTATEDtpe => "ANNOTATEDtpe"
case ANNOTINFO => "ANNOTINFO"
case ANNOTARGARRAY => "ANNOTARGARRAY"
- // case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe"
case EXISTENTIALtpe => "EXISTENTIALtpe"
case TREE => "TREE"
case MODIFIERS => "MODIFIERS"
@@ -109,7 +108,7 @@ object ShowPickled extends Names {
var result = 0L
var b = 0L
do {
- b = data(idx)
+ b = data(idx).toLong
idx += 1
result = (result << 7) + (b & 0x7f)
} while((b & 0x80) != 0L)
@@ -165,7 +164,7 @@ object ShowPickled extends Names {
out.print(" %s[%s]".format(toHexString(pflags), flagString))
}
- /** Might be info or privateWithin */
+ /* Might be info or privateWithin */
val x = buf.readNat()
if (buf.readIndex == end) {
printFlags(None)
@@ -177,9 +176,9 @@ object ShowPickled extends Names {
}
}
- /** Note: the entries which require some semantic analysis to be correctly
- * interpreted are for the most part going to tell you the wrong thing.
- * It's not so easy to duplicate the logic applied in the UnPickler.
+ /* Note: the entries which require some semantic analysis to be correctly
+ * interpreted are for the most part going to tell you the wrong thing.
+ * It's not so easy to duplicate the logic applied in the UnPickler.
*/
def printEntry(i: Int) {
buf.readIndex = index(i)
@@ -251,7 +250,7 @@ object ShowPickled extends Names {
case SYMANNOT =>
printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef)
case ANNOTATEDtpe =>
- printTypeRef(); buf.until(end, printAnnotInfoRef);
+ printTypeRef(); buf.until(end, printAnnotInfoRef)
case ANNOTINFO =>
printTypeRef(); buf.until(end, printAnnotArgRef)
case ANNOTARGARRAY =>
@@ -272,8 +271,7 @@ object ShowPickled extends Names {
for (i <- 0 until index.length) printEntry(i)
}
- def fromFile(path: String) = fromBytes(io.File(path).toByteArray)
- def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array())
+ def fromFile(path: String) = fromBytes(io.File(path).toByteArray())
def fromBytes(data: => Array[Byte]): Option[PickleBuffer] =
try Some(new PickleBuffer(data, 0, data.length))
catch { case _: Exception => None }
@@ -288,7 +286,7 @@ object ShowPickled extends Names {
def main(args: Array[String]) {
args foreach { arg =>
- (fromFile(arg) orElse fromName(arg)) match {
+ fromFile(arg) match {
case Some(pb) => show(arg + ":", pb)
case _ => Console.println("Cannot read " + arg)
}
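
The `data(idx).toLong` fix above sits inside the pickle format's variable-length natural-number decoding: each byte contributes its low seven bits, most significant group first, and the high bit flags continuation. As a self-contained sketch:

```scala
// Self-contained version of the Nat decoding above; returns the value and the
// index just past the last byte consumed.
def readNat(data: Array[Byte], start: Int): (Long, Int) = {
  var idx    = start
  var result = 0L
  var b      = 0L
  do {
    b = data(idx).toLong
    idx += 1
    result = (result << 7) + (b & 0x7f)
  } while ((b & 0x80) != 0L)
  (result, idx)
}
```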
diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
index 2601798b96..4e1cf02a6e 100644
--- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
+++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
@@ -6,7 +6,7 @@ package util
import java.io.PrintStream
/** A simple tracer
- * @param out: The print stream where trace info shoul be sent
+ * @param out: The print stream where trace info should be sent
* @param enabled: A condition that must be true for trace info to be produced.
*/
class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
@@ -14,6 +14,5 @@ class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
if (enabled) out.println(msg+value)
value
}
- def withOutput(out: PrintStream) = new SimpleTracer(out, enabled)
def when(enabled: Boolean): SimpleTracer = new SimpleTracer(out, enabled)
}
diff --git a/src/compiler/scala/tools/nsc/util/StackTracing.scala b/src/compiler/scala/tools/nsc/util/StackTracing.scala
new file mode 100644
index 0000000000..fa4fe29f28
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/StackTracing.scala
@@ -0,0 +1,76 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.tools.nsc.util
+
+private[util] trait StackTracing extends Any {
+
+ /** Format a stack trace, returning the prefix consisting of frames that satisfy
+ * a given predicate.
+ * The format is similar to the typical case described in the JavaDoc
+ * for [[java.lang.Throwable#printStackTrace]].
+ * If a stack trace is truncated, it will be followed by a line of the form
+ * `... 3 elided`, by analogy to the lines `... 3 more` which indicate
+ * shared stack trace segments.
+ * @param e the exception
+ * @param p the predicate to select the prefix
+ */
+ def stackTracePrefixString(e: Throwable)(p: StackTraceElement => Boolean): String = {
+ import collection.mutable.{ ArrayBuffer, ListBuffer }
+ import compat.Platform.EOL
+ import util.Properties.isJavaAtLeast
+
+ val sb = ListBuffer.empty[String]
+
+ type TraceRelation = String
+ val Self = new TraceRelation("")
+ val CausedBy = new TraceRelation("Caused by: ")
+ val Suppressed = new TraceRelation("Suppressed: ")
+
+ val suppressable = isJavaAtLeast("1.7")
+
+ def clazz(e: Throwable) = e.getClass.getName
+ def because(e: Throwable): String = e.getCause match { case null => null ; case c => header(c) }
+ def msg(e: Throwable): String = e.getMessage match { case null => because(e) ; case s => s }
+ def txt(e: Throwable): String = msg(e) match { case null => "" ; case s => s": $s" }
+ def header(e: Throwable): String = s"${clazz(e)}${txt(e)}"
+
+ val indent = "\u0020\u0020"
+
+ val seen = new ArrayBuffer[Throwable](16)
+ def unseen(t: Throwable) = {
+ def inSeen = seen exists (_ eq t)
+ val interesting = (t != null) && !inSeen
+ if (interesting) seen += t
+ interesting
+ }
+
+ def print(e: Throwable, r: TraceRelation, share: Array[StackTraceElement], indents: Int): Unit = if (unseen(e)) {
+ val trace = e.getStackTrace
+ val frames = (
+ if (share.nonEmpty) {
+ val spare = share.reverseIterator
+ val trimmed = trace.reverse dropWhile (spare.hasNext && spare.next == _)
+ trimmed.reverse
+ } else trace
+ )
+ val prefix = frames takeWhile p
+ val margin = indent * indents
+ val indented = margin + indent
+ sb append s"${margin}${r}${header(e)}"
+ prefix foreach (f => sb append s"${indented}at $f")
+ if (frames.size < trace.size) sb append s"$indented... ${trace.size - frames.size} more"
+ if (r == Self && prefix.size < frames.size) sb append s"$indented... ${frames.size - prefix.size} elided"
+ print(e.getCause, CausedBy, trace, indents)
+ if (suppressable) {
+ import scala.language.reflectiveCalls
+ type Suppressing = { def getSuppressed(): Array[Throwable] }
+ for (s <- e.asInstanceOf[Suppressing].getSuppressed) print(s, Suppressed, frames, indents + 1)
+ }
+ }
+ print(e, Self, share = Array.empty, indents = 0)
+
+ sb mkString EOL
+ }
+}
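
One detail of the new formatter worth isolating is how a cause's frames that are shared with the enclosing trace are trimmed and later reported as `... N more`. A hedged sketch over plain Lists rather than Array[StackTraceElement]:

```scala
// Drop the trailing frames a cause shares with the enclosing trace; the caller
// can report the dropped count as "... N more".
def dropSharedSuffix[A](trace: List[A], enclosing: List[A]): List[A] = {
  val shared = trace.reverse.zip(enclosing.reverse).takeWhile { case (a, b) => a == b }.size
  trace.dropRight(shared)
}
```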
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
deleted file mode 100644
index d2e9238e8f..0000000000
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-/** Sets implemented as binary trees.
- *
- * @author Martin Odersky
- * @version 1.0
- */
-class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
-
- private class Tree(val elem: T) {
- var l: Tree = null
- var r: Tree = null
- }
-
- private var tree: Tree = null
-
- def findEntry(x: T): T = {
- def find(t: Tree): T = {
- if (t eq null) null
- else if (less(x, t.elem)) find(t.l)
- else if (less(t.elem, x)) find(t.r)
- else t.elem
- }
- find(tree)
- }
-
- def addEntry(x: T) {
- def add(t: Tree): Tree = {
- if (t eq null) new Tree(x)
- else if (less(x, t.elem)) { t.l = add(t.l); t }
- else if (less(t.elem, x)) { t.r = add(t.r); t }
- else t
- }
- tree = add(tree)
- }
-
- def iterator = toList.iterator
-
- override def foreach[U](f: T => U) {
- def loop(t: Tree) {
- if (t ne null) {
- loop(t.l)
- f(t.elem)
- loop(t.r)
- }
- }
- loop(tree)
- }
- override def toList = {
- val xs = scala.collection.mutable.ListBuffer[T]()
- foreach(xs += _)
- xs.toList
- }
-
- override def toString(): String = {
- if (tree eq null) "<empty>" else "(..." + tree.elem + "...)"
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index b1f4696d3e..4f7a9ff878 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -7,9 +7,9 @@ class WorkScheduler {
type Action = () => Unit
- private var todo = new mutable.Queue[Action]
- private var throwables = new mutable.Queue[Throwable]
- private var interruptReqs = new mutable.Queue[InterruptReq]
+ private val todo = new mutable.Queue[Action]
+ private val throwables = new mutable.Queue[Throwable]
+ private val interruptReqs = new mutable.Queue[InterruptReq]
/** Called from server: block until one of todo list, throwables or interruptReqs is nonempty */
def waitForMoreWork() = synchronized {
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index d34d4ee092..4237f36ade 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -3,14 +3,13 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
package object util {
-
- implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
-
// forwarder for old code that builds against 2.9 and 2.10
val Chars = scala.reflect.internal.Chars
@@ -18,16 +17,9 @@ package object util {
type HashSet[T >: Null <: AnyRef] = scala.reflect.internal.util.HashSet[T]
val HashSet = scala.reflect.internal.util.HashSet
- def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value
-
/** Apply a function and return the passed value */
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
- /** Frequency counter */
- def freq[T](xs: Traversable[T]): Map[T, Int] = xs groupBy identity mapValues (_.size)
-
- def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1)
-
/** Execute code and then wait for all non-daemon Threads
* created and begun during its execution to complete.
*/
@@ -54,18 +46,6 @@ package object util {
(result, ts2 filterNot (ts1 contains _))
}
- /** Given a function and a block of code, evaluates code block,
- * calls function with milliseconds elapsed, and returns block result.
- */
- def millisElapsedTo[T](f: Long => Unit)(body: => T): T = {
- val start = System.currentTimeMillis
- val result = body
- val end = System.currentTimeMillis
-
- f(end - start)
- result
- }
-
/** Generate a string using a routine that wants to write on a stream. */
def stringFromWriter(writer: PrintWriter => Unit): String = {
val stringWriter = new StringWriter()
@@ -83,60 +63,58 @@ package object util {
}
def stackTraceString(ex: Throwable): String = stringFromWriter(ex printStackTrace _)
- lazy val trace = new SimpleTracer(System.out)
- lazy val errtrace = new SimpleTracer(System.err)
-
- @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
- val StringOps = scala.reflect.internal.util.StringOps
-
- @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
- type StringOps = scala.reflect.internal.util.StringOps
-
- @deprecated("Moved to scala.reflect.internal.util.TableDef", "2.10.0")
- val TableDef = scala.reflect.internal.util.TableDef
+ /** A one line string which contains the class of the exception, the
+ * message if any, and the first non-Predef location in the stack trace
+ * (to exclude assert, require, etc.)
+ */
+ def stackTraceHeadString(ex: Throwable): String = {
+ val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef") take 1 mkString ""
+ val msg = ex.getMessage match { case null | "" => "" ; case s => s"""("$s")""" }
+ val clazz = ex.getClass.getName.split('.').last
- @deprecated("Moved to scala.reflect.internal.util.TableDef", "2.10.0")
- type TableDef[T] = scala.reflect.internal.util.TableDef[T]
+ s"$clazz$msg @ $frame"
+ }
- @deprecated("scala.reflect.internal.util.WeakHashSet", "2.10.0")
- type WeakHashSet[T <: AnyRef] = scala.reflect.internal.util.WeakHashSet[T]
+ implicit class StackTraceOps(private val e: Throwable) extends AnyVal with StackTracing {
+ /** Format the stack trace, returning the prefix consisting of frames that satisfy
+ * a given predicate.
+ * The format is similar to the typical case described in the JavaDoc
+ * for [[java.lang.Throwable#printStackTrace]].
+ * If a stack trace is truncated, it will be followed by a line of the form
+ * `... 3 elided`, by analogy to the lines `... 3 more` which indicate
+ * shared stack trace segments.
+ * @param p the predicate to select the prefix
+ */
+ def stackTracePrefixString(p: StackTraceElement => Boolean): String = stackTracePrefixString(e)(p)
+ }
- @deprecated("Moved to scala.reflect.internal.util.Position", "2.10.0")
- val Position = scala.reflect.internal.util.Position
+ lazy val trace = new SimpleTracer(System.out)
+ // These four deprecated since 2.10.0 are still used in (at least)
+ // the sbt 0.12.4 compiler interface.
@deprecated("Moved to scala.reflect.internal.util.Position", "2.10.0")
type Position = scala.reflect.internal.util.Position
-
@deprecated("Moved to scala.reflect.internal.util.NoPosition", "2.10.0")
val NoPosition = scala.reflect.internal.util.NoPosition
-
@deprecated("Moved to scala.reflect.internal.util.FakePos", "2.10.0")
val FakePos = scala.reflect.internal.util.FakePos
-
@deprecated("Moved to scala.reflect.internal.util.FakePos", "2.10.0")
type FakePos = scala.reflect.internal.util.FakePos
- @deprecated("Moved to scala.reflect.internal.util.OffsetPosition", "2.10.0")
- type OffsetPosition = scala.reflect.internal.util.OffsetPosition
-
+ // These three were still used in scala-refactoring.
@deprecated("Moved to scala.reflect.internal.util.RangePosition", "2.10.0")
type RangePosition = scala.reflect.internal.util.RangePosition
-
@deprecated("Moved to scala.reflect.internal.util.SourceFile", "2.10.0")
type SourceFile = scala.reflect.internal.util.SourceFile
+ @deprecated("Moved to scala.reflect.internal.util.BatchSourceFile", "2.10.0")
+ type BatchSourceFile = scala.reflect.internal.util.BatchSourceFile
- @deprecated("Moved to scala.reflect.internal.util.NoSourceFile", "2.10.0")
- val NoSourceFile = scala.reflect.internal.util.NoSourceFile
-
- @deprecated("Moved to scala.reflect.internal.util.NoFile", "2.10.0")
- val NoFile = scala.reflect.internal.util.NoFile
-
- @deprecated("Moved to scala.reflect.internal.util.ScriptSourceFile", "2.10.0")
- val ScriptSourceFile = scala.reflect.internal.util.ScriptSourceFile
+ @deprecated("Moved to scala.reflect.internal.util.AbstractFileClassLoader", "2.11.0")
+ type AbstractFileClassLoader = scala.reflect.internal.util.AbstractFileClassLoader
- @deprecated("Moved to scala.reflect.internal.util.ScriptSourceFile", "2.10.0")
- type ScriptSourceFile = scala.reflect.internal.util.ScriptSourceFile
+ @deprecated("Moved to scala.reflect.internal.util.ScalaClassLoader", "2.11.0")
+ val ScalaClassLoader = scala.reflect.internal.util.ScalaClassLoader
- @deprecated("Moved to scala.reflect.internal.util.BatchSourceFile", "2.10.0")
- type BatchSourceFile = scala.reflect.internal.util.BatchSourceFile
+ @deprecated("Moved to scala.reflect.internal.util.ScalaClassLoader", "2.11.0")
+ type ScalaClassLoader = scala.reflect.internal.util.ScalaClassLoader
}
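
With the `StackTraceOps` implicit class above in scope, callers can format just the frames they care about; a usage sketch with an illustrative predicate:

```scala
import scala.tools.nsc.util._

// Keep only the application's own frames; the rest shows up as "... N elided".
def appTrace(ex: Throwable): String =
  ex.stackTracePrefixString(frame => !frame.getClassName.startsWith("scala."))
```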
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index d35ac43424..bb0bbd79a3 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -2,7 +2,10 @@ package scala.tools
package reflect
import scala.reflect.reify.Taggers
-import scala.tools.nsc.typechecker.{Analyzer, Macros}
+import scala.tools.nsc.typechecker.{ Analyzer, Macros }
+import scala.reflect.runtime.Macros.currentMirror
+import scala.reflect.api.Universe
+import scala.tools.reflect.quasiquotes.{ Quasiquotes => QuasiquoteImpls }
/** Optimizes system macro expansions by hardwiring them directly to their implementations
* bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
@@ -12,30 +15,43 @@ trait FastTrack {
import global._
import definitions._
-
import scala.language.implicitConversions
- private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
- private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations
+ import treeInfo.Applied
+
+ private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } =
+ new { val c: c0.type = c0 } with Taggers
+ private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } =
+ new { val c: c0.type = c0 } with MacroImplementations
+ private implicit def context2quasiquote(c0: MacroContext): QuasiquoteImpls { val c: c0.type } =
+ new { val c: c0.type = c0 } with QuasiquoteImpls
+ private def makeBlackbox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
+ sym -> new FastTrackEntry(pf, isBlackbox = true)
+ private def makeWhitebox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
+ sym -> new FastTrackEntry(pf, isBlackbox = false)
- implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args.c)
- type FastTrackExpander = PartialFunction[(MacroContext, Tree), Tree]
- case class FastTrackEntry(sym: Symbol, expander: FastTrackExpander) {
- def validate(c: MacroContext): Boolean = expander.isDefinedAt((c, c.expandee))
- def run(c: MacroContext): Any = {
- val result = expander((c, c.expandee))
- c.Expr[Nothing](result)(c.WeakTypeTag.Nothing)
+ final class FastTrackEntry(pf: PartialFunction[Applied, MacroContext => Tree], val isBlackbox: Boolean) extends (MacroArgs => Any) {
+ def validate(tree: Tree) = pf isDefinedAt Applied(tree)
+ def apply(margs: MacroArgs): margs.c.Expr[Nothing] = {
+ val MacroArgs(c, _) = margs
+ // Macros validated that the pf is defined here - and there's not much we could do if it weren't.
+ c.Expr[Nothing](pf(Applied(c.expandee))(c))(c.WeakTypeTag.Nothing)
}
}
- lazy val fastTrack: Map[Symbol, FastTrackEntry] = {
- var registry = Map[Symbol, FastTrackEntry]()
- implicit class BindTo(sym: Symbol) { def bindTo(expander: FastTrackExpander): Unit = if (sym != NoSymbol) registry += sym -> FastTrackEntry(sym, expander) }
- materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List())) => c.materializeClassTag(tt.tpe) }
- materializeWeakTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
- materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) }
- ApiUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
- ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree }
- StringContext_f bindTo { case (c, app@Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args, app.pos) }
- registry
+ /** A map from a set of pre-established macro symbols to their implementations. */
+ def fastTrack: Map[Symbol, FastTrackEntry] = fastTrackCache()
+ private val fastTrackCache = perRunCaches.newGeneric[Map[Symbol, FastTrackEntry]] {
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions._
+ Map[Symbol, FastTrackEntry](
+ makeBlackbox( materializeClassTag) { case Applied(_, ttag :: Nil, _) => _.materializeClassTag(ttag.tpe) },
+ makeBlackbox( materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) },
+ makeBlackbox( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) },
+ makeBlackbox( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) },
+ makeBlackbox( StringContext_f) { case Applied(Select(Apply(_, ps), _), _, args) => c => c.macro_StringInterpolation_f(ps, args.flatten, c.expandee.pos) },
+ makeBlackbox(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree },
+ makeWhitebox( QuasiquoteClass_api_apply) { case _ => _.expandQuasiquote },
+ makeWhitebox(QuasiquoteClass_api_unapply) { case _ => _.expandQuasiquote }
+ )
}
}
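
The registrations above pattern-match with `treeInfo.Applied`, which decomposes a possibly type-applied, possibly curried application into its core, type arguments, and value argument lists. A rough sketch of that decomposition using runtime-universe trees (an approximation, not the compiler's extractor):

```scala
import scala.reflect.runtime.universe._

// f[T](a)(b)  ~>  (f, List(T), List(List(a), List(b)))
def applied(tree: Tree): (Tree, List[Tree], List[List[Tree]]) = tree match {
  case Apply(fun, args) =>
    val (core, targs, argss) = applied(fun)
    (core, targs, argss :+ args)
  case TypeApply(fun, targs) => (fun, targs, Nil)
  case t                     => (t, Nil, Nil)
}
```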
diff --git a/src/compiler/scala/tools/reflect/FrontEnd.scala b/src/compiler/scala/tools/reflect/FrontEnd.scala
index f0d3d5973d..e3341a451f 100644
--- a/src/compiler/scala/tools/reflect/FrontEnd.scala
+++ b/src/compiler/scala/tools/reflect/FrontEnd.scala
@@ -21,7 +21,7 @@ trait FrontEnd {
def hasErrors = ERROR.count > 0
def hasWarnings = WARNING.count > 0
- case class Info(val pos: Position, val msg: String, val severity: Severity)
+ case class Info(pos: Position, msg: String, severity: Severity)
val infos = new scala.collection.mutable.LinkedHashSet[Info]
/** Handles incoming info */
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index f4f385f8b3..a9ed419b1e 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -1,10 +1,9 @@
package scala.tools.reflect
-import scala.reflect.macros.{ReificationException, UnexpectedReificationException}
-import scala.reflect.macros.runtime.Context
+import scala.reflect.macros.contexts.Context
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.Stack
-import scala.reflect.internal.util.OffsetPosition
+import scala.reflect.internal.util.Position
abstract class MacroImplementations {
val c: Context
@@ -35,7 +34,7 @@ abstract class MacroImplementations {
val argStack = Stack(args : _*)
def defval(value: Tree, tpe: Type): Unit = {
- val freshName = newTermName(c.fresh("arg$"))
+ val freshName = newTermName(c.freshName("arg$"))
evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
ids += Ident(freshName)
}
@@ -92,10 +91,11 @@ abstract class MacroImplementations {
def isConversion(idx: Int) = isPercent(idx) && !charAtIndexIs(idx + 1, 'n') && !charAtIndexIs(idx + 1, '%')
var idx = 0
- def errorAtIndex(idx: Int, msg: String) = c.error(new OffsetPosition(strTree.pos.source, strTree.pos.point + idx), msg)
+ def errorAtIndex(idx: Int, msg: String) = c.error(Position.offset(strTree.pos.source, strTree.pos.point + idx), msg)
def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
- def nonEscapedPercent(idx: Int) = errorAtIndex(idx, "percent signs not directly following splicees must be escaped")
+ def nonEscapedPercent(idx: Int) = errorAtIndex(idx,
+ "conversions must follow a splice; use %% for literal %, %n for newline")
// STEP 1: handle argument conversion
// 1) "...${smth}" => okay, equivalent to "...${smth}%s"
@@ -106,7 +106,7 @@ abstract class MacroImplementations {
// 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html
// 7) "...${smth}[%illegalJavaConversion]" => error
if (!first) {
- val arg = argStack.pop
+ val arg = argStack.pop()
if (isConversion(0)) {
// PRE str is not empty and str(0) == '%'
// argument index parameter is not allowed, thus parse
@@ -163,9 +163,9 @@ abstract class MacroImplementations {
Literal(Constant(fstring)),
newTermName("format")),
List(ids: _* )
- );
+ )
Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent
}
-} \ No newline at end of file
+}
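
The hunk above reworks the diagnostics of the f"" interpolator macro (offset positions via Position.offset and a clearer message for unescaped percent signs). A minimal usage sketch of the conversions the macro accepts and rejects, assuming a stock 2.11 build; the identifiers below are illustrative, not part of the patch.

  // Sketch of the conversions macro_StringInterpolation_f handles (illustrative).
  object FInterpolationDemo extends App {
    val name   = "James"
    val height = 1.87d
    println(f"$name%s is $height%2.2f meters tall") // conversion directly after a splice: accepted
    println(f"progress: 100%%")                     // a literal percent sign must be written as %%
    println(f"done%n")                              // %n stands for the platform line separator
    // f"50% off" is rejected at compile time with the reworded error:
    // "conversions must follow a splice; use %% for literal %, %n for newline"
  }
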
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 116ae24cdd..3ae21b6b98 100644
--- a/src/compiler/scala/tools/reflect/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -4,7 +4,6 @@ package reflect
import scala.tools.nsc.Driver
import scala.tools.nsc.Global
import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
import scala.tools.nsc.util.ScalaClassLoader
import scala.tools.util.PathResolver
@@ -16,4 +15,4 @@ object ReflectMain extends Driver {
}
override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings))
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala
index a3bc9b9bd1..5c53c81e8b 100644
--- a/src/compiler/scala/tools/reflect/StdTags.scala
+++ b/src/compiler/scala/tools/reflect/StdTags.scala
@@ -1,7 +1,6 @@
package scala.tools
package reflect
-import java.lang.{Class => jClass}
import scala.reflect.{ClassTag, classTag}
import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
@@ -24,7 +23,7 @@ trait StdTags {
}
})
- private def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
+ protected def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
u.TypeTag[T](
m,
new TypeCreator {
@@ -35,8 +34,6 @@ trait StdTags {
lazy val tagOfString = tagOfStaticClass[String]
lazy val tagOfFile = tagOfStaticClass[scala.tools.nsc.io.File]
lazy val tagOfDirectory = tagOfStaticClass[scala.tools.nsc.io.Directory]
- lazy val tagOfStdReplVals = tagOfStaticClass[scala.tools.nsc.interpreter.StdReplVals]
- lazy val tagOfIMain = tagOfStaticClass[scala.tools.nsc.interpreter.IMain]
lazy val tagOfThrowable = tagOfStaticClass[java.lang.Throwable]
lazy val tagOfClassLoader = tagOfStaticClass[java.lang.ClassLoader]
lazy val tagOfBigInt = tagOfStaticClass[BigInt]
@@ -52,7 +49,7 @@ object StdRuntimeTags extends StdTags {
}
abstract class StdContextTags extends StdTags {
- val tc: scala.reflect.macros.Context
+ val tc: scala.reflect.macros.contexts.Context
val u: tc.universe.type = tc.universe
val m = tc.mirror
}
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
index ab814b617d..236b868842 100644
--- a/src/compiler/scala/tools/reflect/ToolBox.scala
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -21,6 +21,12 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
*/
def frontEnd: FrontEnd
+ /** @see `Typers.typecheck`
+ */
+ @deprecated("Use `tb.typecheck` instead", "2.11.0")
+ def typeCheck(tree: u.Tree, pt: u.Type = u.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree =
+ typecheck(tree, pt, silent, withImplicitViewsDisabled, withMacrosDisabled)
+
/** Typechecks a tree using this ToolBox.
* This populates symbols and types of the tree and possibly transforms it to reflect certain desugarings.
*
@@ -35,7 +41,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
* `withImplicitViewsDisabled` recursively prohibits implicit views (though, implicit vals will still be looked up and filled in), default value is false
* `withMacrosDisabled` recursively prohibits macro expansions and macro-based implicits, default value is false
*/
- def typeCheck(tree: u.Tree, pt: u.Type = u.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree
+ def typecheck(tree: u.Tree, pt: u.Type = u.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree
/** Infers an implicit value of the expected type `pt` in top-level context.
* Optional `pos` parameter provides a position that will be associated with the implicit search.
@@ -47,7 +53,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
* If `silent` is false, `TypeError` will be thrown in case of an inference error.
* If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
* Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
- * Unlike in `typeCheck`, `silent` is true by default.
+ * Unlike in `typecheck`, `silent` is true by default.
*/
def inferImplicitValue(pt: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree
@@ -61,7 +67,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
* If `silent` is false, `TypeError` will be thrown in case of an inference error.
* If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
* Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
- * Unlike in `typeCheck`, `silent` is true by default.
+ * Unlike in `typecheck`, `silent` is true by default.
*/
def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree
@@ -101,4 +107,4 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
/** Represents an error during toolboxing
*/
-case class ToolBoxError(val message: String, val cause: Throwable = null) extends Throwable(message, cause)
+case class ToolBoxError(message: String, cause: Throwable = null) extends Throwable(message, cause)
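
For reference, a minimal sketch of driving the renamed ToolBox API from a runtime mirror. It assumes scala-compiler is on the classpath; the object and value names are illustrative. The old typeCheck spelling is kept only as a deprecated forwarder.

  // Minimal sketch of the renamed API (illustrative, assumes scala-compiler on the classpath).
  import scala.reflect.runtime.{currentMirror => cm}
  import scala.tools.reflect.ToolBox

  object ToolBoxDemo extends App {
    val tb    = cm.mkToolBox()
    val tree  = tb.parse("List(1, 2, 3).map(_ + 1)")
    val typed = tb.typecheck(tree)  // new spelling; tb.typeCheck(tree) still compiles but is deprecated
    println(typed.tpe)              // List[Int]
    println(tb.eval(tree))          // List(2, 3, 4)
  }
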
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 8803980dac..af13b7d0ba 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -1,14 +1,13 @@
-package scala.tools
+package scala
+package tools
package reflect
+import scala.tools.cmd.CommandLineParser
+import scala.tools.nsc.Global
import scala.tools.nsc.reporters._
import scala.tools.nsc.CompilerCommand
-import scala.tools.nsc.Global
-import scala.tools.nsc.typechecker.Modes
-import scala.tools.nsc.io.VirtualDirectory
-import scala.tools.nsc.interpreter.AbstractFileClassLoader
-import scala.tools.nsc.util.FreshNameCreator
-import scala.tools.nsc.ast.parser.Tokens.EOF
+import scala.tools.nsc.io.{AbstractFile, VirtualDirectory}
+import scala.tools.nsc.util.AbstractFileClassLoader
import scala.reflect.internal.Flags._
import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile}
import java.lang.{Class => jClass}
@@ -16,7 +15,7 @@ import scala.compat.Platform.EOL
import scala.reflect.NameTransformer
import scala.reflect.api.JavaUniverse
import scala.reflect.io.NoAbstractFile
-import scala.tools.nsc.interactive.RangePositions
+import scala.reflect.internal.FatalError
abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
@@ -32,8 +31,15 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, factorySelf.mirror.classLoader)
lazy val mirror: u.Mirror = u.runtimeMirror(classLoader)
- class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: Reporter)
- extends ReflectGlobal(settings, reporter, toolBoxSelf.classLoader) {
+ lazy val arguments = CommandLineParser.tokenize(options)
+ lazy val virtualDirectory =
+ arguments.iterator.sliding(2).collectFirst{ case Seq("-d", dir) => dir } match {
+ case Some(outDir) => AbstractFile.getDirectory(outDir)
+ case None => new VirtualDirectory("(memory)", None)
+ }
+
+ class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter0: Reporter)
+ extends ReflectGlobal(settings, reporter0, toolBoxSelf.classLoader) {
import definitions._
private val trace = scala.tools.nsc.util.trace when settings.debug.value
@@ -50,7 +56,6 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
// should be called after every use of ToolBoxGlobal in order to prevent leaks
- // there's the `withCleanupCaches` method defined below, which provides a convenient interface for that
def cleanupCaches(): Unit = {
perRunCaches.clearAll()
undoLog.clear()
@@ -59,10 +64,6 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
lastSeenContext = null
}
- def withCleanupCaches[T](body: => T): T =
- try body
- finally cleanupCaches()
-
def verify(expr: Tree): Unit = {
// Previously toolboxes used to typecheck their inputs before compiling.
// Actually, the initial demo by Martin first typechecked the reified tree,
@@ -76,13 +77,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree])
if (!typed.isEmpty) throw ToolBoxError("reflective toolbox has failed: cannot operate on trees that are already typed")
- val freeTypes = expr.freeTypes
- if (freeTypes.length > 0) {
- var msg = "reflective toolbox has failed:" + EOL
- msg += "unresolved free type variables (namely: " + (freeTypes map (ft => "%s %s".format(ft.name, ft.origin)) mkString ", ") + "). "
- msg += "have you forgot to use TypeTag annotations for type parameters external to a reifee? "
- msg += "if you have troubles tracking free type variables, consider using -Xlog-free-types"
- throw ToolBoxError(msg)
+ if (expr.freeTypes.nonEmpty) {
+ val ft_s = expr.freeTypes map (ft => s" ${ft.name} ${ft.origin}") mkString "\n "
+ throw ToolBoxError(s"""
+ |reflective toolbox failed due to unresolved free type variables:
+ |$ft_s
+ |have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+ |if you have troubles tracking free type variables, consider using -Xlog-free-types
+ """.stripMargin.trim)
}
}
@@ -103,9 +105,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
if (namesakes.length > 0) name += ("$" + (namesakes.length + 1))
freeTermNames += (ft -> newTermName(name + nme.REIFY_FREE_VALUE_SUFFIX))
})
- var expr = new Transformer {
+ val expr = new Transformer {
override def transform(tree: Tree): Tree =
- if (tree.hasSymbol && tree.symbol.isFreeTerm) {
+ if (tree.hasSymbolField && tree.symbol.isFreeTerm) {
tree match {
case Ident(_) =>
val freeTermRef = Ident(freeTermNames(tree.symbol.asFreeTerm))
@@ -124,7 +126,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
verify(expr0)
// need to wrap the expr, because otherwise you won't be able to typecheck macros against something that contains free vars
- var (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = false)
+ val exprAndFreeTerms = extractFreeTerms(expr0, wrapFreeTermRefs = false)
+ var expr = exprAndFreeTerms._1
+ val freeTerms = exprAndFreeTerms._2
val dummies = freeTerms.map{ case (freeTerm, name) => ValDef(NoMods, name, TypeTree(freeTerm.info), Select(Ident(PredefModule), newTermName("$qmark$qmark$qmark"))) }.toList
expr = Block(dummies, wrapIntoTerm(expr))
@@ -133,9 +137,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// it inaccessible then please put it somewhere designed for that
// rather than polluting the empty package with synthetics.
val ownerClass = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("<expression-owner>"))
- build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass))
+ build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectTpe), newScope, ownerClass))
val owner = ownerClass.newLocalDummy(expr.pos)
- var currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
+ val currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
val wrapper1 = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _)
val wrapper2 = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
@@ -148,30 +152,30 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val expr1 = wrapper(transform(currentTyper, expr))
var (dummies1, unwrapped) = expr1 match {
- case Block(dummies, unwrapped) => (dummies, unwrapped)
- case unwrapped => (Nil, unwrapped)
+ case Block(dummies, unwrapped) => ((dummies, unwrapped))
+ case unwrapped => ((Nil, unwrapped))
}
- var invertedIndex = freeTerms map (_.swap)
+ val invertedIndex = freeTerms map (_.swap)
// todo. also fixup singleton types
unwrapped = new Transformer {
override def transform(tree: Tree): Tree =
tree match {
- case Ident(name) if invertedIndex contains name =>
+ case Ident(name: TermName) if invertedIndex contains name =>
Ident(invertedIndex(name)) setType tree.tpe
case _ =>
super.transform(tree)
}
}.transform(unwrapped)
- new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name)))).traverse(unwrapped)
+ new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name.toTermName)))).traverse(unwrapped)
unwrapped = if (expr0.isTerm) unwrapped else unwrapFromTerm(unwrapped)
unwrapped
}
- def typeCheck(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
+ def typecheck(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
(currentTyper, expr) => {
trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
- currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
+ currentTyper.silent(_.typed(expr, pt), reportAmbiguousErrors = false) match {
case analyzer.SilentResultValue(result) =>
trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
result
@@ -199,10 +203,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def wrap(expr0: Tree): ModuleDef = {
val (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = true)
- val (obj, mclazz) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
+ val (obj, _) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
nextWrapperModuleName())
- val minfo = ClassInfoType(List(ObjectClass.tpe), newScope, obj.moduleClass)
+ val minfo = ClassInfoType(List(ObjectTpe), newScope, obj.moduleClass)
obj.moduleClass setInfo minfo
obj setInfo obj.moduleClass.tpe
@@ -212,7 +216,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val (fv, name) = schema
meth.newValueParameter(name, newFlags = if (fv.hasStableFlag) STABLE else 0) setInfo appliedType(definitions.FunctionClass(0).tpe, List(fv.tpe.resultType))
}
- meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyClass.tpe)
+ meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyTpe)
minfo.decls enter meth
def defOwner(tree: Tree): Symbol = tree find (_.isDef) map (_.symbol) match {
case Some(sym) if sym != null && sym != NoSymbol => sym.owner
@@ -223,17 +227,16 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val moduledef = ModuleDef(
obj,
- Template(
- List(TypeTree(ObjectClass.tpe)),
- emptyValDef,
+ gen.mkTemplate(
+ List(TypeTree(ObjectTpe)),
+ noSelfType,
NoMods,
List(),
- List(List()),
List(methdef),
NoPosition))
trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
- var cleanedUp = resetLocalAttrs(moduledef)
+ val cleanedUp = resetLocalAttrs(moduledef)
trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
cleanedUp.asInstanceOf[ModuleDef]
}
@@ -249,7 +252,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
throwIfErrors()
val className = mdef.symbol.fullName
- if (settings.debug.value) println("generated: "+className)
+ if (settings.debug) println("generated: "+className)
def moduleFileName(className: String) = className + "$"
val jclazz = jClass.forName(moduleFileName(className), true, classLoader)
val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get
@@ -276,19 +279,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
def parse(code: String): Tree = {
- val run = new Run
reporter.reset()
- val file = new BatchSourceFile("<toolbox>", code)
- val unit = new CompilationUnit(file)
- phase = run.parserPhase
- val parser = new syntaxAnalyzer.UnitParser(unit)
- val parsed = parser.templateStats()
- parser.accept(EOF)
+ val tree = gen.mkTreeOrBlock(newUnitParser(code, "<toolbox>").parseStatsOrPackages())
throwIfErrors()
- parsed match {
- case expr :: Nil => expr
- case stats :+ expr => Block(stats, expr)
- }
+ tree
}
def showAttributed(artifact: Any, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = {
@@ -309,56 +303,60 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// reporter doesn't accumulate errors, but the front-end does
def throwIfErrors() = {
- if (frontEnd.hasErrors) {
- var msg = "reflective compilation has failed: " + EOL + EOL
- msg += frontEnd.infos map (_.msg) mkString EOL
- throw ToolBoxError(msg)
- }
+ if (frontEnd.hasErrors) throw ToolBoxError(
+ "reflective compilation has failed:" + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL)
+ )
}
}
- // todo. is not going to work with quoted arguments with embedded whitespaces
- lazy val arguments = options.split(" ")
+ trait CompilerApi {
+ val compiler: ToolBoxGlobal
+ val importer: compiler.Importer { val from: u.type }
+ val exporter: u.Importer { val from: compiler.type }
+ }
- lazy val virtualDirectory =
- (arguments zip arguments.tail).collect{ case ("-d", dir) => dir }.lastOption match {
- case Some(outDir) => scala.tools.nsc.io.AbstractFile.getDirectory(outDir)
- case None => new VirtualDirectory("(memory)", None)
+ object withCompilerApi {
+ private object api extends CompilerApi {
+ lazy val compiler: ToolBoxGlobal = {
+ try {
+ val errorFn: String => Unit = msg => frontEnd.log(scala.reflect.internal.util.NoPosition, msg, frontEnd.ERROR)
+ val command = new CompilerCommand(arguments.toList, errorFn)
+ command.settings.outputDirs setSingleOutput virtualDirectory
+ val instance = new ToolBoxGlobal(command.settings, frontEndToReporter(frontEnd, command.settings))
+ if (frontEnd.hasErrors) {
+ throw ToolBoxError(
+ "reflective compilation has failed: cannot initialize the compiler:" + EOL + EOL +
+ (frontEnd.infos map (_.msg) mkString EOL)
+ )
+ }
+ instance
+ } catch {
+ case ex: Throwable =>
+ throw ToolBoxError(s"reflective compilation has failed: cannot initialize the compiler due to $ex", ex)
+ }
+ }
+
+ lazy val importer = compiler.mkImporter(u)
+ lazy val exporter = importer.reverse
}
- lazy val compiler: ToolBoxGlobal = {
- try {
- val errorFn: String => Unit = msg => frontEnd.log(scala.reflect.internal.util.NoPosition, msg, frontEnd.ERROR)
- val command = new CompilerCommand(arguments.toList, errorFn)
- val settings = command.settings
- settings.outputDirs setSingleOutput virtualDirectory
- val reporter = frontEndToReporter(frontEnd, command.settings)
- val instance =
- if (settings.Yrangepos.value) new ToolBoxGlobal(settings, reporter) with RangePositions
- else new ToolBoxGlobal(settings, reporter)
- if (frontEnd.hasErrors) {
- var msg = "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL
- msg += frontEnd.infos map (_.msg) mkString EOL
- throw ToolBoxError(msg)
- }
- instance
- } catch {
- case ex: Throwable =>
- var msg = "reflective compilation has failed: cannot initialize the compiler due to %s".format(ex.toString)
- throw ToolBoxError(msg, ex)
+ private val toolBoxLock = new Object
+ def apply[T](f: CompilerApi => T): T = toolBoxLock.synchronized {
+ try f(api)
+ catch { case ex: FatalError => throw ToolBoxError(s"fatal compiler error", ex) }
+ finally api.compiler.cleanupCaches()
}
}
- lazy val importer = compiler.mkImporter(u)
- lazy val exporter = importer.reverse
+ def typecheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = withCompilerApi { compilerApi =>
+ import compilerApi._
- def typeCheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = compiler.withCleanupCaches {
- if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType)
- var ctree: compiler.Tree = importer.importTree(tree)
- var cexpectedType: compiler.Type = importer.importType(expectedType)
+ if (compiler.settings.verbose) println("importing "+tree+", expectedType = "+expectedType)
+ val ctree: compiler.Tree = importer.importTree(tree)
+ val cexpectedType: compiler.Type = importer.importType(expectedType)
- if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType)
- val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
+ if (compiler.settings.verbose) println("typing "+ctree+", expectedType = "+expectedType)
+ val ttree: compiler.Tree = compiler.typecheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
val uttree = exporter.importTree(ttree)
uttree
}
@@ -368,51 +366,56 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree = {
- val viewTpe = u.appliedType(u.definitions.FunctionClass(1).toTypeConstructor, List(from, to))
+ val functionTypeCtor = u.definitions.FunctionClass(1).asClass.toTypeConstructor
+ val viewTpe = u.appliedType(functionTypeCtor, List(from, to))
inferImplicit(tree, viewTpe, isView = true, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = pos)
}
- private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = compiler.withCleanupCaches {
- if (compiler.settings.verbose.value) println("importing "+pt, ", tree = "+tree+", pos = "+pos)
- var ctree: compiler.Tree = importer.importTree(tree)
- var cpt: compiler.Type = importer.importType(pt)
- var cpos: compiler.Position = importer.importPosition(pos)
+ private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = withCompilerApi { compilerApi =>
+ import compilerApi._
+
+ if (compiler.settings.verbose) println(s"importing pt=$pt, tree=$tree, pos=$pos")
+ val ctree: compiler.Tree = importer.importTree(tree)
+ val cpt: compiler.Type = importer.importType(pt)
+ val cpos: compiler.Position = importer.importPosition(pos)
- if (compiler.settings.verbose.value) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled))
+ if (compiler.settings.verbose) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled))
val itree: compiler.Tree = compiler.inferImplicit(ctree, cpt, isView = isView, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = cpos)
val uitree = exporter.importTree(itree)
uitree
}
- def resetAllAttrs(tree: u.Tree): u.Tree = {
+ def resetAllAttrs(tree: u.Tree): u.Tree = withCompilerApi { compilerApi =>
+ import compilerApi._
val ctree: compiler.Tree = importer.importTree(tree)
val ttree: compiler.Tree = compiler.resetAllAttrs(ctree)
val uttree = exporter.importTree(ttree)
uttree
}
- def resetLocalAttrs(tree: u.Tree): u.Tree = {
+ def resetLocalAttrs(tree: u.Tree): u.Tree = withCompilerApi { compilerApi =>
+ import compilerApi._
val ctree: compiler.Tree = importer.importTree(tree)
val ttree: compiler.Tree = compiler.resetLocalAttrs(ctree)
val uttree = exporter.importTree(ttree)
uttree
}
- def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
- compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds)
-
- def parse(code: String): u.Tree = {
- if (compiler.settings.verbose.value) println("parsing "+code)
+ def parse(code: String): u.Tree = withCompilerApi { compilerApi =>
+ import compilerApi._
+ if (compiler.settings.verbose) println("parsing "+code)
val ctree: compiler.Tree = compiler.parse(code)
val utree = exporter.importTree(ctree)
utree
}
- def compile(tree: u.Tree): () => Any = {
- if (compiler.settings.verbose.value) println("importing "+tree)
+ def compile(tree: u.Tree): () => Any = withCompilerApi { compilerApi =>
+ import compilerApi._
+
+ if (compiler.settings.verbose) println("importing "+tree)
val ctree: compiler.Tree = importer.importTree(tree)
- if (compiler.settings.verbose.value) println("compiling "+ctree)
+ if (compiler.settings.verbose) println("compiling "+ctree)
compiler.compile(ctree)
}
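
The factory now tokenizes its options with CommandLineParser (so quoted arguments survive), honours -d by compiling into a real directory instead of the in-memory VirtualDirectory, and funnels all compiler access through withCompilerApi, which serializes callers and runs cleanupCaches afterwards. A hedged sketch of forwarding such options from user code; the output directory below is illustrative and assumed to exist.

  // Hedged sketch: forwarding compiler options to the toolbox (directory name is illustrative).
  import scala.reflect.runtime.{currentMirror => cm}
  import scala.tools.reflect.ToolBox

  object ToolBoxOptionsDemo extends App {
    // Arguments with embedded whitespace are now tokenized correctly.
    val tb = cm.mkToolBox(options = "-d /tmp/toolbox-out -deprecation")
    tb.eval(tb.parse("""println("compiled via a -d-backed toolbox")"""))
  }
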
diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala
index 7ce0171c0b..523287fc66 100644
--- a/src/compiler/scala/tools/reflect/WrappedProperties.scala
+++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala
@@ -25,6 +25,7 @@ trait WrappedProperties extends PropertiesTrait {
override def clearProp(name: String) = wrap(super.clearProp(name)).orNull
override def envOrElse(name: String, alt: String) = wrap(super.envOrElse(name, alt)) getOrElse alt
override def envOrNone(name: String) = wrap(super.envOrNone(name)).flatten
+ override def envOrSome(name: String, alt: Option[String]) = wrap(super.envOrNone(name)).flatten orElse alt
def systemProperties: List[(String, String)] = {
import scala.collection.JavaConverters._
diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala
index 3f880bf7f8..1055894121 100644
--- a/src/compiler/scala/tools/reflect/package.scala
+++ b/src/compiler/scala/tools/reflect/package.scala
@@ -32,7 +32,7 @@ package object reflect {
/** Creates a reporter that prints messages to the console according to the settings.
*
- * ``minSeverity'' determines minimum severity of the messages to be printed.
+ * `minSeverity` determines minimum severity of the messages to be printed.
* 0 stands for INFO, 1 stands for WARNING and 2 stands for ERROR.
*/
// todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
@@ -52,7 +52,7 @@ package object reflect {
override def hasWarnings = reporter.hasWarnings
def display(info: Info): Unit = info.severity match {
- case API_INFO => reporter.info(info.pos, info.msg, false)
+ case API_INFO => reporter.info(info.pos, info.msg, force = false)
case API_WARNING => reporter.warning(info.pos, info.msg)
case API_ERROR => reporter.error(info.pos, info.msg)
}
@@ -76,7 +76,6 @@ package object reflect {
private[reflect] def frontEndToReporter(frontEnd: FrontEnd, settings0: Settings): Reporter = new AbstractReporter {
val settings = settings0
- import frontEnd.{Severity => ApiSeverity}
val API_INFO = frontEnd.INFO
val API_WARNING = frontEnd.WARNING
val API_ERROR = frontEnd.ERROR
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
new file mode 100644
index 0000000000..8a54519401
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
@@ -0,0 +1,204 @@
+package scala.tools.reflect
+package quasiquotes
+
+import scala.collection.{immutable, mutable}
+import scala.reflect.internal.Flags._
+import scala.reflect.macros.TypecheckException
+
+class Cardinality private[Cardinality](val value: Int) extends AnyVal {
+ def pred = { assert(value - 1 >= 0); new Cardinality(value - 1) }
+ def succ = new Cardinality(value + 1)
+ override def toString = if (value == 0) "no dots" else "." * (value + 1)
+}
+
+object Cardinality {
+ val NoDot = new Cardinality(0)
+ val DotDot = new Cardinality(1)
+ val DotDotDot = new Cardinality(2)
+ object Dot { def unapply(card: Cardinality) = card != NoDot }
+ def parseDots(part: String) = {
+ if (part.endsWith("...")) (part.stripSuffix("..."), DotDotDot)
+ else if (part.endsWith("..")) (part.stripSuffix(".."), DotDot)
+ else (part, NoDot)
+ }
+}
+
+/** Defines abstractions that provide support for splicing into Scala syntax.
+ */
+trait Holes { self: Quasiquotes =>
+ import global._
+ import Cardinality._
+ import definitions._
+ import universeTypes._
+
+ protected lazy val IterableTParam = IterableClass.typeParams(0).asType.toType
+ protected def inferParamImplicit(tfun: Type, targ: Type) = c.inferImplicitValue(appliedType(tfun, List(targ)), silent = true)
+ protected def inferLiftable(tpe: Type): Tree = inferParamImplicit(liftableType, tpe)
+ protected def inferUnliftable(tpe: Type): Tree = inferParamImplicit(unliftableType, tpe)
+ protected def isLiftableType(tpe: Type) = inferLiftable(tpe) != EmptyTree
+ protected def isNativeType(tpe: Type) =
+ (tpe <:< treeType) || (tpe <:< nameType) || (tpe <:< modsType) ||
+ (tpe <:< flagsType) || (tpe <:< symbolType)
+ protected def isBottomType(tpe: Type) =
+ tpe <:< NothingClass.tpe || tpe <:< NullClass.tpe
+ protected def stripIterable(tpe: Type, limit: Option[Cardinality] = None): (Cardinality, Type) =
+ if (limit.map { _ == NoDot }.getOrElse { false }) (NoDot, tpe)
+ else if (tpe != null && !isIterableType(tpe)) (NoDot, tpe)
+ else if (isBottomType(tpe)) (NoDot, tpe)
+ else {
+ val targ = IterableTParam.asSeenFrom(tpe, IterableClass)
+ val (card, innerTpe) = stripIterable(targ, limit.map { _.pred })
+ (card.succ, innerTpe)
+ }
+ protected def iterableTypeFromCard(n: Cardinality, tpe: Type): Type = {
+ if (n == NoDot) tpe
+ else appliedType(IterableClass.toType, List(iterableTypeFromCard(n.pred, tpe)))
+ }
+
+ /** Hole encapsulates information about splices in quasiquotes.
+ * It packs together the cardinality of a splice, its pre-reified tree
+ * representation (possibly preprocessed) and its position.
+ */
+ abstract class Hole {
+ val tree: Tree
+ val pos: Position
+ val cardinality: Cardinality
+ }
+
+ object Hole {
+ def apply(card: Cardinality, tree: Tree): Hole =
+ if (method != nme.unapply) new ApplyHole(card, tree)
+ else new UnapplyHole(card, tree)
+ def unapply(hole: Hole): Some[(Tree, Cardinality)] = Some((hole.tree, hole.cardinality))
+ }
+
+ class ApplyHole(card: Cardinality, splicee: Tree) extends Hole {
+ val (strippedTpe, tpe): (Type, Type) = {
+ if (stripIterable(splicee.tpe)._1.value < card.value) cantSplice()
+ val (_, strippedTpe) = stripIterable(splicee.tpe, limit = Some(card))
+ if (isBottomType(strippedTpe)) cantSplice()
+ else if (isNativeType(strippedTpe)) (strippedTpe, iterableTypeFromCard(card, strippedTpe))
+ else if (isLiftableType(strippedTpe)) (strippedTpe, iterableTypeFromCard(card, treeType))
+ else cantSplice()
+ }
+
+ val tree = {
+ def inner(itpe: Type)(tree: Tree) =
+ if (isNativeType(itpe)) tree
+ else if (isLiftableType(itpe)) lifted(itpe)(tree)
+ else global.abort("unreachable")
+ if (card == NoDot) inner(strippedTpe)(splicee)
+ else iterated(card, strippedTpe, inner(strippedTpe))(splicee)
+ }
+
+ val pos = splicee.pos
+
+ val cardinality = stripIterable(tpe)._1
+
+ protected def cantSplice(): Nothing = {
+ val (iterableCard, iterableType) = stripIterable(splicee.tpe)
+ val holeCardMsg = if (card != NoDot) s" with $card" else ""
+ val action = "splice " + splicee.tpe + holeCardMsg
+ val suggestCard = card != iterableCard || card != NoDot
+ val spliceeCardMsg = if (card != iterableCard && iterableCard != NoDot) s"using $iterableCard" else "omitting the dots"
+ val cardSuggestion = if (suggestCard) spliceeCardMsg else ""
+ val suggestLifting = (card == NoDot || iterableCard != NoDot) && !(iterableType <:< treeType) && !isLiftableType(iterableType)
+ val liftedTpe = if (card != NoDot) iterableType else splicee.tpe
+ val liftSuggestion = if (suggestLifting) s"providing an implicit instance of Liftable[$liftedTpe]" else ""
+ val advice =
+ if (isBottomType(iterableType)) "bottom type values often indicate programmer mistake"
+ else "consider " + List(cardSuggestion, liftSuggestion).filter(_ != "").mkString(" or ")
+ c.abort(splicee.pos, s"Can't $action, $advice")
+ }
+
+ protected def lifted(tpe: Type)(tree: Tree): Tree = {
+ val lifter = inferLiftable(tpe)
+ assert(lifter != EmptyTree, s"couldn't find a liftable for $tpe")
+ val lifted = Apply(lifter, List(tree))
+ atPos(tree.pos)(lifted)
+ }
+
+ protected def iterated(card: Cardinality, tpe: Type, elementTransform: Tree => Tree = identity)(tree: Tree): Tree = {
+ assert(card != NoDot)
+ def reifyIterable(tree: Tree, n: Cardinality): Tree = {
+ def loop(tree: Tree, n: Cardinality): Tree =
+ if (n == NoDot) elementTransform(tree)
+ else {
+ val x: TermName = c.freshName()
+ val wrapped = reifyIterable(Ident(x), n.pred)
+ val xToWrapped = Function(List(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree)), wrapped)
+ Select(Apply(Select(tree, nme.map), List(xToWrapped)), nme.toList)
+ }
+ if (tree.tpe != null && (tree.tpe <:< listTreeType || tree.tpe <:< listListTreeType)) tree
+ else atPos(tree.pos)(loop(tree, n))
+ }
+ reifyIterable(tree, card)
+ }
+ }
+
+ class UnapplyHole(val cardinality: Cardinality, pat: Tree) extends Hole {
+ val (placeholderName, pos, tptopt) = pat match {
+ case Bind(pname, inner @ Bind(_, Typed(Ident(nme.WILDCARD), tpt))) => (pname, inner.pos, Some(tpt))
+ case Bind(pname, inner @ Typed(Ident(nme.WILDCARD), tpt)) => (pname, inner.pos, Some(tpt))
+ case Bind(pname, inner) => (pname, inner.pos, None)
+ }
+ val treeNoUnlift = Bind(placeholderName, Ident(nme.WILDCARD))
+ lazy val tree =
+ tptopt.map { tpt =>
+ val TypeDef(_, _, _, typedTpt) =
+ try c.typeCheck(TypeDef(NoMods, TypeName("T"), Nil, tpt))
+ catch { case TypecheckException(pos, msg) => c.abort(pos.asInstanceOf[c.Position], msg) }
+ val tpe = typedTpt.tpe
+ val (iterableCard, _) = stripIterable(tpe)
+ if (iterableCard.value < cardinality.value)
+ c.abort(pat.pos, s"Can't extract $tpe with $cardinality, consider using $iterableCard")
+ val (_, strippedTpe) = stripIterable(tpe, limit = Some(cardinality))
+ if (strippedTpe <:< treeType) treeNoUnlift
+ else
+ unlifters.spawn(strippedTpe, cardinality).map {
+ Apply(_, treeNoUnlift :: Nil)
+ }.getOrElse {
+ c.abort(pat.pos, s"Can't find $unliftableType[$strippedTpe], consider providing it")
+ }
+ }.getOrElse { treeNoUnlift }
+ }
+
+ /** Full support for unliftable implies that it's possible to interleave
+ * deconstruction with higher cardinality and unlifting of the values.
+ * In particular, extraction of List[Tree] as List[T: Unliftable] requires
+ * helper extractors that do the job: UnliftListElementwise[T]. Similarly,
+ * List[List[Tree]] needs UnliftListOfListsElementwise[T].
+ *
+ * See also "unlift list" tests in UnapplyProps.scala
+ */
+ object unlifters {
+ private var records = List.empty[(Type, Cardinality)]
+ // Materializes an unlift helper that does elementwise
+ // unlifting for the corresponding cardinality and type.
+ def spawn(tpe: Type, card: Cardinality): Option[Tree] = {
+ val unlifter = inferUnliftable(tpe)
+ if (unlifter == EmptyTree) None
+ else if (card == NoDot) Some(unlifter)
+ else {
+ val idx = records.indexWhere { p => p._1 =:= tpe && p._2 == card }
+ val resIdx = if (idx != -1) idx else { records +:= (tpe, card); records.length - 1}
+ Some(Ident(TermName(nme.QUASIQUOTE_UNLIFT_HELPER + resIdx)))
+ }
+ }
+ // Returns a list of vals that will define the required unlifters
+ def preamble(): List[Tree] =
+ records.zipWithIndex.map { case ((tpe, card), idx) =>
+ val name = TermName(nme.QUASIQUOTE_UNLIFT_HELPER + idx)
+ val helperName = card match {
+ case DotDot => nme.UnliftListElementwise
+ case DotDotDot => nme.UnliftListOfListsElementwise
+ }
+ val lifter = inferUnliftable(tpe)
+ assert(helperName.isTermName)
+ // q"val $name: $u.build.${helperName.toTypeName} = $u.build.$helperName($lifter)"
+ ValDef(NoMods, name,
+ AppliedTypeTree(Select(Select(u, nme.build), helperName.toTypeName), List(TypeTree(tpe))),
+ Apply(Select(Select(u, nme.build), helperName), lifter :: Nil))
+ }
+ }
+}
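
From the user's perspective, the cardinality machinery above is what distinguishes plain splices from ..$ and ...$ ones and what triggers the Liftable fallback. A brief sketch against the public quasiquote API (illustrative, assuming 2.11 quasiquotes):

  // Cardinalities as seen from user code (illustrative, not part of the patch).
  import scala.reflect.runtime.universe._

  object CardinalityDemo extends App {
    val arg   = q"x"                          // Tree, spliced with no dots
    val args  = List(q"1", q"2")              // Iterable[Tree], spliced with ..
    val argss = List(List(q"1"), List(q"2"))  // Iterable[Iterable[Tree]], spliced with ...
    println(q"f($arg)")
    println(q"f(..$args)")
    println(q"f(...$argss)")
    println(q"f(${42})")                      // non-Tree values go through an implicit Liftable[Int]
  }
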
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
new file mode 100644
index 0000000000..1bd9323752
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -0,0 +1,196 @@
+package scala.tools.reflect
+package quasiquotes
+
+import scala.tools.nsc.ast.parser.{Parsers => ScalaParser}
+import scala.tools.nsc.ast.parser.Tokens._
+import scala.compat.Platform.EOL
+import scala.reflect.internal.util.{BatchSourceFile, SourceFile, FreshNameCreator}
+import scala.collection.mutable.ListBuffer
+import scala.util.Try
+
+/** Builds upon the vanilla Scala parser and teams up with Placeholders.scala to emulate holes.
+ * A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
+ * Unfortunately, that's outside of our reach in Scala 2.11, so we have to emulate.
+ */
+trait Parsers { self: Quasiquotes =>
+ import global.{Try => _, _}
+
+ abstract class Parser extends {
+ val global: self.global.type = self.global
+ } with ScalaParser {
+ def parse(code: String): Tree = {
+ try {
+ val file = new BatchSourceFile(nme.QUASIQUOTE_FILE, code)
+ new QuasiquoteParser(file).parseRule(entryPoint)
+ } catch {
+ case mi: MalformedInput => c.abort(correspondingPosition(mi.offset), mi.msg)
+ }
+ }
+
+ def correspondingPosition(offset: Int): Position = {
+ val posMapList = posMap.toList
+ def containsOffset(start: Int, end: Int) = start <= offset && offset <= end
+ def fallbackPosition = posMapList match {
+ case (pos1, (start1, end1)) :: _ if start1 > offset => pos1
+ case _ :+ ((pos2, (start2, end2))) if offset > end2 => pos2.withPoint(pos2.point + (end2 - start2))
+ }
+ posMapList.sliding(2).collect {
+ case (pos1, (start1, end1)) :: _ if containsOffset(start1, end1) => (pos1, offset - start1)
+ case (pos1, (_, end1)) :: (_, (start2, _)) :: _ if containsOffset(end1, start2) => (pos1, end1)
+ case _ :: (pos2, (start2, end2)) :: _ if containsOffset(start2, end2) => (pos2, offset - start2)
+ }.map { case (pos, offset) =>
+ pos.withPoint(pos.point + offset)
+ }.toList.headOption.getOrElse(fallbackPosition)
+ }
+
+ override def token2string(token: Int): String = token match {
+ case EOF => "end of quote"
+ case _ => super.token2string(token)
+ }
+
+ def entryPoint: QuasiquoteParser => Tree
+
+ class QuasiquoteParser(source0: SourceFile) extends SourceFileParser(source0) { parser =>
+ def isHole: Boolean = isIdent && isHole(in.name)
+
+ def isHole(name: Name): Boolean = holeMap.contains(name)
+
+ override implicit lazy val fresh: FreshNameCreator = new FreshNameCreator(nme.QUASIQUOTE_PREFIX)
+
+ override val treeBuilder = new ParserTreeBuilder {
+ override implicit def fresh: FreshNameCreator = parser.fresh
+
+ // q"(..$xs)"
+ override def makeTupleTerm(trees: List[Tree]): Tree =
+ Apply(Ident(nme.QUASIQUOTE_TUPLE), trees)
+
+ // tq"(..$xs)"
+ override def makeTupleType(trees: List[Tree]): Tree =
+ AppliedTypeTree(Ident(tpnme.QUASIQUOTE_TUPLE), trees)
+
+ // q"{ $x }"
+ override def makeBlock(stats: List[Tree]): Tree = stats match {
+ case (head @ Ident(name)) :: Nil if isHole(name) => Block(Nil, head)
+ case _ => super.makeBlock(stats)
+ }
+
+ // tq"$a => $b"
+ override def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
+ AppliedTypeTree(Ident(tpnme.QUASIQUOTE_FUNCTION), argtpes :+ restpe)
+ }
+ import treeBuilder.{global => _, unit => _, _}
+
+ // q"def foo($x)"
+ override def allowTypelessParams = true
+
+ // q"foo match { case $x }"
+ override def caseClause(): CaseDef =
+ if (isHole && lookingAhead { in.token == CASE || in.token == RBRACE || in.token == SEMI }) {
+ val c = makeCaseDef(Apply(Ident(nme.QUASIQUOTE_CASE), List(Ident(ident()))), EmptyTree, EmptyTree)
+ while (in.token == SEMI) in.nextToken()
+ c
+ } else
+ super.caseClause()
+
+ override def caseBlock(): Tree = super.caseBlock() match {
+ case Block(Nil, expr) => expr
+ case other => other
+ }
+
+ override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
+
+ override def isModifier: Boolean = super.isModifier || (isHole && lookingAhead { isModifier })
+
+ override def isLocalModifier: Boolean = super.isLocalModifier || (isHole && lookingAhead { isLocalModifier })
+
+ override def isTemplateIntro: Boolean = super.isTemplateIntro || (isHole && lookingAhead { isTemplateIntro })
+
+ override def isDclIntro: Boolean = super.isDclIntro || (isHole && lookingAhead { isDclIntro })
+
+ override def isStatSep(token: Int) = token == EOF || super.isStatSep(token)
+
+ override def expectedMsg(token: Int): String =
+ if (isHole) expectedMsgTemplate(token2string(token), "splicee")
+ else super.expectedMsg(token)
+
+ // $mods def foo
+ // $mods T
+ override def readAnnots(annot: => Tree): List[Tree] = in.token match {
+ case AT =>
+ in.nextToken()
+ annot :: readAnnots(annot)
+ case _ if isHole && lookingAhead { isAnnotation || isModifier || isDefIntro || isIdent || isStatSep || in.token == LPAREN } =>
+ val ann = Apply(Select(New(Ident(tpnme.QUASIQUOTE_MODS)), nme.CONSTRUCTOR), List(Literal(Constant(in.name.toString))))
+ in.nextToken()
+ ann :: readAnnots(annot)
+ case _ =>
+ Nil
+ }
+
+ override def refineStat(): List[Tree] =
+ if (isHole && !isDclIntro) {
+ val result = ValDef(NoMods, in.name, Ident(tpnme.QUASIQUOTE_REFINE_STAT), EmptyTree) :: Nil
+ in.nextToken()
+ result
+ } else super.refineStat()
+
+ override def ensureEarlyDef(tree: Tree) = tree match {
+ case Ident(name: TermName) if isHole(name) => ValDef(NoMods | Flag.PRESUPER, name, Ident(tpnme.QUASIQUOTE_EARLY_DEF), EmptyTree)
+ case _ => super.ensureEarlyDef(tree)
+ }
+
+ override def isTypedParam(tree: Tree) = super.isTypedParam(tree) || (tree match {
+ case Ident(name) if isHole(name) => true
+ case _ => false
+ })
+
+ override def topStat = super.topStat.orElse {
+ case _ if isHole =>
+ val stats = ValDef(NoMods, in.name, Ident(tpnme.QUASIQUOTE_PACKAGE_STAT), EmptyTree) :: Nil
+ in.nextToken()
+ stats
+ }
+
+ override def enumerator(isFirst: Boolean, allowNestedIf: Boolean = true) =
+ if (isHole && lookingAhead { in.token == EOF || in.token == RPAREN || isStatSep }) {
+ val res = build.SyntacticValFrom(Bind(in.name, Ident(nme.WILDCARD)), Ident(nme.QUASIQUOTE_FOR_ENUM)) :: Nil
+ in.nextToken()
+ res
+ } else super.enumerator(isFirst, allowNestedIf)
+ }
+ }
+
+ object TermParser extends Parser {
+ def entryPoint = { parser =>
+ parser.templateOrTopStatSeq() match {
+ case head :: Nil => Block(Nil, head)
+ case lst => gen.mkTreeOrBlock(lst)
+ }
+ }
+ }
+
+ object TypeParser extends Parser {
+ def entryPoint = _.typ()
+ }
+
+ object CaseParser extends Parser {
+ def entryPoint = _.caseClause()
+ }
+
+ object PatternParser extends Parser {
+ def entryPoint = { parser =>
+ val pat = parser.noSeq.pattern1()
+ gen.patvarTransformer.transform(pat)
+ }
+ }
+
+ object ForEnumeratorParser extends Parser {
+ def entryPoint = { parser =>
+ val enums = parser.enumerator(isFirst = false, allowNestedIf = false)
+ assert(enums.length == 1)
+ enums.head
+ }
+ }
+
+ object FreshName extends FreshNameExtractor(nme.QUASIQUOTE_PREFIX)
+}
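
The overrides above exist so that holes are accepted where the vanilla grammar would reject an identifier: modifier and annotation slots, case clauses, refinement statements, early definitions, top-level statements and for-comprehension enumerators. A short user-level sketch of the corresponding surface syntax (illustrative):

  // Surface syntax made possible by the parser overrides (illustrative).
  import scala.reflect.runtime.universe._

  object HolePositionsDemo extends App {
    val mods = Modifiers(Flag.FINAL)
    println(q"$mods def foo = 1")                         // hole in modifier position (readAnnots)
    val cases = List(cq"0 => true", cq"_ => false")
    println(q"x match { case ..$cases }")                 // holes as case clauses (caseClause)
    val enums = List(fq"x <- 1 to 3", fq"if x % 2 == 1")
    println(q"for (..$enums) yield x")                    // holes as enumerators (enumerator)
  }
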
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
new file mode 100644
index 0000000000..bdb44ad9a2
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -0,0 +1,177 @@
+package scala.tools.reflect
+package quasiquotes
+
+import java.util.UUID.randomUUID
+import scala.collection.{immutable, mutable}
+
+/** Emulates hole support (see Holes.scala) in the quasiquote parser (see Parsers.scala).
+ * A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
+ * Unfortunately, that's outside of our reach in Scala 2.11, so we have to emulate.
+ * This trait stores knowledge of how to represent the holes as something understandable by the parser
+ * and how to recover holes from the results of parsing the produced representation.
+ */
+trait Placeholders { self: Quasiquotes =>
+ import global._
+ import Cardinality._
+ import universeTypes._
+
+ // Step 1: Transform Scala source with holes into vanilla Scala source
+
+ lazy val holeMap = new HoleMap()
+ lazy val posMap = mutable.ListMap[Position, (Int, Int)]()
+ lazy val code = {
+ val sb = new StringBuilder()
+ val sessionSuffix = randomUUID().toString.replace("-", "").substring(0, 8) + "$"
+
+ def appendPart(value: String, pos: Position) = {
+ val start = sb.length
+ sb.append(value)
+ val end = sb.length
+ posMap += pos -> ((start, end))
+ }
+
+ def appendHole(tree: Tree, cardinality: Cardinality) = {
+ val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX + sessionSuffix))
+ sb.append(placeholderName)
+ val holeTree =
+ if (method != nme.unapply) tree
+ else Bind(placeholderName, tree)
+ holeMap(placeholderName) = Hole(cardinality, holeTree)
+ }
+
+ val iargs = method match {
+ case nme.apply => args
+ case nme.unapply =>
+ val (dummy @ Ident(nme.SELECTOR_DUMMY)) :: Nil = args
+ dummy.attachments.get[SubpatternsAttachment].get.patterns
+ case _ => global.abort("unreachable")
+ }
+
+ foreach2(iargs, parts.init) { case (tree, (p, pos)) =>
+ val (part, cardinality) = parseDots(p)
+ appendPart(part, pos)
+ appendHole(tree, cardinality)
+ }
+ val (p, pos) = parts.last
+ appendPart(p, pos)
+
+ sb.toString
+ }
+
+ class HoleMap {
+ private var underlying = immutable.SortedMap[String, Hole]()
+ private val accessed = mutable.Set[String]()
+ def unused: Set[Name] = (underlying.keys.toSet -- accessed).map(TermName(_))
+ def contains(key: Name) = underlying.contains(key.toString)
+ def apply(key: Name) = {
+ val s = key.toString
+ accessed += s
+ underlying(s)
+ }
+ def update(key: Name, hole: Hole) = {
+ underlying += key.toString -> hole
+ }
+ def get(key: Name) = {
+ val s = key.toString
+ accessed += s
+ underlying.get(s)
+ }
+ def toList = underlying.toList
+ }
+
+ // Step 2: Transform vanilla Scala AST into an AST with holes
+
+ trait HolePlaceholder {
+ def matching: PartialFunction[Any, Name]
+ def unapply(scrutinee: Any): Option[Hole] = {
+ val name = matching.lift(scrutinee)
+ name.flatMap { holeMap.get(_) }
+ }
+ }
+
+ object Placeholder extends HolePlaceholder {
+ def matching = {
+ case name: Name => name
+ case Ident(name) => name
+ case Bind(name, Ident(nme.WILDCARD)) => name
+ case TypeDef(_, name, List(), TypeBoundsTree(EmptyTree, EmptyTree)) => name
+ case ValDef(_, name, TypeTree(), EmptyTree) => name
+ }
+ }
+
+ object ModsPlaceholder extends HolePlaceholder {
+ def matching = {
+ case Apply(Select(New(Ident(tpnme.QUASIQUOTE_MODS)), nme.CONSTRUCTOR), List(Literal(Constant(s: String)))) => TermName(s)
+ }
+ }
+
+ object AnnotPlaceholder extends HolePlaceholder {
+ def matching = {
+ case Apply(Select(New(Ident(name)), nme.CONSTRUCTOR), Nil) => name
+ }
+ }
+
+ object TuplePlaceholder {
+ def unapply(tree: Tree): Option[List[Tree]] = tree match {
+ case Apply(Ident(nme.QUASIQUOTE_TUPLE), args) => Some(args)
+ case _ => None
+ }
+ }
+
+ object TupleTypePlaceholder {
+ def unapply(tree: Tree): Option[List[Tree]] = tree match {
+ case AppliedTypeTree(Ident(tpnme.QUASIQUOTE_TUPLE), args) => Some(args)
+ case _ => None
+ }
+ }
+
+ object FunctionTypePlaceholder {
+ def unapply(tree: Tree): Option[(List[Tree], Tree)] = tree match {
+ case AppliedTypeTree(Ident(tpnme.QUASIQUOTE_FUNCTION), args :+ res) => Some((args, res))
+ case _ => None
+ }
+ }
+
+ object SymbolPlaceholder {
+ def unapply(scrutinee: Any): Option[Hole] = scrutinee match {
+ case Placeholder(hole: ApplyHole) if hole.tpe <:< symbolType => Some(hole)
+ case _ => None
+ }
+ }
+
+ object CasePlaceholder {
+ def unapply(tree: Tree): Option[Hole] = tree match {
+ case CaseDef(Apply(Ident(nme.QUASIQUOTE_CASE), List(Placeholder(hole))), EmptyTree, EmptyTree) => Some(hole)
+ case _ => None
+ }
+ }
+
+ object RefineStatPlaceholder {
+ def unapply(tree: Tree): Option[Hole] = tree match {
+ case ValDef(_, Placeholder(hole), Ident(tpnme.QUASIQUOTE_REFINE_STAT), _) => Some(hole)
+ case _ => None
+ }
+ }
+
+ object EarlyDefPlaceholder {
+ def unapply(tree: Tree): Option[Hole] = tree match {
+ case ValDef(_, Placeholder(hole), Ident(tpnme.QUASIQUOTE_EARLY_DEF), _) => Some(hole)
+ case _ => None
+ }
+ }
+
+ object PackageStatPlaceholder {
+ def unapply(tree: Tree): Option[Hole] = tree match {
+ case ValDef(NoMods, Placeholder(hole), Ident(tpnme.QUASIQUOTE_PACKAGE_STAT), EmptyTree) => Some(hole)
+ case _ => None
+ }
+ }
+
+ object ForEnumPlaceholder {
+ def unapply(tree: Tree): Option[Hole] = tree match {
+ case build.SyntacticValFrom(Bind(Placeholder(hole), Ident(nme.WILDCARD)), Ident(nme.QUASIQUOTE_FOR_ENUM)) =>
+ Some(hole)
+ case _ => None
+ }
+ }
+} \ No newline at end of file
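
The extractors above are also what make deconstruction work: in unapply position each hole is parsed as a placeholder and later recovered as a Bind. A small pattern-side sketch (illustrative):

  // Pattern-side usage backed by the placeholder extractors (illustrative).
  import scala.reflect.runtime.universe._

  object UnapplyDemo extends App {
    val q"$fun($arg)" = q"println(42)"
    println(fun)   // println
    println(arg)   // 42
    val q"def $name(..$params): $tpt = $body" = q"def inc(x: Int): Int = x + 1"
    println(name)  // inc
  }
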
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
new file mode 100644
index 0000000000..7d777ef7d5
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -0,0 +1,60 @@
+package scala.tools.reflect
+package quasiquotes
+
+import scala.reflect.macros.runtime.Context
+
+abstract class Quasiquotes extends Parsers
+ with Holes
+ with Placeholders
+ with Reifiers {
+ val c: Context
+ val global: c.universe.type = c.universe
+ import c.universe._
+
+ def debug(msg: String): Unit =
+ if (settings.Yquasiquotedebug.value) println(msg)
+
+ lazy val (universe: Tree, args, parts, parse, reify, method) = c.macroApplication match {
+ case Apply(build.SyntacticTypeApplied(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), _), args0) =>
+ debug(s"\nparse prefix:\nuniverse=$universe0\nparts=$parts0\ninterpolator=$interpolator0\nmethod=$method0\nargs=$args0\n")
+ val parts1 = parts0.map {
+ case lit @ Literal(Constant(s: String)) => s -> lit.pos
+ case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
+ }
+ val reify0 = method0 match {
+ case nme.apply => new ApplyReifier().reifyFillingHoles(_)
+ case nme.unapply => new UnapplyReifier().reifyFillingHoles(_)
+ case other => global.abort(s"Unknown quasiquote api method: $other")
+ }
+ val parse0 = interpolator0 match {
+ case nme.q => TermParser.parse(_)
+ case nme.tq => TypeParser.parse(_)
+ case nme.cq => CaseParser.parse(_)
+ case nme.pq => PatternParser.parse(_)
+ case nme.fq => ForEnumeratorParser.parse(_)
+ case other => global.abort(s"Unknown quasiquote flavor: $other")
+ }
+ (universe0, args0, parts1, parse0, reify0, method0)
+ case _ =>
+ global.abort(s"Couldn't parse call prefix tree ${c.macroApplication}.")
+ }
+
+ lazy val u = universe // shortcut
+ lazy val universeTypes = new definitions.UniverseDependentTypes(universe)
+
+ def expandQuasiquote = {
+ debug(s"\nmacro application:\n${c.macroApplication}\n")
+ debug(s"\ncode to parse:\n$code\n")
+ val tree = parse(code)
+ debug(s"parsed:\n${showRaw(tree)}\n$tree\n")
+ val reified = reify(tree)
+ val sreified =
+ reified
+ .toString
+ .replace("scala.reflect.runtime.`package`.universe.build.", "")
+ .replace("scala.reflect.runtime.`package`.universe.", "")
+ .replace("scala.collection.immutable.", "")
+ debug(s"reified tree:\n$sreified\n")
+ reified
+ }
+}
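
The dispatcher above routes each interpolator to its parser. At the use site these are the five quasiquote flavors (illustrative sketch):

  // The five interpolator flavors routed by the dispatcher above (illustrative).
  import scala.reflect.runtime.universe._

  object FlavorsDemo extends App {
    println(q"List(1, 2, 3)")        // terms           -> TermParser
    println(tq"Map[Int, String]")    // types           -> TypeParser
    println(cq"x if x > 0 => x")     // case clauses    -> CaseParser
    println(pq"(a, b)")              // patterns        -> PatternParser
    println(fq"x <- 1 to 10")        // for enumerators -> ForEnumeratorParser
  }
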
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
new file mode 100644
index 0000000000..87ab52414c
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -0,0 +1,423 @@
+package scala.tools.reflect
+package quasiquotes
+
+import java.lang.UnsupportedOperationException
+import scala.reflect.reify.{Reifier => ReflectReifier}
+import scala.reflect.internal.Flags._
+
+trait Reifiers { self: Quasiquotes =>
+ import global._
+ import global.build.{Select => _, Ident => _, TypeTree => _, _}
+ import global.treeInfo._
+ import global.definitions._
+ import Cardinality._
+ import universeTypes._
+
+ abstract class Reifier(val isReifyingExpressions: Boolean) extends {
+ val global: self.global.type = self.global
+ val universe = self.universe
+ val reifee = EmptyTree
+ val mirror = EmptyTree
+ val concrete = false
+ } with ReflectReifier {
+ lazy val typer = throw new UnsupportedOperationException
+
+ def isReifyingPatterns: Boolean = !isReifyingExpressions
+ def action = if (isReifyingExpressions) "splice" else "extract"
+ def holesHaveTypes = isReifyingExpressions
+
+ /** Map that stores freshly generated names linked to the corresponding names in the reified tree.
+ * This information is used to reify names created by calls to freshTermName and freshTypeName.
+ */
+ var nameMap = collection.mutable.HashMap.empty[Name, Set[TermName]].withDefault { _ => Set() }
+
+ /** Wraps expressions into:
+ * a block which starts with a sequence of vals that correspond
+ * to fresh names that have to be created at evaluation of the quasiquote
+ * and ends with the reified tree:
+ *
+ * {
+ * val name$1: universe.TermName = universe.build.freshTermName(prefix1)
+ * ...
+ * val name$N: universe.TermName = universe.build.freshTermName(prefixN)
+ * tree
+ * }
+ *
+ * Wraps patterns into:
+ * a call to an anonymous class's unapply method, as required by unapply macro expansion:
+ *
+ * new {
+ * def unapply(tree) = tree match {
+ * case pattern if guard => Some(result)
+ * case _ => None
+ * }
+ * }.unapply(<unapply-selector>)
+ *
+ * where pattern corresponds to the reified tree and guard is a conjunction of equalities
+ * checking that the names within each set in nameMap.values are equal to each other.
+ */
+ def wrap(tree: Tree) =
+ if (isReifyingExpressions) {
+ val freshdefs = nameMap.iterator.map {
+ case (origname, names) =>
+ assert(names.size == 1)
+ val FreshName(prefix) = origname
+ val nameTypeName = if (origname.isTermName) tpnme.TermName else tpnme.TypeName
+ val freshName = if (origname.isTermName) nme.freshTermName else nme.freshTypeName
+ // q"val ${names.head}: $u.$nameTypeName = $u.build.$freshName($prefix)"
+ ValDef(NoMods, names.head, Select(u, nameTypeName),
+ Apply(Select(Select(u, nme.build), freshName), Literal(Constant(prefix)) :: Nil))
+ }.toList
+ // q"..$freshdefs; $tree"
+ SyntacticBlock(freshdefs :+ tree)
+ } else {
+ val freevars = holeMap.toList.map { case (name, _) => Ident(name) }
+ val isVarPattern = tree match { case Bind(name, Ident(nme.WILDCARD)) => true case _ => false }
+ val cases =
+ if(isVarPattern) {
+ val Ident(name) :: Nil = freevars
+ // cq"$name: $treeType => $SomeModule($name)" :: Nil
+ CaseDef(Bind(name, Typed(Ident(nme.WILDCARD), TypeTree(treeType))),
+ EmptyTree, Apply(Ident(SomeModule), List(Ident(name)))) :: Nil
+ } else {
+ val (succ, fail) = freevars match {
+ case Nil =>
+ // (q"true", q"false")
+ (Literal(Constant(true)), Literal(Constant(false)))
+ case head :: Nil =>
+ // (q"$SomeModule($head)", q"$NoneModule")
+ (Apply(Ident(SomeModule), List(head)), Ident(NoneModule))
+ case vars =>
+ // (q"$SomeModule((..$vars))", q"$NoneModule")
+ (Apply(Ident(SomeModule), List(SyntacticTuple(vars))), Ident(NoneModule))
+ }
+ val guard =
+ nameMap.collect { case (_, nameset) if nameset.size >= 2 =>
+ nameset.toList.sliding(2).map { case List(n1, n2) =>
+ // q"$n1 == $n2"
+ Apply(Select(Ident(n1), nme.EQ), List(Ident(n2)))
+ }
+ }.flatten.reduceOption[Tree] { (l, r) =>
+ // q"$l && $r"
+ Apply(Select(l, nme.ZAND), List(r))
+ }.getOrElse { EmptyTree }
+ // cq"$tree if $guard => $succ" :: cq"_ => $fail" :: Nil
+ CaseDef(tree, guard, succ) :: CaseDef(Ident(nme.WILDCARD), EmptyTree, fail) :: Nil
+ }
+ // q"new { def unapply(tree: $AnyClass) = { ..${unlifters.preamble()}; tree match { case ..$cases } } }.unapply(..$args)"
+ Apply(
+ Select(
+ SyntacticNew(Nil, Nil, noSelfType, List(
+ DefDef(NoMods, nme.unapply, Nil, List(List(ValDef(NoMods, nme.tree, TypeTree(AnyClass.toType), EmptyTree))), TypeTree(),
+ SyntacticBlock(unlifters.preamble() :+ Match(Ident(nme.tree), cases))))),
+ nme.unapply),
+ args)
+ }
+
+ def reifyFillingHoles(tree: Tree): Tree = {
+ val reified = reifyTree(tree)
+ holeMap.unused.foreach { hole =>
+ c.abort(holeMap(hole).pos, s"Don't know how to $action here")
+ }
+ wrap(reified)
+ }
+
+ override def reifyTree(tree: Tree): Tree =
+ reifyTreePlaceholder(tree) orElse
+ reifyTreeSyntactically(tree)
+
+ def reifyTreePlaceholder(tree: Tree): Tree = tree match {
+ case Placeholder(hole: ApplyHole) if hole.tpe <:< treeType => hole.tree
+ case Placeholder(Hole(tree, NoDot)) if isReifyingPatterns => tree
+ case Placeholder(hole @ Hole(_, card @ Dot())) => c.abort(hole.pos, s"Can't $action with $card here")
+ case TuplePlaceholder(args) => reifyTuple(args)
+ case TupleTypePlaceholder(args) => reifyTupleType(args)
+ case FunctionTypePlaceholder(argtpes, restpe) => reifyFunctionType(argtpes, restpe)
+ case CasePlaceholder(hole) => hole.tree
+ case RefineStatPlaceholder(hole) => reifyRefineStat(hole)
+ case EarlyDefPlaceholder(hole) => reifyEarlyDef(hole)
+ case PackageStatPlaceholder(hole) => reifyPackageStat(hole)
+ // for enumerators are checked not during splicing but during
+ // desugaring of the for loop in SyntacticFor & SyntacticForYield
+ case ForEnumPlaceholder(hole) => hole.tree
+ case _ => EmptyTree
+ }
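+
+ // For illustration (x and xs are hypothetical holes, assumed to be a Tree and a List[Tree]):
+ // the `$x` in q"f($x)" matches the ApplyHole case above and is replaced by the hole's tree,
+ // while a dotted hole outside of a list position, e.g. a bare q"..$xs", falls into the
+ // Dot() case and is rejected with the "Can't $action with $card here" error.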
+
+ override def reifyTreeSyntactically(tree: Tree) = tree match {
+ case RefTree(qual, SymbolPlaceholder(Hole(tree, _))) if isReifyingExpressions =>
+ mirrorBuildCall(nme.RefTree, reify(qual), tree)
+ case This(SymbolPlaceholder(Hole(tree, _))) if isReifyingExpressions =>
+ mirrorCall(nme.This, tree)
+ case SyntacticTraitDef(mods, name, tparams, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticTraitDef, mods, name, tparams, earlyDefs, parents, selfdef, body)
+ case SyntacticClassDef(mods, name, tparams, constrmods, vparamss, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticClassDef, mods, name, tparams, constrmods, vparamss,
+ earlyDefs, parents, selfdef, body)
+ case SyntacticPackageObjectDef(name, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticPackageObjectDef, name, earlyDefs, parents, selfdef, body)
+ case SyntacticObjectDef(mods, name, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticObjectDef, mods, name, earlyDefs, parents, selfdef, body)
+ case SyntacticNew(earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticNew, earlyDefs, parents, selfdef, body)
+ case SyntacticDefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
+ case SyntacticValDef(mods, name, tpt, rhs) if tree != noSelfType =>
+ reifyBuildCall(nme.SyntacticValDef, mods, name, tpt, rhs)
+ case SyntacticVarDef(mods, name, tpt, rhs) =>
+ reifyBuildCall(nme.SyntacticVarDef, mods, name, tpt, rhs)
+ case SyntacticValFrom(pat, rhs) =>
+ reifyBuildCall(nme.SyntacticValFrom, pat, rhs)
+ case SyntacticValEq(pat, rhs) =>
+ reifyBuildCall(nme.SyntacticValEq, pat, rhs)
+ case SyntacticFilter(cond) =>
+ reifyBuildCall(nme.SyntacticFilter, cond)
+ case SyntacticFor(enums, body) =>
+ reifyBuildCall(nme.SyntacticFor, enums, body)
+ case SyntacticForYield(enums, body) =>
+ reifyBuildCall(nme.SyntacticForYield, enums, body)
+ case SyntacticAssign(lhs, rhs) =>
+ reifyBuildCall(nme.SyntacticAssign, lhs, rhs)
+ case SyntacticApplied(fun, argss) if argss.nonEmpty =>
+ reifyBuildCall(nme.SyntacticApplied, fun, argss)
+ case SyntacticTypeApplied(fun, targs) if targs.nonEmpty =>
+ reifyBuildCall(nme.SyntacticTypeApplied, fun, targs)
+ case SyntacticFunction(args, body) =>
+ reifyBuildCall(nme.SyntacticFunction, args, body)
+ case SyntacticIdent(name, isBackquoted) =>
+ reifyBuildCall(nme.SyntacticIdent, name, isBackquoted)
+ case Block(Nil, Placeholder(Hole(tree, DotDot))) =>
+ mirrorBuildCall(nme.SyntacticBlock, tree)
+ case Block(Nil, other) =>
+ reifyTree(other)
+ case Block(stats, last) =>
+ reifyBuildCall(nme.SyntacticBlock, stats :+ last)
+ case Try(block, catches, finalizer) =>
+ reifyBuildCall(nme.SyntacticTry, block, catches, finalizer)
+ case Match(selector, cases) =>
+ reifyBuildCall(nme.SyntacticMatch, selector, cases)
+ // the parser emits trees with the scala package symbol attached to ensure
+ // that some names hygienically point to various scala package
+ // members; we need to preserve this symbol so that the trees
+ // produced by quasiquotes remain correct
+ case Select(id @ Ident(nme.scala_), name) if id.symbol == ScalaPackage =>
+ reifyBuildCall(nme.ScalaDot, name)
+ case _ =>
+ super.reifyTreeSyntactically(tree)
+ }
+
+ override def reifyName(name: Name): Tree = name match {
+ case Placeholder(hole: ApplyHole) =>
+ if (!(hole.tpe <:< nameType)) c.abort(hole.pos, s"$nameType expected but ${hole.tpe} found")
+ hole.tree
+ case Placeholder(hole: UnapplyHole) => hole.treeNoUnlift
+ case FreshName(prefix) if prefix != nme.QUASIQUOTE_NAME_PREFIX =>
+ def fresh() = c.freshName[TermName](nme.QUASIQUOTE_NAME_PREFIX)
+ def introduceName() = { val n = fresh(); nameMap(name) += n; n}
+ def result(n: Name) = if (isReifyingExpressions) Ident(n) else Bind(n, Ident(nme.WILDCARD))
+ if (isReifyingPatterns) result(introduceName())
+ else result(nameMap.get(name).map { _.head }.getOrElse { introduceName() })
+ case _ =>
+ super.reifyName(name)
+ }
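+
+ // A sketch of the FreshName case above, assuming the parsed tree mentions a
+ // parser-generated fresh name (say, with prefix "x$") twice:
+ // - in expression mode only the first occurrence registers a generated name in nameMap,
+ // both occurrences reify to the same Ident, and wrap() later defines that name via
+ // u.build.freshTermName("x$");
+ // - in pattern mode every occurrence introduces its own binder (Bind(n, Ident(WILDCARD)))
+ // and wrap() adds a guard checking that all binders for the same name are equal.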
+
+ def reifyTuple(args: List[Tree]) = args match {
+ case Nil => reify(Literal(Constant(())))
+ case List(hole @ Placeholder(Hole(_, NoDot))) => reify(hole)
+ case List(Placeholder(_)) => reifyBuildCall(nme.SyntacticTuple, args)
+ // if we have a single-element tuple without any
+ // cardinality annotations, it is just an expression
+ // wrapped in parentheses
+ case List(other) => reify(other)
+ case _ => reifyBuildCall(nme.SyntacticTuple, args)
+ }
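+
+ // For illustration (a, b and xs are hypothetical holes of types Tree, Tree and List[Tree]):
+ // q"(${a})" reifies the same way as q"$a", since parentheses around a single element are
+ // just grouping, whereas q"($a, $b)" and q"(..$xs)" both go through SyntacticTuple.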
+
+ def reifyTupleType(args: List[Tree]) = args match {
+ case Nil => reify(Select(Ident(nme.scala_), tpnme.Unit))
+ case List(hole @ Placeholder(Hole(_, NoDot))) => reify(hole)
+ case List(Placeholder(_)) => reifyBuildCall(nme.SyntacticTupleType, args)
+ case List(other) => reify(other)
+ case _ => reifyBuildCall(nme.SyntacticTupleType, args)
+ }
+
+ def reifyFunctionType(argtpes: List[Tree], restpe: Tree) =
+ reifyBuildCall(nme.SyntacticFunctionType, argtpes, restpe)
+
+ def reifyConstructionCheck(name: TermName, hole: Hole) = hole match {
+ case _: UnapplyHole => hole.tree
+ case _: ApplyHole => mirrorBuildCall(name, hole.tree)
+ }
+
+ def reifyRefineStat(hole: Hole) = reifyConstructionCheck(nme.mkRefineStat, hole)
+
+ def reifyEarlyDef(hole: Hole) = reifyConstructionCheck(nme.mkEarlyDef, hole)
+
+ def reifyAnnotation(hole: Hole) = reifyConstructionCheck(nme.mkAnnotation, hole)
+
+ def reifyPackageStat(hole: Hole) = reifyConstructionCheck(nme.mkPackageStat, hole)
+
+ /** Splits a list into groups of consecutive elements that are considered
+ * similar by the given function.
+ *
+ * Example:
+ *
+ * > group(List(1, 1, 0, 0, 1, 0)) { _ == _ }
+ * List(List(1, 1), List(0, 0), List(1), List(0))
+ *
+ */
+ def group[T](lst: List[T])(similar: (T, T) => Boolean) = lst.foldLeft[List[List[T]]](List()) {
+ case (Nil, el) => List(List(el))
+ case (ll :+ (last @ (lastinit :+ lastel)), el) if similar(lastel, el) => ll :+ (last :+ el)
+ case (ll, el) => ll :+ List(el)
+ }
+
+ /** Reifies a list, filling all the valid holes.
+ *
+ * Reification of a non-trivial list is done in two steps:
+ *
+ * 1. split the list into groups where every placeholder is always
+ * put in a group of its own and all subsequent non-holes are
+ * grouped together; an element is considered to be a placeholder if it's
+ * in the domain of the fill function;
+ *
+ * 2. fold the groups into a sequence of lists concatenated with ++, using
+ * fill reification for holes and fallback reification for non-holes.
+ *
+ * Example:
+ *
+ * reifyMultiCardinalityList(lst) {
+ * // first we define patterns that extract high-cardinality holes (currently ..)
+ * case Placeholder(IterableType(_, _)) => tree
+ * } {
+ * // in the end we define how single elements are reified, typically with default reify call
+ * reify(_)
+ * }
+ *
+ * Sample execution of the example list reifier above:
+ *
+ * > val lst = List(foo, bar, qq$f3948f9s$1)
+ * > reifyMultiCardinalityList(lst) { ... } { ... }
+ * q"List($foo, $bar) ++ ${holeMap(qq$f3948f9s$1).tree}"
+ */
+ def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree
+
+ /** Reifies an arbitrary list, filling ..$x and ...$y holes when they are put
+ * in the correct position. Falls back to regular reification for elements that
+ * are not high-cardinality.
+ */
+ override def reifyList(xs: List[Any]): Tree = reifyMultiCardinalityList(xs) {
+ case Placeholder(Hole(tree, DotDot)) => tree
+ case CasePlaceholder(Hole(tree, DotDot)) => tree
+ case RefineStatPlaceholder(h @ Hole(_, DotDot)) => reifyRefineStat(h)
+ case EarlyDefPlaceholder(h @ Hole(_, DotDot)) => reifyEarlyDef(h)
+ case PackageStatPlaceholder(h @ Hole(_, DotDot)) => reifyPackageStat(h)
+ case ForEnumPlaceholder(Hole(tree, DotDot)) => tree
+ case List(Placeholder(Hole(tree, DotDotDot))) => tree
+ } {
+ reify(_)
+ }
+
+ def reifyAnnotList(annots: List[Tree]): Tree = reifyMultiCardinalityList(annots) {
+ case AnnotPlaceholder(h @ Hole(_, DotDot)) => reifyAnnotation(h)
+ } {
+ case AnnotPlaceholder(h: ApplyHole) if h.tpe <:< treeType => reifyAnnotation(h)
+ case AnnotPlaceholder(h: UnapplyHole) if h.cardinality == NoDot => reifyAnnotation(h)
+ case other => reify(other)
+ }
+
+ // These are explicit flags except those that are used
+ // to overload the same tree for two different concepts:
+ // - MUTABLE that is used to override ValDef for vars
+ // - TRAIT that is used to override ClassDef for traits
+ val nonOverloadedExplicitFlags = ExplicitFlags & ~MUTABLE & ~TRAIT
+
+ def ensureNoExplicitFlags(m: Modifiers, pos: Position) = {
+ // Traits automatically get the ABSTRACT flag assigned to
+ // them, so in that case it's not an explicit flag
+ val flags = if (m.isTrait) m.flags & ~ABSTRACT else m.flags
+ if ((flags & nonOverloadedExplicitFlags) != 0L)
+ c.abort(pos, s"Can't $action modifiers together with flags, consider merging flags into modifiers")
+ }
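+
+ // A sketch of what this guards against, assuming a spliced mods: Modifiers: a definition
+ // that combines the $mods hole with an explicit modifier keyword (e.g. `private`) carries
+ // both a modifiers placeholder and a non-overloaded explicit flag, so the quasiquote is
+ // rejected with "Can't splice modifiers together with flags ..." (or "extract" in patterns).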
+
+ override def mirrorSelect(name: String): Tree =
+ Select(universe, TermName(name))
+
+ override def mirrorCall(name: TermName, args: Tree*): Tree =
+ Apply(Select(universe, name), args.toList)
+
+ override def mirrorBuildCall(name: TermName, args: Tree*): Tree =
+ Apply(Select(Select(universe, nme.build), name), args.toList)
+
+ override def scalaFactoryCall(name: String, args: Tree*): Tree =
+ call("scala." + name, args: _*)
+ }
+
+ class ApplyReifier extends Reifier(isReifyingExpressions = true) {
+ def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree =
+ if (xs.isEmpty) mkList(Nil)
+ else {
+ def reifyGroup(group: List[T]): Tree = group match {
+ case List(elem) if fill.isDefinedAt(elem) => fill(elem)
+ case elems => mkList(elems.map(fallback))
+ }
+ val head :: tail = group(xs) { (a, b) => !fill.isDefinedAt(a) && !fill.isDefinedAt(b) }
+ tail.foldLeft[Tree](reifyGroup(head)) { (tree, lst) => Apply(Select(tree, nme.PLUSPLUS), List(reifyGroup(lst))) }
+ }
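+
+ // Worked example, assuming hypothetical holes a, b: Tree and xs: List[Tree]: reifying the
+ // argument list of q"f($a, ..$xs, $b)" splits it into the groups List(a), xs, List(b) and
+ // folds them into the equivalent of q"List($a) ++ $xs ++ List($b)".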
+
+ override def reifyModifiers(m: Modifiers) =
+ if (m == NoMods) super.reifyModifiers(m)
+ else {
+ val (modsPlaceholders, annots) = m.annotations.partition {
+ case ModsPlaceholder(_) => true
+ case _ => false
+ }
+ val (mods, flags) = modsPlaceholders.map {
+ case ModsPlaceholder(hole: ApplyHole) => hole
+ }.partition { hole =>
+ if (hole.tpe <:< modsType) true
+ else if (hole.tpe <:< flagsType) false
+ else c.abort(hole.pos, s"$flagsType or $modsType expected but ${hole.tpe} found")
+ }
+ mods match {
+ case hole :: Nil =>
+ if (flags.nonEmpty) c.abort(flags(0).pos, "Can't splice flags together with modifiers, consider merging flags into modifiers")
+ if (annots.nonEmpty) c.abort(hole.pos, "Can't splice modifiers together with annotations, consider merging annotations into modifiers")
+ ensureNoExplicitFlags(m, hole.pos)
+ hole.tree
+ case _ :: hole :: Nil =>
+ c.abort(hole.pos, "Can't splice multiple modifiers, consider merging them into a single modifiers instance")
+ case _ =>
+ val baseFlags = reifyFlags(m.flags)
+ val reifiedFlags = flags.foldLeft[Tree](baseFlags) { case (flag, hole) => Apply(Select(flag, nme.OR), List(hole.tree)) }
+ mirrorFactoryCall(nme.Modifiers, reifiedFlags, reify(m.privateWithin), reifyAnnotList(annots))
+ }
+ }
+
+ }
+ class UnapplyReifier extends Reifier(isReifyingExpressions = false) {
+ def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree = xs match {
+ case init :+ last if fill.isDefinedAt(last) =>
+ init.foldRight[Tree](fill(last)) { (el, rest) =>
+ val cons = Select(Select(Select(Ident(nme.scala_), nme.collection), nme.immutable), nme.CONS)
+ Apply(cons, List(fallback(el), rest))
+ }
+ case _ =>
+ mkList(xs.map(fallback))
+ }
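+
+ // Worked example for the pattern side, assuming binders a and b plus a trailing ..$xs hole:
+ // the list is reified as the pattern a :: b :: xs (built from scala.collection.immutable.::),
+ // so xs matches whatever remains of the list; without a trailing dotted hole a plain
+ // List(a, b) pattern is produced instead.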
+
+ override def reifyModifiers(m: Modifiers) =
+ if (m == NoMods) super.reifyModifiers(m)
+ else {
+ val mods = m.annotations.collect { case ModsPlaceholder(hole: UnapplyHole) => hole }
+ mods match {
+ case hole :: Nil =>
+ if (m.annotations.length != 1) c.abort(hole.pos, "Can't extract modifiers together with annotations, consider extracting just modifiers")
+ ensureNoExplicitFlags(m, hole.pos)
+ hole.treeNoUnlift
+ case _ :: hole :: _ =>
+ c.abort(hole.pos, "Can't extract multiple modifiers together, consider extracting a single modifiers instance")
+ case Nil =>
+ mirrorFactoryCall(nme.Modifiers, reifyFlags(m.flags), reify(m.privateWithin), reifyAnnotList(m.annotations))
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index 21137aca56..3cfc1eb2a1 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -6,13 +6,14 @@
package scala.tools
package util
-import java.lang.reflect.{ GenericSignatureFormatError, Method, Constructor }
-import java.lang.{ ClassLoader => JavaClassLoader }
import scala.tools.nsc.util.ScalaClassLoader
-import java.io.{ InputStream, PrintWriter, ByteArrayInputStream, FileNotFoundException }
-import scala.tools.nsc.io.File
-import Javap._
-import scala.language.reflectiveCalls
+import java.io.PrintWriter
+
+trait JpResult {
+ def isError: Boolean
+ def value: Any
+ def show(): Unit
+}
trait Javap {
def loader: ScalaClassLoader
@@ -29,147 +30,3 @@ object NoJavap extends Javap {
def tryFile(path: String): Option[Array[Byte]] = None
def tryClass(path: String): Array[Byte] = Array()
}
-
-class JavapClass(
- val loader: ScalaClassLoader = ScalaClassLoader.appLoader,
- val printWriter: PrintWriter = new PrintWriter(System.out, true)
-) extends Javap {
-
- lazy val parser = new JpOptions
-
- val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
- val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
- private def failed = (EnvClass eq null) || (PrinterClass eq null)
-
- val PrinterCtr = (
- if (failed) null
- else PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
- )
-
- def findBytes(path: String): Array[Byte] =
- tryFile(path) getOrElse tryClass(path)
-
- def apply(args: Seq[String]): List[JpResult] = {
- if (failed) List(new JpError("Could not load javap tool. Check that JAVA_HOME is correct."))
- else args.toList filterNot (_ startsWith "-") map { path =>
- val bytes = findBytes(path)
- if (bytes.isEmpty) new JpError("Could not find class bytes for '%s'".format(path))
- else new JpSuccess(newPrinter(new ByteArrayInputStream(bytes), newEnv(args)))
- }
- }
-
- def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
- if (failed) null
- else PrinterCtr.newInstance(in, printWriter, env)
-
- def newEnv(opts: Seq[String]): FakeEnvironment = {
- lazy val env: FakeEnvironment = EnvClass.newInstance()
-
- if (failed) null
- else parser(opts) foreach { case (name, value) =>
- val field = EnvClass getDeclaredField name
- field setAccessible true
- field.set(env, value.asInstanceOf[AnyRef])
- }
-
- env
- }
-
- /** Assume the string is a path and try to find the classfile
- * it represents.
- */
- def tryFile(path: String): Option[Array[Byte]] = {
- val file = File(
- if (path.endsWith(".class")) path
- else path.replace('.', '/') + ".class"
- )
- if (!file.exists) None
- else try Some(file.toByteArray) catch { case x: Exception => None }
- }
- /** Assume the string is a fully qualified class name and try to
- * find the class object it represents.
- */
- def tryClass(path: String): Array[Byte] = {
- val extName = (
- if (path endsWith ".class") (path dropRight 6).replace('/', '.')
- else path
- )
- loader.classBytes(extName)
- }
-}
-
-object Javap {
- val Env = "sun.tools.javap.JavapEnvironment"
- val Printer = "sun.tools.javap.JavapPrinter"
-
- def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) =
- cl.tryToInitializeClass[AnyRef](Env).isDefined
-
- // "documentation"
- type FakeEnvironment = AnyRef
- type FakePrinter = AnyRef
-
- def apply(path: String): Unit = apply(Seq(path))
- def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show())
-
- sealed trait JpResult {
- type ResultType
- def isError: Boolean
- def value: ResultType
- def show(): Unit
- // todo
- // def header(): String
- // def fields(): List[String]
- // def methods(): List[String]
- // def signatures(): List[String]
- }
- class JpError(msg: String) extends JpResult {
- type ResultType = String
- def isError = true
- def value = msg
- def show() = println(msg)
- }
- class JpSuccess(val value: AnyRef) extends JpResult {
- type ResultType = AnyRef
- def isError = false
- def show() = value.asInstanceOf[{ def print(): Unit }].print()
- }
-
- class JpOptions {
- private object Access {
- final val PRIVATE = 0
- final val PROTECTED = 1
- final val PACKAGE = 2
- final val PUBLIC = 3
- }
- private val envActionMap: Map[String, (String, Any)] = {
- val map = Map(
- "-l" -> (("showLineAndLocal", true)),
- "-c" -> (("showDisassembled", true)),
- "-s" -> (("showInternalSigs", true)),
- "-verbose" -> (("showVerbose", true)),
- "-private" -> (("showAccess", Access.PRIVATE)),
- "-package" -> (("showAccess", Access.PACKAGE)),
- "-protected" -> (("showAccess", Access.PROTECTED)),
- "-public" -> (("showAccess", Access.PUBLIC)),
- "-all" -> (("showallAttr", true))
- )
- map ++ List(
- "-v" -> map("-verbose"),
- "-p" -> map("-private")
- )
- }
- def apply(opts: Seq[String]): Seq[(String, Any)] = {
- opts flatMap { opt =>
- envActionMap get opt match {
- case Some(pair) => List(pair)
- case _ =>
- val charOpts = opt.tail.toSeq map ("-" + _)
- if (charOpts forall (envActionMap contains _))
- charOpts map envActionMap
- else Nil
- }
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 0af1011bda..5526660509 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -3,37 +3,46 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package util
-import java.net.{ URL, MalformedURLException }
import scala.tools.reflect.WrappedProperties.AccessControl
-import nsc.{ Settings, GenericRunnerSettings }
-import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
-import nsc.io.{ File, Directory, Path, AbstractFile }
+import scala.tools.nsc.{ Settings }
+import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
+import scala.reflect.io.{ File, Directory, Path, AbstractFile }
+import scala.reflect.runtime.ReflectionUtils
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
import scala.language.postfixOps
// Loosely based on the draft specification at:
-// https://wiki.scala-lang.org/display/SW/Classpath
+// https://wiki.scala-lang.org/display/SIW/Classpath
object PathResolver {
- // Imports property/environment functions which suppress
- // security exceptions.
+ // Imports property/environment functions which suppress security exceptions.
import AccessControl._
-
- def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse ""
-
- /** Map all classpath elements to absolute paths and reconstruct the classpath.
- */
- def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path)
+ import scala.compat.Platform.EOL
+
+ implicit class MkLines(val t: TraversableOnce[_]) extends AnyVal {
+ def mkLines: String = t.mkString("", EOL, EOL)
+ def mkLines(header: String, indented: Boolean = false, embraced: Boolean = false): String = {
+ val space = "\u0020"
+ val sep = if (indented) EOL + space * 2 else EOL
+ val (lbrace, rbrace) = if (embraced) (space + "{", EOL + "}") else ("", "")
+ t.mkString(header + lbrace + sep, sep, rbrace + EOL)
+ }
+ }
+ implicit class AsLines(val s: String) extends AnyVal {
+ // sm"""...""" could do this in one pass
+ def asLines = s.trim.stripMargin.lines.mkLines
+ }
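+
+ // For illustration (assuming EOL == "\n"):
+ // Seq("a", "b").mkLines("Files:", indented = true) == "Files:\n  a\n  b\n"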
/** pretty print class path */
def ppcp(s: String) = split(s) match {
case Nil => ""
case Seq(x) => x
- case xs => xs map ("\n" + _) mkString
+ case xs => xs.mkString(EOL, EOL, "")
}
/** Values found solely by inspecting environment or property variables.
@@ -46,8 +55,7 @@ object PathResolver {
/** Environment variables which java pays attention to so it
* seems we do as well.
*/
- def classPathEnv = envOrElse("CLASSPATH", "")
- def sourcePathEnv = envOrElse("SOURCEPATH", "")
+ def sourcePathEnv = envOrElse("SOURCEPATH", "")
def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath)
def javaExtDirs = propOrEmpty("java.ext.dirs")
@@ -58,20 +66,14 @@ object PathResolver {
def javaUserClassPath = propOrElse("java.class.path", "")
def useJavaClassPath = propOrFalse("scala.usejavacp")
- override def toString = """
+ override def toString = s"""
|object Environment {
- | scalaHome = %s (useJavaClassPath = %s)
- | javaBootClassPath = <%d chars>
- | javaExtDirs = %s
- | javaUserClassPath = %s
- | scalaExtDirs = %s
- |}""".trim.stripMargin.format(
- scalaHome, useJavaClassPath,
- javaBootClassPath.length,
- ppcp(javaExtDirs),
- ppcp(javaUserClassPath),
- ppcp(scalaExtDirs)
- )
+ | scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath)
+ | javaBootClassPath = <${javaBootClassPath.length} chars>
+ | javaExtDirs = ${ppcp(javaExtDirs)}
+ | javaUserClassPath = ${ppcp(javaUserClassPath)}
+ | scalaExtDirs = ${ppcp(scalaExtDirs)}
+ |}""".asLines
}
/** Default values based on those in Environment as interpreted according
@@ -86,7 +88,6 @@ object PathResolver {
def scalaHome = Environment.scalaHome
def scalaHomeDir = Directory(scalaHome)
- def scalaHomeExists = scalaHomeDir.isDirectory
def scalaLibDir = Directory(scalaHomeDir / "lib")
def scalaClassesDir = Directory(scalaHomeDir / "classes")
@@ -109,33 +110,67 @@ object PathResolver {
// classpath as set up by the runner (or regular classpath under -nobootcp)
// and then again here.
def scalaBootClassPath = ""
- // scalaLibDirFound match {
- // case Some(dir) if scalaHomeExists =>
- // val paths = ClassPath expandDir dir.path
- // join(paths: _*)
- // case _ => ""
- // }
-
def scalaExtDirs = Environment.scalaExtDirs
-
def scalaPluginPath = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path
- override def toString = """
+ override def toString = s"""
|object Defaults {
- | scalaHome = %s
- | javaBootClassPath = %s
- | scalaLibDirFound = %s
- | scalaLibFound = %s
- | scalaBootClassPath = %s
- | scalaPluginPath = %s
- |}""".trim.stripMargin.format(
- scalaHome,
- ppcp(javaBootClassPath),
- scalaLibDirFound, scalaLibFound,
- ppcp(scalaBootClassPath), ppcp(scalaPluginPath)
- )
+ | scalaHome = $scalaHome
+ | javaBootClassPath = ${ppcp(javaBootClassPath)}
+ | scalaLibDirFound = $scalaLibDirFound
+ | scalaLibFound = $scalaLibFound
+ | scalaBootClassPath = ${ppcp(scalaBootClassPath)}
+ | scalaPluginPath = ${ppcp(scalaPluginPath)}
+ |}""".asLines
+ }
+
+ /** Locations discovered by supplemental heuristics.
+ */
+ object SupplementalLocations {
+
+ /** The platform-specific support jar.
+ *
+ * Usually this is `tools.jar` in the jdk/lib directory of the platform distribution.
+ *
+ * The file location is determined by probing the lib directory under JDK_HOME or JAVA_HOME,
+ * if one of those environment variables is set, then the lib directory under java.home,
+ * and finally the lib directory under the parent of java.home. Or, as a last resort,
+ * search deeply under those locations (except for the parent of java.home, on the notion
+ * that if this is not a canonical installation, then that search would have little
+ * chance of succeeding).
+ */
+ def platformTools: Option[File] = {
+ val jarName = "tools.jar"
+ def jarPath(path: Path) = (path / "lib" / jarName).toFile
+ def jarAt(path: Path) = {
+ val f = jarPath(path)
+ if (f.isFile) Some(f) else None
+ }
+ val jdkDir = {
+ val d = Directory(jdkHome)
+ if (d.isDirectory) Some(d) else None
+ }
+ def deeply(dir: Directory) = dir.deepFiles find (_.name == jarName)
+
+ val home = envOrSome("JDK_HOME", envOrNone("JAVA_HOME")) map (p => Path(p))
+ val install = Some(Path(javaHome))
+
+ (home flatMap jarAt) orElse (install flatMap jarAt) orElse (install map (_.parent) flatMap jarAt) orElse
+ (jdkDir flatMap deeply)
+ }
+ override def toString = s"""
+ |object SupplementalLocations {
+ | platformTools = $platformTools
+ |}""".asLines
+ }
+
+ // used in PathResolver constructor
+ private object NoImplClassJavaContext extends JavaContext {
+ override def isValidName(name: String): Boolean =
+ !ReflectionUtils.scalacShouldntLoadClassfile(name)
}
+ // called from scalap
def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
val s = new Settings()
s.classpath.value = path
@@ -153,18 +188,21 @@ object PathResolver {
}
else {
val settings = new Settings()
- val rest = settings.processArguments(args.toList, false)._2
+ val rest = settings.processArguments(args.toList, processAll = false)._2
val pr = new PathResolver(settings)
println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
- pr.result.show
+ pr.result.show()
}
}
}
-import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp }
class PathResolver(settings: Settings, context: JavaContext) {
- def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext)
+ import PathResolver.{ Defaults, Environment, AsLines, MkLines, ppcp }
+
+ def this(settings: Settings) = this(settings,
+ if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext
+ else DefaultJavaContext)
private def cmdLineOrElse(name: String, alt: String) = {
(commandLineFor(name) match {
@@ -188,6 +226,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
object Calculated {
def scalaHome = Defaults.scalaHome
def useJavaClassPath = settings.usejavacp.value || Defaults.useJavaClassPath
+ def useManifestClassPath = settings.usemanifestcp.value
def javaBootClassPath = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath)
def javaExtDirs = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs)
def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
@@ -227,43 +266,37 @@ class PathResolver(settings: Settings, context: JavaContext) {
classesInPath(scalaBootClassPath), // 4. The Scala boot class path.
contentsOfDirsInPath(scalaExtDirs), // 5. The Scala extension class path.
classesInExpandedPath(userClassPath), // 6. The Scala application class path.
+ classesInManifest(useManifestClassPath), // 8. The Manifest class path.
sourcesInPath(sourcePath) // 7. The Scala source path.
)
lazy val containers = basis.flatten.distinct
- override def toString = """
+ override def toString = s"""
|object Calculated {
- | scalaHome = %s
- | javaBootClassPath = %s
- | javaExtDirs = %s
- | javaUserClassPath = %s
- | useJavaClassPath = %s
- | scalaBootClassPath = %s
- | scalaExtDirs = %s
- | userClassPath = %s
- | sourcePath = %s
- |}""".trim.stripMargin.format(
- scalaHome,
- ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath),
- useJavaClassPath,
- ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath),
- ppcp(sourcePath)
- )
+ | scalaHome = $scalaHome
+ | javaBootClassPath = ${ppcp(javaBootClassPath)}
+ | javaExtDirs = ${ppcp(javaExtDirs)}
+ | javaUserClassPath = ${ppcp(javaUserClassPath)}
+ | useJavaClassPath = $useJavaClassPath
+ | scalaBootClassPath = ${ppcp(scalaBootClassPath)}
+ | scalaExtDirs = ${ppcp(scalaExtDirs)}
+ | userClassPath = ${ppcp(userClassPath)}
+ | sourcePath = ${ppcp(sourcePath)}
+ |}""".asLines
}
def containers = Calculated.containers
lazy val result = {
val cp = new JavaClassPath(containers.toIndexedSeq, context)
- if (settings.Ylogcp.value) {
- Console.println("Classpath built from " + settings.toConciseString)
- Console.println("Defaults: " + PathResolver.Defaults)
- Console.println("Calculated: " + Calculated)
+ if (settings.Ylogcp) {
+ Console print f"Classpath built from ${settings.toConciseString} %n"
+ Console print s"Defaults: ${PathResolver.Defaults}"
+ Console print s"Calculated: $Calculated"
val xs = (Calculated.basis drop 2).flatten.distinct
- println("After java boot/extdirs classpath has %d entries:" format xs.size)
- xs foreach (x => println(" " + x))
+ Console print (xs mkLines (s"After java boot/extdirs classpath has ${xs.size} entries:", indented = true))
}
cp
}
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 1b06ce2ff2..1d39a59cf4 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.tools.util
+package scala
+package tools.util
import java.net.{ ServerSocket, SocketException, SocketTimeoutException }
import java.io.{ PrintWriter, BufferedReader }
@@ -16,8 +17,8 @@ trait CompileOutputCommon {
def verbose: Boolean
def info(msg: String) = if (verbose) echo(msg)
- def echo(msg: String) = {Console println msg; Console.flush}
- def warn(msg: String) = {Console.err println msg; Console.flush}
+ def echo(msg: String) = {Console println msg; Console.flush()}
+ def warn(msg: String) = {Console.err println msg; Console.flush()}
def fatal(msg: String) = { warn(msg) ; sys.exit(1) }
}
diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala
index d208a9f9c2..3c203e1cf2 100644
--- a/src/compiler/scala/tools/util/VerifyClass.scala
+++ b/src/compiler/scala/tools/util/VerifyClass.scala
@@ -3,7 +3,7 @@ package scala.tools.util
import scala.tools.nsc.io._
import java.net.URLClassLoader
import scala.collection.JavaConverters._
-
+import scala.language.postfixOps
object VerifyClass {